diff --git a/PKG-INFO b/PKG-INFO
index fbc9687..73613b8 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,7 +1,7 @@
Metadata-Version: 2.1
Name: tuneinsight
-Version: 0.9.2
-Summary: Diapason is the official Python SDK for the Tune Insight API. Version 0.6.2 targets the API v0.8.0.
+Version: 0.10.2
+Summary: Diapason is the official Python SDK for the Tune Insight API. The current version is compatible with the same version of the API.
License: Apache-2.0
Author: Tune Insight SA
Requires-Python: >=3.8,<3.12
diff --git a/pyproject.toml b/pyproject.toml
index 144e21c..6078321 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
name = "tuneinsight"
-version = "0.9.2"
-description = "Diapason is the official Python SDK for the Tune Insight API. Version 0.6.2 targets the API v0.8.0."
+version = "0.10.2"
+description = "Diapason is the official Python SDK for the Tune Insight API. The current version is compatible with the same version of the API."
authors = ["Tune Insight SA"]
license = "Apache-2.0"
include = [
@@ -12,6 +12,12 @@ include = [
]
readme = "src/tuneinsight/README.md"
+[tool.poetry-dynamic-versioning]
+enable = false
+style = "pep440"
+pattern = "^v(?P<base>\\d+\\.\\d+\\.\\d+)"
+format = "{base}"
+
[tool.poetry.dependencies]
python = ">= 3.8,<3.12"
python-keycloak = "^3.9.0"
@@ -38,8 +44,8 @@ pyvcf3 = "^1.0.3" # For GWAS .vcf file parsing
pytest = "^8.1.1"
[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
+requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning>=1.0.0,<2.0.0"]
+build-backend = "poetry_dynamic_versioning.backend"
[tool.black]
include = '\.pyi?$'
diff --git a/src/tuneinsight/api/api-checksum b/src/tuneinsight/api/api-checksum
index e8cd163..bcf6042 100644
--- a/src/tuneinsight/api/api-checksum
+++ b/src/tuneinsight/api/api-checksum
@@ -1 +1 @@
-1543c5968e0e568095cb5eeb44a3c7ab3179d6491f270932e268ab579c8d5948
+dcb50c2cb9eac6743b41006a2e73a209bea8c9697c526f31a608a0aa602ec4de
diff --git a/src/tuneinsight/api/sdk/api/api_admin/get_config.py b/src/tuneinsight/api/sdk/api/api_admin/get_config.py
new file mode 100644
index 0000000..37c1944
--- /dev/null
+++ b/src/tuneinsight/api/sdk/api/api_admin/get_config.py
@@ -0,0 +1,162 @@
+from http import HTTPStatus
+from typing import Any, Dict, Optional, Union
+
+import httpx
+
+from ... import errors
+from ...client import Client
+from ...models.error import Error
+from ...models.instance_configuration import InstanceConfiguration
+from ...types import Response
+
+
+def _get_kwargs(
+ *,
+ client: Client,
+) -> Dict[str, Any]:
+ url = "{}/config".format(client.base_url)
+
+ headers: Dict[str, str] = client.get_headers()
+ cookies: Dict[str, Any] = client.get_cookies()
+
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
+ return {
+ "method": "get",
+ "url": url,
+ "headers": headers,
+ "cookies": cookies,
+ "timeout": client.get_timeout(),
+ "proxies": proxies,
+ }
+
+
+def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Union[Error, InstanceConfiguration]]:
+ if response.status_code == HTTPStatus.OK:
+ response_200 = InstanceConfiguration.from_dict(response.json())
+
+ return response_200
+ if response.status_code == HTTPStatus.UNAUTHORIZED:
+ response_401 = Error.from_dict(response.json())
+
+ return response_401
+ if response.status_code == HTTPStatus.FORBIDDEN:
+ response_403 = Error.from_dict(response.json())
+
+ return response_403
+ if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR:
+ response_500 = Error.from_dict(response.json())
+
+ return response_500
+ if client.raise_on_unexpected_status:
+ raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}")
+ else:
+ return None
+
+
+def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[Error, InstanceConfiguration]]:
+ return Response(
+ status_code=HTTPStatus(response.status_code),
+ content=response.content,
+ headers=response.headers,
+ parsed=_parse_response(client=client, response=response),
+ )
+
+
+def sync_detailed(
+ *,
+ client: Client,
+) -> Response[Union[Error, InstanceConfiguration]]:
+ """get information about the instance's configuration (requires administrative privileges)
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, InstanceConfiguration]]
+ """
+
+ kwargs = _get_kwargs(
+ client=client,
+ )
+
+ response = httpx.request(
+ verify=client.verify_ssl,
+ **kwargs,
+ )
+
+ return _build_response(client=client, response=response)
+
+
+def sync(
+ *,
+ client: Client,
+) -> Optional[Union[Error, InstanceConfiguration]]:
+ """get information about the instance's configuration (requires administrative privileges)
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, InstanceConfiguration]]
+ """
+
+ return sync_detailed(
+ client=client,
+ ).parsed
+
+
+async def asyncio_detailed(
+ *,
+ client: Client,
+) -> Response[Union[Error, InstanceConfiguration]]:
+ """get information about the instance's configuration (requires administrative privileges)
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, InstanceConfiguration]]
+ """
+
+ kwargs = _get_kwargs(
+ client=client,
+ )
+
+ async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
+ response = await _client.request(**kwargs)
+
+ return _build_response(client=client, response=response)
+
+
+async def asyncio(
+ *,
+ client: Client,
+) -> Optional[Union[Error, InstanceConfiguration]]:
+ """get information about the instance's configuration (requires administrative privileges)
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, InstanceConfiguration]]
+ """
+
+ return (
+ await asyncio_detailed(
+ client=client,
+ )
+ ).parsed
diff --git a/src/tuneinsight/api/sdk/api/api_admin/get_settings.py b/src/tuneinsight/api/sdk/api/api_admin/get_settings.py
index 83cb3c2..4ceaee8 100644
--- a/src/tuneinsight/api/sdk/api/api_admin/get_settings.py
+++ b/src/tuneinsight/api/sdk/api/api_admin/get_settings.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_admin/patch_settings.py b/src/tuneinsight/api/sdk/api/api_admin/patch_settings.py
index e1568c4..1db2948 100644
--- a/src/tuneinsight/api/sdk/api/api_admin/patch_settings.py
+++ b/src/tuneinsight/api/sdk/api/api_admin/patch_settings.py
@@ -22,12 +22,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "patch",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_admin/post_storage.py b/src/tuneinsight/api/sdk/api/api_admin/post_storage.py
index d085072..70958e0 100644
--- a/src/tuneinsight/api/sdk/api/api_admin/post_storage.py
+++ b/src/tuneinsight/api/sdk/api/api_admin/post_storage.py
@@ -22,12 +22,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/compute.py b/src/tuneinsight/api/sdk/api/api_computations/compute.py
index 0992888..0a31ac6 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/compute.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/compute.py
@@ -143,12 +143,24 @@ def _get_kwargs(
else:
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/delete_comp_bookmark.py b/src/tuneinsight/api/sdk/api/api_computations/delete_comp_bookmark.py
index e5340f5..8bea936 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/delete_comp_bookmark.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/delete_comp_bookmark.py
@@ -18,12 +18,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/delete_computation.py b/src/tuneinsight/api/sdk/api/api_computations/delete_computation.py
index c7e3434..d136110 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/delete_computation.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/delete_computation.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/delete_computations.py b/src/tuneinsight/api/sdk/api/api_computations/delete_computations.py
index ede453d..01be3dc 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/delete_computations.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/delete_computations.py
@@ -18,12 +18,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/delete_result.py b/src/tuneinsight/api/sdk/api/api_computations/delete_result.py
index 2a6afda..acc18dd 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/delete_result.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/delete_result.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/documentation.py b/src/tuneinsight/api/sdk/api/api_computations/documentation.py
index eadbddc..74740e8 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/documentation.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/documentation.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_comp_bookmark_list.py b/src/tuneinsight/api/sdk/api/api_computations/get_comp_bookmark_list.py
index 6df6c8b..a859ee3 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/get_comp_bookmark_list.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/get_comp_bookmark_list.py
@@ -38,12 +38,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_computation.py b/src/tuneinsight/api/sdk/api/api_computations/get_computation.py
index ddb0b55..7a3b213 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/get_computation.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/get_computation.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_computation_list.py b/src/tuneinsight/api/sdk/api/api_computations/get_computation_list.py
index 479e9de..2f0bd44 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/get_computation_list.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/get_computation_list.py
@@ -56,12 +56,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_params.py b/src/tuneinsight/api/sdk/api/api_computations/get_params.py
index 6cc6bf5..4d9bfad 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/get_params.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/get_params.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_result.py b/src/tuneinsight/api/sdk/api/api_computations/get_result.py
index 8ac2338..55b9456 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/get_result.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/get_result.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_result_list.py b/src/tuneinsight/api/sdk/api/api_computations/get_result_list.py
index 3cbb6fb..cd553e9 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/get_result_list.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/get_result_list.py
@@ -66,12 +66,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_result_tag_list.py b/src/tuneinsight/api/sdk/api/api_computations/get_result_tag_list.py
index 70922f0..ee7c9dd 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/get_result_tag_list.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/get_result_tag_list.py
@@ -27,12 +27,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/init_session.py b/src/tuneinsight/api/sdk/api/api_computations/init_session.py
index 786429d..3eec2b5 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/init_session.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/init_session.py
@@ -18,12 +18,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/patch_result.py b/src/tuneinsight/api/sdk/api/api_computations/patch_result.py
index 5114a36..c141a4d 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/patch_result.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/patch_result.py
@@ -24,12 +24,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "patch",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/put_comp_bookmark.py b/src/tuneinsight/api/sdk/api/api_computations/put_comp_bookmark.py
index 4739e4e..62a8b51 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/put_comp_bookmark.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/put_comp_bookmark.py
@@ -18,12 +18,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "put",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_computations/release_result.py b/src/tuneinsight/api/sdk/api/api_computations/release_result.py
index 20dca10..a63427c 100644
--- a/src/tuneinsight/api/sdk/api/api_computations/release_result.py
+++ b/src/tuneinsight/api/sdk/api/api_computations/release_result.py
@@ -24,12 +24,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datagen/post_mock_dataset.py b/src/tuneinsight/api/sdk/api/api_datagen/post_mock_dataset.py
index 43a1300..35c4090 100644
--- a/src/tuneinsight/api/sdk/api/api_datagen/post_mock_dataset.py
+++ b/src/tuneinsight/api/sdk/api/api_datagen/post_mock_dataset.py
@@ -46,12 +46,24 @@ def _get_kwargs(
json_json_body = json_body
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datagen/post_synthetic_dataset.py b/src/tuneinsight/api/sdk/api/api_datagen/post_synthetic_dataset.py
index 4c411e4..70eafd2 100644
--- a/src/tuneinsight/api/sdk/api/api_datagen/post_synthetic_dataset.py
+++ b/src/tuneinsight/api/sdk/api/api_datagen/post_synthetic_dataset.py
@@ -18,6 +18,7 @@ def _get_kwargs(
query: Union[Unset, None, str] = UNSET,
table_name: Union[Unset, None, str] = UNSET,
num_rows: Union[Unset, None, int] = UNSET,
+ dp_epsilon: Union[Unset, None, float] = UNSET,
) -> Dict[str, Any]:
url = "{}/synthetic/dataset".format(client.base_url)
@@ -35,14 +36,28 @@ def _get_kwargs(
params["numRows"] = num_rows
+ params["dpEpsilon"] = dp_epsilon
+
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
@@ -87,6 +102,7 @@ def sync_detailed(
query: Union[Unset, None, str] = UNSET,
table_name: Union[Unset, None, str] = UNSET,
num_rows: Union[Unset, None, int] = UNSET,
+ dp_epsilon: Union[Unset, None, float] = UNSET,
) -> Response[Union[DataSource, Error]]:
"""Request the creation of a synthetic dataset from a real dataset.
@@ -96,6 +112,7 @@ def sync_detailed(
query (Union[Unset, None, str]):
table_name (Union[Unset, None, str]):
num_rows (Union[Unset, None, int]):
+ dp_epsilon (Union[Unset, None, float]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -112,6 +129,7 @@ def sync_detailed(
query=query,
table_name=table_name,
num_rows=num_rows,
+ dp_epsilon=dp_epsilon,
)
response = httpx.request(
@@ -130,6 +148,7 @@ def sync(
query: Union[Unset, None, str] = UNSET,
table_name: Union[Unset, None, str] = UNSET,
num_rows: Union[Unset, None, int] = UNSET,
+ dp_epsilon: Union[Unset, None, float] = UNSET,
) -> Optional[Union[DataSource, Error]]:
"""Request the creation of a synthetic dataset from a real dataset.
@@ -139,6 +158,7 @@ def sync(
query (Union[Unset, None, str]):
table_name (Union[Unset, None, str]):
num_rows (Union[Unset, None, int]):
+ dp_epsilon (Union[Unset, None, float]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -155,6 +175,7 @@ def sync(
query=query,
table_name=table_name,
num_rows=num_rows,
+ dp_epsilon=dp_epsilon,
).parsed
@@ -166,6 +187,7 @@ async def asyncio_detailed(
query: Union[Unset, None, str] = UNSET,
table_name: Union[Unset, None, str] = UNSET,
num_rows: Union[Unset, None, int] = UNSET,
+ dp_epsilon: Union[Unset, None, float] = UNSET,
) -> Response[Union[DataSource, Error]]:
"""Request the creation of a synthetic dataset from a real dataset.
@@ -175,6 +197,7 @@ async def asyncio_detailed(
query (Union[Unset, None, str]):
table_name (Union[Unset, None, str]):
num_rows (Union[Unset, None, int]):
+ dp_epsilon (Union[Unset, None, float]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -191,6 +214,7 @@ async def asyncio_detailed(
query=query,
table_name=table_name,
num_rows=num_rows,
+ dp_epsilon=dp_epsilon,
)
async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
@@ -207,6 +231,7 @@ async def asyncio(
query: Union[Unset, None, str] = UNSET,
table_name: Union[Unset, None, str] = UNSET,
num_rows: Union[Unset, None, int] = UNSET,
+ dp_epsilon: Union[Unset, None, float] = UNSET,
) -> Optional[Union[DataSource, Error]]:
"""Request the creation of a synthetic dataset from a real dataset.
@@ -216,6 +241,7 @@ async def asyncio(
query (Union[Unset, None, str]):
table_name (Union[Unset, None, str]):
num_rows (Union[Unset, None, int]):
+ dp_epsilon (Union[Unset, None, float]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -233,5 +259,6 @@ async def asyncio(
query=query,
table_name=table_name,
num_rows=num_rows,
+ dp_epsilon=dp_epsilon,
)
).parsed
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/delete_data_object.py b/src/tuneinsight/api/sdk/api/api_dataobject/delete_data_object.py
index a864eb8..db30016 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/delete_data_object.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/delete_data_object.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/delete_data_objects.py b/src/tuneinsight/api/sdk/api/api_dataobject/delete_data_objects.py
index 3a08e39..c334ead 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/delete_data_objects.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/delete_data_objects.py
@@ -31,12 +31,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object.py b/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object.py
index 9e82ee6..9cd6a72 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_data.py b/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_data.py
index 2fd803f..e4f1c0a 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_data.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_data.py
@@ -25,12 +25,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_list.py b/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_list.py
index d3d27a8..2c57d87 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_list.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_list.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_raw_data.py b/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_raw_data.py
index 1c28c5a..c7d75ea 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_raw_data.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/get_data_object_raw_data.py
@@ -26,12 +26,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/get_shared_data_object_data.py b/src/tuneinsight/api/sdk/api/api_dataobject/get_shared_data_object_data.py
index ce56d09..a3dcb38 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/get_shared_data_object_data.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/get_shared_data_object_data.py
@@ -22,12 +22,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/post_data_object.py b/src/tuneinsight/api/sdk/api/api_dataobject/post_data_object.py
index 36ee250..88be470 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/post_data_object.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/post_data_object.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_dataobject/put_data_object_data.py b/src/tuneinsight/api/sdk/api/api_dataobject/put_data_object_data.py
index 0c90cde..1aea47d 100644
--- a/src/tuneinsight/api/sdk/api/api_dataobject/put_data_object_data.py
+++ b/src/tuneinsight/api/sdk/api/api_dataobject/put_data_object_data.py
@@ -24,12 +24,24 @@ def _get_kwargs(
multipart_multipart_data = multipart_data.to_multipart()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "put",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"files": multipart_multipart_data,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/delete_data_source.py b/src/tuneinsight/api/sdk/api/api_datasource/delete_data_source.py
index 1a7b457..fc59b62 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/delete_data_source.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/delete_data_source.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/delete_data_sources.py b/src/tuneinsight/api/sdk/api/api_datasource/delete_data_sources.py
index 280dd61..7955f60 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/delete_data_sources.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/delete_data_sources.py
@@ -18,12 +18,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/delete_local_data_selection.py b/src/tuneinsight/api/sdk/api/api_datasource/delete_local_data_selection.py
index b96a0cd..b6d2341 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/delete_local_data_selection.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/delete_local_data_selection.py
@@ -24,12 +24,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/get_data_source.py b/src/tuneinsight/api/sdk/api/api_datasource/get_data_source.py
index 6e44c35..d15cd92 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/get_data_source.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/get_data_source.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_list.py b/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_list.py
index 4f2add5..795a7d0 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_list.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_list.py
@@ -25,12 +25,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_projects.py b/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_projects.py
index fb51fb4..0040c2c 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_projects.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_projects.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_types.py b/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_types.py
index a4f4895..f271d10 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_types.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/get_data_source_types.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/get_local_data_selection.py b/src/tuneinsight/api/sdk/api/api_datasource/get_local_data_selection.py
index 082e43f..be1d247 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/get_local_data_selection.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/get_local_data_selection.py
@@ -34,12 +34,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/get_local_data_selection_list.py b/src/tuneinsight/api/sdk/api/api_datasource/get_local_data_selection_list.py
index d171cbe..872d15b 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/get_local_data_selection_list.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/get_local_data_selection_list.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/get_remote_local_data_selections.py b/src/tuneinsight/api/sdk/api/api_datasource/get_remote_local_data_selections.py
index e7677b7..a174f6d 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/get_remote_local_data_selections.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/get_remote_local_data_selections.py
@@ -15,6 +15,7 @@ def _get_kwargs(
remote: Union[Unset, None, bool] = UNSET,
local: Union[Unset, None, bool] = UNSET,
ignored_instance: Union[Unset, None, str] = UNSET,
+ timeout: Union[Unset, None, int] = UNSET,
) -> Dict[str, Any]:
url = "{}/datasources/selections/remote".format(client.base_url)
@@ -28,14 +29,28 @@ def _get_kwargs(
params["ignoredInstance"] = ignored_instance
+ params["timeout"] = timeout
+
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
@@ -71,6 +86,7 @@ def sync_detailed(
remote: Union[Unset, None, bool] = UNSET,
local: Union[Unset, None, bool] = UNSET,
ignored_instance: Union[Unset, None, str] = UNSET,
+ timeout: Union[Unset, None, int] = UNSET,
) -> Response[List["LocalDataSelection"]]:
"""retrieves all of the local data selections that are visible to the network
@@ -78,6 +94,7 @@ def sync_detailed(
remote (Union[Unset, None, bool]):
local (Union[Unset, None, bool]):
ignored_instance (Union[Unset, None, str]):
+ timeout (Union[Unset, None, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -92,6 +109,7 @@ def sync_detailed(
remote=remote,
local=local,
ignored_instance=ignored_instance,
+ timeout=timeout,
)
response = httpx.request(
@@ -108,6 +126,7 @@ def sync(
remote: Union[Unset, None, bool] = UNSET,
local: Union[Unset, None, bool] = UNSET,
ignored_instance: Union[Unset, None, str] = UNSET,
+ timeout: Union[Unset, None, int] = UNSET,
) -> Optional[List["LocalDataSelection"]]:
"""retrieves all of the local data selections that are visible to the network
@@ -115,6 +134,7 @@ def sync(
remote (Union[Unset, None, bool]):
local (Union[Unset, None, bool]):
ignored_instance (Union[Unset, None, str]):
+ timeout (Union[Unset, None, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -129,6 +149,7 @@ def sync(
remote=remote,
local=local,
ignored_instance=ignored_instance,
+ timeout=timeout,
).parsed
@@ -138,6 +159,7 @@ async def asyncio_detailed(
remote: Union[Unset, None, bool] = UNSET,
local: Union[Unset, None, bool] = UNSET,
ignored_instance: Union[Unset, None, str] = UNSET,
+ timeout: Union[Unset, None, int] = UNSET,
) -> Response[List["LocalDataSelection"]]:
"""retrieves all of the local data selections that are visible to the network
@@ -145,6 +167,7 @@ async def asyncio_detailed(
remote (Union[Unset, None, bool]):
local (Union[Unset, None, bool]):
ignored_instance (Union[Unset, None, str]):
+ timeout (Union[Unset, None, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -159,6 +182,7 @@ async def asyncio_detailed(
remote=remote,
local=local,
ignored_instance=ignored_instance,
+ timeout=timeout,
)
async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
@@ -173,6 +197,7 @@ async def asyncio(
remote: Union[Unset, None, bool] = UNSET,
local: Union[Unset, None, bool] = UNSET,
ignored_instance: Union[Unset, None, str] = UNSET,
+ timeout: Union[Unset, None, int] = UNSET,
) -> Optional[List["LocalDataSelection"]]:
"""retrieves all of the local data selections that are visible to the network
@@ -180,6 +205,7 @@ async def asyncio(
remote (Union[Unset, None, bool]):
local (Union[Unset, None, bool]):
ignored_instance (Union[Unset, None, str]):
+ timeout (Union[Unset, None, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -195,5 +221,6 @@ async def asyncio(
remote=remote,
local=local,
ignored_instance=ignored_instance,
+ timeout=timeout,
)
).parsed
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/patch_data_source.py b/src/tuneinsight/api/sdk/api/api_datasource/patch_data_source.py
new file mode 100644
index 0000000..56b319a
--- /dev/null
+++ b/src/tuneinsight/api/sdk/api/api_datasource/patch_data_source.py
@@ -0,0 +1,208 @@
+from http import HTTPStatus
+from typing import Any, Dict, Optional, Union
+
+import httpx
+
+from ... import errors
+from ...client import Client
+from ...models.data_source import DataSource
+from ...models.data_source_definition import DataSourceDefinition
+from ...models.error import Error
+from ...types import Response
+
+
+def _get_kwargs(
+ data_source_id: str,
+ *,
+ client: Client,
+ json_body: DataSourceDefinition,
+) -> Dict[str, Any]:
+ url = "{}/datasources/{dataSourceId}".format(client.base_url, dataSourceId=data_source_id)
+
+ headers: Dict[str, str] = client.get_headers()
+ cookies: Dict[str, Any] = client.get_cookies()
+
+ json_json_body = json_body.to_dict()
+
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
+ return {
+ "method": "patch",
+ "url": url,
+ "headers": headers,
+ "cookies": cookies,
+ "timeout": client.get_timeout(),
+ "proxies": proxies,
+ "json": json_json_body,
+ }
+
+
+def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Union[DataSource, Error]]:
+ if response.status_code == HTTPStatus.OK:
+ response_200 = DataSource.from_dict(response.json())
+
+ return response_200
+ if response.status_code == HTTPStatus.BAD_REQUEST:
+ response_400 = Error.from_dict(response.json())
+
+ return response_400
+ if response.status_code == HTTPStatus.FORBIDDEN:
+ response_403 = Error.from_dict(response.json())
+
+ return response_403
+ if response.status_code == HTTPStatus.CONFLICT:
+ response_409 = Error.from_dict(response.json())
+
+ return response_409
+ if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY:
+ response_422 = Error.from_dict(response.json())
+
+ return response_422
+ if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR:
+ response_500 = Error.from_dict(response.json())
+
+ return response_500
+ if client.raise_on_unexpected_status:
+ raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}")
+ else:
+ return None
+
+
+def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[DataSource, Error]]:
+ return Response(
+ status_code=HTTPStatus(response.status_code),
+ content=response.content,
+ headers=response.headers,
+ parsed=_parse_response(client=client, response=response),
+ )
+
+
+def sync_detailed(
+ data_source_id: str,
+ *,
+ client: Client,
+ json_body: DataSourceDefinition,
+) -> Response[Union[DataSource, Error]]:
+ """Edit a datasource.
+
+ Args:
+ data_source_id (str):
+ json_body (DataSourceDefinition): parameters used to create and modify a data source
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[DataSource, Error]]
+ """
+
+ kwargs = _get_kwargs(
+ data_source_id=data_source_id,
+ client=client,
+ json_body=json_body,
+ )
+
+ response = httpx.request(
+ verify=client.verify_ssl,
+ **kwargs,
+ )
+
+ return _build_response(client=client, response=response)
+
+
+def sync(
+ data_source_id: str,
+ *,
+ client: Client,
+ json_body: DataSourceDefinition,
+) -> Optional[Union[DataSource, Error]]:
+ """Edit a datasource.
+
+ Args:
+ data_source_id (str):
+ json_body (DataSourceDefinition): parameters used to create and modify a data source
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Union[DataSource, Error]
+ """
+
+ return sync_detailed(
+ data_source_id=data_source_id,
+ client=client,
+ json_body=json_body,
+ ).parsed
+
+
+async def asyncio_detailed(
+ data_source_id: str,
+ *,
+ client: Client,
+ json_body: DataSourceDefinition,
+) -> Response[Union[DataSource, Error]]:
+ """Edit a datasource.
+
+ Args:
+ data_source_id (str):
+ json_body (DataSourceDefinition): parameters used to create and modify a data source
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[DataSource, Error]]
+ """
+
+ kwargs = _get_kwargs(
+ data_source_id=data_source_id,
+ client=client,
+ json_body=json_body,
+ )
+
+ async with httpx.AsyncClient(verify=client.verify_ssl, proxies=kwargs.pop("proxies", None)) as _client:
+ response = await _client.request(**kwargs)
+
+ return _build_response(client=client, response=response)
+
+
+async def asyncio(
+ data_source_id: str,
+ *,
+ client: Client,
+ json_body: DataSourceDefinition,
+) -> Optional[Union[DataSource, Error]]:
+ """Edit a datasource.
+
+ Args:
+ data_source_id (str):
+ json_body (DataSourceDefinition): parameters used to create and modify a data source
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Union[DataSource, Error]
+ """
+
+ return (
+ await asyncio_detailed(
+ data_source_id=data_source_id,
+ client=client,
+ json_body=json_body,
+ )
+ ).parsed
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/patch_local_data_selection.py b/src/tuneinsight/api/sdk/api/api_datasource/patch_local_data_selection.py
index a71280b..90eeaeb 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/patch_local_data_selection.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/patch_local_data_selection.py
@@ -31,12 +31,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "patch",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/post_data_source.py b/src/tuneinsight/api/sdk/api/api_datasource/post_data_source.py
index d797ad7..2c71940 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/post_data_source.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/post_data_source.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/post_local_data_selection.py b/src/tuneinsight/api/sdk/api/api_datasource/post_local_data_selection.py
index e292b06..6017f24 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/post_local_data_selection.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/post_local_data_selection.py
@@ -29,12 +29,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/put_data_source_data.py b/src/tuneinsight/api/sdk/api/api_datasource/put_data_source_data.py
index d18215e..c2974fa 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/put_data_source_data.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/put_data_source_data.py
@@ -24,12 +24,24 @@ def _get_kwargs(
multipart_multipart_data = multipart_data.to_multipart()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "put",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"files": multipart_multipart_data,
}
diff --git a/src/tuneinsight/api/sdk/api/api_datasource/refresh_local_data_selection.py b/src/tuneinsight/api/sdk/api/api_datasource/refresh_local_data_selection.py
index 027f7c3..99e7fb2 100644
--- a/src/tuneinsight/api/sdk/api/api_datasource/refresh_local_data_selection.py
+++ b/src/tuneinsight/api/sdk/api/api_datasource/refresh_local_data_selection.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_infos/get_infos.py b/src/tuneinsight/api/sdk/api/api_infos/get_infos.py
index 39d8d6d..252c035 100644
--- a/src/tuneinsight/api/sdk/api/api_infos/get_infos.py
+++ b/src/tuneinsight/api/sdk/api/api_infos/get_infos.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_log/get_log_list.py b/src/tuneinsight/api/sdk/api/api_log/get_log_list.py
index c813dea..97b9cb4 100644
--- a/src/tuneinsight/api/sdk/api/api_log/get_log_list.py
+++ b/src/tuneinsight/api/sdk/api/api_log/get_log_list.py
@@ -39,12 +39,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_ml/delete_model.py b/src/tuneinsight/api/sdk/api/api_ml/delete_model.py
index d6fb3f0..e2784e2 100644
--- a/src/tuneinsight/api/sdk/api/api_ml/delete_model.py
+++ b/src/tuneinsight/api/sdk/api/api_ml/delete_model.py
@@ -24,12 +24,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_ml/delete_models.py b/src/tuneinsight/api/sdk/api/api_ml/delete_models.py
index bc30d15..a0b6fd4 100644
--- a/src/tuneinsight/api/sdk/api/api_ml/delete_models.py
+++ b/src/tuneinsight/api/sdk/api/api_ml/delete_models.py
@@ -18,12 +18,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_ml/get_model.py b/src/tuneinsight/api/sdk/api/api_ml/get_model.py
index 5a12aeb..f44bea7 100644
--- a/src/tuneinsight/api/sdk/api/api_ml/get_model.py
+++ b/src/tuneinsight/api/sdk/api/api_ml/get_model.py
@@ -28,12 +28,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_ml/get_model_list.py b/src/tuneinsight/api/sdk/api/api_ml/get_model_list.py
index f1e3fcd..d383d72 100644
--- a/src/tuneinsight/api/sdk/api/api_ml/get_model_list.py
+++ b/src/tuneinsight/api/sdk/api/api_ml/get_model_list.py
@@ -41,12 +41,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_ml/post_model.py b/src/tuneinsight/api/sdk/api/api_ml/post_model.py
index 0a274b8..473ddc8 100644
--- a/src/tuneinsight/api/sdk/api/api_ml/post_model.py
+++ b/src/tuneinsight/api/sdk/api/api_ml/post_model.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_network/get_network_metadata.py b/src/tuneinsight/api/sdk/api/api_network/get_network_metadata.py
index ee57a18..42790cc 100644
--- a/src/tuneinsight/api/sdk/api/api_network/get_network_metadata.py
+++ b/src/tuneinsight/api/sdk/api/api_network/get_network_metadata.py
@@ -25,12 +25,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_network/get_network_status.py b/src/tuneinsight/api/sdk/api/api_network/get_network_status.py
index cfe21a3..d05d71d 100644
--- a/src/tuneinsight/api/sdk/api/api_network/get_network_status.py
+++ b/src/tuneinsight/api/sdk/api/api_network/get_network_status.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_ontology_search/__init__.py b/src/tuneinsight/api/sdk/api/api_ontology_search/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/tuneinsight/api/sdk/api/api_ontology_search/sphn_ontologies_search.py b/src/tuneinsight/api/sdk/api/api_ontology_search/sphn_ontologies_search.py
new file mode 100644
index 0000000..5a4dfb2
--- /dev/null
+++ b/src/tuneinsight/api/sdk/api/api_ontology_search/sphn_ontologies_search.py
@@ -0,0 +1,220 @@
+from http import HTTPStatus
+from typing import Any, Dict, List, Optional, Union
+
+import httpx
+
+from ... import errors
+from ...client import Client
+from ...models.error import Error
+from ...models.sphn_ontologies_search_ontologies_item import SphnOntologiesSearchOntologiesItem
+from ...models.sphn_ontologies_search_response_200_item import SphnOntologiesSearchResponse200Item
+from ...types import UNSET, Response
+
+
+def _get_kwargs(
+ *,
+ client: Client,
+ query: str,
+ ontologies: List[SphnOntologiesSearchOntologiesItem],
+) -> Dict[str, Any]:
+ url = "{}/sphn-ontologies-search".format(client.base_url)
+
+ headers: Dict[str, str] = client.get_headers()
+ cookies: Dict[str, Any] = client.get_cookies()
+
+ params: Dict[str, Any] = {}
+ params["query"] = query
+
+ json_ontologies = []
+ for ontologies_item_data in ontologies:
+ ontologies_item = ontologies_item_data.value
+
+ json_ontologies.append(ontologies_item)
+
+ params["ontologies[]"] = json_ontologies
+
+ params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
+ return {
+ "method": "get",
+ "url": url,
+ "headers": headers,
+ "cookies": cookies,
+ "timeout": client.get_timeout(),
+ "proxies": proxies,
+ "params": params,
+ }
+
+
+def _parse_response(
+ *, client: Client, response: httpx.Response
+) -> Optional[Union[Error, List["SphnOntologiesSearchResponse200Item"]]]:
+ if response.status_code == HTTPStatus.OK:
+ response_200 = []
+ _response_200 = response.json()
+ for response_200_item_data in _response_200:
+ response_200_item = SphnOntologiesSearchResponse200Item.from_dict(response_200_item_data)
+
+ response_200.append(response_200_item)
+
+ return response_200
+ if response.status_code == HTTPStatus.FORBIDDEN:
+ response_403 = Error.from_dict(response.json())
+
+ return response_403
+ if response.status_code == HTTPStatus.NOT_FOUND:
+ response_404 = Error.from_dict(response.json())
+
+ return response_404
+ if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR:
+ response_500 = Error.from_dict(response.json())
+
+ return response_500
+ if client.raise_on_unexpected_status:
+ raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}")
+ else:
+ return None
+
+
+def _build_response(
+ *, client: Client, response: httpx.Response
+) -> Response[Union[Error, List["SphnOntologiesSearchResponse200Item"]]]:
+ return Response(
+ status_code=HTTPStatus(response.status_code),
+ content=response.content,
+ headers=response.headers,
+ parsed=_parse_response(client=client, response=response),
+ )
+
+
+def sync_detailed(
+ *,
+ client: Client,
+ query: str,
+ ontologies: List[SphnOntologiesSearchOntologiesItem],
+) -> Response[Union[Error, List["SphnOntologiesSearchResponse200Item"]]]:
+ """Search the SPHN ontologies
+
+ Args:
+ query (str):
+ ontologies (List[SphnOntologiesSearchOntologiesItem]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, List['SphnOntologiesSearchResponse200Item']]]
+ """
+
+ kwargs = _get_kwargs(
+ client=client,
+ query=query,
+ ontologies=ontologies,
+ )
+
+ response = httpx.request(
+ verify=client.verify_ssl,
+ **kwargs,
+ )
+
+ return _build_response(client=client, response=response)
+
+
+def sync(
+ *,
+ client: Client,
+ query: str,
+ ontologies: List[SphnOntologiesSearchOntologiesItem],
+) -> Optional[Union[Error, List["SphnOntologiesSearchResponse200Item"]]]:
+ """Search the SPHN ontologies
+
+ Args:
+ query (str):
+ ontologies (List[SphnOntologiesSearchOntologiesItem]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, List['SphnOntologiesSearchResponse200Item']]]
+ """
+
+ return sync_detailed(
+ client=client,
+ query=query,
+ ontologies=ontologies,
+ ).parsed
+
+
+async def asyncio_detailed(
+ *,
+ client: Client,
+ query: str,
+ ontologies: List[SphnOntologiesSearchOntologiesItem],
+) -> Response[Union[Error, List["SphnOntologiesSearchResponse200Item"]]]:
+ """Search the SPHN ontologies
+
+ Args:
+ query (str):
+ ontologies (List[SphnOntologiesSearchOntologiesItem]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, List['SphnOntologiesSearchResponse200Item']]]
+ """
+
+ kwargs = _get_kwargs(
+ client=client,
+ query=query,
+ ontologies=ontologies,
+ )
+
+ async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
+ response = await _client.request(**kwargs)
+
+ return _build_response(client=client, response=response)
+
+
+async def asyncio(
+ *,
+ client: Client,
+ query: str,
+ ontologies: List[SphnOntologiesSearchOntologiesItem],
+) -> Optional[Union[Error, List["SphnOntologiesSearchResponse200Item"]]]:
+ """Search the SPHN ontologies
+
+ Args:
+ query (str):
+ ontologies (List[SphnOntologiesSearchOntologiesItem]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, List['SphnOntologiesSearchResponse200Item']]]
+ """
+
+ return (
+ await asyncio_detailed(
+ client=client,
+ query=query,
+ ontologies=ontologies,
+ )
+ ).parsed
diff --git a/src/tuneinsight/api/sdk/api/api_private_search/get_private_search_database.py b/src/tuneinsight/api/sdk/api/api_private_search/get_private_search_database.py
index 30eb8cb..e38b38a 100644
--- a/src/tuneinsight/api/sdk/api/api_private_search/get_private_search_database.py
+++ b/src/tuneinsight/api/sdk/api/api_private_search/get_private_search_database.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_private_search/get_private_search_databases_list.py b/src/tuneinsight/api/sdk/api/api_private_search/get_private_search_databases_list.py
index 37510cc..a25cf3f 100644
--- a/src/tuneinsight/api/sdk/api/api_private_search/get_private_search_databases_list.py
+++ b/src/tuneinsight/api/sdk/api/api_private_search/get_private_search_databases_list.py
@@ -38,12 +38,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_private_search/post_private_search_query.py b/src/tuneinsight/api/sdk/api/api_private_search/post_private_search_query.py
index 8332020..105d317 100644
--- a/src/tuneinsight/api/sdk/api/api_private_search/post_private_search_query.py
+++ b/src/tuneinsight/api/sdk/api/api_private_search/post_private_search_query.py
@@ -22,12 +22,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/delete_project.py b/src/tuneinsight/api/sdk/api/api_project/delete_project.py
index cfbdc86..957487d 100644
--- a/src/tuneinsight/api/sdk/api/api_project/delete_project.py
+++ b/src/tuneinsight/api/sdk/api/api_project/delete_project.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/delete_projects.py b/src/tuneinsight/api/sdk/api/api_project/delete_projects.py
index 5ba11bc..cbe9ba5 100644
--- a/src/tuneinsight/api/sdk/api/api_project/delete_projects.py
+++ b/src/tuneinsight/api/sdk/api/api_project/delete_projects.py
@@ -18,12 +18,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/get_project.py b/src/tuneinsight/api/sdk/api/api_project/get_project.py
index 7c75240..124af33 100644
--- a/src/tuneinsight/api/sdk/api/api_project/get_project.py
+++ b/src/tuneinsight/api/sdk/api/api_project/get_project.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/get_project_list.py b/src/tuneinsight/api/sdk/api/api_project/get_project_list.py
index c810a54..4fafe9f 100644
--- a/src/tuneinsight/api/sdk/api/api_project/get_project_list.py
+++ b/src/tuneinsight/api/sdk/api/api_project/get_project_list.py
@@ -50,12 +50,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/get_project_network_status.py b/src/tuneinsight/api/sdk/api/api_project/get_project_network_status.py
index 80a9b7a..94765fd 100644
--- a/src/tuneinsight/api/sdk/api/api_project/get_project_network_status.py
+++ b/src/tuneinsight/api/sdk/api/api_project/get_project_network_status.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/get_project_notebook.py b/src/tuneinsight/api/sdk/api/api_project/get_project_notebook.py
new file mode 100644
index 0000000..aa87c3b
--- /dev/null
+++ b/src/tuneinsight/api/sdk/api/api_project/get_project_notebook.py
@@ -0,0 +1,185 @@
+from http import HTTPStatus
+from typing import Any, Dict, Optional, Union, cast
+
+import httpx
+
+from ... import errors
+from ...client import Client
+from ...models.error import Error
+from ...types import Response
+
+
+def _get_kwargs(
+ project_id: str,
+ *,
+ client: Client,
+) -> Dict[str, Any]:
+ url = "{}/projects/{projectId}/notebook".format(client.base_url, projectId=project_id)
+
+ headers: Dict[str, str] = client.get_headers()
+ cookies: Dict[str, Any] = client.get_cookies()
+
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
+ return {
+ "method": "get",
+ "url": url,
+ "headers": headers,
+ "cookies": cookies,
+ "timeout": client.get_timeout(),
+ "proxies": proxies,
+ }
+
+
+def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Union[Error, str]]:
+ if response.status_code == HTTPStatus.CREATED:
+ response_201 = cast(str, response.json())
+ return response_201
+ if response.status_code == HTTPStatus.BAD_REQUEST:
+ response_400 = Error.from_dict(response.json())
+
+ return response_400
+ if response.status_code == HTTPStatus.FORBIDDEN:
+ response_403 = Error.from_dict(response.json())
+
+ return response_403
+ if response.status_code == HTTPStatus.NOT_FOUND:
+ response_404 = Error.from_dict(response.json())
+
+ return response_404
+ if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR:
+ response_500 = Error.from_dict(response.json())
+
+ return response_500
+ if client.raise_on_unexpected_status:
+ raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}")
+ else:
+ return None
+
+
+def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[Error, str]]:
+ return Response(
+ status_code=HTTPStatus(response.status_code),
+ content=response.content,
+ headers=response.headers,
+ parsed=_parse_response(client=client, response=response),
+ )
+
+
+def sync_detailed(
+ project_id: str,
+ *,
+ client: Client,
+) -> Response[Union[Error, str]]:
+ """Generate a notebook to connect to this project.
+
+ Args:
+ project_id (str):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, str]]
+ """
+
+ kwargs = _get_kwargs(
+ project_id=project_id,
+ client=client,
+ )
+
+ response = httpx.request(
+ verify=client.verify_ssl,
+ **kwargs,
+ )
+
+ return _build_response(client=client, response=response)
+
+
+def sync(
+ project_id: str,
+ *,
+ client: Client,
+) -> Optional[Union[Error, str]]:
+ """Generate a notebook to connect to this project.
+
+ Args:
+ project_id (str):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, str]]
+ """
+
+ return sync_detailed(
+ project_id=project_id,
+ client=client,
+ ).parsed
+
+
+async def asyncio_detailed(
+ project_id: str,
+ *,
+ client: Client,
+) -> Response[Union[Error, str]]:
+ """Generate a notebook to connect to this project.
+
+ Args:
+ project_id (str):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, str]]
+ """
+
+ kwargs = _get_kwargs(
+ project_id=project_id,
+ client=client,
+ )
+
+ async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
+ response = await _client.request(**kwargs)
+
+ return _build_response(client=client, response=response)
+
+
+async def asyncio(
+ project_id: str,
+ *,
+ client: Client,
+) -> Optional[Union[Error, str]]:
+ """Generate a notebook to connect to this project.
+
+ Args:
+ project_id (str):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Error, str]]
+ """
+
+ return (
+ await asyncio_detailed(
+ project_id=project_id,
+ client=client,
+ )
+ ).parsed
diff --git a/src/tuneinsight/api/sdk/api/api_project/get_project_participant_status.py b/src/tuneinsight/api/sdk/api/api_project/get_project_participant_status.py
index 4b4b1c3..e6049d4 100644
--- a/src/tuneinsight/api/sdk/api/api_project/get_project_participant_status.py
+++ b/src/tuneinsight/api/sdk/api/api_project/get_project_participant_status.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/get_project_status.py b/src/tuneinsight/api/sdk/api/api_project/get_project_status.py
index 65b5ae0..0ac1804 100644
--- a/src/tuneinsight/api/sdk/api/api_project/get_project_status.py
+++ b/src/tuneinsight/api/sdk/api/api_project/get_project_status.py
@@ -26,12 +26,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/patch_project.py b/src/tuneinsight/api/sdk/api/api_project/patch_project.py
index 83775e1..d3a6e43 100644
--- a/src/tuneinsight/api/sdk/api/api_project/patch_project.py
+++ b/src/tuneinsight/api/sdk/api/api_project/patch_project.py
@@ -24,12 +24,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "patch",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/post_project.py b/src/tuneinsight/api/sdk/api/api_project/post_project.py
index 7c31b28..6ee0d67 100644
--- a/src/tuneinsight/api/sdk/api/api_project/post_project.py
+++ b/src/tuneinsight/api/sdk/api/api_project/post_project.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/post_project_computation.py b/src/tuneinsight/api/sdk/api/api_project/post_project_computation.py
index 36fea0a..837c3c6 100644
--- a/src/tuneinsight/api/sdk/api/api_project/post_project_computation.py
+++ b/src/tuneinsight/api/sdk/api/api_project/post_project_computation.py
@@ -24,12 +24,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_project/post_project_data.py b/src/tuneinsight/api/sdk/api/api_project/post_project_data.py
index c5b4ab9..6b3b4dc 100644
--- a/src/tuneinsight/api/sdk/api/api_project/post_project_data.py
+++ b/src/tuneinsight/api/sdk/api/api_project/post_project_data.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_protocols/post_protocol.py b/src/tuneinsight/api/sdk/api/api_protocols/post_protocol.py
index e26df64..8fe11e8 100644
--- a/src/tuneinsight/api/sdk/api/api_protocols/post_protocol.py
+++ b/src/tuneinsight/api/sdk/api/api_protocols/post_protocol.py
@@ -22,12 +22,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_protocols/post_protocol_message.py b/src/tuneinsight/api/sdk/api/api_protocols/post_protocol_message.py
index c3f07ad..a9e6f73 100644
--- a/src/tuneinsight/api/sdk/api/api_protocols/post_protocol_message.py
+++ b/src/tuneinsight/api/sdk/api/api_protocols/post_protocol_message.py
@@ -22,12 +22,24 @@ def _get_kwargs(
multipart_multipart_data = multipart_data.to_multipart()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"files": multipart_multipart_data,
}
diff --git a/src/tuneinsight/api/sdk/api/api_python_server/post_llm_request.py b/src/tuneinsight/api/sdk/api/api_python_server/post_llm_request.py
index c2a3008..e231630 100644
--- a/src/tuneinsight/api/sdk/api/api_python_server/post_llm_request.py
+++ b/src/tuneinsight/api/sdk/api/api_python_server/post_llm_request.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_query/get_query.py b/src/tuneinsight/api/sdk/api/api_query/get_query.py
index 0386074..0c65e5a 100644
--- a/src/tuneinsight/api/sdk/api/api_query/get_query.py
+++ b/src/tuneinsight/api/sdk/api/api_query/get_query.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_query/get_query_list.py b/src/tuneinsight/api/sdk/api/api_query/get_query_list.py
index 48b0b4b..1d67d79 100644
--- a/src/tuneinsight/api/sdk/api/api_query/get_query_list.py
+++ b/src/tuneinsight/api/sdk/api/api_query/get_query_list.py
@@ -41,12 +41,24 @@ def _get_kwargs(
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"params": params,
}
diff --git a/src/tuneinsight/api/sdk/api/api_reset/reset_all.py b/src/tuneinsight/api/sdk/api/api_reset/reset_all.py
index aaf3a70..ba39b17 100644
--- a/src/tuneinsight/api/sdk/api/api_reset/reset_all.py
+++ b/src/tuneinsight/api/sdk/api/api_reset/reset_all.py
@@ -18,12 +18,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_sessions/get_session.py b/src/tuneinsight/api/sdk/api/api_sessions/get_session.py
index 5ddceb2..89c7e7e 100644
--- a/src/tuneinsight/api/sdk/api/api_sessions/get_session.py
+++ b/src/tuneinsight/api/sdk/api/api_sessions/get_session.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_sessions/post_session.py b/src/tuneinsight/api/sdk/api/api_sessions/post_session.py
index 616e92d..98593b2 100644
--- a/src/tuneinsight/api/sdk/api/api_sessions/post_session.py
+++ b/src/tuneinsight/api/sdk/api/api_sessions/post_session.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_sse/__init__.py b/src/tuneinsight/api/sdk/api/api_sse/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/tuneinsight/api/sdk/api/api_sse/get_sse.py b/src/tuneinsight/api/sdk/api/api_sse/get_sse.py
new file mode 100644
index 0000000..7a4be68
--- /dev/null
+++ b/src/tuneinsight/api/sdk/api/api_sse/get_sse.py
@@ -0,0 +1,106 @@
+from http import HTTPStatus
+from typing import Any, Dict, Optional
+
+import httpx
+
+from ... import errors
+from ...client import Client
+from ...types import Response
+
+
+def _get_kwargs(
+ *,
+ client: Client,
+) -> Dict[str, Any]:
+ url = "{}/sse".format(client.base_url)
+
+ headers: Dict[str, str] = client.get_headers()
+ cookies: Dict[str, Any] = client.get_cookies()
+
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
+ return {
+ "method": "get",
+ "url": url,
+ "headers": headers,
+ "cookies": cookies,
+ "timeout": client.get_timeout(),
+ "proxies": proxies,
+ }
+
+
+def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Any]:
+ if response.status_code == HTTPStatus.OK:
+ return None
+ if client.raise_on_unexpected_status:
+ raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}")
+ else:
+ return None
+
+
+def _build_response(*, client: Client, response: httpx.Response) -> Response[Any]:
+ return Response(
+ status_code=HTTPStatus(response.status_code),
+ content=response.content,
+ headers=response.headers,
+ parsed=_parse_response(client=client, response=response),
+ )
+
+
+def sync_detailed(
+ *,
+ client: Client,
+) -> Response[Any]:
+ """Server Sent Event Subscription endpoint
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Any]
+ """
+
+ kwargs = _get_kwargs(
+ client=client,
+ )
+
+ response = httpx.request(
+ verify=client.verify_ssl,
+ **kwargs,
+ )
+
+ return _build_response(client=client, response=response)
+
+
+async def asyncio_detailed(
+ *,
+ client: Client,
+) -> Response[Any]:
+ """Server Sent Event Subscription endpoint
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Any]
+ """
+
+ kwargs = _get_kwargs(
+ client=client,
+ )
+
+    async with httpx.AsyncClient(verify=client.verify_ssl, proxies=kwargs.pop("proxies")) as _client:
+ response = await _client.request(**kwargs)
+
+ return _build_response(client=client, response=response)
diff --git a/src/tuneinsight/api/sdk/api/api_sse/post_sse_proxy_resp.py b/src/tuneinsight/api/sdk/api/api_sse/post_sse_proxy_resp.py
new file mode 100644
index 0000000..14c0a0f
--- /dev/null
+++ b/src/tuneinsight/api/sdk/api/api_sse/post_sse_proxy_resp.py
@@ -0,0 +1,183 @@
+from http import HTTPStatus
+from typing import Any, Dict, Optional, Union, cast
+
+import httpx
+
+from ... import errors
+from ...client import Client
+from ...models.error import Error
+from ...types import UNSET, Response
+
+
+def _get_kwargs(
+ *,
+ client: Client,
+ uuid: str,
+) -> Dict[str, Any]:
+ url = "{}/sse-proxy-resp".format(client.base_url)
+
+ headers: Dict[str, str] = client.get_headers()
+ cookies: Dict[str, Any] = client.get_cookies()
+
+ params: Dict[str, Any] = {}
+ params["uuid"] = uuid
+
+ params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
+ return {
+ "method": "post",
+ "url": url,
+ "headers": headers,
+ "cookies": cookies,
+ "timeout": client.get_timeout(),
+ "proxies": proxies,
+ "params": params,
+ }
+
+
+def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Union[Any, Error]]:
+ if response.status_code == HTTPStatus.OK:
+ response_200 = cast(Any, None)
+ return response_200
+ if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY:
+ response_422 = Error.from_dict(response.json())
+
+ return response_422
+ if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR:
+ response_500 = Error.from_dict(response.json())
+
+ return response_500
+ if client.raise_on_unexpected_status:
+ raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}")
+ else:
+ return None
+
+
+def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[Any, Error]]:
+ return Response(
+ status_code=HTTPStatus(response.status_code),
+ content=response.content,
+ headers=response.headers,
+ parsed=_parse_response(client=client, response=response),
+ )
+
+
+def sync_detailed(
+ *,
+ client: Client,
+ uuid: str,
+) -> Response[Union[Any, Error]]:
+ """Server Sent Event Message response endpoint
+
+ Args:
+ uuid (str):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Any, Error]]
+ """
+
+ kwargs = _get_kwargs(
+ client=client,
+ uuid=uuid,
+ )
+
+ response = httpx.request(
+ verify=client.verify_ssl,
+ **kwargs,
+ )
+
+ return _build_response(client=client, response=response)
+
+
+def sync(
+ *,
+ client: Client,
+ uuid: str,
+) -> Optional[Union[Any, Error]]:
+ """Server Sent Event Message response endpoint
+
+ Args:
+ uuid (str):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Any, Error]]
+ """
+
+ return sync_detailed(
+ client=client,
+ uuid=uuid,
+ ).parsed
+
+
+async def asyncio_detailed(
+ *,
+ client: Client,
+ uuid: str,
+) -> Response[Union[Any, Error]]:
+ """Server Sent Event Message response endpoint
+
+ Args:
+ uuid (str):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Any, Error]]
+ """
+
+ kwargs = _get_kwargs(
+ client=client,
+ uuid=uuid,
+ )
+
+    async with httpx.AsyncClient(verify=client.verify_ssl, proxies=kwargs.pop("proxies")) as _client:
+ response = await _client.request(**kwargs)
+
+ return _build_response(client=client, response=response)
+
+
+async def asyncio(
+ *,
+ client: Client,
+ uuid: str,
+) -> Optional[Union[Any, Error]]:
+ """Server Sent Event Message response endpoint
+
+ Args:
+ uuid (str):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[Any, Error]]
+ """
+
+ return (
+ await asyncio_detailed(
+ client=client,
+ uuid=uuid,
+ )
+ ).parsed
diff --git a/src/tuneinsight/api/sdk/api/api_users/add_roles_to_user.py b/src/tuneinsight/api/sdk/api/api_users/add_roles_to_user.py
index 5d9c4ad..c8a0634 100644
--- a/src/tuneinsight/api/sdk/api/api_users/add_roles_to_user.py
+++ b/src/tuneinsight/api/sdk/api/api_users/add_roles_to_user.py
@@ -22,12 +22,24 @@ def _get_kwargs(
json_json_body = json_body
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_users/delete_roles_from_user.py b/src/tuneinsight/api/sdk/api/api_users/delete_roles_from_user.py
index bb1a8e5..a11693b 100644
--- a/src/tuneinsight/api/sdk/api/api_users/delete_roles_from_user.py
+++ b/src/tuneinsight/api/sdk/api/api_users/delete_roles_from_user.py
@@ -22,12 +22,24 @@ def _get_kwargs(
json_json_body = json_body
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_users/delete_user.py b/src/tuneinsight/api/sdk/api/api_users/delete_user.py
index 4f4fc9b..5fa87b1 100644
--- a/src/tuneinsight/api/sdk/api/api_users/delete_user.py
+++ b/src/tuneinsight/api/sdk/api/api_users/delete_user.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "delete",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_users/get_user.py b/src/tuneinsight/api/sdk/api/api_users/get_user.py
index 37e503d..702d4ae 100644
--- a/src/tuneinsight/api/sdk/api/api_users/get_user.py
+++ b/src/tuneinsight/api/sdk/api/api_users/get_user.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_users/get_user_list.py b/src/tuneinsight/api/sdk/api/api_users/get_user_list.py
index a976c6a..5bc5d7a 100644
--- a/src/tuneinsight/api/sdk/api/api_users/get_user_list.py
+++ b/src/tuneinsight/api/sdk/api/api_users/get_user_list.py
@@ -23,12 +23,24 @@ def _get_kwargs(
json_json_body = json_body.to_dict()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
"json": json_json_body,
}
diff --git a/src/tuneinsight/api/sdk/api/api_users/get_user_roles.py b/src/tuneinsight/api/sdk/api/api_users/get_user_roles.py
index f86c562..335ed23 100644
--- a/src/tuneinsight/api/sdk/api/api_users/get_user_roles.py
+++ b/src/tuneinsight/api/sdk/api/api_users/get_user_roles.py
@@ -20,12 +20,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_users/post_user.py b/src/tuneinsight/api/sdk/api/api_users/post_user.py
index b6222be..13e824d 100644
--- a/src/tuneinsight/api/sdk/api/api_users/post_user.py
+++ b/src/tuneinsight/api/sdk/api/api_users/post_user.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "post",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/api_users/update_user.py b/src/tuneinsight/api/sdk/api/api_users/update_user.py
index 2c90401..c20a03f 100644
--- a/src/tuneinsight/api/sdk/api/api_users/update_user.py
+++ b/src/tuneinsight/api/sdk/api/api_users/update_user.py
@@ -19,12 +19,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "put",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/health/get_health.py b/src/tuneinsight/api/sdk/api/health/get_health.py
index 06a1572..c327702 100644
--- a/src/tuneinsight/api/sdk/api/health/get_health.py
+++ b/src/tuneinsight/api/sdk/api/health/get_health.py
@@ -17,12 +17,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/api/metrics/get_metrics.py b/src/tuneinsight/api/sdk/api/metrics/get_metrics.py
index e68e62c..03480c1 100644
--- a/src/tuneinsight/api/sdk/api/metrics/get_metrics.py
+++ b/src/tuneinsight/api/sdk/api/metrics/get_metrics.py
@@ -17,12 +17,24 @@ def _get_kwargs(
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
+ # Set the proxies if the client has proxies set.
+ proxies = None
+ if hasattr(client, "proxies") and client.proxies is not None:
+ https_proxy = client.proxies.get("https")
+ if https_proxy:
+ proxies = https_proxy
+ else:
+ http_proxy = client.proxies.get("http")
+ if http_proxy:
+ proxies = http_proxy
+
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
+ "proxies": proxies,
}
diff --git a/src/tuneinsight/api/sdk/models/__init__.py b/src/tuneinsight/api/sdk/models/__init__.py
index 51c2b7d..4e1f09c 100644
--- a/src/tuneinsight/api/sdk/models/__init__.py
+++ b/src/tuneinsight/api/sdk/models/__init__.py
@@ -111,6 +111,7 @@
from .get_model_list_order import GetModelListOrder
from .get_model_list_sort_by import GetModelListSortBy
from .get_network_metadata_response_200 import GetNetworkMetadataResponse200
+from .get_network_metadata_response_200_network_type import GetNetworkMetadataResponse200NetworkType
from .get_params_response_200 import GetParamsResponse200
from .get_private_search_databases_list_order import GetPrivateSearchDatabasesListOrder
from .get_private_search_databases_list_sort_by import GetPrivateSearchDatabasesListSortBy
@@ -127,6 +128,7 @@
from .gwas import GWAS
from .hybrid_fl import HybridFL
from .hybrid_fl_learning_params import HybridFLLearningParams
+from .instance_configuration import InstanceConfiguration
from .key_info import KeyInfo
from .key_switched_computation import KeySwitchedComputation
from .local_data_selection import LocalDataSelection
@@ -147,6 +149,7 @@
from .model_params import ModelParams
from .model_type import ModelType
from .network import Network
+from .network_type import NetworkType
from .network_visibility_type import NetworkVisibilityType
from .node import Node
from .node_status import NodeStatus
@@ -216,6 +219,9 @@
from .set_intersection_output_format import SetIntersectionOutputFormat
from .settings import Settings
from .setup_session import SetupSession
+from .sphn_ontologies_search_ontologies_item import SphnOntologiesSearchOntologiesItem
+from .sphn_ontologies_search_response_200_item import SphnOntologiesSearchResponse200Item
+from .sphn_ontology_search_result import SphnOntologySearchResult
from .statistic_base import StatisticBase
from .statistic_definition import StatisticDefinition
from .statistic_result import StatisticResult
@@ -359,6 +365,7 @@
"GetModelListOrder",
"GetModelListSortBy",
"GetNetworkMetadataResponse200",
+ "GetNetworkMetadataResponse200NetworkType",
"GetParamsResponse200",
"GetPrivateSearchDatabasesListOrder",
"GetPrivateSearchDatabasesListSortBy",
@@ -375,6 +382,7 @@
"GWAS",
"HybridFL",
"HybridFLLearningParams",
+ "InstanceConfiguration",
"KeyInfo",
"KeySwitchedComputation",
"LocalDataSelection",
@@ -395,6 +403,7 @@
"ModelParams",
"ModelType",
"Network",
+ "NetworkType",
"NetworkVisibilityType",
"Node",
"NodeStatus",
@@ -464,6 +473,9 @@
"SetIntersectionOutputFormat",
"Settings",
"SetupSession",
+ "SphnOntologiesSearchOntologiesItem",
+ "SphnOntologiesSearchResponse200Item",
+ "SphnOntologySearchResult",
"StatisticalAggregation",
"StatisticalQuantity",
"StatisticBase",
diff --git a/src/tuneinsight/api/sdk/models/add_columns.py b/src/tuneinsight/api/sdk/models/add_columns.py
index 331ce6a..d6c0ec4 100644
--- a/src/tuneinsight/api/sdk/models/add_columns.py
+++ b/src/tuneinsight/api/sdk/models/add_columns.py
@@ -13,29 +13,29 @@ class AddColumns:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
+ sep (Union[Unset, str]): separator when the added columns are not numerical
input_columns (Union[Unset, List[str]]): the columns to add together
numerical (Union[Unset, bool]): whether or not the output columns are numerical
output (Union[Unset, str]): column to use as output
- sep (Union[Unset, str]): separator when the added columns are not numerical
"""
type: PreprocessingOperationType
+ sep: Union[Unset, str] = UNSET
input_columns: Union[Unset, List[str]] = UNSET
numerical: Union[Unset, bool] = UNSET
output: Union[Unset, str] = UNSET
- sep: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
+ sep = self.sep
input_columns: Union[Unset, List[str]] = UNSET
if not isinstance(self.input_columns, Unset):
input_columns = self.input_columns
numerical = self.numerical
output = self.output
- sep = self.sep
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -44,14 +44,14 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
+ if sep is not UNSET:
+ field_dict["sep"] = sep
if input_columns is not UNSET:
field_dict["inputColumns"] = input_columns
if numerical is not UNSET:
field_dict["numerical"] = numerical
if output is not UNSET:
field_dict["output"] = output
- if sep is not UNSET:
- field_dict["sep"] = sep
return field_dict
@@ -60,20 +60,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
+ sep = d.pop("sep", UNSET)
+
input_columns = cast(List[str], d.pop("inputColumns", UNSET))
numerical = d.pop("numerical", UNSET)
output = d.pop("output", UNSET)
- sep = d.pop("sep", UNSET)
-
add_columns = cls(
type=type,
+ sep=sep,
input_columns=input_columns,
numerical=numerical,
output=output,
- sep=sep,
)
add_columns.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/aggregated_dataset_length.py b/src/tuneinsight/api/sdk/models/aggregated_dataset_length.py
index e312ec9..2e9fbfa 100644
--- a/src/tuneinsight/api/sdk/models/aggregated_dataset_length.py
+++ b/src/tuneinsight/api/sdk/models/aggregated_dataset_length.py
@@ -22,8 +22,12 @@ class AggregatedDatasetLength:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class AggregatedDatasetLength:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class AggregatedDatasetLength:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,95 +65,82 @@ class AggregatedDatasetLength:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
features (Union[Unset, str]): Shared identifier of a data object.
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
features: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
features = self.features
field_dict: Dict[str, Any] = {}
@@ -150,46 +150,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if features is not UNSET:
field_dict["features"] = features
@@ -205,14 +205,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -221,10 +214,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -232,6 +236,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -243,19 +261,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -263,42 +268,37 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
features = d.pop("features", UNSET)
aggregated_dataset_length = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
features=features,
)
diff --git a/src/tuneinsight/api/sdk/models/apply_mapping.py b/src/tuneinsight/api/sdk/models/apply_mapping.py
index a1dad98..0e4899c 100644
--- a/src/tuneinsight/api/sdk/models/apply_mapping.py
+++ b/src/tuneinsight/api/sdk/models/apply_mapping.py
@@ -17,30 +17,29 @@ class ApplyMapping:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
+ output (Union[Unset, str]): column to use as output
default (Union[Unset, str]): default value to assign to items not specified in the dictionary
input_ (Union[Unset, str]): column to use as input
mapping (Union[Unset, StringMapping]): mapping from string -> string
- output (Union[Unset, str]): column to use as output
"""
type: PreprocessingOperationType
+ output: Union[Unset, str] = UNSET
default: Union[Unset, str] = UNSET
input_: Union[Unset, str] = UNSET
mapping: Union[Unset, "StringMapping"] = UNSET
- output: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
+ output = self.output
default = self.default
input_ = self.input_
mapping: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.mapping, Unset):
mapping = self.mapping.to_dict()
- output = self.output
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
@@ -48,14 +47,14 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
+ if output is not UNSET:
+ field_dict["output"] = output
if default is not UNSET:
field_dict["default"] = default
if input_ is not UNSET:
field_dict["input"] = input_
if mapping is not UNSET:
field_dict["mapping"] = mapping
- if output is not UNSET:
- field_dict["output"] = output
return field_dict
@@ -66,6 +65,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
+ output = d.pop("output", UNSET)
+
default = d.pop("default", UNSET)
input_ = d.pop("input", UNSET)
@@ -77,14 +78,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
mapping = StringMapping.from_dict(_mapping)
- output = d.pop("output", UNSET)
-
apply_mapping = cls(
type=type,
+ output=output,
default=default,
input_=input_,
mapping=mapping,
- output=output,
)
apply_mapping.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/approximation_params.py b/src/tuneinsight/api/sdk/models/approximation_params.py
index cba3c1f..1997bef 100644
--- a/src/tuneinsight/api/sdk/models/approximation_params.py
+++ b/src/tuneinsight/api/sdk/models/approximation_params.py
@@ -10,30 +10,30 @@ class ApproximationParams:
"""parameters for polynomial approximation
Attributes:
- approximation_degree (int): The degree for the sigmoid approximation. Default: 28.
approximation_interval_max (float): The higher bound for the approximation. The features must respect it.
Default: 8.0.
approximation_interval_min (float): The lower bound for the approximation. The features must respect it.
Default: -8.0.
+ approximation_degree (int): The degree for the sigmoid approximation. Default: 28.
"""
- approximation_degree: int = 28
approximation_interval_max: float = 8.0
approximation_interval_min: float = -8.0
+ approximation_degree: int = 28
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- approximation_degree = self.approximation_degree
approximation_interval_max = self.approximation_interval_max
approximation_interval_min = self.approximation_interval_min
+ approximation_degree = self.approximation_degree
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
{
- "approximationDegree": approximation_degree,
"approximationIntervalMax": approximation_interval_max,
"approximationIntervalMin": approximation_interval_min,
+ "approximationDegree": approximation_degree,
}
)
@@ -42,16 +42,16 @@ def to_dict(self) -> Dict[str, Any]:
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- approximation_degree = d.pop("approximationDegree")
-
approximation_interval_max = d.pop("approximationIntervalMax")
approximation_interval_min = d.pop("approximationIntervalMin")
+ approximation_degree = d.pop("approximationDegree")
+
approximation_params = cls(
- approximation_degree=approximation_degree,
approximation_interval_max=approximation_interval_max,
approximation_interval_min=approximation_interval_min,
+ approximation_degree=approximation_degree,
)
approximation_params.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/backup_definition.py b/src/tuneinsight/api/sdk/models/backup_definition.py
index a5b0b6f..2af8592 100644
--- a/src/tuneinsight/api/sdk/models/backup_definition.py
+++ b/src/tuneinsight/api/sdk/models/backup_definition.py
@@ -17,21 +17,22 @@ class BackupDefinition:
"""backup parameters
Attributes:
+ encrypt (Union[Unset, bool]): whether or not to encrypt the backup
encryption_key (Union[Unset, str]): b64 encoded encryption in case the backup needs to be encrypted
path (Union[Unset, str]): path to the local backup directory
s_3_parameters (Union[Unset, S3Parameters]): parameters for the remote s3-compatible storage
type (Union[Unset, BackupType]): enumeration of backup types
- encrypt (Union[Unset, bool]): whether or not to encrypt the backup
"""
+ encrypt: Union[Unset, bool] = UNSET
encryption_key: Union[Unset, str] = UNSET
path: Union[Unset, str] = UNSET
s_3_parameters: Union[Unset, "S3Parameters"] = UNSET
type: Union[Unset, BackupType] = UNSET
- encrypt: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ encrypt = self.encrypt
encryption_key = self.encryption_key
path = self.path
s_3_parameters: Union[Unset, Dict[str, Any]] = UNSET
@@ -42,11 +43,11 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.type, Unset):
type = self.type.value
- encrypt = self.encrypt
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if encrypt is not UNSET:
+ field_dict["encrypt"] = encrypt
if encryption_key is not UNSET:
field_dict["encryptionKey"] = encryption_key
if path is not UNSET:
@@ -55,8 +56,6 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["s3Parameters"] = s_3_parameters
if type is not UNSET:
field_dict["type"] = type
- if encrypt is not UNSET:
- field_dict["encrypt"] = encrypt
return field_dict
@@ -65,6 +64,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.s3_parameters import S3Parameters
d = src_dict.copy()
+ encrypt = d.pop("encrypt", UNSET)
+
encryption_key = d.pop("encryptionKey", UNSET)
path = d.pop("path", UNSET)
@@ -83,14 +84,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
type = BackupType(_type)
- encrypt = d.pop("encrypt", UNSET)
-
backup_definition = cls(
+ encrypt=encrypt,
encryption_key=encryption_key,
path=path,
s_3_parameters=s_3_parameters,
type=type,
- encrypt=encrypt,
)
backup_definition.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/binning_operation.py b/src/tuneinsight/api/sdk/models/binning_operation.py
index 06951e8..8ac0f52 100644
--- a/src/tuneinsight/api/sdk/models/binning_operation.py
+++ b/src/tuneinsight/api/sdk/models/binning_operation.py
@@ -17,6 +17,8 @@ class BinningOperation:
"""Dataset binning operation definition
Attributes:
+ aggregated_columns (Union[Unset, List[str]]): list of numerical columns to aggregate per bin when binning is
+ done, if unspecified binning only counts the number of rows
categories (Union[Unset, List[str]]): list of categories when groupByType is 'category'
count_columns (Union[Unset, List['CategoricalColumn']]): list of categorical on which to count the number of
records per bin per matching value
@@ -26,20 +28,22 @@ class BinningOperation:
range_values (Union[Unset, List[float]]): list of cuts to use when groupByType is 'range' ([x,y] => creating 3
bins [v < x, x <= v < y, y <= v])
target_column (Union[Unset, str]): column targeted by the binning operation
- aggregated_columns (Union[Unset, List[str]]): list of numerical columns to aggregate per bin when binning is
- done, if unspecified binning only counts the number of rows
"""
+ aggregated_columns: Union[Unset, List[str]] = UNSET
categories: Union[Unset, List[str]] = UNSET
count_columns: Union[Unset, List["CategoricalColumn"]] = UNSET
group_by_type: Union[Unset, GroupByType] = UNSET
keep_non_categorized_items: Union[Unset, bool] = True
range_values: Union[Unset, List[float]] = UNSET
target_column: Union[Unset, str] = UNSET
- aggregated_columns: Union[Unset, List[str]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ aggregated_columns: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.aggregated_columns, Unset):
+ aggregated_columns = self.aggregated_columns
+
categories: Union[Unset, List[str]] = UNSET
if not isinstance(self.categories, Unset):
categories = self.categories
@@ -62,13 +66,12 @@ def to_dict(self) -> Dict[str, Any]:
range_values = self.range_values
target_column = self.target_column
- aggregated_columns: Union[Unset, List[str]] = UNSET
- if not isinstance(self.aggregated_columns, Unset):
- aggregated_columns = self.aggregated_columns
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if aggregated_columns is not UNSET:
+ field_dict["aggregatedColumns"] = aggregated_columns
if categories is not UNSET:
field_dict["categories"] = categories
if count_columns is not UNSET:
@@ -81,8 +84,6 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["rangeValues"] = range_values
if target_column is not UNSET:
field_dict["targetColumn"] = target_column
- if aggregated_columns is not UNSET:
- field_dict["aggregatedColumns"] = aggregated_columns
return field_dict
@@ -91,6 +92,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.categorical_column import CategoricalColumn
d = src_dict.copy()
+ aggregated_columns = cast(List[str], d.pop("aggregatedColumns", UNSET))
+
categories = cast(List[str], d.pop("categories", UNSET))
count_columns = []
@@ -113,16 +116,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
target_column = d.pop("targetColumn", UNSET)
- aggregated_columns = cast(List[str], d.pop("aggregatedColumns", UNSET))
-
binning_operation = cls(
+ aggregated_columns=aggregated_columns,
categories=categories,
count_columns=count_columns,
group_by_type=group_by_type,
keep_non_categorized_items=keep_non_categorized_items,
range_values=range_values,
target_column=target_column,
- aggregated_columns=aggregated_columns,
)
binning_operation.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/bootstrap.py b/src/tuneinsight/api/sdk/models/bootstrap.py
index f2f22e8..2042a68 100644
--- a/src/tuneinsight/api/sdk/models/bootstrap.py
+++ b/src/tuneinsight/api/sdk/models/bootstrap.py
@@ -23,8 +23,12 @@ class Bootstrap:
Attributes:
type (ComputationType): Type of the computation.
value (str): Unique identifier of a data object.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class Bootstrap:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class Bootstrap:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,95 +66,81 @@ class Bootstrap:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
"""
type: ComputationType
value: str
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
value = self.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -151,46 +150,46 @@ def to_dict(self) -> Dict[str, Any]:
"value": value,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
return field_dict
@@ -206,14 +205,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
value = d.pop("value")
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -222,10 +214,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -233,6 +236,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -244,19 +261,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -264,41 +268,36 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
bootstrap = cls(
type=type,
value=value,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
)
bootstrap.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/collective_key_gen.py b/src/tuneinsight/api/sdk/models/collective_key_gen.py
index 34092e9..7b1183f 100644
--- a/src/tuneinsight/api/sdk/models/collective_key_gen.py
+++ b/src/tuneinsight/api/sdk/models/collective_key_gen.py
@@ -22,8 +22,12 @@ class CollectiveKeyGen:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class CollectiveKeyGen:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class CollectiveKeyGen:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,93 +65,79 @@ class CollectiveKeyGen:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -147,46 +146,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
return field_dict
@@ -200,14 +199,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -216,10 +208,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -227,6 +230,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -238,19 +255,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -258,40 +262,35 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
collective_key_gen = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
)
collective_key_gen.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/collective_key_switch.py b/src/tuneinsight/api/sdk/models/collective_key_switch.py
index 4bf1da7..7fdb6f4 100644
--- a/src/tuneinsight/api/sdk/models/collective_key_switch.py
+++ b/src/tuneinsight/api/sdk/models/collective_key_switch.py
@@ -23,8 +23,12 @@ class CollectiveKeySwitch:
Attributes:
type (ComputationType): Type of the computation.
cipher_vector (str): Unique identifier of a data object.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class CollectiveKeySwitch:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class CollectiveKeySwitch:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,53 +66,40 @@ class CollectiveKeySwitch:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
target_public_key (Union[Unset, str]): Unique identifier of a data object.
target_public_key_base_64 (Union[Unset, str]): the full target public key in base-64 format.
"""
type: ComputationType
cipher_vector: str
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
target_public_key: Union[Unset, str] = UNSET
target_public_key_base_64: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
@@ -108,44 +108,44 @@ def to_dict(self) -> Dict[str, Any]:
type = self.type.value
cipher_vector = self.cipher_vector
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
target_public_key = self.target_public_key
target_public_key_base_64 = self.target_public_key_base_64
@@ -157,46 +157,46 @@ def to_dict(self) -> Dict[str, Any]:
"cipherVector": cipher_vector,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if target_public_key is not UNSET:
field_dict["targetPublicKey"] = target_public_key
if target_public_key_base_64 is not UNSET:
@@ -216,14 +216,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
cipher_vector = d.pop("cipherVector")
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -232,10 +225,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -243,6 +247,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -254,19 +272,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -274,17 +279,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
target_public_key = d.pop("targetPublicKey", UNSET)
@@ -293,26 +293,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
collective_key_switch = cls(
type=type,
cipher_vector=cipher_vector,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
target_public_key=target_public_key,
target_public_key_base_64=target_public_key_base_64,
)
diff --git a/src/tuneinsight/api/sdk/models/column_schema.py b/src/tuneinsight/api/sdk/models/column_schema.py
index e87f5e3..abdfcc9 100644
--- a/src/tuneinsight/api/sdk/models/column_schema.py
+++ b/src/tuneinsight/api/sdk/models/column_schema.py
@@ -15,6 +15,9 @@
class ColumnSchema:
"""
Attributes:
+ nullable (Union[Unset, bool]): whether the column is allowed to contain null values.
+ required (Union[Unset, None, bool]): if set to false, the column will be considered as optional in the dataset.
+ title (Union[Unset, str]): name given to the column for informative purposes
checks (Union[Unset, ColumnSchemaChecks]): optional additional checks
coerce (Union[Unset, bool]): if set to true, the validation will first coerce the column into the corresponding
dtype
@@ -23,21 +26,21 @@ class ColumnSchema:
dtype (Union[Unset, str]): expected data type for the column
supported types:
https://pandera.readthedocs.io/en/stable/dtype_validation.html#supported-pandas-datatypes
- nullable (Union[Unset, bool]): whether the column is allowed to contain null values.
- required (Union[Unset, None, bool]): if set to false, the column will be considered as optional in the dataset.
- title (Union[Unset, str]): name given to the column for informative purposes
"""
+ nullable: Union[Unset, bool] = UNSET
+ required: Union[Unset, None, bool] = UNSET
+ title: Union[Unset, str] = UNSET
checks: Union[Unset, "ColumnSchemaChecks"] = UNSET
coerce: Union[Unset, bool] = UNSET
description: Union[Unset, str] = UNSET
dtype: Union[Unset, str] = UNSET
- nullable: Union[Unset, bool] = UNSET
- required: Union[Unset, None, bool] = UNSET
- title: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ nullable = self.nullable
+ required = self.required
+ title = self.title
checks: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.checks, Unset):
checks = self.checks.to_dict()
@@ -45,13 +48,16 @@ def to_dict(self) -> Dict[str, Any]:
coerce = self.coerce
description = self.description
dtype = self.dtype
- nullable = self.nullable
- required = self.required
- title = self.title
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if nullable is not UNSET:
+ field_dict["nullable"] = nullable
+ if required is not UNSET:
+ field_dict["required"] = required
+ if title is not UNSET:
+ field_dict["title"] = title
if checks is not UNSET:
field_dict["checks"] = checks
if coerce is not UNSET:
@@ -60,12 +66,6 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["description"] = description
if dtype is not UNSET:
field_dict["dtype"] = dtype
- if nullable is not UNSET:
- field_dict["nullable"] = nullable
- if required is not UNSET:
- field_dict["required"] = required
- if title is not UNSET:
- field_dict["title"] = title
return field_dict
@@ -74,6 +74,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.column_schema_checks import ColumnSchemaChecks
d = src_dict.copy()
+ nullable = d.pop("nullable", UNSET)
+
+ required = d.pop("required", UNSET)
+
+ title = d.pop("title", UNSET)
+
_checks = d.pop("checks", UNSET)
checks: Union[Unset, ColumnSchemaChecks]
if isinstance(_checks, Unset):
@@ -87,20 +93,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
dtype = d.pop("dtype", UNSET)
- nullable = d.pop("nullable", UNSET)
-
- required = d.pop("required", UNSET)
-
- title = d.pop("title", UNSET)
-
column_schema = cls(
+ nullable=nullable,
+ required=required,
+ title=title,
checks=checks,
coerce=coerce,
description=description,
dtype=dtype,
- nullable=nullable,
- required=required,
- title=title,
)
column_schema.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/column_schema_checks.py b/src/tuneinsight/api/sdk/models/column_schema_checks.py
index d93fe9b..9851b83 100644
--- a/src/tuneinsight/api/sdk/models/column_schema_checks.py
+++ b/src/tuneinsight/api/sdk/models/column_schema_checks.py
@@ -16,68 +16,69 @@ class ColumnSchemaChecks:
"""optional additional checks
Attributes:
- eq (Union[Unset, Any]): verifies that all values are equal to this value.
gt (Union[Unset, Any]): verifies that all values are greater than this value.
+ isin (Union[Unset, List[Any]]):
+ notin (Union[Unset, List[Any]]):
+ ge (Union[Unset, Any]): verifies that all values are greater than or equal to this value.
in_range (Union[Unset, ColumnSchemaChecksInRange]):
+ le (Union[Unset, Any]): verifies that all values are less than or equal to this value.
lt (Union[Unset, Any]): verifies that all values are less than this value.
str_startswith (Union[Unset, str]):
- ge (Union[Unset, Any]): verifies that all values are greater than or equal to this value.
- isin (Union[Unset, List[Any]]):
- le (Union[Unset, Any]): verifies that all values are less than or equal to this value.
- notin (Union[Unset, List[Any]]):
+ eq (Union[Unset, Any]): verifies that all values are equal to this value.
"""
- eq: Union[Unset, Any] = UNSET
gt: Union[Unset, Any] = UNSET
+ isin: Union[Unset, List[Any]] = UNSET
+ notin: Union[Unset, List[Any]] = UNSET
+ ge: Union[Unset, Any] = UNSET
in_range: Union[Unset, "ColumnSchemaChecksInRange"] = UNSET
+ le: Union[Unset, Any] = UNSET
lt: Union[Unset, Any] = UNSET
str_startswith: Union[Unset, str] = UNSET
- ge: Union[Unset, Any] = UNSET
- isin: Union[Unset, List[Any]] = UNSET
- le: Union[Unset, Any] = UNSET
- notin: Union[Unset, List[Any]] = UNSET
+ eq: Union[Unset, Any] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- eq = self.eq
gt = self.gt
- in_range: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.in_range, Unset):
- in_range = self.in_range.to_dict()
-
- lt = self.lt
- str_startswith = self.str_startswith
- ge = self.ge
isin: Union[Unset, List[Any]] = UNSET
if not isinstance(self.isin, Unset):
isin = self.isin
- le = self.le
notin: Union[Unset, List[Any]] = UNSET
if not isinstance(self.notin, Unset):
notin = self.notin
+ ge = self.ge
+ in_range: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.in_range, Unset):
+ in_range = self.in_range.to_dict()
+
+ le = self.le
+ lt = self.lt
+ str_startswith = self.str_startswith
+ eq = self.eq
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if eq is not UNSET:
- field_dict["eq"] = eq
if gt is not UNSET:
field_dict["gt"] = gt
+ if isin is not UNSET:
+ field_dict["isin"] = isin
+ if notin is not UNSET:
+ field_dict["notin"] = notin
+ if ge is not UNSET:
+ field_dict["ge"] = ge
if in_range is not UNSET:
field_dict["in_range"] = in_range
+ if le is not UNSET:
+ field_dict["le"] = le
if lt is not UNSET:
field_dict["lt"] = lt
if str_startswith is not UNSET:
field_dict["str_startswith"] = str_startswith
- if ge is not UNSET:
- field_dict["ge"] = ge
- if isin is not UNSET:
- field_dict["isin"] = isin
- if le is not UNSET:
- field_dict["le"] = le
- if notin is not UNSET:
- field_dict["notin"] = notin
+ if eq is not UNSET:
+ field_dict["eq"] = eq
return field_dict
@@ -86,10 +87,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.column_schema_checks_in_range import ColumnSchemaChecksInRange
d = src_dict.copy()
- eq = d.pop("eq", UNSET)
-
gt = d.pop("gt", UNSET)
+ isin = cast(List[Any], d.pop("isin", UNSET))
+
+ notin = cast(List[Any], d.pop("notin", UNSET))
+
+ ge = d.pop("ge", UNSET)
+
_in_range = d.pop("in_range", UNSET)
in_range: Union[Unset, ColumnSchemaChecksInRange]
if isinstance(_in_range, Unset):
@@ -97,28 +102,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
in_range = ColumnSchemaChecksInRange.from_dict(_in_range)
+ le = d.pop("le", UNSET)
+
lt = d.pop("lt", UNSET)
str_startswith = d.pop("str_startswith", UNSET)
- ge = d.pop("ge", UNSET)
-
- isin = cast(List[Any], d.pop("isin", UNSET))
-
- le = d.pop("le", UNSET)
-
- notin = cast(List[Any], d.pop("notin", UNSET))
+ eq = d.pop("eq", UNSET)
column_schema_checks = cls(
- eq=eq,
gt=gt,
+ isin=isin,
+ notin=notin,
+ ge=ge,
in_range=in_range,
+ le=le,
lt=lt,
str_startswith=str_startswith,
- ge=ge,
- isin=isin,
- le=le,
- notin=notin,
+ eq=eq,
)
column_schema_checks.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/computation.py b/src/tuneinsight/api/sdk/models/computation.py
index 34b8130..509905f 100644
--- a/src/tuneinsight/api/sdk/models/computation.py
+++ b/src/tuneinsight/api/sdk/models/computation.py
@@ -22,47 +22,51 @@ class Computation:
definition (ComputationDefinition): Generic computation.
id (str): Identifier of a computation, unique across all computing nodes.
status (ComputationStatus): Status of the computation.
+ errors (Union[Unset, List['ComputationError']]): list of errors that occurred during the computation
+ ingress (Union[Unset, int]): keeps track of the number of bytes received during a computation to serve as a
+ bandwidth measure
+ warnings (Union[Unset, List[str]]): list of warnings that occurred during the computation
+ description (Union[Unset, str]):
local (Union[Unset, bool]):
measurements (Union[Unset, List['Measurement']]): list of benchmarking measurements done on the computation
- owner (Union[Unset, str]): identifier of the end user that has requested the computation
- updated_at (Union[Unset, str]):
+ started_at (Union[Unset, str]):
visible (Union[Unset, bool]): False if the computation is internal and should not be displayed to the user by
default
- description (Union[Unset, str]):
+ created_at (Union[Unset, str]):
+ ended_at (Union[Unset, str]):
+ owner (Union[Unset, str]): identifier of the end user that has requested the computation
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ results (Union[Unset, List[str]]): Identifier(s) of the resulting data object(s). Available only when the status
+ is completed.
+ updated_at (Union[Unset, str]):
egress (Union[Unset, int]): keeps track of the number of bytes sent during a computation to serve as a bandwidth
measure
- ended_at (Union[Unset, str]):
- created_at (Union[Unset, str]):
- errors (Union[Unset, List['ComputationError']]): list of errors that occurred during the computation
execution_cost (Union[Unset, float]): the cost of the computation when an execution quota has been setup.
progress (Union[Unset, int]):
- started_at (Union[Unset, str]):
- warnings (Union[Unset, List[str]]): list of warnings that occurred during the computation
- ingress (Union[Unset, int]): keeps track of the number of bytes received during a computation to serve as a
- bandwidth measure
- results (Union[Unset, List[str]]): Identifier(s) of the resulting data object(s). Available only when the status
- is completed.
+ data_source_id (Union[Unset, None, str]): Unique identifier of a data source.
"""
definition: "ComputationDefinition"
id: str
status: ComputationStatus
+ errors: Union[Unset, List["ComputationError"]] = UNSET
+ ingress: Union[Unset, int] = UNSET
+ warnings: Union[Unset, List[str]] = UNSET
+ description: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
measurements: Union[Unset, List["Measurement"]] = UNSET
+ started_at: Union[Unset, str] = UNSET
+ visible: Union[Unset, bool] = UNSET
+ created_at: Union[Unset, str] = UNSET
+ ended_at: Union[Unset, str] = UNSET
owner: Union[Unset, str] = UNSET
+ project_id: Union[Unset, str] = UNSET
+ results: Union[Unset, List[str]] = UNSET
updated_at: Union[Unset, str] = UNSET
- visible: Union[Unset, bool] = UNSET
- description: Union[Unset, str] = UNSET
egress: Union[Unset, int] = UNSET
- ended_at: Union[Unset, str] = UNSET
- created_at: Union[Unset, str] = UNSET
- errors: Union[Unset, List["ComputationError"]] = UNSET
execution_cost: Union[Unset, float] = UNSET
progress: Union[Unset, int] = UNSET
- started_at: Union[Unset, str] = UNSET
- warnings: Union[Unset, List[str]] = UNSET
- ingress: Union[Unset, int] = UNSET
- results: Union[Unset, List[str]] = UNSET
+ data_source_id: Union[Unset, None, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
@@ -71,22 +75,6 @@ def to_dict(self) -> Dict[str, Any]:
id = self.id
status = self.status.value
- local = self.local
- measurements: Union[Unset, List[Dict[str, Any]]] = UNSET
- if not isinstance(self.measurements, Unset):
- measurements = []
- for measurements_item_data in self.measurements:
- measurements_item = measurements_item_data.to_dict()
-
- measurements.append(measurements_item)
-
- owner = self.owner
- updated_at = self.updated_at
- visible = self.visible
- description = self.description
- egress = self.egress
- ended_at = self.ended_at
- created_at = self.created_at
errors: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.errors, Unset):
errors = []
@@ -95,18 +83,37 @@ def to_dict(self) -> Dict[str, Any]:
errors.append(errors_item)
- execution_cost = self.execution_cost
- progress = self.progress
- started_at = self.started_at
+ ingress = self.ingress
warnings: Union[Unset, List[str]] = UNSET
if not isinstance(self.warnings, Unset):
warnings = self.warnings
- ingress = self.ingress
+ description = self.description
+ local = self.local
+ measurements: Union[Unset, List[Dict[str, Any]]] = UNSET
+ if not isinstance(self.measurements, Unset):
+ measurements = []
+ for measurements_item_data in self.measurements:
+ measurements_item = measurements_item_data.to_dict()
+
+ measurements.append(measurements_item)
+
+ started_at = self.started_at
+ visible = self.visible
+ created_at = self.created_at
+ ended_at = self.ended_at
+ owner = self.owner
+ project_id = self.project_id
results: Union[Unset, List[str]] = UNSET
if not isinstance(self.results, Unset):
results = self.results
+ updated_at = self.updated_at
+ egress = self.egress
+ execution_cost = self.execution_cost
+ progress = self.progress
+ data_source_id = self.data_source_id
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
@@ -116,38 +123,42 @@ def to_dict(self) -> Dict[str, Any]:
"status": status,
}
)
+ if errors is not UNSET:
+ field_dict["errors"] = errors
+ if ingress is not UNSET:
+ field_dict["ingress"] = ingress
+ if warnings is not UNSET:
+ field_dict["warnings"] = warnings
+ if description is not UNSET:
+ field_dict["description"] = description
if local is not UNSET:
field_dict["local"] = local
if measurements is not UNSET:
field_dict["measurements"] = measurements
+ if started_at is not UNSET:
+ field_dict["startedAt"] = started_at
+ if visible is not UNSET:
+ field_dict["visible"] = visible
+ if created_at is not UNSET:
+ field_dict["createdAt"] = created_at
+ if ended_at is not UNSET:
+ field_dict["endedAt"] = ended_at
if owner is not UNSET:
field_dict["owner"] = owner
+ if project_id is not UNSET:
+ field_dict["projectId"] = project_id
+ if results is not UNSET:
+ field_dict["results"] = results
if updated_at is not UNSET:
field_dict["updatedAt"] = updated_at
- if visible is not UNSET:
- field_dict["visible"] = visible
- if description is not UNSET:
- field_dict["description"] = description
if egress is not UNSET:
field_dict["egress"] = egress
- if ended_at is not UNSET:
- field_dict["endedAt"] = ended_at
- if created_at is not UNSET:
- field_dict["createdAt"] = created_at
- if errors is not UNSET:
- field_dict["errors"] = errors
if execution_cost is not UNSET:
field_dict["executionCost"] = execution_cost
if progress is not UNSET:
field_dict["progress"] = progress
- if started_at is not UNSET:
- field_dict["startedAt"] = started_at
- if warnings is not UNSET:
- field_dict["warnings"] = warnings
- if ingress is not UNSET:
- field_dict["ingress"] = ingress
- if results is not UNSET:
- field_dict["results"] = results
+ if data_source_id is not UNSET:
+ field_dict["dataSourceId"] = data_source_id
return field_dict
@@ -164,6 +175,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
status = ComputationStatus(d.pop("status"))
+ errors = []
+ _errors = d.pop("errors", UNSET)
+ for errors_item_data in _errors or []:
+ errors_item = ComputationError.from_dict(errors_item_data)
+
+ errors.append(errors_item)
+
+ ingress = d.pop("ingress", UNSET)
+
+ warnings = cast(List[str], d.pop("warnings", UNSET))
+
+ description = d.pop("description", UNSET)
+
local = d.pop("local", UNSET)
measurements = []
@@ -173,59 +197,52 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
measurements.append(measurements_item)
- owner = d.pop("owner", UNSET)
-
- updated_at = d.pop("updatedAt", UNSET)
+ started_at = d.pop("startedAt", UNSET)
visible = d.pop("visible", UNSET)
- description = d.pop("description", UNSET)
-
- egress = d.pop("egress", UNSET)
+ created_at = d.pop("createdAt", UNSET)
ended_at = d.pop("endedAt", UNSET)
- created_at = d.pop("createdAt", UNSET)
-
- errors = []
- _errors = d.pop("errors", UNSET)
- for errors_item_data in _errors or []:
- errors_item = ComputationError.from_dict(errors_item_data)
+ owner = d.pop("owner", UNSET)
- errors.append(errors_item)
+ project_id = d.pop("projectId", UNSET)
- execution_cost = d.pop("executionCost", UNSET)
+ results = cast(List[str], d.pop("results", UNSET))
- progress = d.pop("progress", UNSET)
+ updated_at = d.pop("updatedAt", UNSET)
- started_at = d.pop("startedAt", UNSET)
+ egress = d.pop("egress", UNSET)
- warnings = cast(List[str], d.pop("warnings", UNSET))
+ execution_cost = d.pop("executionCost", UNSET)
- ingress = d.pop("ingress", UNSET)
+ progress = d.pop("progress", UNSET)
- results = cast(List[str], d.pop("results", UNSET))
+ data_source_id = d.pop("dataSourceId", UNSET)
computation = cls(
definition=definition,
id=id,
status=status,
+ errors=errors,
+ ingress=ingress,
+ warnings=warnings,
+ description=description,
local=local,
measurements=measurements,
+ started_at=started_at,
+ visible=visible,
+ created_at=created_at,
+ ended_at=ended_at,
owner=owner,
+ project_id=project_id,
+ results=results,
updated_at=updated_at,
- visible=visible,
- description=description,
egress=egress,
- ended_at=ended_at,
- created_at=created_at,
- errors=errors,
execution_cost=execution_cost,
progress=progress,
- started_at=started_at,
- warnings=warnings,
- ingress=ingress,
- results=results,
+ data_source_id=data_source_id,
)
computation.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/computation_data_source_parameters.py b/src/tuneinsight/api/sdk/models/computation_data_source_parameters.py
index 54bc6b5..0264dad 100644
--- a/src/tuneinsight/api/sdk/models/computation_data_source_parameters.py
+++ b/src/tuneinsight/api/sdk/models/computation_data_source_parameters.py
@@ -17,25 +17,24 @@ class ComputationDataSourceParameters:
"""Parameters used to query the datasource from each node before the computation
Attributes:
- only_root_query (Union[Unset, bool]): Whether or not the query should only be executed at the root node of the
- computation
compound_disabled (Union[Unset, bool]): when true, then even if the compound query is specified, it is not taken
into account (enables keeping previously defined queries)
compound_query (Union[Unset, DataSourceCompoundQuery]): definition of datasource queries for each node in the
computation
data_source_id (Union[Unset, None, str]): Unique identifier of a data source.
data_source_query (Union[Unset, DataSourceQuery]): schema used for the query
+ only_root_query (Union[Unset, bool]): Whether or not the query should only be executed at the root node of the
+ computation
"""
- only_root_query: Union[Unset, bool] = UNSET
compound_disabled: Union[Unset, bool] = UNSET
compound_query: Union[Unset, "DataSourceCompoundQuery"] = UNSET
data_source_id: Union[Unset, None, str] = UNSET
data_source_query: Union[Unset, "DataSourceQuery"] = UNSET
+ only_root_query: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- only_root_query = self.only_root_query
compound_disabled = self.compound_disabled
compound_query: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.compound_query, Unset):
@@ -46,11 +45,11 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.data_source_query, Unset):
data_source_query = self.data_source_query.to_dict()
+ only_root_query = self.only_root_query
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if only_root_query is not UNSET:
- field_dict["onlyRootQuery"] = only_root_query
if compound_disabled is not UNSET:
field_dict["compoundDisabled"] = compound_disabled
if compound_query is not UNSET:
@@ -59,6 +58,8 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["dataSourceId"] = data_source_id
if data_source_query is not UNSET:
field_dict["dataSourceQuery"] = data_source_query
+ if only_root_query is not UNSET:
+ field_dict["onlyRootQuery"] = only_root_query
return field_dict
@@ -68,8 +69,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.data_source_query import DataSourceQuery
d = src_dict.copy()
- only_root_query = d.pop("onlyRootQuery", UNSET)
-
compound_disabled = d.pop("compoundDisabled", UNSET)
_compound_query = d.pop("compoundQuery", UNSET)
@@ -88,12 +87,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
data_source_query = DataSourceQuery.from_dict(_data_source_query)
+ only_root_query = d.pop("onlyRootQuery", UNSET)
+
computation_data_source_parameters = cls(
- only_root_query=only_root_query,
compound_disabled=compound_disabled,
compound_query=compound_query,
data_source_id=data_source_id,
data_source_query=data_source_query,
+ only_root_query=only_root_query,
)
computation_data_source_parameters.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/computation_definition.py b/src/tuneinsight/api/sdk/models/computation_definition.py
index 2e94a19..2b3075b 100644
--- a/src/tuneinsight/api/sdk/models/computation_definition.py
+++ b/src/tuneinsight/api/sdk/models/computation_definition.py
@@ -23,8 +23,12 @@ class ComputationDefinition:
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class ComputationDefinition:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class ComputationDefinition:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,93 +66,79 @@ class ComputationDefinition:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -148,46 +147,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
return field_dict
@@ -201,14 +200,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -217,10 +209,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -228,6 +231,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -239,19 +256,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -259,40 +263,35 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
computation_definition = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
)
computation_definition.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/computation_error.py b/src/tuneinsight/api/sdk/models/computation_error.py
index d4c41ba..f469d12 100644
--- a/src/tuneinsight/api/sdk/models/computation_error.py
+++ b/src/tuneinsight/api/sdk/models/computation_error.py
@@ -13,44 +13,48 @@ class ComputationError:
"""error that occurred when running a computation
Attributes:
- type (Union[Unset, ComputationErrorType]): error type identifier
- message (Union[Unset, str]): the error message
origin (Union[Unset, str]): node instance id that caused the error
timestamp (Union[Unset, str]): time at which the error ocurred
+ type (Union[Unset, ComputationErrorType]): error type identifier
+ message (Union[Unset, str]): the error message
"""
- type: Union[Unset, ComputationErrorType] = UNSET
- message: Union[Unset, str] = UNSET
origin: Union[Unset, str] = UNSET
timestamp: Union[Unset, str] = UNSET
+ type: Union[Unset, ComputationErrorType] = UNSET
+ message: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ origin = self.origin
+ timestamp = self.timestamp
type: Union[Unset, str] = UNSET
if not isinstance(self.type, Unset):
type = self.type.value
message = self.message
- origin = self.origin
- timestamp = self.timestamp
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if type is not UNSET:
- field_dict["type"] = type
- if message is not UNSET:
- field_dict["message"] = message
if origin is not UNSET:
field_dict["origin"] = origin
if timestamp is not UNSET:
field_dict["timestamp"] = timestamp
+ if type is not UNSET:
+ field_dict["type"] = type
+ if message is not UNSET:
+ field_dict["message"] = message
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
+ origin = d.pop("origin", UNSET)
+
+ timestamp = d.pop("timestamp", UNSET)
+
_type = d.pop("type", UNSET)
type: Union[Unset, ComputationErrorType]
if isinstance(_type, Unset):
@@ -60,15 +64,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
message = d.pop("message", UNSET)
- origin = d.pop("origin", UNSET)
-
- timestamp = d.pop("timestamp", UNSET)
-
computation_error = cls(
- type=type,
- message=message,
origin=origin,
timestamp=timestamp,
+ type=type,
+ message=message,
)
computation_error.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/computation_list_response.py b/src/tuneinsight/api/sdk/models/computation_list_response.py
index c2cb577..ee51665 100644
--- a/src/tuneinsight/api/sdk/models/computation_list_response.py
+++ b/src/tuneinsight/api/sdk/models/computation_list_response.py
@@ -16,15 +16,16 @@ class ComputationListResponse:
"""List of available computations.
Attributes:
- items (Union[Unset, List['Computation']]):
total (Union[Unset, int]):
+ items (Union[Unset, List['Computation']]):
"""
- items: Union[Unset, List["Computation"]] = UNSET
total: Union[Unset, int] = UNSET
+ items: Union[Unset, List["Computation"]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ total = self.total
items: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.items, Unset):
items = []
@@ -33,15 +34,13 @@ def to_dict(self) -> Dict[str, Any]:
items.append(items_item)
- total = self.total
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if items is not UNSET:
- field_dict["items"] = items
if total is not UNSET:
field_dict["total"] = total
+ if items is not UNSET:
+ field_dict["items"] = items
return field_dict
@@ -50,6 +49,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.computation import Computation
d = src_dict.copy()
+ total = d.pop("total", UNSET)
+
items = []
_items = d.pop("items", UNSET)
for items_item_data in _items or []:
@@ -57,11 +58,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
items.append(items_item)
- total = d.pop("total", UNSET)
-
computation_list_response = cls(
- items=items,
total=total,
+ items=items,
)
computation_list_response.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/computation_policy.py b/src/tuneinsight/api/sdk/models/computation_policy.py
index 57d4386..81046d8 100644
--- a/src/tuneinsight/api/sdk/models/computation_policy.py
+++ b/src/tuneinsight/api/sdk/models/computation_policy.py
@@ -19,51 +19,67 @@ class ComputationPolicy:
"""policy to validate a specific computation
Attributes:
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
authorized_data_source_queries (Union[Unset, List[str]]): list of authorized datasource queries when
restrictDataSourceQueries is set to true
+ authorized_preprocessing_operations (Union[Unset, List[PreprocessingOperationType]]): list of authorized
+ preprocessing operations types when restrictPreprocessingOperations is set to true
flexible_parameters (Union[Unset, List[str]]): when validateParameters is enabled, specifies the set of
parameters for which to ignore validation
restrict_data_source_queries (Union[Unset, bool]): whether or not datasource queries should be restricted
restrict_preprocessing_operations (Union[Unset, bool]): whether or not datasource queries should be restricted
+ template (Union[Unset, ComputationDefinition]): Generic computation.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
+ fixed_parameters (Union[Unset, List[str]]): when validateParameters is enabled, specifies the set of parameters
+ that cannot be changed if empty, then all parameters are validated
validate_parameters (Union[Unset, bool]): whether or not to validate the parameters with the ones from the
template
authorized_computation_types (Union[Unset, List[ComputationType]]): list of authorized computation types
- authorized_preprocessing_operations (Union[Unset, List[PreprocessingOperationType]]): list of authorized
- preprocessing operations types when restrictPreprocessingOperations is set to true
- fixed_parameters (Union[Unset, List[str]]): when validateParameters is enabled, specifies the set of parameters
- that cannot be changed if empty, then all parameters are validated
- template (Union[Unset, ComputationDefinition]): Generic computation.
"""
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
authorized_data_source_queries: Union[Unset, List[str]] = UNSET
+ authorized_preprocessing_operations: Union[Unset, List[PreprocessingOperationType]] = UNSET
flexible_parameters: Union[Unset, List[str]] = UNSET
restrict_data_source_queries: Union[Unset, bool] = UNSET
restrict_preprocessing_operations: Union[Unset, bool] = UNSET
+ template: Union[Unset, "ComputationDefinition"] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ fixed_parameters: Union[Unset, List[str]] = UNSET
validate_parameters: Union[Unset, bool] = UNSET
authorized_computation_types: Union[Unset, List[ComputationType]] = UNSET
- authorized_preprocessing_operations: Union[Unset, List[PreprocessingOperationType]] = UNSET
- fixed_parameters: Union[Unset, List[str]] = UNSET
- template: Union[Unset, "ComputationDefinition"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- dp_policy: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.dp_policy, Unset):
- dp_policy = self.dp_policy.to_dict()
-
authorized_data_source_queries: Union[Unset, List[str]] = UNSET
if not isinstance(self.authorized_data_source_queries, Unset):
authorized_data_source_queries = self.authorized_data_source_queries
+ authorized_preprocessing_operations: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.authorized_preprocessing_operations, Unset):
+ authorized_preprocessing_operations = []
+ for authorized_preprocessing_operations_item_data in self.authorized_preprocessing_operations:
+ authorized_preprocessing_operations_item = authorized_preprocessing_operations_item_data.value
+
+ authorized_preprocessing_operations.append(authorized_preprocessing_operations_item)
+
flexible_parameters: Union[Unset, List[str]] = UNSET
if not isinstance(self.flexible_parameters, Unset):
flexible_parameters = self.flexible_parameters
restrict_data_source_queries = self.restrict_data_source_queries
restrict_preprocessing_operations = self.restrict_preprocessing_operations
+ template: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.template, Unset):
+ template = self.template.to_dict()
+
+ dp_policy: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.dp_policy, Unset):
+ dp_policy = self.dp_policy.to_dict()
+
+ fixed_parameters: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.fixed_parameters, Unset):
+ fixed_parameters = self.fixed_parameters
+
validate_parameters = self.validate_parameters
authorized_computation_types: Union[Unset, List[str]] = UNSET
if not isinstance(self.authorized_computation_types, Unset):
@@ -73,45 +89,29 @@ def to_dict(self) -> Dict[str, Any]:
authorized_computation_types.append(authorized_computation_types_item)
- authorized_preprocessing_operations: Union[Unset, List[str]] = UNSET
- if not isinstance(self.authorized_preprocessing_operations, Unset):
- authorized_preprocessing_operations = []
- for authorized_preprocessing_operations_item_data in self.authorized_preprocessing_operations:
- authorized_preprocessing_operations_item = authorized_preprocessing_operations_item_data.value
-
- authorized_preprocessing_operations.append(authorized_preprocessing_operations_item)
-
- fixed_parameters: Union[Unset, List[str]] = UNSET
- if not isinstance(self.fixed_parameters, Unset):
- fixed_parameters = self.fixed_parameters
-
- template: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.template, Unset):
- template = self.template.to_dict()
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
if authorized_data_source_queries is not UNSET:
field_dict["authorizedDataSourceQueries"] = authorized_data_source_queries
+ if authorized_preprocessing_operations is not UNSET:
+ field_dict["authorizedPreprocessingOperations"] = authorized_preprocessing_operations
if flexible_parameters is not UNSET:
field_dict["flexibleParameters"] = flexible_parameters
if restrict_data_source_queries is not UNSET:
field_dict["restrictDataSourceQueries"] = restrict_data_source_queries
if restrict_preprocessing_operations is not UNSET:
field_dict["restrictPreprocessingOperations"] = restrict_preprocessing_operations
+ if template is not UNSET:
+ field_dict["template"] = template
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if fixed_parameters is not UNSET:
+ field_dict["fixedParameters"] = fixed_parameters
if validate_parameters is not UNSET:
field_dict["validateParameters"] = validate_parameters
if authorized_computation_types is not UNSET:
field_dict["authorizedComputationTypes"] = authorized_computation_types
- if authorized_preprocessing_operations is not UNSET:
- field_dict["authorizedPreprocessingOperations"] = authorized_preprocessing_operations
- if fixed_parameters is not UNSET:
- field_dict["fixedParameters"] = fixed_parameters
- if template is not UNSET:
- field_dict["template"] = template
return field_dict
@@ -121,30 +121,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.dp_policy import DPPolicy
d = src_dict.copy()
- _dp_policy = d.pop("DPPolicy", UNSET)
- dp_policy: Union[Unset, DPPolicy]
- if isinstance(_dp_policy, Unset):
- dp_policy = UNSET
- else:
- dp_policy = DPPolicy.from_dict(_dp_policy)
-
authorized_data_source_queries = cast(List[str], d.pop("authorizedDataSourceQueries", UNSET))
- flexible_parameters = cast(List[str], d.pop("flexibleParameters", UNSET))
-
- restrict_data_source_queries = d.pop("restrictDataSourceQueries", UNSET)
-
- restrict_preprocessing_operations = d.pop("restrictPreprocessingOperations", UNSET)
-
- validate_parameters = d.pop("validateParameters", UNSET)
-
- authorized_computation_types = []
- _authorized_computation_types = d.pop("authorizedComputationTypes", UNSET)
- for authorized_computation_types_item_data in _authorized_computation_types or []:
- authorized_computation_types_item = ComputationType(authorized_computation_types_item_data)
-
- authorized_computation_types.append(authorized_computation_types_item)
-
authorized_preprocessing_operations = []
_authorized_preprocessing_operations = d.pop("authorizedPreprocessingOperations", UNSET)
for authorized_preprocessing_operations_item_data in _authorized_preprocessing_operations or []:
@@ -154,7 +132,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
authorized_preprocessing_operations.append(authorized_preprocessing_operations_item)
- fixed_parameters = cast(List[str], d.pop("fixedParameters", UNSET))
+ flexible_parameters = cast(List[str], d.pop("flexibleParameters", UNSET))
+
+ restrict_data_source_queries = d.pop("restrictDataSourceQueries", UNSET)
+
+ restrict_preprocessing_operations = d.pop("restrictPreprocessingOperations", UNSET)
_template = d.pop("template", UNSET)
template: Union[Unset, ComputationDefinition]
@@ -163,17 +145,35 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
template = ComputationDefinition.from_dict(_template)
+ _dp_policy = d.pop("DPPolicy", UNSET)
+ dp_policy: Union[Unset, DPPolicy]
+ if isinstance(_dp_policy, Unset):
+ dp_policy = UNSET
+ else:
+ dp_policy = DPPolicy.from_dict(_dp_policy)
+
+ fixed_parameters = cast(List[str], d.pop("fixedParameters", UNSET))
+
+ validate_parameters = d.pop("validateParameters", UNSET)
+
+ authorized_computation_types = []
+ _authorized_computation_types = d.pop("authorizedComputationTypes", UNSET)
+ for authorized_computation_types_item_data in _authorized_computation_types or []:
+ authorized_computation_types_item = ComputationType(authorized_computation_types_item_data)
+
+ authorized_computation_types.append(authorized_computation_types_item)
+
computation_policy = cls(
- dp_policy=dp_policy,
authorized_data_source_queries=authorized_data_source_queries,
+ authorized_preprocessing_operations=authorized_preprocessing_operations,
flexible_parameters=flexible_parameters,
restrict_data_source_queries=restrict_data_source_queries,
restrict_preprocessing_operations=restrict_preprocessing_operations,
+ template=template,
+ dp_policy=dp_policy,
+ fixed_parameters=fixed_parameters,
validate_parameters=validate_parameters,
authorized_computation_types=authorized_computation_types,
- authorized_preprocessing_operations=authorized_preprocessing_operations,
- fixed_parameters=fixed_parameters,
- template=template,
)
computation_policy.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/computation_preprocessing_parameters.py b/src/tuneinsight/api/sdk/models/computation_preprocessing_parameters.py
index 16ad382..3edc7c6 100644
--- a/src/tuneinsight/api/sdk/models/computation_preprocessing_parameters.py
+++ b/src/tuneinsight/api/sdk/models/computation_preprocessing_parameters.py
@@ -22,32 +22,24 @@ class ComputationPreprocessingParameters:
"""dataframe pre-processing parameters applied to the input retrieved from the datasource, if applicable
Attributes:
- compound_preprocessing (Union[Unset, ComputationPreprocessingParametersCompoundPreprocessing]): preprocessing to
- be applied for each node
- dataset_schema (Union[Unset, DatasetSchema]): dataset schema definition used to validate input datasets.
filters (Union[Unset, List['LogicalFormula']]): list of filters to apply to the input dataframe (applied after
the preprocessing is run)
global_preprocessing (Union[Unset, PreprocessingChain]): Chain of preprocessing operations applied to the input
dataframe
select (Union[Unset, Select]):
+ compound_preprocessing (Union[Unset, ComputationPreprocessingParametersCompoundPreprocessing]): preprocessing to
+ be applied for each node
+ dataset_schema (Union[Unset, DatasetSchema]): dataset schema definition used to validate input datasets.
"""
- compound_preprocessing: Union[Unset, "ComputationPreprocessingParametersCompoundPreprocessing"] = UNSET
- dataset_schema: Union[Unset, "DatasetSchema"] = UNSET
filters: Union[Unset, List["LogicalFormula"]] = UNSET
global_preprocessing: Union[Unset, "PreprocessingChain"] = UNSET
select: Union[Unset, "Select"] = UNSET
+ compound_preprocessing: Union[Unset, "ComputationPreprocessingParametersCompoundPreprocessing"] = UNSET
+ dataset_schema: Union[Unset, "DatasetSchema"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- compound_preprocessing: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.compound_preprocessing, Unset):
- compound_preprocessing = self.compound_preprocessing.to_dict()
-
- dataset_schema: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.dataset_schema, Unset):
- dataset_schema = self.dataset_schema.to_dict()
-
filters: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.filters, Unset):
filters = []
@@ -64,19 +56,27 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.select, Unset):
select = self.select.to_dict()
+ compound_preprocessing: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.compound_preprocessing, Unset):
+ compound_preprocessing = self.compound_preprocessing.to_dict()
+
+ dataset_schema: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.dataset_schema, Unset):
+ dataset_schema = self.dataset_schema.to_dict()
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if compound_preprocessing is not UNSET:
- field_dict["compoundPreprocessing"] = compound_preprocessing
- if dataset_schema is not UNSET:
- field_dict["datasetSchema"] = dataset_schema
if filters is not UNSET:
field_dict["filters"] = filters
if global_preprocessing is not UNSET:
field_dict["globalPreprocessing"] = global_preprocessing
if select is not UNSET:
field_dict["select"] = select
+ if compound_preprocessing is not UNSET:
+ field_dict["compoundPreprocessing"] = compound_preprocessing
+ if dataset_schema is not UNSET:
+ field_dict["datasetSchema"] = dataset_schema
return field_dict
@@ -91,22 +91,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.select import Select
d = src_dict.copy()
- _compound_preprocessing = d.pop("compoundPreprocessing", UNSET)
- compound_preprocessing: Union[Unset, ComputationPreprocessingParametersCompoundPreprocessing]
- if isinstance(_compound_preprocessing, Unset):
- compound_preprocessing = UNSET
- else:
- compound_preprocessing = ComputationPreprocessingParametersCompoundPreprocessing.from_dict(
- _compound_preprocessing
- )
-
- _dataset_schema = d.pop("datasetSchema", UNSET)
- dataset_schema: Union[Unset, DatasetSchema]
- if isinstance(_dataset_schema, Unset):
- dataset_schema = UNSET
- else:
- dataset_schema = DatasetSchema.from_dict(_dataset_schema)
-
filters = []
_filters = d.pop("filters", UNSET)
for filters_item_data in _filters or []:
@@ -128,12 +112,28 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
select = Select.from_dict(_select)
+ _compound_preprocessing = d.pop("compoundPreprocessing", UNSET)
+ compound_preprocessing: Union[Unset, ComputationPreprocessingParametersCompoundPreprocessing]
+ if isinstance(_compound_preprocessing, Unset):
+ compound_preprocessing = UNSET
+ else:
+ compound_preprocessing = ComputationPreprocessingParametersCompoundPreprocessing.from_dict(
+ _compound_preprocessing
+ )
+
+ _dataset_schema = d.pop("datasetSchema", UNSET)
+ dataset_schema: Union[Unset, DatasetSchema]
+ if isinstance(_dataset_schema, Unset):
+ dataset_schema = UNSET
+ else:
+ dataset_schema = DatasetSchema.from_dict(_dataset_schema)
+
computation_preprocessing_parameters = cls(
- compound_preprocessing=compound_preprocessing,
- dataset_schema=dataset_schema,
filters=filters,
global_preprocessing=global_preprocessing,
select=select,
+ compound_preprocessing=compound_preprocessing,
+ dataset_schema=dataset_schema,
)
computation_preprocessing_parameters.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/credentials.py b/src/tuneinsight/api/sdk/models/credentials.py
index 8b2b71e..64116b4 100644
--- a/src/tuneinsight/api/sdk/models/credentials.py
+++ b/src/tuneinsight/api/sdk/models/credentials.py
@@ -13,24 +13,23 @@ class Credentials:
"""The credentials needed to access the data source.
Attributes:
- api_token (Union[Unset, str]): Token to connect to the API
connection_string (Union[Unset, str]): connection string for a database
credentials_id (Union[Unset, str]): the id of the credentials stored in the key vault
password (Union[Unset, str]): generic password field.
type (Union[Unset, CredentialsType]):
username (Union[Unset, str]): generic username field.
+ api_token (Union[Unset, str]): Token to connect to the API
"""
- api_token: Union[Unset, str] = UNSET
connection_string: Union[Unset, str] = UNSET
credentials_id: Union[Unset, str] = UNSET
password: Union[Unset, str] = UNSET
type: Union[Unset, CredentialsType] = UNSET
username: Union[Unset, str] = UNSET
+ api_token: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- api_token = self.api_token
connection_string = self.connection_string
credentials_id = self.credentials_id
password = self.password
@@ -39,12 +38,11 @@ def to_dict(self) -> Dict[str, Any]:
type = self.type.value
username = self.username
+ api_token = self.api_token
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if api_token is not UNSET:
- field_dict["api-token"] = api_token
if connection_string is not UNSET:
field_dict["connectionString"] = connection_string
if credentials_id is not UNSET:
@@ -55,14 +53,14 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["type"] = type
if username is not UNSET:
field_dict["username"] = username
+ if api_token is not UNSET:
+ field_dict["api-token"] = api_token
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- api_token = d.pop("api-token", UNSET)
-
connection_string = d.pop("connectionString", UNSET)
credentials_id = d.pop("credentialsId", UNSET)
@@ -78,13 +76,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
username = d.pop("username", UNSET)
+ api_token = d.pop("api-token", UNSET)
+
credentials = cls(
- api_token=api_token,
connection_string=connection_string,
credentials_id=credentials_id,
password=password,
type=type,
username=username,
+ api_token=api_token,
)
credentials.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/custom.py b/src/tuneinsight/api/sdk/models/custom.py
index 35f8af8..92758d4 100644
--- a/src/tuneinsight/api/sdk/models/custom.py
+++ b/src/tuneinsight/api/sdk/models/custom.py
@@ -13,27 +13,27 @@ class Custom:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
- name (Union[Unset, str]): name given to the operation. The name has no impact on the operation
- and the name given to the function
description (Union[Unset, str]): description given to the operation, for documentation purposes.
function (Union[Unset, str]): function definition which must respect the following format:
`def (df: pd.DataFrame) -> pd.DataFrame
return df`
+ name (Union[Unset, str]): name given to the operation. The name has no impact on the operation
+ and the name given to the function
"""
type: PreprocessingOperationType
- name: Union[Unset, str] = UNSET
description: Union[Unset, str] = UNSET
function: Union[Unset, str] = UNSET
+ name: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- name = self.name
description = self.description
function = self.function
+ name = self.name
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -42,12 +42,12 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if name is not UNSET:
- field_dict["name"] = name
if description is not UNSET:
field_dict["description"] = description
if function is not UNSET:
field_dict["function"] = function
+ if name is not UNSET:
+ field_dict["name"] = name
return field_dict
@@ -56,17 +56,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
- name = d.pop("name", UNSET)
-
description = d.pop("description", UNSET)
function = d.pop("function", UNSET)
+ name = d.pop("name", UNSET)
+
custom = cls(
type=type,
- name=name,
description=description,
function=function,
+ name=name,
)
custom.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/cut.py b/src/tuneinsight/api/sdk/models/cut.py
index 347e8e7..13c44cb 100644
--- a/src/tuneinsight/api/sdk/models/cut.py
+++ b/src/tuneinsight/api/sdk/models/cut.py
@@ -13,31 +13,32 @@ class Cut:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
+ cuts (Union[Unset, List[float]]): cuts to use
input_ (Union[Unset, str]): column to use as input
labels (Union[Unset, List[str]]): labels to use for the cuts
output (Union[Unset, str]): column to use as output
- cuts (Union[Unset, List[float]]): cuts to use
"""
type: PreprocessingOperationType
+ cuts: Union[Unset, List[float]] = UNSET
input_: Union[Unset, str] = UNSET
labels: Union[Unset, List[str]] = UNSET
output: Union[Unset, str] = UNSET
- cuts: Union[Unset, List[float]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
+ cuts: Union[Unset, List[float]] = UNSET
+ if not isinstance(self.cuts, Unset):
+ cuts = self.cuts
+
input_ = self.input_
labels: Union[Unset, List[str]] = UNSET
if not isinstance(self.labels, Unset):
labels = self.labels
output = self.output
- cuts: Union[Unset, List[float]] = UNSET
- if not isinstance(self.cuts, Unset):
- cuts = self.cuts
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -46,14 +47,14 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
+ if cuts is not UNSET:
+ field_dict["cuts"] = cuts
if input_ is not UNSET:
field_dict["input"] = input_
if labels is not UNSET:
field_dict["labels"] = labels
if output is not UNSET:
field_dict["output"] = output
- if cuts is not UNSET:
- field_dict["cuts"] = cuts
return field_dict
@@ -62,20 +63,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
+ cuts = cast(List[float], d.pop("cuts", UNSET))
+
input_ = d.pop("input", UNSET)
labels = cast(List[str], d.pop("labels", UNSET))
output = d.pop("output", UNSET)
- cuts = cast(List[float], d.pop("cuts", UNSET))
-
cut = cls(
type=type,
+ cuts=cuts,
input_=input_,
labels=labels,
output=output,
- cuts=cuts,
)
cut.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/data_object.py b/src/tuneinsight/api/sdk/models/data_object.py
index 401d1bb..04eda87 100644
--- a/src/tuneinsight/api/sdk/models/data_object.py
+++ b/src/tuneinsight/api/sdk/models/data_object.py
@@ -14,25 +14,28 @@ class DataObject:
"""A data object definition.
Attributes:
+ has_data (Union[Unset, bool]): whether the dataobject's data has been set
+ session_id (Union[Unset, str]): Unique identifier of a session
+ shared_id (Union[Unset, str]): Shared identifier of a data object.
type (Union[Unset, DataObjectType]): type of the dataobject
unique_id (Union[Unset, str]): Unique identifier of a data object.
visibility_status (Union[Unset, DataObjectVisibilityStatus]): type of visibility set to the dataobject
encrypted (Union[Unset, bool]):
- has_data (Union[Unset, bool]): whether the dataobject's data has been set
- session_id (Union[Unset, str]): Unique identifier of a session
- shared_id (Union[Unset, str]): Shared identifier of a data object.
"""
+ has_data: Union[Unset, bool] = UNSET
+ session_id: Union[Unset, str] = UNSET
+ shared_id: Union[Unset, str] = UNSET
type: Union[Unset, DataObjectType] = UNSET
unique_id: Union[Unset, str] = UNSET
visibility_status: Union[Unset, DataObjectVisibilityStatus] = UNSET
encrypted: Union[Unset, bool] = UNSET
- has_data: Union[Unset, bool] = UNSET
- session_id: Union[Unset, str] = UNSET
- shared_id: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ has_data = self.has_data
+ session_id = self.session_id
+ shared_id = self.shared_id
type: Union[Unset, str] = UNSET
if not isinstance(self.type, Unset):
type = self.type.value
@@ -43,13 +46,16 @@ def to_dict(self) -> Dict[str, Any]:
visibility_status = self.visibility_status.value
encrypted = self.encrypted
- has_data = self.has_data
- session_id = self.session_id
- shared_id = self.shared_id
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if has_data is not UNSET:
+ field_dict["hasData"] = has_data
+ if session_id is not UNSET:
+ field_dict["sessionId"] = session_id
+ if shared_id is not UNSET:
+ field_dict["sharedId"] = shared_id
if type is not UNSET:
field_dict["type"] = type
if unique_id is not UNSET:
@@ -58,18 +64,18 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["visibilityStatus"] = visibility_status
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if has_data is not UNSET:
- field_dict["hasData"] = has_data
- if session_id is not UNSET:
- field_dict["sessionId"] = session_id
- if shared_id is not UNSET:
- field_dict["sharedId"] = shared_id
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
+ has_data = d.pop("hasData", UNSET)
+
+ session_id = d.pop("sessionId", UNSET)
+
+ shared_id = d.pop("sharedId", UNSET)
+
_type = d.pop("type", UNSET)
type: Union[Unset, DataObjectType]
if isinstance(_type, Unset):
@@ -88,20 +94,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
encrypted = d.pop("encrypted", UNSET)
- has_data = d.pop("hasData", UNSET)
-
- session_id = d.pop("sessionId", UNSET)
-
- shared_id = d.pop("sharedId", UNSET)
-
data_object = cls(
+ has_data=has_data,
+ session_id=session_id,
+ shared_id=shared_id,
type=type,
unique_id=unique_id,
visibility_status=visibility_status,
encrypted=encrypted,
- has_data=has_data,
- session_id=session_id,
- shared_id=shared_id,
)
data_object.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/data_source.py b/src/tuneinsight/api/sdk/models/data_source.py
index 0c0d530..116e948 100644
--- a/src/tuneinsight/api/sdk/models/data_source.py
+++ b/src/tuneinsight/api/sdk/models/data_source.py
@@ -12,7 +12,6 @@
from ..models.data_source_config import DataSourceConfig
from ..models.data_source_metadata import DataSourceMetadata
from ..models.local_data_selection import LocalDataSelection
- from ..models.project import Project
T = TypeVar("T", bound="DataSource")
@@ -22,74 +21,82 @@
class DataSource:
"""
Attributes:
+ consent_type (Union[Unset, DataSourceConsentType]): Consent type given to the data source.
+ id (Union[Unset, None, str]): Unique identifier of a data source.
name (Union[Unset, str]):
- type (Union[Unset, DataSourceType]):
- attributes (Union[Unset, List[str]]): optional list of attributes.
authorized_users (Union[Unset, List[str]]):
- credentials (Union[Unset, Credentials]): The credentials needed to access the data source.
- id (Union[Unset, None, str]): Unique identifier of a data source.
- access_scope (Union[Unset, AccessScope]): defines the scope of access given to a resource
clear_if_exists (Union[Unset, bool]): If true and a data source with the same name already exists, delete it.
configuration (Union[Unset, DataSourceConfig]): data source configuration
- consent_type (Union[Unset, DataSourceConsentType]): Consent type given to the data source.
+ credentials (Union[Unset, Credentials]): The credentials needed to access the data source.
+ is_mock (Union[Unset, bool]): Whether this datasource contains mock/synthetic data and should not be used in
+ production.
+ structure_template_json (Union[Unset, str]): data source's structure template (used to determine the query
+ builder structure, if provided)
+ type (Union[Unset, DataSourceType]):
+ access_scope (Union[Unset, AccessScope]): defines the scope of access given to a resource
+ attributes (Union[Unset, List[str]]): optional list of attributes.
created_at (Union[Unset, str]):
metadata (Union[Unset, DataSourceMetadata]): metadata about a datasource
owner (Union[Unset, str]):
- projects (Union[Unset, List['Project']]): response for successfully retrieved projects
+ projects (Union[Unset, List[str]]): ids of connected projects
selections (Union[Unset, List['LocalDataSelection']]): list of local data selections associated with the data
source
updated_at (Union[Unset, str]):
"""
+ consent_type: Union[Unset, DataSourceConsentType] = UNSET
+ id: Union[Unset, None, str] = UNSET
name: Union[Unset, str] = UNSET
- type: Union[Unset, DataSourceType] = UNSET
- attributes: Union[Unset, List[str]] = UNSET
authorized_users: Union[Unset, List[str]] = UNSET
- credentials: Union[Unset, "Credentials"] = UNSET
- id: Union[Unset, None, str] = UNSET
- access_scope: Union[Unset, AccessScope] = UNSET
clear_if_exists: Union[Unset, bool] = False
configuration: Union[Unset, "DataSourceConfig"] = UNSET
- consent_type: Union[Unset, DataSourceConsentType] = UNSET
+ credentials: Union[Unset, "Credentials"] = UNSET
+ is_mock: Union[Unset, bool] = UNSET
+ structure_template_json: Union[Unset, str] = UNSET
+ type: Union[Unset, DataSourceType] = UNSET
+ access_scope: Union[Unset, AccessScope] = UNSET
+ attributes: Union[Unset, List[str]] = UNSET
created_at: Union[Unset, str] = UNSET
metadata: Union[Unset, "DataSourceMetadata"] = UNSET
owner: Union[Unset, str] = UNSET
- projects: Union[Unset, List["Project"]] = UNSET
+ projects: Union[Unset, List[str]] = UNSET
selections: Union[Unset, List["LocalDataSelection"]] = UNSET
updated_at: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- name = self.name
- type: Union[Unset, str] = UNSET
- if not isinstance(self.type, Unset):
- type = self.type.value
-
- attributes: Union[Unset, List[str]] = UNSET
- if not isinstance(self.attributes, Unset):
- attributes = self.attributes
+ consent_type: Union[Unset, str] = UNSET
+ if not isinstance(self.consent_type, Unset):
+ consent_type = self.consent_type.value
+ id = self.id
+ name = self.name
authorized_users: Union[Unset, List[str]] = UNSET
if not isinstance(self.authorized_users, Unset):
authorized_users = self.authorized_users
+ clear_if_exists = self.clear_if_exists
+ configuration: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.configuration, Unset):
+ configuration = self.configuration.to_dict()
+
credentials: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.credentials, Unset):
credentials = self.credentials.to_dict()
- id = self.id
+ is_mock = self.is_mock
+ structure_template_json = self.structure_template_json
+ type: Union[Unset, str] = UNSET
+ if not isinstance(self.type, Unset):
+ type = self.type.value
+
access_scope: Union[Unset, str] = UNSET
if not isinstance(self.access_scope, Unset):
access_scope = self.access_scope.value
- clear_if_exists = self.clear_if_exists
- configuration: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.configuration, Unset):
- configuration = self.configuration.to_dict()
-
- consent_type: Union[Unset, str] = UNSET
- if not isinstance(self.consent_type, Unset):
- consent_type = self.consent_type.value
+ attributes: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.attributes, Unset):
+ attributes = self.attributes
created_at = self.created_at
metadata: Union[Unset, Dict[str, Any]] = UNSET
@@ -97,13 +104,9 @@ def to_dict(self) -> Dict[str, Any]:
metadata = self.metadata.to_dict()
owner = self.owner
- projects: Union[Unset, List[Dict[str, Any]]] = UNSET
+ projects: Union[Unset, List[str]] = UNSET
if not isinstance(self.projects, Unset):
- projects = []
- for projects_item_data in self.projects:
- projects_item = projects_item_data.to_dict()
-
- projects.append(projects_item)
+ projects = self.projects
selections: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.selections, Unset):
@@ -118,26 +121,30 @@ def to_dict(self) -> Dict[str, Any]:
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if consent_type is not UNSET:
+ field_dict["consentType"] = consent_type
+ if id is not UNSET:
+ field_dict["id"] = id
if name is not UNSET:
field_dict["name"] = name
- if type is not UNSET:
- field_dict["type"] = type
- if attributes is not UNSET:
- field_dict["attributes"] = attributes
if authorized_users is not UNSET:
field_dict["authorizedUsers"] = authorized_users
- if credentials is not UNSET:
- field_dict["credentials"] = credentials
- if id is not UNSET:
- field_dict["id"] = id
- if access_scope is not UNSET:
- field_dict["accessScope"] = access_scope
if clear_if_exists is not UNSET:
field_dict["clearIfExists"] = clear_if_exists
if configuration is not UNSET:
field_dict["configuration"] = configuration
- if consent_type is not UNSET:
- field_dict["consentType"] = consent_type
+ if credentials is not UNSET:
+ field_dict["credentials"] = credentials
+ if is_mock is not UNSET:
+ field_dict["isMock"] = is_mock
+ if structure_template_json is not UNSET:
+ field_dict["structureTemplateJSON"] = structure_template_json
+ if type is not UNSET:
+ field_dict["type"] = type
+ if access_scope is not UNSET:
+ field_dict["accessScope"] = access_scope
+ if attributes is not UNSET:
+ field_dict["attributes"] = attributes
if created_at is not UNSET:
field_dict["createdAt"] = created_at
if metadata is not UNSET:
@@ -159,22 +166,30 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.data_source_config import DataSourceConfig
from ..models.data_source_metadata import DataSourceMetadata
from ..models.local_data_selection import LocalDataSelection
- from ..models.project import Project
d = src_dict.copy()
- name = d.pop("name", UNSET)
-
- _type = d.pop("type", UNSET)
- type: Union[Unset, DataSourceType]
- if isinstance(_type, Unset):
- type = UNSET
+ _consent_type = d.pop("consentType", UNSET)
+ consent_type: Union[Unset, DataSourceConsentType]
+ if isinstance(_consent_type, Unset):
+ consent_type = UNSET
else:
- type = DataSourceType(_type)
+ consent_type = DataSourceConsentType(_consent_type)
- attributes = cast(List[str], d.pop("attributes", UNSET))
+ id = d.pop("id", UNSET)
+
+ name = d.pop("name", UNSET)
authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET))
+ clear_if_exists = d.pop("clearIfExists", UNSET)
+
+ _configuration = d.pop("configuration", UNSET)
+ configuration: Union[Unset, DataSourceConfig]
+ if isinstance(_configuration, Unset):
+ configuration = UNSET
+ else:
+ configuration = DataSourceConfig.from_dict(_configuration)
+
_credentials = d.pop("credentials", UNSET)
credentials: Union[Unset, Credentials]
if isinstance(_credentials, Unset):
@@ -182,7 +197,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
credentials = Credentials.from_dict(_credentials)
- id = d.pop("id", UNSET)
+ is_mock = d.pop("isMock", UNSET)
+
+ structure_template_json = d.pop("structureTemplateJSON", UNSET)
+
+ _type = d.pop("type", UNSET)
+ type: Union[Unset, DataSourceType]
+ if isinstance(_type, Unset):
+ type = UNSET
+ else:
+ type = DataSourceType(_type)
_access_scope = d.pop("accessScope", UNSET)
access_scope: Union[Unset, AccessScope]
@@ -191,21 +215,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
access_scope = AccessScope(_access_scope)
- clear_if_exists = d.pop("clearIfExists", UNSET)
-
- _configuration = d.pop("configuration", UNSET)
- configuration: Union[Unset, DataSourceConfig]
- if isinstance(_configuration, Unset):
- configuration = UNSET
- else:
- configuration = DataSourceConfig.from_dict(_configuration)
-
- _consent_type = d.pop("consentType", UNSET)
- consent_type: Union[Unset, DataSourceConsentType]
- if isinstance(_consent_type, Unset):
- consent_type = UNSET
- else:
- consent_type = DataSourceConsentType(_consent_type)
+ attributes = cast(List[str], d.pop("attributes", UNSET))
created_at = d.pop("createdAt", UNSET)
@@ -218,12 +228,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
owner = d.pop("owner", UNSET)
- projects = []
- _projects = d.pop("projects", UNSET)
- for projects_item_data in _projects or []:
- projects_item = Project.from_dict(projects_item_data)
-
- projects.append(projects_item)
+ projects = cast(List[str], d.pop("projects", UNSET))
selections = []
_selections = d.pop("selections", UNSET)
@@ -235,16 +240,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
updated_at = d.pop("updatedAt", UNSET)
data_source = cls(
+ consent_type=consent_type,
+ id=id,
name=name,
- type=type,
- attributes=attributes,
authorized_users=authorized_users,
- credentials=credentials,
- id=id,
- access_scope=access_scope,
clear_if_exists=clear_if_exists,
configuration=configuration,
- consent_type=consent_type,
+ credentials=credentials,
+ is_mock=is_mock,
+ structure_template_json=structure_template_json,
+ type=type,
+ access_scope=access_scope,
+ attributes=attributes,
created_at=created_at,
metadata=metadata,
owner=owner,
diff --git a/src/tuneinsight/api/sdk/models/data_source_config.py b/src/tuneinsight/api/sdk/models/data_source_config.py
index 487e8ba..dd910eb 100644
--- a/src/tuneinsight/api/sdk/models/data_source_config.py
+++ b/src/tuneinsight/api/sdk/models/data_source_config.py
@@ -15,109 +15,99 @@ class DataSourceConfig:
"""data source configuration
Attributes:
- csv_path (Union[Unset, str]): the path to the CSV file.
+ api_url (Union[Unset, str]): URL of the API
cert (Union[Unset, str]): If applicable, name of the certificate to access the datasource. Certificate should be
in '/usr/local/share/datasource-certificates/.{crt/key}'
- host (Union[Unset, str]): Hostname of the database
+ insecure_skip_verify_tls (Union[Unset, bool]): This flag enables skipping TLS verification when connecting to
+ the remote API data source. WARNING: this should not be used in production
suricata_path (Union[Unset, str]): the path to the suricata JSON file.
- local_type (Union[Unset, LocalDataSourceType]):
- port (Union[Unset, str]): Port number of the database
with_auth (Union[Unset, bool]): Whether the API requires authentication
api_type (Union[Unset, APIType]):
- api_url (Union[Unset, str]): URL of the API
+ csv_path (Union[Unset, str]): the path to the CSV file.
+ host (Union[Unset, str]): Hostname of the database
+ local_type (Union[Unset, LocalDataSourceType]):
+ port (Union[Unset, str]): Port number of the database
database (Union[Unset, str]): Name of the database
database_type (Union[Unset, DatabaseType]): Type of the database
- insecure_skip_verify_tls (Union[Unset, bool]): This flag enables skipping TLS verification when connecting to
- the remote API data source. WARNING: this should not be used in production
"""
- csv_path: Union[Unset, str] = UNSET
+ api_url: Union[Unset, str] = UNSET
cert: Union[Unset, str] = UNSET
- host: Union[Unset, str] = UNSET
+ insecure_skip_verify_tls: Union[Unset, bool] = UNSET
suricata_path: Union[Unset, str] = UNSET
- local_type: Union[Unset, LocalDataSourceType] = UNSET
- port: Union[Unset, str] = UNSET
with_auth: Union[Unset, bool] = UNSET
api_type: Union[Unset, APIType] = UNSET
- api_url: Union[Unset, str] = UNSET
+ csv_path: Union[Unset, str] = UNSET
+ host: Union[Unset, str] = UNSET
+ local_type: Union[Unset, LocalDataSourceType] = UNSET
+ port: Union[Unset, str] = UNSET
database: Union[Unset, str] = UNSET
database_type: Union[Unset, DatabaseType] = UNSET
- insecure_skip_verify_tls: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- csv_path = self.csv_path
+ api_url = self.api_url
cert = self.cert
- host = self.host
+ insecure_skip_verify_tls = self.insecure_skip_verify_tls
suricata_path = self.suricata_path
- local_type: Union[Unset, str] = UNSET
- if not isinstance(self.local_type, Unset):
- local_type = self.local_type.value
-
- port = self.port
with_auth = self.with_auth
api_type: Union[Unset, str] = UNSET
if not isinstance(self.api_type, Unset):
api_type = self.api_type.value
- api_url = self.api_url
+ csv_path = self.csv_path
+ host = self.host
+ local_type: Union[Unset, str] = UNSET
+ if not isinstance(self.local_type, Unset):
+ local_type = self.local_type.value
+
+ port = self.port
database = self.database
database_type: Union[Unset, str] = UNSET
if not isinstance(self.database_type, Unset):
database_type = self.database_type.value
- insecure_skip_verify_tls = self.insecure_skip_verify_tls
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if csv_path is not UNSET:
- field_dict["CSVPath"] = csv_path
+ if api_url is not UNSET:
+ field_dict["api-url"] = api_url
if cert is not UNSET:
field_dict["cert"] = cert
- if host is not UNSET:
- field_dict["host"] = host
+ if insecure_skip_verify_tls is not UNSET:
+ field_dict["insecureSkipVerifyTLS"] = insecure_skip_verify_tls
if suricata_path is not UNSET:
field_dict["suricataPath"] = suricata_path
- if local_type is not UNSET:
- field_dict["localType"] = local_type
- if port is not UNSET:
- field_dict["port"] = port
if with_auth is not UNSET:
field_dict["withAuth"] = with_auth
if api_type is not UNSET:
field_dict["APIType"] = api_type
- if api_url is not UNSET:
- field_dict["api-url"] = api_url
+ if csv_path is not UNSET:
+ field_dict["CSVPath"] = csv_path
+ if host is not UNSET:
+ field_dict["host"] = host
+ if local_type is not UNSET:
+ field_dict["localType"] = local_type
+ if port is not UNSET:
+ field_dict["port"] = port
if database is not UNSET:
field_dict["database"] = database
if database_type is not UNSET:
field_dict["databaseType"] = database_type
- if insecure_skip_verify_tls is not UNSET:
- field_dict["insecureSkipVerifyTLS"] = insecure_skip_verify_tls
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- csv_path = d.pop("CSVPath", UNSET)
+ api_url = d.pop("api-url", UNSET)
cert = d.pop("cert", UNSET)
- host = d.pop("host", UNSET)
+ insecure_skip_verify_tls = d.pop("insecureSkipVerifyTLS", UNSET)
suricata_path = d.pop("suricataPath", UNSET)
- _local_type = d.pop("localType", UNSET)
- local_type: Union[Unset, LocalDataSourceType]
- if isinstance(_local_type, Unset):
- local_type = UNSET
- else:
- local_type = LocalDataSourceType(_local_type)
-
- port = d.pop("port", UNSET)
-
with_auth = d.pop("withAuth", UNSET)
_api_type = d.pop("APIType", UNSET)
@@ -127,7 +117,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
api_type = APIType(_api_type)
- api_url = d.pop("api-url", UNSET)
+ csv_path = d.pop("CSVPath", UNSET)
+
+ host = d.pop("host", UNSET)
+
+ _local_type = d.pop("localType", UNSET)
+ local_type: Union[Unset, LocalDataSourceType]
+ if isinstance(_local_type, Unset):
+ local_type = UNSET
+ else:
+ local_type = LocalDataSourceType(_local_type)
+
+ port = d.pop("port", UNSET)
database = d.pop("database", UNSET)
@@ -138,21 +139,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
database_type = DatabaseType(_database_type)
- insecure_skip_verify_tls = d.pop("insecureSkipVerifyTLS", UNSET)
-
data_source_config = cls(
- csv_path=csv_path,
+ api_url=api_url,
cert=cert,
- host=host,
+ insecure_skip_verify_tls=insecure_skip_verify_tls,
suricata_path=suricata_path,
- local_type=local_type,
- port=port,
with_auth=with_auth,
api_type=api_type,
- api_url=api_url,
+ csv_path=csv_path,
+ host=host,
+ local_type=local_type,
+ port=port,
database=database,
database_type=database_type,
- insecure_skip_verify_tls=insecure_skip_verify_tls,
)
data_source_config.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/data_source_definition.py b/src/tuneinsight/api/sdk/models/data_source_definition.py
index 5c9331d..325bd92 100644
--- a/src/tuneinsight/api/sdk/models/data_source_definition.py
+++ b/src/tuneinsight/api/sdk/models/data_source_definition.py
@@ -20,85 +20,97 @@ class DataSourceDefinition:
"""parameters used to create and modify a data source
Attributes:
+ consent_type (Union[Unset, DataSourceConsentType]): Consent type given to the data source.
+ id (Union[Unset, None, str]): Unique identifier of a data source.
name (Union[Unset, str]):
- type (Union[Unset, DataSourceType]):
- attributes (Union[Unset, List[str]]): optional list of attributes.
authorized_users (Union[Unset, List[str]]):
- credentials (Union[Unset, Credentials]): The credentials needed to access the data source.
- id (Union[Unset, None, str]): Unique identifier of a data source.
- access_scope (Union[Unset, AccessScope]): defines the scope of access given to a resource
clear_if_exists (Union[Unset, bool]): If true and a data source with the same name already exists, delete it.
configuration (Union[Unset, DataSourceConfig]): data source configuration
- consent_type (Union[Unset, DataSourceConsentType]): Consent type given to the data source.
+ credentials (Union[Unset, Credentials]): The credentials needed to access the data source.
+ is_mock (Union[Unset, bool]): Whether this datasource contains mock/synthetic data and should not be used in
+ production.
+ structure_template_json (Union[Unset, str]): data source's structure template (used to determine the query
+ builder structure, if provided)
+ type (Union[Unset, DataSourceType]):
+ access_scope (Union[Unset, AccessScope]): defines the scope of access given to a resource
+ attributes (Union[Unset, List[str]]): optional list of attributes.
"""
+ consent_type: Union[Unset, DataSourceConsentType] = UNSET
+ id: Union[Unset, None, str] = UNSET
name: Union[Unset, str] = UNSET
- type: Union[Unset, DataSourceType] = UNSET
- attributes: Union[Unset, List[str]] = UNSET
authorized_users: Union[Unset, List[str]] = UNSET
- credentials: Union[Unset, "Credentials"] = UNSET
- id: Union[Unset, None, str] = UNSET
- access_scope: Union[Unset, AccessScope] = UNSET
clear_if_exists: Union[Unset, bool] = False
configuration: Union[Unset, "DataSourceConfig"] = UNSET
- consent_type: Union[Unset, DataSourceConsentType] = UNSET
+ credentials: Union[Unset, "Credentials"] = UNSET
+ is_mock: Union[Unset, bool] = UNSET
+ structure_template_json: Union[Unset, str] = UNSET
+ type: Union[Unset, DataSourceType] = UNSET
+ access_scope: Union[Unset, AccessScope] = UNSET
+ attributes: Union[Unset, List[str]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- name = self.name
- type: Union[Unset, str] = UNSET
- if not isinstance(self.type, Unset):
- type = self.type.value
-
- attributes: Union[Unset, List[str]] = UNSET
- if not isinstance(self.attributes, Unset):
- attributes = self.attributes
+ consent_type: Union[Unset, str] = UNSET
+ if not isinstance(self.consent_type, Unset):
+ consent_type = self.consent_type.value
+ id = self.id
+ name = self.name
authorized_users: Union[Unset, List[str]] = UNSET
if not isinstance(self.authorized_users, Unset):
authorized_users = self.authorized_users
+ clear_if_exists = self.clear_if_exists
+ configuration: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.configuration, Unset):
+ configuration = self.configuration.to_dict()
+
credentials: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.credentials, Unset):
credentials = self.credentials.to_dict()
- id = self.id
+ is_mock = self.is_mock
+ structure_template_json = self.structure_template_json
+ type: Union[Unset, str] = UNSET
+ if not isinstance(self.type, Unset):
+ type = self.type.value
+
access_scope: Union[Unset, str] = UNSET
if not isinstance(self.access_scope, Unset):
access_scope = self.access_scope.value
- clear_if_exists = self.clear_if_exists
- configuration: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.configuration, Unset):
- configuration = self.configuration.to_dict()
-
- consent_type: Union[Unset, str] = UNSET
- if not isinstance(self.consent_type, Unset):
- consent_type = self.consent_type.value
+ attributes: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.attributes, Unset):
+ attributes = self.attributes
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if consent_type is not UNSET:
+ field_dict["consentType"] = consent_type
+ if id is not UNSET:
+ field_dict["id"] = id
if name is not UNSET:
field_dict["name"] = name
- if type is not UNSET:
- field_dict["type"] = type
- if attributes is not UNSET:
- field_dict["attributes"] = attributes
if authorized_users is not UNSET:
field_dict["authorizedUsers"] = authorized_users
- if credentials is not UNSET:
- field_dict["credentials"] = credentials
- if id is not UNSET:
- field_dict["id"] = id
- if access_scope is not UNSET:
- field_dict["accessScope"] = access_scope
if clear_if_exists is not UNSET:
field_dict["clearIfExists"] = clear_if_exists
if configuration is not UNSET:
field_dict["configuration"] = configuration
- if consent_type is not UNSET:
- field_dict["consentType"] = consent_type
+ if credentials is not UNSET:
+ field_dict["credentials"] = credentials
+ if is_mock is not UNSET:
+ field_dict["isMock"] = is_mock
+ if structure_template_json is not UNSET:
+ field_dict["structureTemplateJSON"] = structure_template_json
+ if type is not UNSET:
+ field_dict["type"] = type
+ if access_scope is not UNSET:
+ field_dict["accessScope"] = access_scope
+ if attributes is not UNSET:
+ field_dict["attributes"] = attributes
return field_dict
@@ -108,19 +120,28 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.data_source_config import DataSourceConfig
d = src_dict.copy()
- name = d.pop("name", UNSET)
-
- _type = d.pop("type", UNSET)
- type: Union[Unset, DataSourceType]
- if isinstance(_type, Unset):
- type = UNSET
+ _consent_type = d.pop("consentType", UNSET)
+ consent_type: Union[Unset, DataSourceConsentType]
+ if isinstance(_consent_type, Unset):
+ consent_type = UNSET
else:
- type = DataSourceType(_type)
+ consent_type = DataSourceConsentType(_consent_type)
- attributes = cast(List[str], d.pop("attributes", UNSET))
+ id = d.pop("id", UNSET)
+
+ name = d.pop("name", UNSET)
authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET))
+ clear_if_exists = d.pop("clearIfExists", UNSET)
+
+ _configuration = d.pop("configuration", UNSET)
+ configuration: Union[Unset, DataSourceConfig]
+ if isinstance(_configuration, Unset):
+ configuration = UNSET
+ else:
+ configuration = DataSourceConfig.from_dict(_configuration)
+
_credentials = d.pop("credentials", UNSET)
credentials: Union[Unset, Credentials]
if isinstance(_credentials, Unset):
@@ -128,7 +149,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
credentials = Credentials.from_dict(_credentials)
- id = d.pop("id", UNSET)
+ is_mock = d.pop("isMock", UNSET)
+
+ structure_template_json = d.pop("structureTemplateJSON", UNSET)
+
+ _type = d.pop("type", UNSET)
+ type: Union[Unset, DataSourceType]
+ if isinstance(_type, Unset):
+ type = UNSET
+ else:
+ type = DataSourceType(_type)
_access_scope = d.pop("accessScope", UNSET)
access_scope: Union[Unset, AccessScope]
@@ -137,33 +167,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
access_scope = AccessScope(_access_scope)
- clear_if_exists = d.pop("clearIfExists", UNSET)
-
- _configuration = d.pop("configuration", UNSET)
- configuration: Union[Unset, DataSourceConfig]
- if isinstance(_configuration, Unset):
- configuration = UNSET
- else:
- configuration = DataSourceConfig.from_dict(_configuration)
-
- _consent_type = d.pop("consentType", UNSET)
- consent_type: Union[Unset, DataSourceConsentType]
- if isinstance(_consent_type, Unset):
- consent_type = UNSET
- else:
- consent_type = DataSourceConsentType(_consent_type)
+ attributes = cast(List[str], d.pop("attributes", UNSET))
data_source_definition = cls(
+ consent_type=consent_type,
+ id=id,
name=name,
- type=type,
- attributes=attributes,
authorized_users=authorized_users,
- credentials=credentials,
- id=id,
- access_scope=access_scope,
clear_if_exists=clear_if_exists,
configuration=configuration,
- consent_type=consent_type,
+ credentials=credentials,
+ is_mock=is_mock,
+ structure_template_json=structure_template_json,
+ type=type,
+ access_scope=access_scope,
+ attributes=attributes,
)
data_source_definition.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/data_source_metadata.py b/src/tuneinsight/api/sdk/models/data_source_metadata.py
index a3a65aa..b820d01 100644
--- a/src/tuneinsight/api/sdk/models/data_source_metadata.py
+++ b/src/tuneinsight/api/sdk/models/data_source_metadata.py
@@ -16,17 +16,18 @@ class DataSourceMetadata:
"""metadata about a datasource
Attributes:
+ metadata_available (Union[Unset, bool]): whether or not the datasource supports returning metadata
stores_templates (Union[Unset, bool]): whether the data source stores template tables.
tables (Union[Unset, List['DataSourceTable']]):
- metadata_available (Union[Unset, bool]): whether or not the datasource supports returning metadata
"""
+ metadata_available: Union[Unset, bool] = UNSET
stores_templates: Union[Unset, bool] = UNSET
tables: Union[Unset, List["DataSourceTable"]] = UNSET
- metadata_available: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ metadata_available = self.metadata_available
stores_templates = self.stores_templates
tables: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.tables, Unset):
@@ -36,17 +37,15 @@ def to_dict(self) -> Dict[str, Any]:
tables.append(tables_item)
- metadata_available = self.metadata_available
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if metadata_available is not UNSET:
+ field_dict["metadataAvailable"] = metadata_available
if stores_templates is not UNSET:
field_dict["storesTemplates"] = stores_templates
if tables is not UNSET:
field_dict["tables"] = tables
- if metadata_available is not UNSET:
- field_dict["metadataAvailable"] = metadata_available
return field_dict
@@ -55,6 +54,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.data_source_table import DataSourceTable
d = src_dict.copy()
+ metadata_available = d.pop("metadataAvailable", UNSET)
+
stores_templates = d.pop("storesTemplates", UNSET)
tables = []
@@ -64,12 +65,10 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
tables.append(tables_item)
- metadata_available = d.pop("metadataAvailable", UNSET)
-
data_source_metadata = cls(
+ metadata_available=metadata_available,
stores_templates=stores_templates,
tables=tables,
- metadata_available=metadata_available,
)
data_source_metadata.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/data_source_query_preview.py b/src/tuneinsight/api/sdk/models/data_source_query_preview.py
index 3e893d9..641e8f0 100644
--- a/src/tuneinsight/api/sdk/models/data_source_query_preview.py
+++ b/src/tuneinsight/api/sdk/models/data_source_query_preview.py
@@ -16,17 +16,21 @@ class DataSourceQueryPreview:
"""preview of a datasource query
Attributes:
+ table_metadata (Union[Unset, DataSourceTable]): schema information for a table from a datasource
columns (Union[Unset, List[str]]): columns of the queried table
rows (Union[Unset, List[List[str]]]): previewed records
- table_metadata (Union[Unset, DataSourceTable]): schema information for a table from a datasource
"""
+ table_metadata: Union[Unset, "DataSourceTable"] = UNSET
columns: Union[Unset, List[str]] = UNSET
rows: Union[Unset, List[List[str]]] = UNSET
- table_metadata: Union[Unset, "DataSourceTable"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ table_metadata: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.table_metadata, Unset):
+ table_metadata = self.table_metadata.to_dict()
+
columns: Union[Unset, List[str]] = UNSET
if not isinstance(self.columns, Unset):
columns = self.columns
@@ -39,19 +43,15 @@ def to_dict(self) -> Dict[str, Any]:
rows.append(rows_item)
- table_metadata: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.table_metadata, Unset):
- table_metadata = self.table_metadata.to_dict()
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if table_metadata is not UNSET:
+ field_dict["tableMetadata"] = table_metadata
if columns is not UNSET:
field_dict["columns"] = columns
if rows is not UNSET:
field_dict["rows"] = rows
- if table_metadata is not UNSET:
- field_dict["tableMetadata"] = table_metadata
return field_dict
@@ -60,6 +60,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.data_source_table import DataSourceTable
d = src_dict.copy()
+ _table_metadata = d.pop("tableMetadata", UNSET)
+ table_metadata: Union[Unset, DataSourceTable]
+ if isinstance(_table_metadata, Unset):
+ table_metadata = UNSET
+ else:
+ table_metadata = DataSourceTable.from_dict(_table_metadata)
+
columns = cast(List[str], d.pop("columns", UNSET))
rows = []
@@ -69,17 +76,10 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
rows.append(rows_item)
- _table_metadata = d.pop("tableMetadata", UNSET)
- table_metadata: Union[Unset, DataSourceTable]
- if isinstance(_table_metadata, Unset):
- table_metadata = UNSET
- else:
- table_metadata = DataSourceTable.from_dict(_table_metadata)
-
data_source_query_preview = cls(
+ table_metadata=table_metadata,
columns=columns,
rows=rows,
- table_metadata=table_metadata,
)
data_source_query_preview.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/dataset_statistics.py b/src/tuneinsight/api/sdk/models/dataset_statistics.py
index e71c4dd..2bbdd34 100644
--- a/src/tuneinsight/api/sdk/models/dataset_statistics.py
+++ b/src/tuneinsight/api/sdk/models/dataset_statistics.py
@@ -23,8 +23,12 @@ class DatasetStatistics:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class DatasetStatistics:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class DatasetStatistics:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,95 +66,82 @@ class DatasetStatistics:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
statistics (Union[Unset, List['StatisticDefinition']]): list of statistics to be extracted from the dataset
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
statistics: Union[Unset, List["StatisticDefinition"]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
statistics: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.statistics, Unset):
statistics = []
@@ -157,46 +157,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if statistics is not UNSET:
field_dict["statistics"] = statistics
@@ -213,14 +213,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -229,10 +222,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -240,6 +244,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -251,19 +269,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -271,17 +276,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
statistics = []
_statistics = d.pop("statistics", UNSET)
@@ -292,26 +292,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
dataset_statistics = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
statistics=statistics,
)
diff --git a/src/tuneinsight/api/sdk/models/deviation_squares.py b/src/tuneinsight/api/sdk/models/deviation_squares.py
index eff6fea..f9676f7 100644
--- a/src/tuneinsight/api/sdk/models/deviation_squares.py
+++ b/src/tuneinsight/api/sdk/models/deviation_squares.py
@@ -13,27 +13,27 @@ class DeviationSquares:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
- count (Union[Unset, float]): dataset count used for computing the variance, if < 2 then the sum of squares will
- be divided by 1
input_ (Union[Unset, str]): column to use as input
mean (Union[Unset, float]): mean to compute the deviation from
output (Union[Unset, str]): column to use as output
+ count (Union[Unset, float]): dataset count used for computing the variance, if < 2 then the sum of squares will
+ be divided by 1
"""
type: PreprocessingOperationType
- count: Union[Unset, float] = UNSET
input_: Union[Unset, str] = UNSET
mean: Union[Unset, float] = UNSET
output: Union[Unset, str] = UNSET
+ count: Union[Unset, float] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- count = self.count
input_ = self.input_
mean = self.mean
output = self.output
+ count = self.count
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -42,14 +42,14 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if count is not UNSET:
- field_dict["count"] = count
if input_ is not UNSET:
field_dict["input"] = input_
if mean is not UNSET:
field_dict["mean"] = mean
if output is not UNSET:
field_dict["output"] = output
+ if count is not UNSET:
+ field_dict["count"] = count
return field_dict
@@ -58,20 +58,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
- count = d.pop("count", UNSET)
-
input_ = d.pop("input", UNSET)
mean = d.pop("mean", UNSET)
output = d.pop("output", UNSET)
+ count = d.pop("count", UNSET)
+
deviation_squares = cls(
type=type,
- count=count,
input_=input_,
mean=mean,
output=output,
+ count=count,
)
deviation_squares.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/distributed_join.py b/src/tuneinsight/api/sdk/models/distributed_join.py
index 89a59a9..6e89556 100644
--- a/src/tuneinsight/api/sdk/models/distributed_join.py
+++ b/src/tuneinsight/api/sdk/models/distributed_join.py
@@ -22,8 +22,12 @@ class DistributedJoin:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class DistributedJoin:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class DistributedJoin:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,53 +65,40 @@ class DistributedJoin:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
join_columns (Union[Unset, List[str]]):
missing_patterns (Union[Unset, List[str]]):
target_columns (Union[Unset, List[str]]):
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
join_columns: Union[Unset, List[str]] = UNSET
missing_patterns: Union[Unset, List[str]] = UNSET
target_columns: Union[Unset, List[str]] = UNSET
@@ -107,44 +107,44 @@ class DistributedJoin:
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
join_columns: Union[Unset, List[str]] = UNSET
if not isinstance(self.join_columns, Unset):
join_columns = self.join_columns
@@ -164,46 +164,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if join_columns is not UNSET:
field_dict["joinColumns"] = join_columns
if missing_patterns is not UNSET:
@@ -223,14 +223,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -239,10 +232,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -250,6 +254,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -261,19 +279,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -281,17 +286,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
join_columns = cast(List[str], d.pop("joinColumns", UNSET))
@@ -301,26 +301,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
distributed_join = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
join_columns=join_columns,
missing_patterns=missing_patterns,
target_columns=target_columns,
diff --git a/src/tuneinsight/api/sdk/models/dp_policy.py b/src/tuneinsight/api/sdk/models/dp_policy.py
index 58c315d..4b597c4 100644
--- a/src/tuneinsight/api/sdk/models/dp_policy.py
+++ b/src/tuneinsight/api/sdk/models/dp_policy.py
@@ -17,11 +17,20 @@ class DPPolicy:
"""represents the disclosure prevention policy that enables toggling various disclosure prevention mechanisms
Attributes:
- max_column_count (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset
- size
- max_factors (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset size
noisy_global_size (Union[Unset, bool]): when computing the global size, whether noise is used or not. If so,
each node adds discrete noise to its input to the encrypted aggregation
+ authorized_variables (Union[Unset, List[str]]): constraint on the set of variables that can be used as input, in
+ order to prevent misuse of variables that are out of context of the project.
+ if > 0 variables are defined here, then the dataset will automatically drop any variables that do not belong to
+ this set.
+ Warning: this mechanism is only effective when the data selection parameters (data source queries) are fixed,
+ and therefore
+ returned variables cannot be aliased (for example using aliases in SQL SELECT statements) to evade this trap.
+ max_factors (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset size
+ min_global_dataset_size (Union[Unset, int]): minimum size of the global / collective dataset. It is collectively
+ computed using the encrypted aggregation
+ min_frequencies (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset
+ size
use_differential_privacy (Union[Unset, bool]): whether to use Differential Privacy to protect the privacy of the
results.
execution_quota_parameters (Union[Unset, ExecutionQuotaParameters]): Execution quota settings.
@@ -30,78 +39,70 @@ class DPPolicy:
If the computation is a private set intersection, each query consumes budget equal to the size of the querying
set.
Otherwise, a unit represents one computation.
+ max_column_count (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset
+ size
min_dataset_size (Union[Unset, int]): minimum size of the dataset used as local input (checked both before and
after the preprocessing operations are run)
- min_frequencies (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset
- size
- min_global_dataset_size (Union[Unset, int]): minimum size of the global / collective dataset. It is collectively
- computed using the encrypted aggregation
- authorized_variables (Union[Unset, List[str]]): constraint on the set of variables that can be used as input, in
- order to prevent misuse of variables that are out of context of the project.
- if > 0 variables are defined here, then the dataset will automatically drop any variables that do not belong to
- this set.
- Warning: this mechanism is only effective when the data selection parameters (data source queries) are fixed,
- and therefore
- returned variables cannot be aliased (for example using aliases in SQL SELECT statements) to evade this trap.
"""
- max_column_count: Union[Unset, "Threshold"] = UNSET
- max_factors: Union[Unset, "Threshold"] = UNSET
noisy_global_size: Union[Unset, bool] = UNSET
+ authorized_variables: Union[Unset, List[str]] = UNSET
+ max_factors: Union[Unset, "Threshold"] = UNSET
+ min_global_dataset_size: Union[Unset, int] = UNSET
+ min_frequencies: Union[Unset, "Threshold"] = UNSET
use_differential_privacy: Union[Unset, bool] = False
execution_quota_parameters: Union[Unset, "ExecutionQuotaParameters"] = UNSET
+ max_column_count: Union[Unset, "Threshold"] = UNSET
min_dataset_size: Union[Unset, int] = UNSET
- min_frequencies: Union[Unset, "Threshold"] = UNSET
- min_global_dataset_size: Union[Unset, int] = UNSET
- authorized_variables: Union[Unset, List[str]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- max_column_count: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.max_column_count, Unset):
- max_column_count = self.max_column_count.to_dict()
+ noisy_global_size = self.noisy_global_size
+ authorized_variables: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.authorized_variables, Unset):
+ authorized_variables = self.authorized_variables
max_factors: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.max_factors, Unset):
max_factors = self.max_factors.to_dict()
- noisy_global_size = self.noisy_global_size
+ min_global_dataset_size = self.min_global_dataset_size
+ min_frequencies: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.min_frequencies, Unset):
+ min_frequencies = self.min_frequencies.to_dict()
+
use_differential_privacy = self.use_differential_privacy
execution_quota_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.execution_quota_parameters, Unset):
execution_quota_parameters = self.execution_quota_parameters.to_dict()
- min_dataset_size = self.min_dataset_size
- min_frequencies: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.min_frequencies, Unset):
- min_frequencies = self.min_frequencies.to_dict()
+ max_column_count: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.max_column_count, Unset):
+ max_column_count = self.max_column_count.to_dict()
- min_global_dataset_size = self.min_global_dataset_size
- authorized_variables: Union[Unset, List[str]] = UNSET
- if not isinstance(self.authorized_variables, Unset):
- authorized_variables = self.authorized_variables
+ min_dataset_size = self.min_dataset_size
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if max_column_count is not UNSET:
- field_dict["maxColumnCount"] = max_column_count
- if max_factors is not UNSET:
- field_dict["maxFactors"] = max_factors
if noisy_global_size is not UNSET:
field_dict["noisyGlobalSize"] = noisy_global_size
+ if authorized_variables is not UNSET:
+ field_dict["authorizedVariables"] = authorized_variables
+ if max_factors is not UNSET:
+ field_dict["maxFactors"] = max_factors
+ if min_global_dataset_size is not UNSET:
+ field_dict["minGlobalDatasetSize"] = min_global_dataset_size
+ if min_frequencies is not UNSET:
+ field_dict["minFrequencies"] = min_frequencies
if use_differential_privacy is not UNSET:
field_dict["useDifferentialPrivacy"] = use_differential_privacy
if execution_quota_parameters is not UNSET:
field_dict["executionQuotaParameters"] = execution_quota_parameters
+ if max_column_count is not UNSET:
+ field_dict["maxColumnCount"] = max_column_count
if min_dataset_size is not UNSET:
field_dict["minDatasetSize"] = min_dataset_size
- if min_frequencies is not UNSET:
- field_dict["minFrequencies"] = min_frequencies
- if min_global_dataset_size is not UNSET:
- field_dict["minGlobalDatasetSize"] = min_global_dataset_size
- if authorized_variables is not UNSET:
- field_dict["authorizedVariables"] = authorized_variables
return field_dict
@@ -111,12 +112,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.threshold import Threshold
d = src_dict.copy()
- _max_column_count = d.pop("maxColumnCount", UNSET)
- max_column_count: Union[Unset, Threshold]
- if isinstance(_max_column_count, Unset):
- max_column_count = UNSET
- else:
- max_column_count = Threshold.from_dict(_max_column_count)
+ noisy_global_size = d.pop("noisyGlobalSize", UNSET)
+
+ authorized_variables = cast(List[str], d.pop("authorizedVariables", UNSET))
_max_factors = d.pop("maxFactors", UNSET)
max_factors: Union[Unset, Threshold]
@@ -125,7 +123,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
max_factors = Threshold.from_dict(_max_factors)
- noisy_global_size = d.pop("noisyGlobalSize", UNSET)
+ min_global_dataset_size = d.pop("minGlobalDatasetSize", UNSET)
+
+ _min_frequencies = d.pop("minFrequencies", UNSET)
+ min_frequencies: Union[Unset, Threshold]
+ if isinstance(_min_frequencies, Unset):
+ min_frequencies = UNSET
+ else:
+ min_frequencies = Threshold.from_dict(_min_frequencies)
use_differential_privacy = d.pop("useDifferentialPrivacy", UNSET)
@@ -136,29 +141,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
execution_quota_parameters = ExecutionQuotaParameters.from_dict(_execution_quota_parameters)
- min_dataset_size = d.pop("minDatasetSize", UNSET)
-
- _min_frequencies = d.pop("minFrequencies", UNSET)
- min_frequencies: Union[Unset, Threshold]
- if isinstance(_min_frequencies, Unset):
- min_frequencies = UNSET
+ _max_column_count = d.pop("maxColumnCount", UNSET)
+ max_column_count: Union[Unset, Threshold]
+ if isinstance(_max_column_count, Unset):
+ max_column_count = UNSET
else:
- min_frequencies = Threshold.from_dict(_min_frequencies)
-
- min_global_dataset_size = d.pop("minGlobalDatasetSize", UNSET)
+ max_column_count = Threshold.from_dict(_max_column_count)
- authorized_variables = cast(List[str], d.pop("authorizedVariables", UNSET))
+ min_dataset_size = d.pop("minDatasetSize", UNSET)
dp_policy = cls(
- max_column_count=max_column_count,
- max_factors=max_factors,
noisy_global_size=noisy_global_size,
+ authorized_variables=authorized_variables,
+ max_factors=max_factors,
+ min_global_dataset_size=min_global_dataset_size,
+ min_frequencies=min_frequencies,
use_differential_privacy=use_differential_privacy,
execution_quota_parameters=execution_quota_parameters,
+ max_column_count=max_column_count,
min_dataset_size=min_dataset_size,
- min_frequencies=min_frequencies,
- min_global_dataset_size=min_global_dataset_size,
- authorized_variables=authorized_variables,
)
dp_policy.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/dummy.py b/src/tuneinsight/api/sdk/models/dummy.py
index 7e23cef..ba8ba11 100644
--- a/src/tuneinsight/api/sdk/models/dummy.py
+++ b/src/tuneinsight/api/sdk/models/dummy.py
@@ -22,8 +22,12 @@ class Dummy:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class Dummy:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class Dummy:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,105 +65,92 @@ class Dummy:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
- error_in_constructor (Union[Unset, bool]):
error_in_start (Union[Unset, bool]):
panic_in_constructor (Union[Unset, bool]):
panic_in_start (Union[Unset, bool]):
+ error_in_constructor (Union[Unset, bool]):
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
- error_in_constructor: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
error_in_start: Union[Unset, bool] = UNSET
panic_in_constructor: Union[Unset, bool] = UNSET
panic_in_start: Union[Unset, bool] = UNSET
+ error_in_constructor: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
- error_in_constructor = self.error_in_constructor
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
error_in_start = self.error_in_start
panic_in_constructor = self.panic_in_constructor
panic_in_start = self.panic_in_start
+ error_in_constructor = self.error_in_constructor
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -159,54 +159,54 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
- if error_in_constructor is not UNSET:
- field_dict["errorInConstructor"] = error_in_constructor
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if error_in_start is not UNSET:
field_dict["errorInStart"] = error_in_start
if panic_in_constructor is not UNSET:
field_dict["panicInConstructor"] = panic_in_constructor
if panic_in_start is not UNSET:
field_dict["panicInStart"] = panic_in_start
+ if error_in_constructor is not UNSET:
+ field_dict["errorInConstructor"] = error_in_constructor
return field_dict
@@ -220,14 +220,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -236,10 +229,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -247,6 +251,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -258,19 +276,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -278,19 +283,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
-
- error_in_constructor = d.pop("errorInConstructor", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
error_in_start = d.pop("errorInStart", UNSET)
@@ -298,32 +296,34 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
panic_in_start = d.pop("panicInStart", UNSET)
+ error_in_constructor = d.pop("errorInConstructor", UNSET)
+
dummy = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
- error_in_constructor=error_in_constructor,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
error_in_start=error_in_start,
panic_in_constructor=panic_in_constructor,
panic_in_start=panic_in_start,
+ error_in_constructor=error_in_constructor,
)
dummy.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/enc_vector.py b/src/tuneinsight/api/sdk/models/enc_vector.py
index a80faf0..40f922c 100644
--- a/src/tuneinsight/api/sdk/models/enc_vector.py
+++ b/src/tuneinsight/api/sdk/models/enc_vector.py
@@ -17,23 +17,23 @@ class EncVector:
"""Vector of encrypted numerical values.
Attributes:
- encryption (Encryption):
type (EncVectorType):
+ encryption (Encryption):
expanded (Union[Unset, List[str]]):
packed (Union[Unset, str]):
"""
- encryption: "Encryption"
type: EncVectorType
+ encryption: "Encryption"
expanded: Union[Unset, List[str]] = UNSET
packed: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- encryption = self.encryption.to_dict()
-
type = self.type.value
+ encryption = self.encryption.to_dict()
+
expanded: Union[Unset, List[str]] = UNSET
if not isinstance(self.expanded, Unset):
expanded = self.expanded
@@ -44,8 +44,8 @@ def to_dict(self) -> Dict[str, Any]:
field_dict.update(self.additional_properties)
field_dict.update(
{
- "encryption": encryption,
"type": type,
+ "encryption": encryption,
}
)
if expanded is not UNSET:
@@ -60,17 +60,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.encryption import Encryption
d = src_dict.copy()
- encryption = Encryption.from_dict(d.pop("encryption"))
-
type = EncVectorType(d.pop("type"))
+ encryption = Encryption.from_dict(d.pop("encryption"))
+
expanded = cast(List[str], d.pop("expanded", UNSET))
packed = d.pop("packed", UNSET)
enc_vector = cls(
- encryption=encryption,
type=type,
+ encryption=encryption,
expanded=expanded,
packed=packed,
)
diff --git a/src/tuneinsight/api/sdk/models/encrypted_aggregation.py b/src/tuneinsight/api/sdk/models/encrypted_aggregation.py
index 6698430..e5f5456 100644
--- a/src/tuneinsight/api/sdk/models/encrypted_aggregation.py
+++ b/src/tuneinsight/api/sdk/models/encrypted_aggregation.py
@@ -22,8 +22,12 @@ class EncryptedAggregation:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class EncryptedAggregation:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class EncryptedAggregation:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,25 +65,13 @@ class EncryptedAggregation:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ features (Union[Unset, str]): Shared identifier of a data object.
lower_bounds (Union[Unset, List[float]]): Lower bounds on the values in each column of the aggregation. Used in
DP mode for clipping and sensitivity.
nb_features (Union[Unset, int]): Number of columns of the dataset
@@ -80,82 +81,82 @@ class EncryptedAggregation:
aggregate_features (Union[Unset, bool]): If true, sum the columns together into one number
count_column (Union[Unset, str]): The column, if any, that is a count column (of 1s). Used in DP mode to improve
accuracy.
- features (Union[Unset, str]): Shared identifier of a data object.
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
+ features: Union[Unset, str] = UNSET
lower_bounds: Union[Unset, List[float]] = UNSET
nb_features: Union[Unset, int] = UNSET
upper_bounds: Union[Unset, List[float]] = UNSET
aggregate_columns: Union[Unset, List[str]] = UNSET
aggregate_features: Union[Unset, bool] = UNSET
count_column: Union[Unset, str] = UNSET
- features: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
+ features = self.features
lower_bounds: Union[Unset, List[float]] = UNSET
if not isinstance(self.lower_bounds, Unset):
lower_bounds = self.lower_bounds
@@ -171,7 +172,6 @@ def to_dict(self) -> Dict[str, Any]:
aggregate_features = self.aggregate_features
count_column = self.count_column
- features = self.features
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -180,46 +180,48 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
+ if features is not UNSET:
+ field_dict["features"] = features
if lower_bounds is not UNSET:
field_dict["lowerBounds"] = lower_bounds
if nb_features is not UNSET:
@@ -232,8 +234,6 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["aggregateFeatures"] = aggregate_features
if count_column is not UNSET:
field_dict["countColumn"] = count_column
- if features is not UNSET:
- field_dict["features"] = features
return field_dict
@@ -247,14 +247,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -263,10 +256,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -274,6 +278,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -285,19 +303,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -305,17 +310,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
- wait = d.pop("wait", UNSET)
+ features = d.pop("features", UNSET)
lower_bounds = cast(List[float], d.pop("lowerBounds", UNSET))
@@ -329,37 +331,35 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
count_column = d.pop("countColumn", UNSET)
- features = d.pop("features", UNSET)
-
encrypted_aggregation = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
+ features=features,
lower_bounds=lower_bounds,
nb_features=nb_features,
upper_bounds=upper_bounds,
aggregate_columns=aggregate_columns,
aggregate_features=aggregate_features,
count_column=count_column,
- features=features,
)
encrypted_aggregation.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/encrypted_mean.py b/src/tuneinsight/api/sdk/models/encrypted_mean.py
index e49cbc1..21bfb1a 100644
--- a/src/tuneinsight/api/sdk/models/encrypted_mean.py
+++ b/src/tuneinsight/api/sdk/models/encrypted_mean.py
@@ -22,8 +22,12 @@ class EncryptedMean:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class EncryptedMean:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class EncryptedMean:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,25 +65,12 @@ class EncryptedMean:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
grouping_keys (Union[Unset, List[str]]): This parameter is used to specify the composite keys for grouping the
aggregated values.
For example, when the groupingKeys are set to [id, name], the aggregation will be performed separately
@@ -87,28 +87,28 @@ class EncryptedMean:
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
grouping_keys: Union[Unset, List[str]] = UNSET
min_participants: Union[Unset, int] = UNSET
outlier_threshold: Union[Unset, float] = UNSET
@@ -119,44 +119,44 @@ class EncryptedMean:
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
grouping_keys: Union[Unset, List[str]] = UNSET
if not isinstance(self.grouping_keys, Unset):
grouping_keys = self.grouping_keys
@@ -175,46 +175,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if grouping_keys is not UNSET:
field_dict["groupingKeys"] = grouping_keys
if min_participants is not UNSET:
@@ -238,14 +238,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -254,10 +247,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -265,6 +269,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -276,19 +294,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -296,17 +301,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
grouping_keys = cast(List[str], d.pop("groupingKeys", UNSET))
@@ -320,26 +320,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
encrypted_mean = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
grouping_keys=grouping_keys,
min_participants=min_participants,
outlier_threshold=outlier_threshold,
diff --git a/src/tuneinsight/api/sdk/models/encrypted_prediction.py b/src/tuneinsight/api/sdk/models/encrypted_prediction.py
index bf0a9ab..2bd1011 100644
--- a/src/tuneinsight/api/sdk/models/encrypted_prediction.py
+++ b/src/tuneinsight/api/sdk/models/encrypted_prediction.py
@@ -22,8 +22,12 @@ class EncryptedPrediction:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class EncryptedPrediction:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class EncryptedPrediction:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,25 +65,12 @@ class EncryptedPrediction:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
label_columns (Union[Unset, List[str]]): specified label columns of the original dataset if the computation
specifies to return the ground truth labels alongside
model (Union[Unset, str]): Unique identifier of a data object.
@@ -84,28 +84,28 @@ class EncryptedPrediction:
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
label_columns: Union[Unset, List[str]] = UNSET
model: Union[Unset, str] = UNSET
only_root_prediction: Union[Unset, bool] = UNSET
@@ -117,44 +117,44 @@ class EncryptedPrediction:
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
label_columns: Union[Unset, List[str]] = UNSET
if not isinstance(self.label_columns, Unset):
label_columns = self.label_columns
@@ -175,46 +175,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if label_columns is not UNSET:
field_dict["labelColumns"] = label_columns
if model is not UNSET:
@@ -240,14 +240,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -256,10 +249,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -267,6 +271,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -278,19 +296,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -298,17 +303,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
label_columns = cast(List[str], d.pop("labelColumns", UNSET))
@@ -324,26 +324,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
encrypted_prediction = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
label_columns=label_columns,
model=model,
only_root_prediction=only_root_prediction,
diff --git a/src/tuneinsight/api/sdk/models/encrypted_regression.py b/src/tuneinsight/api/sdk/models/encrypted_regression.py
index 63ecef9..81a8644 100644
--- a/src/tuneinsight/api/sdk/models/encrypted_regression.py
+++ b/src/tuneinsight/api/sdk/models/encrypted_regression.py
@@ -23,8 +23,12 @@ class EncryptedRegression:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class EncryptedRegression:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class EncryptedRegression:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,25 +66,12 @@ class EncryptedRegression:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
feature_columns (Union[Unset, List[str]]): specified columns from the input dataset corresponding to the
features
label_columns (Union[Unset, List[str]]): specified columns from the input dataset corresponding to the labels
@@ -80,28 +80,28 @@ class EncryptedRegression:
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
feature_columns: Union[Unset, List[str]] = UNSET
label_columns: Union[Unset, List[str]] = UNSET
params: Union[Unset, "EncryptedRegressionParams"] = UNSET
@@ -111,44 +111,44 @@ class EncryptedRegression:
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
feature_columns: Union[Unset, List[str]] = UNSET
if not isinstance(self.feature_columns, Unset):
feature_columns = self.feature_columns
@@ -170,46 +170,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if feature_columns is not UNSET:
field_dict["featureColumns"] = feature_columns
if label_columns is not UNSET:
@@ -232,14 +232,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -248,10 +241,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -259,6 +263,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -270,19 +288,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -290,17 +295,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
feature_columns = cast(List[str], d.pop("featureColumns", UNSET))
@@ -317,26 +317,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
encrypted_regression = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
feature_columns=feature_columns,
label_columns=label_columns,
params=params,
diff --git a/src/tuneinsight/api/sdk/models/encrypted_regression_params.py b/src/tuneinsight/api/sdk/models/encrypted_regression_params.py
index 1929248..3197bfc 100644
--- a/src/tuneinsight/api/sdk/models/encrypted_regression_params.py
+++ b/src/tuneinsight/api/sdk/models/encrypted_regression_params.py
@@ -18,74 +18,73 @@ class EncryptedRegressionParams:
"""Parameters for the encrypted regression.
Attributes:
- elastic_rate (Union[Unset, float]): The elastic rate of the regression. Default: 0.85.
- local_batch_size (Union[Unset, int]): The batch size in each local iteration. Default: 64.
seed (Union[Unset, float]): The seed to sample the initial weights.
+ momentum (Union[Unset, float]): The momentum rate of the regression. Default: 0.92.
+ network_iteration_count (Union[Unset, int]): The global maximum number of iteration. Default: 1.
approximation_params (Union[Unset, ApproximationParams]): parameters for polynomial approximation
+ elastic_rate (Union[Unset, float]): The elastic rate of the regression. Default: 0.85.
+ learning_rate (Union[Unset, float]): The learning rate of the regression. Default: 0.02.
linear (Union[Unset, EncryptedRegressionParamsLinear]): Parameters specific for the linear regression.
+ local_batch_size (Union[Unset, int]): The batch size in each local iteration. Default: 64.
local_iteration_count (Union[Unset, int]): The maximum number of local iterations. Default: 1.
- momentum (Union[Unset, float]): The momentum rate of the regression. Default: 0.92.
- network_iteration_count (Union[Unset, int]): The global maximum number of iteration. Default: 1.
type (Union[Unset, RegressionType]): type of the regression
- learning_rate (Union[Unset, float]): The learning rate of the regression. Default: 0.02.
"""
- elastic_rate: Union[Unset, float] = 0.85
- local_batch_size: Union[Unset, int] = 64
seed: Union[Unset, float] = 0.0
+ momentum: Union[Unset, float] = 0.92
+ network_iteration_count: Union[Unset, int] = 1
approximation_params: Union[Unset, "ApproximationParams"] = UNSET
+ elastic_rate: Union[Unset, float] = 0.85
+ learning_rate: Union[Unset, float] = 0.02
linear: Union[Unset, "EncryptedRegressionParamsLinear"] = UNSET
+ local_batch_size: Union[Unset, int] = 64
local_iteration_count: Union[Unset, int] = 1
- momentum: Union[Unset, float] = 0.92
- network_iteration_count: Union[Unset, int] = 1
type: Union[Unset, RegressionType] = UNSET
- learning_rate: Union[Unset, float] = 0.02
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- elastic_rate = self.elastic_rate
- local_batch_size = self.local_batch_size
seed = self.seed
+ momentum = self.momentum
+ network_iteration_count = self.network_iteration_count
approximation_params: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.approximation_params, Unset):
approximation_params = self.approximation_params.to_dict()
+ elastic_rate = self.elastic_rate
+ learning_rate = self.learning_rate
linear: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.linear, Unset):
linear = self.linear.to_dict()
+ local_batch_size = self.local_batch_size
local_iteration_count = self.local_iteration_count
- momentum = self.momentum
- network_iteration_count = self.network_iteration_count
type: Union[Unset, str] = UNSET
if not isinstance(self.type, Unset):
type = self.type.value
- learning_rate = self.learning_rate
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if elastic_rate is not UNSET:
- field_dict["elasticRate"] = elastic_rate
- if local_batch_size is not UNSET:
- field_dict["localBatchSize"] = local_batch_size
if seed is not UNSET:
field_dict["seed"] = seed
+ if momentum is not UNSET:
+ field_dict["momentum"] = momentum
+ if network_iteration_count is not UNSET:
+ field_dict["networkIterationCount"] = network_iteration_count
if approximation_params is not UNSET:
field_dict["approximationParams"] = approximation_params
+ if elastic_rate is not UNSET:
+ field_dict["elasticRate"] = elastic_rate
+ if learning_rate is not UNSET:
+ field_dict["learningRate"] = learning_rate
if linear is not UNSET:
field_dict["linear"] = linear
+ if local_batch_size is not UNSET:
+ field_dict["localBatchSize"] = local_batch_size
if local_iteration_count is not UNSET:
field_dict["localIterationCount"] = local_iteration_count
- if momentum is not UNSET:
- field_dict["momentum"] = momentum
- if network_iteration_count is not UNSET:
- field_dict["networkIterationCount"] = network_iteration_count
if type is not UNSET:
field_dict["type"] = type
- if learning_rate is not UNSET:
- field_dict["learningRate"] = learning_rate
return field_dict
@@ -95,11 +94,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.encrypted_regression_params_linear import EncryptedRegressionParamsLinear
d = src_dict.copy()
- elastic_rate = d.pop("elasticRate", UNSET)
+ seed = d.pop("seed", UNSET)
- local_batch_size = d.pop("localBatchSize", UNSET)
+ momentum = d.pop("momentum", UNSET)
- seed = d.pop("seed", UNSET)
+ network_iteration_count = d.pop("networkIterationCount", UNSET)
_approximation_params = d.pop("approximationParams", UNSET)
approximation_params: Union[Unset, ApproximationParams]
@@ -108,6 +107,10 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
approximation_params = ApproximationParams.from_dict(_approximation_params)
+ elastic_rate = d.pop("elasticRate", UNSET)
+
+ learning_rate = d.pop("learningRate", UNSET)
+
_linear = d.pop("linear", UNSET)
linear: Union[Unset, EncryptedRegressionParamsLinear]
if isinstance(_linear, Unset):
@@ -115,11 +118,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
linear = EncryptedRegressionParamsLinear.from_dict(_linear)
- local_iteration_count = d.pop("localIterationCount", UNSET)
-
- momentum = d.pop("momentum", UNSET)
+ local_batch_size = d.pop("localBatchSize", UNSET)
- network_iteration_count = d.pop("networkIterationCount", UNSET)
+ local_iteration_count = d.pop("localIterationCount", UNSET)
_type = d.pop("type", UNSET)
type: Union[Unset, RegressionType]
@@ -128,19 +129,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
type = RegressionType(_type)
- learning_rate = d.pop("learningRate", UNSET)
-
encrypted_regression_params = cls(
- elastic_rate=elastic_rate,
- local_batch_size=local_batch_size,
seed=seed,
+ momentum=momentum,
+ network_iteration_count=network_iteration_count,
approximation_params=approximation_params,
+ elastic_rate=elastic_rate,
+ learning_rate=learning_rate,
linear=linear,
+ local_batch_size=local_batch_size,
local_iteration_count=local_iteration_count,
- momentum=momentum,
- network_iteration_count=network_iteration_count,
type=type,
- learning_rate=learning_rate,
)
encrypted_regression_params.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/execution_quota_parameters.py b/src/tuneinsight/api/sdk/models/execution_quota_parameters.py
index a65f865..9e3e306 100644
--- a/src/tuneinsight/api/sdk/models/execution_quota_parameters.py
+++ b/src/tuneinsight/api/sdk/models/execution_quota_parameters.py
@@ -23,31 +23,25 @@ class ExecutionQuotaParameters:
Otherwise, a unit represents one computation.
Attributes:
- allocation (Union[Unset, float]): quota allocated initially.
- allocation_interval (Union[Unset, Duration]): definition of a date-independent time interval
- increment (Union[Unset, float]): value incremented after each allocation interval
local_computations_use_budget (Union[Unset, bool]): whether local computations consume the execution quota
max_allocation (Union[Unset, float]): maximum value that can be taken by the execution quota
scope (Union[Unset, ExecutionQuotaParametersScope]): scope of the quota
start (Union[Unset, datetime.datetime]): date time at which the quota is effective
+ allocation (Union[Unset, float]): quota allocated initially.
+ allocation_interval (Union[Unset, Duration]): definition of a date-independent time interval
+ increment (Union[Unset, float]): value incremented after each allocation interval
"""
- allocation: Union[Unset, float] = UNSET
- allocation_interval: Union[Unset, "Duration"] = UNSET
- increment: Union[Unset, float] = UNSET
local_computations_use_budget: Union[Unset, bool] = False
max_allocation: Union[Unset, float] = UNSET
scope: Union[Unset, ExecutionQuotaParametersScope] = UNSET
start: Union[Unset, datetime.datetime] = UNSET
+ allocation: Union[Unset, float] = UNSET
+ allocation_interval: Union[Unset, "Duration"] = UNSET
+ increment: Union[Unset, float] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- allocation = self.allocation
- allocation_interval: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.allocation_interval, Unset):
- allocation_interval = self.allocation_interval.to_dict()
-
- increment = self.increment
local_computations_use_budget = self.local_computations_use_budget
max_allocation = self.max_allocation
scope: Union[Unset, str] = UNSET
@@ -58,15 +52,16 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.start, Unset):
start = self.start.isoformat()
+ allocation = self.allocation
+ allocation_interval: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.allocation_interval, Unset):
+ allocation_interval = self.allocation_interval.to_dict()
+
+ increment = self.increment
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if allocation is not UNSET:
- field_dict["allocation"] = allocation
- if allocation_interval is not UNSET:
- field_dict["allocationInterval"] = allocation_interval
- if increment is not UNSET:
- field_dict["increment"] = increment
if local_computations_use_budget is not UNSET:
field_dict["localComputationsUseBudget"] = local_computations_use_budget
if max_allocation is not UNSET:
@@ -75,6 +70,12 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["scope"] = scope
if start is not UNSET:
field_dict["start"] = start
+ if allocation is not UNSET:
+ field_dict["allocation"] = allocation
+ if allocation_interval is not UNSET:
+ field_dict["allocationInterval"] = allocation_interval
+ if increment is not UNSET:
+ field_dict["increment"] = increment
return field_dict
@@ -83,17 +84,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.duration import Duration
d = src_dict.copy()
- allocation = d.pop("allocation", UNSET)
-
- _allocation_interval = d.pop("allocationInterval", UNSET)
- allocation_interval: Union[Unset, Duration]
- if isinstance(_allocation_interval, Unset):
- allocation_interval = UNSET
- else:
- allocation_interval = Duration.from_dict(_allocation_interval)
-
- increment = d.pop("increment", UNSET)
-
local_computations_use_budget = d.pop("localComputationsUseBudget", UNSET)
max_allocation = d.pop("maxAllocation", UNSET)
@@ -112,14 +102,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
start = isoparse(_start)
+ allocation = d.pop("allocation", UNSET)
+
+ _allocation_interval = d.pop("allocationInterval", UNSET)
+ allocation_interval: Union[Unset, Duration]
+ if isinstance(_allocation_interval, Unset):
+ allocation_interval = UNSET
+ else:
+ allocation_interval = Duration.from_dict(_allocation_interval)
+
+ increment = d.pop("increment", UNSET)
+
execution_quota_parameters = cls(
- allocation=allocation,
- allocation_interval=allocation_interval,
- increment=increment,
local_computations_use_budget=local_computations_use_budget,
max_allocation=max_allocation,
scope=scope,
start=start,
+ allocation=allocation,
+ allocation_interval=allocation_interval,
+ increment=increment,
)
execution_quota_parameters.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/filter_.py b/src/tuneinsight/api/sdk/models/filter_.py
index e15c4d3..86e0dd7 100644
--- a/src/tuneinsight/api/sdk/models/filter_.py
+++ b/src/tuneinsight/api/sdk/models/filter_.py
@@ -14,26 +14,27 @@ class Filter:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
- col_name (str): name of column to filter on
value (str): value with which to compare
+ col_name (str): name of column to filter on
+ numerical (Union[Unset, bool]): indicate whether the comparison is on numerical values
values (Union[Unset, List[str]]): list of values to pass in when comparison type is 'isin'.
comparator (Union[Unset, ComparisonType]): type of comparison
- numerical (Union[Unset, bool]): indicate whether the comparison is on numerical values
"""
type: PreprocessingOperationType
- col_name: str
value: str
+ col_name: str
+ numerical: Union[Unset, bool] = UNSET
values: Union[Unset, List[str]] = UNSET
comparator: Union[Unset, ComparisonType] = UNSET
- numerical: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- col_name = self.col_name
value = self.value
+ col_name = self.col_name
+ numerical = self.numerical
values: Union[Unset, List[str]] = UNSET
if not isinstance(self.values, Unset):
values = self.values
@@ -42,23 +43,21 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.comparator, Unset):
comparator = self.comparator.value
- numerical = self.numerical
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
{
"type": type,
- "colName": col_name,
"value": value,
+ "colName": col_name,
}
)
+ if numerical is not UNSET:
+ field_dict["numerical"] = numerical
if values is not UNSET:
field_dict["values"] = values
if comparator is not UNSET:
field_dict["comparator"] = comparator
- if numerical is not UNSET:
- field_dict["numerical"] = numerical
return field_dict
@@ -67,9 +66,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
+ value = d.pop("value")
+
col_name = d.pop("colName")
- value = d.pop("value")
+ numerical = d.pop("numerical", UNSET)
values = cast(List[str], d.pop("values", UNSET))
@@ -80,15 +81,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
comparator = ComparisonType(_comparator)
- numerical = d.pop("numerical", UNSET)
-
filter_ = cls(
type=type,
- col_name=col_name,
value=value,
+ col_name=col_name,
+ numerical=numerical,
values=values,
comparator=comparator,
- numerical=numerical,
)
filter_.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/float_matrix.py b/src/tuneinsight/api/sdk/models/float_matrix.py
index 4bc40e1..deb17ad 100644
--- a/src/tuneinsight/api/sdk/models/float_matrix.py
+++ b/src/tuneinsight/api/sdk/models/float_matrix.py
@@ -20,16 +20,16 @@ class FloatMatrix:
columns (List[str]): Name of the columns of the matrix
data (List[List[float]]): 2d array of float values
contextual_info (Union[Unset, ResultContextualInfo]): contextual information about the content retrieved
- row_count (Union[Unset, int]):
column_count (Union[Unset, int]):
+ row_count (Union[Unset, int]):
"""
type: ContentType
columns: List[str]
data: List[List[float]]
contextual_info: Union[Unset, "ResultContextualInfo"] = UNSET
- row_count: Union[Unset, int] = UNSET
column_count: Union[Unset, int] = UNSET
+ row_count: Union[Unset, int] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
@@ -47,8 +47,8 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.contextual_info, Unset):
contextual_info = self.contextual_info.to_dict()
- row_count = self.row_count
column_count = self.column_count
+ row_count = self.row_count
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -61,10 +61,10 @@ def to_dict(self) -> Dict[str, Any]:
)
if contextual_info is not UNSET:
field_dict["contextualInfo"] = contextual_info
- if row_count is not UNSET:
- field_dict["rowCount"] = row_count
if column_count is not UNSET:
field_dict["columnCount"] = column_count
+ if row_count is not UNSET:
+ field_dict["rowCount"] = row_count
return field_dict
@@ -91,17 +91,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
contextual_info = ResultContextualInfo.from_dict(_contextual_info)
- row_count = d.pop("rowCount", UNSET)
-
column_count = d.pop("columnCount", UNSET)
+ row_count = d.pop("rowCount", UNSET)
+
float_matrix = cls(
type=type,
columns=columns,
data=data,
contextual_info=contextual_info,
- row_count=row_count,
column_count=column_count,
+ row_count=row_count,
)
float_matrix.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/get_infos_response_200.py b/src/tuneinsight/api/sdk/models/get_infos_response_200.py
index a234047..59a5f66 100644
--- a/src/tuneinsight/api/sdk/models/get_infos_response_200.py
+++ b/src/tuneinsight/api/sdk/models/get_infos_response_200.py
@@ -11,54 +11,54 @@
class GetInfosResponse200:
"""
Attributes:
- api_checksum (Union[Unset, str]): Checksum of the current version of the API.
- auth_status (Union[Unset, str]): Authentication provider connectivity status
portal_status (Union[Unset, str]): Portal connectivity status
version (Union[Unset, str]): Tune Insight instance version
+ api_checksum (Union[Unset, str]): Checksum of the current version of the API.
+ auth_status (Union[Unset, str]): Authentication provider connectivity status
"""
- api_checksum: Union[Unset, str] = UNSET
- auth_status: Union[Unset, str] = UNSET
portal_status: Union[Unset, str] = UNSET
version: Union[Unset, str] = UNSET
+ api_checksum: Union[Unset, str] = UNSET
+ auth_status: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- api_checksum = self.api_checksum
- auth_status = self.auth_status
portal_status = self.portal_status
version = self.version
+ api_checksum = self.api_checksum
+ auth_status = self.auth_status
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if api_checksum is not UNSET:
- field_dict["APIChecksum"] = api_checksum
- if auth_status is not UNSET:
- field_dict["authStatus"] = auth_status
if portal_status is not UNSET:
field_dict["portalStatus"] = portal_status
if version is not UNSET:
field_dict["version"] = version
+ if api_checksum is not UNSET:
+ field_dict["APIChecksum"] = api_checksum
+ if auth_status is not UNSET:
+ field_dict["authStatus"] = auth_status
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- api_checksum = d.pop("APIChecksum", UNSET)
-
- auth_status = d.pop("authStatus", UNSET)
-
portal_status = d.pop("portalStatus", UNSET)
version = d.pop("version", UNSET)
+ api_checksum = d.pop("APIChecksum", UNSET)
+
+ auth_status = d.pop("authStatus", UNSET)
+
get_infos_response_200 = cls(
- api_checksum=api_checksum,
- auth_status=auth_status,
portal_status=portal_status,
version=version,
+ api_checksum=api_checksum,
+ auth_status=auth_status,
)
get_infos_response_200.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/get_network_metadata_response_200.py b/src/tuneinsight/api/sdk/models/get_network_metadata_response_200.py
index 6820d98..a84adde 100644
--- a/src/tuneinsight/api/sdk/models/get_network_metadata_response_200.py
+++ b/src/tuneinsight/api/sdk/models/get_network_metadata_response_200.py
@@ -2,6 +2,7 @@
import attr
+from ..models.get_network_metadata_response_200_network_type import GetNetworkMetadataResponse200NetworkType
from ..types import UNSET, Unset
if TYPE_CHECKING:
@@ -16,25 +17,35 @@
class GetNetworkMetadataResponse200:
"""
Attributes:
- networks (Union[Unset, List['Network']]):
- nodes (Union[Unset, List['Node']]):
- warnings (Union[Unset, List[str]]):
compound_queries_enabled (Union[Unset, bool]): Indicates if compound queries are enabled. If true, the data
source queries can be composed of multiple queries.
default_topology (Union[Unset, str]): Indicates the default topology of the network used when creating a
project. Values can be "star" or "tree".
dpo_authorization_enabled (Union[Unset, bool]): Indicates if collective projects require authorization.
+ network_type (Union[Unset, GetNetworkMetadataResponse200NetworkType]): Indicates the type of network. Values can
+ be "default" or "sse".
+ networks (Union[Unset, List['Network']]):
+ nodes (Union[Unset, List['Node']]):
+ warnings (Union[Unset, List[str]]):
"""
- networks: Union[Unset, List["Network"]] = UNSET
- nodes: Union[Unset, List["Node"]] = UNSET
- warnings: Union[Unset, List[str]] = UNSET
compound_queries_enabled: Union[Unset, bool] = UNSET
default_topology: Union[Unset, str] = UNSET
dpo_authorization_enabled: Union[Unset, bool] = UNSET
+ network_type: Union[Unset, GetNetworkMetadataResponse200NetworkType] = UNSET
+ networks: Union[Unset, List["Network"]] = UNSET
+ nodes: Union[Unset, List["Node"]] = UNSET
+ warnings: Union[Unset, List[str]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ compound_queries_enabled = self.compound_queries_enabled
+ default_topology = self.default_topology
+ dpo_authorization_enabled = self.dpo_authorization_enabled
+ network_type: Union[Unset, str] = UNSET
+ if not isinstance(self.network_type, Unset):
+ network_type = self.network_type.value
+
networks: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.networks, Unset):
networks = []
@@ -55,25 +66,23 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.warnings, Unset):
warnings = self.warnings
- compound_queries_enabled = self.compound_queries_enabled
- default_topology = self.default_topology
- dpo_authorization_enabled = self.dpo_authorization_enabled
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if compound_queries_enabled is not UNSET:
+ field_dict["compoundQueriesEnabled"] = compound_queries_enabled
+ if default_topology is not UNSET:
+ field_dict["defaultTopology"] = default_topology
+ if dpo_authorization_enabled is not UNSET:
+ field_dict["dpoAuthorizationEnabled"] = dpo_authorization_enabled
+ if network_type is not UNSET:
+ field_dict["networkType"] = network_type
if networks is not UNSET:
field_dict["networks"] = networks
if nodes is not UNSET:
field_dict["nodes"] = nodes
if warnings is not UNSET:
field_dict["warnings"] = warnings
- if compound_queries_enabled is not UNSET:
- field_dict["compoundQueriesEnabled"] = compound_queries_enabled
- if default_topology is not UNSET:
- field_dict["default-topology"] = default_topology
- if dpo_authorization_enabled is not UNSET:
- field_dict["dpoAuthorizationEnabled"] = dpo_authorization_enabled
return field_dict
@@ -83,6 +92,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.node import Node
d = src_dict.copy()
+ compound_queries_enabled = d.pop("compoundQueriesEnabled", UNSET)
+
+ default_topology = d.pop("defaultTopology", UNSET)
+
+ dpo_authorization_enabled = d.pop("dpoAuthorizationEnabled", UNSET)
+
+ _network_type = d.pop("networkType", UNSET)
+ network_type: Union[Unset, GetNetworkMetadataResponse200NetworkType]
+ if isinstance(_network_type, Unset):
+ network_type = UNSET
+ else:
+ network_type = GetNetworkMetadataResponse200NetworkType(_network_type)
+
networks = []
_networks = d.pop("networks", UNSET)
for networks_item_data in _networks or []:
@@ -99,19 +121,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
warnings = cast(List[str], d.pop("warnings", UNSET))
- compound_queries_enabled = d.pop("compoundQueriesEnabled", UNSET)
-
- default_topology = d.pop("default-topology", UNSET)
-
- dpo_authorization_enabled = d.pop("dpoAuthorizationEnabled", UNSET)
-
get_network_metadata_response_200 = cls(
- networks=networks,
- nodes=nodes,
- warnings=warnings,
compound_queries_enabled=compound_queries_enabled,
default_topology=default_topology,
dpo_authorization_enabled=dpo_authorization_enabled,
+ network_type=network_type,
+ networks=networks,
+ nodes=nodes,
+ warnings=warnings,
)
get_network_metadata_response_200.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/get_network_metadata_response_200_network_type.py b/src/tuneinsight/api/sdk/models/get_network_metadata_response_200_network_type.py
new file mode 100644
index 0000000..877a721
--- /dev/null
+++ b/src/tuneinsight/api/sdk/models/get_network_metadata_response_200_network_type.py
@@ -0,0 +1,9 @@
+from enum import Enum
+
+
+class GetNetworkMetadataResponse200NetworkType(str, Enum):
+ DEFAULT = "default"
+ SSE = "sse"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/src/tuneinsight/api/sdk/models/gwas.py b/src/tuneinsight/api/sdk/models/gwas.py
index 03a0fec..3e81053 100644
--- a/src/tuneinsight/api/sdk/models/gwas.py
+++ b/src/tuneinsight/api/sdk/models/gwas.py
@@ -24,8 +24,12 @@ class GWAS:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -36,8 +40,7 @@ class GWAS:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -45,8 +48,18 @@ class GWAS:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -54,109 +67,90 @@ class GWAS:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ covariates (Union[Unset, List[str]]): list of columns holding the covariate values
+ locus_range (Union[Unset, LocusRange]): range specification for locus genomic positions
matching_params (Union[Unset, MatchingParams]): parameters relevant for matching
target_label (Union[Unset, str]): target to use from the clinical datasets
variants_organization (Union[Unset, str]): organization holding the variants
- covariates (Union[Unset, List[str]]): list of columns holding the covariate values
- locus_range (Union[Unset, LocusRange]): range specification for locus genomic positions
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
+ covariates: Union[Unset, List[str]] = UNSET
+ locus_range: Union[Unset, "LocusRange"] = UNSET
matching_params: Union[Unset, "MatchingParams"] = UNSET
target_label: Union[Unset, str] = UNSET
variants_organization: Union[Unset, str] = UNSET
- covariates: Union[Unset, List[str]] = UNSET
- locus_range: Union[Unset, "LocusRange"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
- matching_params: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.matching_params, Unset):
- matching_params = self.matching_params.to_dict()
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
- target_label = self.target_label
- variants_organization = self.variants_organization
covariates: Union[Unset, List[str]] = UNSET
if not isinstance(self.covariates, Unset):
covariates = self.covariates
@@ -165,6 +159,13 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.locus_range, Unset):
locus_range = self.locus_range.to_dict()
+ matching_params: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.matching_params, Unset):
+ matching_params = self.matching_params.to_dict()
+
+ target_label = self.target_label
+ variants_organization = self.variants_organization
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
@@ -172,56 +173,56 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
+ if covariates is not UNSET:
+ field_dict["covariates"] = covariates
+ if locus_range is not UNSET:
+ field_dict["locusRange"] = locus_range
if matching_params is not UNSET:
field_dict["matchingParams"] = matching_params
if target_label is not UNSET:
field_dict["targetLabel"] = target_label
if variants_organization is not UNSET:
field_dict["variantsOrganization"] = variants_organization
- if covariates is not UNSET:
- field_dict["covariates"] = covariates
- if locus_range is not UNSET:
- field_dict["locusRange"] = locus_range
return field_dict
@@ -237,14 +238,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -253,10 +247,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -264,6 +269,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -275,7 +294,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
+ _dp_policy = d.pop("DPPolicy", UNSET)
+ dp_policy: Union[Unset, DPPolicy]
+ if isinstance(_dp_policy, Unset):
+ dp_policy = UNSET
+ else:
+ dp_policy = DPPolicy.from_dict(_dp_policy)
_local_input = d.pop("localInput", UNSET)
local_input: Union[Unset, LocalInput]
@@ -284,28 +308,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
local_input = LocalInput.from_dict(_local_input)
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
+ covariates = cast(List[str], d.pop("covariates", UNSET))
- _dp_policy = d.pop("DPPolicy", UNSET)
- dp_policy: Union[Unset, DPPolicy]
- if isinstance(_dp_policy, Unset):
- dp_policy = UNSET
+ _locus_range = d.pop("locusRange", UNSET)
+ locus_range: Union[Unset, LocusRange]
+ if isinstance(_locus_range, Unset):
+ locus_range = UNSET
else:
- dp_policy = DPPolicy.from_dict(_dp_policy)
-
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ locus_range = LocusRange.from_dict(_locus_range)
_matching_params = d.pop("matchingParams", UNSET)
matching_params: Union[Unset, MatchingParams]
@@ -318,42 +328,33 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
variants_organization = d.pop("variantsOrganization", UNSET)
- covariates = cast(List[str], d.pop("covariates", UNSET))
-
- _locus_range = d.pop("locusRange", UNSET)
- locus_range: Union[Unset, LocusRange]
- if isinstance(_locus_range, Unset):
- locus_range = UNSET
- else:
- locus_range = LocusRange.from_dict(_locus_range)
-
gwas = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
+ covariates=covariates,
+ locus_range=locus_range,
matching_params=matching_params,
target_label=target_label,
variants_organization=variants_organization,
- covariates=covariates,
- locus_range=locus_range,
)
gwas.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/hybrid_fl.py b/src/tuneinsight/api/sdk/models/hybrid_fl.py
index 97e759e..9cf615f 100644
--- a/src/tuneinsight/api/sdk/models/hybrid_fl.py
+++ b/src/tuneinsight/api/sdk/models/hybrid_fl.py
@@ -23,8 +23,12 @@ class HybridFL:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class HybridFL:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class HybridFL:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,105 +66,92 @@ class HybridFL:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
- task_id (Union[Unset, str]):
learning_params (Union[Unset, HybridFLLearningParams]): Hyperparameters for the Hybrid Federated Learning
task_def (Union[Unset, str]):
+ task_id (Union[Unset, str]):
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
- task_id: Union[Unset, str] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
learning_params: Union[Unset, "HybridFLLearningParams"] = UNSET
task_def: Union[Unset, str] = UNSET
+ task_id: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
- task_id = self.task_id
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
learning_params: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.learning_params, Unset):
learning_params = self.learning_params.to_dict()
task_def = self.task_def
+ task_id = self.task_id
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -160,52 +160,52 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
- if task_id is not UNSET:
- field_dict["taskId"] = task_id
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if learning_params is not UNSET:
field_dict["learningParams"] = learning_params
if task_def is not UNSET:
field_dict["taskDef"] = task_def
+ if task_id is not UNSET:
+ field_dict["taskId"] = task_id
return field_dict
@@ -220,14 +220,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -236,10 +229,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -247,6 +251,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -258,19 +276,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -278,19 +283,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
-
- task_id = d.pop("taskId", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
_learning_params = d.pop("learningParams", UNSET)
learning_params: Union[Unset, HybridFLLearningParams]
@@ -301,31 +299,33 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
task_def = d.pop("taskDef", UNSET)
+ task_id = d.pop("taskId", UNSET)
+
hybrid_fl = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
- task_id=task_id,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
learning_params=learning_params,
task_def=task_def,
+ task_id=task_id,
)
hybrid_fl.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/hybrid_fl_learning_params.py b/src/tuneinsight/api/sdk/models/hybrid_fl_learning_params.py
index ee87653..4d3427b 100644
--- a/src/tuneinsight/api/sdk/models/hybrid_fl_learning_params.py
+++ b/src/tuneinsight/api/sdk/models/hybrid_fl_learning_params.py
@@ -13,137 +13,136 @@ class HybridFLLearningParams:
"""Hyperparameters for the Hybrid Federated Learning
Attributes:
- num_workers (Union[Unset, int]): Number of workers loading the data for training in the python-server
- strategy (Union[Unset, AggregationStrategy]): weighting aggregation strategy Default:
- AggregationStrategy.CONSTANT.
+ add_noise (Union[Unset, bool]): Whether to add differential privacy or not to the HybridFL Default: True.
+ delta (Union[Unset, float]): Delta parameter of the differential privacy in HybridFL
+ fl_rounds (Union[Unset, int]): Number of federated rounds of the Hybrid FL
+ gradient_clipping (Union[Unset, float]): Gradient clipping to apply for the training and the noise computation
+ local_epochs (Union[Unset, int]): Number of local epochs of the Hybrid FL between aggregations
+ momentum (Union[Unset, float]): Momentum of the optimizer in the python-server
use_clipping_factor (Union[Unset, bool]): If set to true, gradient clipping is adjusted specifically at each
layer Default: True.
batch_size (Union[Unset, int]): Batch size for the training in the python-server
- delta (Union[Unset, float]): Delta parameter of the differential privacy in HybridFL
encrypt_aggregation (Union[Unset, bool]): Whether to to the aggregation encrypted or not in HybridFL Default:
True.
- momentum (Union[Unset, float]): Momentum of the optimizer in the python-server
- learning_rate (Union[Unset, float]): Learning rate of the optimizer in the python-server
- local_epochs (Union[Unset, int]): Number of local epochs of the Hybrid FL between aggregations
- add_noise (Union[Unset, bool]): Whether to add differential privacy or not to the HybridFL Default: True.
epsilon (Union[Unset, float]): Epsilon parameter of the differential privacy in HybridFL
- fl_rounds (Union[Unset, int]): Number of federated rounds of the Hybrid FL
- gradient_clipping (Union[Unset, float]): Gradient clipping to apply for the training and the noise computation
+ learning_rate (Union[Unset, float]): Learning rate of the optimizer in the python-server
+ num_workers (Union[Unset, int]): Number of workers loading the data for training in the python-server
+ strategy (Union[Unset, AggregationStrategy]): weighting aggregation strategy Default:
+ AggregationStrategy.CONSTANT.
"""
- num_workers: Union[Unset, int] = UNSET
- strategy: Union[Unset, AggregationStrategy] = AggregationStrategy.CONSTANT
+ add_noise: Union[Unset, bool] = True
+ delta: Union[Unset, float] = UNSET
+ fl_rounds: Union[Unset, int] = UNSET
+ gradient_clipping: Union[Unset, float] = UNSET
+ local_epochs: Union[Unset, int] = UNSET
+ momentum: Union[Unset, float] = UNSET
use_clipping_factor: Union[Unset, bool] = True
batch_size: Union[Unset, int] = UNSET
- delta: Union[Unset, float] = UNSET
encrypt_aggregation: Union[Unset, bool] = True
- momentum: Union[Unset, float] = UNSET
- learning_rate: Union[Unset, float] = UNSET
- local_epochs: Union[Unset, int] = UNSET
- add_noise: Union[Unset, bool] = True
epsilon: Union[Unset, float] = UNSET
- fl_rounds: Union[Unset, int] = UNSET
- gradient_clipping: Union[Unset, float] = UNSET
+ learning_rate: Union[Unset, float] = UNSET
+ num_workers: Union[Unset, int] = UNSET
+ strategy: Union[Unset, AggregationStrategy] = AggregationStrategy.CONSTANT
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- num_workers = self.num_workers
- strategy: Union[Unset, str] = UNSET
- if not isinstance(self.strategy, Unset):
- strategy = self.strategy.value
-
+ add_noise = self.add_noise
+ delta = self.delta
+ fl_rounds = self.fl_rounds
+ gradient_clipping = self.gradient_clipping
+ local_epochs = self.local_epochs
+ momentum = self.momentum
use_clipping_factor = self.use_clipping_factor
batch_size = self.batch_size
- delta = self.delta
encrypt_aggregation = self.encrypt_aggregation
- momentum = self.momentum
- learning_rate = self.learning_rate
- local_epochs = self.local_epochs
- add_noise = self.add_noise
epsilon = self.epsilon
- fl_rounds = self.fl_rounds
- gradient_clipping = self.gradient_clipping
+ learning_rate = self.learning_rate
+ num_workers = self.num_workers
+ strategy: Union[Unset, str] = UNSET
+ if not isinstance(self.strategy, Unset):
+ strategy = self.strategy.value
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if num_workers is not UNSET:
- field_dict["numWorkers"] = num_workers
- if strategy is not UNSET:
- field_dict["strategy"] = strategy
+ if add_noise is not UNSET:
+ field_dict["addNoise"] = add_noise
+ if delta is not UNSET:
+ field_dict["delta"] = delta
+ if fl_rounds is not UNSET:
+ field_dict["flRounds"] = fl_rounds
+ if gradient_clipping is not UNSET:
+ field_dict["gradientClipping"] = gradient_clipping
+ if local_epochs is not UNSET:
+ field_dict["localEpochs"] = local_epochs
+ if momentum is not UNSET:
+ field_dict["momentum"] = momentum
if use_clipping_factor is not UNSET:
field_dict["useClippingFactor"] = use_clipping_factor
if batch_size is not UNSET:
field_dict["batchSize"] = batch_size
- if delta is not UNSET:
- field_dict["delta"] = delta
if encrypt_aggregation is not UNSET:
field_dict["encryptAggregation"] = encrypt_aggregation
- if momentum is not UNSET:
- field_dict["momentum"] = momentum
- if learning_rate is not UNSET:
- field_dict["learningRate"] = learning_rate
- if local_epochs is not UNSET:
- field_dict["localEpochs"] = local_epochs
- if add_noise is not UNSET:
- field_dict["addNoise"] = add_noise
if epsilon is not UNSET:
field_dict["epsilon"] = epsilon
- if fl_rounds is not UNSET:
- field_dict["flRounds"] = fl_rounds
- if gradient_clipping is not UNSET:
- field_dict["gradientClipping"] = gradient_clipping
+ if learning_rate is not UNSET:
+ field_dict["learningRate"] = learning_rate
+ if num_workers is not UNSET:
+ field_dict["numWorkers"] = num_workers
+ if strategy is not UNSET:
+ field_dict["strategy"] = strategy
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- num_workers = d.pop("numWorkers", UNSET)
-
- _strategy = d.pop("strategy", UNSET)
- strategy: Union[Unset, AggregationStrategy]
- if isinstance(_strategy, Unset):
- strategy = UNSET
- else:
- strategy = AggregationStrategy(_strategy)
+ add_noise = d.pop("addNoise", UNSET)
- use_clipping_factor = d.pop("useClippingFactor", UNSET)
+ delta = d.pop("delta", UNSET)
- batch_size = d.pop("batchSize", UNSET)
+ fl_rounds = d.pop("flRounds", UNSET)
- delta = d.pop("delta", UNSET)
+ gradient_clipping = d.pop("gradientClipping", UNSET)
- encrypt_aggregation = d.pop("encryptAggregation", UNSET)
+ local_epochs = d.pop("localEpochs", UNSET)
momentum = d.pop("momentum", UNSET)
- learning_rate = d.pop("learningRate", UNSET)
+ use_clipping_factor = d.pop("useClippingFactor", UNSET)
- local_epochs = d.pop("localEpochs", UNSET)
+ batch_size = d.pop("batchSize", UNSET)
- add_noise = d.pop("addNoise", UNSET)
+ encrypt_aggregation = d.pop("encryptAggregation", UNSET)
epsilon = d.pop("epsilon", UNSET)
- fl_rounds = d.pop("flRounds", UNSET)
+ learning_rate = d.pop("learningRate", UNSET)
- gradient_clipping = d.pop("gradientClipping", UNSET)
+ num_workers = d.pop("numWorkers", UNSET)
+
+ _strategy = d.pop("strategy", UNSET)
+ strategy: Union[Unset, AggregationStrategy]
+ if isinstance(_strategy, Unset):
+ strategy = UNSET
+ else:
+ strategy = AggregationStrategy(_strategy)
hybrid_fl_learning_params = cls(
- num_workers=num_workers,
- strategy=strategy,
+ add_noise=add_noise,
+ delta=delta,
+ fl_rounds=fl_rounds,
+ gradient_clipping=gradient_clipping,
+ local_epochs=local_epochs,
+ momentum=momentum,
use_clipping_factor=use_clipping_factor,
batch_size=batch_size,
- delta=delta,
encrypt_aggregation=encrypt_aggregation,
- momentum=momentum,
- learning_rate=learning_rate,
- local_epochs=local_epochs,
- add_noise=add_noise,
epsilon=epsilon,
- fl_rounds=fl_rounds,
- gradient_clipping=gradient_clipping,
+ learning_rate=learning_rate,
+ num_workers=num_workers,
+ strategy=strategy,
)
hybrid_fl_learning_params.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/instance_configuration.py b/src/tuneinsight/api/sdk/models/instance_configuration.py
new file mode 100644
index 0000000..43982dd
--- /dev/null
+++ b/src/tuneinsight/api/sdk/models/instance_configuration.py
@@ -0,0 +1,66 @@
+from typing import Any, Dict, List, Type, TypeVar, Union
+
+import attr
+
+from ..types import UNSET, Unset
+
+T = TypeVar("T", bound="InstanceConfiguration")
+
+
+@attr.s(auto_attribs=True)
+class InstanceConfiguration:
+ """contains information about the instance's current configuration
+
+ Attributes:
+ config_yaml (Union[Unset, str]): YAML-serialized configuration string.
+ instance_name (Union[Unset, str]): name or alias of the instance
+ """
+
+ config_yaml: Union[Unset, str] = UNSET
+ instance_name: Union[Unset, str] = UNSET
+ additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
+
+ def to_dict(self) -> Dict[str, Any]:
+ config_yaml = self.config_yaml
+ instance_name = self.instance_name
+
+ field_dict: Dict[str, Any] = {}
+ field_dict.update(self.additional_properties)
+ field_dict.update({})
+ if config_yaml is not UNSET:
+ field_dict["configYAML"] = config_yaml
+ if instance_name is not UNSET:
+ field_dict["instanceName"] = instance_name
+
+ return field_dict
+
+ @classmethod
+ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+ d = src_dict.copy()
+ config_yaml = d.pop("configYAML", UNSET)
+
+ instance_name = d.pop("instanceName", UNSET)
+
+ instance_configuration = cls(
+ config_yaml=config_yaml,
+ instance_name=instance_name,
+ )
+
+ instance_configuration.additional_properties = d
+ return instance_configuration
+
+ @property
+ def additional_keys(self) -> List[str]:
+ return list(self.additional_properties.keys())
+
+ def __getitem__(self, key: str) -> Any:
+ return self.additional_properties[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self.additional_properties[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ del self.additional_properties[key]
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.additional_properties
diff --git a/src/tuneinsight/api/sdk/models/key_switched_computation.py b/src/tuneinsight/api/sdk/models/key_switched_computation.py
index 7011eae..fecc4a0 100644
--- a/src/tuneinsight/api/sdk/models/key_switched_computation.py
+++ b/src/tuneinsight/api/sdk/models/key_switched_computation.py
@@ -23,8 +23,12 @@ class KeySwitchedComputation:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class KeySwitchedComputation:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class KeySwitchedComputation:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,109 +66,96 @@ class KeySwitchedComputation:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
- target_public_key (Union[Unset, str]): Unique identifier of a data object.
computation (Union[Unset, ComputationDefinition]): Generic computation.
decrypt_results (Union[Unset, bool]): if true, the key-switched results are decrypted using either the specified
secret key or the secret key from the session
secret_key (Union[Unset, str]): Unique identifier of a data object.
+ target_public_key (Union[Unset, str]): Unique identifier of a data object.
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
- target_public_key: Union[Unset, str] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
computation: Union[Unset, "ComputationDefinition"] = UNSET
decrypt_results: Union[Unset, bool] = UNSET
secret_key: Union[Unset, str] = UNSET
+ target_public_key: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
- target_public_key = self.target_public_key
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
computation: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.computation, Unset):
computation = self.computation.to_dict()
decrypt_results = self.decrypt_results
secret_key = self.secret_key
+ target_public_key = self.target_public_key
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -164,54 +164,54 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
- if target_public_key is not UNSET:
- field_dict["targetPublicKey"] = target_public_key
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if computation is not UNSET:
field_dict["computation"] = computation
if decrypt_results is not UNSET:
field_dict["decryptResults"] = decrypt_results
if secret_key is not UNSET:
field_dict["secretKey"] = secret_key
+ if target_public_key is not UNSET:
+ field_dict["targetPublicKey"] = target_public_key
return field_dict
@@ -226,14 +226,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -242,10 +235,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -253,6 +257,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -264,19 +282,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -284,19 +289,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
-
- target_public_key = d.pop("targetPublicKey", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
_computation = d.pop("computation", UNSET)
computation: Union[Unset, ComputationDefinition]
@@ -309,32 +307,34 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
secret_key = d.pop("secretKey", UNSET)
+ target_public_key = d.pop("targetPublicKey", UNSET)
+
key_switched_computation = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
- target_public_key=target_public_key,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
computation=computation,
decrypt_results=decrypt_results,
secret_key=secret_key,
+ target_public_key=target_public_key,
)
key_switched_computation.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/local_data_selection.py b/src/tuneinsight/api/sdk/models/local_data_selection.py
index 08785c5..4a1dcea 100644
--- a/src/tuneinsight/api/sdk/models/local_data_selection.py
+++ b/src/tuneinsight/api/sdk/models/local_data_selection.py
@@ -21,6 +21,8 @@ class LocalDataSelection:
"""selection to retrieve data from the datasource and preprocess it
Attributes:
+ visible_to_network (Union[Unset, None, bool]): whether the data selection parameters are viewable by other
+ instances in the network.
data_selection (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource from
each node before the computation
description (Union[Unset, str]): optional description for the selection
@@ -30,21 +32,20 @@ class LocalDataSelection:
preview_content_disabled (Union[Unset, None, bool]): whether to disable previewing the content (metadata only)
store_in_database (Union[Unset, None, bool]): whether to store the selection in the database
type (Union[Unset, DataSelectionType]):
- visible_to_network (Union[Unset, None, bool]): whether the data selection parameters are viewable by other
- instances in the network.
- num_local_records (Union[Unset, int]): holds the total number of local records from the selection (only
- displayed when the selection is saved to the database)
- query (Union[Unset, Query]): Data source query
- remote_instance_id (Union[Unset, str]): the name of the remote instance id this selection was retrieved from.
created_at (Union[Unset, str]):
- data_source (Union[Unset, DataSource]):
preview (Union[Unset, DataSourceQueryPreview]): preview of a datasource query
- remote (Union[Unset, bool]): whether the selection was fetched remotely
+ query (Union[Unset, Query]): Data source query
updated_at (Union[Unset, str]):
created_by_user (Union[Unset, str]): creator of the selection
+ data_source (Union[Unset, DataSource]):
id (Union[Unset, str]): id of the selection
+ num_local_records (Union[Unset, int]): holds the total number of local records from the selection (only
+ displayed when the selection is saved to the database)
+ remote (Union[Unset, bool]): whether the selection was fetched remotely
+ remote_instance_id (Union[Unset, str]): the name of the remote instance id this selection was retrieved from.
"""
+ visible_to_network: Union[Unset, None, bool] = UNSET
data_selection: Union[Unset, "ComputationDataSourceParameters"] = UNSET
description: Union[Unset, str] = UNSET
name: Union[Unset, str] = UNSET
@@ -52,20 +53,20 @@ class LocalDataSelection:
preview_content_disabled: Union[Unset, None, bool] = UNSET
store_in_database: Union[Unset, None, bool] = UNSET
type: Union[Unset, DataSelectionType] = UNSET
- visible_to_network: Union[Unset, None, bool] = UNSET
- num_local_records: Union[Unset, int] = UNSET
- query: Union[Unset, "Query"] = UNSET
- remote_instance_id: Union[Unset, str] = UNSET
created_at: Union[Unset, str] = UNSET
- data_source: Union[Unset, "DataSource"] = UNSET
preview: Union[Unset, "DataSourceQueryPreview"] = UNSET
- remote: Union[Unset, bool] = UNSET
+ query: Union[Unset, "Query"] = UNSET
updated_at: Union[Unset, str] = UNSET
created_by_user: Union[Unset, str] = UNSET
+ data_source: Union[Unset, "DataSource"] = UNSET
id: Union[Unset, str] = UNSET
+ num_local_records: Union[Unset, int] = UNSET
+ remote: Union[Unset, bool] = UNSET
+ remote_instance_id: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ visible_to_network = self.visible_to_network
data_selection: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_selection, Unset):
data_selection = self.data_selection.to_dict()
@@ -82,30 +83,31 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.type, Unset):
type = self.type.value
- visible_to_network = self.visible_to_network
- num_local_records = self.num_local_records
+ created_at = self.created_at
+ preview: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.preview, Unset):
+ preview = self.preview.to_dict()
+
query: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.query, Unset):
query = self.query.to_dict()
- remote_instance_id = self.remote_instance_id
- created_at = self.created_at
+ updated_at = self.updated_at
+ created_by_user = self.created_by_user
data_source: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source, Unset):
data_source = self.data_source.to_dict()
- preview: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.preview, Unset):
- preview = self.preview.to_dict()
-
- remote = self.remote
- updated_at = self.updated_at
- created_by_user = self.created_by_user
id = self.id
+ num_local_records = self.num_local_records
+ remote = self.remote
+ remote_instance_id = self.remote_instance_id
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if visible_to_network is not UNSET:
+ field_dict["visibleToNetwork"] = visible_to_network
if data_selection is not UNSET:
field_dict["dataSelection"] = data_selection
if description is not UNSET:
@@ -120,28 +122,26 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["storeInDatabase"] = store_in_database
if type is not UNSET:
field_dict["type"] = type
- if visible_to_network is not UNSET:
- field_dict["visibleToNetwork"] = visible_to_network
- if num_local_records is not UNSET:
- field_dict["numLocalRecords"] = num_local_records
- if query is not UNSET:
- field_dict["query"] = query
- if remote_instance_id is not UNSET:
- field_dict["remoteInstanceId"] = remote_instance_id
if created_at is not UNSET:
field_dict["createdAt"] = created_at
- if data_source is not UNSET:
- field_dict["dataSource"] = data_source
if preview is not UNSET:
field_dict["preview"] = preview
- if remote is not UNSET:
- field_dict["remote"] = remote
+ if query is not UNSET:
+ field_dict["query"] = query
if updated_at is not UNSET:
field_dict["updatedAt"] = updated_at
if created_by_user is not UNSET:
field_dict["createdByUser"] = created_by_user
+ if data_source is not UNSET:
+ field_dict["dataSource"] = data_source
if id is not UNSET:
field_dict["id"] = id
+ if num_local_records is not UNSET:
+ field_dict["numLocalRecords"] = num_local_records
+ if remote is not UNSET:
+ field_dict["remote"] = remote
+ if remote_instance_id is not UNSET:
+ field_dict["remoteInstanceId"] = remote_instance_id
return field_dict
@@ -154,6 +154,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.query import Query
d = src_dict.copy()
+ visible_to_network = d.pop("visibleToNetwork", UNSET)
+
_data_selection = d.pop("dataSelection", UNSET)
data_selection: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_selection, Unset):
@@ -183,9 +185,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
type = DataSelectionType(_type)
- visible_to_network = d.pop("visibleToNetwork", UNSET)
+ created_at = d.pop("createdAt", UNSET)
- num_local_records = d.pop("numLocalRecords", UNSET)
+ _preview = d.pop("preview", UNSET)
+ preview: Union[Unset, DataSourceQueryPreview]
+ if isinstance(_preview, Unset):
+ preview = UNSET
+ else:
+ preview = DataSourceQueryPreview.from_dict(_preview)
_query = d.pop("query", UNSET)
query: Union[Unset, Query]
@@ -194,9 +201,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
query = Query.from_dict(_query)
- remote_instance_id = d.pop("remoteInstanceId", UNSET)
+ updated_at = d.pop("updatedAt", UNSET)
- created_at = d.pop("createdAt", UNSET)
+ created_by_user = d.pop("createdByUser", UNSET)
_data_source = d.pop("dataSource", UNSET)
data_source: Union[Unset, DataSource]
@@ -205,22 +212,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
data_source = DataSource.from_dict(_data_source)
- _preview = d.pop("preview", UNSET)
- preview: Union[Unset, DataSourceQueryPreview]
- if isinstance(_preview, Unset):
- preview = UNSET
- else:
- preview = DataSourceQueryPreview.from_dict(_preview)
-
- remote = d.pop("remote", UNSET)
+ id = d.pop("id", UNSET)
- updated_at = d.pop("updatedAt", UNSET)
+ num_local_records = d.pop("numLocalRecords", UNSET)
- created_by_user = d.pop("createdByUser", UNSET)
+ remote = d.pop("remote", UNSET)
- id = d.pop("id", UNSET)
+ remote_instance_id = d.pop("remoteInstanceId", UNSET)
local_data_selection = cls(
+ visible_to_network=visible_to_network,
data_selection=data_selection,
description=description,
name=name,
@@ -228,17 +229,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
preview_content_disabled=preview_content_disabled,
store_in_database=store_in_database,
type=type,
- visible_to_network=visible_to_network,
- num_local_records=num_local_records,
- query=query,
- remote_instance_id=remote_instance_id,
created_at=created_at,
- data_source=data_source,
preview=preview,
- remote=remote,
+ query=query,
updated_at=updated_at,
created_by_user=created_by_user,
+ data_source=data_source,
id=id,
+ num_local_records=num_local_records,
+ remote=remote,
+ remote_instance_id=remote_instance_id,
)
local_data_selection.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/local_data_selection_definition.py b/src/tuneinsight/api/sdk/models/local_data_selection_definition.py
index 6a8a401..a1ff4b4 100644
--- a/src/tuneinsight/api/sdk/models/local_data_selection_definition.py
+++ b/src/tuneinsight/api/sdk/models/local_data_selection_definition.py
@@ -18,6 +18,8 @@ class LocalDataSelectionDefinition:
"""datasource selection definition. A selection is a "query" or data selection definition to run on the datasource
Attributes:
+ visible_to_network (Union[Unset, None, bool]): whether the data selection parameters are viewable by other
+ instances in the network.
data_selection (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource from
each node before the computation
description (Union[Unset, str]): optional description for the selection
@@ -27,10 +29,9 @@ class LocalDataSelectionDefinition:
preview_content_disabled (Union[Unset, None, bool]): whether to disable previewing the content (metadata only)
store_in_database (Union[Unset, None, bool]): whether to store the selection in the database
type (Union[Unset, DataSelectionType]):
- visible_to_network (Union[Unset, None, bool]): whether the data selection parameters are viewable by other
- instances in the network.
"""
+ visible_to_network: Union[Unset, None, bool] = UNSET
data_selection: Union[Unset, "ComputationDataSourceParameters"] = UNSET
description: Union[Unset, str] = UNSET
name: Union[Unset, str] = UNSET
@@ -38,10 +39,10 @@ class LocalDataSelectionDefinition:
preview_content_disabled: Union[Unset, None, bool] = UNSET
store_in_database: Union[Unset, None, bool] = UNSET
type: Union[Unset, DataSelectionType] = UNSET
- visible_to_network: Union[Unset, None, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ visible_to_network = self.visible_to_network
data_selection: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_selection, Unset):
data_selection = self.data_selection.to_dict()
@@ -58,11 +59,11 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.type, Unset):
type = self.type.value
- visible_to_network = self.visible_to_network
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if visible_to_network is not UNSET:
+ field_dict["visibleToNetwork"] = visible_to_network
if data_selection is not UNSET:
field_dict["dataSelection"] = data_selection
if description is not UNSET:
@@ -77,8 +78,6 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["storeInDatabase"] = store_in_database
if type is not UNSET:
field_dict["type"] = type
- if visible_to_network is not UNSET:
- field_dict["visibleToNetwork"] = visible_to_network
return field_dict
@@ -88,6 +87,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.computation_preprocessing_parameters import ComputationPreprocessingParameters
d = src_dict.copy()
+ visible_to_network = d.pop("visibleToNetwork", UNSET)
+
_data_selection = d.pop("dataSelection", UNSET)
data_selection: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_selection, Unset):
@@ -117,9 +118,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
type = DataSelectionType(_type)
- visible_to_network = d.pop("visibleToNetwork", UNSET)
-
local_data_selection_definition = cls(
+ visible_to_network=visible_to_network,
data_selection=data_selection,
description=description,
name=name,
@@ -127,7 +127,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
preview_content_disabled=preview_content_disabled,
store_in_database=store_in_database,
type=type,
- visible_to_network=visible_to_network,
)
local_data_selection_definition.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/logical_formula.py b/src/tuneinsight/api/sdk/models/logical_formula.py
index 3a8d23f..1188f60 100644
--- a/src/tuneinsight/api/sdk/models/logical_formula.py
+++ b/src/tuneinsight/api/sdk/models/logical_formula.py
@@ -17,19 +17,23 @@ class LogicalFormula:
"""logical formula composing filters
Attributes:
+ single_filter (Union[Unset, Filter]):
left_formula (Union[Unset, LogicalFormula]): logical formula composing filters
operator (Union[Unset, LogicalFormulaOperator]):
right_formula (Union[Unset, LogicalFormula]): logical formula composing filters
- single_filter (Union[Unset, Filter]):
"""
+ single_filter: Union[Unset, "Filter"] = UNSET
left_formula: Union[Unset, "LogicalFormula"] = UNSET
operator: Union[Unset, LogicalFormulaOperator] = UNSET
right_formula: Union[Unset, "LogicalFormula"] = UNSET
- single_filter: Union[Unset, "Filter"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ single_filter: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.single_filter, Unset):
+ single_filter = self.single_filter.to_dict()
+
left_formula: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.left_formula, Unset):
left_formula = self.left_formula.to_dict()
@@ -42,21 +46,17 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.right_formula, Unset):
right_formula = self.right_formula.to_dict()
- single_filter: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.single_filter, Unset):
- single_filter = self.single_filter.to_dict()
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if single_filter is not UNSET:
+ field_dict["singleFilter"] = single_filter
if left_formula is not UNSET:
field_dict["leftFormula"] = left_formula
if operator is not UNSET:
field_dict["operator"] = operator
if right_formula is not UNSET:
field_dict["rightFormula"] = right_formula
- if single_filter is not UNSET:
- field_dict["singleFilter"] = single_filter
return field_dict
@@ -65,6 +65,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.filter_ import Filter
d = src_dict.copy()
+ _single_filter = d.pop("singleFilter", UNSET)
+ single_filter: Union[Unset, Filter]
+ if isinstance(_single_filter, Unset):
+ single_filter = UNSET
+ else:
+ single_filter = Filter.from_dict(_single_filter)
+
_left_formula = d.pop("leftFormula", UNSET)
left_formula: Union[Unset, LogicalFormula]
if isinstance(_left_formula, Unset):
@@ -86,18 +93,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
right_formula = LogicalFormula.from_dict(_right_formula)
- _single_filter = d.pop("singleFilter", UNSET)
- single_filter: Union[Unset, Filter]
- if isinstance(_single_filter, Unset):
- single_filter = UNSET
- else:
- single_filter = Filter.from_dict(_single_filter)
-
logical_formula = cls(
+ single_filter=single_filter,
left_formula=left_formula,
operator=operator,
right_formula=right_formula,
- single_filter=single_filter,
)
logical_formula.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/measurement.py b/src/tuneinsight/api/sdk/models/measurement.py
index bc67e78..edf7676 100644
--- a/src/tuneinsight/api/sdk/models/measurement.py
+++ b/src/tuneinsight/api/sdk/models/measurement.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Type, TypeVar, Union
+from typing import Any, Dict, List, Type, TypeVar, Union, cast
import attr
@@ -12,62 +12,97 @@ class Measurement:
"""measurement done during a specific part of a computation
Attributes:
- start (Union[Unset, str]): start time of the measurement. (RFC 3339 Nano format)
allocated (Union[Unset, int]): total number of bytes allocated during this part.
- description (Union[Unset, str]): description of the computation part.
end (Union[Unset, str]): end time of the measurement. (RFC 3339 Nano format)
+ ingress (Union[Unset, None, int]): number of incoming bytes from the network
+ periodic_allocations (Union[Unset, List[int]]): periodic measures of bytes allocated during this part.
+ start (Union[Unset, str]): start time of the measurement. (RFC 3339 Nano format)
+ description (Union[Unset, str]): description of the computation part.
+ egress (Union[Unset, None, int]): number of outgoing bytes to the network
name (Union[Unset, str]): name of the computation part.
+ time (Union[Unset, None, int]): total time of the measurement in milliseconds
"""
- start: Union[Unset, str] = UNSET
allocated: Union[Unset, int] = UNSET
- description: Union[Unset, str] = UNSET
end: Union[Unset, str] = UNSET
+ ingress: Union[Unset, None, int] = UNSET
+ periodic_allocations: Union[Unset, List[int]] = UNSET
+ start: Union[Unset, str] = UNSET
+ description: Union[Unset, str] = UNSET
+ egress: Union[Unset, None, int] = UNSET
name: Union[Unset, str] = UNSET
+ time: Union[Unset, None, int] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- start = self.start
allocated = self.allocated
- description = self.description
end = self.end
+ ingress = self.ingress
+ periodic_allocations: Union[Unset, List[int]] = UNSET
+ if not isinstance(self.periodic_allocations, Unset):
+ periodic_allocations = self.periodic_allocations
+
+ start = self.start
+ description = self.description
+ egress = self.egress
name = self.name
+ time = self.time
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if start is not UNSET:
- field_dict["start"] = start
if allocated is not UNSET:
field_dict["allocated"] = allocated
- if description is not UNSET:
- field_dict["description"] = description
if end is not UNSET:
field_dict["end"] = end
+ if ingress is not UNSET:
+ field_dict["ingress"] = ingress
+ if periodic_allocations is not UNSET:
+ field_dict["periodicAllocations"] = periodic_allocations
+ if start is not UNSET:
+ field_dict["start"] = start
+ if description is not UNSET:
+ field_dict["description"] = description
+ if egress is not UNSET:
+ field_dict["egress"] = egress
if name is not UNSET:
field_dict["name"] = name
+ if time is not UNSET:
+ field_dict["time"] = time
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- start = d.pop("start", UNSET)
-
allocated = d.pop("allocated", UNSET)
+ end = d.pop("end", UNSET)
+
+ ingress = d.pop("ingress", UNSET)
+
+ periodic_allocations = cast(List[int], d.pop("periodicAllocations", UNSET))
+
+ start = d.pop("start", UNSET)
+
description = d.pop("description", UNSET)
- end = d.pop("end", UNSET)
+ egress = d.pop("egress", UNSET)
name = d.pop("name", UNSET)
+ time = d.pop("time", UNSET)
+
measurement = cls(
- start=start,
allocated=allocated,
- description=description,
end=end,
+ ingress=ingress,
+ periodic_allocations=periodic_allocations,
+ start=start,
+ description=description,
+ egress=egress,
name=name,
+ time=time,
)
measurement.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/model.py b/src/tuneinsight/api/sdk/models/model.py
index b08a022..8b2ee4e 100644
--- a/src/tuneinsight/api/sdk/models/model.py
+++ b/src/tuneinsight/api/sdk/models/model.py
@@ -20,41 +20,32 @@ class Model:
"""Machine learning model metadata definition
Attributes:
- type (Union[Unset, ModelType]): whether the model is local (plaintext) or collective (ciphertext)
- computation_id (Union[Unset, str]): Computation that created this model if collective model
- data_object (Union[Unset, DataObject]): A data object definition.
metadata (Union[Unset, ModelMetadata]): public metadata about the model
model_id (Union[Unset, str]): Unique identifier of a model.
model_params (Union[Unset, ModelParams]): detailed parameters about the model, only returned when getting
specific model
name (Union[Unset, str]): common name for the model
- training_algorithm (Union[Unset, TrainingAlgorithm]): the algorithm used to train the model
- created_at (Union[Unset, str]):
updated_at (Union[Unset, str]):
+ computation_id (Union[Unset, str]): Computation that created this model if collective model
+ created_at (Union[Unset, str]):
+ data_object (Union[Unset, DataObject]): A data object definition.
+ training_algorithm (Union[Unset, TrainingAlgorithm]): the algorithm used to train the model
+ type (Union[Unset, ModelType]): whether the model is local (plaintext) or collective (ciphertext)
"""
- type: Union[Unset, ModelType] = UNSET
- computation_id: Union[Unset, str] = UNSET
- data_object: Union[Unset, "DataObject"] = UNSET
metadata: Union[Unset, "ModelMetadata"] = UNSET
model_id: Union[Unset, str] = UNSET
model_params: Union[Unset, "ModelParams"] = UNSET
name: Union[Unset, str] = UNSET
- training_algorithm: Union[Unset, TrainingAlgorithm] = UNSET
- created_at: Union[Unset, str] = UNSET
updated_at: Union[Unset, str] = UNSET
+ computation_id: Union[Unset, str] = UNSET
+ created_at: Union[Unset, str] = UNSET
+ data_object: Union[Unset, "DataObject"] = UNSET
+ training_algorithm: Union[Unset, TrainingAlgorithm] = UNSET
+ type: Union[Unset, ModelType] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- type: Union[Unset, str] = UNSET
- if not isinstance(self.type, Unset):
- type = self.type.value
-
- computation_id = self.computation_id
- data_object: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.data_object, Unset):
- data_object = self.data_object.to_dict()
-
metadata: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.metadata, Unset):
metadata = self.metadata.to_dict()
@@ -65,22 +56,24 @@ def to_dict(self) -> Dict[str, Any]:
model_params = self.model_params.to_dict()
name = self.name
+ updated_at = self.updated_at
+ computation_id = self.computation_id
+ created_at = self.created_at
+ data_object: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.data_object, Unset):
+ data_object = self.data_object.to_dict()
+
training_algorithm: Union[Unset, str] = UNSET
if not isinstance(self.training_algorithm, Unset):
training_algorithm = self.training_algorithm.value
- created_at = self.created_at
- updated_at = self.updated_at
+ type: Union[Unset, str] = UNSET
+ if not isinstance(self.type, Unset):
+ type = self.type.value
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if type is not UNSET:
- field_dict["type"] = type
- if computation_id is not UNSET:
- field_dict["computationId"] = computation_id
- if data_object is not UNSET:
- field_dict["dataObject"] = data_object
if metadata is not UNSET:
field_dict["metadata"] = metadata
if model_id is not UNSET:
@@ -89,12 +82,18 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["modelParams"] = model_params
if name is not UNSET:
field_dict["name"] = name
- if training_algorithm is not UNSET:
- field_dict["trainingAlgorithm"] = training_algorithm
- if created_at is not UNSET:
- field_dict["createdAt"] = created_at
if updated_at is not UNSET:
field_dict["updatedAt"] = updated_at
+ if computation_id is not UNSET:
+ field_dict["computationId"] = computation_id
+ if created_at is not UNSET:
+ field_dict["createdAt"] = created_at
+ if data_object is not UNSET:
+ field_dict["dataObject"] = data_object
+ if training_algorithm is not UNSET:
+ field_dict["trainingAlgorithm"] = training_algorithm
+ if type is not UNSET:
+ field_dict["type"] = type
return field_dict
@@ -105,22 +104,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.model_params import ModelParams
d = src_dict.copy()
- _type = d.pop("type", UNSET)
- type: Union[Unset, ModelType]
- if isinstance(_type, Unset):
- type = UNSET
- else:
- type = ModelType(_type)
-
- computation_id = d.pop("computationId", UNSET)
-
- _data_object = d.pop("dataObject", UNSET)
- data_object: Union[Unset, DataObject]
- if isinstance(_data_object, Unset):
- data_object = UNSET
- else:
- data_object = DataObject.from_dict(_data_object)
-
_metadata = d.pop("metadata", UNSET)
metadata: Union[Unset, ModelMetadata]
if isinstance(_metadata, Unset):
@@ -139,6 +122,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
name = d.pop("name", UNSET)
+ updated_at = d.pop("updatedAt", UNSET)
+
+ computation_id = d.pop("computationId", UNSET)
+
+ created_at = d.pop("createdAt", UNSET)
+
+ _data_object = d.pop("dataObject", UNSET)
+ data_object: Union[Unset, DataObject]
+ if isinstance(_data_object, Unset):
+ data_object = UNSET
+ else:
+ data_object = DataObject.from_dict(_data_object)
+
_training_algorithm = d.pop("trainingAlgorithm", UNSET)
training_algorithm: Union[Unset, TrainingAlgorithm]
if isinstance(_training_algorithm, Unset):
@@ -146,21 +142,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
training_algorithm = TrainingAlgorithm(_training_algorithm)
- created_at = d.pop("createdAt", UNSET)
-
- updated_at = d.pop("updatedAt", UNSET)
+ _type = d.pop("type", UNSET)
+ type: Union[Unset, ModelType]
+ if isinstance(_type, Unset):
+ type = UNSET
+ else:
+ type = ModelType(_type)
model = cls(
- type=type,
- computation_id=computation_id,
- data_object=data_object,
metadata=metadata,
model_id=model_id,
model_params=model_params,
name=name,
- training_algorithm=training_algorithm,
- created_at=created_at,
updated_at=updated_at,
+ computation_id=computation_id,
+ created_at=created_at,
+ data_object=data_object,
+ training_algorithm=training_algorithm,
+ type=type,
)
model.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/model_metadata.py b/src/tuneinsight/api/sdk/models/model_metadata.py
index 048eb87..ce54394 100644
--- a/src/tuneinsight/api/sdk/models/model_metadata.py
+++ b/src/tuneinsight/api/sdk/models/model_metadata.py
@@ -12,68 +12,68 @@ class ModelMetadata:
"""public metadata about the model
Attributes:
- classes (Union[Unset, List[str]]): optional labels for classes
- description (Union[Unset, str]): optional description for the model
features (Union[Unset, List[str]]): optional labels for features
num_classes (Union[Unset, int]): number classes
num_features (Union[Unset, int]): number of features
+ classes (Union[Unset, List[str]]): optional labels for classes
+ description (Union[Unset, str]): optional description for the model
"""
- classes: Union[Unset, List[str]] = UNSET
- description: Union[Unset, str] = UNSET
features: Union[Unset, List[str]] = UNSET
num_classes: Union[Unset, int] = UNSET
num_features: Union[Unset, int] = UNSET
+ classes: Union[Unset, List[str]] = UNSET
+ description: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- classes: Union[Unset, List[str]] = UNSET
- if not isinstance(self.classes, Unset):
- classes = self.classes
-
- description = self.description
features: Union[Unset, List[str]] = UNSET
if not isinstance(self.features, Unset):
features = self.features
num_classes = self.num_classes
num_features = self.num_features
+ classes: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.classes, Unset):
+ classes = self.classes
+
+ description = self.description
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if classes is not UNSET:
- field_dict["classes"] = classes
- if description is not UNSET:
- field_dict["description"] = description
if features is not UNSET:
field_dict["features"] = features
if num_classes is not UNSET:
field_dict["numClasses"] = num_classes
if num_features is not UNSET:
field_dict["numFeatures"] = num_features
+ if classes is not UNSET:
+ field_dict["classes"] = classes
+ if description is not UNSET:
+ field_dict["description"] = description
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- classes = cast(List[str], d.pop("classes", UNSET))
-
- description = d.pop("description", UNSET)
-
features = cast(List[str], d.pop("features", UNSET))
num_classes = d.pop("numClasses", UNSET)
num_features = d.pop("numFeatures", UNSET)
+ classes = cast(List[str], d.pop("classes", UNSET))
+
+ description = d.pop("description", UNSET)
+
model_metadata = cls(
- classes=classes,
- description=description,
features=features,
num_classes=num_classes,
num_features=num_features,
+ classes=classes,
+ description=description,
)
model_metadata.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/network.py b/src/tuneinsight/api/sdk/models/network.py
index 999cc73..a21f544 100644
--- a/src/tuneinsight/api/sdk/models/network.py
+++ b/src/tuneinsight/api/sdk/models/network.py
@@ -1,7 +1,8 @@
-from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast
import attr
+from ..models.network_type import NetworkType
from ..models.network_visibility_type import NetworkVisibilityType
from ..models.topology import Topology
from ..types import UNSET, Unset
@@ -18,21 +19,34 @@ class Network:
"""Network that represents a set of nodes
Attributes:
+ name (Union[Unset, str]):
+ network_type (Union[Unset, NetworkType]): Network Type. 'default' or 'sse'. In a NAT network, leaf node use SSE
+ to connect to the root.
nodes (Union[Unset, List['Node']]):
+ restricted (Union[Unset, None, bool]): if set, then the network can only be viewed by the users in the network.
+ (does not apply to projects)
topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are
connected to a central node. In tree topology all nodes are connected and aware of each other.
+ users (Union[Unset, List[str]]): list of users in the network
visibility_type (Union[Unset, NetworkVisibilityType]): represents the type of visibility leaf nodes have in a
network
- name (Union[Unset, str]):
"""
+ name: Union[Unset, str] = UNSET
+ network_type: Union[Unset, NetworkType] = UNSET
nodes: Union[Unset, List["Node"]] = UNSET
+ restricted: Union[Unset, None, bool] = UNSET
topology: Union[Unset, Topology] = UNSET
+ users: Union[Unset, List[str]] = UNSET
visibility_type: Union[Unset, NetworkVisibilityType] = UNSET
- name: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ name = self.name
+ network_type: Union[Unset, str] = UNSET
+ if not isinstance(self.network_type, Unset):
+ network_type = self.network_type.value
+
nodes: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.nodes, Unset):
nodes = []
@@ -41,27 +55,36 @@ def to_dict(self) -> Dict[str, Any]:
nodes.append(nodes_item)
+ restricted = self.restricted
topology: Union[Unset, str] = UNSET
if not isinstance(self.topology, Unset):
topology = self.topology.value
+ users: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.users, Unset):
+ users = self.users
+
visibility_type: Union[Unset, str] = UNSET
if not isinstance(self.visibility_type, Unset):
visibility_type = self.visibility_type.value
- name = self.name
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if name is not UNSET:
+ field_dict["name"] = name
+ if network_type is not UNSET:
+ field_dict["networkType"] = network_type
if nodes is not UNSET:
field_dict["nodes"] = nodes
+ if restricted is not UNSET:
+ field_dict["restricted"] = restricted
if topology is not UNSET:
field_dict["topology"] = topology
+ if users is not UNSET:
+ field_dict["users"] = users
if visibility_type is not UNSET:
field_dict["visibilityType"] = visibility_type
- if name is not UNSET:
- field_dict["name"] = name
return field_dict
@@ -70,6 +93,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.node import Node
d = src_dict.copy()
+ name = d.pop("name", UNSET)
+
+ _network_type = d.pop("networkType", UNSET)
+ network_type: Union[Unset, NetworkType]
+ if isinstance(_network_type, Unset):
+ network_type = UNSET
+ else:
+ network_type = NetworkType(_network_type)
+
nodes = []
_nodes = d.pop("nodes", UNSET)
for nodes_item_data in _nodes or []:
@@ -77,6 +109,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
nodes.append(nodes_item)
+ restricted = d.pop("restricted", UNSET)
+
_topology = d.pop("topology", UNSET)
topology: Union[Unset, Topology]
if isinstance(_topology, Unset):
@@ -84,6 +118,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
topology = Topology(_topology)
+ users = cast(List[str], d.pop("users", UNSET))
+
_visibility_type = d.pop("visibilityType", UNSET)
visibility_type: Union[Unset, NetworkVisibilityType]
if isinstance(_visibility_type, Unset):
@@ -91,13 +127,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
visibility_type = NetworkVisibilityType(_visibility_type)
- name = d.pop("name", UNSET)
-
network = cls(
+ name=name,
+ network_type=network_type,
nodes=nodes,
+ restricted=restricted,
topology=topology,
+ users=users,
visibility_type=visibility_type,
- name=name,
)
network.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/network_type.py b/src/tuneinsight/api/sdk/models/network_type.py
new file mode 100644
index 0000000..d94ce2b
--- /dev/null
+++ b/src/tuneinsight/api/sdk/models/network_type.py
@@ -0,0 +1,9 @@
+from enum import Enum
+
+
+class NetworkType(str, Enum):
+ DEFAULT = "default"
+ SSE = "sse"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/src/tuneinsight/api/sdk/models/node.py b/src/tuneinsight/api/sdk/models/node.py
index 50568f0..1fe0333 100644
--- a/src/tuneinsight/api/sdk/models/node.py
+++ b/src/tuneinsight/api/sdk/models/node.py
@@ -16,69 +16,80 @@ class Node:
"""Node or agent of the network
Attributes:
- is_in_network (Union[Unset, bool]): True if the node can be found in the instance's network. If False, then the
- node information cannot be completed.
- name (Union[Unset, str]):
- api_path (Union[Unset, str]):
- current (Union[Unset, bool]): True if this node is the current one (root node).
is_contributor (Union[Unset, bool]): Indicates if this instance does contribute data.
+ name (Union[Unset, str]):
organization (Union[Unset, Organization]): Organization taking part in a project
- url (Union[Unset, str]):
+ client_username (Union[Unset, str]): client username is the node client (OIDC client_id)'s service account
+ username
certificate (Union[Unset, str]): Certificate of the node, in base64-encoded DER format.
+ current (Union[Unset, bool]): True if this node is the current one (root node).
has_user_management (Union[Unset, bool]): True if the node has the user management APIs enabled.
+ is_in_network (Union[Unset, bool]): True if the node can be found in the instance's network. If False, then the
+ node information cannot be completed.
is_root (Union[Unset, bool]): True if the node is the root node in a tree topology network.
+ is_sse (Union[Unset, None, bool]): True if the node configured to use server-sent events.
+ url (Union[Unset, str]):
+ api_path (Union[Unset, str]):
"""
- is_in_network: Union[Unset, bool] = UNSET
- name: Union[Unset, str] = UNSET
- api_path: Union[Unset, str] = UNSET
- current: Union[Unset, bool] = UNSET
is_contributor: Union[Unset, bool] = UNSET
+ name: Union[Unset, str] = UNSET
organization: Union[Unset, "Organization"] = UNSET
- url: Union[Unset, str] = UNSET
+ client_username: Union[Unset, str] = UNSET
certificate: Union[Unset, str] = UNSET
+ current: Union[Unset, bool] = UNSET
has_user_management: Union[Unset, bool] = UNSET
+ is_in_network: Union[Unset, bool] = UNSET
is_root: Union[Unset, bool] = UNSET
+ is_sse: Union[Unset, None, bool] = UNSET
+ url: Union[Unset, str] = UNSET
+ api_path: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- is_in_network = self.is_in_network
- name = self.name
- api_path = self.api_path
- current = self.current
is_contributor = self.is_contributor
+ name = self.name
organization: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.organization, Unset):
organization = self.organization.to_dict()
- url = self.url
+ client_username = self.client_username
certificate = self.certificate
+ current = self.current
has_user_management = self.has_user_management
+ is_in_network = self.is_in_network
is_root = self.is_root
+ is_sse = self.is_sse
+ url = self.url
+ api_path = self.api_path
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if is_in_network is not UNSET:
- field_dict["isInNetwork"] = is_in_network
- if name is not UNSET:
- field_dict["name"] = name
- if api_path is not UNSET:
- field_dict["apiPath"] = api_path
- if current is not UNSET:
- field_dict["current"] = current
if is_contributor is not UNSET:
field_dict["isContributor"] = is_contributor
+ if name is not UNSET:
+ field_dict["name"] = name
if organization is not UNSET:
field_dict["organization"] = organization
- if url is not UNSET:
- field_dict["url"] = url
+ if client_username is not UNSET:
+ field_dict["clientUsername"] = client_username
if certificate is not UNSET:
field_dict["certificate"] = certificate
+ if current is not UNSET:
+ field_dict["current"] = current
if has_user_management is not UNSET:
field_dict["hasUserManagement"] = has_user_management
+ if is_in_network is not UNSET:
+ field_dict["isInNetwork"] = is_in_network
if is_root is not UNSET:
field_dict["isRoot"] = is_root
+ if is_sse is not UNSET:
+ field_dict["isSSE"] = is_sse
+ if url is not UNSET:
+ field_dict["url"] = url
+ if api_path is not UNSET:
+ field_dict["apiPath"] = api_path
return field_dict
@@ -87,16 +98,10 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.organization import Organization
d = src_dict.copy()
- is_in_network = d.pop("isInNetwork", UNSET)
+ is_contributor = d.pop("isContributor", UNSET)
name = d.pop("name", UNSET)
- api_path = d.pop("apiPath", UNSET)
-
- current = d.pop("current", UNSET)
-
- is_contributor = d.pop("isContributor", UNSET)
-
_organization = d.pop("organization", UNSET)
organization: Union[Unset, Organization]
if isinstance(_organization, Unset):
@@ -104,25 +109,37 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
organization = Organization.from_dict(_organization)
- url = d.pop("url", UNSET)
+ client_username = d.pop("clientUsername", UNSET)
certificate = d.pop("certificate", UNSET)
+ current = d.pop("current", UNSET)
+
has_user_management = d.pop("hasUserManagement", UNSET)
+ is_in_network = d.pop("isInNetwork", UNSET)
+
is_root = d.pop("isRoot", UNSET)
+ is_sse = d.pop("isSSE", UNSET)
+
+ url = d.pop("url", UNSET)
+
+ api_path = d.pop("apiPath", UNSET)
+
node = cls(
- is_in_network=is_in_network,
- name=name,
- api_path=api_path,
- current=current,
is_contributor=is_contributor,
+ name=name,
organization=organization,
- url=url,
+ client_username=client_username,
certificate=certificate,
+ current=current,
has_user_management=has_user_management,
+ is_in_network=is_in_network,
is_root=is_root,
+ is_sse=is_sse,
+ url=url,
+ api_path=api_path,
)
node.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/node_status.py b/src/tuneinsight/api/sdk/models/node_status.py
index 101856d..5bc7c70 100644
--- a/src/tuneinsight/api/sdk/models/node_status.py
+++ b/src/tuneinsight/api/sdk/models/node_status.py
@@ -12,46 +12,54 @@ class NodeStatus:
"""Network Status of a node
Attributes:
- node (Union[Unset, str]): URL of the node
+ rtt (Union[Unset, int]): Round-trip time to this node in milliseconds
status (Union[Unset, str]): Status (ok/nok)
version (Union[Unset, str]): Version of the node
+ node (Union[Unset, str]): URL of the node
"""
- node: Union[Unset, str] = UNSET
+ rtt: Union[Unset, int] = UNSET
status: Union[Unset, str] = UNSET
version: Union[Unset, str] = UNSET
+ node: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- node = self.node
+ rtt = self.rtt
status = self.status
version = self.version
+ node = self.node
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if node is not UNSET:
- field_dict["node"] = node
+ if rtt is not UNSET:
+ field_dict["rtt"] = rtt
if status is not UNSET:
field_dict["status"] = status
if version is not UNSET:
field_dict["version"] = version
+ if node is not UNSET:
+ field_dict["node"] = node
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- node = d.pop("node", UNSET)
+ rtt = d.pop("rtt", UNSET)
status = d.pop("status", UNSET)
version = d.pop("version", UNSET)
+ node = d.pop("node", UNSET)
+
node_status = cls(
- node=node,
+ rtt=rtt,
status=status,
version=version,
+ node=node,
)
node_status.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/organization.py b/src/tuneinsight/api/sdk/models/organization.py
index 91c6eed..9a00ad6 100644
--- a/src/tuneinsight/api/sdk/models/organization.py
+++ b/src/tuneinsight/api/sdk/models/organization.py
@@ -17,23 +17,27 @@ class Organization:
"""Organization taking part in a project
Attributes:
- authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
- coordinates (Union[Unset, OrganizationCoordinates]): Coordinates of the organization. (Decimal degrees, WGS84)
country (Union[Unset, str]): Country code of the organization. (Lower case two-letter ISO 3166-1 alpha-2)
data_officer (Union[Unset, str]): Name of the data officer in charge in the organization
group (Union[Unset, str]): Name of the corresponding keycloak group
name (Union[Unset, str]): Name of the organization
+ authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
+ coordinates (Union[Unset, OrganizationCoordinates]): Coordinates of the organization. (Decimal degrees, WGS84)
"""
- authorization_status: Union[Unset, AuthorizationStatus] = UNSET
- coordinates: Union[Unset, "OrganizationCoordinates"] = UNSET
country: Union[Unset, str] = UNSET
data_officer: Union[Unset, str] = UNSET
group: Union[Unset, str] = UNSET
name: Union[Unset, str] = UNSET
+ authorization_status: Union[Unset, AuthorizationStatus] = UNSET
+ coordinates: Union[Unset, "OrganizationCoordinates"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ country = self.country
+ data_officer = self.data_officer
+ group = self.group
+ name = self.name
authorization_status: Union[Unset, str] = UNSET
if not isinstance(self.authorization_status, Unset):
authorization_status = self.authorization_status.value
@@ -42,18 +46,9 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.coordinates, Unset):
coordinates = self.coordinates.to_dict()
- country = self.country
- data_officer = self.data_officer
- group = self.group
- name = self.name
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if authorization_status is not UNSET:
- field_dict["authorizationStatus"] = authorization_status
- if coordinates is not UNSET:
- field_dict["coordinates"] = coordinates
if country is not UNSET:
field_dict["country"] = country
if data_officer is not UNSET:
@@ -62,6 +57,10 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["group"] = group
if name is not UNSET:
field_dict["name"] = name
+ if authorization_status is not UNSET:
+ field_dict["authorizationStatus"] = authorization_status
+ if coordinates is not UNSET:
+ field_dict["coordinates"] = coordinates
return field_dict
@@ -70,6 +69,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.organization_coordinates import OrganizationCoordinates
d = src_dict.copy()
+ country = d.pop("country", UNSET)
+
+ data_officer = d.pop("dataOfficer", UNSET)
+
+ group = d.pop("group", UNSET)
+
+ name = d.pop("name", UNSET)
+
_authorization_status = d.pop("authorizationStatus", UNSET)
authorization_status: Union[Unset, AuthorizationStatus]
if isinstance(_authorization_status, Unset):
@@ -84,21 +91,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
coordinates = OrganizationCoordinates.from_dict(_coordinates)
- country = d.pop("country", UNSET)
-
- data_officer = d.pop("dataOfficer", UNSET)
-
- group = d.pop("group", UNSET)
-
- name = d.pop("name", UNSET)
-
organization = cls(
- authorization_status=authorization_status,
- coordinates=coordinates,
country=country,
data_officer=data_officer,
group=group,
name=name,
+ authorization_status=authorization_status,
+ coordinates=coordinates,
)
organization.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/participant.py b/src/tuneinsight/api/sdk/models/participant.py
index 6e4c2a7..a2f004b 100644
--- a/src/tuneinsight/api/sdk/models/participant.py
+++ b/src/tuneinsight/api/sdk/models/participant.py
@@ -20,28 +20,23 @@ class Participant:
"""Node participating in a project
Attributes:
- input_metadata (Union[Unset, DataSourceMetadata]): metadata about a datasource
- is_contributor (Union[Unset, None, bool]):
node (Union[Unset, Node]): Node or agent of the network
participation_status (Union[Unset, ParticipationStatus]): participation state of a project's participant
status (Union[Unset, ProjectStatus]): Stages of a project workflow
authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
+ input_metadata (Union[Unset, DataSourceMetadata]): metadata about a datasource
+ is_contributor (Union[Unset, None, bool]):
"""
- input_metadata: Union[Unset, "DataSourceMetadata"] = UNSET
- is_contributor: Union[Unset, None, bool] = UNSET
node: Union[Unset, "Node"] = UNSET
participation_status: Union[Unset, ParticipationStatus] = UNSET
status: Union[Unset, ProjectStatus] = UNSET
authorization_status: Union[Unset, AuthorizationStatus] = UNSET
+ input_metadata: Union[Unset, "DataSourceMetadata"] = UNSET
+ is_contributor: Union[Unset, None, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- input_metadata: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.input_metadata, Unset):
- input_metadata = self.input_metadata.to_dict()
-
- is_contributor = self.is_contributor
node: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.node, Unset):
node = self.node.to_dict()
@@ -58,13 +53,15 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.authorization_status, Unset):
authorization_status = self.authorization_status.value
+ input_metadata: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.input_metadata, Unset):
+ input_metadata = self.input_metadata.to_dict()
+
+ is_contributor = self.is_contributor
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if input_metadata is not UNSET:
- field_dict["inputMetadata"] = input_metadata
- if is_contributor is not UNSET:
- field_dict["isContributor"] = is_contributor
if node is not UNSET:
field_dict["node"] = node
if participation_status is not UNSET:
@@ -73,6 +70,10 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["status"] = status
if authorization_status is not UNSET:
field_dict["authorizationStatus"] = authorization_status
+ if input_metadata is not UNSET:
+ field_dict["inputMetadata"] = input_metadata
+ if is_contributor is not UNSET:
+ field_dict["isContributor"] = is_contributor
return field_dict
@@ -82,15 +83,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.node import Node
d = src_dict.copy()
- _input_metadata = d.pop("inputMetadata", UNSET)
- input_metadata: Union[Unset, DataSourceMetadata]
- if isinstance(_input_metadata, Unset):
- input_metadata = UNSET
- else:
- input_metadata = DataSourceMetadata.from_dict(_input_metadata)
-
- is_contributor = d.pop("isContributor", UNSET)
-
_node = d.pop("node", UNSET)
node: Union[Unset, Node]
if isinstance(_node, Unset):
@@ -119,13 +111,22 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
authorization_status = AuthorizationStatus(_authorization_status)
+ _input_metadata = d.pop("inputMetadata", UNSET)
+ input_metadata: Union[Unset, DataSourceMetadata]
+ if isinstance(_input_metadata, Unset):
+ input_metadata = UNSET
+ else:
+ input_metadata = DataSourceMetadata.from_dict(_input_metadata)
+
+ is_contributor = d.pop("isContributor", UNSET)
+
participant = cls(
- input_metadata=input_metadata,
- is_contributor=is_contributor,
node=node,
participation_status=participation_status,
status=status,
authorization_status=authorization_status,
+ input_metadata=input_metadata,
+ is_contributor=is_contributor,
)
participant.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/post_data_object_json_body.py b/src/tuneinsight/api/sdk/models/post_data_object_json_body.py
index 68a92b6..359d113 100644
--- a/src/tuneinsight/api/sdk/models/post_data_object_json_body.py
+++ b/src/tuneinsight/api/sdk/models/post_data_object_json_body.py
@@ -18,66 +18,57 @@
class PostDataObjectJsonBody:
"""
Attributes:
+ session_id (Union[Unset, str]): Unique identifier of a session
+ data_object_id (Union[Unset, str]): Unique identifier of a data object.
data_object_shared_id (Union[Unset, str]): Shared identifier of a data object.
- data_source_id (Union[Unset, str]): Data source adapting into data object
+ query (Union[Unset, str]):
+ encrypted (Union[Unset, bool]): indicator whether or not the uploaded dataobject is encrypted
private_key (Union[Unset, str]): Unique identifier of a data object.
project_id (Union[Unset, str]): Unique identifier of a project.
shared (Union[Unset, bool]): whether the dataobject is meant to be used as a collective input
- visibility_status (Union[Unset, DataObjectVisibilityStatus]): type of visibility set to the dataobject
- columns (Union[Unset, List[str]]):
- data_object_id (Union[Unset, str]): Unique identifier of a data object.
- query (Union[Unset, str]):
type (Union[Unset, DataObjectType]): type of the dataobject
- session_id (Union[Unset, str]): Unique identifier of a session
- encrypted (Union[Unset, bool]): indicator whether or not the uploaded dataobject is encrypted
- json_path (Union[Unset, str]): JsonPath expression to retrieve data from within JSON-structured data.
+ data_source_id (Union[Unset, str]): Data source adapting into data object
key_info (Union[Unset, KeyInfo]): information about keys
method (Union[Unset, DataObjectCreationMethod]): Method of creation: from a data source or by
encrypting/decrypting a data object, or simply create a new one
public_key (Union[Unset, str]): Unique identifier of a data object.
+ visibility_status (Union[Unset, DataObjectVisibilityStatus]): type of visibility set to the dataobject
+ columns (Union[Unset, List[str]]):
+ json_path (Union[Unset, str]): JsonPath expression to retrieve data from within JSON-structured data.
"""
+ session_id: Union[Unset, str] = UNSET
+ data_object_id: Union[Unset, str] = UNSET
data_object_shared_id: Union[Unset, str] = UNSET
- data_source_id: Union[Unset, str] = UNSET
+ query: Union[Unset, str] = UNSET
+ encrypted: Union[Unset, bool] = UNSET
private_key: Union[Unset, str] = UNSET
project_id: Union[Unset, str] = UNSET
shared: Union[Unset, bool] = UNSET
- visibility_status: Union[Unset, DataObjectVisibilityStatus] = UNSET
- columns: Union[Unset, List[str]] = UNSET
- data_object_id: Union[Unset, str] = UNSET
- query: Union[Unset, str] = UNSET
type: Union[Unset, DataObjectType] = UNSET
- session_id: Union[Unset, str] = UNSET
- encrypted: Union[Unset, bool] = UNSET
- json_path: Union[Unset, str] = UNSET
+ data_source_id: Union[Unset, str] = UNSET
key_info: Union[Unset, "KeyInfo"] = UNSET
method: Union[Unset, DataObjectCreationMethod] = UNSET
public_key: Union[Unset, str] = UNSET
+ visibility_status: Union[Unset, DataObjectVisibilityStatus] = UNSET
+ columns: Union[Unset, List[str]] = UNSET
+ json_path: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ session_id = self.session_id
+ data_object_id = self.data_object_id
data_object_shared_id = self.data_object_shared_id
- data_source_id = self.data_source_id
+ query = self.query
+ encrypted = self.encrypted
private_key = self.private_key
project_id = self.project_id
shared = self.shared
- visibility_status: Union[Unset, str] = UNSET
- if not isinstance(self.visibility_status, Unset):
- visibility_status = self.visibility_status.value
-
- columns: Union[Unset, List[str]] = UNSET
- if not isinstance(self.columns, Unset):
- columns = self.columns
-
- data_object_id = self.data_object_id
- query = self.query
type: Union[Unset, str] = UNSET
if not isinstance(self.type, Unset):
type = self.type.value
- session_id = self.session_id
- encrypted = self.encrypted
- json_path = self.json_path
+ data_source_id = self.data_source_id
key_info: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.key_info, Unset):
key_info = self.key_info.to_dict()
@@ -87,42 +78,51 @@ def to_dict(self) -> Dict[str, Any]:
method = self.method.value
public_key = self.public_key
+ visibility_status: Union[Unset, str] = UNSET
+ if not isinstance(self.visibility_status, Unset):
+ visibility_status = self.visibility_status.value
+
+ columns: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.columns, Unset):
+ columns = self.columns
+
+ json_path = self.json_path
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if session_id is not UNSET:
+ field_dict["sessionId"] = session_id
+ if data_object_id is not UNSET:
+ field_dict["dataObjectId"] = data_object_id
if data_object_shared_id is not UNSET:
field_dict["dataObjectSharedId"] = data_object_shared_id
- if data_source_id is not UNSET:
- field_dict["dataSourceId"] = data_source_id
+ if query is not UNSET:
+ field_dict["query"] = query
+ if encrypted is not UNSET:
+ field_dict["encrypted"] = encrypted
if private_key is not UNSET:
field_dict["privateKey"] = private_key
if project_id is not UNSET:
field_dict["projectId"] = project_id
if shared is not UNSET:
field_dict["shared"] = shared
- if visibility_status is not UNSET:
- field_dict["visibilityStatus"] = visibility_status
- if columns is not UNSET:
- field_dict["columns"] = columns
- if data_object_id is not UNSET:
- field_dict["dataObjectId"] = data_object_id
- if query is not UNSET:
- field_dict["query"] = query
if type is not UNSET:
field_dict["type"] = type
- if session_id is not UNSET:
- field_dict["sessionId"] = session_id
- if encrypted is not UNSET:
- field_dict["encrypted"] = encrypted
- if json_path is not UNSET:
- field_dict["jsonPath"] = json_path
+ if data_source_id is not UNSET:
+ field_dict["dataSourceId"] = data_source_id
if key_info is not UNSET:
field_dict["keyInfo"] = key_info
if method is not UNSET:
field_dict["method"] = method
if public_key is not UNSET:
field_dict["publicKey"] = public_key
+ if visibility_status is not UNSET:
+ field_dict["visibilityStatus"] = visibility_status
+ if columns is not UNSET:
+ field_dict["columns"] = columns
+ if json_path is not UNSET:
+ field_dict["jsonPath"] = json_path
return field_dict
@@ -131,9 +131,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.key_info import KeyInfo
d = src_dict.copy()
+ session_id = d.pop("sessionId", UNSET)
+
+ data_object_id = d.pop("dataObjectId", UNSET)
+
data_object_shared_id = d.pop("dataObjectSharedId", UNSET)
- data_source_id = d.pop("dataSourceId", UNSET)
+ query = d.pop("query", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
private_key = d.pop("privateKey", UNSET)
@@ -141,19 +147,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
shared = d.pop("shared", UNSET)
- _visibility_status = d.pop("visibilityStatus", UNSET)
- visibility_status: Union[Unset, DataObjectVisibilityStatus]
- if isinstance(_visibility_status, Unset):
- visibility_status = UNSET
- else:
- visibility_status = DataObjectVisibilityStatus(_visibility_status)
-
- columns = cast(List[str], d.pop("columns", UNSET))
-
- data_object_id = d.pop("dataObjectId", UNSET)
-
- query = d.pop("query", UNSET)
-
_type = d.pop("type", UNSET)
type: Union[Unset, DataObjectType]
if isinstance(_type, Unset):
@@ -161,11 +154,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
type = DataObjectType(_type)
- session_id = d.pop("sessionId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- json_path = d.pop("jsonPath", UNSET)
+ data_source_id = d.pop("dataSourceId", UNSET)
_key_info = d.pop("keyInfo", UNSET)
key_info: Union[Unset, KeyInfo]
@@ -183,23 +172,34 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
public_key = d.pop("publicKey", UNSET)
+ _visibility_status = d.pop("visibilityStatus", UNSET)
+ visibility_status: Union[Unset, DataObjectVisibilityStatus]
+ if isinstance(_visibility_status, Unset):
+ visibility_status = UNSET
+ else:
+ visibility_status = DataObjectVisibilityStatus(_visibility_status)
+
+ columns = cast(List[str], d.pop("columns", UNSET))
+
+ json_path = d.pop("jsonPath", UNSET)
+
post_data_object_json_body = cls(
+ session_id=session_id,
+ data_object_id=data_object_id,
data_object_shared_id=data_object_shared_id,
- data_source_id=data_source_id,
+ query=query,
+ encrypted=encrypted,
private_key=private_key,
project_id=project_id,
shared=shared,
- visibility_status=visibility_status,
- columns=columns,
- data_object_id=data_object_id,
- query=query,
type=type,
- session_id=session_id,
- encrypted=encrypted,
- json_path=json_path,
+ data_source_id=data_source_id,
key_info=key_info,
method=method,
public_key=public_key,
+ visibility_status=visibility_status,
+ columns=columns,
+ json_path=json_path,
)
post_data_object_json_body.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/post_llm_request_json_body_prompt_args.py b/src/tuneinsight/api/sdk/models/post_llm_request_json_body_prompt_args.py
index 72c7896..5b4d3d1 100644
--- a/src/tuneinsight/api/sdk/models/post_llm_request_json_body_prompt_args.py
+++ b/src/tuneinsight/api/sdk/models/post_llm_request_json_body_prompt_args.py
@@ -12,38 +12,36 @@ class PostLlmRequestJsonBodyPromptArgs:
"""Parameters of the prompt as a dict
Attributes:
- computation_labels (Union[Unset, str]): (AI-Explainer) Labels of the computation to explain
computation_results (Union[Unset, str]): (AI-Explainer) Results of the computation to explain
computation_type (Union[Unset, str]): (AI-Explainer) Type of the computation to explain
query (Union[Unset, str]): User's additional query
rdf_filter (Union[Unset, str]): (SPARQL) Filter for SPARQL relations and subclasses to include
rdf_schema (Union[Unset, str]): (SPARQL) SPARQL schema
sql_schema (Union[Unset, str]): (SQL) SQL schema
+ computation_labels (Union[Unset, str]): (AI-Explainer) Labels of the computation to explain
"""
- computation_labels: Union[Unset, str] = UNSET
computation_results: Union[Unset, str] = UNSET
computation_type: Union[Unset, str] = UNSET
query: Union[Unset, str] = UNSET
rdf_filter: Union[Unset, str] = UNSET
rdf_schema: Union[Unset, str] = UNSET
sql_schema: Union[Unset, str] = UNSET
+ computation_labels: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- computation_labels = self.computation_labels
computation_results = self.computation_results
computation_type = self.computation_type
query = self.query
rdf_filter = self.rdf_filter
rdf_schema = self.rdf_schema
sql_schema = self.sql_schema
+ computation_labels = self.computation_labels
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if computation_labels is not UNSET:
- field_dict["computation_labels"] = computation_labels
if computation_results is not UNSET:
field_dict["computation_results"] = computation_results
if computation_type is not UNSET:
@@ -56,14 +54,14 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["rdf_schema"] = rdf_schema
if sql_schema is not UNSET:
field_dict["sql_schema"] = sql_schema
+ if computation_labels is not UNSET:
+ field_dict["computation_labels"] = computation_labels
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- computation_labels = d.pop("computation_labels", UNSET)
-
computation_results = d.pop("computation_results", UNSET)
computation_type = d.pop("computation_type", UNSET)
@@ -76,14 +74,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
sql_schema = d.pop("sql_schema", UNSET)
+ computation_labels = d.pop("computation_labels", UNSET)
+
post_llm_request_json_body_prompt_args = cls(
- computation_labels=computation_labels,
computation_results=computation_results,
computation_type=computation_type,
query=query,
rdf_filter=rdf_filter,
rdf_schema=rdf_schema,
sql_schema=sql_schema,
+ computation_labels=computation_labels,
)
post_llm_request_json_body_prompt_args.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/post_user_response_201.py b/src/tuneinsight/api/sdk/models/post_user_response_201.py
index a06c861..11bf7b8 100644
--- a/src/tuneinsight/api/sdk/models/post_user_response_201.py
+++ b/src/tuneinsight/api/sdk/models/post_user_response_201.py
@@ -11,38 +11,38 @@
class PostUserResponse201:
"""
Attributes:
- id (Union[Unset, str]): User id
email (Union[Unset, str]): User email
+ id (Union[Unset, str]): User id
"""
- id: Union[Unset, str] = UNSET
email: Union[Unset, str] = UNSET
+ id: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- id = self.id
email = self.email
+ id = self.id
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if id is not UNSET:
- field_dict["id"] = id
if email is not UNSET:
field_dict["email"] = email
+ if id is not UNSET:
+ field_dict["id"] = id
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- id = d.pop("id", UNSET)
-
email = d.pop("email", UNSET)
+ id = d.pop("id", UNSET)
+
post_user_response_201 = cls(
- id=id,
email=email,
+ id=id,
)
post_user_response_201.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/prediction_params.py b/src/tuneinsight/api/sdk/models/prediction_params.py
index 2c42515..d14a3fe 100644
--- a/src/tuneinsight/api/sdk/models/prediction_params.py
+++ b/src/tuneinsight/api/sdk/models/prediction_params.py
@@ -17,30 +17,30 @@ class PredictionParams:
"""subset of parameters required for only the prediction
Attributes:
- regression_type (Union[Unset, RegressionType]): type of the regression
approximation_params (Union[Unset, ApproximationParams]): parameters for polynomial approximation
+ regression_type (Union[Unset, RegressionType]): type of the regression
"""
- regression_type: Union[Unset, RegressionType] = UNSET
approximation_params: Union[Unset, "ApproximationParams"] = UNSET
+ regression_type: Union[Unset, RegressionType] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- regression_type: Union[Unset, str] = UNSET
- if not isinstance(self.regression_type, Unset):
- regression_type = self.regression_type.value
-
approximation_params: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.approximation_params, Unset):
approximation_params = self.approximation_params.to_dict()
+ regression_type: Union[Unset, str] = UNSET
+ if not isinstance(self.regression_type, Unset):
+ regression_type = self.regression_type.value
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if regression_type is not UNSET:
- field_dict["regressionType"] = regression_type
if approximation_params is not UNSET:
field_dict["approximationParams"] = approximation_params
+ if regression_type is not UNSET:
+ field_dict["regressionType"] = regression_type
return field_dict
@@ -49,13 +49,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.approximation_params import ApproximationParams
d = src_dict.copy()
- _regression_type = d.pop("regressionType", UNSET)
- regression_type: Union[Unset, RegressionType]
- if isinstance(_regression_type, Unset):
- regression_type = UNSET
- else:
- regression_type = RegressionType(_regression_type)
-
_approximation_params = d.pop("approximationParams", UNSET)
approximation_params: Union[Unset, ApproximationParams]
if isinstance(_approximation_params, Unset):
@@ -63,9 +56,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
approximation_params = ApproximationParams.from_dict(_approximation_params)
+ _regression_type = d.pop("regressionType", UNSET)
+ regression_type: Union[Unset, RegressionType]
+ if isinstance(_regression_type, Unset):
+ regression_type = UNSET
+ else:
+ regression_type = RegressionType(_regression_type)
+
prediction_params = cls(
- regression_type=regression_type,
approximation_params=approximation_params,
+ regression_type=regression_type,
)
prediction_params.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/privacy_summary.py b/src/tuneinsight/api/sdk/models/privacy_summary.py
index afcea80..b14a724 100644
--- a/src/tuneinsight/api/sdk/models/privacy_summary.py
+++ b/src/tuneinsight/api/sdk/models/privacy_summary.py
@@ -19,20 +19,24 @@ class PrivacySummary:
"""Privacy summary for a project
Attributes:
+ authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
computation (Union[Unset, PrivacySummaryComputation]): Description of the computation that will be run for the
project
data_source (Union[Unset, DataSource]):
execution_quota (Union[Unset, ExecutionQuota]): stores information about the status of the execution quota
- authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
"""
+ authorization_status: Union[Unset, AuthorizationStatus] = UNSET
computation: Union[Unset, "PrivacySummaryComputation"] = UNSET
data_source: Union[Unset, "DataSource"] = UNSET
execution_quota: Union[Unset, "ExecutionQuota"] = UNSET
- authorization_status: Union[Unset, AuthorizationStatus] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ authorization_status: Union[Unset, str] = UNSET
+ if not isinstance(self.authorization_status, Unset):
+ authorization_status = self.authorization_status.value
+
computation: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.computation, Unset):
computation = self.computation.to_dict()
@@ -45,21 +49,17 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.execution_quota, Unset):
execution_quota = self.execution_quota.to_dict()
- authorization_status: Union[Unset, str] = UNSET
- if not isinstance(self.authorization_status, Unset):
- authorization_status = self.authorization_status.value
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if authorization_status is not UNSET:
+ field_dict["authorizationStatus"] = authorization_status
if computation is not UNSET:
field_dict["computation"] = computation
if data_source is not UNSET:
field_dict["dataSource"] = data_source
if execution_quota is not UNSET:
field_dict["executionQuota"] = execution_quota
- if authorization_status is not UNSET:
- field_dict["authorizationStatus"] = authorization_status
return field_dict
@@ -70,6 +70,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.privacy_summary_computation import PrivacySummaryComputation
d = src_dict.copy()
+ _authorization_status = d.pop("authorizationStatus", UNSET)
+ authorization_status: Union[Unset, AuthorizationStatus]
+ if isinstance(_authorization_status, Unset):
+ authorization_status = UNSET
+ else:
+ authorization_status = AuthorizationStatus(_authorization_status)
+
_computation = d.pop("computation", UNSET)
computation: Union[Unset, PrivacySummaryComputation]
if isinstance(_computation, Unset):
@@ -91,18 +98,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
execution_quota = ExecutionQuota.from_dict(_execution_quota)
- _authorization_status = d.pop("authorizationStatus", UNSET)
- authorization_status: Union[Unset, AuthorizationStatus]
- if isinstance(_authorization_status, Unset):
- authorization_status = UNSET
- else:
- authorization_status = AuthorizationStatus(_authorization_status)
-
privacy_summary = cls(
+ authorization_status=authorization_status,
computation=computation,
data_source=data_source,
execution_quota=execution_quota,
- authorization_status=authorization_status,
)
privacy_summary.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/private_search.py b/src/tuneinsight/api/sdk/models/private_search.py
index af412b8..1a9e9a9 100644
--- a/src/tuneinsight/api/sdk/models/private_search.py
+++ b/src/tuneinsight/api/sdk/models/private_search.py
@@ -22,8 +22,12 @@ class PrivateSearch:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class PrivateSearch:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class PrivateSearch:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,52 +65,39 @@ class PrivateSearch:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
pir_dataset_object_id (Union[Unset, str]): Unique identifier of a data object.
pir_search_object_id (Union[Unset, str]): Unique identifier of a data object.
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
pir_dataset_object_id: Union[Unset, str] = UNSET
pir_search_object_id: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
@@ -105,44 +105,44 @@ class PrivateSearch:
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
pir_dataset_object_id = self.pir_dataset_object_id
pir_search_object_id = self.pir_search_object_id
@@ -153,46 +153,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if pir_dataset_object_id is not UNSET:
field_dict["pirDatasetObjectId"] = pir_dataset_object_id
if pir_search_object_id is not UNSET:
@@ -210,14 +210,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -226,10 +219,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -237,6 +241,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -248,19 +266,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -268,17 +273,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
pir_dataset_object_id = d.pop("pirDatasetObjectId", UNSET)
@@ -286,26 +286,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
private_search = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
pir_dataset_object_id=pir_dataset_object_id,
pir_search_object_id=pir_search_object_id,
)
diff --git a/src/tuneinsight/api/sdk/models/private_search_database.py b/src/tuneinsight/api/sdk/models/private_search_database.py
index 038a7bf..1f0ec70 100644
--- a/src/tuneinsight/api/sdk/models/private_search_database.py
+++ b/src/tuneinsight/api/sdk/models/private_search_database.py
@@ -12,55 +12,55 @@ class PrivateSearchDatabase:
"""Database used by private search
Attributes:
+ cryptosystem_params (Union[Unset, str]): cryptosystem parameters (b64-encoded)
database_id (Union[Unset, str]): Unique identifier of a private search database.
database_index (Union[Unset, str]): private search database hash index (b64-encoded)
database_params (Union[Unset, str]): private search database parameters (b64-encoded), returned on GET /private-
search-databases/
- cryptosystem_params (Union[Unset, str]): cryptosystem parameters (b64-encoded)
"""
+ cryptosystem_params: Union[Unset, str] = UNSET
database_id: Union[Unset, str] = UNSET
database_index: Union[Unset, str] = UNSET
database_params: Union[Unset, str] = UNSET
- cryptosystem_params: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ cryptosystem_params = self.cryptosystem_params
database_id = self.database_id
database_index = self.database_index
database_params = self.database_params
- cryptosystem_params = self.cryptosystem_params
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if cryptosystem_params is not UNSET:
+ field_dict["cryptosystemParams"] = cryptosystem_params
if database_id is not UNSET:
field_dict["databaseID"] = database_id
if database_index is not UNSET:
field_dict["databaseIndex"] = database_index
if database_params is not UNSET:
field_dict["databaseParams"] = database_params
- if cryptosystem_params is not UNSET:
- field_dict["cryptosystemParams"] = cryptosystem_params
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
+ cryptosystem_params = d.pop("cryptosystemParams", UNSET)
+
database_id = d.pop("databaseID", UNSET)
database_index = d.pop("databaseIndex", UNSET)
database_params = d.pop("databaseParams", UNSET)
- cryptosystem_params = d.pop("cryptosystemParams", UNSET)
-
private_search_database = cls(
+ cryptosystem_params=cryptosystem_params,
database_id=database_id,
database_index=database_index,
database_params=database_params,
- cryptosystem_params=cryptosystem_params,
)
private_search_database.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/private_search_setup.py b/src/tuneinsight/api/sdk/models/private_search_setup.py
index e70d873..7c47f36 100644
--- a/src/tuneinsight/api/sdk/models/private_search_setup.py
+++ b/src/tuneinsight/api/sdk/models/private_search_setup.py
@@ -23,8 +23,12 @@ class PrivateSearchSetup:
Attributes:
type (ComputationType): Type of the computation.
keys (str): (required) name of the column from the dataset which stores the keys of the database
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class PrivateSearchSetup:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class PrivateSearchSetup:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,25 +66,12 @@ class PrivateSearchSetup:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
values (Union[Unset, List[str]]): name of the columns from the dataset which stores the values of the database.
If empty, the computation will set this parameter to the column names of the dataset after dropping the keys
column.
@@ -79,28 +79,28 @@ class PrivateSearchSetup:
type: ComputationType
keys: str
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
values: Union[Unset, List[str]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
@@ -108,44 +108,44 @@ def to_dict(self) -> Dict[str, Any]:
type = self.type.value
keys = self.keys
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
values: Union[Unset, List[str]] = UNSET
if not isinstance(self.values, Unset):
values = self.values
@@ -158,46 +158,46 @@ def to_dict(self) -> Dict[str, Any]:
"keys": keys,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if values is not UNSET:
field_dict["values"] = values
@@ -215,14 +215,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
keys = d.pop("keys")
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -231,10 +224,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -242,6 +246,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -253,19 +271,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -273,43 +278,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
values = cast(List[str], d.pop("values", UNSET))
private_search_setup = cls(
type=type,
keys=keys,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
values=values,
)
diff --git a/src/tuneinsight/api/sdk/models/project.py b/src/tuneinsight/api/sdk/models/project.py
index 9c3740b..beaec0f 100644
--- a/src/tuneinsight/api/sdk/models/project.py
+++ b/src/tuneinsight/api/sdk/models/project.py
@@ -15,6 +15,7 @@
from ..models.computation_policy import ComputationPolicy
from ..models.data_source_query import DataSourceQuery
from ..models.local_data_selection_definition import LocalDataSelectionDefinition
+ from ..models.network import Network
from ..models.participant import Participant
from ..models.privacy_summary import PrivacySummary
@@ -27,162 +28,168 @@ class Project:
"""Project entity definition.
Attributes:
- authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
+ created_by_node (Union[Unset, str]): ID of node where the project was first created
created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API
- description (Union[Unset, None, str]):
+ dpia (Union[Unset, str]):
+ local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not
+ configured the network)
+ policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation
allow_shared_edit (Union[Unset, bool]): True if this project can be modified after being shared. Modifications
of a shared project will be broadcasted to the network
- created_by_node (Union[Unset, str]): ID of node where the project was first created
- locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared)
- unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are
- authorized to access the project (view / edit depends on the roles)
- workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend
+ authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project
computation_definition (Union[Unset, ComputationDefinition]): Generic computation.
- shared (Union[Unset, bool]): True if the project has once been shared across the participants
- policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation
- created_by_user (Union[Unset, str]): ID of user who created the project
- name (Union[Unset, str]):
- network_id (Union[Unset, str]): id to uniquely identify the network
- query (Union[Unset, DataSourceQuery]): schema used for the query
- topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are
- connected to a central node. In tree topology all nodes are connected and aware of each other.
- allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for
- a client to query the data source all participants of the project and return the clear text result
- local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not
- configured the network)
run_async (Union[Unset, bool]): flag indicating if computation should be run asynchronously
- unique_id (Union[Unset, str]): Unique identifier of a project.
+ workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend
+ non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the
+ distributed computations but does not have any input data.
+ By default this field is set according to the instance's configuration.
data_source_auto_match (Union[Unset, bool]): whether or not to automatically assign the first matching
datasource when the project is shared with other nodes
end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted
on the client side
+ locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared)
+ query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30.
+ data_source_id (Union[Unset, None, str]): Unique identifier of a data source.
+ name (Union[Unset, str]):
+ network_name (Union[Unset, str]): name of the network from the list of existing networks to link the project to.
+ created_by_user (Union[Unset, str]): ID of user who created the project
+ workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend
+ allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for
+ a client to query the data source all participants of the project and return the clear text result
+ authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
+ description (Union[Unset, None, str]):
+ topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are
+ connected to a central node. In tree topology all nodes are connected and aware of each other.
hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf
participants when the project is in a star topology.
local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. A
selection is a "query" or data selection definition to run on the datasource
+ query (Union[Unset, DataSourceQuery]): schema used for the query
+ shared (Union[Unset, bool]): True if the project has once been shared across the participants
+ unique_id (Union[Unset, str]): Unique identifier of a project.
min_contributors (Union[Unset, None, int]): minimum number of participants that contribute with their data
required to run computations within this project
- query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30.
- workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend
- authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project
- dpia (Union[Unset, str]):
- non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the
- distributed computations but does not have any input data.
- By default this field is set according to the instance's configuration.
- data_source_id (Union[Unset, None, str]): Unique identifier of a data source.
+ network_id (Union[Unset, str]): id to uniquely identify the network
+ unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are
+ authorized to access the project (view / edit depends on the roles)
+ workflow_description (Union[Unset, str]): dynamically generated markdown description of the distributed workflow
+ that is currently configured with the project.
+ Not to be confused with the project description which is set by the user that has created the project for
+ informative purposes.
computations (Union[Unset, List['Computation']]): List of computations of the project
created_at (Union[Unset, str]):
error (Union[Unset, str]): Description of a potential error that happened during the project lifespan
+ status (Union[Unset, ProjectStatus]): Stages of a project workflow
+ network (Union[Unset, Network]): Network that represents a set of nodes
participants (Union[Unset, List['Participant']]): List of participants in the project
privacy_summary (Union[Unset, PrivacySummary]): Privacy summary for a project
- status (Union[Unset, ProjectStatus]): Stages of a project workflow
updated_at (Union[Unset, str]):
- workflow_description (Union[Unset, str]): dynamically generated markdown description of the distributed workflow
- that is currently configured with the project.
- Not to be confused with the project description which is set by the user that has created the project for
- informative purposes.
"""
- authorization_status: Union[Unset, AuthorizationStatus] = UNSET
+ created_by_node: Union[Unset, str] = UNSET
created_with_client: Union[Unset, Client] = UNSET
- description: Union[Unset, None, str] = UNSET
+ dpia: Union[Unset, str] = UNSET
+ local: Union[Unset, None, bool] = UNSET
+ policy: Union[Unset, "ComputationPolicy"] = UNSET
allow_shared_edit: Union[Unset, bool] = UNSET
- created_by_node: Union[Unset, str] = UNSET
- locked: Union[Unset, None, bool] = UNSET
- unrestricted_access: Union[Unset, None, bool] = UNSET
- workflow_type: Union[Unset, WorkflowType] = UNSET
+ authorized_users: Union[Unset, List[str]] = UNSET
computation_definition: Union[Unset, "ComputationDefinition"] = UNSET
- shared: Union[Unset, bool] = UNSET
- policy: Union[Unset, "ComputationPolicy"] = UNSET
- created_by_user: Union[Unset, str] = UNSET
- name: Union[Unset, str] = UNSET
- network_id: Union[Unset, str] = UNSET
- query: Union[Unset, "DataSourceQuery"] = UNSET
- topology: Union[Unset, Topology] = UNSET
- allow_clear_query: Union[Unset, bool] = UNSET
- local: Union[Unset, None, bool] = UNSET
run_async: Union[Unset, bool] = UNSET
- unique_id: Union[Unset, str] = UNSET
+ workflow_type: Union[Unset, WorkflowType] = UNSET
+ non_contributor: Union[Unset, None, bool] = UNSET
data_source_auto_match: Union[Unset, bool] = UNSET
end_to_end_encrypted: Union[Unset, None, bool] = UNSET
+ locked: Union[Unset, None, bool] = UNSET
+ query_timeout: Union[Unset, int] = 30
+ data_source_id: Union[Unset, None, str] = UNSET
+ name: Union[Unset, str] = UNSET
+ network_name: Union[Unset, str] = UNSET
+ created_by_user: Union[Unset, str] = UNSET
+ workflow_json: Union[Unset, str] = UNSET
+ allow_clear_query: Union[Unset, bool] = UNSET
+ authorization_status: Union[Unset, AuthorizationStatus] = UNSET
+ description: Union[Unset, None, str] = UNSET
+ topology: Union[Unset, Topology] = UNSET
hide_leaf_participants: Union[Unset, None, bool] = UNSET
local_data_selection_definition: Union[Unset, "LocalDataSelectionDefinition"] = UNSET
+ query: Union[Unset, "DataSourceQuery"] = UNSET
+ shared: Union[Unset, bool] = UNSET
+ unique_id: Union[Unset, str] = UNSET
min_contributors: Union[Unset, None, int] = UNSET
- query_timeout: Union[Unset, int] = 30
- workflow_json: Union[Unset, str] = UNSET
- authorized_users: Union[Unset, List[str]] = UNSET
- dpia: Union[Unset, str] = UNSET
- non_contributor: Union[Unset, None, bool] = UNSET
- data_source_id: Union[Unset, None, str] = UNSET
+ network_id: Union[Unset, str] = UNSET
+ unrestricted_access: Union[Unset, None, bool] = UNSET
+ workflow_description: Union[Unset, str] = UNSET
computations: Union[Unset, List["Computation"]] = UNSET
created_at: Union[Unset, str] = UNSET
error: Union[Unset, str] = UNSET
+ status: Union[Unset, ProjectStatus] = UNSET
+ network: Union[Unset, "Network"] = UNSET
participants: Union[Unset, List["Participant"]] = UNSET
privacy_summary: Union[Unset, "PrivacySummary"] = UNSET
- status: Union[Unset, ProjectStatus] = UNSET
updated_at: Union[Unset, str] = UNSET
- workflow_description: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- authorization_status: Union[Unset, str] = UNSET
- if not isinstance(self.authorization_status, Unset):
- authorization_status = self.authorization_status.value
-
+ created_by_node = self.created_by_node
created_with_client: Union[Unset, str] = UNSET
if not isinstance(self.created_with_client, Unset):
created_with_client = self.created_with_client.value
- description = self.description
+ dpia = self.dpia
+ local = self.local
+ policy: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.policy, Unset):
+ policy = self.policy.to_dict()
+
allow_shared_edit = self.allow_shared_edit
- created_by_node = self.created_by_node
- locked = self.locked
- unrestricted_access = self.unrestricted_access
- workflow_type: Union[Unset, str] = UNSET
- if not isinstance(self.workflow_type, Unset):
- workflow_type = self.workflow_type.value
+ authorized_users: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.authorized_users, Unset):
+ authorized_users = self.authorized_users
computation_definition: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.computation_definition, Unset):
computation_definition = self.computation_definition.to_dict()
- shared = self.shared
- policy: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.policy, Unset):
- policy = self.policy.to_dict()
+ run_async = self.run_async
+ workflow_type: Union[Unset, str] = UNSET
+ if not isinstance(self.workflow_type, Unset):
+ workflow_type = self.workflow_type.value
- created_by_user = self.created_by_user
+ non_contributor = self.non_contributor
+ data_source_auto_match = self.data_source_auto_match
+ end_to_end_encrypted = self.end_to_end_encrypted
+ locked = self.locked
+ query_timeout = self.query_timeout
+ data_source_id = self.data_source_id
name = self.name
- network_id = self.network_id
- query: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.query, Unset):
- query = self.query.to_dict()
+ network_name = self.network_name
+ created_by_user = self.created_by_user
+ workflow_json = self.workflow_json
+ allow_clear_query = self.allow_clear_query
+ authorization_status: Union[Unset, str] = UNSET
+ if not isinstance(self.authorization_status, Unset):
+ authorization_status = self.authorization_status.value
+ description = self.description
topology: Union[Unset, str] = UNSET
if not isinstance(self.topology, Unset):
topology = self.topology.value
- allow_clear_query = self.allow_clear_query
- local = self.local
- run_async = self.run_async
- unique_id = self.unique_id
- data_source_auto_match = self.data_source_auto_match
- end_to_end_encrypted = self.end_to_end_encrypted
hide_leaf_participants = self.hide_leaf_participants
local_data_selection_definition: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.local_data_selection_definition, Unset):
local_data_selection_definition = self.local_data_selection_definition.to_dict()
- min_contributors = self.min_contributors
- query_timeout = self.query_timeout
- workflow_json = self.workflow_json
- authorized_users: Union[Unset, List[str]] = UNSET
- if not isinstance(self.authorized_users, Unset):
- authorized_users = self.authorized_users
+ query: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.query, Unset):
+ query = self.query.to_dict()
- dpia = self.dpia
- non_contributor = self.non_contributor
- data_source_id = self.data_source_id
+ shared = self.shared
+ unique_id = self.unique_id
+ min_contributors = self.min_contributors
+ network_id = self.network_id
+ unrestricted_access = self.unrestricted_access
+ workflow_description = self.workflow_description
computations: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.computations, Unset):
computations = []
@@ -193,6 +200,14 @@ def to_dict(self) -> Dict[str, Any]:
created_at = self.created_at
error = self.error
+ status: Union[Unset, str] = UNSET
+ if not isinstance(self.status, Unset):
+ status = self.status.value
+
+ network: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.network, Unset):
+ network = self.network.to_dict()
+
participants: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.participants, Unset):
participants = []
@@ -205,94 +220,93 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.privacy_summary, Unset):
privacy_summary = self.privacy_summary.to_dict()
- status: Union[Unset, str] = UNSET
- if not isinstance(self.status, Unset):
- status = self.status.value
-
updated_at = self.updated_at
- workflow_description = self.workflow_description
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if authorization_status is not UNSET:
- field_dict["authorizationStatus"] = authorization_status
+ if created_by_node is not UNSET:
+ field_dict["createdByNode"] = created_by_node
if created_with_client is not UNSET:
field_dict["createdWithClient"] = created_with_client
- if description is not UNSET:
- field_dict["description"] = description
+ if dpia is not UNSET:
+ field_dict["dpia"] = dpia
+ if local is not UNSET:
+ field_dict["local"] = local
+ if policy is not UNSET:
+ field_dict["policy"] = policy
if allow_shared_edit is not UNSET:
field_dict["allowSharedEdit"] = allow_shared_edit
- if created_by_node is not UNSET:
- field_dict["createdByNode"] = created_by_node
- if locked is not UNSET:
- field_dict["locked"] = locked
- if unrestricted_access is not UNSET:
- field_dict["unrestrictedAccess"] = unrestricted_access
- if workflow_type is not UNSET:
- field_dict["workflowType"] = workflow_type
+ if authorized_users is not UNSET:
+ field_dict["authorizedUsers"] = authorized_users
if computation_definition is not UNSET:
field_dict["computationDefinition"] = computation_definition
- if shared is not UNSET:
- field_dict["shared"] = shared
- if policy is not UNSET:
- field_dict["policy"] = policy
- if created_by_user is not UNSET:
- field_dict["createdByUser"] = created_by_user
- if name is not UNSET:
- field_dict["name"] = name
- if network_id is not UNSET:
- field_dict["networkId"] = network_id
- if query is not UNSET:
- field_dict["query"] = query
- if topology is not UNSET:
- field_dict["topology"] = topology
- if allow_clear_query is not UNSET:
- field_dict["allowClearQuery"] = allow_clear_query
- if local is not UNSET:
- field_dict["local"] = local
if run_async is not UNSET:
field_dict["runAsync"] = run_async
- if unique_id is not UNSET:
- field_dict["uniqueId"] = unique_id
+ if workflow_type is not UNSET:
+ field_dict["workflowType"] = workflow_type
+ if non_contributor is not UNSET:
+ field_dict["nonContributor"] = non_contributor
if data_source_auto_match is not UNSET:
field_dict["dataSourceAutoMatch"] = data_source_auto_match
if end_to_end_encrypted is not UNSET:
field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if locked is not UNSET:
+ field_dict["locked"] = locked
+ if query_timeout is not UNSET:
+ field_dict["queryTimeout"] = query_timeout
+ if data_source_id is not UNSET:
+ field_dict["dataSourceId"] = data_source_id
+ if name is not UNSET:
+ field_dict["name"] = name
+ if network_name is not UNSET:
+ field_dict["networkName"] = network_name
+ if created_by_user is not UNSET:
+ field_dict["createdByUser"] = created_by_user
+ if workflow_json is not UNSET:
+ field_dict["workflowJSON"] = workflow_json
+ if allow_clear_query is not UNSET:
+ field_dict["allowClearQuery"] = allow_clear_query
+ if authorization_status is not UNSET:
+ field_dict["authorizationStatus"] = authorization_status
+ if description is not UNSET:
+ field_dict["description"] = description
+ if topology is not UNSET:
+ field_dict["topology"] = topology
if hide_leaf_participants is not UNSET:
field_dict["hideLeafParticipants"] = hide_leaf_participants
if local_data_selection_definition is not UNSET:
field_dict["localDataSelectionDefinition"] = local_data_selection_definition
+ if query is not UNSET:
+ field_dict["query"] = query
+ if shared is not UNSET:
+ field_dict["shared"] = shared
+ if unique_id is not UNSET:
+ field_dict["uniqueId"] = unique_id
if min_contributors is not UNSET:
field_dict["minContributors"] = min_contributors
- if query_timeout is not UNSET:
- field_dict["queryTimeout"] = query_timeout
- if workflow_json is not UNSET:
- field_dict["workflowJSON"] = workflow_json
- if authorized_users is not UNSET:
- field_dict["authorizedUsers"] = authorized_users
- if dpia is not UNSET:
- field_dict["dpia"] = dpia
- if non_contributor is not UNSET:
- field_dict["nonContributor"] = non_contributor
- if data_source_id is not UNSET:
- field_dict["dataSourceId"] = data_source_id
+ if network_id is not UNSET:
+ field_dict["networkId"] = network_id
+ if unrestricted_access is not UNSET:
+ field_dict["unrestrictedAccess"] = unrestricted_access
+ if workflow_description is not UNSET:
+ field_dict["workflowDescription"] = workflow_description
if computations is not UNSET:
field_dict["computations"] = computations
if created_at is not UNSET:
field_dict["createdAt"] = created_at
if error is not UNSET:
field_dict["error"] = error
+ if status is not UNSET:
+ field_dict["status"] = status
+ if network is not UNSET:
+ field_dict["network"] = network
if participants is not UNSET:
field_dict["participants"] = participants
if privacy_summary is not UNSET:
field_dict["privacySummary"] = privacy_summary
- if status is not UNSET:
- field_dict["status"] = status
if updated_at is not UNSET:
field_dict["updatedAt"] = updated_at
- if workflow_description is not UNSET:
- field_dict["workflowDescription"] = workflow_description
return field_dict
@@ -303,16 +317,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.computation_policy import ComputationPolicy
from ..models.data_source_query import DataSourceQuery
from ..models.local_data_selection_definition import LocalDataSelectionDefinition
+ from ..models.network import Network
from ..models.participant import Participant
from ..models.privacy_summary import PrivacySummary
d = src_dict.copy()
- _authorization_status = d.pop("authorizationStatus", UNSET)
- authorization_status: Union[Unset, AuthorizationStatus]
- if isinstance(_authorization_status, Unset):
- authorization_status = UNSET
- else:
- authorization_status = AuthorizationStatus(_authorization_status)
+ created_by_node = d.pop("createdByNode", UNSET)
_created_with_client = d.pop("createdWithClient", UNSET)
created_with_client: Union[Unset, Client]
@@ -321,15 +331,29 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
created_with_client = Client(_created_with_client)
- description = d.pop("description", UNSET)
+ dpia = d.pop("dpia", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ _policy = d.pop("policy", UNSET)
+ policy: Union[Unset, ComputationPolicy]
+ if isinstance(_policy, Unset):
+ policy = UNSET
+ else:
+ policy = ComputationPolicy.from_dict(_policy)
allow_shared_edit = d.pop("allowSharedEdit", UNSET)
- created_by_node = d.pop("createdByNode", UNSET)
+ authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET))
- locked = d.pop("locked", UNSET)
+ _computation_definition = d.pop("computationDefinition", UNSET)
+ computation_definition: Union[Unset, ComputationDefinition]
+ if isinstance(_computation_definition, Unset):
+ computation_definition = UNSET
+ else:
+ computation_definition = ComputationDefinition.from_dict(_computation_definition)
- unrestricted_access = d.pop("unrestrictedAccess", UNSET)
+ run_async = d.pop("runAsync", UNSET)
_workflow_type = d.pop("workflowType", UNSET)
workflow_type: Union[Unset, WorkflowType]
@@ -338,34 +362,36 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
workflow_type = WorkflowType(_workflow_type)
- _computation_definition = d.pop("computationDefinition", UNSET)
- computation_definition: Union[Unset, ComputationDefinition]
- if isinstance(_computation_definition, Unset):
- computation_definition = UNSET
- else:
- computation_definition = ComputationDefinition.from_dict(_computation_definition)
+ non_contributor = d.pop("nonContributor", UNSET)
- shared = d.pop("shared", UNSET)
+ data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET)
- _policy = d.pop("policy", UNSET)
- policy: Union[Unset, ComputationPolicy]
- if isinstance(_policy, Unset):
- policy = UNSET
- else:
- policy = ComputationPolicy.from_dict(_policy)
+ end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET)
- created_by_user = d.pop("createdByUser", UNSET)
+ locked = d.pop("locked", UNSET)
+
+ query_timeout = d.pop("queryTimeout", UNSET)
+
+ data_source_id = d.pop("dataSourceId", UNSET)
name = d.pop("name", UNSET)
- network_id = d.pop("networkId", UNSET)
+ network_name = d.pop("networkName", UNSET)
- _query = d.pop("query", UNSET)
- query: Union[Unset, DataSourceQuery]
- if isinstance(_query, Unset):
- query = UNSET
+ created_by_user = d.pop("createdByUser", UNSET)
+
+ workflow_json = d.pop("workflowJSON", UNSET)
+
+ allow_clear_query = d.pop("allowClearQuery", UNSET)
+
+ _authorization_status = d.pop("authorizationStatus", UNSET)
+ authorization_status: Union[Unset, AuthorizationStatus]
+ if isinstance(_authorization_status, Unset):
+ authorization_status = UNSET
else:
- query = DataSourceQuery.from_dict(_query)
+ authorization_status = AuthorizationStatus(_authorization_status)
+
+ description = d.pop("description", UNSET)
_topology = d.pop("topology", UNSET)
topology: Union[Unset, Topology]
@@ -374,18 +400,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
topology = Topology(_topology)
- allow_clear_query = d.pop("allowClearQuery", UNSET)
-
- local = d.pop("local", UNSET)
-
- run_async = d.pop("runAsync", UNSET)
-
- unique_id = d.pop("uniqueId", UNSET)
-
- data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET)
-
- end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET)
-
hide_leaf_participants = d.pop("hideLeafParticipants", UNSET)
_local_data_selection_definition = d.pop("localDataSelectionDefinition", UNSET)
@@ -395,19 +409,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
local_data_selection_definition = LocalDataSelectionDefinition.from_dict(_local_data_selection_definition)
- min_contributors = d.pop("minContributors", UNSET)
+ _query = d.pop("query", UNSET)
+ query: Union[Unset, DataSourceQuery]
+ if isinstance(_query, Unset):
+ query = UNSET
+ else:
+ query = DataSourceQuery.from_dict(_query)
- query_timeout = d.pop("queryTimeout", UNSET)
+ shared = d.pop("shared", UNSET)
- workflow_json = d.pop("workflowJSON", UNSET)
+ unique_id = d.pop("uniqueId", UNSET)
- authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET))
+ min_contributors = d.pop("minContributors", UNSET)
- dpia = d.pop("dpia", UNSET)
+ network_id = d.pop("networkId", UNSET)
- non_contributor = d.pop("nonContributor", UNSET)
+ unrestricted_access = d.pop("unrestrictedAccess", UNSET)
- data_source_id = d.pop("dataSourceId", UNSET)
+ workflow_description = d.pop("workflowDescription", UNSET)
computations = []
_computations = d.pop("computations", UNSET)
@@ -420,6 +439,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
error = d.pop("error", UNSET)
+ _status = d.pop("status", UNSET)
+ status: Union[Unset, ProjectStatus]
+ if isinstance(_status, Unset):
+ status = UNSET
+ else:
+ status = ProjectStatus(_status)
+
+ _network = d.pop("network", UNSET)
+ network: Union[Unset, Network]
+ if isinstance(_network, Unset):
+ network = UNSET
+ else:
+ network = Network.from_dict(_network)
+
participants = []
_participants = d.pop("participants", UNSET)
for participants_item_data in _participants or []:
@@ -434,57 +467,50 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
privacy_summary = PrivacySummary.from_dict(_privacy_summary)
- _status = d.pop("status", UNSET)
- status: Union[Unset, ProjectStatus]
- if isinstance(_status, Unset):
- status = UNSET
- else:
- status = ProjectStatus(_status)
-
updated_at = d.pop("updatedAt", UNSET)
- workflow_description = d.pop("workflowDescription", UNSET)
-
project = cls(
- authorization_status=authorization_status,
+ created_by_node=created_by_node,
created_with_client=created_with_client,
- description=description,
+ dpia=dpia,
+ local=local,
+ policy=policy,
allow_shared_edit=allow_shared_edit,
- created_by_node=created_by_node,
- locked=locked,
- unrestricted_access=unrestricted_access,
- workflow_type=workflow_type,
+ authorized_users=authorized_users,
computation_definition=computation_definition,
- shared=shared,
- policy=policy,
- created_by_user=created_by_user,
- name=name,
- network_id=network_id,
- query=query,
- topology=topology,
- allow_clear_query=allow_clear_query,
- local=local,
run_async=run_async,
- unique_id=unique_id,
+ workflow_type=workflow_type,
+ non_contributor=non_contributor,
data_source_auto_match=data_source_auto_match,
end_to_end_encrypted=end_to_end_encrypted,
+ locked=locked,
+ query_timeout=query_timeout,
+ data_source_id=data_source_id,
+ name=name,
+ network_name=network_name,
+ created_by_user=created_by_user,
+ workflow_json=workflow_json,
+ allow_clear_query=allow_clear_query,
+ authorization_status=authorization_status,
+ description=description,
+ topology=topology,
hide_leaf_participants=hide_leaf_participants,
local_data_selection_definition=local_data_selection_definition,
+ query=query,
+ shared=shared,
+ unique_id=unique_id,
min_contributors=min_contributors,
- query_timeout=query_timeout,
- workflow_json=workflow_json,
- authorized_users=authorized_users,
- dpia=dpia,
- non_contributor=non_contributor,
- data_source_id=data_source_id,
+ network_id=network_id,
+ unrestricted_access=unrestricted_access,
+ workflow_description=workflow_description,
computations=computations,
created_at=created_at,
error=error,
+ status=status,
+ network=network,
participants=participants,
privacy_summary=privacy_summary,
- status=status,
updated_at=updated_at,
- workflow_description=workflow_description,
)
project.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/project_base.py b/src/tuneinsight/api/sdk/models/project_base.py
index 99f10ab..ebd65c6 100644
--- a/src/tuneinsight/api/sdk/models/project_base.py
+++ b/src/tuneinsight/api/sdk/models/project_base.py
@@ -23,209 +23,214 @@ class ProjectBase:
"""Common fields of a project (for get, patch and post)
Attributes:
- authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
+ created_by_node (Union[Unset, str]): ID of node where the project was first created
created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API
- description (Union[Unset, None, str]):
+ dpia (Union[Unset, str]):
+ local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not
+ configured the network)
+ policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation
allow_shared_edit (Union[Unset, bool]): True if this project can be modified after being shared. Modifications
of a shared project will be broadcasted to the network
- created_by_node (Union[Unset, str]): ID of node where the project was first created
- locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared)
- unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are
- authorized to access the project (view / edit depends on the roles)
- workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend
+ authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project
computation_definition (Union[Unset, ComputationDefinition]): Generic computation.
- shared (Union[Unset, bool]): True if the project has once been shared across the participants
- policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation
- created_by_user (Union[Unset, str]): ID of user who created the project
- name (Union[Unset, str]):
- network_id (Union[Unset, str]): id to uniquely identify the network
- query (Union[Unset, DataSourceQuery]): schema used for the query
- topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are
- connected to a central node. In tree topology all nodes are connected and aware of each other.
- allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for
- a client to query the data source all participants of the project and return the clear text result
- local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not
- configured the network)
run_async (Union[Unset, bool]): flag indicating if computation should be run asynchronously
- unique_id (Union[Unset, str]): Unique identifier of a project.
+ workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend
+ non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the
+ distributed computations but does not have any input data.
+ By default this field is set according to the instance's configuration.
data_source_auto_match (Union[Unset, bool]): whether or not to automatically assign the first matching
datasource when the project is shared with other nodes
end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted
on the client side
+ locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared)
+ query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30.
+ data_source_id (Union[Unset, None, str]): Unique identifier of a data source.
+ name (Union[Unset, str]):
+ network_name (Union[Unset, str]): name of the network from the list of existing networks to link the project to.
+ created_by_user (Union[Unset, str]): ID of user who created the project
+ workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend
+ allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for
+ a client to query the data source all participants of the project and return the clear text result
+ authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
+ description (Union[Unset, None, str]):
+ topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are
+ connected to a central node. In tree topology all nodes are connected and aware of each other.
hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf
participants when the project is in a star topology.
local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. A
selection is a "query" or data selection definition to run on the datasource
+ query (Union[Unset, DataSourceQuery]): schema used for the query
+ shared (Union[Unset, bool]): True if the project has once been shared across the participants
+ unique_id (Union[Unset, str]): Unique identifier of a project.
min_contributors (Union[Unset, None, int]): minimum number of participants that contribute with their data
required to run computations within this project
- query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30.
- workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend
- authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project
- dpia (Union[Unset, str]):
- non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the
- distributed computations but does not have any input data.
- By default this field is set according to the instance's configuration.
- data_source_id (Union[Unset, None, str]): Unique identifier of a data source.
+ network_id (Union[Unset, str]): id to uniquely identify the network
+ unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are
+ authorized to access the project (view / edit depends on the roles)
"""
- authorization_status: Union[Unset, AuthorizationStatus] = UNSET
+ created_by_node: Union[Unset, str] = UNSET
created_with_client: Union[Unset, Client] = UNSET
- description: Union[Unset, None, str] = UNSET
+ dpia: Union[Unset, str] = UNSET
+ local: Union[Unset, None, bool] = UNSET
+ policy: Union[Unset, "ComputationPolicy"] = UNSET
allow_shared_edit: Union[Unset, bool] = UNSET
- created_by_node: Union[Unset, str] = UNSET
- locked: Union[Unset, None, bool] = UNSET
- unrestricted_access: Union[Unset, None, bool] = UNSET
- workflow_type: Union[Unset, WorkflowType] = UNSET
+ authorized_users: Union[Unset, List[str]] = UNSET
computation_definition: Union[Unset, "ComputationDefinition"] = UNSET
- shared: Union[Unset, bool] = UNSET
- policy: Union[Unset, "ComputationPolicy"] = UNSET
- created_by_user: Union[Unset, str] = UNSET
- name: Union[Unset, str] = UNSET
- network_id: Union[Unset, str] = UNSET
- query: Union[Unset, "DataSourceQuery"] = UNSET
- topology: Union[Unset, Topology] = UNSET
- allow_clear_query: Union[Unset, bool] = UNSET
- local: Union[Unset, None, bool] = UNSET
run_async: Union[Unset, bool] = UNSET
- unique_id: Union[Unset, str] = UNSET
+ workflow_type: Union[Unset, WorkflowType] = UNSET
+ non_contributor: Union[Unset, None, bool] = UNSET
data_source_auto_match: Union[Unset, bool] = UNSET
end_to_end_encrypted: Union[Unset, None, bool] = UNSET
+ locked: Union[Unset, None, bool] = UNSET
+ query_timeout: Union[Unset, int] = 30
+ data_source_id: Union[Unset, None, str] = UNSET
+ name: Union[Unset, str] = UNSET
+ network_name: Union[Unset, str] = UNSET
+ created_by_user: Union[Unset, str] = UNSET
+ workflow_json: Union[Unset, str] = UNSET
+ allow_clear_query: Union[Unset, bool] = UNSET
+ authorization_status: Union[Unset, AuthorizationStatus] = UNSET
+ description: Union[Unset, None, str] = UNSET
+ topology: Union[Unset, Topology] = UNSET
hide_leaf_participants: Union[Unset, None, bool] = UNSET
local_data_selection_definition: Union[Unset, "LocalDataSelectionDefinition"] = UNSET
+ query: Union[Unset, "DataSourceQuery"] = UNSET
+ shared: Union[Unset, bool] = UNSET
+ unique_id: Union[Unset, str] = UNSET
min_contributors: Union[Unset, None, int] = UNSET
- query_timeout: Union[Unset, int] = 30
- workflow_json: Union[Unset, str] = UNSET
- authorized_users: Union[Unset, List[str]] = UNSET
- dpia: Union[Unset, str] = UNSET
- non_contributor: Union[Unset, None, bool] = UNSET
- data_source_id: Union[Unset, None, str] = UNSET
+ network_id: Union[Unset, str] = UNSET
+ unrestricted_access: Union[Unset, None, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- authorization_status: Union[Unset, str] = UNSET
- if not isinstance(self.authorization_status, Unset):
- authorization_status = self.authorization_status.value
-
+ created_by_node = self.created_by_node
created_with_client: Union[Unset, str] = UNSET
if not isinstance(self.created_with_client, Unset):
created_with_client = self.created_with_client.value
- description = self.description
+ dpia = self.dpia
+ local = self.local
+ policy: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.policy, Unset):
+ policy = self.policy.to_dict()
+
allow_shared_edit = self.allow_shared_edit
- created_by_node = self.created_by_node
- locked = self.locked
- unrestricted_access = self.unrestricted_access
- workflow_type: Union[Unset, str] = UNSET
- if not isinstance(self.workflow_type, Unset):
- workflow_type = self.workflow_type.value
+ authorized_users: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.authorized_users, Unset):
+ authorized_users = self.authorized_users
computation_definition: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.computation_definition, Unset):
computation_definition = self.computation_definition.to_dict()
- shared = self.shared
- policy: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.policy, Unset):
- policy = self.policy.to_dict()
+ run_async = self.run_async
+ workflow_type: Union[Unset, str] = UNSET
+ if not isinstance(self.workflow_type, Unset):
+ workflow_type = self.workflow_type.value
- created_by_user = self.created_by_user
+ non_contributor = self.non_contributor
+ data_source_auto_match = self.data_source_auto_match
+ end_to_end_encrypted = self.end_to_end_encrypted
+ locked = self.locked
+ query_timeout = self.query_timeout
+ data_source_id = self.data_source_id
name = self.name
- network_id = self.network_id
- query: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.query, Unset):
- query = self.query.to_dict()
+ network_name = self.network_name
+ created_by_user = self.created_by_user
+ workflow_json = self.workflow_json
+ allow_clear_query = self.allow_clear_query
+ authorization_status: Union[Unset, str] = UNSET
+ if not isinstance(self.authorization_status, Unset):
+ authorization_status = self.authorization_status.value
+ description = self.description
topology: Union[Unset, str] = UNSET
if not isinstance(self.topology, Unset):
topology = self.topology.value
- allow_clear_query = self.allow_clear_query
- local = self.local
- run_async = self.run_async
- unique_id = self.unique_id
- data_source_auto_match = self.data_source_auto_match
- end_to_end_encrypted = self.end_to_end_encrypted
hide_leaf_participants = self.hide_leaf_participants
local_data_selection_definition: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.local_data_selection_definition, Unset):
local_data_selection_definition = self.local_data_selection_definition.to_dict()
- min_contributors = self.min_contributors
- query_timeout = self.query_timeout
- workflow_json = self.workflow_json
- authorized_users: Union[Unset, List[str]] = UNSET
- if not isinstance(self.authorized_users, Unset):
- authorized_users = self.authorized_users
+ query: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.query, Unset):
+ query = self.query.to_dict()
- dpia = self.dpia
- non_contributor = self.non_contributor
- data_source_id = self.data_source_id
+ shared = self.shared
+ unique_id = self.unique_id
+ min_contributors = self.min_contributors
+ network_id = self.network_id
+ unrestricted_access = self.unrestricted_access
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if authorization_status is not UNSET:
- field_dict["authorizationStatus"] = authorization_status
+ if created_by_node is not UNSET:
+ field_dict["createdByNode"] = created_by_node
if created_with_client is not UNSET:
field_dict["createdWithClient"] = created_with_client
- if description is not UNSET:
- field_dict["description"] = description
+ if dpia is not UNSET:
+ field_dict["dpia"] = dpia
+ if local is not UNSET:
+ field_dict["local"] = local
+ if policy is not UNSET:
+ field_dict["policy"] = policy
if allow_shared_edit is not UNSET:
field_dict["allowSharedEdit"] = allow_shared_edit
- if created_by_node is not UNSET:
- field_dict["createdByNode"] = created_by_node
- if locked is not UNSET:
- field_dict["locked"] = locked
- if unrestricted_access is not UNSET:
- field_dict["unrestrictedAccess"] = unrestricted_access
- if workflow_type is not UNSET:
- field_dict["workflowType"] = workflow_type
+ if authorized_users is not UNSET:
+ field_dict["authorizedUsers"] = authorized_users
if computation_definition is not UNSET:
field_dict["computationDefinition"] = computation_definition
- if shared is not UNSET:
- field_dict["shared"] = shared
- if policy is not UNSET:
- field_dict["policy"] = policy
- if created_by_user is not UNSET:
- field_dict["createdByUser"] = created_by_user
- if name is not UNSET:
- field_dict["name"] = name
- if network_id is not UNSET:
- field_dict["networkId"] = network_id
- if query is not UNSET:
- field_dict["query"] = query
- if topology is not UNSET:
- field_dict["topology"] = topology
- if allow_clear_query is not UNSET:
- field_dict["allowClearQuery"] = allow_clear_query
- if local is not UNSET:
- field_dict["local"] = local
if run_async is not UNSET:
field_dict["runAsync"] = run_async
- if unique_id is not UNSET:
- field_dict["uniqueId"] = unique_id
+ if workflow_type is not UNSET:
+ field_dict["workflowType"] = workflow_type
+ if non_contributor is not UNSET:
+ field_dict["nonContributor"] = non_contributor
if data_source_auto_match is not UNSET:
field_dict["dataSourceAutoMatch"] = data_source_auto_match
if end_to_end_encrypted is not UNSET:
field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if locked is not UNSET:
+ field_dict["locked"] = locked
+ if query_timeout is not UNSET:
+ field_dict["queryTimeout"] = query_timeout
+ if data_source_id is not UNSET:
+ field_dict["dataSourceId"] = data_source_id
+ if name is not UNSET:
+ field_dict["name"] = name
+ if network_name is not UNSET:
+ field_dict["networkName"] = network_name
+ if created_by_user is not UNSET:
+ field_dict["createdByUser"] = created_by_user
+ if workflow_json is not UNSET:
+ field_dict["workflowJSON"] = workflow_json
+ if allow_clear_query is not UNSET:
+ field_dict["allowClearQuery"] = allow_clear_query
+ if authorization_status is not UNSET:
+ field_dict["authorizationStatus"] = authorization_status
+ if description is not UNSET:
+ field_dict["description"] = description
+ if topology is not UNSET:
+ field_dict["topology"] = topology
if hide_leaf_participants is not UNSET:
field_dict["hideLeafParticipants"] = hide_leaf_participants
if local_data_selection_definition is not UNSET:
field_dict["localDataSelectionDefinition"] = local_data_selection_definition
+ if query is not UNSET:
+ field_dict["query"] = query
+ if shared is not UNSET:
+ field_dict["shared"] = shared
+ if unique_id is not UNSET:
+ field_dict["uniqueId"] = unique_id
if min_contributors is not UNSET:
field_dict["minContributors"] = min_contributors
- if query_timeout is not UNSET:
- field_dict["queryTimeout"] = query_timeout
- if workflow_json is not UNSET:
- field_dict["workflowJSON"] = workflow_json
- if authorized_users is not UNSET:
- field_dict["authorizedUsers"] = authorized_users
- if dpia is not UNSET:
- field_dict["dpia"] = dpia
- if non_contributor is not UNSET:
- field_dict["nonContributor"] = non_contributor
- if data_source_id is not UNSET:
- field_dict["dataSourceId"] = data_source_id
+ if network_id is not UNSET:
+ field_dict["networkId"] = network_id
+ if unrestricted_access is not UNSET:
+ field_dict["unrestrictedAccess"] = unrestricted_access
return field_dict
@@ -237,12 +242,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.local_data_selection_definition import LocalDataSelectionDefinition
d = src_dict.copy()
- _authorization_status = d.pop("authorizationStatus", UNSET)
- authorization_status: Union[Unset, AuthorizationStatus]
- if isinstance(_authorization_status, Unset):
- authorization_status = UNSET
- else:
- authorization_status = AuthorizationStatus(_authorization_status)
+ created_by_node = d.pop("createdByNode", UNSET)
_created_with_client = d.pop("createdWithClient", UNSET)
created_with_client: Union[Unset, Client]
@@ -251,15 +251,29 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
created_with_client = Client(_created_with_client)
- description = d.pop("description", UNSET)
+ dpia = d.pop("dpia", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ _policy = d.pop("policy", UNSET)
+ policy: Union[Unset, ComputationPolicy]
+ if isinstance(_policy, Unset):
+ policy = UNSET
+ else:
+ policy = ComputationPolicy.from_dict(_policy)
allow_shared_edit = d.pop("allowSharedEdit", UNSET)
- created_by_node = d.pop("createdByNode", UNSET)
+ authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET))
- locked = d.pop("locked", UNSET)
+ _computation_definition = d.pop("computationDefinition", UNSET)
+ computation_definition: Union[Unset, ComputationDefinition]
+ if isinstance(_computation_definition, Unset):
+ computation_definition = UNSET
+ else:
+ computation_definition = ComputationDefinition.from_dict(_computation_definition)
- unrestricted_access = d.pop("unrestrictedAccess", UNSET)
+ run_async = d.pop("runAsync", UNSET)
_workflow_type = d.pop("workflowType", UNSET)
workflow_type: Union[Unset, WorkflowType]
@@ -268,34 +282,36 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
workflow_type = WorkflowType(_workflow_type)
- _computation_definition = d.pop("computationDefinition", UNSET)
- computation_definition: Union[Unset, ComputationDefinition]
- if isinstance(_computation_definition, Unset):
- computation_definition = UNSET
- else:
- computation_definition = ComputationDefinition.from_dict(_computation_definition)
+ non_contributor = d.pop("nonContributor", UNSET)
- shared = d.pop("shared", UNSET)
+ data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET)
- _policy = d.pop("policy", UNSET)
- policy: Union[Unset, ComputationPolicy]
- if isinstance(_policy, Unset):
- policy = UNSET
- else:
- policy = ComputationPolicy.from_dict(_policy)
+ end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET)
- created_by_user = d.pop("createdByUser", UNSET)
+ locked = d.pop("locked", UNSET)
+
+ query_timeout = d.pop("queryTimeout", UNSET)
+
+ data_source_id = d.pop("dataSourceId", UNSET)
name = d.pop("name", UNSET)
- network_id = d.pop("networkId", UNSET)
+ network_name = d.pop("networkName", UNSET)
- _query = d.pop("query", UNSET)
- query: Union[Unset, DataSourceQuery]
- if isinstance(_query, Unset):
- query = UNSET
+ created_by_user = d.pop("createdByUser", UNSET)
+
+ workflow_json = d.pop("workflowJSON", UNSET)
+
+ allow_clear_query = d.pop("allowClearQuery", UNSET)
+
+ _authorization_status = d.pop("authorizationStatus", UNSET)
+ authorization_status: Union[Unset, AuthorizationStatus]
+ if isinstance(_authorization_status, Unset):
+ authorization_status = UNSET
else:
- query = DataSourceQuery.from_dict(_query)
+ authorization_status = AuthorizationStatus(_authorization_status)
+
+ description = d.pop("description", UNSET)
_topology = d.pop("topology", UNSET)
topology: Union[Unset, Topology]
@@ -304,18 +320,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
topology = Topology(_topology)
- allow_clear_query = d.pop("allowClearQuery", UNSET)
-
- local = d.pop("local", UNSET)
-
- run_async = d.pop("runAsync", UNSET)
-
- unique_id = d.pop("uniqueId", UNSET)
-
- data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET)
-
- end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET)
-
hide_leaf_participants = d.pop("hideLeafParticipants", UNSET)
_local_data_selection_definition = d.pop("localDataSelectionDefinition", UNSET)
@@ -325,52 +329,56 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
local_data_selection_definition = LocalDataSelectionDefinition.from_dict(_local_data_selection_definition)
- min_contributors = d.pop("minContributors", UNSET)
-
- query_timeout = d.pop("queryTimeout", UNSET)
+ _query = d.pop("query", UNSET)
+ query: Union[Unset, DataSourceQuery]
+ if isinstance(_query, Unset):
+ query = UNSET
+ else:
+ query = DataSourceQuery.from_dict(_query)
- workflow_json = d.pop("workflowJSON", UNSET)
+ shared = d.pop("shared", UNSET)
- authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET))
+ unique_id = d.pop("uniqueId", UNSET)
- dpia = d.pop("dpia", UNSET)
+ min_contributors = d.pop("minContributors", UNSET)
- non_contributor = d.pop("nonContributor", UNSET)
+ network_id = d.pop("networkId", UNSET)
- data_source_id = d.pop("dataSourceId", UNSET)
+ unrestricted_access = d.pop("unrestrictedAccess", UNSET)
project_base = cls(
- authorization_status=authorization_status,
+ created_by_node=created_by_node,
created_with_client=created_with_client,
- description=description,
+ dpia=dpia,
+ local=local,
+ policy=policy,
allow_shared_edit=allow_shared_edit,
- created_by_node=created_by_node,
- locked=locked,
- unrestricted_access=unrestricted_access,
- workflow_type=workflow_type,
+ authorized_users=authorized_users,
computation_definition=computation_definition,
- shared=shared,
- policy=policy,
- created_by_user=created_by_user,
- name=name,
- network_id=network_id,
- query=query,
- topology=topology,
- allow_clear_query=allow_clear_query,
- local=local,
run_async=run_async,
- unique_id=unique_id,
+ workflow_type=workflow_type,
+ non_contributor=non_contributor,
data_source_auto_match=data_source_auto_match,
end_to_end_encrypted=end_to_end_encrypted,
+ locked=locked,
+ query_timeout=query_timeout,
+ data_source_id=data_source_id,
+ name=name,
+ network_name=network_name,
+ created_by_user=created_by_user,
+ workflow_json=workflow_json,
+ allow_clear_query=allow_clear_query,
+ authorization_status=authorization_status,
+ description=description,
+ topology=topology,
hide_leaf_participants=hide_leaf_participants,
local_data_selection_definition=local_data_selection_definition,
+ query=query,
+ shared=shared,
+ unique_id=unique_id,
min_contributors=min_contributors,
- query_timeout=query_timeout,
- workflow_json=workflow_json,
- authorized_users=authorized_users,
- dpia=dpia,
- non_contributor=non_contributor,
- data_source_id=data_source_id,
+ network_id=network_id,
+ unrestricted_access=unrestricted_access,
)
project_base.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/project_definition.py b/src/tuneinsight/api/sdk/models/project_definition.py
index 61d29f3..ae7491b 100644
--- a/src/tuneinsight/api/sdk/models/project_definition.py
+++ b/src/tuneinsight/api/sdk/models/project_definition.py
@@ -23,149 +23,152 @@
class ProjectDefinition:
"""
Attributes:
- authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
+ created_by_node (Union[Unset, str]): ID of node where the project was first created
created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API
- description (Union[Unset, None, str]):
+ dpia (Union[Unset, str]):
+ local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not
+ configured the network)
+ policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation
allow_shared_edit (Union[Unset, bool]): True if this project can be modified after being shared. Modifications
of a shared project will be broadcasted to the network
- created_by_node (Union[Unset, str]): ID of node where the project was first created
- locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared)
- unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are
- authorized to access the project (view / edit depends on the roles)
- workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend
+ authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project
computation_definition (Union[Unset, ComputationDefinition]): Generic computation.
- shared (Union[Unset, bool]): True if the project has once been shared across the participants
- policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation
- created_by_user (Union[Unset, str]): ID of user who created the project
- name (Union[Unset, str]):
- network_id (Union[Unset, str]): id to uniquely identify the network
- query (Union[Unset, DataSourceQuery]): schema used for the query
- topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are
- connected to a central node. In tree topology all nodes are connected and aware of each other.
- allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for
- a client to query the data source all participants of the project and return the clear text result
- local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not
- configured the network)
run_async (Union[Unset, bool]): flag indicating if computation should be run asynchronously
- unique_id (Union[Unset, str]): Unique identifier of a project.
+ workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend
+ non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the
+ distributed computations but does not have any input data.
+ By default this field is set according to the instance's configuration.
data_source_auto_match (Union[Unset, bool]): whether or not to automatically assign the first matching
datasource when the project is shared with other nodes
end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted
on the client side
+ locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared)
+ query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30.
+ data_source_id (Union[Unset, None, str]): Unique identifier of a data source.
+ name (Union[Unset, str]):
+ network_name (Union[Unset, str]): name of the network from the list of existing networks to link the project to.
+ created_by_user (Union[Unset, str]): ID of user who created the project
+ workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend
+ allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for
+ a client to query the data source all participants of the project and return the clear text result
+ authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project
+ description (Union[Unset, None, str]):
+ topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are
+ connected to a central node. In tree topology all nodes are connected and aware of each other.
hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf
participants when the project is in a star topology.
local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. A
selection is a "query" or data selection definition to run on the datasource
+ query (Union[Unset, DataSourceQuery]): schema used for the query
+ shared (Union[Unset, bool]): True if the project has once been shared across the participants
+ unique_id (Union[Unset, str]): Unique identifier of a project.
min_contributors (Union[Unset, None, int]): minimum number of participants that contribute with their data
required to run computations within this project
- query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30.
- workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend
- authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project
- dpia (Union[Unset, str]):
- non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the
- distributed computations but does not have any input data.
- By default this field is set according to the instance's configuration.
- data_source_id (Union[Unset, None, str]): Unique identifier of a data source.
+ network_id (Union[Unset, str]): id to uniquely identify the network
+ unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are
+ authorized to access the project (view / edit depends on the roles)
broadcast (Union[Unset, bool]): Temporary field. Always set to false. Only used for server-server communication
data_source_type (Union[Unset, DataSourceType]):
participants (Union[Unset, None, List[str]]): List of nodes involved in the project's collaboration
"""
- authorization_status: Union[Unset, AuthorizationStatus] = UNSET
+ created_by_node: Union[Unset, str] = UNSET
created_with_client: Union[Unset, Client] = UNSET
- description: Union[Unset, None, str] = UNSET
+ dpia: Union[Unset, str] = UNSET
+ local: Union[Unset, None, bool] = UNSET
+ policy: Union[Unset, "ComputationPolicy"] = UNSET
allow_shared_edit: Union[Unset, bool] = UNSET
- created_by_node: Union[Unset, str] = UNSET
- locked: Union[Unset, None, bool] = UNSET
- unrestricted_access: Union[Unset, None, bool] = UNSET
- workflow_type: Union[Unset, WorkflowType] = UNSET
+ authorized_users: Union[Unset, List[str]] = UNSET
computation_definition: Union[Unset, "ComputationDefinition"] = UNSET
- shared: Union[Unset, bool] = UNSET
- policy: Union[Unset, "ComputationPolicy"] = UNSET
- created_by_user: Union[Unset, str] = UNSET
- name: Union[Unset, str] = UNSET
- network_id: Union[Unset, str] = UNSET
- query: Union[Unset, "DataSourceQuery"] = UNSET
- topology: Union[Unset, Topology] = UNSET
- allow_clear_query: Union[Unset, bool] = UNSET
- local: Union[Unset, None, bool] = UNSET
run_async: Union[Unset, bool] = UNSET
- unique_id: Union[Unset, str] = UNSET
+ workflow_type: Union[Unset, WorkflowType] = UNSET
+ non_contributor: Union[Unset, None, bool] = UNSET
data_source_auto_match: Union[Unset, bool] = UNSET
end_to_end_encrypted: Union[Unset, None, bool] = UNSET
+ locked: Union[Unset, None, bool] = UNSET
+ query_timeout: Union[Unset, int] = 30
+ data_source_id: Union[Unset, None, str] = UNSET
+ name: Union[Unset, str] = UNSET
+ network_name: Union[Unset, str] = UNSET
+ created_by_user: Union[Unset, str] = UNSET
+ workflow_json: Union[Unset, str] = UNSET
+ allow_clear_query: Union[Unset, bool] = UNSET
+ authorization_status: Union[Unset, AuthorizationStatus] = UNSET
+ description: Union[Unset, None, str] = UNSET
+ topology: Union[Unset, Topology] = UNSET
hide_leaf_participants: Union[Unset, None, bool] = UNSET
local_data_selection_definition: Union[Unset, "LocalDataSelectionDefinition"] = UNSET
+ query: Union[Unset, "DataSourceQuery"] = UNSET
+ shared: Union[Unset, bool] = UNSET
+ unique_id: Union[Unset, str] = UNSET
min_contributors: Union[Unset, None, int] = UNSET
- query_timeout: Union[Unset, int] = 30
- workflow_json: Union[Unset, str] = UNSET
- authorized_users: Union[Unset, List[str]] = UNSET
- dpia: Union[Unset, str] = UNSET
- non_contributor: Union[Unset, None, bool] = UNSET
- data_source_id: Union[Unset, None, str] = UNSET
+ network_id: Union[Unset, str] = UNSET
+ unrestricted_access: Union[Unset, None, bool] = UNSET
broadcast: Union[Unset, bool] = UNSET
data_source_type: Union[Unset, DataSourceType] = UNSET
participants: Union[Unset, None, List[str]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- authorization_status: Union[Unset, str] = UNSET
- if not isinstance(self.authorization_status, Unset):
- authorization_status = self.authorization_status.value
-
+ created_by_node = self.created_by_node
created_with_client: Union[Unset, str] = UNSET
if not isinstance(self.created_with_client, Unset):
created_with_client = self.created_with_client.value
- description = self.description
+ dpia = self.dpia
+ local = self.local
+ policy: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.policy, Unset):
+ policy = self.policy.to_dict()
+
allow_shared_edit = self.allow_shared_edit
- created_by_node = self.created_by_node
- locked = self.locked
- unrestricted_access = self.unrestricted_access
- workflow_type: Union[Unset, str] = UNSET
- if not isinstance(self.workflow_type, Unset):
- workflow_type = self.workflow_type.value
+ authorized_users: Union[Unset, List[str]] = UNSET
+ if not isinstance(self.authorized_users, Unset):
+ authorized_users = self.authorized_users
computation_definition: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.computation_definition, Unset):
computation_definition = self.computation_definition.to_dict()
- shared = self.shared
- policy: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.policy, Unset):
- policy = self.policy.to_dict()
+ run_async = self.run_async
+ workflow_type: Union[Unset, str] = UNSET
+ if not isinstance(self.workflow_type, Unset):
+ workflow_type = self.workflow_type.value
- created_by_user = self.created_by_user
+ non_contributor = self.non_contributor
+ data_source_auto_match = self.data_source_auto_match
+ end_to_end_encrypted = self.end_to_end_encrypted
+ locked = self.locked
+ query_timeout = self.query_timeout
+ data_source_id = self.data_source_id
name = self.name
- network_id = self.network_id
- query: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.query, Unset):
- query = self.query.to_dict()
+ network_name = self.network_name
+ created_by_user = self.created_by_user
+ workflow_json = self.workflow_json
+ allow_clear_query = self.allow_clear_query
+ authorization_status: Union[Unset, str] = UNSET
+ if not isinstance(self.authorization_status, Unset):
+ authorization_status = self.authorization_status.value
+ description = self.description
topology: Union[Unset, str] = UNSET
if not isinstance(self.topology, Unset):
topology = self.topology.value
- allow_clear_query = self.allow_clear_query
- local = self.local
- run_async = self.run_async
- unique_id = self.unique_id
- data_source_auto_match = self.data_source_auto_match
- end_to_end_encrypted = self.end_to_end_encrypted
hide_leaf_participants = self.hide_leaf_participants
local_data_selection_definition: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.local_data_selection_definition, Unset):
local_data_selection_definition = self.local_data_selection_definition.to_dict()
- min_contributors = self.min_contributors
- query_timeout = self.query_timeout
- workflow_json = self.workflow_json
- authorized_users: Union[Unset, List[str]] = UNSET
- if not isinstance(self.authorized_users, Unset):
- authorized_users = self.authorized_users
+ query: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.query, Unset):
+ query = self.query.to_dict()
- dpia = self.dpia
- non_contributor = self.non_contributor
- data_source_id = self.data_source_id
+ shared = self.shared
+ unique_id = self.unique_id
+ min_contributors = self.min_contributors
+ network_id = self.network_id
+ unrestricted_access = self.unrestricted_access
broadcast = self.broadcast
data_source_type: Union[Unset, str] = UNSET
if not isinstance(self.data_source_type, Unset):
@@ -181,68 +184,70 @@ def to_dict(self) -> Dict[str, Any]:
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if authorization_status is not UNSET:
- field_dict["authorizationStatus"] = authorization_status
+ if created_by_node is not UNSET:
+ field_dict["createdByNode"] = created_by_node
if created_with_client is not UNSET:
field_dict["createdWithClient"] = created_with_client
- if description is not UNSET:
- field_dict["description"] = description
+ if dpia is not UNSET:
+ field_dict["dpia"] = dpia
+ if local is not UNSET:
+ field_dict["local"] = local
+ if policy is not UNSET:
+ field_dict["policy"] = policy
if allow_shared_edit is not UNSET:
field_dict["allowSharedEdit"] = allow_shared_edit
- if created_by_node is not UNSET:
- field_dict["createdByNode"] = created_by_node
- if locked is not UNSET:
- field_dict["locked"] = locked
- if unrestricted_access is not UNSET:
- field_dict["unrestrictedAccess"] = unrestricted_access
- if workflow_type is not UNSET:
- field_dict["workflowType"] = workflow_type
+ if authorized_users is not UNSET:
+ field_dict["authorizedUsers"] = authorized_users
if computation_definition is not UNSET:
field_dict["computationDefinition"] = computation_definition
- if shared is not UNSET:
- field_dict["shared"] = shared
- if policy is not UNSET:
- field_dict["policy"] = policy
- if created_by_user is not UNSET:
- field_dict["createdByUser"] = created_by_user
- if name is not UNSET:
- field_dict["name"] = name
- if network_id is not UNSET:
- field_dict["networkId"] = network_id
- if query is not UNSET:
- field_dict["query"] = query
- if topology is not UNSET:
- field_dict["topology"] = topology
- if allow_clear_query is not UNSET:
- field_dict["allowClearQuery"] = allow_clear_query
- if local is not UNSET:
- field_dict["local"] = local
if run_async is not UNSET:
field_dict["runAsync"] = run_async
- if unique_id is not UNSET:
- field_dict["uniqueId"] = unique_id
+ if workflow_type is not UNSET:
+ field_dict["workflowType"] = workflow_type
+ if non_contributor is not UNSET:
+ field_dict["nonContributor"] = non_contributor
if data_source_auto_match is not UNSET:
field_dict["dataSourceAutoMatch"] = data_source_auto_match
if end_to_end_encrypted is not UNSET:
field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if locked is not UNSET:
+ field_dict["locked"] = locked
+ if query_timeout is not UNSET:
+ field_dict["queryTimeout"] = query_timeout
+ if data_source_id is not UNSET:
+ field_dict["dataSourceId"] = data_source_id
+ if name is not UNSET:
+ field_dict["name"] = name
+ if network_name is not UNSET:
+ field_dict["networkName"] = network_name
+ if created_by_user is not UNSET:
+ field_dict["createdByUser"] = created_by_user
+ if workflow_json is not UNSET:
+ field_dict["workflowJSON"] = workflow_json
+ if allow_clear_query is not UNSET:
+ field_dict["allowClearQuery"] = allow_clear_query
+ if authorization_status is not UNSET:
+ field_dict["authorizationStatus"] = authorization_status
+ if description is not UNSET:
+ field_dict["description"] = description
+ if topology is not UNSET:
+ field_dict["topology"] = topology
if hide_leaf_participants is not UNSET:
field_dict["hideLeafParticipants"] = hide_leaf_participants
if local_data_selection_definition is not UNSET:
field_dict["localDataSelectionDefinition"] = local_data_selection_definition
+ if query is not UNSET:
+ field_dict["query"] = query
+ if shared is not UNSET:
+ field_dict["shared"] = shared
+ if unique_id is not UNSET:
+ field_dict["uniqueId"] = unique_id
if min_contributors is not UNSET:
field_dict["minContributors"] = min_contributors
- if query_timeout is not UNSET:
- field_dict["queryTimeout"] = query_timeout
- if workflow_json is not UNSET:
- field_dict["workflowJSON"] = workflow_json
- if authorized_users is not UNSET:
- field_dict["authorizedUsers"] = authorized_users
- if dpia is not UNSET:
- field_dict["dpia"] = dpia
- if non_contributor is not UNSET:
- field_dict["nonContributor"] = non_contributor
- if data_source_id is not UNSET:
- field_dict["dataSourceId"] = data_source_id
+ if network_id is not UNSET:
+ field_dict["networkId"] = network_id
+ if unrestricted_access is not UNSET:
+ field_dict["unrestrictedAccess"] = unrestricted_access
if broadcast is not UNSET:
field_dict["broadcast"] = broadcast
if data_source_type is not UNSET:
@@ -260,12 +265,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.local_data_selection_definition import LocalDataSelectionDefinition
d = src_dict.copy()
- _authorization_status = d.pop("authorizationStatus", UNSET)
- authorization_status: Union[Unset, AuthorizationStatus]
- if isinstance(_authorization_status, Unset):
- authorization_status = UNSET
- else:
- authorization_status = AuthorizationStatus(_authorization_status)
+ created_by_node = d.pop("createdByNode", UNSET)
_created_with_client = d.pop("createdWithClient", UNSET)
created_with_client: Union[Unset, Client]
@@ -274,15 +274,29 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
created_with_client = Client(_created_with_client)
- description = d.pop("description", UNSET)
+ dpia = d.pop("dpia", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ _policy = d.pop("policy", UNSET)
+ policy: Union[Unset, ComputationPolicy]
+ if isinstance(_policy, Unset):
+ policy = UNSET
+ else:
+ policy = ComputationPolicy.from_dict(_policy)
allow_shared_edit = d.pop("allowSharedEdit", UNSET)
- created_by_node = d.pop("createdByNode", UNSET)
+ authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET))
- locked = d.pop("locked", UNSET)
+ _computation_definition = d.pop("computationDefinition", UNSET)
+ computation_definition: Union[Unset, ComputationDefinition]
+ if isinstance(_computation_definition, Unset):
+ computation_definition = UNSET
+ else:
+ computation_definition = ComputationDefinition.from_dict(_computation_definition)
- unrestricted_access = d.pop("unrestrictedAccess", UNSET)
+ run_async = d.pop("runAsync", UNSET)
_workflow_type = d.pop("workflowType", UNSET)
workflow_type: Union[Unset, WorkflowType]
@@ -291,34 +305,36 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
workflow_type = WorkflowType(_workflow_type)
- _computation_definition = d.pop("computationDefinition", UNSET)
- computation_definition: Union[Unset, ComputationDefinition]
- if isinstance(_computation_definition, Unset):
- computation_definition = UNSET
- else:
- computation_definition = ComputationDefinition.from_dict(_computation_definition)
+ non_contributor = d.pop("nonContributor", UNSET)
- shared = d.pop("shared", UNSET)
+ data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET)
- _policy = d.pop("policy", UNSET)
- policy: Union[Unset, ComputationPolicy]
- if isinstance(_policy, Unset):
- policy = UNSET
- else:
- policy = ComputationPolicy.from_dict(_policy)
+ end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET)
- created_by_user = d.pop("createdByUser", UNSET)
+ locked = d.pop("locked", UNSET)
+
+ query_timeout = d.pop("queryTimeout", UNSET)
+
+ data_source_id = d.pop("dataSourceId", UNSET)
name = d.pop("name", UNSET)
- network_id = d.pop("networkId", UNSET)
+ network_name = d.pop("networkName", UNSET)
- _query = d.pop("query", UNSET)
- query: Union[Unset, DataSourceQuery]
- if isinstance(_query, Unset):
- query = UNSET
+ created_by_user = d.pop("createdByUser", UNSET)
+
+ workflow_json = d.pop("workflowJSON", UNSET)
+
+ allow_clear_query = d.pop("allowClearQuery", UNSET)
+
+ _authorization_status = d.pop("authorizationStatus", UNSET)
+ authorization_status: Union[Unset, AuthorizationStatus]
+ if isinstance(_authorization_status, Unset):
+ authorization_status = UNSET
else:
- query = DataSourceQuery.from_dict(_query)
+ authorization_status = AuthorizationStatus(_authorization_status)
+
+ description = d.pop("description", UNSET)
_topology = d.pop("topology", UNSET)
topology: Union[Unset, Topology]
@@ -327,18 +343,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
topology = Topology(_topology)
- allow_clear_query = d.pop("allowClearQuery", UNSET)
-
- local = d.pop("local", UNSET)
-
- run_async = d.pop("runAsync", UNSET)
-
- unique_id = d.pop("uniqueId", UNSET)
-
- data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET)
-
- end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET)
-
hide_leaf_participants = d.pop("hideLeafParticipants", UNSET)
_local_data_selection_definition = d.pop("localDataSelectionDefinition", UNSET)
@@ -348,19 +352,22 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
local_data_selection_definition = LocalDataSelectionDefinition.from_dict(_local_data_selection_definition)
- min_contributors = d.pop("minContributors", UNSET)
-
- query_timeout = d.pop("queryTimeout", UNSET)
+ _query = d.pop("query", UNSET)
+ query: Union[Unset, DataSourceQuery]
+ if isinstance(_query, Unset):
+ query = UNSET
+ else:
+ query = DataSourceQuery.from_dict(_query)
- workflow_json = d.pop("workflowJSON", UNSET)
+ shared = d.pop("shared", UNSET)
- authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET))
+ unique_id = d.pop("uniqueId", UNSET)
- dpia = d.pop("dpia", UNSET)
+ min_contributors = d.pop("minContributors", UNSET)
- non_contributor = d.pop("nonContributor", UNSET)
+ network_id = d.pop("networkId", UNSET)
- data_source_id = d.pop("dataSourceId", UNSET)
+ unrestricted_access = d.pop("unrestrictedAccess", UNSET)
broadcast = d.pop("broadcast", UNSET)
@@ -374,37 +381,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
participants = cast(List[str], d.pop("participants", UNSET))
project_definition = cls(
- authorization_status=authorization_status,
+ created_by_node=created_by_node,
created_with_client=created_with_client,
- description=description,
+ dpia=dpia,
+ local=local,
+ policy=policy,
allow_shared_edit=allow_shared_edit,
- created_by_node=created_by_node,
- locked=locked,
- unrestricted_access=unrestricted_access,
- workflow_type=workflow_type,
+ authorized_users=authorized_users,
computation_definition=computation_definition,
- shared=shared,
- policy=policy,
- created_by_user=created_by_user,
- name=name,
- network_id=network_id,
- query=query,
- topology=topology,
- allow_clear_query=allow_clear_query,
- local=local,
run_async=run_async,
- unique_id=unique_id,
+ workflow_type=workflow_type,
+ non_contributor=non_contributor,
data_source_auto_match=data_source_auto_match,
end_to_end_encrypted=end_to_end_encrypted,
+ locked=locked,
+ query_timeout=query_timeout,
+ data_source_id=data_source_id,
+ name=name,
+ network_name=network_name,
+ created_by_user=created_by_user,
+ workflow_json=workflow_json,
+ allow_clear_query=allow_clear_query,
+ authorization_status=authorization_status,
+ description=description,
+ topology=topology,
hide_leaf_participants=hide_leaf_participants,
local_data_selection_definition=local_data_selection_definition,
+ query=query,
+ shared=shared,
+ unique_id=unique_id,
min_contributors=min_contributors,
- query_timeout=query_timeout,
- workflow_json=workflow_json,
- authorized_users=authorized_users,
- dpia=dpia,
- non_contributor=non_contributor,
- data_source_id=data_source_id,
+ network_id=network_id,
+ unrestricted_access=unrestricted_access,
broadcast=broadcast,
data_source_type=data_source_type,
participants=participants,
diff --git a/src/tuneinsight/api/sdk/models/query.py b/src/tuneinsight/api/sdk/models/query.py
index 2293b07..08786cc 100644
--- a/src/tuneinsight/api/sdk/models/query.py
+++ b/src/tuneinsight/api/sdk/models/query.py
@@ -17,66 +17,66 @@ class Query:
"""Data source query
Attributes:
+ created_at (Union[Unset, str]):
query_string (Union[Unset, str]): String of the query e.g. SQL or JSON
results (Union[Unset, QueryResults]): result dataobject IDs
- error (Union[Unset, str]): Error message, in case status of the query is error.
+ status (Union[Unset, QueryStatus]):
+ updated_at (Union[Unset, str]):
created_by_user (Union[Unset, str]): ID of user who created the project
+ error (Union[Unset, str]): Error message, in case status of the query is error.
id (Union[Unset, str]):
project_id (Union[Unset, str]): Unique identifier of a project.
- status (Union[Unset, QueryStatus]):
- updated_at (Union[Unset, str]):
- created_at (Union[Unset, str]):
"""
+ created_at: Union[Unset, str] = UNSET
query_string: Union[Unset, str] = UNSET
results: Union[Unset, "QueryResults"] = UNSET
- error: Union[Unset, str] = UNSET
+ status: Union[Unset, QueryStatus] = UNSET
+ updated_at: Union[Unset, str] = UNSET
created_by_user: Union[Unset, str] = UNSET
+ error: Union[Unset, str] = UNSET
id: Union[Unset, str] = UNSET
project_id: Union[Unset, str] = UNSET
- status: Union[Unset, QueryStatus] = UNSET
- updated_at: Union[Unset, str] = UNSET
- created_at: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ created_at = self.created_at
query_string = self.query_string
results: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.results, Unset):
results = self.results.to_dict()
- error = self.error
- created_by_user = self.created_by_user
- id = self.id
- project_id = self.project_id
status: Union[Unset, str] = UNSET
if not isinstance(self.status, Unset):
status = self.status.value
updated_at = self.updated_at
- created_at = self.created_at
+ created_by_user = self.created_by_user
+ error = self.error
+ id = self.id
+ project_id = self.project_id
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if created_at is not UNSET:
+ field_dict["createdAt"] = created_at
if query_string is not UNSET:
field_dict["queryString"] = query_string
if results is not UNSET:
field_dict["results"] = results
- if error is not UNSET:
- field_dict["error"] = error
+ if status is not UNSET:
+ field_dict["status"] = status
+ if updated_at is not UNSET:
+ field_dict["updatedAt"] = updated_at
if created_by_user is not UNSET:
field_dict["createdByUser"] = created_by_user
+ if error is not UNSET:
+ field_dict["error"] = error
if id is not UNSET:
field_dict["id"] = id
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if status is not UNSET:
- field_dict["status"] = status
- if updated_at is not UNSET:
- field_dict["updatedAt"] = updated_at
- if created_at is not UNSET:
- field_dict["createdAt"] = created_at
return field_dict
@@ -85,6 +85,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.query_results import QueryResults
d = src_dict.copy()
+ created_at = d.pop("createdAt", UNSET)
+
query_string = d.pop("queryString", UNSET)
_results = d.pop("results", UNSET)
@@ -94,14 +96,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
results = QueryResults.from_dict(_results)
- error = d.pop("error", UNSET)
-
- created_by_user = d.pop("createdByUser", UNSET)
-
- id = d.pop("id", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_status = d.pop("status", UNSET)
status: Union[Unset, QueryStatus]
if isinstance(_status, Unset):
@@ -111,18 +105,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
updated_at = d.pop("updatedAt", UNSET)
- created_at = d.pop("createdAt", UNSET)
+ created_by_user = d.pop("createdByUser", UNSET)
+
+ error = d.pop("error", UNSET)
+
+ id = d.pop("id", UNSET)
+
+ project_id = d.pop("projectId", UNSET)
query = cls(
+ created_at=created_at,
query_string=query_string,
results=results,
- error=error,
+ status=status,
+ updated_at=updated_at,
created_by_user=created_by_user,
+ error=error,
id=id,
project_id=project_id,
- status=status,
- updated_at=updated_at,
- created_at=created_at,
)
query.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/realm_role.py b/src/tuneinsight/api/sdk/models/realm_role.py
index 6952149..eab8cd5 100644
--- a/src/tuneinsight/api/sdk/models/realm_role.py
+++ b/src/tuneinsight/api/sdk/models/realm_role.py
@@ -11,37 +11,33 @@
class RealmRole:
"""
Attributes:
- client_role (Union[Unset, bool]):
- composite (Union[Unset, bool]):
container_id (Union[Unset, str]):
description (Union[Unset, str]):
id (Union[Unset, str]):
name (Union[Unset, str]):
+ client_role (Union[Unset, bool]):
+ composite (Union[Unset, bool]):
"""
- client_role: Union[Unset, bool] = UNSET
- composite: Union[Unset, bool] = UNSET
container_id: Union[Unset, str] = UNSET
description: Union[Unset, str] = UNSET
id: Union[Unset, str] = UNSET
name: Union[Unset, str] = UNSET
+ client_role: Union[Unset, bool] = UNSET
+ composite: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- client_role = self.client_role
- composite = self.composite
container_id = self.container_id
description = self.description
id = self.id
name = self.name
+ client_role = self.client_role
+ composite = self.composite
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if client_role is not UNSET:
- field_dict["clientRole"] = client_role
- if composite is not UNSET:
- field_dict["composite"] = composite
if container_id is not UNSET:
field_dict["containerId"] = container_id
if description is not UNSET:
@@ -50,16 +46,16 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["id"] = id
if name is not UNSET:
field_dict["name"] = name
+ if client_role is not UNSET:
+ field_dict["clientRole"] = client_role
+ if composite is not UNSET:
+ field_dict["composite"] = composite
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- client_role = d.pop("clientRole", UNSET)
-
- composite = d.pop("composite", UNSET)
-
container_id = d.pop("containerId", UNSET)
description = d.pop("description", UNSET)
@@ -68,13 +64,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
name = d.pop("name", UNSET)
+ client_role = d.pop("clientRole", UNSET)
+
+ composite = d.pop("composite", UNSET)
+
realm_role = cls(
- client_role=client_role,
- composite=composite,
container_id=container_id,
description=description,
id=id,
name=name,
+ client_role=client_role,
+ composite=composite,
)
realm_role.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/relin_key_gen.py b/src/tuneinsight/api/sdk/models/relin_key_gen.py
index ce055f4..4700312 100644
--- a/src/tuneinsight/api/sdk/models/relin_key_gen.py
+++ b/src/tuneinsight/api/sdk/models/relin_key_gen.py
@@ -22,8 +22,12 @@ class RelinKeyGen:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class RelinKeyGen:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class RelinKeyGen:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,93 +65,79 @@ class RelinKeyGen:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -147,46 +146,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
return field_dict
@@ -200,14 +199,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -216,10 +208,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -227,6 +230,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -238,19 +255,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -258,40 +262,35 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
relin_key_gen = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
)
relin_key_gen.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/reset_index.py b/src/tuneinsight/api/sdk/models/reset_index.py
index 7bb8d4f..ac493fd 100644
--- a/src/tuneinsight/api/sdk/models/reset_index.py
+++ b/src/tuneinsight/api/sdk/models/reset_index.py
@@ -13,23 +13,24 @@ class ResetIndex:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
- drop (Union[Unset, bool]): whether to drop the index as a column
level (Union[Unset, List[str]]): which levels to remove from the index (all by default)
+ drop (Union[Unset, bool]): whether to drop the index as a column
"""
type: PreprocessingOperationType
- drop: Union[Unset, bool] = UNSET
level: Union[Unset, List[str]] = UNSET
+ drop: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- drop = self.drop
level: Union[Unset, List[str]] = UNSET
if not isinstance(self.level, Unset):
level = self.level
+ drop = self.drop
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
@@ -37,10 +38,10 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if drop is not UNSET:
- field_dict["drop"] = drop
if level is not UNSET:
field_dict["level"] = level
+ if drop is not UNSET:
+ field_dict["drop"] = drop
return field_dict
@@ -49,14 +50,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
- drop = d.pop("drop", UNSET)
-
level = cast(List[str], d.pop("level", UNSET))
+ drop = d.pop("drop", UNSET)
+
reset_index = cls(
type=type,
- drop=drop,
level=level,
+ drop=drop,
)
reset_index.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/result.py b/src/tuneinsight/api/sdk/models/result.py
index 0eff13f..ee13fbd 100644
--- a/src/tuneinsight/api/sdk/models/result.py
+++ b/src/tuneinsight/api/sdk/models/result.py
@@ -20,39 +20,39 @@ class Result:
shared (Union[Unset, None, bool]): if set to true, the result is shared with users from the same project
tags (Union[Unset, List[str]]):
title (Union[Unset, str]): title given to the result
- updated_at (Union[Unset, str]):
- collective_encrypted (Union[Unset, None, bool]):
+ created_at (Union[Unset, str]):
+ data_object_id (Union[Unset, str]): Unique identifier of a data object.
end_to_end_encrypted (Union[Unset, bool]):
metadata (Union[Unset, ResultMetadata]): various metadata field along with the result to provide additional
context
- switching_key_id (Union[Unset, str]): Unique identifier of a data object.
- switching_params (Union[Unset, str]):
original_ciphertext_id (Union[Unset, str]): Unique identifier of a data object.
owner (Union[Unset, str]):
+ updated_at (Union[Unset, str]):
+ collective_encrypted (Union[Unset, None, bool]):
computation_id (Union[Unset, str]): Identifier of a computation, unique across all computing nodes.
computation_type (Union[Unset, ComputationType]): Type of the computation.
- created_at (Union[Unset, str]):
- data_object_id (Union[Unset, str]): Unique identifier of a data object.
id (Union[Unset, str]): Unique identifier of a result.
+ switching_key_id (Union[Unset, str]): Unique identifier of a data object.
+ switching_params (Union[Unset, str]):
"""
is_large: Union[Unset, None, bool] = UNSET
shared: Union[Unset, None, bool] = UNSET
tags: Union[Unset, List[str]] = UNSET
title: Union[Unset, str] = UNSET
- updated_at: Union[Unset, str] = UNSET
- collective_encrypted: Union[Unset, None, bool] = UNSET
+ created_at: Union[Unset, str] = UNSET
+ data_object_id: Union[Unset, str] = UNSET
end_to_end_encrypted: Union[Unset, bool] = UNSET
metadata: Union[Unset, "ResultMetadata"] = UNSET
- switching_key_id: Union[Unset, str] = UNSET
- switching_params: Union[Unset, str] = UNSET
original_ciphertext_id: Union[Unset, str] = UNSET
owner: Union[Unset, str] = UNSET
+ updated_at: Union[Unset, str] = UNSET
+ collective_encrypted: Union[Unset, None, bool] = UNSET
computation_id: Union[Unset, str] = UNSET
computation_type: Union[Unset, ComputationType] = UNSET
- created_at: Union[Unset, str] = UNSET
- data_object_id: Union[Unset, str] = UNSET
id: Union[Unset, str] = UNSET
+ switching_key_id: Union[Unset, str] = UNSET
+ switching_params: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
@@ -63,25 +63,25 @@ def to_dict(self) -> Dict[str, Any]:
tags = self.tags
title = self.title
- updated_at = self.updated_at
- collective_encrypted = self.collective_encrypted
+ created_at = self.created_at
+ data_object_id = self.data_object_id
end_to_end_encrypted = self.end_to_end_encrypted
metadata: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.metadata, Unset):
metadata = self.metadata.to_dict()
- switching_key_id = self.switching_key_id
- switching_params = self.switching_params
original_ciphertext_id = self.original_ciphertext_id
owner = self.owner
+ updated_at = self.updated_at
+ collective_encrypted = self.collective_encrypted
computation_id = self.computation_id
computation_type: Union[Unset, str] = UNSET
if not isinstance(self.computation_type, Unset):
computation_type = self.computation_type.value
- created_at = self.created_at
- data_object_id = self.data_object_id
id = self.id
+ switching_key_id = self.switching_key_id
+ switching_params = self.switching_params
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -94,32 +94,32 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["tags"] = tags
if title is not UNSET:
field_dict["title"] = title
- if updated_at is not UNSET:
- field_dict["updatedAt"] = updated_at
- if collective_encrypted is not UNSET:
- field_dict["collectiveEncrypted"] = collective_encrypted
+ if created_at is not UNSET:
+ field_dict["createdAt"] = created_at
+ if data_object_id is not UNSET:
+ field_dict["dataObjectId"] = data_object_id
if end_to_end_encrypted is not UNSET:
field_dict["endToEndEncrypted"] = end_to_end_encrypted
if metadata is not UNSET:
field_dict["metadata"] = metadata
- if switching_key_id is not UNSET:
- field_dict["switchingKeyId"] = switching_key_id
- if switching_params is not UNSET:
- field_dict["switchingParams"] = switching_params
if original_ciphertext_id is not UNSET:
field_dict["originalCiphertextID"] = original_ciphertext_id
if owner is not UNSET:
field_dict["owner"] = owner
+ if updated_at is not UNSET:
+ field_dict["updatedAt"] = updated_at
+ if collective_encrypted is not UNSET:
+ field_dict["collectiveEncrypted"] = collective_encrypted
if computation_id is not UNSET:
field_dict["computationId"] = computation_id
if computation_type is not UNSET:
field_dict["computationType"] = computation_type
- if created_at is not UNSET:
- field_dict["createdAt"] = created_at
- if data_object_id is not UNSET:
- field_dict["dataObjectId"] = data_object_id
if id is not UNSET:
field_dict["id"] = id
+ if switching_key_id is not UNSET:
+ field_dict["switchingKeyId"] = switching_key_id
+ if switching_params is not UNSET:
+ field_dict["switchingParams"] = switching_params
return field_dict
@@ -136,9 +136,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
title = d.pop("title", UNSET)
- updated_at = d.pop("updatedAt", UNSET)
+ created_at = d.pop("createdAt", UNSET)
- collective_encrypted = d.pop("collectiveEncrypted", UNSET)
+ data_object_id = d.pop("dataObjectId", UNSET)
end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET)
@@ -149,14 +149,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
metadata = ResultMetadata.from_dict(_metadata)
- switching_key_id = d.pop("switchingKeyId", UNSET)
-
- switching_params = d.pop("switchingParams", UNSET)
-
original_ciphertext_id = d.pop("originalCiphertextID", UNSET)
owner = d.pop("owner", UNSET)
+ updated_at = d.pop("updatedAt", UNSET)
+
+ collective_encrypted = d.pop("collectiveEncrypted", UNSET)
+
computation_id = d.pop("computationId", UNSET)
_computation_type = d.pop("computationType", UNSET)
@@ -166,30 +166,30 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
computation_type = ComputationType(_computation_type)
- created_at = d.pop("createdAt", UNSET)
+ id = d.pop("id", UNSET)
- data_object_id = d.pop("dataObjectId", UNSET)
+ switching_key_id = d.pop("switchingKeyId", UNSET)
- id = d.pop("id", UNSET)
+ switching_params = d.pop("switchingParams", UNSET)
result = cls(
is_large=is_large,
shared=shared,
tags=tags,
title=title,
- updated_at=updated_at,
- collective_encrypted=collective_encrypted,
+ created_at=created_at,
+ data_object_id=data_object_id,
end_to_end_encrypted=end_to_end_encrypted,
metadata=metadata,
- switching_key_id=switching_key_id,
- switching_params=switching_params,
original_ciphertext_id=original_ciphertext_id,
owner=owner,
+ updated_at=updated_at,
+ collective_encrypted=collective_encrypted,
computation_id=computation_id,
computation_type=computation_type,
- created_at=created_at,
- data_object_id=data_object_id,
id=id,
+ switching_key_id=switching_key_id,
+ switching_params=switching_params,
)
result.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/rot_key_gen.py b/src/tuneinsight/api/sdk/models/rot_key_gen.py
index 59cb243..7819144 100644
--- a/src/tuneinsight/api/sdk/models/rot_key_gen.py
+++ b/src/tuneinsight/api/sdk/models/rot_key_gen.py
@@ -23,8 +23,12 @@ class RotKeyGen:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class RotKeyGen:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class RotKeyGen:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,95 +66,82 @@ class RotKeyGen:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
rotations (Union[Unset, List['RotKeyGenRotationsItem']]):
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
rotations: Union[Unset, List["RotKeyGenRotationsItem"]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
rotations: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.rotations, Unset):
rotations = []
@@ -157,46 +157,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if rotations is not UNSET:
field_dict["rotations"] = rotations
@@ -213,14 +213,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -229,10 +222,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -240,6 +244,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -251,19 +269,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -271,17 +276,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
rotations = []
_rotations = d.pop("rotations", UNSET)
@@ -292,26 +292,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
rot_key_gen = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
rotations=rotations,
)
diff --git a/src/tuneinsight/api/sdk/models/rot_key_gen_rotations_item.py b/src/tuneinsight/api/sdk/models/rot_key_gen_rotations_item.py
index d031113..89ca32a 100644
--- a/src/tuneinsight/api/sdk/models/rot_key_gen_rotations_item.py
+++ b/src/tuneinsight/api/sdk/models/rot_key_gen_rotations_item.py
@@ -11,38 +11,38 @@
class RotKeyGenRotationsItem:
"""
Attributes:
- value (Union[Unset, int]):
side (Union[Unset, bool]):
+ value (Union[Unset, int]):
"""
- value: Union[Unset, int] = UNSET
side: Union[Unset, bool] = UNSET
+ value: Union[Unset, int] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- value = self.value
side = self.side
+ value = self.value
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if value is not UNSET:
- field_dict["value"] = value
if side is not UNSET:
field_dict["side"] = side
+ if value is not UNSET:
+ field_dict["value"] = value
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- value = d.pop("value", UNSET)
-
side = d.pop("side", UNSET)
+ value = d.pop("value", UNSET)
+
rot_key_gen_rotations_item = cls(
- value=value,
side=side,
+ value=value,
)
rot_key_gen_rotations_item.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/run_project_parameters.py b/src/tuneinsight/api/sdk/models/run_project_parameters.py
index 41c52be..7eef4ff 100644
--- a/src/tuneinsight/api/sdk/models/run_project_parameters.py
+++ b/src/tuneinsight/api/sdk/models/run_project_parameters.py
@@ -17,18 +17,17 @@ class RunProjectParameters:
"""parameters used to launch the project with.
Attributes:
- wait (Union[Unset, None, bool]): whether to run the computation synchronously
computation_definition (Union[Unset, ComputationDefinition]): Generic computation.
run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ wait (Union[Unset, None, bool]): whether to run the computation synchronously
"""
- wait: Union[Unset, None, bool] = UNSET
computation_definition: Union[Unset, "ComputationDefinition"] = UNSET
run_mode: Union[Unset, RunMode] = UNSET
+ wait: Union[Unset, None, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- wait = self.wait
computation_definition: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.computation_definition, Unset):
computation_definition = self.computation_definition.to_dict()
@@ -37,15 +36,17 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
+ wait = self.wait
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if wait is not UNSET:
- field_dict["wait"] = wait
if computation_definition is not UNSET:
field_dict["computationDefinition"] = computation_definition
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
+ if wait is not UNSET:
+ field_dict["wait"] = wait
return field_dict
@@ -54,8 +55,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.computation_definition import ComputationDefinition
d = src_dict.copy()
- wait = d.pop("wait", UNSET)
-
_computation_definition = d.pop("computationDefinition", UNSET)
computation_definition: Union[Unset, ComputationDefinition]
if isinstance(_computation_definition, Unset):
@@ -70,10 +69,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
+ wait = d.pop("wait", UNSET)
+
run_project_parameters = cls(
- wait=wait,
computation_definition=computation_definition,
run_mode=run_mode,
+ wait=wait,
)
run_project_parameters.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/sample_extraction.py b/src/tuneinsight/api/sdk/models/sample_extraction.py
index 137e7b8..0713d4c 100644
--- a/src/tuneinsight/api/sdk/models/sample_extraction.py
+++ b/src/tuneinsight/api/sdk/models/sample_extraction.py
@@ -22,8 +22,12 @@ class SampleExtraction:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -34,8 +38,7 @@ class SampleExtraction:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -43,8 +46,18 @@ class SampleExtraction:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -52,99 +65,86 @@ class SampleExtraction:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
- sample_size (Union[Unset, int]): size of the sample as number of rows
seed (Union[Unset, str]): seed to use for the sampling
+ sample_size (Union[Unset, int]): size of the sample as number of rows
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
- sample_size: Union[Unset, int] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
seed: Union[Unset, str] = UNSET
+ sample_size: Union[Unset, int] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
- sample_size = self.sample_size
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
seed = self.seed
+ sample_size = self.sample_size
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -153,50 +153,50 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
- if sample_size is not UNSET:
- field_dict["sampleSize"] = sample_size
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if seed is not UNSET:
field_dict["seed"] = seed
+ if sample_size is not UNSET:
+ field_dict["sampleSize"] = sample_size
return field_dict
@@ -210,14 +210,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -226,10 +219,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -237,6 +241,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -248,19 +266,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -268,46 +273,41 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
- wait = d.pop("wait", UNSET)
+ seed = d.pop("seed", UNSET)
sample_size = d.pop("sampleSize", UNSET)
- seed = d.pop("seed", UNSET)
-
sample_extraction = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
- sample_size=sample_size,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
seed=seed,
+ sample_size=sample_size,
)
sample_extraction.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/session.py b/src/tuneinsight/api/sdk/models/session.py
index 238bb9e..7469c43 100644
--- a/src/tuneinsight/api/sdk/models/session.py
+++ b/src/tuneinsight/api/sdk/models/session.py
@@ -12,32 +12,30 @@ class Session:
"""basic information about a session returned from POST/GET
Attributes:
- network_id (Union[Unset, str]): network of the session
params (Union[Unset, str]): b64 encoded marshaled parameters
scheme (Union[Unset, str]): cryptographic scheme used, comes from the cryptolib
collective_key (Union[Unset, str]): Unique identifier of a data object.
id (Union[Unset, str]): Unique identifier of a session
+ network_id (Union[Unset, str]): network of the session
"""
- network_id: Union[Unset, str] = UNSET
params: Union[Unset, str] = UNSET
scheme: Union[Unset, str] = UNSET
collective_key: Union[Unset, str] = UNSET
id: Union[Unset, str] = UNSET
+ network_id: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- network_id = self.network_id
params = self.params
scheme = self.scheme
collective_key = self.collective_key
id = self.id
+ network_id = self.network_id
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if network_id is not UNSET:
- field_dict["networkId"] = network_id
if params is not UNSET:
field_dict["params"] = params
if scheme is not UNSET:
@@ -46,14 +44,14 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["collectiveKey"] = collective_key
if id is not UNSET:
field_dict["id"] = id
+ if network_id is not UNSET:
+ field_dict["networkId"] = network_id
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- network_id = d.pop("networkId", UNSET)
-
params = d.pop("params", UNSET)
scheme = d.pop("scheme", UNSET)
@@ -62,12 +60,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
id = d.pop("id", UNSET)
+ network_id = d.pop("networkId", UNSET)
+
session = cls(
- network_id=network_id,
params=params,
scheme=scheme,
collective_key=collective_key,
id=id,
+ network_id=network_id,
)
session.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/set_index.py b/src/tuneinsight/api/sdk/models/set_index.py
index d967ee2..f7214eb 100644
--- a/src/tuneinsight/api/sdk/models/set_index.py
+++ b/src/tuneinsight/api/sdk/models/set_index.py
@@ -13,27 +13,26 @@ class SetIndex:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
+ drop (Union[Unset, bool]): Delete columns to be used as the new index
append (Union[Unset, bool]): Whether to append columns to existing index
cols (Union[Unset, List[str]]): column(s) to use as index
- drop (Union[Unset, bool]): Delete columns to be used as the new index
"""
type: PreprocessingOperationType
+ drop: Union[Unset, bool] = UNSET
append: Union[Unset, bool] = UNSET
cols: Union[Unset, List[str]] = UNSET
- drop: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
+ drop = self.drop
append = self.append
cols: Union[Unset, List[str]] = UNSET
if not isinstance(self.cols, Unset):
cols = self.cols
- drop = self.drop
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
@@ -41,12 +40,12 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
+ if drop is not UNSET:
+ field_dict["drop"] = drop
if append is not UNSET:
field_dict["append"] = append
if cols is not UNSET:
field_dict["cols"] = cols
- if drop is not UNSET:
- field_dict["drop"] = drop
return field_dict
@@ -55,17 +54,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
+ drop = d.pop("drop", UNSET)
+
append = d.pop("append", UNSET)
cols = cast(List[str], d.pop("cols", UNSET))
- drop = d.pop("drop", UNSET)
-
set_index = cls(
type=type,
+ drop=drop,
append=append,
cols=cols,
- drop=drop,
)
set_index.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/set_intersection.py b/src/tuneinsight/api/sdk/models/set_intersection.py
index a9001dd..d0b86fe 100644
--- a/src/tuneinsight/api/sdk/models/set_intersection.py
+++ b/src/tuneinsight/api/sdk/models/set_intersection.py
@@ -24,8 +24,12 @@ class SetIntersection:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -36,8 +40,7 @@ class SetIntersection:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -45,8 +48,18 @@ class SetIntersection:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -54,25 +67,12 @@ class SetIntersection:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
encrypted_results (Union[Unset, bool]): if true, then the resulting matches are kept encrypted
fuzzy_params (Union[Unset, FuzzyMatchingParameters]):
hide_matching_origin (Union[Unset, bool]): if true, then the matches are aggregated before being decrypted,
@@ -82,28 +82,28 @@ class SetIntersection:
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
encrypted_results: Union[Unset, bool] = UNSET
fuzzy_params: Union[Unset, "FuzzyMatchingParameters"] = UNSET
hide_matching_origin: Union[Unset, bool] = UNSET
@@ -114,44 +114,44 @@ class SetIntersection:
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
encrypted_results = self.encrypted_results
fuzzy_params: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.fuzzy_params, Unset):
@@ -173,46 +173,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if encrypted_results is not UNSET:
field_dict["encryptedResults"] = encrypted_results
if fuzzy_params is not UNSET:
@@ -237,14 +237,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -253,10 +246,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -264,6 +268,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -275,19 +293,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -295,17 +300,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
encrypted_results = d.pop("encryptedResults", UNSET)
@@ -329,26 +329,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
set_intersection = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
encrypted_results=encrypted_results,
fuzzy_params=fuzzy_params,
hide_matching_origin=hide_matching_origin,
diff --git a/src/tuneinsight/api/sdk/models/settings.py b/src/tuneinsight/api/sdk/models/settings.py
index c2734c9..fced7fc 100644
--- a/src/tuneinsight/api/sdk/models/settings.py
+++ b/src/tuneinsight/api/sdk/models/settings.py
@@ -13,20 +13,26 @@ class Settings:
"""instance settings that is configurable by the administrator.
Attributes:
+ access_with_python (Union[Unset, None, bool]): whether or not to enable the access with Python in Project
+ Workflows.
authorized_project_types (Union[Unset, List[WorkflowType]]): array of project types that are available for
selection when creating a new project.
default_data_source (Union[Unset, None, str]): Unique identifier of a data source.
selectable_data_source (Union[Unset, None, bool]): whether or not the datasource of the project can be modified.
set_project_policies (Union[Unset, None, bool]): whether policies can be set for projects.
+ sparql_query_builder (Union[Unset, None, bool]): whether or not to enable the SparQL Query Builder.
"""
+ access_with_python: Union[Unset, None, bool] = UNSET
authorized_project_types: Union[Unset, List[WorkflowType]] = UNSET
default_data_source: Union[Unset, None, str] = UNSET
selectable_data_source: Union[Unset, None, bool] = UNSET
set_project_policies: Union[Unset, None, bool] = UNSET
+ sparql_query_builder: Union[Unset, None, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ access_with_python = self.access_with_python
authorized_project_types: Union[Unset, List[str]] = UNSET
if not isinstance(self.authorized_project_types, Unset):
authorized_project_types = []
@@ -38,10 +44,13 @@ def to_dict(self) -> Dict[str, Any]:
default_data_source = self.default_data_source
selectable_data_source = self.selectable_data_source
set_project_policies = self.set_project_policies
+ sparql_query_builder = self.sparql_query_builder
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if access_with_python is not UNSET:
+ field_dict["accessWithPython"] = access_with_python
if authorized_project_types is not UNSET:
field_dict["authorizedProjectTypes"] = authorized_project_types
if default_data_source is not UNSET:
@@ -50,12 +59,16 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["selectableDataSource"] = selectable_data_source
if set_project_policies is not UNSET:
field_dict["setProjectPolicies"] = set_project_policies
+ if sparql_query_builder is not UNSET:
+ field_dict["sparqlQueryBuilder"] = sparql_query_builder
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
+ access_with_python = d.pop("accessWithPython", UNSET)
+
authorized_project_types = []
_authorized_project_types = d.pop("authorizedProjectTypes", UNSET)
for authorized_project_types_item_data in _authorized_project_types or []:
@@ -69,11 +82,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
set_project_policies = d.pop("setProjectPolicies", UNSET)
+ sparql_query_builder = d.pop("sparqlQueryBuilder", UNSET)
+
settings = cls(
+ access_with_python=access_with_python,
authorized_project_types=authorized_project_types,
default_data_source=default_data_source,
selectable_data_source=selectable_data_source,
set_project_policies=set_project_policies,
+ sparql_query_builder=sparql_query_builder,
)
settings.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/setup_session.py b/src/tuneinsight/api/sdk/models/setup_session.py
index 2adc055..099cb04 100644
--- a/src/tuneinsight/api/sdk/models/setup_session.py
+++ b/src/tuneinsight/api/sdk/models/setup_session.py
@@ -23,8 +23,12 @@ class SetupSession:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class SetupSession:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class SetupSession:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,106 +66,92 @@ class SetupSession:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ target_scheme_context (Union[Unset, str]): base64 encoded scheme context
relinearization_key (Union[Unset, bool]): whether or not to generate the relinearization key
target_computation (Union[Unset, ComputationDefinition]): Generic computation.
- target_scheme_context (Union[Unset, str]): base64 encoded scheme context
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
+ target_scheme_context: Union[Unset, str] = UNSET
relinearization_key: Union[Unset, bool] = UNSET
target_computation: Union[Unset, "ComputationDefinition"] = UNSET
- target_scheme_context: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
+ target_scheme_context = self.target_scheme_context
relinearization_key = self.relinearization_key
target_computation: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.target_computation, Unset):
target_computation = self.target_computation.to_dict()
- target_scheme_context = self.target_scheme_context
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
@@ -160,52 +159,52 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
+ if target_scheme_context is not UNSET:
+ field_dict["targetSchemeContext"] = target_scheme_context
if relinearization_key is not UNSET:
field_dict["relinearizationKey"] = relinearization_key
if target_computation is not UNSET:
field_dict["targetComputation"] = target_computation
- if target_scheme_context is not UNSET:
- field_dict["targetSchemeContext"] = target_scheme_context
return field_dict
@@ -220,14 +219,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -236,10 +228,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -247,6 +250,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -258,19 +275,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -278,17 +282,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
- wait = d.pop("wait", UNSET)
+ target_scheme_context = d.pop("targetSchemeContext", UNSET)
relinearization_key = d.pop("relinearizationKey", UNSET)
@@ -299,33 +300,31 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
target_computation = ComputationDefinition.from_dict(_target_computation)
- target_scheme_context = d.pop("targetSchemeContext", UNSET)
-
setup_session = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
+ target_scheme_context=target_scheme_context,
relinearization_key=relinearization_key,
target_computation=target_computation,
- target_scheme_context=target_scheme_context,
)
setup_session.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/sphn_ontologies_search_ontologies_item.py b/src/tuneinsight/api/sdk/models/sphn_ontologies_search_ontologies_item.py
new file mode 100644
index 0000000..6fd5406
--- /dev/null
+++ b/src/tuneinsight/api/sdk/models/sphn_ontologies_search_ontologies_item.py
@@ -0,0 +1,10 @@
+from enum import Enum
+
+
+class SphnOntologiesSearchOntologiesItem(str, Enum):
+ ICD10 = "ICD10"
+ ATC = "ATC"
+ LOINC = "LOINC"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/src/tuneinsight/api/sdk/models/sphn_ontologies_search_response_200_item.py b/src/tuneinsight/api/sdk/models/sphn_ontologies_search_response_200_item.py
new file mode 100644
index 0000000..d0292fb
--- /dev/null
+++ b/src/tuneinsight/api/sdk/models/sphn_ontologies_search_response_200_item.py
@@ -0,0 +1,82 @@
+from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union
+
+import attr
+
+from ..types import UNSET, Unset
+
+if TYPE_CHECKING:
+ from ..models.sphn_ontology_search_result import SphnOntologySearchResult
+
+
+T = TypeVar("T", bound="SphnOntologiesSearchResponse200Item")
+
+
+@attr.s(auto_attribs=True)
+class SphnOntologiesSearchResponse200Item:
+ """
+ Attributes:
+ ontology (Union[Unset, str]):
+ results (Union[Unset, List['SphnOntologySearchResult']]):
+ """
+
+ ontology: Union[Unset, str] = UNSET
+ results: Union[Unset, List["SphnOntologySearchResult"]] = UNSET
+ additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
+
+ def to_dict(self) -> Dict[str, Any]:
+ ontology = self.ontology
+ results: Union[Unset, List[Dict[str, Any]]] = UNSET
+ if not isinstance(self.results, Unset):
+ results = []
+ for results_item_data in self.results:
+ results_item = results_item_data.to_dict()
+
+ results.append(results_item)
+
+ field_dict: Dict[str, Any] = {}
+ field_dict.update(self.additional_properties)
+ field_dict.update({})
+ if ontology is not UNSET:
+ field_dict["ontology"] = ontology
+ if results is not UNSET:
+ field_dict["results"] = results
+
+ return field_dict
+
+ @classmethod
+ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+ from ..models.sphn_ontology_search_result import SphnOntologySearchResult
+
+ d = src_dict.copy()
+ ontology = d.pop("ontology", UNSET)
+
+ results = []
+ _results = d.pop("results", UNSET)
+ for results_item_data in _results or []:
+ results_item = SphnOntologySearchResult.from_dict(results_item_data)
+
+ results.append(results_item)
+
+ sphn_ontologies_search_response_200_item = cls(
+ ontology=ontology,
+ results=results,
+ )
+
+ sphn_ontologies_search_response_200_item.additional_properties = d
+ return sphn_ontologies_search_response_200_item
+
+ @property
+ def additional_keys(self) -> List[str]:
+ return list(self.additional_properties.keys())
+
+ def __getitem__(self, key: str) -> Any:
+ return self.additional_properties[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self.additional_properties[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ del self.additional_properties[key]
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.additional_properties
diff --git a/src/tuneinsight/api/sdk/models/sphn_ontology_search_result.py b/src/tuneinsight/api/sdk/models/sphn_ontology_search_result.py
new file mode 100644
index 0000000..9d18ef3
--- /dev/null
+++ b/src/tuneinsight/api/sdk/models/sphn_ontology_search_result.py
@@ -0,0 +1,82 @@
+from typing import Any, Dict, List, Type, TypeVar, Union
+
+import attr
+
+from ..types import UNSET, Unset
+
+T = TypeVar("T", bound="SphnOntologySearchResult")
+
+
+@attr.s(auto_attribs=True)
+class SphnOntologySearchResult:
+ """Definition of an ontology search result
+
+ Attributes:
+ code (Union[Unset, str]):
+ description (Union[Unset, str]):
+ name (Union[Unset, str]):
+ breadcrumb (Union[Unset, str]):
+ """
+
+ code: Union[Unset, str] = UNSET
+ description: Union[Unset, str] = UNSET
+ name: Union[Unset, str] = UNSET
+ breadcrumb: Union[Unset, str] = UNSET
+ additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
+
+ def to_dict(self) -> Dict[str, Any]:
+ code = self.code
+ description = self.description
+ name = self.name
+ breadcrumb = self.breadcrumb
+
+ field_dict: Dict[str, Any] = {}
+ field_dict.update(self.additional_properties)
+ field_dict.update({})
+ if code is not UNSET:
+ field_dict["code"] = code
+ if description is not UNSET:
+ field_dict["description"] = description
+ if name is not UNSET:
+ field_dict["name"] = name
+ if breadcrumb is not UNSET:
+ field_dict["breadcrumb"] = breadcrumb
+
+ return field_dict
+
+ @classmethod
+ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+ d = src_dict.copy()
+ code = d.pop("code", UNSET)
+
+ description = d.pop("description", UNSET)
+
+ name = d.pop("name", UNSET)
+
+ breadcrumb = d.pop("breadcrumb", UNSET)
+
+ sphn_ontology_search_result = cls(
+ code=code,
+ description=description,
+ name=name,
+ breadcrumb=breadcrumb,
+ )
+
+ sphn_ontology_search_result.additional_properties = d
+ return sphn_ontology_search_result
+
+ @property
+ def additional_keys(self) -> List[str]:
+ return list(self.additional_properties.keys())
+
+ def __getitem__(self, key: str) -> Any:
+ return self.additional_properties[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self.additional_properties[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ del self.additional_properties[key]
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.additional_properties
diff --git a/src/tuneinsight/api/sdk/models/statistical_aggregation.py b/src/tuneinsight/api/sdk/models/statistical_aggregation.py
index dd3c62f..a2324a1 100644
--- a/src/tuneinsight/api/sdk/models/statistical_aggregation.py
+++ b/src/tuneinsight/api/sdk/models/statistical_aggregation.py
@@ -23,8 +23,12 @@ class StatisticalAggregation:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class StatisticalAggregation:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class StatisticalAggregation:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,25 +66,12 @@ class StatisticalAggregation:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
aggregation_columns (Union[Unset, List[str]]): list of columns where all data is aggregated
binning_operations (Union[Unset, List['BinningOperation']]): list of binning operations to apply before
aggregating the results
@@ -79,28 +79,28 @@ class StatisticalAggregation:
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
aggregation_columns: Union[Unset, List[str]] = UNSET
binning_operations: Union[Unset, List["BinningOperation"]] = UNSET
include_dataset_length: Union[Unset, bool] = UNSET
@@ -109,44 +109,44 @@ class StatisticalAggregation:
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
aggregation_columns: Union[Unset, List[str]] = UNSET
if not isinstance(self.aggregation_columns, Unset):
aggregation_columns = self.aggregation_columns
@@ -168,46 +168,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if aggregation_columns is not UNSET:
field_dict["aggregationColumns"] = aggregation_columns
if binning_operations is not UNSET:
@@ -228,14 +228,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -244,10 +237,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -255,6 +259,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -266,19 +284,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -286,17 +291,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
aggregation_columns = cast(List[str], d.pop("aggregationColumns", UNSET))
@@ -311,26 +311,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
statistical_aggregation = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
aggregation_columns=aggregation_columns,
binning_operations=binning_operations,
include_dataset_length=include_dataset_length,
diff --git a/src/tuneinsight/api/sdk/models/storage_definition.py b/src/tuneinsight/api/sdk/models/storage_definition.py
index 9cc2552..eabb425 100644
--- a/src/tuneinsight/api/sdk/models/storage_definition.py
+++ b/src/tuneinsight/api/sdk/models/storage_definition.py
@@ -17,6 +17,7 @@ class StorageDefinition:
"""specification of the storage operation
Attributes:
+ backup_definition (Union[Unset, BackupDefinition]): backup parameters
current_key (Union[Unset, str]): currently used b64-formatted encryption key, needs to be specified when running
'decrypt' or 'rotate'
encrypt_unencrypted (Union[Unset, bool]): when performing a rotation, if true, then unencrypted values get
@@ -24,17 +25,20 @@ class StorageDefinition:
new_key (Union[Unset, str]): new b64-formatted key to use on the storage, needs to be specified when running
'encrypt' or 'rotate'
operation (Union[Unset, StorageOperation]): operation to perform on the storage
- backup_definition (Union[Unset, BackupDefinition]): backup parameters
"""
+ backup_definition: Union[Unset, "BackupDefinition"] = UNSET
current_key: Union[Unset, str] = UNSET
encrypt_unencrypted: Union[Unset, bool] = UNSET
new_key: Union[Unset, str] = UNSET
operation: Union[Unset, StorageOperation] = UNSET
- backup_definition: Union[Unset, "BackupDefinition"] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ backup_definition: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.backup_definition, Unset):
+ backup_definition = self.backup_definition.to_dict()
+
current_key = self.current_key
encrypt_unencrypted = self.encrypt_unencrypted
new_key = self.new_key
@@ -42,13 +46,11 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.operation, Unset):
operation = self.operation.value
- backup_definition: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.backup_definition, Unset):
- backup_definition = self.backup_definition.to_dict()
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if backup_definition is not UNSET:
+ field_dict["backupDefinition"] = backup_definition
if current_key is not UNSET:
field_dict["currentKey"] = current_key
if encrypt_unencrypted is not UNSET:
@@ -57,8 +59,6 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["newKey"] = new_key
if operation is not UNSET:
field_dict["operation"] = operation
- if backup_definition is not UNSET:
- field_dict["backupDefinition"] = backup_definition
return field_dict
@@ -67,6 +67,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.backup_definition import BackupDefinition
d = src_dict.copy()
+ _backup_definition = d.pop("backupDefinition", UNSET)
+ backup_definition: Union[Unset, BackupDefinition]
+ if isinstance(_backup_definition, Unset):
+ backup_definition = UNSET
+ else:
+ backup_definition = BackupDefinition.from_dict(_backup_definition)
+
current_key = d.pop("currentKey", UNSET)
encrypt_unencrypted = d.pop("encryptUnencrypted", UNSET)
@@ -80,19 +87,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
operation = StorageOperation(_operation)
- _backup_definition = d.pop("backupDefinition", UNSET)
- backup_definition: Union[Unset, BackupDefinition]
- if isinstance(_backup_definition, Unset):
- backup_definition = UNSET
- else:
- backup_definition = BackupDefinition.from_dict(_backup_definition)
-
storage_definition = cls(
+ backup_definition=backup_definition,
current_key=current_key,
encrypt_unencrypted=encrypt_unencrypted,
new_key=new_key,
operation=operation,
- backup_definition=backup_definition,
)
storage_definition.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/survival.py b/src/tuneinsight/api/sdk/models/survival.py
index d73f2f2..3c2e723 100644
--- a/src/tuneinsight/api/sdk/models/survival.py
+++ b/src/tuneinsight/api/sdk/models/survival.py
@@ -17,9 +17,6 @@ class Survival:
"""
Attributes:
type (PreprocessingOperationType): type of preprocessing operation
- event_col (Union[Unset, str]): the name of the column that stores the event status for each sample Default:
- 'event'.
- event_val (Union[Unset, str]): the event value indicating a survival event (i.e. death)
interval (Union[Unset, Duration]): definition of a date-independent time interval
num_frames (Union[Unset, int]): the number of time frames to take into account starting from the start of the
survival
@@ -28,23 +25,24 @@ class Survival:
stored must be integers Default: 'duration'.
end_event (Union[Unset, str]): the column that must contain the timestamps of the end event (can be empty if no
event happened)
+ event_col (Union[Unset, str]): the name of the column that stores the event status for each sample Default:
+ 'event'.
+ event_val (Union[Unset, str]): the event value indicating a survival event (i.e. death)
"""
type: PreprocessingOperationType
- event_col: Union[Unset, str] = "event"
- event_val: Union[Unset, str] = UNSET
interval: Union[Unset, "Duration"] = UNSET
num_frames: Union[Unset, int] = UNSET
start_event: Union[Unset, str] = UNSET
duration_col: Union[Unset, str] = "duration"
end_event: Union[Unset, str] = UNSET
+ event_col: Union[Unset, str] = "event"
+ event_val: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- event_col = self.event_col
- event_val = self.event_val
interval: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.interval, Unset):
interval = self.interval.to_dict()
@@ -53,6 +51,8 @@ def to_dict(self) -> Dict[str, Any]:
start_event = self.start_event
duration_col = self.duration_col
end_event = self.end_event
+ event_col = self.event_col
+ event_val = self.event_val
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -61,10 +61,6 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if event_col is not UNSET:
- field_dict["eventCol"] = event_col
- if event_val is not UNSET:
- field_dict["eventVal"] = event_val
if interval is not UNSET:
field_dict["interval"] = interval
if num_frames is not UNSET:
@@ -75,6 +71,10 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["durationCol"] = duration_col
if end_event is not UNSET:
field_dict["endEvent"] = end_event
+ if event_col is not UNSET:
+ field_dict["eventCol"] = event_col
+ if event_val is not UNSET:
+ field_dict["eventVal"] = event_val
return field_dict
@@ -85,10 +85,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = PreprocessingOperationType(d.pop("type"))
- event_col = d.pop("eventCol", UNSET)
-
- event_val = d.pop("eventVal", UNSET)
-
_interval = d.pop("interval", UNSET)
interval: Union[Unset, Duration]
if isinstance(_interval, Unset):
@@ -104,15 +100,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
end_event = d.pop("endEvent", UNSET)
+ event_col = d.pop("eventCol", UNSET)
+
+ event_val = d.pop("eventVal", UNSET)
+
survival = cls(
type=type,
- event_col=event_col,
- event_val=event_val,
interval=interval,
num_frames=num_frames,
start_event=start_event,
duration_col=duration_col,
end_event=end_event,
+ event_col=event_col,
+ event_val=event_val,
)
survival.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/survival_aggregation.py b/src/tuneinsight/api/sdk/models/survival_aggregation.py
index 50b211f..ff91d2c 100644
--- a/src/tuneinsight/api/sdk/models/survival_aggregation.py
+++ b/src/tuneinsight/api/sdk/models/survival_aggregation.py
@@ -25,8 +25,12 @@ class SurvivalAggregation:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -37,8 +41,7 @@ class SurvivalAggregation:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -46,8 +49,18 @@ class SurvivalAggregation:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -55,118 +68,95 @@ class SurvivalAggregation:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ secure_matching (Union[Unset, bool]): if true then a cohort is created by matching records with a specified
+ organization
+ subgroups (Union[Unset, List['SurvivalAggregationSubgroupsItem']]): list of filters to create survival subgroups
+ survival_parameters (Union[Unset, Survival]):
encrypted_matching (Union[Unset, bool]): if true, then the resulting matches are kept encrypted before
aggregating the survival data (slower)
matching_columns (Union[Unset, List['MatchingColumn']]): The columns on which the data should be matched
matching_organization (Union[Unset, str]): when secure matching is enabled, the organization with whom to match
records with
- secure_matching (Union[Unset, bool]): if true then a cohort is created by matching records with a specified
- organization
- subgroups (Union[Unset, List['SurvivalAggregationSubgroupsItem']]): list of filters to create survival subgroups
- survival_parameters (Union[Unset, Survival]):
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
- encrypted_matching: Union[Unset, bool] = UNSET
- matching_columns: Union[Unset, List["MatchingColumn"]] = UNSET
- matching_organization: Union[Unset, str] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
secure_matching: Union[Unset, bool] = UNSET
subgroups: Union[Unset, List["SurvivalAggregationSubgroupsItem"]] = UNSET
survival_parameters: Union[Unset, "Survival"] = UNSET
+ encrypted_matching: Union[Unset, bool] = UNSET
+ matching_columns: Union[Unset, List["MatchingColumn"]] = UNSET
+ matching_organization: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
- encrypted_matching = self.encrypted_matching
- matching_columns: Union[Unset, List[Dict[str, Any]]] = UNSET
- if not isinstance(self.matching_columns, Unset):
- matching_columns = []
- for matching_columns_item_data in self.matching_columns:
- matching_columns_item = matching_columns_item_data.to_dict()
-
- matching_columns.append(matching_columns_item)
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
- matching_organization = self.matching_organization
secure_matching = self.secure_matching
subgroups: Union[Unset, List[Dict[str, Any]]] = UNSET
if not isinstance(self.subgroups, Unset):
@@ -180,6 +170,17 @@ def to_dict(self) -> Dict[str, Any]:
if not isinstance(self.survival_parameters, Unset):
survival_parameters = self.survival_parameters.to_dict()
+ encrypted_matching = self.encrypted_matching
+ matching_columns: Union[Unset, List[Dict[str, Any]]] = UNSET
+ if not isinstance(self.matching_columns, Unset):
+ matching_columns = []
+ for matching_columns_item_data in self.matching_columns:
+ matching_columns_item = matching_columns_item_data.to_dict()
+
+ matching_columns.append(matching_columns_item)
+
+ matching_organization = self.matching_organization
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
@@ -187,58 +188,58 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
- if encrypted_matching is not UNSET:
- field_dict["encryptedMatching"] = encrypted_matching
- if matching_columns is not UNSET:
- field_dict["matchingColumns"] = matching_columns
- if matching_organization is not UNSET:
- field_dict["matchingOrganization"] = matching_organization
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if secure_matching is not UNSET:
field_dict["secureMatching"] = secure_matching
if subgroups is not UNSET:
field_dict["subgroups"] = subgroups
if survival_parameters is not UNSET:
field_dict["survivalParameters"] = survival_parameters
+ if encrypted_matching is not UNSET:
+ field_dict["encryptedMatching"] = encrypted_matching
+ if matching_columns is not UNSET:
+ field_dict["matchingColumns"] = matching_columns
+ if matching_organization is not UNSET:
+ field_dict["matchingOrganization"] = matching_organization
return field_dict
@@ -255,14 +256,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -271,10 +265,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -282,6 +287,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -293,19 +312,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -313,28 +319,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
-
- encrypted_matching = d.pop("encryptedMatching", UNSET)
-
- matching_columns = []
- _matching_columns = d.pop("matchingColumns", UNSET)
- for matching_columns_item_data in _matching_columns or []:
- matching_columns_item = MatchingColumn.from_dict(matching_columns_item_data)
-
- matching_columns.append(matching_columns_item)
-
- matching_organization = d.pop("matchingOrganization", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
secure_matching = d.pop("secureMatching", UNSET)
@@ -352,34 +342,45 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
survival_parameters = Survival.from_dict(_survival_parameters)
+ encrypted_matching = d.pop("encryptedMatching", UNSET)
+
+ matching_columns = []
+ _matching_columns = d.pop("matchingColumns", UNSET)
+ for matching_columns_item_data in _matching_columns or []:
+ matching_columns_item = MatchingColumn.from_dict(matching_columns_item_data)
+
+ matching_columns.append(matching_columns_item)
+
+ matching_organization = d.pop("matchingOrganization", UNSET)
+
survival_aggregation = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
- encrypted_matching=encrypted_matching,
- matching_columns=matching_columns,
- matching_organization=matching_organization,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
secure_matching=secure_matching,
subgroups=subgroups,
survival_parameters=survival_parameters,
+ encrypted_matching=encrypted_matching,
+ matching_columns=matching_columns,
+ matching_organization=matching_organization,
)
survival_aggregation.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/threshold.py b/src/tuneinsight/api/sdk/models/threshold.py
index a416efa..c07fc30 100644
--- a/src/tuneinsight/api/sdk/models/threshold.py
+++ b/src/tuneinsight/api/sdk/models/threshold.py
@@ -13,41 +13,40 @@ class Threshold:
"""represents a threshold, which can be made relative of the dataset size
Attributes:
- fixed_value (Union[Unset, int]): value of the fixed threshold
relative_factor (Union[Unset, float]): when the threshold is relative to the dataset size, factor of this
dataset size
type (Union[Unset, ThresholdType]):
+ fixed_value (Union[Unset, int]): value of the fixed threshold
"""
- fixed_value: Union[Unset, int] = UNSET
relative_factor: Union[Unset, float] = UNSET
type: Union[Unset, ThresholdType] = UNSET
+ fixed_value: Union[Unset, int] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- fixed_value = self.fixed_value
relative_factor = self.relative_factor
type: Union[Unset, str] = UNSET
if not isinstance(self.type, Unset):
type = self.type.value
+ fixed_value = self.fixed_value
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if fixed_value is not UNSET:
- field_dict["fixedValue"] = fixed_value
if relative_factor is not UNSET:
field_dict["relativeFactor"] = relative_factor
if type is not UNSET:
field_dict["type"] = type
+ if fixed_value is not UNSET:
+ field_dict["fixedValue"] = fixed_value
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- fixed_value = d.pop("fixedValue", UNSET)
-
relative_factor = d.pop("relativeFactor", UNSET)
_type = d.pop("type", UNSET)
@@ -57,10 +56,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
type = ThresholdType(_type)
+ fixed_value = d.pop("fixedValue", UNSET)
+
threshold = cls(
- fixed_value=fixed_value,
relative_factor=relative_factor,
type=type,
+ fixed_value=fixed_value,
)
threshold.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/user.py b/src/tuneinsight/api/sdk/models/user.py
index 85634d8..308deab 100644
--- a/src/tuneinsight/api/sdk/models/user.py
+++ b/src/tuneinsight/api/sdk/models/user.py
@@ -11,54 +11,54 @@
class User:
"""
Attributes:
+ username (Union[Unset, str]):
email (Union[Unset, str]):
first_name (Union[Unset, str]):
last_name (Union[Unset, str]):
- username (Union[Unset, str]):
"""
+ username: Union[Unset, str] = UNSET
email: Union[Unset, str] = UNSET
first_name: Union[Unset, str] = UNSET
last_name: Union[Unset, str] = UNSET
- username: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ username = self.username
email = self.email
first_name = self.first_name
last_name = self.last_name
- username = self.username
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if username is not UNSET:
+ field_dict["username"] = username
if email is not UNSET:
field_dict["email"] = email
if first_name is not UNSET:
field_dict["firstName"] = first_name
if last_name is not UNSET:
field_dict["lastName"] = last_name
- if username is not UNSET:
- field_dict["username"] = username
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
+ username = d.pop("username", UNSET)
+
email = d.pop("email", UNSET)
first_name = d.pop("firstName", UNSET)
last_name = d.pop("lastName", UNSET)
- username = d.pop("username", UNSET)
-
user = cls(
+ username=username,
email=email,
first_name=first_name,
last_name=last_name,
- username=username,
)
user.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/user_definition.py b/src/tuneinsight/api/sdk/models/user_definition.py
index 80069ed..759ac33 100644
--- a/src/tuneinsight/api/sdk/models/user_definition.py
+++ b/src/tuneinsight/api/sdk/models/user_definition.py
@@ -18,129 +18,130 @@
class UserDefinition:
"""
Attributes:
+ disableable_credential_types (Union[Unset, List['UserDefinitionDisableableCredentialTypesItem']]):
+ email (Union[Unset, str]):
+ federation_link (Union[Unset, str]):
first_name (Union[Unset, str]):
+ id (Union[Unset, str]):
attributes (Union[Unset, UserDefinitionAttributes]):
- created_timestamp (Union[Unset, int]):
- enabled (Union[Unset, bool]):
- email (Union[Unset, str]):
+ email_verified (Union[Unset, bool]):
groups (Union[Unset, List[str]]):
last_name (Union[Unset, str]):
totp (Union[Unset, bool]):
- access (Union[Unset, UserDefinitionAccess]):
client_roles (Union[Unset, UserDefinitionClientRoles]):
- disableable_credential_types (Union[Unset, List['UserDefinitionDisableableCredentialTypesItem']]):
- federation_link (Union[Unset, str]):
+ created_timestamp (Union[Unset, int]):
realm_roles (Union[Unset, List[str]]):
+ required_actions (Union[Unset, List[str]]):
service_account_client_id (Union[Unset, str]):
username (Union[Unset, str]):
- email_verified (Union[Unset, bool]):
- id (Union[Unset, str]):
- required_actions (Union[Unset, List[str]]):
+ access (Union[Unset, UserDefinitionAccess]):
+ enabled (Union[Unset, bool]):
"""
+ disableable_credential_types: Union[Unset, List["UserDefinitionDisableableCredentialTypesItem"]] = UNSET
+ email: Union[Unset, str] = UNSET
+ federation_link: Union[Unset, str] = UNSET
first_name: Union[Unset, str] = UNSET
+ id: Union[Unset, str] = UNSET
attributes: Union[Unset, "UserDefinitionAttributes"] = UNSET
- created_timestamp: Union[Unset, int] = UNSET
- enabled: Union[Unset, bool] = UNSET
- email: Union[Unset, str] = UNSET
+ email_verified: Union[Unset, bool] = UNSET
groups: Union[Unset, List[str]] = UNSET
last_name: Union[Unset, str] = UNSET
totp: Union[Unset, bool] = UNSET
- access: Union[Unset, "UserDefinitionAccess"] = UNSET
client_roles: Union[Unset, "UserDefinitionClientRoles"] = UNSET
- disableable_credential_types: Union[Unset, List["UserDefinitionDisableableCredentialTypesItem"]] = UNSET
- federation_link: Union[Unset, str] = UNSET
+ created_timestamp: Union[Unset, int] = UNSET
realm_roles: Union[Unset, List[str]] = UNSET
+ required_actions: Union[Unset, List[str]] = UNSET
service_account_client_id: Union[Unset, str] = UNSET
username: Union[Unset, str] = UNSET
- email_verified: Union[Unset, bool] = UNSET
- id: Union[Unset, str] = UNSET
- required_actions: Union[Unset, List[str]] = UNSET
+ access: Union[Unset, "UserDefinitionAccess"] = UNSET
+ enabled: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
+ disableable_credential_types: Union[Unset, List[Dict[str, Any]]] = UNSET
+ if not isinstance(self.disableable_credential_types, Unset):
+ disableable_credential_types = []
+ for disableable_credential_types_item_data in self.disableable_credential_types:
+ disableable_credential_types_item = disableable_credential_types_item_data.to_dict()
+
+ disableable_credential_types.append(disableable_credential_types_item)
+
+ email = self.email
+ federation_link = self.federation_link
first_name = self.first_name
+ id = self.id
attributes: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.attributes, Unset):
attributes = self.attributes.to_dict()
- created_timestamp = self.created_timestamp
- enabled = self.enabled
- email = self.email
+ email_verified = self.email_verified
groups: Union[Unset, List[str]] = UNSET
if not isinstance(self.groups, Unset):
groups = self.groups
last_name = self.last_name
totp = self.totp
- access: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.access, Unset):
- access = self.access.to_dict()
-
client_roles: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.client_roles, Unset):
client_roles = self.client_roles.to_dict()
- disableable_credential_types: Union[Unset, List[Dict[str, Any]]] = UNSET
- if not isinstance(self.disableable_credential_types, Unset):
- disableable_credential_types = []
- for disableable_credential_types_item_data in self.disableable_credential_types:
- disableable_credential_types_item = disableable_credential_types_item_data.to_dict()
-
- disableable_credential_types.append(disableable_credential_types_item)
-
- federation_link = self.federation_link
+ created_timestamp = self.created_timestamp
realm_roles: Union[Unset, List[str]] = UNSET
if not isinstance(self.realm_roles, Unset):
realm_roles = self.realm_roles
- service_account_client_id = self.service_account_client_id
- username = self.username
- email_verified = self.email_verified
- id = self.id
required_actions: Union[Unset, List[str]] = UNSET
if not isinstance(self.required_actions, Unset):
required_actions = self.required_actions
+ service_account_client_id = self.service_account_client_id
+ username = self.username
+ access: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.access, Unset):
+ access = self.access.to_dict()
+
+ enabled = self.enabled
+
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
+ if disableable_credential_types is not UNSET:
+ field_dict["disableableCredentialTypes"] = disableable_credential_types
+ if email is not UNSET:
+ field_dict["email"] = email
+ if federation_link is not UNSET:
+ field_dict["federationLink"] = federation_link
if first_name is not UNSET:
field_dict["firstName"] = first_name
+ if id is not UNSET:
+ field_dict["id"] = id
if attributes is not UNSET:
field_dict["attributes"] = attributes
- if created_timestamp is not UNSET:
- field_dict["createdTimestamp"] = created_timestamp
- if enabled is not UNSET:
- field_dict["enabled"] = enabled
- if email is not UNSET:
- field_dict["email"] = email
+ if email_verified is not UNSET:
+ field_dict["emailVerified"] = email_verified
if groups is not UNSET:
field_dict["groups"] = groups
if last_name is not UNSET:
field_dict["lastName"] = last_name
if totp is not UNSET:
field_dict["totp"] = totp
- if access is not UNSET:
- field_dict["access"] = access
if client_roles is not UNSET:
field_dict["clientRoles"] = client_roles
- if disableable_credential_types is not UNSET:
- field_dict["disableableCredentialTypes"] = disableable_credential_types
- if federation_link is not UNSET:
- field_dict["federationLink"] = federation_link
+ if created_timestamp is not UNSET:
+ field_dict["createdTimestamp"] = created_timestamp
if realm_roles is not UNSET:
field_dict["realmRoles"] = realm_roles
+ if required_actions is not UNSET:
+ field_dict["requiredActions"] = required_actions
if service_account_client_id is not UNSET:
field_dict["serviceAccountClientID"] = service_account_client_id
if username is not UNSET:
field_dict["username"] = username
- if email_verified is not UNSET:
- field_dict["emailVerified"] = email_verified
- if id is not UNSET:
- field_dict["id"] = id
- if required_actions is not UNSET:
- field_dict["requiredActions"] = required_actions
+ if access is not UNSET:
+ field_dict["access"] = access
+ if enabled is not UNSET:
+ field_dict["enabled"] = enabled
return field_dict
@@ -154,8 +155,23 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
)
d = src_dict.copy()
+ disableable_credential_types = []
+ _disableable_credential_types = d.pop("disableableCredentialTypes", UNSET)
+ for disableable_credential_types_item_data in _disableable_credential_types or []:
+ disableable_credential_types_item = UserDefinitionDisableableCredentialTypesItem.from_dict(
+ disableable_credential_types_item_data
+ )
+
+ disableable_credential_types.append(disableable_credential_types_item)
+
+ email = d.pop("email", UNSET)
+
+ federation_link = d.pop("federationLink", UNSET)
+
first_name = d.pop("firstName", UNSET)
+ id = d.pop("id", UNSET)
+
_attributes = d.pop("attributes", UNSET)
attributes: Union[Unset, UserDefinitionAttributes]
if isinstance(_attributes, Unset):
@@ -163,11 +179,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
attributes = UserDefinitionAttributes.from_dict(_attributes)
- created_timestamp = d.pop("createdTimestamp", UNSET)
-
- enabled = d.pop("enabled", UNSET)
-
- email = d.pop("email", UNSET)
+ email_verified = d.pop("emailVerified", UNSET)
groups = cast(List[str], d.pop("groups", UNSET))
@@ -175,13 +187,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
totp = d.pop("totp", UNSET)
- _access = d.pop("access", UNSET)
- access: Union[Unset, UserDefinitionAccess]
- if isinstance(_access, Unset):
- access = UNSET
- else:
- access = UserDefinitionAccess.from_dict(_access)
-
_client_roles = d.pop("clientRoles", UNSET)
client_roles: Union[Unset, UserDefinitionClientRoles]
if isinstance(_client_roles, Unset):
@@ -189,48 +194,44 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
client_roles = UserDefinitionClientRoles.from_dict(_client_roles)
- disableable_credential_types = []
- _disableable_credential_types = d.pop("disableableCredentialTypes", UNSET)
- for disableable_credential_types_item_data in _disableable_credential_types or []:
- disableable_credential_types_item = UserDefinitionDisableableCredentialTypesItem.from_dict(
- disableable_credential_types_item_data
- )
-
- disableable_credential_types.append(disableable_credential_types_item)
-
- federation_link = d.pop("federationLink", UNSET)
+ created_timestamp = d.pop("createdTimestamp", UNSET)
realm_roles = cast(List[str], d.pop("realmRoles", UNSET))
+ required_actions = cast(List[str], d.pop("requiredActions", UNSET))
+
service_account_client_id = d.pop("serviceAccountClientID", UNSET)
username = d.pop("username", UNSET)
- email_verified = d.pop("emailVerified", UNSET)
-
- id = d.pop("id", UNSET)
+ _access = d.pop("access", UNSET)
+ access: Union[Unset, UserDefinitionAccess]
+ if isinstance(_access, Unset):
+ access = UNSET
+ else:
+ access = UserDefinitionAccess.from_dict(_access)
- required_actions = cast(List[str], d.pop("requiredActions", UNSET))
+ enabled = d.pop("enabled", UNSET)
user_definition = cls(
+ disableable_credential_types=disableable_credential_types,
+ email=email,
+ federation_link=federation_link,
first_name=first_name,
+ id=id,
attributes=attributes,
- created_timestamp=created_timestamp,
- enabled=enabled,
- email=email,
+ email_verified=email_verified,
groups=groups,
last_name=last_name,
totp=totp,
- access=access,
client_roles=client_roles,
- disableable_credential_types=disableable_credential_types,
- federation_link=federation_link,
+ created_timestamp=created_timestamp,
realm_roles=realm_roles,
+ required_actions=required_actions,
service_account_client_id=service_account_client_id,
username=username,
- email_verified=email_verified,
- id=id,
- required_actions=required_actions,
+ access=access,
+ enabled=enabled,
)
user_definition.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/user_list_query.py b/src/tuneinsight/api/sdk/models/user_list_query.py
index 87907ad..9aaeeed 100644
--- a/src/tuneinsight/api/sdk/models/user_list_query.py
+++ b/src/tuneinsight/api/sdk/models/user_list_query.py
@@ -11,134 +11,134 @@
class UserListQuery:
"""
Attributes:
- brief_representation (Union[Unset, bool]):
- exact (Union[Unset, bool]):
- first (Union[Unset, int]):
- first_name (Union[Unset, str]):
- idp_alias (Union[Unset, str]):
+ email (Union[Unset, str]):
+ last_name (Union[Unset, str]):
username (Union[Unset, str]):
+ exact (Union[Unset, bool]):
+ search (Union[Unset, str]):
enabled (Union[Unset, bool]):
idp_user_id (Union[Unset, str]):
+ idp_alias (Union[Unset, str]):
max_ (Union[Unset, int]):
q (Union[Unset, str]):
+ brief_representation (Union[Unset, bool]):
email_verified (Union[Unset, bool]):
- email (Union[Unset, str]):
- last_name (Union[Unset, str]):
- search (Union[Unset, str]):
+ first (Union[Unset, int]):
+ first_name (Union[Unset, str]):
"""
- brief_representation: Union[Unset, bool] = UNSET
- exact: Union[Unset, bool] = UNSET
- first: Union[Unset, int] = UNSET
- first_name: Union[Unset, str] = UNSET
- idp_alias: Union[Unset, str] = UNSET
+ email: Union[Unset, str] = UNSET
+ last_name: Union[Unset, str] = UNSET
username: Union[Unset, str] = UNSET
+ exact: Union[Unset, bool] = UNSET
+ search: Union[Unset, str] = UNSET
enabled: Union[Unset, bool] = UNSET
idp_user_id: Union[Unset, str] = UNSET
+ idp_alias: Union[Unset, str] = UNSET
max_: Union[Unset, int] = UNSET
q: Union[Unset, str] = UNSET
+ brief_representation: Union[Unset, bool] = UNSET
email_verified: Union[Unset, bool] = UNSET
- email: Union[Unset, str] = UNSET
- last_name: Union[Unset, str] = UNSET
- search: Union[Unset, str] = UNSET
+ first: Union[Unset, int] = UNSET
+ first_name: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- brief_representation = self.brief_representation
- exact = self.exact
- first = self.first
- first_name = self.first_name
- idp_alias = self.idp_alias
+ email = self.email
+ last_name = self.last_name
username = self.username
+ exact = self.exact
+ search = self.search
enabled = self.enabled
idp_user_id = self.idp_user_id
+ idp_alias = self.idp_alias
max_ = self.max_
q = self.q
+ brief_representation = self.brief_representation
email_verified = self.email_verified
- email = self.email
- last_name = self.last_name
- search = self.search
+ first = self.first
+ first_name = self.first_name
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if brief_representation is not UNSET:
- field_dict["briefRepresentation"] = brief_representation
- if exact is not UNSET:
- field_dict["exact"] = exact
- if first is not UNSET:
- field_dict["first"] = first
- if first_name is not UNSET:
- field_dict["firstName"] = first_name
- if idp_alias is not UNSET:
- field_dict["idpAlias"] = idp_alias
+ if email is not UNSET:
+ field_dict["email"] = email
+ if last_name is not UNSET:
+ field_dict["lastName"] = last_name
if username is not UNSET:
field_dict["username"] = username
+ if exact is not UNSET:
+ field_dict["exact"] = exact
+ if search is not UNSET:
+ field_dict["search"] = search
if enabled is not UNSET:
field_dict["enabled"] = enabled
if idp_user_id is not UNSET:
field_dict["idpUserId"] = idp_user_id
+ if idp_alias is not UNSET:
+ field_dict["idpAlias"] = idp_alias
if max_ is not UNSET:
field_dict["max"] = max_
if q is not UNSET:
field_dict["q"] = q
+ if brief_representation is not UNSET:
+ field_dict["briefRepresentation"] = brief_representation
if email_verified is not UNSET:
field_dict["emailVerified"] = email_verified
- if email is not UNSET:
- field_dict["email"] = email
- if last_name is not UNSET:
- field_dict["lastName"] = last_name
- if search is not UNSET:
- field_dict["search"] = search
+ if first is not UNSET:
+ field_dict["first"] = first
+ if first_name is not UNSET:
+ field_dict["firstName"] = first_name
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
- brief_representation = d.pop("briefRepresentation", UNSET)
-
- exact = d.pop("exact", UNSET)
+ email = d.pop("email", UNSET)
- first = d.pop("first", UNSET)
+ last_name = d.pop("lastName", UNSET)
- first_name = d.pop("firstName", UNSET)
+ username = d.pop("username", UNSET)
- idp_alias = d.pop("idpAlias", UNSET)
+ exact = d.pop("exact", UNSET)
- username = d.pop("username", UNSET)
+ search = d.pop("search", UNSET)
enabled = d.pop("enabled", UNSET)
idp_user_id = d.pop("idpUserId", UNSET)
+ idp_alias = d.pop("idpAlias", UNSET)
+
max_ = d.pop("max", UNSET)
q = d.pop("q", UNSET)
- email_verified = d.pop("emailVerified", UNSET)
+ brief_representation = d.pop("briefRepresentation", UNSET)
- email = d.pop("email", UNSET)
+ email_verified = d.pop("emailVerified", UNSET)
- last_name = d.pop("lastName", UNSET)
+ first = d.pop("first", UNSET)
- search = d.pop("search", UNSET)
+ first_name = d.pop("firstName", UNSET)
user_list_query = cls(
- brief_representation=brief_representation,
- exact=exact,
- first=first,
- first_name=first_name,
- idp_alias=idp_alias,
+ email=email,
+ last_name=last_name,
username=username,
+ exact=exact,
+ search=search,
enabled=enabled,
idp_user_id=idp_user_id,
+ idp_alias=idp_alias,
max_=max_,
q=q,
+ brief_representation=brief_representation,
email_verified=email_verified,
- email=email,
- last_name=last_name,
- search=search,
+ first=first,
+ first_name=first_name,
)
user_list_query.additional_properties = d
diff --git a/src/tuneinsight/api/sdk/models/v_binned_aggregation.py b/src/tuneinsight/api/sdk/models/v_binned_aggregation.py
index 94cccff..a0e28e6 100644
--- a/src/tuneinsight/api/sdk/models/v_binned_aggregation.py
+++ b/src/tuneinsight/api/sdk/models/v_binned_aggregation.py
@@ -23,8 +23,12 @@ class VBinnedAggregation:
"""
Attributes:
type (ComputationType): Type of the computation.
- dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
- mode. Default: -1.0.
+ release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
+ results.
+ If set, then encrypted results are automatically key switched and decrypted
+ and a Result entity is saved
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for
clipping before encrypting values when running aggregation-based workflows.
The bounds are deduced based on the cryptographic parameters used for the aggregation.
@@ -35,8 +39,7 @@ class VBinnedAggregation:
(default)
- error: if some values are out of bounds, then the computation is aborted.
Default: ComputationDefinitionInputClippingMethod.WARNING.
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both)
- local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ join_id (Union[Unset, str]): Unique identifier of a data object.
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be
aggregated collectively. If provided, the computation will automatically deduce
optimal cryptographic parameters in order to maximize precision while allowing encoding values up to this bound.
@@ -44,8 +47,18 @@ class VBinnedAggregation:
up to 16 million.
For example, when using default parameters and running an aggregation with 4 participants, local aggregated
values cannot exceed 4 million.
+ owner (Union[Unset, str]): The username of the end user who requested the computation.
preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters
applied to the input retrieved from the datasource, if applicable
+ project_id (Union[Unset, str]): Unique identifier of a project.
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP
+ mode. Default: -1.0.
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
+ local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
+ the network)
+ local_input_id (Union[Unset, str]): Unique identifier of a data object.
+ wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
+ cohort_id (Union[Unset, str]): Unique identifier of a data object.
data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource
from each node before the computation
end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true,
@@ -53,25 +66,12 @@ class VBinnedAggregation:
is initially encrypted with a network collective key, then it is key switched to
the initiating user's public key.
input_data_object (Union[Unset, str]): Shared identifier of a data object.
- timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run.
+ dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
+ disclosure prevention mechanisms
local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will
use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration
of the computation. The local input columns/values must be in the form {: [, , ...],
...}
- owner (Union[Unset, str]): The username of the end user who requested the computation.
- project_id (Union[Unset, str]): Unique identifier of a project.
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various
- disclosure prevention mechanisms
- cohort_id (Union[Unset, str]): Unique identifier of a data object.
- encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key.
- join_id (Union[Unset, str]): Unique identifier of a data object.
- local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured
- the network)
- release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output
- results.
- If set, then encrypted results are automatically key switched and decrypted
- and a Result entity is saved
- wait (Union[Unset, bool]): Whether to wait synchronously for the computation result.
binning_parameters (Union[Unset, BinningParameters]): parameters used to bin data
identifiable_columns (Union[Unset, List[str]]):
aggregation_column (Union[Unset, str]): the column on which to aggregate
@@ -79,28 +79,28 @@ class VBinnedAggregation:
"""
type: ComputationType
- dp_epsilon: Union[Unset, float] = -1.0
+ release_results: Union[Unset, bool] = UNSET
+ run_mode: Union[Unset, RunMode] = UNSET
+ timeout: Union[Unset, int] = UNSET
input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = (
ComputationDefinitionInputClippingMethod.WARNING
)
- run_mode: Union[Unset, RunMode] = UNSET
- local_input_id: Union[Unset, str] = UNSET
+ join_id: Union[Unset, str] = UNSET
maximum_aggregated_value: Union[Unset, None, float] = UNSET
- preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
- data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
- end_to_end_encrypted: Union[Unset, bool] = UNSET
- input_data_object: Union[Unset, str] = UNSET
- timeout: Union[Unset, int] = UNSET
- local_input: Union[Unset, "LocalInput"] = UNSET
owner: Union[Unset, str] = UNSET
+ preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET
project_id: Union[Unset, str] = UNSET
- dp_policy: Union[Unset, "DPPolicy"] = UNSET
- cohort_id: Union[Unset, str] = UNSET
+ dp_epsilon: Union[Unset, float] = -1.0
encrypted: Union[Unset, bool] = UNSET
- join_id: Union[Unset, str] = UNSET
local: Union[Unset, bool] = UNSET
- release_results: Union[Unset, bool] = UNSET
+ local_input_id: Union[Unset, str] = UNSET
wait: Union[Unset, bool] = UNSET
+ cohort_id: Union[Unset, str] = UNSET
+ data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET
+ end_to_end_encrypted: Union[Unset, bool] = UNSET
+ input_data_object: Union[Unset, str] = UNSET
+ dp_policy: Union[Unset, "DPPolicy"] = UNSET
+ local_input: Union[Unset, "LocalInput"] = UNSET
binning_parameters: Union[Unset, "BinningParameters"] = UNSET
identifiable_columns: Union[Unset, List[str]] = UNSET
aggregation_column: Union[Unset, str] = UNSET
@@ -110,44 +110,44 @@ class VBinnedAggregation:
def to_dict(self) -> Dict[str, Any]:
type = self.type.value
- dp_epsilon = self.dp_epsilon
- input_clipping_method: Union[Unset, str] = UNSET
- if not isinstance(self.input_clipping_method, Unset):
- input_clipping_method = self.input_clipping_method.value
-
+ release_results = self.release_results
run_mode: Union[Unset, str] = UNSET
if not isinstance(self.run_mode, Unset):
run_mode = self.run_mode.value
- local_input_id = self.local_input_id
+ timeout = self.timeout
+ input_clipping_method: Union[Unset, str] = UNSET
+ if not isinstance(self.input_clipping_method, Unset):
+ input_clipping_method = self.input_clipping_method.value
+
+ join_id = self.join_id
maximum_aggregated_value = self.maximum_aggregated_value
+ owner = self.owner
preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.preprocessing_parameters, Unset):
preprocessing_parameters = self.preprocessing_parameters.to_dict()
+ project_id = self.project_id
+ dp_epsilon = self.dp_epsilon
+ encrypted = self.encrypted
+ local = self.local
+ local_input_id = self.local_input_id
+ wait = self.wait
+ cohort_id = self.cohort_id
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data_source_parameters, Unset):
data_source_parameters = self.data_source_parameters.to_dict()
end_to_end_encrypted = self.end_to_end_encrypted
input_data_object = self.input_data_object
- timeout = self.timeout
- local_input: Union[Unset, Dict[str, Any]] = UNSET
- if not isinstance(self.local_input, Unset):
- local_input = self.local_input.to_dict()
-
- owner = self.owner
- project_id = self.project_id
dp_policy: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.dp_policy, Unset):
dp_policy = self.dp_policy.to_dict()
- cohort_id = self.cohort_id
- encrypted = self.encrypted
- join_id = self.join_id
- local = self.local
- release_results = self.release_results
- wait = self.wait
+ local_input: Union[Unset, Dict[str, Any]] = UNSET
+ if not isinstance(self.local_input, Unset):
+ local_input = self.local_input.to_dict()
+
binning_parameters: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.binning_parameters, Unset):
binning_parameters = self.binning_parameters.to_dict()
@@ -166,46 +166,46 @@ def to_dict(self) -> Dict[str, Any]:
"type": type,
}
)
- if dp_epsilon is not UNSET:
- field_dict["dpEpsilon"] = dp_epsilon
- if input_clipping_method is not UNSET:
- field_dict["inputClippingMethod"] = input_clipping_method
+ if release_results is not UNSET:
+ field_dict["releaseResults"] = release_results
if run_mode is not UNSET:
field_dict["runMode"] = run_mode
- if local_input_id is not UNSET:
- field_dict["localInputID"] = local_input_id
- if maximum_aggregated_value is not UNSET:
- field_dict["maximumAggregatedValue"] = maximum_aggregated_value
- if preprocessing_parameters is not UNSET:
- field_dict["preprocessingParameters"] = preprocessing_parameters
- if data_source_parameters is not UNSET:
- field_dict["dataSourceParameters"] = data_source_parameters
- if end_to_end_encrypted is not UNSET:
- field_dict["endToEndEncrypted"] = end_to_end_encrypted
- if input_data_object is not UNSET:
- field_dict["inputDataObject"] = input_data_object
if timeout is not UNSET:
field_dict["timeout"] = timeout
- if local_input is not UNSET:
- field_dict["localInput"] = local_input
+ if input_clipping_method is not UNSET:
+ field_dict["inputClippingMethod"] = input_clipping_method
+ if join_id is not UNSET:
+ field_dict["joinId"] = join_id
+ if maximum_aggregated_value is not UNSET:
+ field_dict["maximumAggregatedValue"] = maximum_aggregated_value
if owner is not UNSET:
field_dict["owner"] = owner
+ if preprocessing_parameters is not UNSET:
+ field_dict["preprocessingParameters"] = preprocessing_parameters
if project_id is not UNSET:
field_dict["projectId"] = project_id
- if dp_policy is not UNSET:
- field_dict["DPPolicy"] = dp_policy
- if cohort_id is not UNSET:
- field_dict["cohortId"] = cohort_id
+ if dp_epsilon is not UNSET:
+ field_dict["dpEpsilon"] = dp_epsilon
if encrypted is not UNSET:
field_dict["encrypted"] = encrypted
- if join_id is not UNSET:
- field_dict["joinId"] = join_id
if local is not UNSET:
field_dict["local"] = local
- if release_results is not UNSET:
- field_dict["releaseResults"] = release_results
+ if local_input_id is not UNSET:
+ field_dict["localInputID"] = local_input_id
if wait is not UNSET:
field_dict["wait"] = wait
+ if cohort_id is not UNSET:
+ field_dict["cohortId"] = cohort_id
+ if data_source_parameters is not UNSET:
+ field_dict["dataSourceParameters"] = data_source_parameters
+ if end_to_end_encrypted is not UNSET:
+ field_dict["endToEndEncrypted"] = end_to_end_encrypted
+ if input_data_object is not UNSET:
+ field_dict["inputDataObject"] = input_data_object
+ if dp_policy is not UNSET:
+ field_dict["DPPolicy"] = dp_policy
+ if local_input is not UNSET:
+ field_dict["localInput"] = local_input
if binning_parameters is not UNSET:
field_dict["binningParameters"] = binning_parameters
if identifiable_columns is not UNSET:
@@ -228,14 +228,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = ComputationType(d.pop("type"))
- dp_epsilon = d.pop("dpEpsilon", UNSET)
-
- _input_clipping_method = d.pop("inputClippingMethod", UNSET)
- input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
- if isinstance(_input_clipping_method, Unset):
- input_clipping_method = UNSET
- else:
- input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+ release_results = d.pop("releaseResults", UNSET)
_run_mode = d.pop("runMode", UNSET)
run_mode: Union[Unset, RunMode]
@@ -244,10 +237,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
run_mode = RunMode(_run_mode)
- local_input_id = d.pop("localInputID", UNSET)
+ timeout = d.pop("timeout", UNSET)
+
+ _input_clipping_method = d.pop("inputClippingMethod", UNSET)
+ input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod]
+ if isinstance(_input_clipping_method, Unset):
+ input_clipping_method = UNSET
+ else:
+ input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method)
+
+ join_id = d.pop("joinId", UNSET)
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET)
+ owner = d.pop("owner", UNSET)
+
_preprocessing_parameters = d.pop("preprocessingParameters", UNSET)
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters]
if isinstance(_preprocessing_parameters, Unset):
@@ -255,6 +259,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters)
+ project_id = d.pop("projectId", UNSET)
+
+ dp_epsilon = d.pop("dpEpsilon", UNSET)
+
+ encrypted = d.pop("encrypted", UNSET)
+
+ local = d.pop("local", UNSET)
+
+ local_input_id = d.pop("localInputID", UNSET)
+
+ wait = d.pop("wait", UNSET)
+
+ cohort_id = d.pop("cohortId", UNSET)
+
_data_source_parameters = d.pop("dataSourceParameters", UNSET)
data_source_parameters: Union[Unset, ComputationDataSourceParameters]
if isinstance(_data_source_parameters, Unset):
@@ -266,19 +284,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
input_data_object = d.pop("inputDataObject", UNSET)
- timeout = d.pop("timeout", UNSET)
-
- _local_input = d.pop("localInput", UNSET)
- local_input: Union[Unset, LocalInput]
- if isinstance(_local_input, Unset):
- local_input = UNSET
- else:
- local_input = LocalInput.from_dict(_local_input)
-
- owner = d.pop("owner", UNSET)
-
- project_id = d.pop("projectId", UNSET)
-
_dp_policy = d.pop("DPPolicy", UNSET)
dp_policy: Union[Unset, DPPolicy]
if isinstance(_dp_policy, Unset):
@@ -286,17 +291,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
dp_policy = DPPolicy.from_dict(_dp_policy)
- cohort_id = d.pop("cohortId", UNSET)
-
- encrypted = d.pop("encrypted", UNSET)
-
- join_id = d.pop("joinId", UNSET)
-
- local = d.pop("local", UNSET)
-
- release_results = d.pop("releaseResults", UNSET)
-
- wait = d.pop("wait", UNSET)
+ _local_input = d.pop("localInput", UNSET)
+ local_input: Union[Unset, LocalInput]
+ if isinstance(_local_input, Unset):
+ local_input = UNSET
+ else:
+ local_input = LocalInput.from_dict(_local_input)
_binning_parameters = d.pop("binningParameters", UNSET)
binning_parameters: Union[Unset, BinningParameters]
@@ -313,26 +313,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
v_binned_aggregation = cls(
type=type,
- dp_epsilon=dp_epsilon,
- input_clipping_method=input_clipping_method,
+ release_results=release_results,
run_mode=run_mode,
- local_input_id=local_input_id,
- maximum_aggregated_value=maximum_aggregated_value,
- preprocessing_parameters=preprocessing_parameters,
- data_source_parameters=data_source_parameters,
- end_to_end_encrypted=end_to_end_encrypted,
- input_data_object=input_data_object,
timeout=timeout,
- local_input=local_input,
+ input_clipping_method=input_clipping_method,
+ join_id=join_id,
+ maximum_aggregated_value=maximum_aggregated_value,
owner=owner,
+ preprocessing_parameters=preprocessing_parameters,
project_id=project_id,
- dp_policy=dp_policy,
- cohort_id=cohort_id,
+ dp_epsilon=dp_epsilon,
encrypted=encrypted,
- join_id=join_id,
local=local,
- release_results=release_results,
+ local_input_id=local_input_id,
wait=wait,
+ cohort_id=cohort_id,
+ data_source_parameters=data_source_parameters,
+ end_to_end_encrypted=end_to_end_encrypted,
+ input_data_object=input_data_object,
+ dp_policy=dp_policy,
+ local_input=local_input,
binning_parameters=binning_parameters,
identifiable_columns=identifiable_columns,
aggregation_column=aggregation_column,
diff --git a/src/tuneinsight/api/sdk/models/workflow_item.py b/src/tuneinsight/api/sdk/models/workflow_item.py
index e7bc59b..a675a2a 100644
--- a/src/tuneinsight/api/sdk/models/workflow_item.py
+++ b/src/tuneinsight/api/sdk/models/workflow_item.py
@@ -16,66 +16,65 @@
class WorkflowItem:
"""
Attributes:
- target_handle (Union[Unset, str]): not used - UI specific
+ source (Union[Unset, str]): not used - UI specific
+ target (Union[Unset, str]): not used - UI specific
type (Union[Unset, str]):
+ progress (Union[Unset, int]):
+ source_handle (Union[Unset, str]): not used - UI specific
+ target_handle (Union[Unset, str]): not used - UI specific
data (Union[Unset, WorkflowItemData]):
id (Union[Unset, str]):
- source_handle (Union[Unset, str]): not used - UI specific
- target (Union[Unset, str]): not used - UI specific
position (Union[Unset, WorkflowItemPosition]):
- progress (Union[Unset, int]):
- source (Union[Unset, str]): not used - UI specific
"""
- target_handle: Union[Unset, str] = UNSET
+ source: Union[Unset, str] = UNSET
+ target: Union[Unset, str] = UNSET
type: Union[Unset, str] = UNSET
+ progress: Union[Unset, int] = UNSET
+ source_handle: Union[Unset, str] = UNSET
+ target_handle: Union[Unset, str] = UNSET
data: Union[Unset, "WorkflowItemData"] = UNSET
id: Union[Unset, str] = UNSET
- source_handle: Union[Unset, str] = UNSET
- target: Union[Unset, str] = UNSET
position: Union[Unset, "WorkflowItemPosition"] = UNSET
- progress: Union[Unset, int] = UNSET
- source: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
- target_handle = self.target_handle
+ source = self.source
+ target = self.target
type = self.type
+ progress = self.progress
+ source_handle = self.source_handle
+ target_handle = self.target_handle
data: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.data, Unset):
data = self.data.to_dict()
id = self.id
- source_handle = self.source_handle
- target = self.target
position: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.position, Unset):
position = self.position.to_dict()
- progress = self.progress
- source = self.source
-
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
- if target_handle is not UNSET:
- field_dict["targetHandle"] = target_handle
+ if source is not UNSET:
+ field_dict["source"] = source
+ if target is not UNSET:
+ field_dict["target"] = target
if type is not UNSET:
field_dict["type"] = type
+ if progress is not UNSET:
+ field_dict["progress"] = progress
+ if source_handle is not UNSET:
+ field_dict["sourceHandle"] = source_handle
+ if target_handle is not UNSET:
+ field_dict["targetHandle"] = target_handle
if data is not UNSET:
field_dict["data"] = data
if id is not UNSET:
field_dict["id"] = id
- if source_handle is not UNSET:
- field_dict["sourceHandle"] = source_handle
- if target is not UNSET:
- field_dict["target"] = target
if position is not UNSET:
field_dict["position"] = position
- if progress is not UNSET:
- field_dict["progress"] = progress
- if source is not UNSET:
- field_dict["source"] = source
return field_dict
@@ -85,10 +84,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.workflow_item_position import WorkflowItemPosition
d = src_dict.copy()
- target_handle = d.pop("targetHandle", UNSET)
+ source = d.pop("source", UNSET)
+
+ target = d.pop("target", UNSET)
type = d.pop("type", UNSET)
+ progress = d.pop("progress", UNSET)
+
+ source_handle = d.pop("sourceHandle", UNSET)
+
+ target_handle = d.pop("targetHandle", UNSET)
+
_data = d.pop("data", UNSET)
data: Union[Unset, WorkflowItemData]
if isinstance(_data, Unset):
@@ -98,10 +105,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
id = d.pop("id", UNSET)
- source_handle = d.pop("sourceHandle", UNSET)
-
- target = d.pop("target", UNSET)
-
_position = d.pop("position", UNSET)
position: Union[Unset, WorkflowItemPosition]
if isinstance(_position, Unset):
@@ -109,20 +112,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
else:
position = WorkflowItemPosition.from_dict(_position)
- progress = d.pop("progress", UNSET)
-
- source = d.pop("source", UNSET)
-
workflow_item = cls(
- target_handle=target_handle,
+ source=source,
+ target=target,
type=type,
+ progress=progress,
+ source_handle=source_handle,
+ target_handle=target_handle,
data=data,
id=id,
- source_handle=source_handle,
- target=target,
position=position,
- progress=progress,
- source=source,
)
workflow_item.additional_properties = d
diff --git a/src/tuneinsight/client/auth.py b/src/tuneinsight/client/auth.py
index d6eb3ba..bedb550 100644
--- a/src/tuneinsight/client/auth.py
+++ b/src/tuneinsight/client/auth.py
@@ -30,13 +30,6 @@ class KeycloakClient(client.AuthenticatedClient):
proxies: dict = {"http": "", "https": ""}
def __attrs_post_init__(self):
- self.kc_open_id = KeycloakOpenID(
- server_url=self.oidc_config.oidc_url,
- client_id=self.oidc_config.oidc_client_id,
- client_secret_key=self.oidc_config.oidc_client_secret,
- realm_name=self.oidc_config.oidc_realm,
- )
-
self.kc_open_id = KeycloakOpenID(
server_url=self.oidc_config.oidc_url,
client_id=self.oidc_config.oidc_client_id,
diff --git a/src/tuneinsight/client/config.py b/src/tuneinsight/client/config.py
index 0088fc6..66d93b1 100644
--- a/src/tuneinsight/client/config.py
+++ b/src/tuneinsight/client/config.py
@@ -39,14 +39,10 @@ def __init__(
oidc_url: str,
oidc_realm: str,
):
- self.oidc_client_id = "python-sdk" if oidc_client_id is None else oidc_client_id
- self.oidc_client_secret = (
- "" if oidc_client_secret is None else oidc_client_secret
- )
- self.oidc_url = (
- "https://auth.tuneinsight.com/auth/" if oidc_url is None else oidc_url
- )
- self.oidc_realm = "ti-realm" if oidc_realm is None else oidc_realm
+ self.oidc_client_id = oidc_client_id or "python-sdk"
+ self.oidc_client_secret = oidc_client_secret or ""
+ self.oidc_url = oidc_url or "https://auth.tuneinsight.com/auth/"
+ self.oidc_realm = oidc_realm or "ti-realm"
@staticmethod
def from_json(json_dct):
@@ -75,11 +71,11 @@ def __init__(
password: str,
verify_ssl: bool,
):
- self.verify_ssl = True if verify_ssl is None else verify_ssl
- self.static_token = "" if static_token is None else static_token
- self.username = "" if username is None else username
- self.password = "" if password is None else password
self.oidc_config = oidc_config
+ self.static_token = static_token or ""
+ self.username = username or ""
+ self.password = password or ""
+ self.verify_ssl = True if verify_ssl is None else verify_ssl
@staticmethod
def from_json(json_dct):
@@ -103,7 +99,7 @@ class ClientConfiguration:
def __init__(self, url, security, http_proxy: str = None, https_proxy: str = None):
"""
- Initialize a client.
+ Initializes a client.
Args:
url (str): The URL of the Tune Insight API
@@ -117,7 +113,7 @@ def __init__(self, url, security, http_proxy: str = None, https_proxy: str = Non
self.https_proxy = https_proxy
def save(self, filepath: str):
- """Save this configuration to a file."""
+ """Saves this configuration to a file."""
with open(filepath, "w", encoding="utf-8") as f:
res = to_dict(self)
yaml.safe_dump(res, f)
@@ -125,7 +121,7 @@ def save(self, filepath: str):
@staticmethod
def from_json(json_dct):
"""
- Create a Client configuration from a JSON dictionary.
+ Creates a Client configuration from a JSON dictionary.
Args:
json_dct (dict): The JSON dictionary containing the client configuration.
@@ -133,22 +129,14 @@ def from_json(json_dct):
"""
security = SecurityConfiguration.from_json(json_dct.get("security"))
client = ClientConfiguration(json_dct.get("url"), security)
- client.http_proxy = (
- json_dct.get("http_proxy")
- if json_dct.get("http_proxy") is not None
- else client.http_proxy
- )
- client.https_proxy = (
- json_dct.get("https_proxy")
- if json_dct.get("https_proxy") is not None
- else client.https_proxy
- )
+ client.http_proxy = json_dct.get("http_proxy") or client.http_proxy
+ client.https_proxy = json_dct.get("https_proxy") or client.https_proxy
return client
@staticmethod
def from_path(filepath: str):
"""
- Create a Client configuration from a file.
+ Creates a Client configuration from a file.
Args:
filepath: the path to the file to load from, a text file with a
@@ -163,7 +151,7 @@ def from_path(filepath: str):
@staticmethod
def from_env(envpath: str = None):
"""
- Create a Client configuration from environment variables.
+ Creates a Client configuration from environment variables.
Args:
envpath (optional): path to a file containing environment variables
@@ -208,15 +196,7 @@ def from_env(envpath: str = None):
url=os.getenv("NODE_URL"), security=security_config
)
- client.http_proxy = (
- os.getenv("HTTP_PROXY")
- if os.getenv("HTTP_PROXY") is not None
- else client.http_proxy
- )
- client.https_proxy = (
- os.getenv("HTTPS_PROXY")
- if os.getenv("HTTPS_PROXY") is not None
- else client.https_proxy
- )
+ client.http_proxy = os.getenv("HTTP_PROXY") or client.http_proxy
+ client.https_proxy = os.getenv("HTTPS_PROXY") or client.https_proxy
return client
diff --git a/src/tuneinsight/client/datasource.py b/src/tuneinsight/client/datasource.py
index 0994a38..1358e1f 100644
--- a/src/tuneinsight/client/datasource.py
+++ b/src/tuneinsight/client/datasource.py
@@ -129,6 +129,7 @@ def from_api(
name: str,
clear_if_exists: bool = False,
cert: str = "",
+ insecure_skip_verify_tls: bool = False,
):
"""
Creates a new API datasource.
@@ -146,7 +147,10 @@ def from_api(
definition.clear_if_exists = clear_if_exists
definition.type = models.DataSourceType.API
definition.configuration = models.DataSourceConfig(
- api_url=api_url, api_type=api_type, cert=cert
+ api_url=api_url,
+ api_type=api_type,
+ cert=cert,
+ insecure_skip_verify_tls=insecure_skip_verify_tls,
)
definition.credentials = models.Credentials(api_token=api_token)
return cls._from_definition(client, definition=definition)
@@ -185,6 +189,11 @@ def get_id(self) -> str:
"""
return self.model.id
+ @property
+ def is_mock(self) -> bool:
+ """Whether this datasource contains mock/synthetic data, and should not be used in production."""
+ return self.model.is_mock
+
## Methods to manipulate a datasource object.
def adapt(
@@ -265,6 +274,8 @@ def get_dataframe(self, query: Any = "", json_path: str = "") -> pd.DataFrame:
Raises:
AuthorizationError: if the client is not the owner of the datasource.
"""
+ if not query and self.query_parameters is not None:
+ query = self.query_parameters.database_query
do = self.adapt(
do_type=models.DataObjectType.TABLE, query=query, json_path=json_path
)
@@ -287,6 +298,7 @@ def synthesize(
query: str = UNSET,
name: str = UNSET,
num_rows: int = UNSET,
+ epsilon: float = UNSET,
) -> "DataSource":
"""
Generates a synthetic dataset that mimics this datasource.
@@ -306,21 +318,33 @@ def synthesize(
synthetic_{datasource_name} is used instead.
num_rows (int, optional): number of rows to generate. If not provided,
the synthetic dataset will have the same number of rows as this datasource.
+ epsilon (float, optional): if set, use a differentially private generation
+ method with this value of epsilon. The synthetic data produced by this
+ method is guaranteed to be privacy-preserving, but will be less accurate.
+ A good default value is epsilon=1. This will work better with large datasets.
"""
+ # If no query is provided, but this datasource has a local query, use it.
+ if query is None and self.query_parameters is not None:
+ query = self.query_parameters.database_query
+ # If the user specified neither the query nor the table name, use the datasource name as table.
+ # This is a default case that will work for mock data.
if isinstance(table, Unset) and isinstance(query, Unset):
table = self.model.name
- if isinstance(name, Unset) and not isinstance(self.model.name, Unset):
- name = f"synthetic_{self.model.name}"
response = post_synthetic_dataset.sync_detailed(
client=self.client,
- data_source_id=self.model.unique_id,
+ data_source_id=self.get_id(),
num_rows=num_rows,
table=table,
query=query,
table_name=name,
+ dp_epsilon=epsilon,
)
validate_response(response)
- return DataSource(response.parsed, self.client)
+ ds = DataSource(response.parsed, self.client)
+ # For synthetic data, the table name is the same as the datasource name, so set the local query.
+ if not isinstance(ds.model.name, Unset):
+ ds.set_query(f"select * from {ds.model.name}")
+ return ds
## Methods to interact with queries etc.
def set_query(self, query: str):
diff --git a/src/tuneinsight/client/diapason.py b/src/tuneinsight/client/diapason.py
index c9ada68..defcee0 100644
--- a/src/tuneinsight/client/diapason.py
+++ b/src/tuneinsight/client/diapason.py
@@ -6,7 +6,6 @@
import webbrowser
import os
-from keycloak.exceptions import KeycloakError
import attr
import pandas as pd
@@ -18,6 +17,7 @@
from tuneinsight.api.sdk.api.api_datasource import get_data_source_list
from tuneinsight.api.sdk.api.api_dataobject import get_data_object
from tuneinsight.api.sdk.api.api_infos import get_infos
+from tuneinsight.api.sdk.api.health import get_health
from tuneinsight.api.sdk import models
from tuneinsight.client.dataobject import DataObject
@@ -27,7 +27,6 @@
from tuneinsight.client import config
from tuneinsight.client import auth
from tuneinsight.api.sdk.types import UNSET
-from tuneinsight.client.validation import InvalidResponseError
from tuneinsight.utils import time_tools
@@ -196,16 +195,25 @@ def login(self, open_page=True, blocking=True):
device_resp = self.client.get_device_code()
login_url = device_resp["verification_uri_complete"]
+ # Sometimes, the login_url returned by keycloak is just the end of the query.
+ # When that happens, complete the URL by adding the OIDC information.
+ if not login_url.startswith("http") and isinstance(
+ self.client, auth.KeycloakClient
+ ):
+ oidc_config = self.client.oidc_config # pylint: disable=no-member
+ oidc_url = oidc_config.oidc_url
+ if not oidc_url.endswith("/"):
+ oidc_url += "/"
+ login_url = f"{oidc_url}realms/{oidc_config.oidc_realm}/device/{login_url}"
print("Follow this link to login: " + login_url)
if open_page:
webbrowser.open(login_url)
if blocking:
self.wait_ready(sleep_seconds=1)
- return None
return login_url
def wait_ready(self, repeat: int = 50, sleep_seconds: int = 5):
- """Polls the API until it answers by using the get_projects() endpoint.
+ """Polls the API until it answers by using the healthcheck() endpoint.
Args:
repeat (int, optional): maximum number of requests sent to the API. Defaults to 50.
@@ -216,22 +224,13 @@ def wait_ready(self, repeat: int = 50, sleep_seconds: int = 5):
"""
num_tries = repeat
sleep_time = sleep_seconds * time_tools.SECOND
- last_ex = None
# Disable API version checks while waiting for the server.
with self._disabled_api_check():
- for _ in range(num_tries):
- try:
- self.get_projects()
+ for i in range(num_tries):
+ # At the last iteration, raise an error instead of silencing it.
+ if self.healthcheck(warn=False, error=i == num_tries - 1):
return
- except (
- ConnectionError,
- KeycloakError,
- InvalidResponseError,
- ) as ex:
- time_tools.sleep(sleep_time)
- last_ex = ex
- continue
- raise last_ex
+ time_tools.sleep(sleep_time)
@contextmanager
def timeout(self, timeout: int):
@@ -281,6 +280,7 @@ def new_api_datasource(
api_token: str = "",
clear_if_exists: bool = False,
cert: str = "",
+ insecure_skip_verify_tls: bool = False,
) -> DataSource:
"""
Creates a new API datasource.
@@ -302,6 +302,7 @@ def new_api_datasource(
name,
clear_if_exists,
cert,
+ insecure_skip_verify_tls,
)
def new_csv_datasource(
@@ -426,6 +427,7 @@ def new_project(
participants: list = None,
non_contributor: bool = False,
run_async: bool = True,
+ description: str = None,
) -> Project:
"""Creates a new project.
@@ -440,6 +442,7 @@ def new_project(
participants (Union[Unset, List[str]]): The IDs of the users who participate in the project.
non_contributor (bool, default False): indicates that this participant participates in the computations but does not contribute any data.
run_async (bool, default True): whether to run computations asynchronously.
+ description (str, default None): optional description of the project. Defaults to None.
Raises:
Exception: in case the project already exists and clear_if_exists is False.
@@ -457,6 +460,9 @@ def new_project(
if participants is None:
participants = []
+ if description is None:
+ description = ""
+
if name in [p.get_name() for p in self.get_projects()]:
if clear_if_exists:
warnings.warn(
@@ -478,6 +484,7 @@ def new_project(
participants=participants,
non_contributor=non_contributor,
run_async=run_async,
+ description=description,
)
# authorization_status = models.AuthorizationStatus.UNAUTHORIZED)
proj_response: Response[models.Project] = post_project.sync_detailed(
@@ -608,3 +615,25 @@ def _disabled_api_check(self):
self._api_compatible = True
yield self
self._api_compatible = old_check
+
+ def healthcheck(self, warn: bool = True, error: bool = False) -> bool:
+ """
+ Checks that the client is set up properly and the instance is reachable.
+
+ Args:
+ warn (bool, default True): whether to print a warning with the error
+ message if the healthcheck fails.
+ error (bool, default False): whether to raise an error if the healthcheck
+ fails. This raises the error that made it fail.
+
+ """
+ try:
+ response = get_health.sync_detailed(client=self.client)
+ validate_response(response)
+ return True
+ except Exception as err: # pylint: disable=broad-exception-caught
+ if error:
+ raise err
+ if warn:
+ warnings.warn(f"Healthcheck error: {err} ({type(err)})")
+ return False
diff --git a/src/tuneinsight/client/project.py b/src/tuneinsight/client/project.py
index 1445e38..2f8f892 100644
--- a/src/tuneinsight/client/project.py
+++ b/src/tuneinsight/client/project.py
@@ -793,6 +793,7 @@ def is_differentially_private(self):
return dp_policy.use_differential_privacy
def get_computations(self) -> List[models.Computation]:
+ self._refresh()
return self.model.computations
def get_remaining_quota(
diff --git a/src/tuneinsight/computations/intersection.py b/src/tuneinsight/computations/intersection.py
index 4a7cb6c..12e11f9 100644
--- a/src/tuneinsight/computations/intersection.py
+++ b/src/tuneinsight/computations/intersection.py
@@ -48,8 +48,8 @@ def plot(self, title: str = "", x_label: str = "", y_label: str = ""):
plt.style.use("bmh")
fig, ax = plt.subplots()
ax.bar(
- self.data.psi_ratio,
self.data.index,
+ self.data.psi_ratio,
color="#DE5F5A",
edgecolor="#354661",
linewidth=2.5,
diff --git a/src/tuneinsight/computations/policy.py b/src/tuneinsight/computations/policy.py
index be45cbd..8376521 100644
--- a/src/tuneinsight/computations/policy.py
+++ b/src/tuneinsight/computations/policy.py
@@ -226,6 +226,24 @@ def add_authorized_computation_type(self, computation_type: Type):
comp_types.add(models.ComputationType(computation_type.to_computation_type()))
self.authorized_computation_types = list(comp_types)
+ def set_min_dataset_size(self, local_size: int = None, collective_size: int = None):
+ """
+ Sets a minimum dataset size policy either on the local (per-instance) datasets or the collective (including all participants) dataset.
+
+ Args:
+ local_size (int, optional): minimum dataset size that should be satisfied from all participating organizations locally. Defaults to None.
+ collective_size (int, optional): minimum dataset size that should be satisfied from the collective dataset of all organizations. Defaults to None.
+
+ Raises:
+ ValueError: if neither the local nor the collective size argument is provided to this method.
+ """
+ if local_size is None and collective_size is None:
+ raise ValueError("No dataset size was provided")
+ if local_size is not None:
+ self.dp_policy.min_dataset_size = int(local_size)
+ if collective_size is not None:
+ self.dp_policy.min_global_dataset_size = int(collective_size)
+
displayed_labels = {
"validateParameters": "template validation",
diff --git a/src/tuneinsight/computations/preprocessing.py b/src/tuneinsight/computations/preprocessing.py
index e0cc662..a9577e7 100644
--- a/src/tuneinsight/computations/preprocessing.py
+++ b/src/tuneinsight/computations/preprocessing.py
@@ -169,7 +169,7 @@ def filter(
type=models.PreprocessingOperationType.FILTER,
col_name=target_column,
comparator=comparator,
- value=value,
+ value=str(value),
numerical=numerical,
),
nodes,
diff --git a/src/tuneinsight/computations/statistical_aggregation.py b/src/tuneinsight/computations/statistical_aggregation.py
index 372429b..36a278c 100644
--- a/src/tuneinsight/computations/statistical_aggregation.py
+++ b/src/tuneinsight/computations/statistical_aggregation.py
@@ -105,14 +105,15 @@ class GroupByAggregation(ModelBasedComputation):
# Other computation parameters.
join_id: str = ""
aggregated_columns: List[str] = []
- keep_non_categorized_items: bool = True
- def __init__(self, project: "Project"):
+ def __init__(self, project: "Project", keep_non_categorized_items: bool = True):
"""
Creates an Aggregation computation.
Args
project (client.Project): the project to which this computation belongs.
+ keep_non_categorized_items (bool, optional): whether non-binned leftover records
+ should be included in the aggregation. Defaults to True.
"""
super().__init__(
@@ -121,6 +122,7 @@ def __init__(self, project: "Project"):
models.ComputationType.STATISTICALAGGREGATION,
)
self._labeller = None
+ self.keep_non_categorized_items = keep_non_categorized_items
def _reset_model(self):
"""Reset the internal memory of the model."""
diff --git a/src/tuneinsight/cryptolib/cryptolib-linux_386.so b/src/tuneinsight/cryptolib/cryptolib-linux_386.so
new file mode 100644
index 0000000..0f6bf46
Binary files /dev/null and b/src/tuneinsight/cryptolib/cryptolib-linux_386.so differ
diff --git a/src/tuneinsight/cryptolib/cryptolib-linux_arm64.so b/src/tuneinsight/cryptolib/cryptolib-linux_arm64.so
new file mode 100644
index 0000000..712e9d7
Binary files /dev/null and b/src/tuneinsight/cryptolib/cryptolib-linux_arm64.so differ
diff --git a/src/tuneinsight/cryptolib/cryptolib-linux_x86_64.so b/src/tuneinsight/cryptolib/cryptolib-linux_x86_64.so
index 59a638a..5571169 100644
Binary files a/src/tuneinsight/cryptolib/cryptolib-linux_x86_64.so and b/src/tuneinsight/cryptolib/cryptolib-linux_x86_64.so differ
diff --git a/src/tuneinsight/cryptolib/cryptolib-windows_386.dll b/src/tuneinsight/cryptolib/cryptolib-windows_386.dll
new file mode 100644
index 0000000..6da00c3
Binary files /dev/null and b/src/tuneinsight/cryptolib/cryptolib-windows_386.dll differ
diff --git a/src/tuneinsight/cryptolib/cryptolib-windows_x86_64.dll b/src/tuneinsight/cryptolib/cryptolib-windows_x86_64.dll
new file mode 100644
index 0000000..5e53189
Binary files /dev/null and b/src/tuneinsight/cryptolib/cryptolib-windows_x86_64.dll differ
diff --git a/src/tuneinsight/cryptolib/cryptolib.py b/src/tuneinsight/cryptolib/cryptolib.py
index ff9e063..05238d7 100644
--- a/src/tuneinsight/cryptolib/cryptolib.py
+++ b/src/tuneinsight/cryptolib/cryptolib.py
@@ -145,6 +145,40 @@ def key_generation(hefloat_operator_id: bytes) -> bytes:
return key_response
+def get_secret_key_bytes(hefloat_operator_id: bytes) -> bytes:
+ """Returns the bytes of the secret key.
+
+ Args:
+ hefloat_operator_id (bytes): The crypto system id
+
+ Returns:
+ get_sk_response (bytes): The bytes of the secret key
+ """
+ get_sk = so.GetSecretKeyBytes
+ get_sk.restype = ctypes.c_char_p
+ get_sk_response = get_sk(hefloat_operator_id)
+ if get_sk_response is None:
+ raise go_error()
+ return get_sk_response
+
+
+def get_public_key_bytes(hefloat_operator_id: bytes) -> bytes:
+ """Returns the bytes of the public key.
+
+ Args:
+ hefloat_operator_id (bytes): The crypto system id
+
+ Returns:
+ get_pk_response (bytes): The bytes of the public key
+ """
+ get_pk = so.GetPublicKeyBytes
+ get_pk.restype = ctypes.c_char_p
+ get_pk_response = get_pk(hefloat_operator_id)
+ if get_pk_response is None:
+ raise go_error()
+ return get_pk_response
+
+
def relinearization_key_generation(hefloat_operator_id: bytes) -> bytes:
"""Generates a key for a given cryptosystem.
diff --git a/src/tuneinsight/utils/datagen.py b/src/tuneinsight/utils/datagen.py
index fc31744..659c077 100644
--- a/src/tuneinsight/utils/datagen.py
+++ b/src/tuneinsight/utils/datagen.py
@@ -198,8 +198,18 @@ class SKUGenerator(MockGenerator):
"""
- def __init__(self):
+ typo_probability: float
+
+ def __init__(self, typo_probability: float = 0.0):
+ """
+ Args:
+ typo_probability (float, optional): the probability for typos in the product names. Defaults to 0.0.
+ """
MockGenerator.__init__(self, PostMockDatasetMethod.SKUS)
+ self.typo_probability = typo_probability
+
+ def get_config(self):
+ return {"typo_probability": self.typo_probability}
class PersonsGenerator(MockGenerator):
@@ -477,7 +487,7 @@ def add_date(
end: the latest possible date in the dataset.
bins (optional, default 10): the extremities of the bins in which to group values.
If an integer is provided, the domain is divided into `bins` bins of uniform size.
- strformat (optional, default %d-%m-%Y): a formatting str acceptable by datetime.strptime to represent dates.
+ strformat (optional, default %Y-%m-%d): a formatting str acceptable by datetime.strptime to represent dates.
"""
self._add_attribute(
name,
diff --git a/src/tuneinsight/utils/mbi/domain.py b/src/tuneinsight/utils/mbi/domain.py
index c6a59ad..3d437d9 100644
--- a/src/tuneinsight/utils/mbi/domain.py
+++ b/src/tuneinsight/utils/mbi/domain.py
@@ -161,7 +161,7 @@ def __init__(
start_date: str,
end_date: str,
bins: Union[int, list] = 10,
- strformat: str = "%d-%m-%Y",
+ strformat: str = "%Y-%m-%d",
):
"""
Args