diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml index 6bd65a8..8b2eb0d 100644 --- a/.github/workflows/branch-deploy.yml +++ b/.github/workflows/branch-deploy.yml @@ -19,7 +19,7 @@ jobs: - name: Setup python uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: 3.11 architecture: x64 - run: pip install dokkusd - uses: oNaiPs/secrets-to-env-action@v1 diff --git a/.github/workflows/branch-destroy.yml b/.github/workflows/branch-destroy.yml index 7f4947d..615ef73 100644 --- a/.github/workflows/branch-destroy.yml +++ b/.github/workflows/branch-destroy.yml @@ -16,7 +16,7 @@ jobs: - name: Setup python uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: 3.11 architecture: x64 - run: pip install dokkusd - run: python -m dokkusd.cli destroy --appname ${{ vars.DOKKU_APP_NAME_PREFIX }}-${{ github.event.ref }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 23019c5..cbb1684 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -18,4 +18,6 @@ jobs: restore-keys: | ${{ runner.os }}-pip- - run: pip install -r requirements_dev.txt + - run: isort --check-only cove_project/ cove_bods/ + - run: black --check cove_project/ cove_bods/ - run: flake8 cove_project/ cove_bods/ diff --git a/.github/workflows/live-deploy.yml b/.github/workflows/live-deploy.yml index 49f656d..951c3c9 100644 --- a/.github/workflows/live-deploy.yml +++ b/.github/workflows/live-deploy.yml @@ -20,7 +20,7 @@ jobs: - name: Setup python uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: 3.11 architecture: x64 - run: pip install dokkusd - uses: oNaiPs/secrets-to-env-action@v1 diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 0000000..9caba16 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,2 @@ +[isort] +profile=black diff --git a/README.md b/README.md index 17ab9aa..c16e968 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,8 @@ -# 
openownership-cove-bods-alpha +# Open Ownership BODS Cove + +Checks data complies with the Beneficial Ownership Data Standard (BODS) versions 0.1-0.4, +and displays any errors. Also converts uploaded data between JSON and XLSX formats. +Based on: https://github.com/OpenDataServices/cove ## Dev installation diff --git a/cove_bods/apps.py b/cove_bods/apps.py index 4b6764b..f15dad6 100644 --- a/cove_bods/apps.py +++ b/cove_bods/apps.py @@ -2,4 +2,4 @@ class CoveBodsConfig(AppConfig): - name = 'cove_bods' + name = "cove_bods" diff --git a/cove_bods/forms.py b/cove_bods/forms.py index b1eb862..3f2ab70 100644 --- a/cove_bods/forms.py +++ b/cove_bods/forms.py @@ -17,16 +17,22 @@ class NewUploadForm(forms.Form): ), label="", ) - sample_mode = forms.BooleanField(label="Process using Sample mode (see information above)", required=False) + sample_mode = forms.BooleanField( + label="Process using Sample mode (see information above)", required=False + ) class NewTextForm(forms.Form): file_field_names = [] paste = forms.CharField(label="Paste (JSON only)", widget=forms.Textarea) - sample_mode = forms.BooleanField(label="Process using Sample mode (see information above)", required=False) + sample_mode = forms.BooleanField( + label="Process using Sample mode (see information above)", required=False + ) class NewURLForm(forms.Form): file_field_names = [] url = forms.URLField(label="URL") - sample_mode = forms.BooleanField(label="Process using Sample mode (see information above)", required=False) + sample_mode = forms.BooleanField( + label="Process using Sample mode (see information above)", required=False + ) diff --git a/cove_bods/process.py b/cove_bods/process.py index 66a8663..5009f1d 100644 --- a/cove_bods/process.py +++ b/cove_bods/process.py @@ -1,33 +1,48 @@ -from django.core.files.base import ContentFile -from django.core.files.storage import default_storage - -from libcovebods.schema import SchemaBODS -from libcovebods.config import LibCoveBODSConfig -from 
libcovebods.jsonschemavalidate import JSONSchemaValidator -from libcovebods.additionalfields import AdditionalFields -import libcovebods.run_tasks -import libcovebods.data_reader -from typing import List - import json import os.path +from typing import List import flattentool -from sentry_sdk import capture_exception - -from libcoveweb2.models import SuppliedDataFile, SuppliedData +import libcovebods.data_reader +import libcovebods.run_tasks +import pandas +from django.core.files.base import ContentFile +from django.core.files.storage import default_storage +from libcovebods.additionalfields import AdditionalFields +from libcovebods.config import LibCoveBODSConfig +from libcovebods.jsonschemavalidate import JSONSchemaValidator +from libcovebods.schema import SchemaBODS +from libcovebods.schema_dir import schema_registry +from libcoveweb2.models import SuppliedData, SuppliedDataFile from libcoveweb2.process.base import ProcessDataTask from libcoveweb2.process.common_tasks.task_with_state import TaskWithState # from libcove.lib.converters import convert_json, convert_spreadsheet -from libcoveweb2.utils import get_file_type_for_flatten_tool -from libcoveweb2.utils import group_data_list_by +from libcoveweb2.utils import get_file_type_for_flatten_tool, group_data_list_by +from packaging import version as packaging_version +from sentry_sdk import capture_exception + + +def check_table_file_new(input_file): + if get_file_type_for_flatten_tool(input_file) == "xlsx": + data = pandas.read_excel(input_file.upload_dir_and_filename()) + if "statementID" in data.head(): + return False + else: + return True + else: + with open(input_file.upload_dir_and_filename()) as file: + head = file.readline() + if "statementID" in head: + return False + else: + return True def create_error_file(directory: str, name: str, data: dict): """Create temporary error file""" filename = os.path.join(directory, f"{name}-error.json") - return default_storage.save(filename, 
ContentFile(json.dumps(data).encode('utf-8'))) + return default_storage.save(filename, ContentFile(json.dumps(data).encode("utf-8"))) def error_file_exists(directory: str, name: str) -> bool: @@ -39,7 +54,7 @@ def error_file_exists(directory: str, name: str) -> bool: def read_error_file(directory: str, name: str) -> dict: """Read data from error file""" filename = os.path.join(directory, f"{name}-error.json") - return json.loads(default_storage.open(filename).read().decode('utf-8')) + return json.loads(default_storage.open(filename).read().decode("utf-8")) def delete_error_file(directory: str, name: str): @@ -63,9 +78,9 @@ def get_context(self): class SetOrTestSuppliedDataFormat(ProcessDataTask): map_file_type_to_format = { - 'json': 'json', - 'xlsx': 'spreadsheet', - 'ods': 'spreadsheet' + "json": "json", + "xlsx": "spreadsheet", + "ods": "spreadsheet", } def is_processing_applicable(self) -> bool: @@ -80,10 +95,14 @@ def process(self, process_data: dict) -> dict: supplied_data_files = SuppliedDataFile.objects.filter( supplied_data=self.supplied_data ) - all_file_types = [get_file_type_for_flatten_tool(i) for i in supplied_data_files] + all_file_types = [ + get_file_type_for_flatten_tool(i) for i in supplied_data_files + ] file_types_reduced = list(set([i for i in all_file_types if i])) if len(file_types_reduced) == 1: - self.supplied_data.format = self.map_file_type_to_format[file_types_reduced[0]] + self.supplied_data.format = self.map_file_type_to_format[ + file_types_reduced[0] + ] self.supplied_data.save() elif len(file_types_reduced) == 0: @@ -179,14 +198,25 @@ def process(self, process_data: dict) -> dict: # We don't know what schema version the spreadsheet is in. Use default schema. 
schema = SchemaBODS() + config = LibCoveBODSConfig().config + + if check_table_file_new(supplied_data_json_file): + statement_id_name = "statementID" + schema = config["schema_versions"]["0.2"]["schema_url"] + else: + statement_id_name = "statementId" + schema = schema_registry( + config["schema_versions"][config["schema_latest_version"]]["schema_url"] + ).contents("urn:statement") + unflatten_kwargs = { "output_name": os.path.join(output_dir, "unflattened.json"), "root_list_path": "there-is-no-root-list-path", - "root_id": "statementID", - "id_name": "statementID", + "root_id": statement_id_name, + "id_name": statement_id_name, "root_is_list": True, "input_format": get_file_type_for_flatten_tool(supplied_data_json_file), - "schema": schema.pkg_schema_url, + "schema": schema, } flattentool.unflatten(input_filename, **unflatten_kwargs) @@ -218,9 +248,7 @@ def __init__( self, supplied_data: SuppliedData, supplied_data_files: List[SuppliedDataFile] ): super().__init__(supplied_data, supplied_data_files) - self.data_filename = os.path.join( - self.supplied_data.data_dir(), "schema.json" - ) + self.data_filename = os.path.join(self.supplied_data.data_dir(), "schema.json") def is_processing_applicable(self) -> bool: return True @@ -230,16 +258,25 @@ def is_processing_needed(self) -> bool: def process(self, process_data: dict) -> dict: # Make things and set info for later in processing - process_data['data_reader'] = libcovebods.data_reader.DataReader( - process_data["json_data_filename"], sample_mode=process_data['sample_mode'] + process_data["data_reader"] = libcovebods.data_reader.DataReader( + process_data["json_data_filename"], sample_mode=process_data["sample_mode"] ) - process_data['config'] = LibCoveBODSConfig() - process_data['schema'] = SchemaBODS(process_data['data_reader'], process_data['config']) + process_data["config"] = LibCoveBODSConfig() + try: + process_data["schema"] = SchemaBODS( + process_data["data_reader"], process_data["config"] + ) + except 
json.decoder.JSONDecodeError: + raise ValueError("JSON: Data parsing error") # Save some to disk for templates if not os.path.exists(self.data_filename): - save_data = { - "schema_version_used": process_data['schema'].schema_version - } + save_data = {"schema_version_used": process_data["schema"].schema_version} + if packaging_version.parse( + process_data["schema"].schema_version + ) < packaging_version.parse("0.4"): + save_data["record_schema_used"] = False + else: + save_data["record_schema_used"] = True with open(self.data_filename, "w") as fp: json.dump(save_data, fp, indent=4) # return @@ -282,7 +319,9 @@ def is_processing_applicable(self) -> bool: def is_processing_needed(self) -> bool: if os.path.exists(self.xlsx_filename): return False - if error_file_exists(self.supplied_data.storage_dir(), "ConvertJSONIntoSpreadsheets"): + if error_file_exists( + self.supplied_data.storage_dir(), "ConvertJSONIntoSpreadsheets" + ): return False return True @@ -294,22 +333,41 @@ def process(self, process_data: dict) -> dict: os.makedirs(self.output_dir, exist_ok=True) + if os.path.isdir(process_data["schema"].pkg_schema_url): + schema = schema_registry(process_data["schema"].pkg_schema_url).contents( + "urn:statement" + ) + else: + schema = process_data["schema"].pkg_schema_url + + if packaging_version.parse( + process_data["schema"].schema_version + ) < packaging_version.parse("0.4"): + statement_id_name = "statementID" + else: + statement_id_name = "statementId" + flatten_kwargs = { "output_name": self.output_dir, "root_list_path": "there-is-no-root-list-path", - "root_id": "statementID", - "id_name": "statementID", + "root_id": statement_id_name, + "id_name": statement_id_name, "root_is_list": True, - "schema": process_data['schema'].pkg_schema_url, + "schema": schema, } try: flattentool.flatten(process_data["json_data_filename"], **flatten_kwargs) except Exception as err: capture_exception(err) - create_error_file(self.supplied_data.storage_dir(), 
"ConvertJSONIntoSpreadsheets", - {"type": type(err).__name__, - "filename": process_data["json_data_filename"].split('/')[-1]}) + create_error_file( + self.supplied_data.storage_dir(), + "ConvertJSONIntoSpreadsheets", + { + "type": type(err).__name__, + "filename": process_data["json_data_filename"].split("/")[-1], + }, + ) return process_data @@ -326,10 +384,15 @@ def get_context(self): context["download_xlsx_size"] = os.stat(self.xlsx_filename).st_size else: context["can_download_xlsx"] = False - if error_file_exists(self.supplied_data.storage_dir(), "ConvertJSONIntoSpreadsheets"): - context["xlsx_error"] = read_error_file(self.supplied_data.storage_dir(), - "ConvertJSONIntoSpreadsheets") - delete_error_file(self.supplied_data.storage_dir(), "ConvertJSONIntoSpreadsheets") + if error_file_exists( + self.supplied_data.storage_dir(), "ConvertJSONIntoSpreadsheets" + ): + context["xlsx_error"] = read_error_file( + self.supplied_data.storage_dir(), "ConvertJSONIntoSpreadsheets" + ) + delete_error_file( + self.supplied_data.storage_dir(), "ConvertJSONIntoSpreadsheets" + ) else: context["xlsx_error"] = False # done! 
@@ -342,11 +405,12 @@ class PythonValidateTask(TaskWithState): def process_get_state(self, process_data: dict) -> dict: context = libcovebods.run_tasks.process_additional_checks( - process_data['data_reader'], - process_data['config'], - process_data['schema'], - task_classes=libcovebods.run_tasks.TASK_CLASSES_IN_SAMPLE_MODE if - process_data["sample_mode"] else libcovebods.run_tasks.TASK_CLASSES + process_data["data_reader"], + process_data["config"], + process_data["schema"], + task_classes=libcovebods.run_tasks.TASK_CLASSES_IN_SAMPLE_MODE + if process_data["sample_mode"] + else libcovebods.run_tasks.TASK_CLASSES, ) # counts @@ -354,46 +418,72 @@ def process_get_state(self, process_data: dict) -> dict: # We need to calculate some stats for showing in the view total_ownership_or_control_interest_statements = 0 - for key, count in \ - context['statistics']['count_ownership_or_control_statement_interest_statement_types'].items(): + for key, count in context["statistics"][ + "count_ownership_or_control_statement_interest_statement_types" + ].items(): total_ownership_or_control_interest_statements += count - context['statistics'][ - 'count_ownership_or_control_interest_statement'] = total_ownership_or_control_interest_statements # noqa + context["statistics"][ + "count_ownership_or_control_interest_statement" + ] = total_ownership_or_control_interest_statements # noqa # The use of r_e_type is to stop flake8 complaining about line length - r_e_type = 'registeredEntity' - context['statistics']['count_entities_registeredEntity_legalEntity_with_any_identifier'] = ( - context['statistics']['count_entity_statements_types_with_any_identifier'][r_e_type] + - context['statistics']['count_entity_statements_types_with_any_identifier']['legalEntity']) - context['statistics']['count_entities_registeredEntity_legalEntity_with_any_identifier_with_id_and_scheme'] = ( - context['statistics']['count_entity_statements_types_with_any_identifier_with_id_and_scheme'][ - r_e_type] + - 
context['statistics']['count_entity_statements_types_with_any_identifier_with_id_and_scheme'][ - 'legalEntity']) - context['statistics']['count_entities_registeredEntity_legalEntity'] = ( - context['statistics']['count_entity_statements_types'][r_e_type] + - context['statistics']['count_entity_statements_types']['legalEntity']) - unknown_schema_version_used = \ - [i for i in context['additional_checks'] if i['type'] == 'unknown_schema_version_used'] - context['unknown_schema_version_used'] = unknown_schema_version_used[0] \ - if unknown_schema_version_used else None - context['inconsistent_schema_version_used_count'] = \ - len([i for i in context['additional_checks'] if i['type'] == 'inconsistent_schema_version_used']) - - context['checks_not_run_in_sample_mode'] = [] + r_e_type = "registeredEntity" + context["statistics"][ + "count_entities_registeredEntity_legalEntity_with_any_identifier" + ] = ( + context["statistics"]["count_entity_statements_types_with_any_identifier"][ + r_e_type + ] + + context["statistics"][ + "count_entity_statements_types_with_any_identifier" + ]["legalEntity"] + ) + context["statistics"][ + "count_entities_registeredEntity_legalEntity_with_any_identifier_with_id_and_scheme" + ] = ( + context["statistics"][ + "count_entity_statements_types_with_any_identifier_with_id_and_scheme" + ][r_e_type] + + context["statistics"][ + "count_entity_statements_types_with_any_identifier_with_id_and_scheme" + ]["legalEntity"] + ) + context["statistics"]["count_entities_registeredEntity_legalEntity"] = ( + context["statistics"]["count_entity_statements_types"][r_e_type] + + context["statistics"]["count_entity_statements_types"]["legalEntity"] + ) + unknown_schema_version_used = [ + i + for i in context["additional_checks"] + if i["type"] == "unknown_schema_version_used" + ] + context["unknown_schema_version_used"] = ( + unknown_schema_version_used[0] if unknown_schema_version_used else None + ) + context["inconsistent_schema_version_used_count"] = len( + [ 
+ i + for i in context["additional_checks"] + if i["type"] == "inconsistent_schema_version_used" + ] + ) + + context["checks_not_run_in_sample_mode"] = [] if process_data["sample_mode"]: classes_not_run_in_sample_mode = [ - x for x in libcovebods.run_tasks.TASK_CLASSES + x + for x in libcovebods.run_tasks.TASK_CLASSES if x not in libcovebods.run_tasks.TASK_CLASSES_IN_SAMPLE_MODE ] for class_not_run_in_sample_mode in classes_not_run_in_sample_mode: - context['checks_not_run_in_sample_mode'].extend( + context["checks_not_run_in_sample_mode"].extend( class_not_run_in_sample_mode.get_additional_check_types_possible( - process_data['config'], - process_data['schema'] + process_data["config"], process_data["schema"] ) ) - context['checks_not_run_in_sample_mode'] = list(set(context['checks_not_run_in_sample_mode'])) + context["checks_not_run_in_sample_mode"] = list( + set(context["checks_not_run_in_sample_mode"]) + ) return context, process_data @@ -403,18 +493,19 @@ class JsonSchemaValidateTask(TaskWithState): state_filename: str = "jsonschema_validate.json" def process_get_state(self, process_data: dict) -> dict: - worker = JSONSchemaValidator(process_data['schema']) + worker = JSONSchemaValidator(process_data["schema"]) # Get list of validation errors - validation_errors = worker.validate(process_data['data_reader']) + validation_errors = worker.validate(process_data["data_reader"]) validation_errors = [i.json() for i in validation_errors] # Context context = { "validation_errors_count": len(validation_errors), "validation_errors": group_data_list_by( - validation_errors, lambda i: i["validator"] + str(i['path_ending']) + i["message"] - ) + validation_errors, + lambda i: i["validator"] + str(i["path_ending"]) + i["message"], + ), } return context, process_data @@ -425,9 +516,9 @@ class AdditionalFieldsChecksTask(TaskWithState): state_filename: str = "additional_fields.json" def process_get_state(self, process_data: dict) -> dict: - worker = 
AdditionalFields(process_data['schema']) + worker = AdditionalFields(process_data["schema"]) - output = worker.process(process_data['data_reader']) + output = worker.process(process_data["data_reader"]) context = {"additional_fields": output} context["any_additional_fields_exist"] = len(output) > 0 diff --git a/cove_bods/templates/cove_bods/additional_checks_table.html b/cove_bods/templates/cove_bods/additional_checks_table.html index 5931edc..96a42ab 100644 --- a/cove_bods/templates/cove_bods/additional_checks_table.html +++ b/cove_bods/templates/cove_bods/additional_checks_table.html @@ -13,10 +13,10 @@ {% if additional_check.type == 'entity_identifier_scheme_not_known' %}
scheme
is not valid. Check the BODS documentation for guidance on identifiers. {% endblocktrans %}
scheme
: {{ additional_check.scheme }}
Entity statement
not in correct order. Check that the Entity statement is placed in the array before any statement referencing it.{% endblocktrans %}
{% trans 'Entity statement' %}
: {{ additional_check.entity_statement_out_of_order }}
Person statement
not in correct order. Check that the Person statement is placed in the array before any statement referencing it.{% endblocktrans %}
{% trans 'Person statement' %}
: {{ additional_check.person_statement_out_of_order }}
Entity statement
is not referenced from any Relationship statements. Check whether it should be the subject
or interestedParty
of a relationship.{% endblocktrans %}
Person statement
is not referenced from any Relationship statements. Check whether it should be the interestedParty
of a relationship.{% endblocktrans %}
Entity statement
is missing. Check whether an Entity statement is incorrectly referenced from interestedParty
or subject
, or whether an Entity statement is missing.{% endblocktrans %}
{% trans 'Entity statement' %}
: {{ additional_check.entity_statement_missing }}
Person statement
is missing. Check whether a Person statement is incorrectly referenced from interestedParty
, or whether a Person statement is missing.{% endblocktrans %}
{% trans 'Person statement' %}
: {{ additional_check.person_statement_missing }}
statementId
value used in multiple statements. Different statements should not have the same statementId
value.{% endblocktrans %}
statementId
: {{ additional_check.id }}
birthDate
value is invalid. The year is too far in the past. Check that the date is correct and well formatted.{% endblocktrans %}
birthDate
{% trans 'year' %}: {{ additional_check.year }}
birthDate
value is invalid. The date is in the future. Check that the date is correct and well formatted.{% endblocktrans %}
birthDate
{% trans 'year' %}: {{ additional_check.year }}
type
of address is invalid in an Entity statement. Check that the address type is correct.{% endblocktrans %}
type
: {{ additional_check.address_type }}
type
of address is invalid in a Person statement. Check that the address type is correct.{% endblocktrans %}
type
: {{ additional_check.address_type }}
type
of address is 'alternative' when no other addresses are published. Check that the address type is correct.{% endblocktrans %}
type
of address is 'alternative' when no other addresses are published. Check that the address type is correct.{% endblocktrans %}
componentStatementIDs
contains a statementID
not included in this dataset. Check that this is expected.{% endblocktrans %}
statementID
: {{ additional_check.component_statement_id }}
Ownership-or-control statement
has an isComponent
value ('true') incompatible with having its own components in componentStatementIDs
.{% endblocktrans %}
Person statement
has an isComponent
value of 'true' but does not appear in any componentStatementIDs
list. Check that this is expected.{% endblocktrans %}
Entity statement
has an isComponent
value of 'true' but does not appear in any componentStatementIDs
list. Check that this is expected.{% endblocktrans %}
Ownership-or-control statement
has an isComponent
value of 'true' but does not appear in any componentStatementIDs
list. Check that this is expected.{% endblocktrans %}
Person statement
not in the correct position. As a component (isComponent
'true'), it must appear before the primary Ownership-or-control statement that references it (from componentStatementIDs
).{% endblocktrans %}
Entity statement
not in the correct position. As a component (isComponent
'true'), it must appear before the primary Ownership-or-control statement that references it (from componentStatementIDs
).{% endblocktrans %}
Ownership-or-control statement
not in correct order. As a component (isComponent
'true'), it must appear before the primary Ownership-or-control statement that references it (from componentStatementIDs
).{% endblocktrans %}
bodsVersion
is different than that in the first statement of the dataset. Check that the schema versions are compatible.{% endblocktrans %}
bodsVersion
: {{ additional_check.schema_version }}
bodsVersion
is different than that in the first statement of the dataset. Check that the schema versions are compatible.{% endblocktrans %}
bodsVersion
is different than that in the first statement of the dataset. Check that the schema versions are compatible.{% endblocktrans %}
bodsVersion
not valid. Check that the value is correctly formatted.{% endblocktrans %}
bodsVersion
: {{ additional_check.schema_version }}
Ownership-or-control statement
asserts beneficialOwnershipOrControl
is 'true' but interestedParty
does not reference a Person statement. Check that information is correctly represented.{% endblocktrans %}
entitySubtype
is not valid for the specified entityType
.{% endblocktrans %}
hasPublicListing
has incorrect value. Value of companyFilingsURLs
or securitiesListings
suggests hasPublicListing
must be 'true'.{% endblocktrans %}
operatingMarketIdentifierCode
should be set alongside marketIdentifierCode
.{% endblocktrans %}
marketIdentifierCode
should be set alongside operatingMarketIdentifierCode
.{% endblocktrans %}
hasPepStatus
has incorrect value. pepStatusDetails
are substantive, suggesting that hasPepStatus
should be 'true'.{% endblocktrans %}
politicalExposure.status
has incorrect value. politicalExposure.details
are substantive, suggesting that politicalExposure.status
should be 'isPep'.{% endblocktrans %}
hasPepStatus
has incorrect value. pepStatusDetails
contains missingInfoReason
, suggesting that hasPepStatus
should have no value.{% endblocktrans %}
{% else %}
- {% blocktrans %}This Person Statement has a missingInfoReason for PEP status details, so PEP status should be declared as 'unknown'.{%endblocktrans%}
+ {% blocktrans %}politicalExposure.status
has incorrect value. politicalExposure.details
contains missingInfoReason
, suggesting that status
should be 'unknown'.{% endblocktrans %}
{% endif %}
creationDate
of an annotation is in the future. Check that dates are correctly generated and well formatted.{% endblocktrans %}
+ creationDate
: {{ additional_check.creation_date }}
+ publicationDate
is in the future. Check that dates are correctly generated and well formatted.{% endblocktrans %}
+ publicationDate
: {{ additional_check.publication_date }}
+ Entity statement
has an isComponent
value of 'true' but does not appear in the componentRecords
list of a later Relationship statement in the dataset. Check that component records are correctly listed and that statements are in the correct order.{% endblocktrans %}
+ Person statement
has an isComponent
value of 'true' but does not appear in the componentRecords
list of a later Relationship statement in the dataset. Check that component records are correctly listed and that statements are in the correct order.{% endblocktrans %}
+ Relationship statement
has an isComponent
value of 'true' but does not appear in the componentRecords
list of a later Relationship statement in the dataset. Check that component records are correctly listed and that statements are in the correct order.{% endblocktrans %}
+ retrievedAt
is in the future. Check that dates are correctly generated and well formatted.{% endblocktrans %}
+ retrievedAt
: {{ additional_check.retrieval_date }}
+ statementDate
is in the future. Check that dates are correctly generated and well formatted.{% endblocktrans %}
+ statementDate
: {{ additional_check.statement_date }}
+ birthDate
is in the future. Check that dates are correctly generated and well formatted.{% endblocktrans %}
+ birthDate
: {{ additional_check.birth_date }}
+ birthDate
is before 1800. Check that dates are correctly generated and well formatted.{% endblocktrans %}
+ birthDate
: {{ additional_check.birth_date }}
+ foundingDate
is later than dissolutionDate
. Check that dates are correctly generated and well formatted.{% endblocktrans %}
+ foundingDate
: {{ additional_check.founding_date }} dissolutionDate
: {{ additional_check.dissolution_date }}
+ deathDate
is incorrect. Check that the date is not before the birthDate
, in the future or more than 120 years after birth date.{% endblocktrans %}
+ deathDate
: {{ additional_check.death_date }}
+ startDate
is later than endDate
in an Interest. Check that dates are correctly generated and well formatted.{% endblocktrans %}
+ startDate
: {{ additional_check.start_date }} endDate
: {{ additional_check.end_date }}
+ hasPublicListing
has incorrect value. Value of securitiesListings
suggests hasPublicListing
must be 'true'.{% endblocktrans %}
+ securitiesListings
: {{ additional_check.securities_listings }}
+ share.exact
is provided, alongside a range (minimum and maximum values). Provide either an exact value, or a range.{% endblocktrans %}
+ share.exact
: {{ additional_check.share_exact }}+
share
is an invalid range. The maximum value is less than the minimum value.{% endblocktrans %}
+ share
{% trans '(exclusive) minimum' %}: {{ additional_check.minval }}
share
{% trans '(exclusive) maximum' %}: {{ additional_check.maxval }}+
share
is an invalid range. The maximum value is the same as the minimum value.{% endblocktrans %}
+ share
{% trans '(exclusive) minimum' %}: {{ additional_check.minval }}
share
{% trans '(exclusive) maximum' %}: {{ additional_check.maxval }}+
share
is an invalid range. Only one of minimum
and exclusiveMinimum
must be provided.{% endblocktrans %}
+ share
is an invalid range. Only one of maximum
and exclusiveMaximum
must be provided.{% endblocktrans %}
+ declarationSubject
does not appear in the dataset. Check that the dataset is complete, or that this is expected.{% endblocktrans %}
+ declarationSubject
: {{ additional_check.declaration_subject }}
+ declarationSubject
must reference an entity or person. Check that recordId
values are being correctly generated and used.{% endblocktrans %}
+ declarationSubject
: {{ additional_check.record_id }} recordType
of {{ additional_check.record_id }}: {{ additional_check.record_type }}
+ recordStatus
reported as 'new' in multiple Statements for a single Record. Check that recordStatus
is ‘new’ only the first time a Statement is published for the record.{% endblocktrans %}
+ recordId
: {{ additional_check.record_id }}
+ statementDate
is too early. The recordStatus
is 'updated' or 'closed' but statementDate
is earlier than that of the corresponding 'new' Statement.{% endblocktrans %}
+ recordId
: {{ additional_check.record_id }} statementId
: {{ additional_check.statement_id }}
+ recordStatus
reported as 'closed' in multiple Statements for a single Record. Check that recordStatus
is 'closed' only the final time a Statement is published for a record.{% endblocktrans %}
+ recordId
: {{ additional_check.record_id }}
+ statementDate
is too late. recordStatus
is 'new' or 'updated' but statementDate
is later than that of the corresponding 'closed' Statement.{% endblocktrans %}
+ recordId
: {{ additional_check.record_id }} statementId
: {{ additional_check.statement_id }}
+ recordType
varies across Statements for the same record. Check that Statements relating to the same record all have the same type.{% endblocktrans %}
+ recordId
: {{ additional_check.record_id }}
+ componentRecords
contains a statement ID value. componentRecords
entries must be record IDs.{% endblocktrans %}
+ componentRecords
: {{ additional_check.component_id }}
+ componentRecords
contains a recordId
not included in this dataset. Check that this is expected.{% endblocktrans %}
+ componentRecords
: {{ additional_check.component_id }}
+ subject
has unexpected value. subject
must be either a record ID for a Statement in the dataset or an Unspecified Record object.{% endblocktrans %}
+ subject
: {{ additional_check.subject }}
+ subject
must be the recordId
of an entity (not a person or relationship). Check that recordId
values are being correctly generated and used.{% endblocktrans %}
+ subject
: {{ additional_check.subject }}
+ interestedParty
not recognised. Check that the value matches a recordId
in the dataset or is an Unspecified Record object.{% endblocktrans %}
+ interestedParty
: {{ additional_check.interested_party }}
+ interestedParty
is invalid. The value should be a recordId
for a person or an entity in the dataset (not a relationship).{% endblocktrans %}
+ interestedParty
: {{ additional_check.interested_party }}
+ beneficialOwnershipOrControl
is 'true' but interestedParty
is not a person. Check that the interested party is correct and that beneficialOwnershipOrControl
is used correctly.{% endblocktrans %}
+ interestedParty
: {{ additional_check.interested_party }}
+ statementPointerTarget
is invalid. Check that it is a valid JSON pointer and that it points to an existing field in the Statement.{% endblocktrans %}
+ statementPointerTarget
: {{ additional_check.pointer }}
+ subject
has unexpected type. Interests in this Relationship statement suggest that the subject's entityType.subtype
should be 'nomination'.{% endblocktrans %}
+ entityType.subtype
: {{ additional_check.subject_record_subtype }}
+ subject
has unexpected type. Interests in this Relationship statement suggest that the subject's entityType.subtype
should be 'trust'.{% endblocktrans %}
+ entityType.subtype
: {{ additional_check.subject_record_subtype }}
+ subject
not found. The subject must match the recordId
of at least one prior Statement in the dataset. Check that Statements are ordered correctly.{% endblocktrans %}
+ subject
: {{ additional_check.subject_id }}
+ interestedParty
not found. The interested party must match the recordId
of at least one prior Statement in the dataset. Check that Statements are ordered correctly.{% endblocktrans %}
+ interestedParty
: {{ additional_check.interested_party_id }}
+ scheme
has incorrect formatting. Check the field description for guidance.{% endblocktrans %}
+ scheme
: {{ additional_check.scheme }}+
scheme
contains an unrecognised jurisdiction. An ISO 3166-1 3-digit country code is expected. Check the field description for guidance.{% endblocktrans %}
+ scheme
: {{ additional_check.scheme }}+
scheme
has unrecognised type. 'PASSPORT', 'TAXID' or 'IDCARD' is expected. Check the field description for guidance.{% endblocktrans %}
+ scheme
: {{ additional_check.scheme }}+
scheme
is unrecognised. A code from org-id.guide is expected. Check the field description for guidance.{% endblocktrans %}
+ scheme
: {{ additional_check.scheme }}+
beneficialOwnershipOrControl
expected to be 'true' in at least one Relationship statement with a person as an interested party. If this dataset contains beneficial owners, check that beneficialOwnershipOrControl
is correctly used.{% endblocktrans %}
Entity statement
is missing. Check whether an Entity statement is incorrectly referenced from interestedParty
or subject
, or whether an Entity statement is missing.{% endblocktrans %}
+ Person statement
not in correct order. Check that the Person statement is placed in the array before any statement referencing it.{% endblocktrans %}
+ Ownership-or-control statement
not in correct order. As a component (isComponent
'true'), it must appear before the primary Ownership-or-control statement that references it (from componentStatementIDs
).{% endblocktrans %}
+ Ownership-or-control statement
has an isComponent
value of 'true' but appears in no componentStatementIDs
list. Check that this is expected.{% endblocktrans %}
+ Person statement
is missing. Check whether a Person statement is incorrectly referenced from interestedParty
, or whether a Person statement is missing.{% endblocktrans %}
+ Person statement
is not referenced from any Relationship statements. Check whether it should be the interestedParty
of a relationship.{% endblocktrans %}
+ Entity statement
is not referenced from any Relationship statements. Check whether it should be the subject
or interestedParty
of a relationship.{% endblocktrans %}
+ statementId
value used in multiple statements. Different statements should not have the same statementId
value.{% endblocktrans %}
+ Entity statement
not in correct order. Check that the Entity statement is placed in the array before any statement referencing it.{% endblocktrans %}
+ componentStatementIDs
contains a statementID
not included in this dataset. Check that this is expected.{% endblocktrans %}
+ {% trans 'Field Name' %} | +{% trans 'Field Path' %} | +{% trans 'Usage Count' %} | +{% trans 'First 3 Values' %} | +{% trans 'Child Fields' %} | +
---|---|---|---|---|
+ {{ info.field_name }} + | ++ {{ full_path }} + | ++ {{ info.count }} + | +
+
|
+ + {% if info.additional_field_descendance %} + {{info.additional_field_descendance|length}} + {% trans "(See child fields)" %} + {% endif %} + | +
{% trans 'Total Statements' %}: {{ statistics.count_ownership_or_control_statement }}
diff --git a/cove_bods/templates/cove_bods/validation_table.html b/cove_bods/templates/cove_bods/validation_table.html index 75ddffa..9dbb907 100644 --- a/cove_bods/templates/cove_bods/validation_table.html +++ b/cove_bods/templates/cove_bods/validation_table.html @@ -26,11 +26,11 @@ {% elif errors.0.validator == "pattern" %}{{ errors.0.path_ending }}
does not match the regex {{ errors.0.validator_value }}
{% elif errors.0.validator == "format" and errors.0.validator_value == "date" %}
- Date is not in the correct format. The correct format is YYYY-MM-DD.
+ {{ errors.0.path_ending }}
is not a valid date. The correct format is YYYY-MM-DD.
{% elif errors.0.validator == "format" and errors.0.validator_value == "date-time" %}
- Date is not in the correct format. The correct format is YYYY-MM-DDT00:00:00Z.
+ {{ errors.0.path_ending }}
is not a valid date-time.
{% elif errors.0.validator == "format" and errors.0.validator_value == "uri" %}
- Invalid uri found
+ {{ errors.0.path_ending }}
is not a valid uri.
{% elif errors.0.validator == "minLength" %}
{{ errors.0.path_ending }}
is too short. It should be at least {{ errors.0.validator_value }} characters.
{% elif errors.0.validator == "maxLength" %}
@@ -38,7 +38,11 @@
{% elif errors.0.validator == "type" and errors.0.validator_value == "number" %}
{{ errors.0.path_ending }}
should be a number. Check that the value is not null, and doesn’t contain any characters other than 0-9 and dot (.
). Number values should not be in quotes.
{% elif errors.0.validator == "type" and errors.0.validator_value == "array" %}
- {{ errors.0.path_ending }}
should be a JSON array. Check that value(s) appear within square brackets, [...]
+ {% if errors.0.path_ending == "$" %}
+ The dataset should be a JSON array. Check that the object(s) appear within square brackets, [...]
+ {% else %}
+ {{ errors.0.path_ending }}
should be a JSON array. Check that value(s) appear within square brackets, [...]
+ {% endif %}
{% elif errors.0.validator == "type" and errors.0.validator_value == "string" %}
{{ errors.0.path_ending }}
should be a string. Check that the value is not null, and has quotes at the start and end. Escape any quotes in the value with \
{% elif errors.0.validator == "type" and errors.0.validator_value == "boolean" %}
@@ -48,7 +52,11 @@
{% elif errors.0.validator == "dependencies" %}
{{ errors.0.message }}
{% elif errors.0.validator == "anyOf" %}
- {{ errors.0.message }}
+ {{ errors.0.path_ending }}
is not a valid value. Check the description of this field in the schema documentation for details.
+ {% elif errors.0.validator == "oneOf" %}
+ {{ errors.0.path_ending }}
is not a valid value. Check the description of this field in the schema documentation for details.
+ {% elif errors.0.validator == "const" %}
+ {{ errors.0.path_ending }}
is expected to be {{ errors.0.validator_value }}in this Statement. {% else %} {{ errors.0.validator }} = {{ errors.0.message }} {% endif %} diff --git a/cove_bods/tests/test_page_content.py b/cove_bods/tests/test_page_content.py index c76bceb..dd4ed19 100644 --- a/cove_bods/tests/test_page_content.py +++ b/cove_bods/tests/test_page_content.py @@ -1,7 +1,5 @@ import pytest - from libcoveweb2.tests.lib_functional import browser, server_url # noqa - from selenium.webdriver.common.by import By diff --git a/cove_bods/views.py b/cove_bods/views.py index f6804b0..39e5e13 100644 --- a/cove_bods/views.py +++ b/cove_bods/views.py @@ -1,31 +1,32 @@ import logging -from cove_project import settings from django.shortcuts import render -from libcoveweb2.views import ( - ExploreDataView, - InputDataView -) from libcoveweb2.models import SuppliedDataFile -from cove_bods.forms import NewTextForm, NewUploadForm, NewURLForm +from libcoveweb2.views import ExploreDataView, InputDataView +from cove_bods.forms import NewTextForm, NewUploadForm, NewURLForm +from cove_project import settings logger = logging.getLogger(__name__) JSON_FORM_CLASSES = { - "upload_form": NewUploadForm, - "text_form": NewTextForm, - "url_form": NewURLForm, - } + "upload_form": NewUploadForm, + "text_form": NewTextForm, + "url_form": NewURLForm, +} class NewInput(InputDataView): form_classes = JSON_FORM_CLASSES input_template = "cove_bods/index.html" - allowed_content_types = settings.ALLOWED_JSON_CONTENT_TYPES + settings.ALLOWED_SPREADSHEET_CONTENT_TYPES + allowed_content_types = ( + settings.ALLOWED_JSON_CONTENT_TYPES + settings.ALLOWED_SPREADSHEET_CONTENT_TYPES + ) content_type_incorrect_message = "This does not appear to be a supported file." 
- allowed_file_extensions = settings.ALLOWED_JSON_EXTENSIONS + settings.ALLOWED_SPREADSHEET_EXTENSIONS + allowed_file_extensions = ( + settings.ALLOWED_JSON_EXTENSIONS + settings.ALLOWED_SPREADSHEET_EXTENSIONS + ) file_extension_incorrect_message = "This does not appear to be a supported file." supplied_data_format = "unknown" @@ -47,10 +48,7 @@ def save_file_content_to_supplied_data( supplied_data.save_file(request.FILES["file_upload"]) elif form_name == "text_form": supplied_data.save_file_contents( - "input.json", - form.cleaned_data["paste"], - "application/json", - None + "input.json", form.cleaned_data["paste"], "application/json", None ) elif form_name == "url_form": supplied_data.save_file_from_source_url( diff --git a/cove_project/settings.py b/cove_project/settings.py index d532a8e..a620fa6 100644 --- a/cove_project/settings.py +++ b/cove_project/settings.py @@ -11,35 +11,33 @@ """ import os -from libcoveweb2 import settings + import environ +from libcoveweb2 import settings # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) env = environ.Env( # set default values and casting - DB_NAME=(str, os.path.join(BASE_DIR, 'db.sqlite3')), - SENTRY_DSN=(str, ''), + DB_NAME=(str, os.path.join(BASE_DIR, "db.sqlite3")), + SENTRY_DSN=(str, ""), CELERY_BROKER_URL=(str, ""), REDIS_URL=(str, ""), ) # We use the setting to choose whether to show the section about Sentry in the # terms and conditions -SENTRY_DSN = env('SENTRY_DSN') +SENTRY_DSN = env("SENTRY_DSN") if SENTRY_DSN: import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.logging import ignore_logger - ignore_logger('django.security.DisallowedHost') - sentry_sdk.init( - dsn=env('SENTRY_DSN'), - integrations=[DjangoIntegration()] - ) + ignore_logger("django.security.DisallowedHost") + sentry_sdk.init(dsn=env("SENTRY_DSN"), integrations=[DjangoIntegration()]) -DEALER_TYPE = 'git' +DEALER_TYPE = "git" PIWIK = settings.PIWIK GOOGLE_ANALYTICS_ID = settings.GOOGLE_ANALYTICS_ID @@ -47,8 +45,8 @@ # We can't take MEDIA_ROOT and MEDIA_URL from cove settings, # ... otherwise the files appear under the BASE_DIR that is the Cove library install. # That could get messy. We want them to appear in our directory. 
-MEDIA_ROOT = os.path.join(BASE_DIR, 'media') -MEDIA_URL = '/media/' +MEDIA_ROOT = os.path.join(BASE_DIR, "media") +MEDIA_URL = "/media/" SECRET_KEY = settings.SECRET_KEY DEBUG = settings.DEBUG @@ -57,45 +55,45 @@ # Application definition INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'bootstrap3', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "bootstrap3", "libcoveweb2", - 'cove_bods', + "cove_bods", ] MIDDLEWARE = ( - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.locale.LocaleMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'django.middleware.security.SecurityMiddleware', - 'dealer.contrib.django.Middleware', + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.locale.LocaleMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "django.middleware.security.SecurityMiddleware", + "dealer.contrib.django.Middleware", "libcoveweb2.middleware.CoveConfigCurrentApp", ) -ROOT_URLCONF = 'cove_project.urls' +ROOT_URLCONF = "cove_project.urls" TEMPLATES = settings.TEMPLATES -WSGI_APPLICATION = 'cove_project.wsgi.application' +WSGI_APPLICATION = "cove_project.wsgi.application" # We can't take DATABASES from cove settings, # ... 
otherwise the files appear under the BASE_DIR that is the Cove library install. # That could get messy. We want them to appear in our directory. DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': env('DB_NAME'), + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": env("DB_NAME"), } } @@ -104,16 +102,16 @@ AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] @@ -129,7 +127,7 @@ LANGUAGES = settings.LANGUAGES -LOCALE_PATHS = (os.path.join(BASE_DIR, 'cove_bods', 'locale'),) +LOCALE_PATHS = (os.path.join(BASE_DIR, "cove_bods", "locale"),) # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ @@ -137,8 +135,8 @@ # We can't take STATIC_URL and STATIC_ROOT from cove settings, # ... otherwise the files appear under the BASE_DIR that is the Cove library install. # and that doesn't work with our standard Apache setup. 
-STATIC_URL = '/static/' -STATIC_ROOT = os.path.join(BASE_DIR, 'static') +STATIC_URL = "/static/" +STATIC_ROOT = os.path.join(BASE_DIR, "static") # Misc @@ -147,17 +145,17 @@ # BODS Config COVE_CONFIG = { - 'app_name': 'cove_bods', - 'app_base_template': 'cove_bods/base.html', - 'app_verbose_name': 'BODS Data Review Tool', - 'app_strapline': 'Review your BODS data.', - 'root_list_path': 'there-is-no-root-list-path', - 'root_id': 'statementID', - 'id_name': 'statementID', - 'root_is_list': True, - 'convert_titles': False, - 'input_methods': ['upload', 'url', 'text'], - 'support_email': 'data@open-contracting.org' + "app_name": "cove_bods", + "app_base_template": "cove_bods/base.html", + "app_verbose_name": "BODS Data Review Tool", + "app_strapline": "Review your BODS data.", + "root_list_path": "there-is-no-root-list-path", + "root_id": "statementID", + "id_name": "statementID", + "root_is_list": True, + "convert_titles": False, + "input_methods": ["upload", "url", "text"], + "support_email": "data@open-contracting.org", } # https://github.com/OpenDataServices/cove/issues/1098 diff --git a/cove_project/urls.py b/cove_project/urls.py index 7d56bb5..0b00102 100644 --- a/cove_project/urls.py +++ b/cove_project/urls.py @@ -1,12 +1,14 @@ -from django.conf.urls import url -from django.conf.urls.static import static from django.conf import settings +from django.conf.urls.static import static +from django.urls import re_path from libcoveweb2.urls import urlpatterns + import cove_bods.views -from django.urls import re_path urlpatterns += [re_path(r"^$", cove_bods.views.NewInput.as_view(), name="index")] -urlpatterns += [url(r'^data/(.+)$', cove_bods.views.ExploreBODSView.as_view(), name='explore')] +urlpatterns += [ + re_path(r"^data/(.+)$", cove_bods.views.ExploreBODSView.as_view(), name="explore") +] urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) diff --git a/cove_project/wsgi.py b/cove_project/wsgi.py index d27b945..e4bf27d 100644 --- 
a/cove_project/wsgi.py +++ b/cove_project/wsgi.py @@ -11,6 +11,6 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cove_project.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cove_project.settings") application = get_wsgi_application() diff --git a/requirements.in b/requirements.in index 226c357..1761ba6 100644 --- a/requirements.in +++ b/requirements.in @@ -1,9 +1,11 @@ dealer sentry-sdk -Django>3.2,<3.3 +Django>5.0,<5.1 jsonschema -libcovebods>=0.15.0 -libcoveweb2>=0.1.0 +libcovebods>=0.16.0 +libcoveweb2>=0.4.0 gunicorn django-bootstrap3 -flattentool +flattentool>=0.27.0 +pandas +urllib3<2 # selenium (requirements_dev.in) does not support v2 diff --git a/requirements.txt b/requirements.txt index 4d6e1b2..8043672 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,169 +4,203 @@ # # pip-compile requirements.in # -amqp==5.1.1 +amqp==5.3.1 # via kombu -asgiref==3.7.2 +asgiref==3.8.1 # via django -async-timeout==4.0.2 - # via redis -attrs==23.1.0 - # via jsonschema -backports-datetime-fromisoformat==2.0.0 +attrs==24.3.0 + # via + # jsonschema + # referencing +backports-datetime-fromisoformat==2.0.3 # via flattentool -billiard==3.6.4.0 +billiard==4.2.1 # via celery -btrees==5.0 +btrees==6.1 # via zodb -celery[redis]==5.2.7 +celery[redis]==5.4.0 # via libcoveweb2 -certifi==2023.5.7 +certifi==2024.12.14 # via # requests # sentry-sdk -cffi==1.15.1 +cffi==1.17.1 # via persistent -charset-normalizer==3.1.0 +charset-normalizer==3.4.1 # via requests -click==8.1.3 +click==8.1.8 # via # celery # click-didyoumean # click-plugins # click-repl -click-didyoumean==0.3.0 +click-didyoumean==0.3.1 # via celery click-plugins==1.1.1 # via celery -click-repl==0.2.0 +click-repl==0.3.0 # via celery -contextlib2==21.6.0 - # via schema dealer==2.1.0 # via -r requirements.in defusedxml==0.7.1 # via odfpy -django==3.2.19 +django==5.0.10 # via # -r requirements.in # django-bootstrap3 # libcoveweb2 -django-bootstrap3==23.1 
+django-bootstrap3==24.3 # via # -r requirements.in # libcoveweb2 -django-environ==0.10.0 +django-environ==0.11.2 # via libcoveweb2 -et-xmlfile==1.1.0 +et-xmlfile==2.0.0 # via openpyxl -flattentool==0.20.1 +flattentool==0.27.0 # via -r requirements.in -gunicorn==20.1.0 +gunicorn==23.0.0 # via -r requirements.in -idna==3.4 +idna==3.10 # via requests -ijson==3.2.0.post0 +ijson==3.3.0 # via # flattentool # libcovebods +jscc==0.3.0 + # via libcovebods +json-merge-patch==0.2 + # via jscc +jsonpointer==3.0.0 + # via libcovebods jsonref==1.1.0 - # via flattentool -jsonschema==4.9.1 + # via + # flattentool + # jscc +jsonschema==4.23.0 # via # -r requirements.in # libcovebods -kombu==5.2.4 +jsonschema-specifications==2024.10.1 + # via jsonschema +kombu==5.4.2 # via celery -libcove2==0.1.0 +libcove2==0.2.1 # via libcovebods -libcovebods==0.15.0 +libcovebods==0.16.0 # via -r requirements.in -libcoveweb2==0.1.0 +libcoveweb2==0.4.0 # via -r requirements.in -lxml==4.9.2 +lxml==5.3.0 # via flattentool +numpy==2.2.1 + # via pandas odfpy==1.4.1 # via flattentool -openpyxl==3.1.2 +openpyxl==3.1.5 # via flattentool -packaging==23.1 - # via libcovebods -persistent==5.0 +packaging==24.2 + # via + # gunicorn + # libcovebods +pandas==2.2.3 + # via -r requirements.in +persistent==6.1 # via # btrees # zodb -prompt-toolkit==3.0.38 +prompt-toolkit==3.0.48 # via click-repl -pycparser==2.21 - # via cffi -pyrsistent==0.19.3 - # via jsonschema -python-dateutil==2.8.2 +pycountry==24.6.1 # via libcovebods -pytz==2023.3 +pycparser==2.22 + # via cffi +python-dateutil==2.9.0.post0 # via # celery - # django + # libcovebods + # pandas +pytz==2024.2 + # via # flattentool # libcovebods -redis==4.5.5 + # pandas +redis==5.2.1 # via celery -requests==2.31.0 +referencing==0.35.1 # via + # jsonschema + # jsonschema-specifications +requests==2.32.3 + # via + # jscc # libcove2 # libcoveweb2 rfc3339-validator==0.1.4 # via libcovebods rfc3987==1.3.8 # via libcovebods -schema==0.7.5 +rpds-py==0.22.3 + # via + # 
jsonschema + # referencing +schema==0.7.7 # via flattentool -sentry-sdk==1.24.0 +sentry-sdk==2.19.2 # via # -r requirements.in # libcoveweb2 -six==1.16.0 +six==1.17.0 # via - # click-repl # python-dateutil # rfc3339-validator - # zodb -sqlparse==0.4.4 +sqlparse==0.5.3 # via django -transaction==3.1.0 +transaction==5.0 # via zodb -urllib3==1.26.16 +tzdata==2024.2 + # via + # celery + # kombu + # pandas +urllib3==1.26.20 # via + # -r requirements.in # requests # sentry-sdk -vine==5.0.0 +vine==5.1.0 # via # amqp # celery # kombu -wcwidth==0.2.6 +wcwidth==0.2.13 # via prompt-toolkit -xmltodict==0.13.0 +xmltodict==0.14.2 # via flattentool zc-lockfile==3.0.post1 # via zodb zc-zlibstorage==1.2.0 # via flattentool -zconfig==4.0 +zconfig==4.1 # via zodb -zodb==5.8.0 +zodb==6.0 # via # flattentool # zc-zlibstorage -zodbpickle==3.0.1 +zodbpickle==4.1.1 # via zodb -zope-interface==6.0 +zope-deferredimport==5.0 + # via persistent +zope-interface==7.2 # via # btrees # persistent # transaction # zc-zlibstorage # zodb + # zope-proxy +zope-proxy==6.1 + # via zope-deferredimport # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements_dev.in b/requirements_dev.in index 0adbf26..a77f100 100644 --- a/requirements_dev.in +++ b/requirements_dev.in @@ -2,7 +2,9 @@ pytest pytest-django flake8 +black==22.3.0 +isort pytest-localserver -selenium +selenium<4.10 # to work with libcoveweb2 transifex-client -pip-tools \ No newline at end of file +pip-tools diff --git a/requirements_dev.txt b/requirements_dev.txt index fcb0703..1eb5bfa 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -4,70 +4,65 @@ # # pip-compile requirements_dev.in # -amqp==5.1.1 +amqp==5.3.1 # via # -r requirements.txt # kombu -asgiref==3.7.2 +asgiref==3.8.1 # via # -r requirements.txt # django -async-generator==1.10 - # via - # trio - # trio-websocket -async-timeout==4.0.2 - # via - # -r requirements.txt - # redis -attrs==23.1.0 +attrs==24.3.0 # via # 
-r requirements.txt # jsonschema # outcome - # pytest + # referencing # trio -backports-datetime-fromisoformat==2.0.0 +backports-datetime-fromisoformat==2.0.3 # via # -r requirements.txt # flattentool -billiard==3.6.4.0 +billiard==4.2.1 # via # -r requirements.txt # celery -btrees==5.0 +black==22.3.0 + # via -r requirements_dev.in +btrees==6.1 # via # -r requirements.txt # zodb -build==0.10.0 +build==1.2.2.post1 # via pip-tools -celery[redis]==5.2.7 +celery[redis]==5.4.0 # via # -r requirements.txt # libcoveweb2 -certifi==2023.5.7 +certifi==2024.12.14 # via # -r requirements.txt # requests # selenium # sentry-sdk -cffi==1.15.1 +cffi==1.17.1 # via # -r requirements.txt # persistent -charset-normalizer==3.1.0 +charset-normalizer==3.4.1 # via # -r requirements.txt # requests -click==8.1.3 +click==8.1.8 # via # -r requirements.txt + # black # celery # click-didyoumean # click-plugins # click-repl # pip-tools -click-didyoumean==0.3.0 +click-didyoumean==0.3.1 # via # -r requirements.txt # celery @@ -75,154 +70,193 @@ click-plugins==1.1.1 # via # -r requirements.txt # celery -click-repl==0.2.0 +click-repl==0.3.0 # via # -r requirements.txt # celery -contextlib2==21.6.0 - # via - # -r requirements.txt - # schema dealer==2.1.0 # via -r requirements.txt defusedxml==0.7.1 # via # -r requirements.txt # odfpy -django==3.2.19 +django==5.0.10 # via # -r requirements.txt # django-bootstrap3 # libcoveweb2 -django-bootstrap3==23.1 +django-bootstrap3==24.3 # via # -r requirements.txt # libcoveweb2 -django-environ==0.10.0 +django-environ==0.11.2 # via # -r requirements.txt # libcoveweb2 -et-xmlfile==1.1.0 +et-xmlfile==2.0.0 # via # -r requirements.txt # openpyxl -flake8==6.0.0 +flake8==7.1.1 # via -r requirements_dev.in -flattentool==0.20.1 +flattentool==0.27.0 # via -r requirements.txt -gunicorn==20.1.0 +gunicorn==23.0.0 # via -r requirements.txt h11==0.14.0 # via wsproto -idna==3.4 +idna==3.10 # via # -r requirements.txt # requests # trio -ijson==3.2.0.post0 +ijson==3.3.0 # via # -r 
requirements.txt # flattentool # libcovebods iniconfig==2.0.0 # via pytest +isort==5.13.2 + # via -r requirements_dev.in +jscc==0.3.0 + # via + # -r requirements.txt + # libcovebods +json-merge-patch==0.2 + # via + # -r requirements.txt + # jscc +jsonpointer==3.0.0 + # via + # -r requirements.txt + # libcovebods jsonref==1.1.0 # via # -r requirements.txt # flattentool -jsonschema==4.9.1 + # jscc +jsonschema==4.23.0 # via # -r requirements.txt # libcovebods -kombu==5.2.4 +jsonschema-specifications==2024.10.1 + # via + # -r requirements.txt + # jsonschema +kombu==5.4.2 # via # -r requirements.txt # celery -libcove2==0.1.0 +libcove2==0.2.1 # via # -r requirements.txt # libcovebods -libcovebods==0.15.0 +libcovebods==0.16.0 # via -r requirements.txt -libcoveweb2==0.1.0 +libcoveweb2==0.4.0 # via -r requirements.txt -lxml==4.9.2 +lxml==5.3.0 # via # -r requirements.txt # flattentool +markupsafe==3.0.2 + # via werkzeug mccabe==0.7.0 # via flake8 +mypy-extensions==1.0.0 + # via black +numpy==2.2.1 + # via + # -r requirements.txt + # pandas odfpy==1.4.1 # via # -r requirements.txt # flattentool -openpyxl==3.1.2 +openpyxl==3.1.5 # via # -r requirements.txt # flattentool -outcome==1.2.0 +outcome==1.3.0.post0 # via trio -packaging==23.1 +packaging==24.2 # via # -r requirements.txt # build + # gunicorn # libcovebods # pytest -persistent==5.0 +pandas==2.2.3 + # via -r requirements.txt +pathspec==0.12.1 + # via black +persistent==6.1 # via # -r requirements.txt # btrees # zodb -pip-tools==6.12.2 +pip-tools==7.4.1 # via -r requirements_dev.in -pluggy==1.0.0 +platformdirs==4.3.6 + # via black +pluggy==1.5.0 # via pytest -prompt-toolkit==3.0.38 +prompt-toolkit==3.0.48 # via # -r requirements.txt # click-repl -pycodestyle==2.10.0 +pycodestyle==2.12.1 # via flake8 -pycparser==2.21 +pycountry==24.6.1 + # via + # -r requirements.txt + # libcovebods +pycparser==2.22 # via # -r requirements.txt # cffi -pyflakes==3.0.1 +pyflakes==3.2.0 # via flake8 -pyproject-hooks==1.0.0 - # via build 
-pyrsistent==0.19.3 +pyproject-hooks==1.2.0 # via - # -r requirements.txt - # jsonschema + # build + # pip-tools pysocks==1.7.1 # via urllib3 -pytest==7.2.1 +pytest==8.3.4 # via # -r requirements_dev.in # pytest-django -pytest-django==4.5.2 +pytest-django==4.9.0 # via -r requirements_dev.in -pytest-localserver==0.7.1 +pytest-localserver==0.9.0.post0 # via -r requirements_dev.in -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # -r requirements.txt + # celery # libcovebods -pytz==2023.3 + # pandas +pytz==2024.2 # via # -r requirements.txt - # celery - # django # flattentool # libcovebods -redis==4.5.5 + # pandas +redis==5.2.1 # via # -r requirements.txt # celery -requests==2.31.0 +referencing==0.35.1 + # via + # -r requirements.txt + # jsonschema + # jsonschema-specifications +requests==2.32.3 # via # -r requirements.txt + # jscc # libcove2 # libcoveweb2 rfc3339-validator==0.1.4 @@ -233,68 +267,77 @@ rfc3987==1.3.8 # via # -r requirements.txt # libcovebods -schema==0.7.5 +rpds-py==0.22.3 + # via + # -r requirements.txt + # jsonschema + # referencing +schema==0.7.7 # via # -r requirements.txt # flattentool -selenium==4.8.0 +selenium==4.9.1 # via -r requirements_dev.in -sentry-sdk==1.24.0 +sentry-sdk==2.19.2 # via # -r requirements.txt # libcoveweb2 -six==1.16.0 +six==1.17.0 # via # -r requirements.txt - # click-repl # python-dateutil # rfc3339-validator # transifex-client - # zodb -sniffio==1.3.0 +sniffio==1.3.1 # via trio sortedcontainers==2.4.0 # via trio -sqlparse==0.4.4 +sqlparse==0.5.3 # via # -r requirements.txt # django -transaction==3.1.0 +transaction==5.0 # via # -r requirements.txt # zodb transifex-client==0.12.5 # via -r requirements_dev.in -trio==0.22.0 +trio==0.28.0 # via # selenium # trio-websocket -trio-websocket==0.9.2 +trio-websocket==0.11.1 # via selenium -urllib3[socks]==1.26.16 +tzdata==2024.2 + # via + # -r requirements.txt + # celery + # kombu + # pandas +urllib3[socks]==1.26.20 # via # -r requirements.txt # requests # selenium # 
sentry-sdk # transifex-client -vine==5.0.0 +vine==5.1.0 # via # -r requirements.txt # amqp # celery # kombu -wcwidth==0.2.6 +wcwidth==0.2.13 # via # -r requirements.txt # prompt-toolkit -werkzeug==1.0.1 +werkzeug==3.1.3 # via pytest-localserver -wheel==0.38.4 +wheel==0.45.1 # via pip-tools wsproto==1.2.0 # via trio-websocket -xmltodict==0.13.0 +xmltodict==0.14.2 # via # -r requirements.txt # flattentool @@ -306,20 +349,24 @@ zc-zlibstorage==1.2.0 # via # -r requirements.txt # flattentool -zconfig==4.0 +zconfig==4.1 # via # -r requirements.txt # zodb -zodb==5.8.0 +zodb==6.0 # via # -r requirements.txt # flattentool # zc-zlibstorage -zodbpickle==3.0.1 +zodbpickle==4.1.1 # via # -r requirements.txt # zodb -zope-interface==6.0 +zope-deferredimport==5.0 + # via + # -r requirements.txt + # persistent +zope-interface==7.2 # via # -r requirements.txt # btrees @@ -327,6 +374,11 @@ zope-interface==6.0 # transaction # zc-zlibstorage # zodb + # zope-proxy +zope-proxy==6.1 + # via + # -r requirements.txt + # zope-deferredimport # The following packages are considered to be unsafe in a requirements file: # pip