Linting.
GCHQDeveloper42 committed Mar 22, 2024
1 parent f284a2e commit 7c48d3f
Showing 9 changed files with 249 additions and 189 deletions.
@@ -28,7 +28,8 @@
import uk.gov.gchq.magmacore.service.transformation.DbDeleteOperation;

/**
* Interface defining CRUD operations and generic queries for Magma Core data collections.
* Interface defining CRUD operations and generic queries for Magma Core data
* collections.
*/
public interface MagmaCoreDatabase {

@@ -43,13 +44,15 @@ public interface MagmaCoreDatabase {
void beginWrite();

/**
* Commit a transaction - Finish the current transaction and make any changes permanent (if a
* Commit a transaction - Finish the current transaction and make any changes
* permanent (if a
* "write" transaction).
*/
void commit();

/**
* Abort a transaction - Finish the transaction and undo any changes (if a "write" transaction).
* Abort a transaction - Finish the transaction and undo any changes (if a
* "write" transaction).
*/
void abort();

@@ -128,7 +131,8 @@ public interface MagmaCoreDatabase {
List<Thing> findByPredicateIriAndValue(IRI predicateIri, Object value);

/**
* Find object(s) that have a specific string-value attribute associated with them.
* Find object(s) that have a specific string-value attribute associated with
* them.
*
* @param predicateIri IRI of the predicate being queried.
* @param value Case-insensitive string to match.
@@ -144,7 +148,8 @@ public interface MagmaCoreDatabase {
void dump(PrintStream out);

/**
* Write the database as TTL using the {@link PrintStream} and {@link org.apache.jena.riot.Lang}.
* Write the database as TTL using the {@link PrintStream} and
* {@link org.apache.jena.riot.Lang}.
*
* @param out a {@link PrintStream}
* @param language a {@link Lang}
@@ -184,32 +189,41 @@ public interface MagmaCoreDatabase {
List<Thing> executeConstruct(final String query);

/**
* Apply a set of inference rules to a subset of the model and return a MagmaCoreService attached to
* Apply a set of inference rules to a subset of the model and return a
* MagmaCoreService attached to
* the resulting inference model for further use by the caller.
*
* @param constructQuery a SPARQL query String to extract a subset of the model for inferencing.
* @param rules a set of inference rules to be applied to the model subset.
* @param includeRdfsRules boolean true if inferencing should include the standard RDFS entailments.
* @return an in-memory MagmaCoreDatabase attached to the inferencing results which is
* independent of the source dataset.
* @param constructQuery a SPARQL query String to extract a subset of the
* model for inferencing.
* @param rules a set of inference rules to be applied to the model
* subset.
* @param includeRdfsRules boolean true if inferencing should include the
* standard RDFS entailments.
* @return an in-memory MagmaCoreDatabase attached to the inferencing results
* which is
* independent of the source dataset.
*/
MagmaCoreDatabase applyInferenceRules(
final String constructQuery,
final String rules,
final String constructQuery,
final String rules,
final boolean includeRdfsRules);

/**
* Run a validation report. This is only valid for databases obtained from
* the {@link MagmaCoreDatabase.applyInferenceRules} method.
* Run a validation report. This is only valid for databases obtained from
* the {@link MagmaCoreDatabase.applyInferenceRules} method.
*
* @param constructQuery a SPARQL query String to extract a subset of the model for inferencing.
* @param rules a set of inference rules to be applied to the model subset.
* @param includeRdfsRules boolean true if inferencing should include the standard RDFS entailments.
* @param constructQuery a SPARQL query String to extract a subset of the
* model for inferencing.
* @param rules a set of inference rules to be applied to the model
* subset.
* @param includeRdfsRules boolean true if inferencing should include the
* standard RDFS entailments.
* @return A {@link List} of {@link ValidationReportEntry} objects.
* It will be Optional.empty if the underlying database is not an inference model.
* It will be Optional.empty if the underlying database is not an
* inference model.
*/
List<ValidationReportEntry> validate(
final String constructQuery,
final String rules,
final String constructQuery,
final String rules,
final boolean includeRdfsRules);
}
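
For orientation, the inference and validation methods declared in this interface might be driven roughly as follows. This is an illustrative sketch only: the SPARQL text, the rule text, and the wrapping class are assumptions rather than code from this repository, and imports of the Magma Core types are omitted because their packages are not shown in this diff.

import java.util.List;

public final class InferenceExample {

    public static void run(final MagmaCoreDatabase database) {
        // Select the subset of the model to reason over; here, everything.
        final String constructQuery = "CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }";

        // A single Jena-style rule, purely for illustration.
        final String rules =
                "[exampleRule: (?s <http://example.com/p> ?o) -> (?o <http://example.com/q> ?s)]";

        // Run the rules over the selected subset; the result is an independent,
        // in-memory database attached to the inferred model.
        final MagmaCoreDatabase inferred =
                database.applyInferenceRules(constructQuery, rules, true);

        // Per the Javadoc above, validate is only meaningful on a database
        // obtained from applyInferenceRules.
        final List<ValidationReportEntry> report =
                inferred.validate(constructQuery, rules, true);
        report.forEach(entry -> System.out.println(entry.description()));
    }
}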
@@ -486,13 +486,13 @@ public final void load(final InputStream in, final Lang language) {
*/
@Override
public MagmaCoreDatabase applyInferenceRules(
final String constructQuery,
final String rules,
final String constructQuery,
final String rules,
final boolean includeRdfsRules) {
// Create an Inference Model which will run the rules.
final InfModel model = getInferenceModel(constructQuery, rules, includeRdfsRules);

// Convert the inference model to a dataset and return it wrapped as
// Convert the inference model to a dataset and return it wrapped as
// an in-memory MagmaCoreDatabase.
final Dataset inferenceDataset = DatasetFactory.wrap(model);
return new MagmaCoreJenaDatabase(inferenceDataset);
@@ -502,10 +502,9 @@ public MagmaCoreDatabase applyInferenceRules(
* {@inheritDoc}
*/
@Override
public List<ValidationReportEntry> validate(final String constructQuery,
final String rules,
public List<ValidationReportEntry> validate(final String constructQuery,
final String rules,
final boolean includeRdfsRules) {
//
// Create an Inference Model which will run the rules.
final InfModel model = getInferenceModel(constructQuery, rules, includeRdfsRules);

@@ -520,26 +519,25 @@ public List<ValidationReportEntry> validate(final String constructQuery,
final Report report = reports.next();

entries.add(new ValidationReportEntry(
report.getType(),
report.getExtension(),
report.getDescription()
));
report.getType(),
report.getExtension(),
report.getDescription()));
}

return entries;
}

/**
* Create an in-memory model for inferencing.
*
* @param constructQuery {@link String}
* @param rules {@link String}
* @param constructQuery {@link String}
* @param rules {@link String}
* @param includeRdfsRules boolean
* @return {@link InfModel}
*/
private InfModel getInferenceModel(
final String constructQuery,
final String rules,
final String constructQuery,
final String rules,
final boolean includeRdfsRules) {
// Get the default Model
// Execute the query to get a subset of the data model.
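
The remainder of getInferenceModel is collapsed in this diff. As background only, a Jena rule-driven inference model is typically assembled along these lines; this sketch uses only the public Jena API, is not this repository's code, and leaves out handling of the includeRdfsRules flag.

import java.util.List;

import org.apache.jena.query.QueryExecution;
import org.apache.jena.rdf.model.InfModel;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.reasoner.Reasoner;
import org.apache.jena.reasoner.rulesys.GenericRuleReasoner;
import org.apache.jena.reasoner.rulesys.Rule;

final class InferenceModelSketch {

    static InfModel build(final QueryExecution queryExec, final String rules) {
        // The CONSTRUCT query yields the subset of the data model to reason over.
        final Model subset = queryExec.execConstruct();

        // Parse the rule text and wrap it in a generic rule reasoner.
        final List<Rule> parsedRules = Rule.parseRules(rules);
        final Reasoner reasoner = new GenericRuleReasoner(parsedRules);

        // Bind the reasoner to the subset; inferences are computed when queried.
        return ModelFactory.createInfModel(reasoner, subset);
    }
}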
@@ -76,11 +76,12 @@ public class MagmaCoreRemoteSparqlDatabase implements MagmaCoreDatabase {
*/
public MagmaCoreRemoteSparqlDatabase(final String serviceUrl) {
connection = RDFConnectionRemote.newBuilder().destination(serviceUrl).queryEndpoint("query")
.updateEndpoint("update").triplesFormat(RDFFormat.RDFJSON).build();
.updateEndpoint("update").triplesFormat(RDFFormat.RDFJSON).build();
}

/**
* Constructs a MagmaCoreRemoteSparqlDatabase connection to a SPARQL server and populates it with
* Constructs a MagmaCoreRemoteSparqlDatabase connection to a SPARQL server and
* populates it with
* the dataset.
*
* @param serviceUrl The URL String of the SPARQL update endpoint.
@@ -151,7 +152,7 @@ public final void commit() {
public Thing get(final IRI iri) {

final String query = String.format("SELECT (<%1$s> as ?s) ?p ?o WHERE {<%1$s> ?p ?o.}",
iri.toString());
iri.toString());
final QueryResultList list = executeQuery(query);
final List<Thing> objects = toTopObjects(list);

@@ -224,7 +225,7 @@ public void update(final Thing object) {
@Override
public void delete(final Thing object) {
executeUpdate(String.format("delete {<%s> ?p ?o} WHERE {<%s> ?p ?o}", object.getId(),
object.getId()));
object.getId()));
}

/**
@@ -268,7 +269,7 @@ public void delete(final List<DbDeleteOperation> deletes) {
@Override
public List<Thing> findByPredicateIri(final IRI predicateIri, final IRI objectIri) {
final String query = "SELECT ?s ?p ?o WHERE {?s ?p ?o. ?s <" + predicateIri.toString() + "> <"
+ objectIri.toString() + ">.}";
+ objectIri.toString() + ">.}";
final QueryResultList list = executeQuery(query);
return toTopObjects(list);
}
@@ -278,9 +279,8 @@ public List<Thing> findByPredicateIri(final IRI predicateIri, final IRI objectIri) {
*/
@Override
public List<Thing> findByPredicateIriOnly(final IRI predicateIri) {
final String query =
"SELECT ?s ?p ?o WHERE {{select ?s ?p ?o where { ?s ?p ?o.}}{select ?s where {?s <"
+ predicateIri.toString() + "> ?o.}}}";
final String query = "SELECT ?s ?p ?o WHERE {{select ?s ?p ?o where { ?s ?p ?o.}}{select ?s where {?s <"
+ predicateIri.toString() + "> ?o.}}}";
final QueryResultList list = executeQuery(query);
return toTopObjects(list);
}
@@ -310,10 +310,9 @@ public List<Thing> findByPredicateIriAndValue(final IRI predicateIri, final Object value) {
@Override
public List<Thing> findByPredicateIriAndStringCaseInsensitive(final IRI predicateIri,
final String value) {
final String query =
"SELECT ?s ?p ?o WHERE {{ SELECT ?s ?p ?o where { ?s ?p ?o.}}{select ?s where {?s <"
+ predicateIri.toString() + "> ?o. BIND(LCASE(?o) AS ?lcase) FILTER(?lcase= \"\"\"" + value
+ "\"\"\")}}}";
final String query = "SELECT ?s ?p ?o WHERE {{ SELECT ?s ?p ?o where { ?s ?p ?o.}}{select ?s where {?s <"
+ predicateIri.toString() + "> ?o. BIND(LCASE(?o) AS ?lcase) FILTER(?lcase= \"\"\"" + value
+ "\"\"\")}}}";
final QueryResultList list = executeQuery(query);
return toTopObjects(list);
}
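
Because the query above lower-cases the stored literal before comparing, callers can match string values regardless of case. A hypothetical call might look like this; the predicate IRI and search string are made up, and database stands for any MagmaCoreDatabase instance (imports omitted).

// Illustrative call only - returns every Thing whose value for the given
// predicate equals "example building" ignoring case.
final IRI namePredicate = new IRI("http://example.com/entityName");
final List<Thing> matches =
        database.findByPredicateIriAndStringCaseInsensitive(namePredicate, "example building");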
@@ -356,7 +355,8 @@ public QueryResultList executeQuery(final String sparqlQueryString) {
}

/**
* Execute a SPARQL query and construct a list of HQDM objects from the resulting RDF triples.
* Execute a SPARQL query and construct a list of HQDM objects from the
* resulting RDF triples.
*
* @param queryExec SPARQL query to execute.
* @return Results of the query.
@@ -365,7 +365,7 @@ private final QueryResultList getQueryResultList(final QueryExecution queryExec) {
final ResultSet resultSet = queryExec.execSelect();
final List<QueryResult> queryResults = new ArrayList<>();
final QueryResultList queryResultList = new QueryResultList(resultSet.getResultVars(),
queryResults);
queryResults);

while (resultSet.hasNext()) {
final QuerySolution querySolution = resultSet.next();
@@ -414,17 +414,17 @@ public final List<Thing> toTopObjects(final QueryResultList queryResultsList) {
dataModelObject.add(new Pair<>(new IRI(predicateValue.toString()), objectValue.toString()));
} else if (objectValue instanceof Resource) {
dataModelObject.add(new Pair<>(new IRI(predicateValue.toString()),
new IRI(objectValue.toString())));
new IRI(objectValue.toString())));
} else {
throw new RuntimeException("objectValue is of unknown type: " + objectValue.getClass());
}
});

return objectMap
.entrySet()
.stream()
.map(entry -> HqdmObjectFactory.create(new IRI(entry.getKey().toString()), entry.getValue()))
.collect(Collectors.toList());
.entrySet()
.stream()
.map(entry -> HqdmObjectFactory.create(new IRI(entry.getKey().toString()), entry.getValue()))
.collect(Collectors.toList());
}

/**
@@ -473,9 +473,9 @@ public final void load(final InputStream in, final Lang language) {
*/
@Override
public MagmaCoreDatabase applyInferenceRules(
final String constructQuery,
final String rules,
final boolean includeRdfsRules) {
final String constructQuery,
final String rules,
final boolean includeRdfsRules) {

// Create an Inference Model which will run the rules.
final InfModel model = getInferenceModel(constructQuery, rules, includeRdfsRules);
@@ -491,10 +491,10 @@ public MagmaCoreDatabase applyInferenceRules(
*/
@Override
public List<ValidationReportEntry> validate(
final String constructQuery,
final String rules,
final boolean includeRdfsRules) {
//
final String constructQuery,
final String rules,
final boolean includeRdfsRules) {

// Create an Inference Model which will run the rules.
final InfModel model = getInferenceModel(constructQuery, rules, includeRdfsRules);

@@ -509,26 +509,25 @@ public List<ValidationReportEntry> validate(
final Report report = reports.next();

entries.add(new ValidationReportEntry(
report.getType(),
report.getExtension(),
report.getDescription()
));
report.getType(),
report.getExtension(),
report.getDescription()));
}

return entries;
}

/**
* Create an in-memory model for inferencing.
*
* @param constructQuery {@link String}
* @param rules {@link String}
* @param constructQuery {@link String}
* @param rules {@link String}
* @param includeRdfsRules boolean
* @return {@link InfModel}
*/
private InfModel getInferenceModel(
final String constructQuery,
final String rules,
final String constructQuery,
final String rules,
final boolean includeRdfsRules) {
// Execute the query to get a subset of the data model.
final QueryExecution queryExec = connection.query(constructQuery);
@@ -3,5 +3,5 @@
/**
* An implementation agnostic model validation report entry.
*/
public record ValidationReportEntry(String type, Object additionalInformation, String description) {}

public record ValidationReportEntry(String type, Object additionalInformation, String description) {
}
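
Because ValidationReportEntry is a record, Java generates accessors named after its components. A caller could read an entry as sketched below; the constructed values are illustrative only.

// The record's generated accessors mirror its component names.
final ValidationReportEntry entry = new ValidationReportEntry(
        "http://example.com/validationType",    // type (illustrative value)
        null,                                   // additionalInformation
        "Example description");                 // description

System.out.println(entry.type() + ": " + entry.description());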