Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Copy shape on update #190

Merged
merged 6 commits into from
Feb 13, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ public Map<String, Object> get (DataFetchingEnvironment environment) {
try {
connection = GTFSGraphQL.getConnection();
Statement statement = connection.createStatement();
LOG.info("SQL: {}", sqlBuilder.toString());
LOG.debug("SQL: {}", sqlBuilder.toString());
if (statement.execute(sqlBuilder.toString())) {
ResultSet resultSet = statement.getResultSet();
ResultSetMetaData meta = resultSet.getMetaData();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -341,7 +341,7 @@ List<Map<String, Object>> getResults (
}
// This logging produces a lot of noise during testing due to large numbers of joined sub-queries
// LOG.info("table name={}", tableName);
LOG.info("SQL: {}", preparedStatement.toString());
LOG.debug("SQL: {}", preparedStatement.toString());
if (preparedStatement.execute()) {
ResultSet resultSet = preparedStatement.getResultSet();
ResultSetMetaData meta = resultSet.getMetaData();
Expand All @@ -363,7 +363,7 @@ List<Map<String, Object>> getResults (
} finally {
DbUtils.closeQuietly(connection);
}
LOG.info("Result size: {}", results.size());
LOG.debug("Result size: {}", results.size());
// Return a List of Maps, one Map for each row in the result.
return results;
}
Expand Down
86 changes: 67 additions & 19 deletions src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java
Original file line number Diff line number Diff line change
Expand Up @@ -186,14 +186,18 @@ public String update(Integer id, String json, boolean autoCommit) throws SQLExce
referencedPatternUsesFrequencies = selectResults.getBoolean(1);
}
}
updateChildTable(
String keyValue = updateChildTable(
childEntitiesArray,
entityId,
referencedPatternUsesFrequencies,
isCreating,
referencingTable,
connection
);
// Ensure JSON return object is updated with referencing table's (potentially) new key value.
// Currently, the only case where an update occurs is when a referenced shape is referenced by other
// patterns.
jsonObject.put(referencingTable.getKeyFieldName(), keyValue);
}
}
// Iterate over table's fields and apply linked values to any tables. This is to account for "exemplar"
Expand Down Expand Up @@ -441,21 +445,19 @@ private void setStatementParameters(
* have a hierarchical relationship.
* FIXME develop a better way to update tables with foreign keys to the table being updated.
*/
private void updateChildTable(
private String updateChildTable(
ArrayNode subEntities,
int id,
boolean referencedPatternUsesFrequencies,
boolean isCreatingNewEntity,
Table subTable,
Connection connection
) throws SQLException, IOException {
// Get parent table's key field
Field keyField;
String keyValue;
// Primary key fields are always referenced by foreign key fields with the same name.
keyField = specTable.getFieldForName(subTable.getKeyFieldName());
// Get parent table's key field. Primary key fields are always referenced by foreign key fields with the same
// name.
Field keyField = specTable.getFieldForName(subTable.getKeyFieldName());
// Get parent entity's key value
keyValue = getValueForId(id, keyField.name, tablePrefix, specTable, connection);
String keyValue = getValueForId(id, keyField.name, tablePrefix, specTable, connection);
String childTableName = String.join(".", tablePrefix, subTable.name);
if (!referencedPatternUsesFrequencies && subTable.name.equals(Table.FREQUENCIES.name) && subEntities.size() > 0) {
// Do not permit the illegal state where frequency entries are being added/modified for a timetable pattern.
Expand All @@ -479,17 +481,44 @@ private void updateChildTable(
reconcilePatternStops(keyValue, newPatternStops, connection);
}
if (!isCreatingNewEntity) {
// Delete existing sub-entities for given entity ID if the parent entity is not being newly created.
String deleteSql = getUpdateReferencesSql(SqlMethod.DELETE, childTableName, keyField, keyValue, null);
LOG.info(deleteSql);
Statement statement = connection.createStatement();
// FIXME: Use copy on update for a pattern's shape instead of deleting the previous shape and replacing it.
// This would better account for GTFS data loaded from a file where multiple patterns reference a single
// shape.
int result = statement.executeUpdate(deleteSql);
LOG.info("Deleted {} {}", result, subTable.name);
// FIXME: are there cases when an update should not return zero?
// if (result == 0) throw new SQLException("No stop times found for trip ID");
// If not creating a new entity, we will delete the child entities (e.g., shape points or pattern stops) and
// regenerate them anew to avoid any messiness that we may encounter with update statements.
if (Table.SHAPES.name.equals(subTable.name)) {
// Check how many patterns are referencing the same shape_id to determine if we should copy on update.
String patternsForShapeIdSql = String.format("select id from %s.patterns where shape_id = ?", tablePrefix);
PreparedStatement statement = connection.prepareStatement(patternsForShapeIdSql);
statement.setString(1, keyValue);
LOG.info(statement.toString());
ResultSet resultSet = statement.executeQuery();
int patternsForShapeId = 0;
while (resultSet.next()) {
patternsForShapeId++;
}
if (patternsForShapeId > 1) {
// Use copy on update for pattern shape if a single shape is being used for multiple patterns because
// we do not want edits for a given pattern (for example, a short run) to impact the shape for another
// pattern (perhaps a long run that extends farther). Note: this behavior will have the side effect of
// creating potentially redundant shape information, but this is better than accidentally butchering the
// shapes for other patterns.
LOG.info("More than one pattern references shape_id: {}", keyValue);
keyValue = UUID.randomUUID().toString();
LOG.info("Creating new shape_id ({}) for pattern id={}.", keyValue, id);
// Update pattern#shape_id with new value. Note: shape_point#shape_id values are coerced to new
// value further down in this function.
String updatePatternShapeIdSql = String.format("update %s.patterns set shape_id = ? where id = ?", tablePrefix);
PreparedStatement updateStatement = connection.prepareStatement(updatePatternShapeIdSql);
updateStatement.setString(1, keyValue);
updateStatement.setInt(2, id);
LOG.info(updateStatement.toString());
updateStatement.executeUpdate();
} else {
// If only one pattern references this shape, delete the existing shape points to start anew.
deleteChildEntities(subTable, keyField, keyValue);
}
} else {
// If not handling shape points, delete the child entities and create them anew.
deleteChildEntities(subTable, keyField, keyValue);
}
}
int entityCount = 0;
PreparedStatement insertStatement = null;
Expand Down Expand Up @@ -587,6 +616,25 @@ private void updateChildTable(
} else {
LOG.info("No inserts to execute. Empty array found in JSON for child table {}", childTableName);
}
// Return key value in the case that it was updated (the only case for this would be if the shape was referenced
// by multiple patterns).
return keyValue;
}

/**
* Delete existing sub-entities for given key value for when an update to the parent entity is made (i.e., the parent
* entity is not being newly created). Examples of sub-entities include stop times for trips, pattern stops for a
* pattern, or shape points (for a pattern in our model).
*/
/**
 * Delete existing sub-entities for the given key value when an update to the parent entity is made (i.e., the parent
 * entity is not being newly created). Examples of sub-entities include stop times for trips, pattern stops for a
 * pattern, or shape points (for a pattern in our model).
 *
 * @param childTable the child table whose rows reference the parent entity
 * @param keyField   the foreign key field on the child table (has the same name as the parent table's key field)
 * @param keyValue   the parent entity's key value used to match child rows for deletion
 * @throws SQLException if creating or executing the delete statement fails
 */
private void deleteChildEntities(Table childTable, Field keyField, String keyValue) throws SQLException {
    String childTableName = String.join(".", tablePrefix, childTable.name);
    String deleteSql = getUpdateReferencesSql(SqlMethod.DELETE, childTableName, keyField, keyValue, null);
    LOG.info(deleteSql);
    // Use try-with-resources so the Statement is always closed, even if executeUpdate throws.
    try (Statement deleteStatement = connection.createStatement()) {
        int result = deleteStatement.executeUpdate(deleteSql);
        LOG.info("Deleted {} {}", result, childTable.name);
        // FIXME: are there cases when an update should not return zero?
        // if (result == 0) throw new SQLException("No stop times found for trip ID");
    }
}

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

public class CalendarDTO {
public Integer id;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

public class FrequencyDTO {
public String trip_id;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

public class PatternDTO {
public Integer id;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

public class PatternStopDTO {
public Integer id;
Expand All @@ -12,6 +12,9 @@ public class PatternStopDTO {
public Integer stop_sequence;
public Integer timepoint;

/** Empty constructor for deserialization */
public PatternStopDTO() {}

public PatternStopDTO (String patternId, String stopId, int stopSequence) {
pattern_id = patternId;
stop_id = stopId;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

Expand Down
23 changes: 23 additions & 0 deletions src/test/java/com/conveyal/gtfs/dto/ShapePointDTO.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
package com.conveyal.gtfs.dto;

// TODO add fields
/**
 * Data Transfer Object representing a single GTFS shape point, used for JSON
 * serialization/deserialization in JdbcTableWriter unit tests.
 */
public class ShapePointDTO {
    public Integer point_type;
    public Double shape_dist_traveled;
    public String shape_id;
    public Double shape_pt_lat;
    public Double shape_pt_lon;
    public Integer shape_pt_sequence;

    /** Empty constructor for deserialization */
    public ShapePointDTO() {}

    /** Convenience constructor that populates every field. */
    public ShapePointDTO(Integer point_type, Double shape_dist_traveled, String shape_id, Double shape_pt_lat, Double shape_pt_lon, Integer shape_pt_sequence) {
        this.shape_id = shape_id;
        this.shape_pt_sequence = shape_pt_sequence;
        this.shape_pt_lat = shape_pt_lat;
        this.shape_pt_lon = shape_pt_lon;
        this.shape_dist_traveled = shape_dist_traveled;
        this.point_type = point_type;
    }
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

public class StopDTO {
public Integer id;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

public class StopTimeDTO {
public String trip_id;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.conveyal.gtfs.util;
package com.conveyal.gtfs.dto;

public class TripDTO {
public Integer id;
Expand Down
18 changes: 18 additions & 0 deletions src/test/java/com/conveyal/gtfs/dto/package-info.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
/**
 * This package contains Data Transfer Objects that are used solely for efficient serialization and deserialization of
 * data into and out of JSON strings in unit tests. The {@link com.conveyal.gtfs.loader.JdbcTableWriter#update(java.lang.Integer, java.lang.String, boolean)}
 * and {@link com.conveyal.gtfs.loader.JdbcTableWriter#create(java.lang.String, boolean)} methods take these JSON strings
 * as input in order to modify or create GTFS entities. These DTOs are not used internally by the JdbcTableWriter, which
 * instead deserializes JSON into {@link com.fasterxml.jackson.databind.JsonNode} objects and validates the fields using
 * {@link com.conveyal.gtfs.loader.Table} definitions. That approach was chosen because it allows the update and create
 * methods to generically process the JSON and update the data based on the rules and relations defined by the Tables.
 *
 * The value/wisdom of maintaining these objects for tests alone has been questioned and is up for debate, but they do
 * offer an advantage over generating JSON with Jackson because JdbcTableWriter expects input JSON to have a value
 * defined for each field of a given entity (even if it is just null); using Jackson for this purpose would become
 * verbose. They may also be better suited for tests than {@link com.conveyal.gtfs.model} objects because they do not
 * use primitive types, which allows for testing bad inputs (though as of 2019/02/13 we are not testing for bad inputs
 * AFAIK).
 */

package com.conveyal.gtfs.dto;

Loading