Commit

refactor(Fixed merge conflicts):
br648 committed Jan 24, 2024
2 parents fc879ca + a8a376c commit 5e24a64
Showing 6 changed files with 169 additions and 22 deletions.
4 changes: 1 addition & 3 deletions src/main/java/com/conveyal/gtfs/GTFSFeed.java
@@ -375,7 +375,6 @@ public Shape getShape (String shape_id) {
}

/**
<<<<<<< HEAD
* MapDB-based implementation to find patterns.
*
* FIXME: Remove and make pattern finding happen during validation? We want to share the pattern finder between the
@@ -396,15 +395,14 @@ public void findPatterns () {
null,
null,
null,
null,
null
);
this.patterns.putAll(patternObjects.values().stream()
.collect(Collectors.toMap(Pattern::getId, pattern -> pattern)));
}

/**
=======
>>>>>>> dev
* For the given trip ID, fetch all the stop times in order, and interpolate stop-to-stop travel times.
*/
public Iterable<StopTime> getInterpolatedStopTimesForTrip (String trip_id) throws FirstAndLastStopsDoNotHaveTimes {
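After the merge, findPatterns() stays on the MapDB code path and fills the public patterns map keyed by Pattern::getId. A minimal usage sketch, assuming a GTFS zip at a hypothetical local path (GTFSFeed.fromFile and the patterns map are exercised the same way by the test added in this commit):

import com.conveyal.gtfs.GTFSFeed;

public class FindPatternsExample {
    public static void main(String[] args) {
        // Hypothetical feed location; any valid GTFS zip would do.
        GTFSFeed feed = GTFSFeed.fromFile("/tmp/gtfs.zip");
        // Populates feed.patterns (pattern_id -> Pattern) via PatternFinder.
        feed.findPatterns();
        feed.patterns.forEach((id, pattern) -> System.out.println(id));
    }
}
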
18 changes: 9 additions & 9 deletions src/main/java/com/conveyal/gtfs/PatternBuilder.java
@@ -28,7 +28,6 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static com.conveyal.gtfs.loader.JdbcGtfsLoader.copyFromFile;
import static com.conveyal.gtfs.model.Entity.INT_MISSING;
@@ -75,7 +74,7 @@ public PatternBuilder() {

public void create(
Map<TripPatternKey,Pattern> patterns,
Set<String> patternIdsLoadedFromFile,
boolean usePatternsFromFeed,
Map<String, Stop> stopById,
Map<String, Location> locationById,
Map<String, StopArea> stopAreaById
@@ -89,12 +88,12 @@ public void create(
LOG.info("Creating pattern and pattern stops tables.");
Statement statement = connection.createStatement();
statement.execute(String.format("alter table %s add column pattern_id varchar", tripsTableName));
createDatabaseTables(patternIdsLoadedFromFile.isEmpty());
createDatabaseTables(usePatternsFromFeed);
try (PrintStream patternForTripsFileStream = createTempPatternForTripsTable(tempPatternForTripsTextFile, statement)) {
processPatternAndPatternStops(
patternForTripsFileStream,
patterns,
patternIdsLoadedFromFile
usePatternsFromFeed
);
}
updateTripPatternIds(tempPatternForTripsTextFile, statement, tripsTableName);
@@ -111,9 +110,10 @@ public void create(
}
}

private void createDatabaseTables(boolean createPatternsTable) {
if (createPatternsTable) {
// No patterns were loaded from file so the pattern table has not previously been created.
private void createDatabaseTables(boolean usePatternsFromFeed) {
if (!usePatternsFromFeed) {
// If no patterns were loaded from file, create the pattern table. Conversely, if the patterns loaded
// from file have been superseded by generated patterns, recreate the table to start afresh.
patternsTable.createSqlTable(connection, null, true);
}
patternStopsTable.createSqlTable(connection, null, true);
@@ -124,7 +124,7 @@ private void createDatabaseTables(boolean createPatternsTable) {
private void processPatternAndPatternStops(
PrintStream patternForTripsFileStream,
Map<TripPatternKey, Pattern> patterns,
Set<String> patternIdsLoadedFromFile
boolean usePatternsFromFeed
) throws SQLException {

// Generate prepared statements for inserts.
@@ -135,7 +135,7 @@ private void processPatternAndPatternStops(
for (Map.Entry<TripPatternKey, Pattern> entry : patterns.entrySet()) {
Pattern pattern = entry.getValue();
LOG.debug("Batching pattern {}", pattern.pattern_id);
if (!patternIdsLoadedFromFile.contains(pattern.pattern_id)) {
if (!usePatternsFromFeed) {
// Only insert the pattern if it has not already been imported from file.
pattern.setStatementParameters(insertPatternStatement, true);
patternTracker.addBatch();
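The new boolean has two effects in PatternBuilder: when patterns are not taken from the feed, the patterns table is (re)created and the generated patterns are batch-inserted; the pattern_stops table and the trip pattern_id updates happen either way. A condensed, hypothetical restatement of that branching (the real class does this through SQL DDL, batched inserts, and a temp file for the trip updates):

public class PatternBuilderFlagSketch {
    // Hypothetical stand-in for PatternBuilder.create's use of usePatternsFromFeed.
    static void create(boolean usePatternsFromFeed) {
        if (!usePatternsFromFeed) {
            // Either no patterns came from patterns.txt, or the loaded ones were
            // rejected, so recreate the table and insert the generated patterns.
            System.out.println("recreate patterns table; batch-insert generated patterns");
        } else {
            System.out.println("keep the patterns already loaded from patterns.txt");
        }
        // Unconditional in the real code: pattern_stops is recreated and every
        // trip row is updated with its pattern_id.
        System.out.println("rebuild pattern_stops; update trips.pattern_id");
    }

    public static void main(String[] args) {
        create(true);
        create(false);
    }
}
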
37 changes: 33 additions & 4 deletions src/main/java/com/conveyal/gtfs/PatternFinder.java
@@ -64,25 +64,37 @@ public void processTrip(Trip trip, Iterable<StopTime> orderedStopTimes) {
* Once all trips have been processed, call this method to produce the final Pattern objects representing all the
* unique sequences of stops encountered. Returns map of patterns to their keys so that downstream functions can
* make use of trip pattern keys for constructing pattern stops or other derivative objects.
*
* There is no reliable relationship between the patterns loaded from a feed (patternsFromFeed) and the patterns
* generated here, so insertion order is used to pair them: when feed patterns are available, the first pattern
* loaded from the feed updates the id and name of the first pattern created here, and so on.
*/
public Map<TripPatternKey, Pattern> createPatternObjects(
Map<String, Stop> stopById,
Map<String, Location> locationById,
Map<String, StopArea> stopAreaById,
Map<String, Area> areaById,
List<Pattern> patternsFromFeed,
SQLErrorStorage errorStorage
) {
// Make pattern ID one-based to avoid any JS type confusion between an ID of zero vs. null value.
int nextPatternId = 1;
int patternsFromFeedIndex = 0;
boolean usePatternsFromFeed = canUsePatternsFromFeed(patternsFromFeed);
// Create an in-memory list of Patterns because we will later rename them before inserting them into storage.
// Use a LinkedHashMap so we can retrieve the entrySets later in the order of insertion.
Map<TripPatternKey, Pattern> patterns = new LinkedHashMap<>();
// TODO assign patterns sequential small integer IDs (may include route)
for (TripPatternKey key : tripsForPattern.keySet()) {
Collection<Trip> trips = tripsForPattern.get(key);
Pattern pattern = new Pattern(key.stops, trips, null);
// Overwrite long UUID with sequential integer pattern ID
pattern.pattern_id = Integer.toString(nextPatternId++);
if (usePatternsFromFeed) {
pattern.pattern_id = patternsFromFeed.get(patternsFromFeedIndex).pattern_id;
pattern.name = patternsFromFeed.get(patternsFromFeedIndex).name;
} else {
// Overwrite long UUID with sequential integer pattern ID
pattern.pattern_id = Integer.toString(nextPatternId++);
}
// FIXME: Should associated shapes be a single entry?
pattern.associatedShapes = new HashSet<>();
trips.stream().forEach(trip -> pattern.associatedShapes.add(trip.shape_id));
@@ -96,9 +108,12 @@ public Map<TripPatternKey, Pattern> createPatternObjects(
.setBadValue(pattern.associatedShapes.toString()));
}
patterns.put(key, pattern);
patternsFromFeedIndex++;
}
if (!usePatternsFromFeed) {
// Name patterns before storing in SQL database if they have not already been provided with a feed.
renamePatterns(patterns.values(), stopById, locationById, stopAreaById, areaById);
}
// Name patterns before storing in SQL database.
renamePatterns(patterns.values(), stopById, locationById, stopAreaById, areaById);
LOG.info("Total patterns: {}", tripsForPattern.keySet().size());
return patterns;
}
@@ -107,6 +122,20 @@ public Map<TripPatternKey, Pattern> createPatternObjects(
* Destructively rename the supplied collection of patterns. This process requires access to all stops, locations
* and stop areas in the feed. Some validators already cache a map of all the stops. There's probably a
* cleaner way to do this.
*
* If there is a difference in the number of patterns provided by a feed and the number of patterns generated here,
* the patterns provided by the feed are rejected.
*/
public boolean canUsePatternsFromFeed(List<Pattern> patternsFromFeed) {
boolean usePatternsFromFeed = patternsFromFeed.size() == tripsForPattern.keySet().size();
LOG.info("Using patterns from feed: {}", usePatternsFromFeed);
return usePatternsFromFeed;
}

/**
* Destructively rename the supplied collection of patterns.
* This process requires access to all the stops in the feed.
* Some validators already cache a map of all the stops. There's probably a cleaner way to do this.
*/
public static void renamePatterns(
Collection<Pattern> patterns,
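The pairing rule in createPatternObjects is purely positional: canUsePatternsFromFeed only checks that the feed supplies exactly as many patterns as the finder generates, and the LinkedHashMap's insertion order then decides which feed pattern lends its id and name to which generated pattern. A self-contained sketch of that rule, with plain strings standing in for Pattern and TripPatternKey:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class PatternIdAssignmentSketch {
    public static void main(String[] args) {
        // pattern_id values loaded from the feed's patterns.txt, in file order.
        List<String> feedPatternIds = List.of("shuttle-in", "shuttle-out", "loop");
        // Stand-ins for the generated TripPatternKeys, in insertion order.
        List<String> generatedKeys = List.of("key1", "key2", "key3");
        // Mirrors canUsePatternsFromFeed: the counts must match exactly.
        boolean usePatternsFromFeed = feedPatternIds.size() == generatedKeys.size();
        Map<String, String> assignedIds = new LinkedHashMap<>();
        int nextPatternId = 1; // one-based, as in createPatternObjects
        int feedIndex = 0;
        for (String key : generatedKeys) {
            assignedIds.put(key, usePatternsFromFeed
                ? feedPatternIds.get(feedIndex++)
                : Integer.toString(nextPatternId++));
        }
        System.out.println(assignedIds); // {key1=shuttle-in, key2=shuttle-out, key3=loop}
    }
}
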
11 changes: 5 additions & 6 deletions src/main/java/com/conveyal/gtfs/validator/PatternFinderValidator.java
@@ -17,11 +17,10 @@
import org.slf4j.LoggerFactory;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
* Groups trips together into "patterns" that share the same sequence of stops.
@@ -63,9 +62,9 @@ public void validateTrip(
*/
@Override
public void complete(ValidationResult validationResult) {
Set<String> patternIds = new HashSet<>();
List<Pattern> patternsFromFeed = new ArrayList<>();
for(Pattern pattern : feed.patterns) {
patternIds.add(pattern.pattern_id);
patternsFromFeed.add(pattern);
}
LOG.info("Finding patterns...");
Map<String, Stop> stopById = new HashMap<>();
@@ -85,7 +84,7 @@ public void complete(ValidationResult validationResult) {
areaById.put(area.area_id, area);
}
// Although patterns may have already been loaded from file, the trip patterns are still required.
Map<TripPatternKey, Pattern> patterns = patternFinder.createPatternObjects(stopById, locationById, stopAreaById, areaById, errorStorage);
patternBuilder.create(patterns, patternIds, stopById, locationById, stopAreaById);
Map<TripPatternKey, Pattern> patterns = patternFinder.createPatternObjects(stopById, locationById, stopAreaById, areaById, patternsFromFeed, errorStorage);
patternBuilder.create(patterns, patternFinder.canUsePatternsFromFeed(patternsFromFeed), stopById, locationById, stopAreaById);
}
}
121 changes: 121 additions & 0 deletions src/test/java/com/conveyal/gtfs/validator/PatternFinderValidatorTest.java
@@ -0,0 +1,121 @@
package com.conveyal.gtfs.validator;

import com.conveyal.gtfs.GTFSFeed;
import com.conveyal.gtfs.TestUtils;
import com.conveyal.gtfs.loader.EntityPopulator;
import com.conveyal.gtfs.loader.FeedLoadResult;
import com.conveyal.gtfs.loader.JDBCTableReader;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.model.Pattern;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import javax.sql.DataSource;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import static com.conveyal.gtfs.GTFS.load;
import static com.conveyal.gtfs.GTFS.validate;
import static com.conveyal.gtfs.TestUtils.getResourceFileName;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;


class PatternFinderValidatorTest {

private static String testDBName;

private static DataSource testDataSource;

@BeforeAll
public static void setUpClass() {
// create a new database
testDBName = TestUtils.generateNewDB();
String dbConnectionUrl = String.format("jdbc:postgresql://localhost/%s", testDBName);
testDataSource = TestUtils.createTestDataSource(dbConnectionUrl);
}

@AfterAll
public static void tearDownClass() {
TestUtils.dropDB(testDBName);
}

@Test
void canUseFeedPatterns() throws SQLException {
String fileName = getResourceFileName("real-world-gtfs-feeds/RABA.zip");
FeedLoadResult feedLoadResult = load(fileName, testDataSource);
String testNamespace = feedLoadResult.uniqueIdentifier;
validate(testNamespace, testDataSource);
checkPatternStopsAgainstFeedPatterns(fileName, testNamespace);
}

/**
* Remove one pattern from the feed so that there is a mismatch between the patterns loaded and the patterns
* generated. This will result in the generated patterns taking precedence over the loaded patterns.
*/
@Test
void canRevertToGeneratedPatterns() throws SQLException {
String fileName = getResourceFileName("real-world-gtfs-feeds/RABA.zip");
FeedLoadResult feedLoadResult = load(fileName, testDataSource);
String testNamespace = feedLoadResult.uniqueIdentifier;
String patternIdToExclude = "2k3j";
executeSqlStatement(String.format(
"delete from %s where pattern_id = '%s'",
String.format("%s.%s", testNamespace, Table.PATTERNS.name),
patternIdToExclude
));
validate(testNamespace, testDataSource);
JDBCTableReader<Pattern> patterns = new JDBCTableReader(Table.PATTERNS,
testDataSource,
testNamespace + ".",
EntityPopulator.PATTERN
);
for (Pattern pattern : patterns.getAllOrdered()) {
assertThatSqlQueryYieldsRowCountGreaterThanZero(generateSql(testNamespace, pattern.pattern_id));
}
}

@Test
void canUseGeneratedPatterns() throws SQLException, IOException {
String zipFileName = TestUtils.zipFolderFiles("fake-agency", true);
FeedLoadResult feedLoadResult = load(zipFileName, testDataSource);
String testNamespace = feedLoadResult.uniqueIdentifier;
validate(testNamespace, testDataSource);
checkPatternStopsAgainstFeedPatterns(zipFileName, testNamespace);
}

private void checkPatternStopsAgainstFeedPatterns(String zipFileName, String testNamespace) throws SQLException {
GTFSFeed feed = GTFSFeed.fromFile(zipFileName);
for (String key : feed.patterns.keySet()) {
Pattern pattern = feed.patterns.get(key);
assertThatSqlQueryYieldsRowCountGreaterThanZero(generateSql(testNamespace, pattern.pattern_id));
}
}

private String generateSql(String testNamespace, String patternId) {
return String.format(
"select * from %s where pattern_id = '%s'",
String.format("%s.%s", testNamespace, Table.PATTERN_STOP.name),
patternId
);
}

    private void assertThatSqlQueryYieldsRowCountGreaterThanZero(String sql) throws SQLException {
        int recordCount = 0;
        // Close the connection and result set even if the query or assertion fails.
        try (
            Connection connection = testDataSource.getConnection();
            ResultSet rs = connection.prepareStatement(sql).executeQuery()
        ) {
            while (rs.next()) recordCount++;
        }
        assertThat(recordCount, greaterThan(0));
    }

    private void executeSqlStatement(String sql) throws SQLException {
        // try-with-resources closes the statement and returns the connection to the pool.
        try (
            Connection connection = testDataSource.getConnection();
            Statement statement = connection.createStatement()
        ) {
            statement.execute(sql);
            connection.commit();
        }
    }
}
Binary file added src/test/resources/real-world-gtfs-feeds/RABA.zip
