diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 539a998bf..8452340b0 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -5,9 +5,10 @@ - [ ] The description lists all applicable issues this PR seeks to resolve - [ ] The description lists any configuration setting(s) that differ from the default settings - [ ] All tests and CI builds passing +- [ ] The description lists all relevant PRs included in this release _(remove this if not merging to master)_ - [ ] e2e tests are all passing _(remove this if not merging to master)_ - [ ] Code coverage improves or is at 100% _(remove this if not merging to master)_ ### Description -Please explain the changes you made here and, if not immediately obvious from the code, how they resolve any referenced issues. Be sure to include all issues being resolved and any special configuration settings that are need for the software to run properly with these changes. +Please explain the changes you made here and, if not immediately obvious from the code, how they resolve any referenced issues. Be sure to include all issues being resolved and any special configuration settings that are need for the software to run properly with these changes. If merging to master, please also list the PRs that are to be included. diff --git a/.travis.yml b/.travis.yml index b999278c2..ea6dfeb81 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,21 +1,31 @@ +dist: trusty # jdk 8 not available on xenial language: java -jdk: -- oraclejdk8 +java: + - oraclejdk8 install: true sudo: false # Install mongoDB to perform persistence tests -services: mongodb +services: + - mongodb + - postgresql +addons: + postgresql: 9.6 cache: directories: - - "$HOME/.m2" + - $HOME/.m2 + - $HOME/.cache/yarn # Install semantic-release before_script: - - yarn global add @conveyal/maven-semantic-release semantic-release + - yarn global add @conveyal/maven-semantic-release semantic-release@15 + # Create dir for GTFS+ files (used during testing) + - mkdir /tmp/gtfsplus before_install: #- sed -i.bak -e 's|https://nexus.codehaus.org/snapshots/|https://oss.sonatype.org/content/repositories/codehaus-snapshots/|g' ~/.m2/settings.xml # set region in AWS config for S3 setup - mkdir ~/.aws && printf '%s\n' '[default]' 'aws_access_key_id=foo' 'aws_secret_access_key=bar' 'region=us-east-1' > ~/.aws/config - cp configurations/default/server.yml.tmp configurations/default/server.yml +# create database for tests +- psql -U postgres -c 'CREATE DATABASE catalogue;' script: # package jar - mvn package @@ -65,5 +75,5 @@ deploy: local-dir: deploy acl: public_read on: - repo: conveyal/datatools-server + repo: ibi-group/datatools-server all_branches: true diff --git a/configurations/default/server.yml.tmp b/configurations/default/server.yml.tmp index d95317b46..c29382e26 100644 --- a/configurations/default/server.yml.tmp +++ b/configurations/default/server.yml.tmp @@ -15,6 +15,9 @@ modules: enabled: false user_admin: enabled: true + # Enable GTFS+ module for testing purposes + gtfsplus: + enabled: true gtfsapi: enabled: true load_on_fetch: false @@ -29,3 +32,6 @@ extensions: enabled: true api: http://api.transitfeeds.com/v1/getFeeds key: your-api-key + # Enable MTC for testing purposes + mtc: + enabled: true diff --git a/pom.xml b/pom.xml index 18b06a86a..a37fbe65a 100644 --- a/pom.xml +++ b/pom.xml @@ -51,7 +51,10 @@ https://github.com/conveyal/datatools-server.git - 2.9.8 + 2.9.9 + + 17.5 @@ -158,23 +161,6 @@ - - - maven-surefire-plugin - 2.22.0 - - - 
org.junit.platform - junit-platform-surefire-provider - 1.3.1 - - - org.junit.jupiter - junit-jupiter-engine - 5.3.1 - - - @@ -218,6 +204,11 @@ always + + + jitpack.io + https://jitpack.io + @@ -255,11 +246,22 @@ 2.1.0 - + + + junit + junit + 4.12 + test + + + com.conveyal gtfs-lib - 4.2.5 + 4.3.6 @@ -322,15 +324,18 @@ org.geotools gt-shapefile - 19.2 + ${geotools.version} - - + - org.junit.jupiter - junit-jupiter-api - 5.3.1 - test + org.geotools + gt-metadata + ${geotools.version} + + + org.geotools + gt-api + ${geotools.version} @@ -359,8 +364,32 @@ org.hamcrest java-hamcrest 2.0.0.0 + test + + + + com.github.conveyal + java-snapshot-matcher + 3495b32f7b4d3f82590e0a2284029214070b6984 + test + + + + com.github.tomakehurst + wiremock-standalone + 2.14.0 + test + + + + net.sf.supercsv + super-csv + 2.4.0 - diff --git a/src/main/java/com/conveyal/datatools/common/status/MonitorableJob.java b/src/main/java/com/conveyal/datatools/common/status/MonitorableJob.java index b88c120ed..660b21b68 100644 --- a/src/main/java/com/conveyal/datatools/common/status/MonitorableJob.java +++ b/src/main/java/com/conveyal/datatools/common/status/MonitorableJob.java @@ -6,6 +6,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.File; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; @@ -19,10 +20,11 @@ */ public abstract class MonitorableJob implements Runnable { private static final Logger LOG = LoggerFactory.getLogger(MonitorableJob.class); - protected final String owner; + public final String owner; // Public fields will be serialized over HTTP API and visible to the web client public final JobType type; + public File file; public String parentJobId; public JobType parentJobType; // Status is not final to allow some jobs to have extra status fields. @@ -48,6 +50,7 @@ public enum JobType { LOAD_FEED, VALIDATE_FEED, DEPLOY_TO_OTP, + EXPORT_GIS, FETCH_PROJECT_FEEDS, FETCH_SINGLE_FEED, MAKE_PROJECT_PUBLIC, @@ -57,7 +60,7 @@ public enum JobType { EXPORT_SNAPSHOT_TO_GTFS, CONVERT_EDITOR_MAPDB_TO_SQL, VALIDATE_ALL_FEEDS, - MERGE_PROJECT_FEEDS + MERGE_FEED_VERSIONS } public MonitorableJob(String owner, String name, JobType type) { @@ -90,6 +93,10 @@ private void registerJob() { DataManager.userJobsMap.put(this.owner, userJobs); } + public File retrieveFile () { + return file; + } + /** * This method should never be called directly or overridden. It is a standard clean up stage for all * monitorable jobs. diff --git a/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorController.java b/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorController.java index 0d488b603..fc84d4d6b 100644 --- a/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorController.java +++ b/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorController.java @@ -1,6 +1,7 @@ package com.conveyal.datatools.editor.controllers.api; import com.conveyal.datatools.common.utils.S3Utils; +import com.conveyal.datatools.common.utils.SparkUtils; import com.conveyal.datatools.editor.controllers.EditorLockController; import com.conveyal.datatools.manager.auth.Auth0UserProfile; import com.conveyal.datatools.manager.models.FeedSource; @@ -86,6 +87,7 @@ private void registerRoutes() { // Handle update useFrequency field. Hitting this endpoint will delete all trips for a pattern and update the // useFrequency field. 
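+        // Note: a stop_times sub-route is also registered for patterns below to normalize stop times
+        // for all of a pattern's trips in bulk (see updateStopTimesFromPatternStops).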
if ("pattern".equals(classToLowercase)) { + put(ROOT_ROUTE + ID_PARAM + "/stop_times", this::updateStopTimesFromPatternStops, json::write); delete(ROOT_ROUTE + ID_PARAM + "/trips", this::deleteTripsForPattern, json::write); } } @@ -112,7 +114,7 @@ private String deleteTripsForPattern(Request req, Response res) { logMessageAndHalt(req, 500, "Error deleting entity", e); return null; } finally { - LOG.info("Delete operation took {} msec", System.currentTimeMillis() - startTime); + LOG.info("Delete trips for pattern operation took {} msec", System.currentTimeMillis() - startTime); } } @@ -124,8 +126,9 @@ private String deleteMultipleTrips(Request req, Response res) { long startTime = System.currentTimeMillis(); String namespace = getNamespaceAndValidateSession(req); String[] tripIds = req.queryParams("tripIds").split(","); + JdbcTableWriter tableWriter = null; try { - JdbcTableWriter tableWriter = new JdbcTableWriter(table, datasource, namespace); + tableWriter = new JdbcTableWriter(table, datasource, namespace); for (String tripId: tripIds) { // Delete each trip ID found in query param WITHOUT auto-committing. int result = tableWriter.delete(Integer.parseInt(tripId), false); @@ -135,7 +138,7 @@ private String deleteMultipleTrips(Request req, Response res) { throw new SQLException(message); } } - // Commit the transaction after iterating over trip IDs (because the deletes where made without autocommit). + // Commit the transaction after iterating over trip IDs (because the deletes were made without autocommit). tableWriter.commit(); LOG.info("Deleted {} trips", tripIds.length); } catch (InvalidNamespaceException e) { @@ -143,6 +146,7 @@ private String deleteMultipleTrips(Request req, Response res) { } catch (Exception e) { logMessageAndHalt(req, 500, "Error deleting entity", e); } finally { + if (tableWriter != null) tableWriter.close(); LOG.info("Delete operation took {} msec", System.currentTimeMillis() - startTime); } return formatJSON(String.format("Deleted %d.", tripIds.length), 200); @@ -159,7 +163,7 @@ private String deleteOne(Request req, Response res) { JdbcTableWriter tableWriter = new JdbcTableWriter(table, datasource, namespace); if (tableWriter.delete(id, true) == 1) { // FIXME: change return message based on result value - return formatJSON(String.valueOf("Deleted one."), 200); + return formatJSON("Deleted one.", 200); } } catch (Exception e) { logMessageAndHalt(req, 400, "Error deleting entity", e); @@ -169,17 +173,37 @@ private String deleteOne(Request req, Response res) { return null; } + /** + * For a given pattern ID, update all its trips' stop times to conform to the default travel and dwell times. This + * is used, for instance, when a new pattern stop is added or inserted into an existing pattern that has trips which + * need the updated travel times applied in bulk. 
+ */ + private String updateStopTimesFromPatternStops (Request req, Response res) { + long startTime = System.currentTimeMillis(); + String namespace = getNamespaceAndValidateSession(req); + int patternId = getIdFromRequest(req); + try { + int beginStopSequence = Integer.parseInt(req.queryParams("stopSequence")); + JdbcTableWriter tableWriter = new JdbcTableWriter(table, datasource, namespace); + int stopTimesUpdated = tableWriter.normalizeStopTimesForPattern(patternId, beginStopSequence); + return SparkUtils.formatJSON("updateResult", stopTimesUpdated + " stop times updated."); + } catch (Exception e) { + logMessageAndHalt(req, 400, "Error normalizing stop times", e); + return null; + } finally { + LOG.info("Normalize stop times operation took {} msec", System.currentTimeMillis() - startTime); + } + } + /** * HTTP endpoint to upload branding image to S3 for either agency or route entities. The endpoint also handles * updating the branding URL field to match the S3 URL. */ private String uploadEntityBranding (Request req, Response res) { int id = getIdFromRequest(req); - String url = null; + String url; try { - // FIXME: remove cast to string. - String idAsString = String.valueOf(id); - url = S3Utils.uploadBranding(req, String.join("_", classToLowercase, idAsString)); + url = S3Utils.uploadBranding(req, String.format("%s_%d", classToLowercase, id)); } catch (HaltException e) { // Do not re-catch halts thrown for exceptions that have already been caught. throw e; @@ -300,10 +324,4 @@ private Integer getIdFromRequest(Request req) { } return id; } - - // TODO add hooks - abstract void getEntityHook(T entity); - abstract void createEntityHook(T entity); - abstract void updateEntityHook(T entity); - abstract void deleteEntityHook(T entity); } diff --git a/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorControllerImpl.java b/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorControllerImpl.java index fa796e887..e25e29601 100644 --- a/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorControllerImpl.java +++ b/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorControllerImpl.java @@ -9,24 +9,4 @@ public class EditorControllerImpl extends EditorController { public EditorControllerImpl(String apiPrefix, Table table, DataSource dataSource){ super(apiPrefix, table, dataSource); } - - @Override - void getEntityHook(Entity entity) { - - } - - @Override - void createEntityHook(Entity entity) { - - } - - @Override - void updateEntityHook(Entity entity) { - - } - - @Override - void deleteEntityHook(Entity entity) { - - } } diff --git a/src/main/java/com/conveyal/datatools/editor/controllers/api/SnapshotController.java b/src/main/java/com/conveyal/datatools/editor/controllers/api/SnapshotController.java index 9f4a45884..0ef5be411 100644 --- a/src/main/java/com/conveyal/datatools/editor/controllers/api/SnapshotController.java +++ b/src/main/java/com/conveyal/datatools/editor/controllers/api/SnapshotController.java @@ -6,6 +6,7 @@ import com.conveyal.datatools.editor.jobs.ExportSnapshotToGTFSJob; import com.conveyal.datatools.manager.DataManager; import com.conveyal.datatools.manager.auth.Auth0UserProfile; +import com.conveyal.datatools.manager.auth.Actions; import com.conveyal.datatools.manager.controllers.api.FeedVersionController; import com.conveyal.datatools.manager.models.FeedDownloadToken; import com.conveyal.datatools.manager.models.FeedSource; @@ -57,7 +58,7 @@ private static Snapshot getSnapshotFromRequest(Request req) { String id = 
req.params("id"); if (id == null) logMessageAndHalt(req, 400, "Must provide valid snapshot ID"); // Check user permissions on feed source. - FeedVersionController.requestFeedSourceById(req, "view", "feedId"); + FeedVersionController.requestFeedSourceById(req, Actions.VIEW, "feedId"); return Persistence.snapshots.getById(id); } @@ -66,7 +67,7 @@ private static Snapshot getSnapshotFromRequest(Request req) { */ private static Collection getSnapshots(Request req, Response res) { // Get feed source and check user permissions. - FeedSource feedSource = FeedVersionController.requestFeedSourceById(req, "view", "feedId"); + FeedSource feedSource = FeedVersionController.requestFeedSourceById(req, Actions.VIEW, "feedId"); // FIXME Do we need a way to return all snapshots? // Is this used in GTFS Data Manager to retrieveById snapshots in bulk? @@ -79,7 +80,7 @@ private static Collection getSnapshots(Request req, Response res) { */ private static String createSnapshot (Request req, Response res) throws IOException { Auth0UserProfile userProfile = req.attribute("user"); - FeedSource feedSource = FeedVersionController.requestFeedSourceById(req, "edit", "feedId"); + FeedSource feedSource = FeedVersionController.requestFeedSourceById(req, Actions.EDIT, "feedId"); // Take fields from request body for creating snapshot. Snapshot snapshot = json.read(req.body()); // Ensure feed source ID and snapshotOf namespace is correct @@ -104,7 +105,7 @@ private static String importFeedVersionAsSnapshot(Request req, Response res) { Auth0UserProfile userProfile = req.attribute("user"); // Get feed version from request (and check permissions). String feedVersionId = req.queryParams("feedVersionId"); - FeedVersion feedVersion = FeedVersionController.requestFeedVersion(req, "edit", feedVersionId); + FeedVersion feedVersion = FeedVersionController.requestFeedVersion(req, Actions.EDIT, feedVersionId); FeedSource feedSource = feedVersion.parentFeedSource(); // Create and run snapshot job Snapshot snapshot = new Snapshot("Snapshot of " + feedVersion.name, feedSource.id, feedVersion.namespace); @@ -135,7 +136,7 @@ private static String restoreSnapshot (Request req, Response res) { String id = req.params("id"); // FIXME Ensure namespace id exists in database? // Retrieve feed source. - FeedSource feedSource = FeedVersionController.requestFeedSourceById(req, "edit", "feedId"); + FeedSource feedSource = FeedVersionController.requestFeedSourceById(req, Actions.EDIT, "feedId"); Snapshot snapshotToRestore = Persistence.snapshots.getById(id); if (snapshotToRestore == null) { logMessageAndHalt(req, 400, "Must specify valid snapshot ID"); @@ -212,7 +213,7 @@ private static Snapshot deleteSnapshot(Request req, Response res) { String id = req.params("id"); // FIXME Ensure namespace id exists in database. // Check feed source permissions. 
- FeedSource feedSource = FeedVersionController.requestFeedSourceById(req, "edit", "feedId"); + FeedSource feedSource = FeedVersionController.requestFeedSourceById(req, Actions.EDIT, "feedId"); // Retrieve snapshot Snapshot snapshot = Persistence.snapshots.getById(id); if (snapshot == null) logMessageAndHalt(req, 400, "Must provide valid snapshot ID."); diff --git a/src/main/java/com/conveyal/datatools/editor/jobs/ConvertEditorMapDBToSQL.java b/src/main/java/com/conveyal/datatools/editor/jobs/ConvertEditorMapDBToSQL.java index c48fc7273..cbc21b453 100644 --- a/src/main/java/com/conveyal/datatools/editor/jobs/ConvertEditorMapDBToSQL.java +++ b/src/main/java/com/conveyal/datatools/editor/jobs/ConvertEditorMapDBToSQL.java @@ -62,8 +62,7 @@ public void jobLogic() { and( eq("version", versionNumber), eq(Snapshot.FEED_SOURCE_REF, feedId) - ), - null + ) ); boolean snapshotExists = true; if (matchingSnapshot == null) { @@ -351,4 +350,4 @@ private int handleBatchExecution(int batchSize, PreparedStatement ... preparedSt return batchSize; } } -} \ No newline at end of file +} diff --git a/src/main/java/com/conveyal/datatools/editor/jobs/GisExport.java b/src/main/java/com/conveyal/datatools/editor/jobs/GisExport.java deleted file mode 100644 index d224c0779..000000000 --- a/src/main/java/com/conveyal/datatools/editor/jobs/GisExport.java +++ /dev/null @@ -1,222 +0,0 @@ -package com.conveyal.datatools.editor.jobs; - -import com.conveyal.datatools.editor.datastore.FeedTx; -import com.conveyal.datatools.editor.datastore.GlobalTx; -import com.conveyal.datatools.editor.datastore.VersionedDataStore; -import com.conveyal.datatools.editor.models.transit.EditorFeed; -import com.conveyal.datatools.editor.models.transit.Route; -import com.conveyal.datatools.editor.models.transit.Stop; -import com.conveyal.datatools.editor.models.transit.TripPattern; -import com.conveyal.datatools.editor.utils.DirectoryZip; -import com.google.common.io.Files; -import com.vividsolutions.jts.geom.Coordinate; -import com.vividsolutions.jts.geom.GeometryFactory; -import com.vividsolutions.jts.geom.LineString; -import org.geotools.data.DataUtilities; -import org.geotools.data.DefaultTransaction; -import org.geotools.data.Transaction; -import org.geotools.data.collection.ListFeatureCollection; -import org.geotools.data.shapefile.ShapefileDataStore; -import org.geotools.data.shapefile.ShapefileDataStoreFactory; -import org.geotools.data.simple.SimpleFeatureCollection; -import org.geotools.data.simple.SimpleFeatureSource; -import org.geotools.data.simple.SimpleFeatureStore; -import org.geotools.feature.simple.SimpleFeatureBuilder; -import org.geotools.referencing.crs.DefaultGeographicCRS; -import org.opengis.feature.simple.SimpleFeature; -import org.opengis.feature.simple.SimpleFeatureType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** Export routes or stops as a shapefile */ -public class GisExport implements Runnable { - public static final Logger LOG = LoggerFactory.getLogger(GisExport.class); - File file; - Type type; - Collection agencyIds; - - public GisExport(Type type, File file, Collection agencyIds) { - this.type = type; - this.file = file; - this.agencyIds = agencyIds; - } - - @Override - public void run() { - File outDir = Files.createTempDir(); - File outShp = new File(outDir, file.getName().replaceAll("\\.zip", "") + 
".shp"); - - GlobalTx gtx = VersionedDataStore.getGlobalTx(); - FeedTx atx = null; - try { - ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory(); - - Map params = new HashMap(); - params.put("url", outShp.toURI().toURL()); - params.put("create spatial index", Boolean.TRUE); - - ShapefileDataStore datastore = (ShapefileDataStore) dataStoreFactory.createNewDataStore(params); - datastore.forceSchemaCRS(DefaultGeographicCRS.WGS84); - - SimpleFeatureType STOP_TYPE = DataUtilities.createType( - "Stop", - "the_geom:Point:srid=4326," + - "name:String," + - "code:String," + - "desc:String," + - "id:String," + - "agency:String" - ); - - SimpleFeatureType ROUTE_TYPE = DataUtilities.createType( - "Route", // <- the name for our feature type - "the_geom:LineString:srid=4326," + - "patternName:String," + - "shortName:String," + - "longName:String," + - "desc:String," + - "type:String," + - "url:String," + - "routeColor:String," + - "routeTextColor:String," + - "agency:String" - ); - - SimpleFeatureCollection collection; - - SimpleFeatureType collectionType; - - SimpleFeatureBuilder featureBuilder = null; - - List features = new ArrayList(); - - if (type.equals(Type.STOPS)) { - collectionType = STOP_TYPE; - datastore.createSchema(STOP_TYPE); - featureBuilder = new SimpleFeatureBuilder(STOP_TYPE); - - for (String feedId : agencyIds) { - EditorFeed fs = gtx.feeds.get(feedId); - - atx = VersionedDataStore.getFeedTx(feedId); - for (Stop s : atx.stops.values()) { - featureBuilder.add(s.location); - featureBuilder.add(s.stopName); - featureBuilder.add(s.stopCode); - featureBuilder.add(s.stopDesc); - featureBuilder.add(s.getGtfsId()); - featureBuilder.add(fs.feedPublisherName); - SimpleFeature feature = featureBuilder.buildFeature(null); - features.add(feature); - } - - atx.rollback(); - } - } else if (type.equals(Type.ROUTES)) { - collectionType = ROUTE_TYPE; - datastore.createSchema(ROUTE_TYPE); - featureBuilder = new SimpleFeatureBuilder(ROUTE_TYPE); - - GeometryFactory gf = new GeometryFactory(); - - for (String feedId : agencyIds) { - EditorFeed fs = gtx.feeds.get(feedId); - - atx = VersionedDataStore.getFeedTx(feedId); - - // we loop over trip patterns. Note that this will yield several lines for routes that have - // multiple patterns. There's no real good way to reconcile the shapes of multiple patterns. 
- for (TripPattern tp : atx.tripPatterns.values()) { - LineString shape; - if (tp.shape != null) { - shape = tp.shape; - } else { - // build the shape from the stops - Coordinate[] coords = new Coordinate[tp.patternStops.size()]; - - for (int i = 0; i < coords.length; i++) { - coords[i] = atx.stops.get(tp.patternStops.get(i).stopId).location.getCoordinate(); - } - - shape = gf.createLineString(coords); - } - - Route r = atx.routes.get(tp.routeId); - - featureBuilder.add(shape); - featureBuilder.add(tp.name); - featureBuilder.add(r.routeShortName); - featureBuilder.add(r.routeLongName); - featureBuilder.add(r.routeDesc); - - if (r.routeTypeId != null) - featureBuilder.add(gtx.routeTypes.get(r.routeTypeId).toString()); - else - featureBuilder.add(""); - - featureBuilder.add(r.routeUrl); - featureBuilder.add(r.routeColor); - featureBuilder.add(r.routeTextColor); - featureBuilder.add(fs.feedPublisherName); - SimpleFeature feature = featureBuilder.buildFeature(null); - features.add(feature); - } - - atx.rollback(); - } - } - else - throw new IllegalStateException("Invalid type"); - - // save the file - collection = new ListFeatureCollection(collectionType, features); - - Transaction transaction = new DefaultTransaction("create"); - - String typeName = datastore.getTypeNames()[0]; - SimpleFeatureSource featureSource = datastore.getFeatureSource(typeName); - - if (featureSource instanceof SimpleFeatureStore) - { - SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource; - - featureStore.setTransaction(transaction); - - featureStore.addFeatures(collection); - transaction.commit(); - - transaction.close(); - } - else - { - throw new Exception(typeName + " does not support read/write access"); - } - - // zip the file - DirectoryZip.zip(outDir, file); - - // clean up - for (File f : outDir.listFiles()) { - f.delete(); - } - outDir.delete(); - - } catch (Exception e) { - LOG.error("An excpetion occurred during the GIS export"); - e.printStackTrace(); - } finally { - if (gtx != null) gtx.rollback(); - if (atx != null) atx.rollbackIfOpen(); - } - } - - public static enum Type { ROUTES, STOPS }; -} diff --git a/src/main/java/com/conveyal/datatools/editor/jobs/ProcessGisExport.java b/src/main/java/com/conveyal/datatools/editor/jobs/ProcessGisExport.java deleted file mode 100755 index 5467a7c28..000000000 --- a/src/main/java/com/conveyal/datatools/editor/jobs/ProcessGisExport.java +++ /dev/null @@ -1,199 +0,0 @@ -package com.conveyal.datatools.editor.jobs; - - -public class ProcessGisExport implements Runnable { - @Override - public void run() { - - } -/* - private Long _gisExportId; - - - public ProcessGisExport(Long gisExportId) - { - this._gisExportId = gisExportId; - } - - public void doJob() { - - String exportName = "gis_" + this._gisExportId; - - File outputZipFile = new File(Play.configuration.getProperty("application.publicDataDirectory"), exportName + ".zip"); - - File outputDirectory = new File(Play.configuration.getProperty("application.publicDataDirectory"), exportName); - - LOG.info("outfile path:" + outputDirectory.getAbsolutePath()); - - File outputShapefile = new File(outputDirectory, exportName + ".shp"); - - try - { - GisExport gisExport = null; - - while(gisExport == null) - { - gisExport = GisExport.findById(this._gisExportId); - Thread.sleep(1000); - - LOG.info("Waiting for gisExport object..."); - } - - - if(!outputDirectory.exists()) - { - outputDirectory.mkdir(); - } - - ShapefileDataStoreFactory com.conveyal.datatools.editor.datastoreFactory = new 
ShapefileDataStoreFactory(); - - Map params = new HashMap(); - params.put("url", outputShapefile.toURI().toURL()); - params.put("create spatial index", Boolean.TRUE); - - ShapefileDataStore com.conveyal.datatools.editor.datastore = (ShapefileDataStore)dataStoreFactory.createNewDataStore(params); - com.conveyal.datatools.editor.datastore.forceSchemaCRS(DefaultGeographicCRS.WGS84); - - SimpleFeatureType STOP_TYPE = DataUtilities.createType( - "Stop", - "location:Point:srid=4326," + - "name:String," + - "code:String," + - "desc:String," + - "id:String," + - "agency:String" - ); - - SimpleFeatureType ROUTE_TYPE = DataUtilities.createType( - "Route", // <- the name for our feature type - "route:LineString:srid=4326," + - "patternName:String," + - "shortName:String," + - "longName:String," + - "desc:String," + - "type:String," + - "url:String," + - "routeColor:String," + - "routeTextColor:String," + - "agency:String" - ); - - SimpleFeatureCollection collection; - - SimpleFeatureType collectionType; - - SimpleFeatureBuilder featureBuilder = null; - - List features = new ArrayList(); - - if(gisExport.type.equals(GisUploadType.STOPS)) - { - collectionType = STOP_TYPE; - com.conveyal.datatools.editor.datastore.createSchema(STOP_TYPE); - featureBuilder = new SimpleFeatureBuilder(STOP_TYPE); - - List stops = Stop.find("agency in (:ids)").bind("ids", gisExport.feeds).fetch(); - - for(Stop s : stops) - { - featureBuilder.add(s.locationPoint()); - featureBuilder.add(s.stopName); - featureBuilder.add(s.stopCode); - featureBuilder.add(s.stopDesc); - featureBuilder.add(s.gtfsStopId); - featureBuilder.add(s.agency.name); - SimpleFeature feature = featureBuilder.buildFeature(null); - features.add(feature); - } - } - else if(gisExport.type.equals(GisUploadType.ROUTES)) - { - collectionType = ROUTE_TYPE; - com.conveyal.datatools.editor.datastore.createSchema(ROUTE_TYPE); - featureBuilder = new SimpleFeatureBuilder(ROUTE_TYPE); - - List routes = Route.find("agency in (:ids)").bind("ids", gisExport.feeds).fetch(); - - // check for duplicates - - // HashMap existingRoutes = new HashMap(); - - for(Route r : routes) - { -// String routeId = r.routeLongName + "_" + r.routeDesc + "_ " + r.phone.id; -// -// if(existingRoutes.containsKey(routeId)) -// continue; -// else -// existingRoutes.put(routeId, true); - - - List patterns = TripPattern.find("route = ?", r).fetch(); - for(TripPattern tp : patterns) - { - if(tp.shape == null) - continue; - - featureBuilder.add(tp.shape.shape); - featureBuilder.add(tp.name); - featureBuilder.add(r.routeShortName); - featureBuilder.add(r.routeLongName); - featureBuilder.add(r.routeDesc); - - if(r.routeType != null) - featureBuilder.add(r.routeType.toString()); - else - featureBuilder.add(""); - - featureBuilder.add(r.routeUrl); - featureBuilder.add(r.routeColor); - featureBuilder.add(r.routeTextColor); - featureBuilder.add(r.agency.name); - SimpleFeature feature = featureBuilder.buildFeature(null); - features.add(feature); - } - } - } - else - throw new Exception("Unknown export type."); - - collection = new ListFeatureCollection(collectionType, features); - - Transaction transaction = new DefaultTransaction("create"); - - String typeName = com.conveyal.datatools.editor.datastore.getTypeNames()[0]; - SimpleFeatureSource featureSource = com.conveyal.datatools.editor.datastore.getFeatureSource(typeName); - - if (featureSource instanceof SimpleFeatureStore) - { - SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource; - - featureStore.setTransaction(transaction); - - 
featureStore.addFeatures(collection); - transaction.commit(); - - transaction.close(); - } - else - { - throw new Exception(typeName + " does not support read/write access"); - } - - DirectoryZip.zip(outputDirectory, outputZipFile); - FileUtils.deleteDirectory(outputDirectory); - - gisExport.status = GisExportStatus.PROCESSED; - - gisExport.save(); - - } - catch(Exception e) - { - LOG.error("Unable to process GIS export: ", e.toString()); - e.printStackTrace(); - } - }*/ -} - - diff --git a/src/main/java/com/conveyal/datatools/manager/DataManager.java b/src/main/java/com/conveyal/datatools/manager/DataManager.java index 97651d6d0..8b9ccae41 100644 --- a/src/main/java/com/conveyal/datatools/manager/DataManager.java +++ b/src/main/java/com/conveyal/datatools/manager/DataManager.java @@ -56,6 +56,7 @@ import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt; import static com.conveyal.datatools.common.utils.SparkUtils.logRequest; import static com.conveyal.datatools.common.utils.SparkUtils.logResponse; +import static spark.Service.SPARK_DEFAULT_PORT; import static spark.Spark.after; import static spark.Spark.before; import static spark.Spark.exception; @@ -67,6 +68,7 @@ * referenced throughout the application. */ public class DataManager { + public static final String GTFS_PLUS_SUBDIR = "gtfsplus"; private static final Logger LOG = LoggerFactory.getLogger(DataManager.class); // These fields hold YAML files that represent the server configuration. @@ -104,7 +106,9 @@ public class DataManager { public static String commit = ""; public static boolean useS3; - private static final String API_PREFIX = "/api/manager/"; + public static final String API_PREFIX = "/api/manager/"; + // Application port defaults to Spark's default. + public static int PORT = SPARK_DEFAULT_PORT; private static final String GTFS_API_PREFIX = API_PREFIX + "secure/gtfs/"; private static final String EDITOR_API_PREFIX = "/api/editor/"; public static final String publicPath = "(" + API_PREFIX + "|" + EDITOR_API_PREFIX + ")public/.*"; @@ -132,8 +136,9 @@ static void initializeApplication(String[] args) throws IOException { LOG.info(FeedStore.class.getSimpleName()); // Optionally set port for server. Otherwise, Spark defaults to 4567. - if (getConfigProperty("application.port") != null) { - port(Integer.parseInt(getConfigPropertyAsText("application.port"))); + if (hasConfigProperty("application.port")) { + PORT = Integer.parseInt(getConfigPropertyAsText("application.port")); + port(PORT); } useS3 = "true".equals(getConfigPropertyAsText("application.data.use_s3_storage")); @@ -235,7 +240,6 @@ static void registerRoutes() throws IOException { new EditorControllerImpl(EDITOR_API_PREFIX, Table.STOPS, DataManager.GTFS_DATA_SOURCE); new EditorControllerImpl(EDITOR_API_PREFIX, Table.TRIPS, DataManager.GTFS_DATA_SOURCE); // TODO: Add transfers.txt controller? -// GisController.register(EDITOR_API_PREFIX); } // log all exceptions to system.out diff --git a/src/main/java/com/conveyal/datatools/manager/auth/Actions.java b/src/main/java/com/conveyal/datatools/manager/auth/Actions.java new file mode 100644 index 000000000..f59ebd1b2 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/auth/Actions.java @@ -0,0 +1,9 @@ +package com.conveyal.datatools.manager.auth; + +/** + * The set of request actions that a user can take on application entities. These are checked + * against the requesting user's permissions to ensure that they have permission to make the request. 
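+ *
+ * For example, FeedSourceController.checkFeedSourcePermissions maps VIEW to canViewFeed, EDIT to
+ * canEditGTFS, MANAGE to canManageFeed, and CREATE to canAdministerProject on the requesting
+ * user's profile.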
+ */ +public enum Actions { + CREATE, EDIT, MANAGE, VIEW +} diff --git a/src/main/java/com/conveyal/datatools/manager/auth/Auth0Connection.java b/src/main/java/com/conveyal/datatools/manager/auth/Auth0Connection.java index 560ddd458..80c5942ce 100644 --- a/src/main/java/com/conveyal/datatools/manager/auth/Auth0Connection.java +++ b/src/main/java/com/conveyal/datatools/manager/auth/Auth0Connection.java @@ -20,6 +20,7 @@ import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt; import static com.conveyal.datatools.manager.DataManager.getConfigPropertyAsText; import static com.conveyal.datatools.manager.DataManager.hasConfigProperty; +import static com.conveyal.datatools.manager.controllers.api.UserController.inTestingEnvironment; /** * This handles verifying the Auth0 token passed in the Auth header of Spark HTTP requests. @@ -43,9 +44,24 @@ public class Auth0Connection { * @param req Spark request object */ public static void checkUser(Request req) { - if (authDisabled()) { - // If in a development environment, assign a mock profile to request attribute and skip authentication. - req.attribute("user", new Auth0UserProfile("mock@example.com", "user_id:string")); + if (authDisabled() || inTestingEnvironment()) { + // If in a development or testing environment, assign a mock profile of an admin user to the request + // attribute and skip authentication. + Auth0UserProfile.DatatoolsInfo adminDatatoolsInfo = new Auth0UserProfile.DatatoolsInfo(); + adminDatatoolsInfo.setPermissions( + new Auth0UserProfile.Permission[]{ + new Auth0UserProfile.Permission("administer-application", new String[]{}) + } + ); + adminDatatoolsInfo.setClientId(DataManager.getConfigPropertyAsText("AUTH0_CLIENT_ID")); + + Auth0UserProfile.AppMetadata adminAppMetaData = new Auth0UserProfile.AppMetadata(); + adminAppMetaData.setDatatoolsInfo(adminDatatoolsInfo); + + Auth0UserProfile adminUser = new Auth0UserProfile("mock@example.com", "user_id:string"); + adminUser.setApp_metadata(adminAppMetaData); + + req.attribute("user", adminUser); return; } // Check that auth header is present and formatted correctly (Authorization: Bearer [token]). @@ -132,8 +148,11 @@ private static void remapTokenValues(Map jwt) { * tables in the database. */ public static void checkEditPrivileges(Request request) { - if (authDisabled()) { - // If in a development environment, skip privileges check. + if (authDisabled() || inTestingEnvironment()) { + // If in a development or testing environment, skip privileges check. This is done so that basically any API + // endpoint can function. 
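+            // (checkUser above takes the same shortcut, assigning a mock admin profile so that
+            // permission checks pass in development and testing environments.)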
+ // TODO: make unit tests of the below items or do some more stuff as mentioned in PR review here: + // https://github.com/conveyal/datatools-server/pull/187#discussion_r262714708 return; } Auth0UserProfile userProfile = request.attribute("user"); diff --git a/src/main/java/com/conveyal/datatools/manager/auth/Auth0UserProfile.java b/src/main/java/com/conveyal/datatools/manager/auth/Auth0UserProfile.java index f6e21918e..dc81f72cd 100644 --- a/src/main/java/com/conveyal/datatools/manager/auth/Auth0UserProfile.java +++ b/src/main/java/com/conveyal/datatools/manager/auth/Auth0UserProfile.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.ArrayList; import java.util.List; /** @@ -72,6 +73,14 @@ public AppMetadata() {} public void setDatatoolsInfo(DatatoolsInfo datatools) { if (Auth0Connection.authDisabled()) return; + // check if the datatools field hasn't yet been created. Although new users that get created automatically + // have this set, when running in a test environment, this won't be set, so it should be created. + if (this.datatools == null) { + this.datatools = new ArrayList<>(); + this.datatools.add(datatools); + return; + } + for(int i = 0; i < this.datatools.size(); i++) { if (this.datatools.get(i).clientId.equals(DataManager.getConfigPropertyAsText("AUTH0_CLIENT_ID"))) { this.datatools.set(i, datatools); diff --git a/src/main/java/com/conveyal/datatools/manager/auth/Auth0Users.java b/src/main/java/com/conveyal/datatools/manager/auth/Auth0Users.java index 43f0ef54d..d6fd46bd4 100644 --- a/src/main/java/com/conveyal/datatools/manager/auth/Auth0Users.java +++ b/src/main/java/com/conveyal/datatools/manager/auth/Auth0Users.java @@ -1,7 +1,6 @@ package com.conveyal.datatools.manager.auth; import com.conveyal.datatools.manager.DataManager; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.http.HttpResponse; @@ -16,7 +15,6 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; -import java.util.Collection; import java.util.HashSet; import java.util.Set; @@ -42,18 +40,18 @@ public class Auth0Users { private static URI getUrl(String searchQuery, int page, int perPage, boolean includeTotals) { // always filter users by datatools client_id String defaultQuery = "app_metadata.datatools.client_id:" + clientId; - URIBuilder builder = new URIBuilder(); - builder.setScheme("https").setHost(AUTH0_DOMAIN).setPath("/api/v2/users"); + URIBuilder builder = getURIBuilder(); + builder.setPath("/api/v2/users"); builder.setParameter("sort", "email:1"); builder.setParameter("per_page", Integer.toString(perPage)); builder.setParameter("page", Integer.toString(page)); builder.setParameter("include_totals", Boolean.toString(includeTotals)); if (searchQuery != null) { - builder.setParameter("search_engine", "v2"); + builder.setParameter("search_engine", "v3"); builder.setParameter("q", searchQuery + " AND " + defaultQuery); } else { - builder.setParameter("search_engine", "v2"); + builder.setParameter("search_engine", "v3"); builder.setParameter("q", defaultQuery); } @@ -82,20 +80,40 @@ private static String doRequest(URI uri) { request.addHeader("Authorization", "Bearer " + AUTH0_API_TOKEN); request.setHeader("Accept-Charset", charset); - HttpResponse response = null; + HttpResponse response; + + LOG.info("Making request: ({})", request.toString()); try 
{ response = client.execute(request); } catch (IOException e) { + LOG.error("An exception occurred while making a request to Auth0"); e.printStackTrace(); + return null; } String result = null; - try { - result = EntityUtils.toString(response.getEntity()); - } catch (IOException e) { - e.printStackTrace(); + if (response.getEntity() != null) { + try { + result = EntityUtils.toString(response.getEntity()); + } catch (IOException e) { + LOG.error("An exception occurred while parsing a response from Auth0"); + e.printStackTrace(); + } + } else { + LOG.warn("No response body available to parse from Auth0 request"); + } + + int statusCode = response.getStatusLine().getStatusCode(); + if(statusCode >= 300) { + LOG.warn( + "HTTP request to Auth0 returned error code >= 300: ({}). Body: {}", + request.toString(), + result != null ? result : "" + ); + } else { + LOG.info("Successfully made request: ({})", request.toString()); } return result; @@ -118,35 +136,17 @@ public static String getAuth0Users(String queryString) { return getAuth0Users(queryString, 0); } - /** - * Get all users for this application (using the default search). - */ - public static Collection getAll () { - Collection users = new HashSet<>(); - - // limited to the first 100 - URI uri = getUrl(null, 0, 100, false); - String response = doRequest(uri); - try { - users = mapper.readValue(response, new TypeReference>(){}); - } catch (IOException e) { - e.printStackTrace(); - } - return users; - } - /** * Get a single Auth0 user for the specified ID. */ public static Auth0UserProfile getUserById(String id) { - - URIBuilder builder = new URIBuilder(); - builder.setScheme("https").setHost(AUTH0_DOMAIN).setPath("/api/v2/users/" + id); + URIBuilder builder = getURIBuilder(); + builder.setPath("/api/v2/users/" + id); URI uri = null; try { uri = builder.build(); - } catch (URISyntaxException e) { + LOG.error("Unable to build URI to getUserById"); e.printStackTrace(); return null; } @@ -155,23 +155,35 @@ public static Auth0UserProfile getUserById(String id) { try { user = mapper.readValue(response, Auth0UserProfile.class); } catch (IOException e) { + LOG.error("Unable to parse user profile response from Auth0! Response: {}", response); e.printStackTrace(); } return user; } /** - * Get users subscribed to a given target ID. + * Creates a new uri builder and sets the scheme, port and host according to whether a test environment is in effect */ - public static String getUsersBySubscription(String subscriptionType, String target) { - return getAuth0Users("app_metadata.datatools.subscriptions.type:" + subscriptionType + " AND app_metadata.datatools.subscriptions.target:" + target); + private static URIBuilder getURIBuilder() { + URIBuilder builder = new URIBuilder(); + if (AUTH0_DOMAIN.equals("your-auth0-domain")) { + // set items for testing purposes assuming use of a Wiremock server + builder.setScheme("http"); + builder.setPort(8089); + builder.setHost("localhost"); + } else { + // use live Auth0 domain + builder.setScheme("https"); + builder.setHost(AUTH0_DOMAIN); + } + return builder; } /** - * Get users belong to a specified organization. + * Get users subscribed to a given target ID. 
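+     * The underlying Auth0 search query (built in the method body below) filters on
+     * app_metadata.datatools.subscriptions.type and app_metadata.datatools.subscriptions.target.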
*/ - public static String getUsersForOrganization(String organizationId) { - return getAuth0Users("app_metadata.datatools.organizations.organization_id:" + organizationId); + public static String getUsersBySubscription(String subscriptionType, String target) { + return getAuth0Users("app_metadata.datatools.subscriptions.type:" + subscriptionType + " AND app_metadata.datatools.subscriptions.target:" + target); } public static Set getVerifiedEmailsBySubscription(String subscriptionType, String target) { diff --git a/src/main/java/com/conveyal/datatools/manager/controllers/api/FeedSourceController.java b/src/main/java/com/conveyal/datatools/manager/controllers/api/FeedSourceController.java index 016493217..6882deb38 100644 --- a/src/main/java/com/conveyal/datatools/manager/controllers/api/FeedSourceController.java +++ b/src/main/java/com/conveyal/datatools/manager/controllers/api/FeedSourceController.java @@ -2,6 +2,7 @@ import com.conveyal.datatools.manager.DataManager; import com.conveyal.datatools.manager.auth.Auth0UserProfile; +import com.conveyal.datatools.manager.auth.Actions; import com.conveyal.datatools.manager.extensions.ExternalFeedResource; import com.conveyal.datatools.manager.jobs.FetchSingleFeedJob; import com.conveyal.datatools.manager.jobs.NotifyUsersForSubscriptionJob; @@ -45,7 +46,7 @@ public class FeedSourceController { private static ObjectMapper mapper = new ObjectMapper(); public static FeedSource getFeedSource(Request req, Response res) { - return requestFeedSourceById(req, "view"); + return requestFeedSourceById(req, Actions.VIEW); } public static Collection getAllFeedSources(Request req, Response res) { @@ -142,7 +143,7 @@ public static FeedSource updateFeedSource(Request req, Response res) { // call this method just for null and permissions check // TODO: it's wasteful to request the entire feed source here, need to factor out permissions checks. However, // we need the URL to see if it has been updated in order to then set the lastFetched value to null. - FeedSource formerFeedSource = requestFeedSourceById(req, "manage"); + FeedSource formerFeedSource = requestFeedSourceById(req, Actions.MANAGE); Document fieldsToUpdate = Document.parse(req.body()); if (fieldsToUpdate.containsKey("url") && formerFeedSource.url != null) { // Reset last fetched timestamp if the URL has been updated. @@ -174,7 +175,7 @@ public static FeedSource updateFeedSource(Request req, Response res) { * storage? This might should be refactored in the future, but it isn't really hurting anything at the moment. */ public static FeedSource updateExternalFeedResource(Request req, Response res) { - FeedSource source = requestFeedSourceById(req, "manage"); + FeedSource source = requestFeedSourceById(req, Actions.MANAGE); String resourceType = req.queryParams("resourceType"); JsonNode node = null; try { @@ -219,7 +220,7 @@ public static FeedSource updateExternalFeedResource(Request req, Response res) { * FIXME: Should this just set a "deleted" flag instead of removing from the database entirely? */ private static FeedSource deleteFeedSource(Request req, Response res) { - FeedSource source = requestFeedSourceById(req, "manage"); + FeedSource source = requestFeedSourceById(req, Actions.MANAGE); try { source.delete(); @@ -234,7 +235,7 @@ private static FeedSource deleteFeedSource(Request req, Response res) { * Re-fetch this feed from the feed source URL. 
*/ public static String fetch (Request req, Response res) { - FeedSource s = requestFeedSourceById(req, "manage"); + FeedSource s = requestFeedSourceById(req, Actions.MANAGE); LOG.info("Fetching feed for source {}", s.name); @@ -250,10 +251,10 @@ public static String fetch (Request req, Response res) { /** * Helper function returns feed source if user has permission for specified action. * @param req spark Request object from API request - * @param action action type (either "view" or "manage") + * @param action action type (either "view" or Permission.MANAGE) * @return feedsource object for ID */ - public static FeedSource requestFeedSourceById(Request req, String action) { + public static FeedSource requestFeedSourceById(Request req, Actions action) { String id = req.params("id"); if (id == null) { logMessageAndHalt(req, 400, "Please specify id param"); @@ -261,7 +262,7 @@ public static FeedSource requestFeedSourceById(Request req, String action) { return checkFeedSourcePermissions(req, Persistence.feedSources.getById(id), action); } - public static FeedSource checkFeedSourcePermissions(Request req, FeedSource feedSource, String action) { + public static FeedSource checkFeedSourcePermissions(Request req, FeedSource feedSource, Actions action) { Auth0UserProfile userProfile = req.attribute("user"); Boolean publicFilter = Boolean.valueOf(req.queryParams("public")) || req.url().split("/api/*/")[1].startsWith("public"); @@ -272,16 +273,16 @@ public static FeedSource checkFeedSourcePermissions(Request req, FeedSource feed String orgId = feedSource.organizationId(); boolean authorized; switch (action) { - case "create": + case CREATE: authorized = userProfile.canAdministerProject(feedSource.projectId, orgId); break; - case "manage": + case MANAGE: authorized = userProfile.canManageFeed(orgId, feedSource.projectId, feedSource.id); break; - case "edit": + case EDIT: authorized = userProfile.canEditGTFS(orgId, feedSource.projectId, feedSource.id); break; - case "view": + case VIEW: if (!publicFilter) { authorized = userProfile.canViewFeed(orgId, feedSource.projectId, feedSource.id); } else { @@ -299,7 +300,7 @@ public static FeedSource checkFeedSourcePermissions(Request req, FeedSource feed if (!feedSource.isPublic && !authorized) logMessageAndHalt(req, 403, "User not authorized to perform action on feed source"); // if feed is public, but action is managerial, halt (we shouldn't ever retrieveById here, but just in case) - else if (feedSource.isPublic && action.equals("manage")) + else if (feedSource.isPublic && action.equals(Actions.MANAGE)) logMessageAndHalt(req, 403, "User not authorized to perform action on feed source"); } diff --git a/src/main/java/com/conveyal/datatools/manager/controllers/api/FeedVersionController.java b/src/main/java/com/conveyal/datatools/manager/controllers/api/FeedVersionController.java index 268bd08a9..058d814b8 100644 --- a/src/main/java/com/conveyal/datatools/manager/controllers/api/FeedVersionController.java +++ b/src/main/java/com/conveyal/datatools/manager/controllers/api/FeedVersionController.java @@ -1,8 +1,13 @@ package com.conveyal.datatools.manager.controllers.api; +import com.conveyal.datatools.common.utils.SparkUtils; import com.conveyal.datatools.manager.DataManager; import com.conveyal.datatools.manager.auth.Auth0UserProfile; +import com.conveyal.datatools.manager.auth.Actions; import com.conveyal.datatools.manager.jobs.CreateFeedVersionFromSnapshotJob; +import com.conveyal.datatools.manager.jobs.GisExportJob; +import 
com.conveyal.datatools.manager.jobs.MergeFeedsJob; +import com.conveyal.datatools.manager.jobs.MergeFeedsType; import com.conveyal.datatools.manager.jobs.ProcessSingleFeedJob; import com.conveyal.datatools.manager.models.FeedDownloadToken; import com.conveyal.datatools.manager.models.FeedSource; @@ -13,6 +18,7 @@ import com.conveyal.datatools.manager.persistence.Persistence; import com.conveyal.datatools.manager.utils.HashUtils; import com.conveyal.datatools.manager.utils.json.JsonManager; + import com.fasterxml.jackson.databind.JsonNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,8 +27,13 @@ import javax.servlet.http.HttpServletResponse; import java.io.File; +import java.io.IOException; +import java.util.Arrays; import java.util.Collection; import java.util.Date; +import java.util.List; +import java.util.HashSet; +import java.util.Set; import static com.conveyal.datatools.common.utils.S3Utils.downloadFromS3; import static com.conveyal.datatools.common.utils.SparkUtils.copyRequestStreamIntoFile; @@ -30,6 +41,8 @@ import static com.conveyal.datatools.common.utils.SparkUtils.formatJobMessage; import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt; import static com.conveyal.datatools.manager.controllers.api.FeedSourceController.checkFeedSourcePermissions; +import static com.mongodb.client.model.Filters.eq; +import static com.conveyal.datatools.manager.jobs.MergeFeedsType.REGIONAL; import static spark.Spark.delete; import static spark.Spark.get; import static spark.Spark.post; @@ -37,11 +50,6 @@ public class FeedVersionController { - // TODO use this instead of stringly typed permissions - enum Permission { - VIEW, MANAGE - } - public static final Logger LOG = LoggerFactory.getLogger(FeedVersionController.class); public static JsonManager json = new JsonManager<>(FeedVersion.class, JsonViews.UserInterface.class); @@ -50,7 +58,7 @@ enum Permission { * If you pass in ?summarized=true, don't include the full tree of validation results, only the counts. */ private static FeedVersion getFeedVersion (Request req, Response res) { - return requestFeedVersion(req, "view"); + return requestFeedVersion(req, Actions.VIEW); } /** @@ -58,11 +66,11 @@ private static FeedVersion getFeedVersion (Request req, Response res) { */ private static Collection getAllFeedVersionsForFeedSource(Request req, Response res) { // Check permissions and get the FeedSource whose FeedVersions we want. 
- FeedSource feedSource = requestFeedSourceById(req, "view"); + FeedSource feedSource = requestFeedSourceById(req, Actions.VIEW); return feedSource.retrieveFeedVersions(); } - public static FeedSource requestFeedSourceById(Request req, String action, String paramName) { + public static FeedSource requestFeedSourceById(Request req, Actions action, String paramName) { String id = req.queryParams(paramName); if (id == null) { logMessageAndHalt(req, 400, "Please specify feedSourceId param"); @@ -70,7 +78,7 @@ public static FeedSource requestFeedSourceById(Request req, String action, Strin return checkFeedSourcePermissions(req, Persistence.feedSources.getById(id), action); } - private static FeedSource requestFeedSourceById(Request req, String action) { + private static FeedSource requestFeedSourceById(Request req, Actions action) { return requestFeedSourceById(req, action, "feedSourceId"); } @@ -88,7 +96,7 @@ private static FeedSource requestFeedSourceById(Request req, String action) { public static String createFeedVersionViaUpload(Request req, Response res) { Auth0UserProfile userProfile = req.attribute("user"); - FeedSource feedSource = requestFeedSourceById(req, "manage"); + FeedSource feedSource = requestFeedSourceById(req, Actions.MANAGE); FeedVersion latestVersion = feedSource.retrieveLatest(); FeedVersion newFeedVersion = new FeedVersion(feedSource); newFeedVersion.retrievalMethod = FeedSource.FeedRetrievalMethod.MANUALLY_UPLOADED; @@ -150,7 +158,7 @@ private static boolean createFeedVersionFromSnapshot (Request req, Response res) Auth0UserProfile userProfile = req.attribute("user"); // TODO: Should the ability to create a feedVersion from snapshot be controlled by the 'edit-gtfs' privilege? - FeedSource feedSource = requestFeedSourceById(req, "manage"); + FeedSource feedSource = requestFeedSourceById(req, Actions.MANAGE); Snapshot snapshot = Persistence.snapshots.getById(req.queryParams("snapshotId")); if (snapshot == null) { logMessageAndHalt(req, 400, "Must provide valid snapshot ID"); @@ -167,16 +175,16 @@ private static boolean createFeedVersionFromSnapshot (Request req, Response res) * Spark HTTP API handler that deletes a single feed version based on the ID in the request. 
*/ private static FeedVersion deleteFeedVersion(Request req, Response res) { - FeedVersion version = requestFeedVersion(req, "manage"); + FeedVersion version = requestFeedVersion(req, Actions.MANAGE); version.delete(); return version; } - private static FeedVersion requestFeedVersion(Request req, String action) { + private static FeedVersion requestFeedVersion(Request req, Actions action) { return requestFeedVersion(req, action, req.params("id")); } - public static FeedVersion requestFeedVersion(Request req, String action, String feedVersionId) { + public static FeedVersion requestFeedVersion(Request req, Actions action, String feedVersionId) { FeedVersion version = Persistence.feedVersions.getById(feedVersionId); if (version == null) { logMessageAndHalt(req, 404, "Feed version ID does not exist"); @@ -187,7 +195,7 @@ public static FeedVersion requestFeedVersion(Request req, String action, String } private static boolean renameFeedVersion (Request req, Response res) { - FeedVersion v = requestFeedVersion(req, "manage"); + FeedVersion v = requestFeedVersion(req, Actions.MANAGE); String name = req.queryParams("name"); if (name == null) { @@ -199,7 +207,7 @@ private static boolean renameFeedVersion (Request req, Response res) { } private static HttpServletResponse downloadFeedVersionDirectly(Request req, Response res) { - FeedVersion version = requestFeedVersion(req, "view"); + FeedVersion version = requestFeedVersion(req, Actions.VIEW); return downloadFile(version.retrieveGtfsFile(), version.id, req, res); } @@ -207,8 +215,8 @@ private static HttpServletResponse downloadFeedVersionDirectly(Request req, Resp * Returns credentials that a client may use to then download a feed version. Functionality * changes depending on whether application.data.use_s3_storage config property is true. */ - private static Object getFeedDownloadCredentials(Request req, Response res) { - FeedVersion version = requestFeedVersion(req, "view"); + private static Object getDownloadCredentials(Request req, Response res) { + FeedVersion version = requestFeedVersion(req, Actions.VIEW); if (DataManager.useS3) { // Return pre-signed download link if using S3. @@ -226,7 +234,7 @@ private static Object getFeedDownloadCredentials(Request req, Response res) { * FIXME! */ private static JsonNode validate (Request req, Response res) { - FeedVersion version = requestFeedVersion(req, "manage"); + FeedVersion version = requestFeedVersion(req, Actions.MANAGE); logMessageAndHalt(req, 400, "Validate endpoint not currently configured!"); // FIXME: Update for sql-loader validation process? return null; @@ -234,7 +242,7 @@ private static JsonNode validate (Request req, Response res) { } private static FeedVersion publishToExternalResource (Request req, Response res) { - FeedVersion version = requestFeedVersion(req, "manage"); + FeedVersion version = requestFeedVersion(req, Actions.MANAGE); // notify any extensions of the change try { @@ -262,6 +270,101 @@ private static FeedVersion publishToExternalResource (Request req, Response res) } } + /** + * HTTP endpoint to initiate an export of a shapefile containing the stops or routes of one or + * more feed versions. NOTE: the job ID returned must be used by the requester to download the + * zipped shapefile once the job has completed. 
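+     *
+     * Illustrative flow (type values are assumed to mirror the removed GisExport.Type, i.e. STOPS or
+     * ROUTES, and paths assume the default /api/manager/ prefix):
+     *   POST /api/manager/secure/feedversion/shapes?type=STOPS&feedId={feedVersionId1},{feedVersionId2}
+     *   GET  /api/manager/downloadshapes/{jobId}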
+ */ + private static String exportGis (Request req, Response res) throws IOException { + String type = req.queryParams("type"); + Auth0UserProfile userProfile = req.attribute("user"); + List feedIds = Arrays.asList(req.queryParams("feedId").split(",")); + File temp = File.createTempFile("gis_" + type, ".zip"); + // Create and run shapefile export. + GisExportJob gisExportJob = new GisExportJob( + GisExportJob.ExportType.valueOf(type), + temp, + feedIds, + userProfile.getUser_id() + ); + DataManager.heavyExecutor.execute(gisExportJob); + // Do not use S3 to store the file, which should only be stored ephemerally (until requesting + // user has downloaded file). + FeedDownloadToken token = new FeedDownloadToken(gisExportJob); + Persistence.tokens.create(token); + return SparkUtils.formatJobMessage(gisExportJob.jobId, "Generating shapefile."); + } + + /** + * Public HTTP endpoint to download a zipped shapefile of routes or stops for a set of feed + * versions using the job ID that was used for initially creating the exported shapes. + */ + private static HttpServletResponse downloadFeedVersionGis (Request req, Response res) { + FeedDownloadToken token = Persistence.tokens.getOneFiltered(eq("jobId", req.params("jobId"))); + File file = new File(token.filePath); + try { + return downloadFile(file, file.getName(), req, res); + } catch (Exception e) { + logMessageAndHalt(req, 500, + "Unknown error occurred while downloading feed version shapefile", e); + } finally { + if (!file.delete()) { + LOG.error("Could not delete shapefile {}. Storage issues may occur.", token.filePath); + } else { + LOG.info("Deleted shapefile {} following download.", token.filePath); + } + // Delete token. + Persistence.tokens.removeById(token.id); + } + return null; + } + + /** + * HTTP controller that handles merging multiple feed versions for a given feed source, with version IDs specified + * in a comma-separated string in the feedVersionIds query parameter and merge type specified in mergeType query + * parameter. NOTE: REGIONAL merge type should only be handled through {@link ProjectController#mergeProjectFeeds(Request, Response)}. + */ + private static String mergeFeedVersions(Request req, Response res) { + String[] versionIds = req.queryParams("feedVersionIds").split(","); + // Try to parse merge type (null or bad value throws IllegalArgumentException). + MergeFeedsType mergeType; + try { + mergeType = MergeFeedsType.valueOf(req.queryParams("mergeType")); + if (mergeType.equals(REGIONAL)) { + throw new IllegalArgumentException("Regional merge type is not permitted for this endpoint."); + } + } catch (IllegalArgumentException e) { + logMessageAndHalt(req, 400, "Must provide valid merge type.", e); + return null; + } + // Collect versions to merge (must belong to same feed source). + Set versions = new HashSet<>(); + String feedSourceId = null; + for (String id : versionIds) { + FeedVersion v = Persistence.feedVersions.getById(id); + if (v == null) { + logMessageAndHalt(req, + 400, + String.format("Must provide valid version ID. (No version exists for id=%s.)", id) + ); + } + // Store feed source id and check other versions for matching. 
+ if (feedSourceId == null) feedSourceId = v.feedSourceId; + else if (!v.feedSourceId.equals(feedSourceId)) { + logMessageAndHalt(req, 400, "Cannot merge versions with different parent feed sources."); + } + versions.add(v); + } + if (versionIds.length != 2) { + logMessageAndHalt(req, 400, "Merging more than two versions is not currently supported."); + } + // Kick off merge feeds job. + Auth0UserProfile userProfile = req.attribute("user"); + MergeFeedsJob mergeFeedsJob = new MergeFeedsJob(userProfile.getUser_id(), versions, "merged", mergeType); + DataManager.heavyExecutor.execute(mergeFeedsJob); + return SparkUtils.formatJobMessage(mergeFeedsJob.jobId, "Merging feed versions..."); + } + /** * Download locally stored feed version with token supplied by this application. This method is only used when * useS3 is set to false. Otherwise, a direct download from s3 should be used. @@ -294,19 +397,22 @@ public static void register (String apiPrefix) { // previous version of data tools. get(apiPrefix + "secure/feedversion/:id", FeedVersionController::getFeedVersion, json::write); get(apiPrefix + "secure/feedversion/:id/download", FeedVersionController::downloadFeedVersionDirectly); - get(apiPrefix + "secure/feedversion/:id/downloadtoken", FeedVersionController::getFeedDownloadCredentials, json::write); + get(apiPrefix + "secure/feedversion/:id/downloadtoken", FeedVersionController::getDownloadCredentials, json::write); post(apiPrefix + "secure/feedversion/:id/validate", FeedVersionController::validate, json::write); get(apiPrefix + "secure/feedversion", FeedVersionController::getAllFeedVersionsForFeedSource, json::write); post(apiPrefix + "secure/feedversion", FeedVersionController::createFeedVersionViaUpload, json::write); + post(apiPrefix + "secure/feedversion/shapes", FeedVersionController::exportGis, json::write); post(apiPrefix + "secure/feedversion/fromsnapshot", FeedVersionController::createFeedVersionFromSnapshot, json::write); put(apiPrefix + "secure/feedversion/:id/rename", FeedVersionController::renameFeedVersion, json::write); + put(apiPrefix + "secure/feedversion/merge", FeedVersionController::mergeFeedVersions, json::write); post(apiPrefix + "secure/feedversion/:id/publish", FeedVersionController::publishToExternalResource, json::write); delete(apiPrefix + "secure/feedversion/:id", FeedVersionController::deleteFeedVersion, json::write); get(apiPrefix + "public/feedversion", FeedVersionController::getAllFeedVersionsForFeedSource, json::write); - get(apiPrefix + "public/feedversion/:id/downloadtoken", FeedVersionController::getFeedDownloadCredentials, json::write); + get(apiPrefix + "public/feedversion/:id/downloadtoken", FeedVersionController::getDownloadCredentials, json::write); get(apiPrefix + "downloadfeed/:token", FeedVersionController::downloadFeedVersionWithToken); + get(apiPrefix + "downloadshapes/:jobId", FeedVersionController::downloadFeedVersionGis, json::write); } } diff --git a/src/main/java/com/conveyal/datatools/manager/controllers/api/GtfsPlusController.java b/src/main/java/com/conveyal/datatools/manager/controllers/api/GtfsPlusController.java index a3ec1a275..f340ccf32 100644 --- a/src/main/java/com/conveyal/datatools/manager/controllers/api/GtfsPlusController.java +++ b/src/main/java/com/conveyal/datatools/manager/controllers/api/GtfsPlusController.java @@ -1,33 +1,27 @@ package com.conveyal.datatools.manager.controllers.api; -import com.conveyal.datatools.common.utils.Consts; import com.conveyal.datatools.common.utils.SparkUtils; import 
com.conveyal.datatools.manager.DataManager; import com.conveyal.datatools.manager.auth.Auth0UserProfile; +import com.conveyal.datatools.manager.gtfsplus.ValidationIssue; import com.conveyal.datatools.manager.jobs.ProcessSingleFeedJob; import com.conveyal.datatools.manager.models.FeedVersion; import com.conveyal.datatools.manager.persistence.FeedStore; import com.conveyal.datatools.manager.persistence.Persistence; import com.conveyal.datatools.manager.utils.HashUtils; import com.conveyal.datatools.manager.utils.json.JsonUtil; -import com.conveyal.gtfs.GTFSFeed; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import spark.Request; import spark.Response; import javax.servlet.http.HttpServletResponse; -import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Serializable; -import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; import java.util.HashSet; @@ -41,6 +35,7 @@ import static com.conveyal.datatools.common.utils.SparkUtils.formatJobMessage; import static com.conveyal.datatools.common.utils.SparkUtils.copyRequestStreamIntoFile; import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt; +import static com.conveyal.datatools.manager.gtfsplus.GtfsPlusValidation.validateGtfsPlus; import static spark.Spark.get; import static spark.Spark.post; @@ -60,7 +55,7 @@ public class GtfsPlusController { public static final Logger LOG = LoggerFactory.getLogger(GtfsPlusController.class); - private static FeedStore gtfsPlusStore = new FeedStore("gtfsplus"); + private static final FeedStore gtfsPlusStore = new FeedStore(DataManager.GTFS_PLUS_SUBDIR); /** * Upload a GTFS+ file based on a specific feed version and replace (or create) @@ -83,7 +78,7 @@ private static HttpServletResponse getGtfsPlusFile(Request req, Response res) { // check for saved File file = gtfsPlusStore.getFeed(feedVersionId); - if(file == null) { + if (file == null) { return getGtfsPlusFromGtfs(feedVersionId, req, res); } LOG.info("Returning updated GTFS+ data"); @@ -101,7 +96,7 @@ private static HttpServletResponse getGtfsPlusFromGtfs(String feedVersionId, Req // create a set of valid GTFS+ table names Set gtfsPlusTables = new HashSet<>(); - for(int i = 0; i < DataManager.gtfsPlusConfig.size(); i++) { + for (int i = 0; i < DataManager.gtfsPlusConfig.size(); i++) { JsonNode tableNode = DataManager.gtfsPlusConfig.get(i); gtfsPlusTables.add(tableNode.get("name").asText()); } @@ -117,7 +112,7 @@ private static HttpServletResponse getGtfsPlusFromGtfs(String feedVersionId, Req byte[] buffer = new byte[512]; while (entries.hasMoreElements()) { final ZipEntry entry = entries.nextElement(); - if(!gtfsPlusTables.contains(entry.getName())) continue; + if (!gtfsPlusTables.contains(entry.getName())) continue; // create a new empty ZipEntry and copy the contents ZipEntry newEntry = new ZipEntry(entry.getName()); @@ -170,7 +165,7 @@ private static String publishGtfsPlusFile(Request req, Response res) { String feedVersionId = req.params("versionid"); LOG.info("Publishing GTFS+ for " + feedVersionId); File plusFile = gtfsPlusStore.getFeed(feedVersionId); - if(plusFile == null || !plusFile.exists()) { + if (plusFile == null || !plusFile.exists()) { logMessageAndHalt(req, 400, "No saved GTFS+ data for version"); } @@ -178,7 +173,7 
@@ private static String publishGtfsPlusFile(Request req, Response res) { // create a set of valid GTFS+ table names Set gtfsPlusTables = new HashSet<>(); - for(int i = 0; i < DataManager.gtfsPlusConfig.size(); i++) { + for (int i = 0; i < DataManager.gtfsPlusConfig.size(); i++) { JsonNode tableNode = DataManager.gtfsPlusConfig.get(i); gtfsPlusTables.add(tableNode.get("name").asText()); } @@ -196,7 +191,8 @@ private static String publishGtfsPlusFile(Request req, Response res) { byte[] buffer = new byte[512]; while (entries.hasMoreElements()) { final ZipEntry entry = entries.nextElement(); - if(gtfsPlusTables.contains(entry.getName()) || entry.getName().startsWith("_")) continue; // skip GTFS+ and non-standard tables + // skip GTFS+ and non-standard tables + if (gtfsPlusTables.contains(entry.getName()) || entry.getName().startsWith("_")) continue; // create a new empty ZipEntry and copy the contents ZipEntry newEntry = new ZipEntry(entry.getName()); @@ -255,164 +251,18 @@ private static String publishGtfsPlusFile(Request req, Response res) { /** * HTTP endpoint that validates GTFS+ tables for a specific feed version (or its saved/edited GTFS+). - * FIXME: For now this uses the MapDB-backed GTFSFeed class. Which actually suggests that this might - * should be contained within a MonitorableJob. */ private static Collection getGtfsPlusValidation(Request req, Response res) { String feedVersionId = req.params("versionid"); - LOG.info("Validating GTFS+ for " + feedVersionId); - FeedVersion feedVersion = Persistence.feedVersions.getById(feedVersionId); - List issues = new LinkedList<>(); - - - // load the main GTFS - // FIXME: Swap MapDB-backed GTFSFeed for use of SQL data? - GTFSFeed gtfsFeed = GTFSFeed.fromFile(feedVersion.retrieveGtfsFile().getAbsolutePath()); - // check for saved GTFS+ data - File file = gtfsPlusStore.getFeed(feedVersionId); - if (file == null) { - LOG.warn("GTFS+ file not found, loading from main version GTFS."); - file = feedVersion.retrieveGtfsFile(); - } - int gtfsPlusTableCount = 0; try { - ZipFile zipFile = new ZipFile(file); - final Enumeration entries = zipFile.entries(); - while (entries.hasMoreElements()) { - final ZipEntry entry = entries.nextElement(); - for(int i = 0; i < DataManager.gtfsPlusConfig.size(); i++) { - JsonNode tableNode = DataManager.gtfsPlusConfig.get(i); - if(tableNode.get("name").asText().equals(entry.getName())) { - LOG.info("Validating GTFS+ table: " + entry.getName()); - gtfsPlusTableCount++; - validateTable(issues, tableNode, zipFile.getInputStream(entry), gtfsFeed); - } - } - } - + issues = validateGtfsPlus(feedVersionId); } catch(IOException e) { logMessageAndHalt(req, 500, "Could not read GTFS+ zip file", e); } - LOG.info("GTFS+ tables found: {}/{}", gtfsPlusTableCount, DataManager.gtfsPlusConfig.size()); return issues; } - /** - * Validate a single GTFS+ table using the table specification found in gtfsplus.yml. 
- */ - private static void validateTable( - Collection issues, - JsonNode tableNode, - InputStream inputStream, - GTFSFeed gtfsFeed - ) throws IOException { - String tableId = tableNode.get("id").asText(); - BufferedReader in = new BufferedReader(new InputStreamReader(inputStream)); - String line = in.readLine(); - String[] fields = line.split(","); - List fieldList = Arrays.asList(fields); - JsonNode[] fieldNodes = new JsonNode[fields.length]; - JsonNode fieldsNode = tableNode.get("fields"); - for(int i = 0; i < fieldsNode.size(); i++) { - JsonNode fieldNode = fieldsNode.get(i); - int index = fieldList.indexOf(fieldNode.get("name").asText()); - if(index != -1) fieldNodes[index] = fieldNode; - } - - int rowIndex = 0; - while((line = in.readLine()) != null) { - String[] values = line.split(Consts.COLUMN_SPLIT, -1); - for(int v=0; v < values.length; v++) { - validateTableValue(issues, tableId, rowIndex, values[v], fieldNodes[v], gtfsFeed); - } - rowIndex++; - } - } - - private static void validateTableValue(Collection issues, String tableId, int rowIndex, String value, JsonNode fieldNode, GTFSFeed gtfsFeed) { - if(fieldNode == null) return; - String fieldName = fieldNode.get("name").asText(); - - if(fieldNode.get("required") != null && fieldNode.get("required").asBoolean()) { - if(value == null || value.length() == 0) { - issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Required field missing value")); - } - } - - switch(fieldNode.get("inputType").asText()) { - case "DROPDOWN": - boolean invalid = true; - ArrayNode options = (ArrayNode) fieldNode.get("options"); - for (JsonNode option : options) { - String optionValue = option.get("value").asText(); - - // NOTE: per client's request, this check has been made case insensitive - boolean valuesAreEqual = optionValue.equalsIgnoreCase(value); - - // if value is found in list of options, break out of loop - if (valuesAreEqual || (!fieldNode.get("required").asBoolean() && value.equals(""))) { - invalid = false; - break; - } - } - if (invalid) { - issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Value: " + value + " is not a valid option.")); - } - break; - case "TEXT": - // check if value exceeds max length requirement - if(fieldNode.get("maxLength") != null) { - int maxLength = fieldNode.get("maxLength").asInt(); - if(value.length() > maxLength) { - issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Text value exceeds the max. 
length of "+maxLength)); - } - } - break; - case "GTFS_ROUTE": - if(!gtfsFeed.routes.containsKey(value)) { - issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Route ID "+ value + " not found in GTFS")); - } - break; - case "GTFS_STOP": - if(!gtfsFeed.stops.containsKey(value)) { - issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Stop ID "+ value + " not found in GTFS")); - } - break; - case "GTFS_TRIP": - if(!gtfsFeed.trips.containsKey(value)) { - issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Trip ID "+ value + " not found in GTFS")); - } - break; - case "GTFS_FARE": - if(!gtfsFeed.fares.containsKey(value)) { - issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Fare ID "+ value + " not found in GTFS")); - } - break; - case "GTFS_SERVICE": - if(!gtfsFeed.services.containsKey(value)) { - issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Service ID "+ value + " not found in GTFS")); - } - break; - } - - } - - public static class ValidationIssue implements Serializable { - private static final long serialVersionUID = 1L; - public String tableId; - public String fieldName; - public int rowIndex; - public String description; - - public ValidationIssue(String tableId, String fieldName, int rowIndex, String description) { - this.tableId = tableId; - this.fieldName = fieldName; - this.rowIndex = rowIndex; - this.description = description; - } - } - public static void register(String apiPrefix) { post(apiPrefix + "secure/gtfsplus/:versionid", GtfsPlusController::uploadGtfsPlusFile, JsonUtil.objectMapper::writeValueAsString); get(apiPrefix + "secure/gtfsplus/:versionid", GtfsPlusController::getGtfsPlusFile); diff --git a/src/main/java/com/conveyal/datatools/manager/controllers/api/ProjectController.java b/src/main/java/com/conveyal/datatools/manager/controllers/api/ProjectController.java index c5caba46f..cc4db1c1c 100644 --- a/src/main/java/com/conveyal/datatools/manager/controllers/api/ProjectController.java +++ b/src/main/java/com/conveyal/datatools/manager/controllers/api/ProjectController.java @@ -5,8 +5,9 @@ import com.conveyal.datatools.manager.auth.Auth0UserProfile; import com.conveyal.datatools.manager.jobs.FetchProjectFeedsJob; import com.conveyal.datatools.manager.jobs.MakePublicJob; -import com.conveyal.datatools.manager.jobs.MergeProjectFeedsJob; +import com.conveyal.datatools.manager.jobs.MergeFeedsJob; import com.conveyal.datatools.manager.models.FeedDownloadToken; +import com.conveyal.datatools.manager.models.FeedSource; import com.conveyal.datatools.manager.models.FeedVersion; import com.conveyal.datatools.manager.models.JsonViews; import com.conveyal.datatools.manager.models.Project; @@ -20,6 +21,8 @@ import spark.Response; import java.util.Collection; +import java.util.HashSet; +import java.util.Set; import java.util.stream.Collectors; import static com.conveyal.datatools.common.utils.S3Utils.downloadFromS3; @@ -27,6 +30,7 @@ import static com.conveyal.datatools.common.utils.SparkUtils.formatJobMessage; import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt; import static com.conveyal.datatools.manager.DataManager.publicPath; +import static com.conveyal.datatools.manager.jobs.MergeFeedsType.REGIONAL; import static spark.Spark.delete; import static spark.Spark.get; import static spark.Spark.post; @@ -216,14 +220,28 @@ private static Project checkProjectPermissions(Request req, Project project, Str * to getFeedDownloadCredentials with the project ID to obtain either temporary S3 credentials or a 
download token * (depending on application configuration "application.data.use_s3_storage") to download the zip file. */ - private static String downloadMergedFeed(Request req, Response res) { + static String mergeProjectFeeds(Request req, Response res) { Project project = requestProjectById(req, "view"); Auth0UserProfile userProfile = req.attribute("user"); // TODO: make this an authenticated call? - MergeProjectFeedsJob mergeProjectFeedsJob = new MergeProjectFeedsJob(project, userProfile.getUser_id()); - DataManager.heavyExecutor.execute(mergeProjectFeedsJob); + Set feedVersions = new HashSet<>(); + // Get latest version for each feed source in project + Collection feedSources = project.retrieveProjectFeedSources(); + for (FeedSource fs : feedSources) { + // check if feed version exists + FeedVersion version = fs.retrieveLatest(); + if (version == null) { + LOG.warn("Skipping {} because it has no feed versions", fs.name); + continue; + } + // modify feed version to use prepended feed id + LOG.info("Adding {} feed to merged zip", fs.name); + feedVersions.add(version); + } + MergeFeedsJob mergeFeedsJob = new MergeFeedsJob(userProfile.getUser_id(), feedVersions, project.id, REGIONAL); + DataManager.heavyExecutor.execute(mergeFeedsJob); // Return job ID to requester for monitoring job status. - return formatJobMessage(mergeProjectFeedsJob.jobId, "Merge operation is processing."); + return formatJobMessage(mergeFeedsJob.jobId, "Merge operation is processing."); } /** @@ -310,7 +328,7 @@ public static void register (String apiPrefix) { post(apiPrefix + "secure/project/:id/fetch", ProjectController::fetch, json::write); post(apiPrefix + "secure/project/:id/deployPublic", ProjectController::publishPublicFeeds, json::write); - get(apiPrefix + "secure/project/:id/download", ProjectController::downloadMergedFeed); + get(apiPrefix + "secure/project/:id/download", ProjectController::mergeProjectFeeds); get(apiPrefix + "secure/project/:id/downloadtoken", ProjectController::getFeedDownloadCredentials, json::write); get(apiPrefix + "public/project/:id", ProjectController::getProject, json::write); diff --git a/src/main/java/com/conveyal/datatools/manager/controllers/api/UserController.java b/src/main/java/com/conveyal/datatools/manager/controllers/api/UserController.java index 0aa1e74f5..fbb49f823 100644 --- a/src/main/java/com/conveyal/datatools/manager/controllers/api/UserController.java +++ b/src/main/java/com/conveyal/datatools/manager/controllers/api/UserController.java @@ -53,15 +53,19 @@ */ public class UserController { - private static String AUTH0_DOMAIN = DataManager.getConfigPropertyAsText("AUTH0_DOMAIN"); - private static String AUTH0_CLIENT_ID = DataManager.getConfigPropertyAsText("AUTH0_CLIENT_ID"); - private static String AUTH0_API_TOKEN = DataManager.getConfigPropertyAsText("AUTH0_TOKEN"); + private static final String AUTH0_DOMAIN = DataManager.getConfigPropertyAsText("AUTH0_DOMAIN"); + private static final String AUTH0_CLIENT_ID = DataManager.getConfigPropertyAsText("AUTH0_CLIENT_ID"); + private static final String AUTH0_API_TOKEN = DataManager.getConfigPropertyAsText("AUTH0_TOKEN"); + static final int TEST_AUTH0_PORT = 8089; + static final String TEST_AUTH0_DOMAIN = String.format("localhost:%d", TEST_AUTH0_PORT); private static Logger LOG = LoggerFactory.getLogger(UserController.class); private static ObjectMapper mapper = new ObjectMapper(); - private static final String charset = "UTF-8"; - private static String baseUsersUrl = "https://" + AUTH0_DOMAIN + "/api/v2/users"; - public 
static JsonManager json = - new JsonManager<>(Project.class, JsonViews.UserInterface.class); + private static final String UTF_8 = "UTF-8"; + static final String USERS_PATH = "/api/v2/users"; + static final String DEFAULT_BASE_USERS_URL = "https://" + AUTH0_DOMAIN + USERS_PATH; + /** Users URL uses Auth0 domain by default, but can be overridden with {@link #setBaseUsersUrl(String)} for testing. */ + private static String baseUsersUrl = DEFAULT_BASE_USERS_URL; + private static final JsonManager json = new JsonManager<>(Project.class, JsonViews.UserInterface.class); /** * HTTP endpoint to get a single Auth0 user for the application (by specified ID param). Note, this uses a different @@ -73,6 +77,14 @@ private static String getUser(Request req, Response res) { return executeRequestAndGetResult(request, req); } + /** + * Determines whether the user controller is being run in a testing environment by checking if the users URL contains + * the {@link #TEST_AUTH0_DOMAIN}. + */ + public static boolean inTestingEnvironment() { + return baseUsersUrl.contains(TEST_AUTH0_DOMAIN); + } + /** * HTTP endpoint to get all users for the application (using a filtered search on all users for the Auth0 tenant). */ @@ -151,8 +163,6 @@ private static String createPublicUser(Request req, Response res) { /** * HTTP endpoint to create new Auth0 user for the application. - * - * FIXME: This endpoint fails if the user's email already exists in the Auth0 tenant. */ private static String createUser(Request req, Response res) { HttpPost request = new HttpPost(baseUsersUrl); @@ -174,6 +184,14 @@ private static String updateUser(Request req, Response res) { String userId = req.params("id"); Auth0UserProfile user = getUserById(userId); + if (user == null) { + logMessageAndHalt( + req, + 404, + String.format("Could not update user: User with id %s not found", userId) + ); + } + LOG.info("Updating user {}", user.getEmail()); HttpPatch request = new HttpPatch(getUserIdUrl(req)); @@ -287,7 +305,7 @@ private static Object getRecentActivity(Request req, Response res) { */ private static void setHeaders(HttpRequestBase request) { request.addHeader("Authorization", "Bearer " + AUTH0_API_TOKEN); - request.setHeader("Accept-Charset", charset); + request.setHeader("Accept-Charset", UTF_8); request.setHeader("Content-Type", "application/json"); } @@ -338,7 +356,7 @@ private static JsonNode parseJsonFromBody(Request req) { private static void setRequestEntityUsingJson(HttpEntityEnclosingRequestBase request, String json, Request req) { HttpEntity entity = null; try { - entity = new ByteArrayEntity(json.getBytes(charset)); + entity = new ByteArrayEntity(json.getBytes(UTF_8)); } catch (UnsupportedEncodingException e) { logMessageAndHalt( req, @@ -374,16 +392,9 @@ private static String executeRequestAndGetResult(HttpRequestBase httpRequest, Re ); } - int statusCode = response.getStatusLine().getStatusCode(); - if(statusCode >= 300) { - LOG.error("HTTP request returned error code >= 300: ({})", httpRequest.toString()); - logMessageAndHalt(req, statusCode, response.toString()); - } - // parse response body if there is one HttpEntity entity = response.getEntity(); String result = null; - if (entity != null) { try { result = EntityUtils.toString(entity); @@ -391,14 +402,39 @@ private static String executeRequestAndGetResult(HttpRequestBase httpRequest, Re logMessageAndHalt( req, 500, - String.format("Failed to parse result of http request (%s).", - httpRequest.toString() + String.format( + "Failed to parse result of http request (%s).", + 
httpRequest.toString() ), e ); } } + int statusCode = response.getStatusLine().getStatusCode(); + if(statusCode >= 300) { + LOG.error( + "HTTP request returned error code >= 300: ({}). Body: {}", + httpRequest.toString(), + result != null ? result : "" + ); + // attempt to parse auth0 response to respond with an error message + String auth0Message = "An Auth0 error occurred"; + JsonNode jsonResponse = null; + try { + jsonResponse = mapper.readTree(result); + } catch (IOException e) { + LOG.warn("Could not parse json from auth0 error message. Body: {}", result != null ? result : ""); + e.printStackTrace(); + } + + if (jsonResponse != null && jsonResponse.has("message")) { + auth0Message = String.format("%s: %s", auth0Message, jsonResponse.get("message").asText()); + } + + logMessageAndHalt(req, statusCode, auth0Message); + } + LOG.info("Successfully made request: ({})", httpRequest.toString()); return result; @@ -492,6 +528,14 @@ public FeedVersionCommentActivity(Note note, FeedSource feedSource, FeedVersion } } + /** + * Used to override the base url for making requests to Auth0. This is primarily used for testing purposes to set + * the url to something that is stubbed with WireMock. + */ + public static void setBaseUsersUrl (String url) { + baseUsersUrl = url; + } + public static void register (String apiPrefix) { get(apiPrefix + "secure/user/:id", UserController::getUser, json::write); get(apiPrefix + "secure/user/:id/recentactivity", UserController::getRecentActivity, json::write); diff --git a/src/main/java/com/conveyal/datatools/manager/extensions/ExternalFeedResource.java b/src/main/java/com/conveyal/datatools/manager/extensions/ExternalFeedResource.java index d3c0c61b6..c761449de 100644 --- a/src/main/java/com/conveyal/datatools/manager/extensions/ExternalFeedResource.java +++ b/src/main/java/com/conveyal/datatools/manager/extensions/ExternalFeedResource.java @@ -16,7 +16,7 @@ public interface ExternalFeedResource { public void importFeedsForProject(Project project, String authHeader) throws Exception; - public void feedSourceCreated(FeedSource source, String authHeader); + public void feedSourceCreated(FeedSource source, String authHeader) throws Exception; public void propertyUpdated(ExternalFeedSourceProperty property, String previousValue, String authHeader) throws IOException; diff --git a/src/main/java/com/conveyal/datatools/manager/extensions/mtc/MtcFeedResource.java b/src/main/java/com/conveyal/datatools/manager/extensions/mtc/MtcFeedResource.java index 822de30cc..99e2125b9 100644 --- a/src/main/java/com/conveyal/datatools/manager/extensions/mtc/MtcFeedResource.java +++ b/src/main/java/com/conveyal/datatools/manager/extensions/mtc/MtcFeedResource.java @@ -22,10 +22,20 @@ import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; +import java.util.Collection; import static com.conveyal.datatools.manager.models.ExternalFeedSourceProperty.constructId; /** + * This class implements the {@link ExternalFeedResource} interface for the MTC RTD database list of carriers (transit + * operators) and allows the Data Tools application to read and sync the list of carriers to a set of feed sources for a + * given project. 
+ * + * This is generally intended as an initialization step to importing feed sources into a project; however, it should + * support subsequent sync requests (e.g., if new agencies are expected in the external feed resource, syncing should + * import those OR if feed properties are expected to have changed in the external feed resource, they should be updated + * accordingly in Data Tools). + * * Created by demory on 3/30/16. */ public class MtcFeedResource implements ExternalFeedResource { @@ -48,11 +58,15 @@ public String getResourceType() { return RESOURCE_TYPE; } + /** + * Fetch the list of feeds from the MTC endpoint, create any feed sources that do not match on agencyID, and update + * the external feed source properties. + */ @Override public void importFeedsForProject(Project project, String authHeader) throws IOException, IllegalAccessException { URL url; ObjectMapper mapper = new ObjectMapper(); - // single list from MTC + // A single list of feeds is returned from the MTC Carrier endpoint. try { url = new URL(rtdApi + "/Carrier"); } catch(MalformedURLException ex) { @@ -61,83 +75,52 @@ public void importFeedsForProject(Project project, String authHeader) throws IOE } try { - HttpURLConnection con = (HttpURLConnection) url.openConnection(); - - // optional default is GET - con.setRequestMethod("GET"); - + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); //add request header - con.setRequestProperty("User-Agent", "User-Agent"); - + conn.setRequestProperty("User-Agent", "User-Agent"); // add auth header - LOG.info("authHeader="+authHeader); - con.setRequestProperty("Authorization", authHeader); - - int responseCode = con.getResponseCode(); - LOG.info("Sending 'GET' request to URL : " + url); - LOG.info("Response Code : " + responseCode); + conn.setRequestProperty("Authorization", authHeader); - BufferedReader in = new BufferedReader( - new InputStreamReader(con.getInputStream())); - String inputLine; - StringBuffer response = new StringBuffer(); - - while ((inputLine = in.readLine()) != null) { - response.append(inputLine); - } - in.close(); - - String json = response.toString(); - RtdCarrier[] results = mapper.readValue(json, RtdCarrier[].class); - for (int i = 0; i < results.length; i++) { - // String className = "RtdCarrier"; - // Object car = Class.forName(className).newInstance(); - RtdCarrier car = results[i]; - //LOG.info("car id=" + car.AgencyId + " name=" + car.AgencyName); + LOG.info("Sending 'GET' request to URL : {}", url); + LOG.info("Response Code : {}", conn.getResponseCode()); + RtdCarrier[] carriers = mapper.readValue(conn.getInputStream(), RtdCarrier[].class); + Collection projectFeedSources = project.retrieveProjectFeedSources(); + // Iterate over carriers found in response and update properties. Also, create a feed source for any carriers + // found in the response that do not correspond to an agency ID found in the external feed source properties. + for (int i = 0; i < carriers.length; i++) { + RtdCarrier carrier = carriers[i]; FeedSource source = null; - // check if a FeedSource with this AgencyId already exists - for (FeedSource existingSource : project.retrieveProjectFeedSources()) { + // Check if a FeedSource with this AgencyId already exists. 
+ for (FeedSource existingSource : projectFeedSources) { ExternalFeedSourceProperty agencyIdProp; - agencyIdProp = Persistence.externalFeedSourceProperties.getById(constructId(existingSource, this.getResourceType(), - AGENCY_ID_FIELDNAME)); - if (agencyIdProp != null && agencyIdProp.value != null && agencyIdProp.value.equals(car.AgencyId)) { - //LOG.info("already exists: " + car.AgencyId); + String propertyId = constructId(existingSource, this.getResourceType(), AGENCY_ID_FIELDNAME); + agencyIdProp = Persistence.externalFeedSourceProperties.getById(propertyId); + if (agencyIdProp != null && agencyIdProp.value != null && agencyIdProp.value.equals(carrier.AgencyId)) { source = existingSource; } } - - String feedName; - if (car.AgencyName != null) { - feedName = car.AgencyName; - } else if (car.AgencyShortName != null) { - feedName = car.AgencyShortName; - } else { - feedName = car.AgencyId; - } - + // Feed source does not exist. Create one using carrier properties. if (source == null) { + // Derive the name from carrier properties found in response. + String feedName = carrier.AgencyName != null + ? carrier.AgencyName + : carrier.AgencyShortName != null + ? carrier.AgencyShortName + : carrier.AgencyId; + // Create new feed source to store in application database. source = new FeedSource(feedName); + source.projectId = project.id; + LOG.info("Creating feed source {} from carrier response. (Did not previously exist.)", feedName); + // Store the feed source if it does not already exist. + Persistence.feedSources.create(source); } - else source.name = feedName; - - source.projectId = project.id; - // Store the feed source. - Persistence.feedSources.create(source); - - // create / update the properties - - for(Field carrierField : car.getClass().getDeclaredFields()) { - String fieldName = carrierField.getName(); - String fieldValue = carrierField.get(car) != null ? carrierField.get(car).toString() : null; - ExternalFeedSourceProperty prop = new ExternalFeedSourceProperty(source, this.getResourceType(), fieldName, fieldValue); - if (Persistence.externalFeedSourceProperties.getById(prop.id) == null) { - Persistence.externalFeedSourceProperties.create(prop); - } else { - Persistence.externalFeedSourceProperties.updateField(prop.id, fieldName, fieldValue); - } - } + // TODO: Does any property on the feed source need to be updated from the carrier (e.g., name)? + + // Create / update the properties + LOG.info("Updating props for {}", source.name); + carrier.updateFields(source); } } catch(Exception ex) { LOG.error("Could not read feeds from MTC RTD API"); @@ -146,12 +129,15 @@ public void importFeedsForProject(Project project, String authHeader) throws IOE } /** - * Do nothing for now. Creating a new agency for RTD requires adding the AgencyId property (when it was previously - * null. See {@link #propertyUpdated(ExternalFeedSourceProperty, String, String)}. + * Generate blank external feed resource properties when a new feed source is created. Creating a new agency for RTD + * requires adding the AgencyId property (when it was previously null). See {@link #propertyUpdated(ExternalFeedSourceProperty, String, String)}. */ @Override - public void feedSourceCreated(FeedSource source, String authHeader) { - LOG.info("Processing new FeedSource {} for RTD. (No action taken.)", source.name); + public void feedSourceCreated(FeedSource source, String authHeader) throws IllegalAccessException { + LOG.info("Processing new FeedSource {} for RTD.
Empty external feed properties being generated.", source.name); + // Create a blank carrier and update fields (will initialize all fields to null). + RtdCarrier carrier = new RtdCarrier(); + carrier.updateFields(source); } /** diff --git a/src/main/java/com/conveyal/datatools/manager/extensions/mtc/RtdCarrier.java b/src/main/java/com/conveyal/datatools/manager/extensions/mtc/RtdCarrier.java index 768acdf93..920d7ee32 100644 --- a/src/main/java/com/conveyal/datatools/manager/extensions/mtc/RtdCarrier.java +++ b/src/main/java/com/conveyal/datatools/manager/extensions/mtc/RtdCarrier.java @@ -1,12 +1,17 @@ package com.conveyal.datatools.manager.extensions.mtc; +import com.conveyal.datatools.manager.models.ExternalFeedSourceProperty; import com.conveyal.datatools.manager.models.FeedSource; import com.conveyal.datatools.manager.persistence.Persistence; import com.fasterxml.jackson.annotation.JsonProperty; +import java.lang.reflect.Field; + import static com.conveyal.datatools.manager.models.ExternalFeedSourceProperty.constructId; /** + * Represents all of the properties persisted on a carrier record by the external MTC database known as RTD. + * * Created by demory on 3/30/16. */ @@ -63,11 +68,12 @@ public class RtdCarrier { @JsonProperty String EditedDate; + /** Empty constructor needed for serialization (also used to create empty carrier). */ public RtdCarrier() { } /** - * Construct an RtdCarrier given the provided feed source. + * Construct an RtdCarrier given the provided feed source and initialize all field values from MongoDB. * @param source */ public RtdCarrier(FeedSource source) { @@ -93,9 +99,38 @@ private String getPropId(FeedSource source, String fieldName) { } /** - * FIXME: Are there cases where this might throw NPEs? + * Get the value stored in the database for a particular field. + * + * TODO: Are there cases where this might throw NPEs? */ private String getValueForField (FeedSource source, String fieldName) { return Persistence.externalFeedSourceProperties.getById(getPropId(source, fieldName)).value; } + + /** + * Use reflection to update (or create if field does not exist) all fields for a carrier instance and provided feed + * source. + * + * TODO: Perhaps we should not be using reflection, but it works pretty well here. + */ + public void updateFields(FeedSource feedSource) throws IllegalAccessException { + // Using reflection, iterate over every field in the class. + for(Field carrierField : this.getClass().getDeclaredFields()) { + String fieldName = carrierField.getName(); + String fieldValue = carrierField.get(this) != null ? carrierField.get(this).toString() : null; + // Construct external feed source property for field with value from carrier. + ExternalFeedSourceProperty prop = new ExternalFeedSourceProperty( + feedSource, + MtcFeedResource.RESOURCE_TYPE, + fieldName, + fieldValue + ); + // If field does not exist, create it. Otherwise, update value. 
+ if (Persistence.externalFeedSourceProperties.getById(prop.id) == null) { + Persistence.externalFeedSourceProperties.create(prop); + } else { + Persistence.externalFeedSourceProperties.updateField(prop.id, fieldName, fieldValue); + } + } + } } \ No newline at end of file diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/GtfsPlusValidation.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/GtfsPlusValidation.java new file mode 100644 index 000000000..0bb0ed27f --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/GtfsPlusValidation.java @@ -0,0 +1,197 @@ +package com.conveyal.datatools.manager.gtfsplus; + +import com.conveyal.datatools.common.utils.Consts; +import com.conveyal.datatools.manager.DataManager; +import com.conveyal.datatools.manager.models.FeedVersion; +import com.conveyal.datatools.manager.persistence.FeedStore; +import com.conveyal.datatools.manager.persistence.Persistence; +import com.conveyal.gtfs.GTFSFeed; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.Arrays; +import java.util.Collection; +import java.util.Enumeration; +import java.util.LinkedList; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; + +public class GtfsPlusValidation { + public static final Logger LOG = LoggerFactory.getLogger(GtfsPlusValidation.class); + private static final FeedStore gtfsPlusStore = new FeedStore(DataManager.GTFS_PLUS_SUBDIR); + private static final String NOT_FOUND = "not found in GTFS"; + + /** + * Validate a GTFS+ feed and return a list of issues encountered. + * FIXME: For now this uses the MapDB-backed GTFSFeed class, which suggests that this validation + * should perhaps be contained within a MonitorableJob. + */ + public static List<ValidationIssue> validateGtfsPlus (String feedVersionId) throws IOException { + if (!DataManager.isModuleEnabled("gtfsplus")) { + throw new IllegalStateException("GTFS+ module must be enabled in server.yml to run GTFS+ validation."); + } + List<ValidationIssue> issues = new LinkedList<>(); + LOG.info("Validating GTFS+ for " + feedVersionId); + FeedVersion feedVersion = Persistence.feedVersions.getById(feedVersionId); + // Load the main GTFS file. + // FIXME: Swap MapDB-backed GTFSFeed for use of SQL data?
+ GTFSFeed gtfsFeed = GTFSFeed.fromFile(feedVersion.retrieveGtfsFile().getAbsolutePath()); + // check for saved GTFS+ data + File file = gtfsPlusStore.getFeed(feedVersionId); + if (file == null) { + LOG.warn("GTFS+ file not found, loading from main version GTFS."); + file = feedVersion.retrieveGtfsFile(); + } + int gtfsPlusTableCount = 0; + ZipFile zipFile = new ZipFile(file); + final Enumeration entries = zipFile.entries(); + while (entries.hasMoreElements()) { + final ZipEntry entry = entries.nextElement(); + for (int i = 0; i < DataManager.gtfsPlusConfig.size(); i++) { + JsonNode tableNode = DataManager.gtfsPlusConfig.get(i); + if (tableNode.get("name").asText().equals(entry.getName())) { + LOG.info("Validating GTFS+ table: " + entry.getName()); + gtfsPlusTableCount++; + validateTable(issues, tableNode, zipFile.getInputStream(entry), gtfsFeed); + } + } + } + LOG.info("GTFS+ tables found: {}/{}", gtfsPlusTableCount, DataManager.gtfsPlusConfig.size()); + return issues; + } + + /** + * Validate a single GTFS+ table using the table specification found in gtfsplus.yml. + */ + private static void validateTable( + Collection issues, + JsonNode specTable, + InputStream inputStreamToValidate, + GTFSFeed gtfsFeed + ) throws IOException { + String tableId = specTable.get("id").asText(); + // Read in table data from input stream. + BufferedReader in = new BufferedReader(new InputStreamReader(inputStreamToValidate)); + String line = in.readLine(); + String[] inputHeaders = line.split(","); + List fieldList = Arrays.asList(inputHeaders); + JsonNode[] fieldsFounds = new JsonNode[inputHeaders.length]; + JsonNode specFields = specTable.get("fields"); + // Iterate over spec fields and check that there are no missing required fields. + for (int i = 0; i < specFields.size(); i++) { + JsonNode specField = specFields.get(i); + String fieldName = specField.get("name").asText(); + int index = fieldList.indexOf(fieldName); + if (index != -1) { + // Add spec field for each field found. + fieldsFounds[index] = specField; + } else if (isRequired(specField)) { + // If spec field not found, check that missing field was not required. + issues.add(new ValidationIssue(tableId, fieldName, -1, "Required column missing.")); + } + } + // Iterate over each row and validate each field value. + int rowIndex = 0; + while ((line = in.readLine()) != null) { + String[] values = line.split(Consts.COLUMN_SPLIT, -1); + for (int v = 0; v < values.length; v++) { + validateTableValue(issues, tableId, rowIndex, values[v], fieldsFounds[v], gtfsFeed); + } + rowIndex++; + } + } + + /** Determine if a GTFS+ spec field is required. */ + private static boolean isRequired(JsonNode specField) { + return specField.get("required") != null && specField.get("required").asBoolean(); + } + + /** Validate a single value for a GTFS+ table. 
*/ + private static void validateTableValue( + Collection issues, + String tableId, + int rowIndex, + String value, + JsonNode specField, + GTFSFeed gtfsFeed + ) { + if (specField == null) return; + String fieldName = specField.get("name").asText(); + + if (isRequired(specField)) { + if (value == null || value.length() == 0) { + issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Required field missing value")); + } + } + + switch(specField.get("inputType").asText()) { + case "DROPDOWN": + boolean invalid = true; + ArrayNode options = (ArrayNode) specField.get("options"); + for (JsonNode option : options) { + String optionValue = option.get("value").asText(); + + // NOTE: per client's request, this check has been made case insensitive + boolean valuesAreEqual = optionValue.equalsIgnoreCase(value); + + // if value is found in list of options, break out of loop + if (valuesAreEqual || (!isRequired(specField) && "".equals(value))) { + invalid = false; + break; + } + } + if (invalid) { + issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Value: " + value + " is not a valid option.")); + } + break; + case "TEXT": + // check if value exceeds max length requirement + if (specField.get("maxLength") != null) { + int maxLength = specField.get("maxLength").asInt(); + if (value != null && value.length() > maxLength) { + issues.add(new ValidationIssue(tableId, fieldName, rowIndex, "Text value exceeds the max. length of " + maxLength)); + } + } + break; + case "GTFS_ROUTE": + if (!gtfsFeed.routes.containsKey(value)) { + issues.add(new ValidationIssue(tableId, fieldName, rowIndex, missingIdText(value, "Route"))); + } + break; + case "GTFS_STOP": + if (!gtfsFeed.stops.containsKey(value)) { + issues.add(new ValidationIssue(tableId, fieldName, rowIndex, missingIdText(value, "Stop"))); + } + break; + case "GTFS_TRIP": + if (!gtfsFeed.trips.containsKey(value)) { + issues.add(new ValidationIssue(tableId, fieldName, rowIndex, missingIdText(value, "Trip"))); + } + break; + case "GTFS_FARE": + if (!gtfsFeed.fares.containsKey(value)) { + issues.add(new ValidationIssue(tableId, fieldName, rowIndex, missingIdText(value, "Fare"))); + } + break; + case "GTFS_SERVICE": + if (!gtfsFeed.services.containsKey(value)) { + issues.add(new ValidationIssue(tableId, fieldName, rowIndex, missingIdText(value, "Service"))); + } + break; + } + + } + + /** Construct missing ID text for validation issue description. */ + private static String missingIdText(String value, String entity) { + return String.join(" ", entity, "ID", value, NOT_FOUND); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/ValidationIssue.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/ValidationIssue.java new file mode 100644 index 000000000..b835a3996 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/ValidationIssue.java @@ -0,0 +1,19 @@ +package com.conveyal.datatools.manager.gtfsplus; + +import java.io.Serializable; + +/** A validation issue for a GTFS+ field. Use rowIndex = -1 for a table level issue. 
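+ * For example (values here are illustrative only, not taken from the gtfsplus.yml spec config), a required
+ * column missing from an entire uploaded table could be reported as:
+ *   new ValidationIssue("stop_attributes", "stop_city", -1, "Required column missing.")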
*/ +public class ValidationIssue implements Serializable { + private static final long serialVersionUID = 1L; + public String tableId; + public String fieldName; + public int rowIndex; + public String description; + + public ValidationIssue(String tableId, String fieldName, int rowIndex, String description) { + this.tableId = tableId; + this.fieldName = fieldName; + this.rowIndex = rowIndex; + this.description = description; + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/CalendarAttribute.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/CalendarAttribute.java new file mode 100644 index 000000000..ed6c4b7f1 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/CalendarAttribute.java @@ -0,0 +1,20 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import javax.naming.OperationNotSupportedException; +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class CalendarAttribute extends Entity { + + private static final long serialVersionUID = 1L; + + public String service_id; + public String service_description; + + @Override public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/Direction.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/Direction.java new file mode 100644 index 000000000..0b83f6e9c --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/Direction.java @@ -0,0 +1,22 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class Direction extends Entity { + + private static final long serialVersionUID = 1L; + + public String route_id; + public int direction_id; + public String direction; + + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/FareRiderCategory.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/FareRiderCategory.java new file mode 100644 index 000000000..133d2b60f --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/FareRiderCategory.java @@ -0,0 +1,24 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.time.LocalDate; + +public class FareRiderCategory extends Entity { + + private static final long serialVersionUID = 1L; + + public String fare_id; + public int rider_category_id; + public double price; + public LocalDate expiration_date; + public LocalDate commencement_date; + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git 
a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/FareZoneAttribute.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/FareZoneAttribute.java new file mode 100644 index 000000000..4d586f296 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/FareZoneAttribute.java @@ -0,0 +1,20 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class FareZoneAttribute extends Entity { + + private static final long serialVersionUID = 1L; + + public String zone_id; + public String zone_name; + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/GtfsPlusTable.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/GtfsPlusTable.java new file mode 100644 index 000000000..23031b2fd --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/GtfsPlusTable.java @@ -0,0 +1,98 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.loader.DateField; +import com.conveyal.gtfs.loader.DoubleField; +import com.conveyal.gtfs.loader.IntegerField; +import com.conveyal.gtfs.loader.ShortField; +import com.conveyal.gtfs.loader.StringField; +import com.conveyal.gtfs.loader.Table; + +import static com.conveyal.gtfs.loader.Requirement.OPTIONAL; +import static com.conveyal.gtfs.loader.Requirement.PROPRIETARY; +import static com.conveyal.gtfs.loader.Requirement.REQUIRED; + +/** + * This class contains GTFS+ table definitions that are based on gtfs-lib's {@link Table} constants. + * Currently, these are only used when operating on tables being merged within + * {@link com.conveyal.datatools.manager.jobs.MergeFeedsJob}. The definition of these tables can be + * found at https://www.transitwiki.org/TransitWiki/images/e/e7/GTFS%2B_Additional_Files_Format_Ver_1.7.pdf. 
+ */ +public class GtfsPlusTable { + public static final Table REALTIME_ROUTES = new Table("realtime_routes", RealtimeRoute.class, PROPRIETARY, + new StringField("route_id", REQUIRED).isReferenceTo(Table.ROUTES), + new ShortField("realtime_enabled", REQUIRED, 1), + new StringField("realtime_routename", REQUIRED), + new StringField("realtime_routecode", REQUIRED) + ); + + public static final Table REALTIME_STOPS = new Table("realtime_stops", RealtimeStop.class, PROPRIETARY, + new StringField("trip_id", REQUIRED).isReferenceTo(Table.TRIPS), + new StringField("stop_id", REQUIRED).isReferenceTo(Table.STOPS), + new StringField("realtime_stop_id", REQUIRED) + ).keyFieldIsNotUnique(); + + public static final Table DIRECTIONS = new Table("directions", Direction.class, PROPRIETARY, + new StringField("route_id", REQUIRED).isReferenceTo(Table.ROUTES), + new ShortField("direction_id", REQUIRED, 1), + new StringField("direction", REQUIRED)) + .keyFieldIsNotUnique() + .hasCompoundKey(); + + public static final Table REALTIME_TRIPS = new Table("realtime_trips", RealtimeTrip.class, PROPRIETARY, + new StringField("trip_id", REQUIRED).isReferenceTo(Table.TRIPS), + new StringField("realtime_trip_id", REQUIRED) + ); + + public static final Table STOP_ATTRIBUTES = new Table("stop_attributes", StopAttribute.class, PROPRIETARY, + new StringField("stop_id", REQUIRED).isReferenceTo(Table.STOPS), + new ShortField("accessibility_id", REQUIRED, 8), + new StringField("cardinal_direction", OPTIONAL), + new StringField("relative_position", OPTIONAL), + new StringField("stop_city", REQUIRED) + ); + + public static final Table TIMEPOINTS = new Table("timepoints", TimePoint.class, PROPRIETARY, + new StringField("trip_id", REQUIRED).isReferenceTo(Table.TRIPS), + new StringField("stop_id", REQUIRED).isReferenceTo(Table.STOPS) + ).keyFieldIsNotUnique(); + + public static final Table RIDER_CATEGORIES = new Table("rider_categories", RiderCategory.class, PROPRIETARY, + new IntegerField("rider_category_id", REQUIRED, 1, 25), + new StringField("rider_category_description", REQUIRED) + ); + + public static final Table FARE_RIDER_CATEGORIES = new Table("fare_rider_categories", FareRiderCategory.class, PROPRIETARY, + new StringField("fare_id", REQUIRED), + new IntegerField("rider_category_id", REQUIRED, 2, 25).isReferenceTo(RIDER_CATEGORIES), + new DoubleField("price", REQUIRED, 0, Double.MAX_VALUE, 2), + new DateField("expiration_date", OPTIONAL), + new DateField("commencement_date", OPTIONAL) + ).keyFieldIsNotUnique(); + + public static final Table CALENDAR_ATTRIBUTES = new Table("calendar_attributes", CalendarAttribute.class, PROPRIETARY, + new StringField("service_id", REQUIRED).isReferenceTo(Table.CALENDAR), + new StringField("service_description", REQUIRED) + ); + + public static final Table FAREZONE_ATTRIBUTES = new Table("farezone_attributes", FareZoneAttribute.class, PROPRIETARY, + new StringField("zone_id", REQUIRED), + new StringField("zone_name", REQUIRED) + ); + + /** + * List of tables in the order such that internal references can be appropriately checked as + * tables are loaded/encountered. 
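+ * For example, RIDER_CATEGORIES appears before FARE_RIDER_CATEGORIES because the
+ * fare_rider_categories table declares rider_category_id as a reference to rider_categories.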
+ */ + public static final Table[] tables = new Table[] { + REALTIME_ROUTES, + REALTIME_STOPS, + REALTIME_TRIPS, + DIRECTIONS, + STOP_ATTRIBUTES, + TIMEPOINTS, + RIDER_CATEGORIES, + FARE_RIDER_CATEGORIES, + CALENDAR_ATTRIBUTES, + FAREZONE_ATTRIBUTES + }; +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeRoute.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeRoute.java new file mode 100644 index 000000000..49050a05f --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeRoute.java @@ -0,0 +1,21 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class RealtimeRoute extends Entity { + private static final long serialVersionUID = 1L; + + public String route_id; + public int realtime_enabled; + public String realtime_routename; + public String realtime_routecode; + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeStop.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeStop.java new file mode 100644 index 000000000..fd61db1f3 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeStop.java @@ -0,0 +1,21 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class RealtimeStop extends Entity { + + private static final long serialVersionUID = 1L; + + public String trip_id; + public String stop_id; + public String realtime_stop_id; + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeTrip.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeTrip.java new file mode 100644 index 000000000..3c52ae7de --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RealtimeTrip.java @@ -0,0 +1,20 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class RealtimeTrip extends Entity { + + private static final long serialVersionUID = 1L; + + public String trip_id; + public String realtime_trip_id; + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RiderCategory.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RiderCategory.java new file mode 100644 index 000000000..d30705ee2 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/RiderCategory.java @@ -0,0 +1,20 @@ +package 
com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class RiderCategory extends Entity { + + private static final long serialVersionUID = 1L; + + public int rider_category_id; + public String rider_category_description; + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/StopAttribute.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/StopAttribute.java new file mode 100644 index 000000000..e11714f2c --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/StopAttribute.java @@ -0,0 +1,23 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class StopAttribute extends Entity { + + private static final long serialVersionUID = 1L; + + public String stop_id; + public int accessibility_id; + public String cardinal_direction; + public String relative_position; + public String stop_city; + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/TimePoint.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/TimePoint.java new file mode 100644 index 000000000..b1b71abf8 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/TimePoint.java @@ -0,0 +1,20 @@ +package com.conveyal.datatools.manager.gtfsplus.tables; + +import com.conveyal.gtfs.model.Entity; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +public class TimePoint extends Entity { + + private static final long serialVersionUID = 1L; + + public String trip_id; + public String stop_id; + + @Override + public void setStatementParameters(PreparedStatement statement, boolean setDefaultId) throws SQLException { + throw new UnsupportedOperationException( + "Cannot call setStatementParameters because loading a GTFS+ table into RDBMS is unsupported."); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/package-info.java b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/package-info.java new file mode 100644 index 000000000..aab21a404 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/gtfsplus/tables/package-info.java @@ -0,0 +1,16 @@ +/** + * This package contains classes that correspond to those found for GTFS entity types in + * {@link com.conveyal.gtfs.model}, but for GTFS+ entity types. It also contains + * {@link com.conveyal.datatools.manager.gtfsplus.tables.GtfsPlusTable}, which extends the + * {@link com.conveyal.gtfs.loader.Table} in order to define a table specification for this set of + * extension tables. + * + * Note: these classes are primarily used for the MTC merge type in + * {@link com.conveyal.datatools.manager.jobs.MergeFeedsJob}. 
There may be an opportunity to also use + * these classes in the GTFS+ validation code path found in + * {@link com.conveyal.datatools.manager.controllers.api.GtfsPlusController}; however, + * TODO a way to define an enum set for string field values would need to first be added to support + * fields such as {@link com.conveyal.datatools.manager.gtfsplus.tables.StopAttribute#cardinal_direction}. + */ +package com.conveyal.datatools.manager.gtfsplus.tables; + diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/GisExportJob.java b/src/main/java/com/conveyal/datatools/manager/jobs/GisExportJob.java new file mode 100644 index 000000000..89f29f559 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/jobs/GisExportJob.java @@ -0,0 +1,301 @@ +package com.conveyal.datatools.manager.jobs; + +import com.conveyal.datatools.common.status.MonitorableJob; +import com.conveyal.datatools.editor.utils.DirectoryZip; +import com.conveyal.datatools.manager.DataManager; +import com.conveyal.datatools.manager.models.FeedVersion; +import com.conveyal.datatools.manager.persistence.Persistence; +import com.conveyal.gtfs.loader.Feed; +import com.conveyal.gtfs.loader.Requirement; +import com.conveyal.gtfs.loader.Table; +import com.conveyal.gtfs.model.Agency; +import com.conveyal.gtfs.model.Route; +import com.conveyal.gtfs.model.Stop; +import com.google.common.io.Files; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.GeometryFactory; +import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.Point; +import org.apache.commons.dbutils.DbUtils; +import org.geotools.data.DataUtilities; +import org.geotools.data.DefaultTransaction; +import org.geotools.data.Transaction; +import org.geotools.data.shapefile.ShapefileDataStore; +import org.geotools.data.shapefile.ShapefileDataStoreFactory; +import org.geotools.data.simple.SimpleFeatureSource; +import org.geotools.data.simple.SimpleFeatureStore; +import org.geotools.feature.DefaultFeatureCollection; +import org.geotools.feature.simple.SimpleFeatureBuilder; +import org.geotools.feature.simple.SimpleFeatureTypeBuilder; +import org.geotools.referencing.crs.DefaultGeographicCRS; +import org.opengis.feature.simple.SimpleFeature; +import org.opengis.feature.simple.SimpleFeatureType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.Serializable; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** Export routes or stops for a GTFS feed version as a shapefile. 
*/ +public class GisExportJob extends MonitorableJob { + public static final Logger LOG = LoggerFactory.getLogger(GisExportJob.class); + public ExportType exportType; + public Collection feedIds; + + public GisExportJob(ExportType exportType, File file, Collection feedIds, String owner) { + super( + owner, + String.format("Export %s GIS for feed", exportType.toString().toLowerCase()), + JobType.EXPORT_GIS + ); + this.exportType = exportType; + this.file = file; + this.feedIds = feedIds; + status.update("Beginning export", 5); + } + + @Override public void jobLogic() { + LOG.info("Storing shapefile for feeds {} at {}", feedIds, file.getAbsolutePath()); + File outDir = Files.createTempDir(); + LOG.info("Temp directory for shapefile: {}", outDir.getAbsolutePath()); + File outShp = new File(outDir, file.getName().replaceAll("\\.zip", "") + ".shp"); + Connection connection = null; + try { + GeometryFactory geometryFactory = new GeometryFactory(); + ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory(); + + Map params = new HashMap<>(); + params.put("url", outShp.toURI().toURL()); + + ShapefileDataStore datastore = (ShapefileDataStore) dataStoreFactory.createNewDataStore(params); + datastore.forceSchemaCRS(DefaultGeographicCRS.WGS84); + + final SimpleFeatureType STOP_TYPE = DataUtilities.createType( + "Stop", + String.join(",", + // Geometry must be the first attribute for a shapefile (and must be named + // "the_geom"). We must include SRID, otherwise the projection will be undefined. + "the_geom:Point:srid=4326", + "name:String", + "code:String", + "desc:String", + "id:String", + "agency:String" + ) + ); + + final SimpleFeatureType ROUTE_TYPE = DataUtilities.createType( + "Route", // <- the name for our feature type + String.join(",", + // Geometry must be the first attribute for a shapefile (and must be named + // "the_geom"). We must include SRID, otherwise the projection will be undefined. + "the_geom:LineString:srid=4326", + "pattName:String", + "shortName:String", + "longName:String", + "desc:String", + "type:String", + "url:String", + "routeColor:String", + "textColor:String", + "shapeId:String", + "agency:String" + ) + ); + SimpleFeatureBuilder featureBuilder; + DefaultFeatureCollection features = new DefaultFeatureCollection(); + // Get connection for use in fetching patterns. This is outside of for loop so we're + // not connecting multiple times. + connection = DataManager.GTFS_DATA_SOURCE.getConnection(); + for (String feedId : feedIds) { + // Get feed version and connection to RDBMS feed. + FeedVersion version = Persistence.feedVersions.getById(feedId); + if (version == null) { + throw new IllegalStateException(String.format("Could not find version %s", feedId)); + } + Feed feed = new Feed(DataManager.GTFS_DATA_SOURCE, version.namespace); + Agency agency = feed.agencies.iterator().next(); + String agencyName = agency != null + ? 
agency.agency_name + : version.parentFeedSource().name; + status.update( + String.format( + "Exporting %s for %s", + exportType.toString().toLowerCase(), + agencyName), + 40 + ); + if (exportType.equals(ExportType.STOPS)) { + datastore.createSchema(STOP_TYPE); + featureBuilder = new SimpleFeatureBuilder(STOP_TYPE); + for (Stop stop : feed.stops) { + Point point = geometryFactory.createPoint( + new Coordinate(stop.stop_lon, stop.stop_lat) + ); + LOG.info(point.toString()); + featureBuilder.add(point); + featureBuilder.add(stop.stop_name); + featureBuilder.add(stop.stop_code); + featureBuilder.add(stop.stop_desc); + featureBuilder.add(stop.stop_id); + featureBuilder.add(agencyName); + // Build feature (null id arg will generate default ID). + SimpleFeature feature = featureBuilder.buildFeature(null); + features.add(feature); + } + } else if (exportType.equals(ExportType.ROUTES)) { + datastore.createSchema(ROUTE_TYPE); + featureBuilder = new SimpleFeatureBuilder(ROUTE_TYPE); + // There is not a clean way to fetch patterns out of the RDBMS and it may not + // be worth building a structured way with JDBCTableReader simply for + // exporting a shapefile. If there are future similar cases, we may need to + // refactor this into a more structured operation using Java objects or + // com.conveyal.gtfs.loader.Feed + // Note: we use generateSelectSql for PROPRIETARY because we encountered an issue with some feeds + // (perhaps legacy) not containing the column patterns#direction_id. + // See https://github.com/ibi-group/datatools-server/issues/203 + // TODO: replace with Table#generateSelectAllSql + String patternsSql = Table.PATTERNS.generateSelectSql(version.namespace, Requirement.PROPRIETARY); + PreparedStatement statement = connection.prepareStatement(patternsSql); + ResultSet resultSet = statement.executeQuery(); + // we loop over trip patterns. Note that this will yield several lines for routes that have + // multiple patterns. There's no real good way to reconcile the shapes of multiple patterns. + while (resultSet.next()) { + String pattern_id = resultSet.getString("pattern_id"); + String route_id = resultSet.getString("route_id"); + String name = resultSet.getString("name"); + String shape_id = resultSet.getString("shape_id"); + LineString shape; + if (shape_id != null) { + // Select shape points for pattern shape and build line string. + PreparedStatement shapeStatement = connection.prepareStatement( + String.format( + "select shape_pt_lon, shape_pt_lat, shape_pt_sequence, " + + "shape_id from %s.shapes where shape_id = ? " + + "order by shape_pt_sequence", + version.namespace + )); + shapeStatement.setString(1, shape_id); + ResultSet shapePointsResultSet = shapeStatement.executeQuery(); + // Construct line string from shape points. + List coordinates = new ArrayList<>(); + while (shapePointsResultSet.next()) { + double lon = shapePointsResultSet.getDouble(1); + double lat = shapePointsResultSet.getDouble(2); + coordinates.add(new Coordinate(lon, lat)); + } + Coordinate[] coords = new Coordinate[coordinates.size()]; + coords = coordinates.toArray(coords); + shape = geometryFactory.createLineString(coords); + } else { + LOG.info("Building pattern {} from stops", pattern_id); + // Build the shape from the pattern stops if there is no shape for + // pattern. + PreparedStatement stopsStatement = connection.prepareStatement( + String.format( + "select stop_lon, stop_lat, stops.stop_id, stop_sequence, pattern_id" + + " from %s.stops as stops, %s.pattern_stops as ps" + + " where pattern_id = ? 
and stops.stop_id = ps.stop_id" + + " order by pattern_id, stop_sequence", + version.namespace, version.namespace + )); + stopsStatement.setString(1, pattern_id); + List coordinates = new ArrayList<>(); + ResultSet stopsResultSet = stopsStatement.executeQuery(); + while (stopsResultSet.next()) { + double lon = stopsResultSet.getDouble(1); + double lat = stopsResultSet.getDouble(2); + coordinates.add(new Coordinate(lon, lat)); + } + Coordinate[] coords = new Coordinate[coordinates.size()]; + coords = coordinates.toArray(coords); + shape = geometryFactory.createLineString(coords); + } + + Route route = feed.routes.get(route_id); + if (route == null) { + LOG.warn("Route ({}) for pattern {} does not exist. Skipping pattern" + , route_id, pattern_id); + continue; + } + featureBuilder.add(shape); + featureBuilder.add(name); + featureBuilder.add(route.route_short_name); + featureBuilder.add(route.route_long_name); + featureBuilder.add(route.route_desc); + featureBuilder.add(route.route_type); + featureBuilder.add(route.route_url); + featureBuilder.add(route.route_color); + featureBuilder.add(route.route_text_color); + featureBuilder.add(shape_id); + featureBuilder.add(agencyName); + SimpleFeature feature = featureBuilder.buildFeature(null); + features.add(feature); + } + } else { + throw new IllegalStateException("Invalid type"); + } + } + if (features.size() == 0) { + throw new IllegalStateException("Cannot write shapefile with zero features!"); + } + // Save the file + + Transaction transaction = new DefaultTransaction("create"); + + String typeName = datastore.getTypeNames()[0]; + SimpleFeatureSource featureSource = datastore.getFeatureSource(typeName); + // Check that we have read-write access to disk: + // http://docs.geotools.org/stable/userguide/library/data/featuresource.html + if (featureSource instanceof SimpleFeatureStore) { + SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource; + featureStore.setTransaction(transaction); + try { + LOG.info("Adding {} features to shapefile.", features.size()); + featureStore.addFeatures(features); + transaction.commit(); + } catch (Exception e) { + e.printStackTrace(); + transaction.rollback(); + throw e; + } finally { + transaction.close(); + } + } else { + // If this is thrown, there could be some other issue unrelated to read/write + // access, for example, during development of this feature this error was thrown + // when there were no features contained within the shapefile. + throw new Exception(typeName + " does not support read/write access (or other " + + "unknown issue)."); + } + LOG.info("Zipping shapefile {}", file.getAbsolutePath()); + // zip the file + DirectoryZip.zip(outDir, file); + + // Clean up temporary files. 
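+            // Delete each generated shapefile component (.shp, .dbf, .shx, etc.) before removing the temp directory itself.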
+ for (File f : outDir.listFiles()) { + f.delete(); + } + outDir.delete(); + status.update(false, "Export complete!", 100); + status.completed = true; + } catch (Exception e) { + String message = "An exception occurred during the GIS export"; + LOG.error(message); + status.fail(message); + e.printStackTrace(); + } finally { + if (connection != null) DbUtils.closeQuietly(connection); + } + } + + public enum ExportType { ROUTES, STOPS } +} diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsJob.java b/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsJob.java new file mode 100644 index 000000000..256895ef6 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsJob.java @@ -0,0 +1,823 @@ +package com.conveyal.datatools.manager.jobs; + +import com.conveyal.datatools.common.status.MonitorableJob; +import com.conveyal.datatools.manager.DataManager; +import com.conveyal.datatools.manager.gtfsplus.tables.GtfsPlusTable; +import com.conveyal.datatools.manager.models.FeedSource; +import com.conveyal.datatools.manager.models.FeedVersion; +import com.conveyal.datatools.manager.persistence.FeedStore; +import com.conveyal.gtfs.error.NewGTFSError; +import com.conveyal.gtfs.error.NewGTFSErrorType; +import com.conveyal.gtfs.loader.Field; +import com.conveyal.gtfs.loader.ReferenceTracker; +import com.conveyal.gtfs.loader.Table; +import com.csvreader.CsvReader; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.supercsv.io.CsvListWriter; +import org.supercsv.prefs.CsvPreference; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.time.LocalDate; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipOutputStream; + +import static com.conveyal.datatools.manager.jobs.MergeFeedsType.MTC; +import static com.conveyal.datatools.manager.jobs.MergeFeedsType.REGIONAL; +import static com.conveyal.datatools.manager.utils.StringUtils.getCleanName; +import static com.conveyal.gtfs.loader.DateField.GTFS_DATE_FORMATTER; +import static com.conveyal.gtfs.loader.Field.getFieldIndex; + +/** + * This job handles merging two or more feed versions according to logic specific to the specified merge type. + * The current merge types handled here are: + * - {@link MergeFeedsType#REGIONAL}: this is essentially a "dumb" merge. For each feed version, each primary key is + * scoped so that there is no possibility that it will conflict with other IDs + * found in any other feed version. Note: There is absolutely no attempt to merge + * entities based on either expected shared IDs or entity location (e.g., stop + * coordinates). + * - {@link MergeFeedsType#MTC}: this strategy is defined in detail at https://github.com/conveyal/datatools-server/issues/185, + * but in essence, this strategy attempts to merge a current and future feed into + * a combined file. For certain entities (specifically stops and routes) it uses + * alternate fields as primary keys (stop_code and route_short_name) if they are + * available. 
There is some complexity related to this in {@link #constructMergedTable(Table, List, ZipOutputStream)}. + * Another defining characteristic is to prefer entities defined in the "future" + * file if there are matching entities in the current file. + * Future merge strategies could be added here. For example, some potential customers have mentioned a desire to + * prefer entities from the "current" version, so that entities edited in Data Tools would override the values found + * in the "future" file, which may have limited data attributes due to being exported from scheduling software with + * limited GTFS support. + * + * Reproduced from https://github.com/conveyal/datatools-server/issues/185 on 2019/04/23: + * + * 1. When a new GTFS+ feed is loaded in TDM, check as part of the loading and validation process if + * the dataset is for a future date. (If all services start in the future, consider the dataset + * to be for the future). + * 2. If it is a future dataset, automatically notify the user that the feed needs to be merged with + * most recent active version or a selected one in order to further process the feed. + * 3. Use the chosen version to merge the future feed. The merging process needs to be efficient so + * that the user doesn’t need to wait more than a tolerable time. + * 4. The merge process shall compare the current and future datasets, validate the following rules + * and generate the Merge Validation Report: + * i. Merging will be based on route_short_name in the current and future datasets. All matching + * route_short_names between the datasets shall be considered same route. Any route_short_name + * in active data not present in the future will be appended to the future routes file. + * ii. Future feed_info.txt file should get priority over active feed file when difference is + * identified. + * iii. When difference is found in agency.txt file between active and future feeds, the future + * agency.txt file data should be used. Possible issue with missing agency_id referenced by routes + * iv. When stop_code is included, stop merging will be based on that. If stop_code is not + * included, it will be based on stop_id. All stops in future data will be carried forward and + * any stops found in active data that are not in the future data shall be appended. If one + * of the feed is missing stop_code, merge fails with a notification to the user with + * suggestion that the feed with missing stop_code must be fixed with stop_code. + * v. If any service_id in the active feed matches with the future feed, it should be modified + * and all associated trip records must also be changed with the modified service_id. + * If a service_id from the active calendar has both the start_date and end_date in the + * future, the service shall not be appended to the merged file. Records in trips, + * calendar_dates, and calendar_attributes referencing this service_id shall also be + * removed/ignored. Stop_time records for the ignored trips shall also be removed. + * If a service_id from the active calendar has only the end_date in the future, the end_date + * shall be set to one day prior to the earliest start_date in future dataset before appending + * the calendar record to the merged file. + * trip_ids between active and future datasets must not match. If any trip_id is found to be + * matching, the merge should fail with appropriate notification to user with the cause of the + * failure. Notification should include all matched trip_ids. + * vi. 
New shape_ids in the future datasets should be appended in the merged feed. + * vii. Merging fare_attributes will be based on fare_id in the current and future datasets. All + * matching fare_ids between the datasets shall be considered same fare. Any fare_id in active + * data not present in the future will be appended to the future fare_attributes file. + * viii. All fare rules from the future dataset will be included. Any identical fare rules from + * the current dataset will be discarded. Any fare rules unique to the current dataset will be + * appended to the future file. + * ix. All transfers.txt entries with unique stop pairs (from - to) from both the future and + * current datasets will be included in the merged file. Entries with duplicate stop pairs from + * the current dataset will be discarded. + * x. All GTFS+ files should be merged based on how the associated base GTFS file is merged. For + * example, directions for routes that are not in the future routes.txt file should be appended + * to the future directions.txt file in the merged feed. + */ +public class MergeFeedsJob extends MonitorableJob { + + private static final Logger LOG = LoggerFactory.getLogger(MergeFeedsJob.class); + public static final ObjectMapper mapper = new ObjectMapper(); + private final Set feedVersions; + private final FeedSource feedSource; + private final ReferenceTracker referenceTracker = new ReferenceTracker(); + public MergeFeedsResult mergeFeedsResult; + private final String filename; + public final String projectId; + public final MergeFeedsType mergeType; + private File mergedTempFile = null; + final FeedVersion mergedVersion; + public boolean failOnDuplicateTripId = true; + + /** + * @param owner user ID that initiated job + * @param feedVersions set of feed versions to merge + * @param file resulting merge filename (without .zip) + * @param mergeType the type of merge to perform (@link MergeFeedsType) + */ + public MergeFeedsJob(String owner, Set feedVersions, String file, + MergeFeedsType mergeType) { + super(owner, mergeType.equals(REGIONAL) ? "Merging project feeds" : "Merging feed versions", + JobType.MERGE_FEED_VERSIONS); + this.feedVersions = feedVersions; + // Grab parent feed source if performing non-regional merge (each version should share the + // same feed source). + this.feedSource = + mergeType.equals(REGIONAL) ? null : feedVersions.iterator().next().parentFeedSource(); + // Construct full filename with extension + this.filename = String.format("%s.zip", file); + // If the merge type is regional, the file string should be equivalent to projectId, which + // is used by the client to download the merged feed upon job completion. + this.projectId = mergeType.equals(REGIONAL) ? file : null; + this.mergeType = mergeType; + // Assuming job is successful, mergedVersion will contain the resulting feed version. + this.mergedVersion = mergeType.equals(REGIONAL) ? null : new FeedVersion(this.feedSource); + this.mergeFeedsResult = new MergeFeedsResult(mergeType); + } + + /** + * The final stage handles clean up (deleting temp file) and adding the next job to process the + * new merged version (assuming the merge did not fail). + */ + public void jobFinished() { + // Delete temp file to ensure it does not cause storage bloat. Note: merged file has already been stored + // permanently. + if (!mergedTempFile.delete()) { + // FIXME: send to bugsnag? + LOG.error( + "Merged feed file {} not deleted. 
This may contribute to storage space shortages.", + mergedTempFile.getAbsolutePath()); + } + } + + /** + * Primary job logic handles collecting and sorting versions, creating a merged table for all versions, and writing + * the resulting zip file to storage. + */ + @Override public void jobLogic() throws IOException { + // Create temp zip file to add merged feed content to. + mergedTempFile = File.createTempFile(filename, null); + mergedTempFile.deleteOnExit(); + // Create the zipfile. + ZipOutputStream out = new ZipOutputStream(new FileOutputStream(mergedTempFile)); + LOG.info("Created project merge file: " + mergedTempFile.getAbsolutePath()); + List feedsToMerge = collectAndSortFeeds(feedVersions); + + // Determine which tables to merge (only merge GTFS+ tables for MTC extension). + final List tablesToMerge = + Arrays.stream(Table.tablesInOrder) + .filter(Table::isSpecTable) + .collect(Collectors.toList()); + if (DataManager.isExtensionEnabled("mtc")) { + // Merge GTFS+ tables only if MTC extension is enabled. We should do this for both + // regional and MTC merge strategies. + tablesToMerge.addAll(Arrays.asList(GtfsPlusTable.tables)); + } + int numberOfTables = tablesToMerge.size(); + // Loop over GTFS tables and merge each feed one table at a time. + for (int i = 0; i < numberOfTables; i++) { + Table table = tablesToMerge.get(i); + if (mergeType.equals(REGIONAL) && table.name.equals(Table.FEED_INFO.name)) { + // It does not make sense to include the feed_info table when performing a + // regional feed merge because this file is intended to contain data specific to + // a single agency feed. + // TODO: Perhaps future work can generate a special feed_info file for the merged + // file. + LOG.warn("Skipping feed_info table for regional merge."); + continue; + } + if (table.name.equals(Table.PATTERNS.name) || table.name.equals(Table.PATTERN_STOP.name)) { + LOG.warn("Skipping editor-only table {}.", table.name); + continue; + } + double percentComplete = Math.round((double) i / numberOfTables * 10000d) / 100d; + status.update("Merging " + table.name, percentComplete); + // Perform the merge. + LOG.info("Writing {} to merged feed", table.name); + int mergedLineNumber = constructMergedTable(table, feedsToMerge, out); + if (mergedLineNumber == 0) { + LOG.warn("Skipping {} table. No entries found in zip files.", table.name); + } else if (mergedLineNumber == -1) { + LOG.error("Merge {} table failed!", table.name); + } + } + // Close output stream for zip file. + out.close(); + // Handle writing file to storage (local or s3). + if (mergeFeedsResult.failed) { + status.fail("Merging feed versions failed."); + } else { + storeMergedFeed(); + status.update(false, "Merged feed created successfully.", 100, true); + } + LOG.info("Feed merge is complete."); + if (!mergeType.equals(REGIONAL) && !status.error && !mergeFeedsResult.failed) { + // Handle the processing of the new version for non-regional merges (note: s3 upload is handled within this job). + // We must add this job in jobLogic (rather than jobFinished) because jobFinished is called after this job's + // subJobs are run. + ProcessSingleFeedJob processSingleFeedJob = + new ProcessSingleFeedJob(mergedVersion, owner, true); + addNextJob(processSingleFeedJob); + } + } + + /** + * Collect zipFiles for each feed version before merging tables. + * Note: feed versions are sorted by first calendar date so that future dataset is iterated over first. 
This is + * required for the MTC merge strategy which prefers entities from the future dataset over past entities. + */ + private List collectAndSortFeeds(Set feedVersions) { + return feedVersions.stream().map(version -> { + try { + return new FeedToMerge(version); + } catch (Exception e) { + LOG.error("Could not create zip file for version {}:", version.parentFeedSource(), + version.version); + return null; + } + }).filter(Objects::nonNull).filter(entry -> entry.version.validationResult != null + && entry.version.validationResult.firstCalendarDate != null) + // MTC-specific sort mentioned in above comment. + // TODO: If another merge strategy requires a different sort order, a merge type check should be added. + .sorted(Comparator.comparing(entry -> entry.version.validationResult.firstCalendarDate, + Comparator.reverseOrder())).collect(Collectors.toList()); + } + + /** + * Handles writing the GTFS zip file to disk. For REGIONAL merges, this will end up in a project subdirectory on s3. + * Otherwise, it will write to a new version. + */ + private void storeMergedFeed() throws IOException { + if (mergeType.equals(REGIONAL)) { + status.update(false, "Saving merged feed.", 95); + // Store the project merged zip locally or on s3 + if (DataManager.useS3) { + String s3Key = String.join("/", "project", filename); + FeedStore.s3Client.putObject(DataManager.feedBucket, s3Key, mergedTempFile); + LOG.info("Storing merged project feed at s3://{}/{}", DataManager.feedBucket, + s3Key); + } else { + try { + FeedVersion.feedStore + .newFeed(filename, new FileInputStream(mergedTempFile), null); + } catch (IOException e) { + e.printStackTrace(); + LOG.error("Could not store feed for project {}", filename); + throw e; + } + } + } else { + // Store the zip file for the merged feed version. + try { + FeedVersion.feedStore + .newFeed(mergedVersion.id, new FileInputStream(mergedTempFile), feedSource); + } catch (IOException e) { + LOG.error("Could not store merged feed for new version"); + throw e; + } + } + } + + /** + * Merge the specified table for multiple GTFS feeds. + * + * @param table table to merge + * @param feedsToMerge map of feedSources to zipFiles from which to extract the .txt tables + * @param out output stream to write table into + * @return number of lines in merged table + */ + private int constructMergedTable(Table table, List feedsToMerge, + ZipOutputStream out) throws IOException { + // CSV writer used to write to zip file. + CsvListWriter writer = new CsvListWriter(new OutputStreamWriter(out), CsvPreference.STANDARD_PREFERENCE); + String keyField = table.getKeyFieldName(); + String orderField = table.getOrderFieldName(); + if (mergeType.equals(MTC)) { + // MTC requires that the stop and route records be merged based on different key fields. + switch (table.name) { + case "stops": + keyField = "stop_code"; + break; + case "routes": + keyField = "route_short_name"; + break; + default: + // Otherwise, use the standard key field (see keyField declaration. + break; + } + } + // Set up objects for tracking the rows encountered + Map rowValuesForStopOrRouteId = new HashMap<>(); + Set rowStrings = new HashSet<>(); + int mergedLineNumber = 0; + // Get the spec fields to export + List specFields = table.specFields(); + boolean stopCodeMissingFromFirstTable = false; + try { + // Iterate over each zip file. + for (int feedIndex = 0; feedIndex < feedsToMerge.size(); feedIndex++) { + boolean keyFieldMissing = false; + // Use for a new agency ID for use if the feed does not contain one. 
Initialize to + // null. If the value becomes non-null, the agency_id is missing and needs to be + // replaced with the generated value stored in this variable. + String newAgencyId = null; + mergeFeedsResult.feedCount++; + FeedToMerge feed = feedsToMerge.get(feedIndex); + FeedVersion version = feed.version; + FeedSource feedSource = version.parentFeedSource(); + // Generate ID prefix to scope GTFS identifiers to avoid conflicts. + String idScope = getCleanName(feedSource.name) + version.version; + CsvReader csvReader = table.getCsvReader(feed.zipFile, null); + // If csv reader is null, the table was not found in the zip file. There is no need + // to handle merging this table for the current zip file. + if (csvReader == null) { + LOG.warn("Table {} not found in the zip file for {}{}", table.name, + feedSource.name, version.version); + continue; + } + LOG.info("Adding {} table for {}{}", table.name, feedSource.name, version.version); + + Field[] fieldsFoundInZip = + table.getFieldsFromFieldHeaders(csvReader.getHeaders(), null); + List fieldsFoundList = Arrays.asList(fieldsFoundInZip); + // Determine the index of the key field for this version's table. + int keyFieldIndex = getFieldIndex(fieldsFoundInZip, keyField); + if (keyFieldIndex == -1) { + LOG.error("No {} field exists for {} table (feed={})", keyField, table.name, + feed.version.id); + keyFieldMissing = true; + // If there is no agency_id for agency table, create one and ensure that + // route#agency_id gets set. + } + int lineNumber = 0; + // Iterate over rows in table, writing them to the out file. + while (csvReader.readRecord()) { + String keyValue = csvReader.get(keyFieldIndex); + if (feedIndex > 0 && mergeType.equals(MTC)) { + // Always prefer the "future" file for the feed_info table, which means + // we can skip any iterations following the first one. If merging the agency + // table, we should only skip the following feeds if performing an MTC merge + // because that logic assumes the two feeds share the same agency (or + // agencies). NOTE: feed_info file is skipped by default (outside of this + // method) for a regional merge), which is why this block is exclusively + // for an MTC merge. Also, this statement may print multiple log + // statements, but it is deliberately nested in the csv while block in + // order to detect agency_id mismatches and fail the merge if found. + if (table.name.equals("feed_info")) { + LOG.warn("Skipping {} file for feed {}/{} (future file preferred)", + table.name, feedIndex, feedsToMerge.size()); + continue; + } else if (table.name.equals("agency")) { + // The second feed's agency table must contain the same agency_id + // value as the first feed. + String agencyId = String.join(":", keyField, keyValue); + if (!"".equals(keyValue) && !referenceTracker.transitIds.contains(agencyId)) { + String otherAgencyId = referenceTracker.transitIds.stream() + .filter(transitId -> transitId.startsWith("agency_id")) + .findAny() + .orElse(null); + String message = String.format( + "MTC merge detected mismatching agency_id values between two " + + "feeds (%s and %s). Failing merge operation.", + agencyId, + otherAgencyId + ); + LOG.error(message); + mergeFeedsResult.failed = true; + mergeFeedsResult.failureReasons.add(message); + return -1; + } + LOG.warn("Skipping {} file for feed {}/{} (future file preferred)", + table.name, feedIndex, feedsToMerge.size()); + continue; + } + } + // Check certain initial conditions on the first line of the file. 
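+                    // (i.e., backfilling a missing agency_id in the agency table and, for MTC merges, detecting a missing stop_code in stops.txt)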
+ if (lineNumber == 0) { + if (table.name.equals(Table.AGENCY.name) && (keyFieldMissing || keyValue.equals(""))) { + // agency_id is optional if only one agency is present, but that will + // cause issues for the feed merge, so we need to insert an agency_id + // for the single entry. + newAgencyId = UUID.randomUUID().toString(); + if (keyFieldMissing) { + // Only add agency_id field if it is missing in table. + List fieldsList = new ArrayList<>(Arrays.asList(fieldsFoundInZip)); + fieldsList.add(Table.AGENCY.fields[0]); + fieldsFoundInZip = fieldsList.toArray(fieldsFoundInZip); + } + fieldsFoundList = Arrays.asList(fieldsFoundInZip); + } + if (mergeType.equals(MTC) && table.name.equals("stops")) { + // For the first line of the stops table, check that the alt. key + // field (stop_code) is present. If it is not, revert to the original + // key field. This is only pertinent for the MTC merge type. + // TODO: Use more sophisticated check for missing stop_codes than + // simply the first line containing the value. + if (feedIndex == 0) { + // Check that the first file contains stop_code values. + if ("".equals(keyValue)) { + LOG.warn( + "stop_code is not present in file {}/{}. Reverting to stop_id", + feedIndex, feedsToMerge.size()); + // If the key value for stop_code is not present, revert to stop_id. + keyField = table.getKeyFieldName(); + keyFieldIndex = table.getKeyFieldIndex(fieldsFoundInZip); + keyValue = csvReader.get(keyFieldIndex); + stopCodeMissingFromFirstTable = true; + } + } else { + // Check whether stop_code exists for the subsequent files. + String firstStopCodeValue = csvReader.get(getFieldIndex(fieldsFoundInZip, "stop_code")); + if (stopCodeMissingFromFirstTable && !"".equals(firstStopCodeValue)) { + // If stop_code was missing from the first file and exists for + // the second, we consider that a failing error. + mergeFeedsResult.failed = true; + mergeFeedsResult.errorCount++; + mergeFeedsResult.failureReasons.add( + "If one stops.txt file contains stop_codes, both feed versions must stop_codes."); + } + } + } + } + boolean skipRecord = false; + String[] rowValues = new String[specFields.size()]; + String[] values = csvReader.getValues(); + if (values.length == 1) { + LOG.warn("Found blank line. Skipping..."); + continue; + } + // Piece together the row to write, which should look practically identical to the original + // row except for the identifiers receiving a prefix to avoid ID conflicts. + for (int specFieldIndex = 0; + specFieldIndex < specFields.size(); specFieldIndex++) { + Field field = specFields.get(specFieldIndex); + // Get index of field from GTFS spec as it appears in feed + int index = fieldsFoundList.indexOf(field); + String val = csvReader.get(index); + // Default value to write is unchanged from value found in csv. + String valueToWrite = val; + // Handle filling in agency_id if missing when merging regional feeds. + if (newAgencyId != null && field.name.equals("agency_id") && mergeType + .equals(REGIONAL)) { + if (val.equals("") && table.name.equals("agency") && lineNumber > 0) { + // If there is no agency_id value for a second (or greater) agency + // record, fail the merge feed job. 
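+                                    // (A single agency record without an agency_id can be auto-assigned one, but multiple records without IDs cannot be told apart, so the merge fails.)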
+ String message = String.format( + "Feed %s has multiple agency records but no agency_id values.", + feed.version.id); + mergeFeedsResult.failed = true; + mergeFeedsResult.failureReasons.add(message); + LOG.error(message); + return -1; + } + LOG.info("Updating {}#agency_id to (auto-generated) {} for ID {}", + table.name, newAgencyId, keyValue); + val = newAgencyId; + } + // Determine if field is a GTFS identifier. + boolean isKeyField = + field.isForeignReference() || keyField.equals(field.name); + if (this.mergeType.equals(REGIONAL) && isKeyField && !val.isEmpty()) { + // For regional merge, if field is a GTFS identifier (e.g., route_id, + // stop_id, etc.), add scoped prefix. + valueToWrite = String.join(":", idScope, val); + } + // Only need to check for merge conflicts if using MTC merge type because + // the regional merge type scopes all identifiers by default. Also, the + // reference tracker will get far too large if we attempt to use it to + // track references for a large number of feeds (e.g., every feed in New + // York State). + if (mergeType.equals(MTC)) { + Set idErrors = referenceTracker + .checkReferencesAndUniqueness(keyValue, lineNumber, field, val, + table, keyField, orderField); + // Store values for key fields that have been encountered. + // TODO Consider using Strategy Pattern https://en.wikipedia.org/wiki/Strategy_pattern + // instead of a switch statement. + switch (table.name) { + case "calendar": + // If any service_id in the active feed matches with the future + // feed, it should be modified and all associated trip records + // must also be changed with the modified service_id. + // TODO How can we check that calendar_dates entries are + // duplicates? I think we would need to consider the + // service_id:exception_type:date as the unique key and include any + // all entries as long as they are unique on this key. + if (hasDuplicateError(idErrors)) { + String key = getTableScopedValue(table, idScope, val); + // Modify service_id and ensure that referencing trips + // have service_id updated. + valueToWrite = String.join(":", idScope, val); + mergeFeedsResult.remappedIds.put(key, valueToWrite); + } + // If a service_id from the active calendar has both the + // start_date and end_date in the future, the service will be + // excluded from the merged file. Records in trips, + // calendar_dates, and calendar_attributes referencing this + // service_id shall also be removed/ignored. Stop_time records + // for the ignored trips shall also be removed. + if (feedIndex > 0) { + int startDateIndex = + getFieldIndex(fieldsFoundInZip, "start_date"); + LocalDate startDate = LocalDate + .parse(csvReader.get(startDateIndex), + GTFS_DATE_FORMATTER); + if (startDate.isAfter(LocalDate.now())) { + LOG.warn( + "Skipping calendar entry {} because it operates in the future.", + keyValue); + String key = + getTableScopedValue(table, idScope, keyValue); + mergeFeedsResult.skippedIds.add(key); + skipRecord = true; + continue; + } + // If a service_id from the active calendar has only the + // end_date in the future, the end_date shall be set to one + // day prior to the earliest start_date in future dataset + // before appending the calendar record to the merged file. 
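+                                    // (feedsToMerge.get(0) is the future dataset because collectAndSortFeeds sorts versions future-first.)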
+ int endDateIndex = + getFieldIndex(fieldsFoundInZip, "end_date"); + if (index == endDateIndex) { + LocalDate endDate = LocalDate + .parse(csvReader.get(endDateIndex), + GTFS_DATE_FORMATTER); + if (endDate.isAfter(LocalDate.now())) { + val = feedsToMerge.get( + 0).version.validationResult.firstCalendarDate + .minus(1, ChronoUnit.DAYS) + .format(GTFS_DATE_FORMATTER); + } + } + } + break; + case "trips": + // trip_ids between active and future datasets must not match. If any trip_id is found + // to be matching, the merge should fail with appropriate notification to user with the + // cause of the failure. Merge result should include all conflicting trip_ids. + for (NewGTFSError error : idErrors) { + if (error.errorType.equals(NewGTFSErrorType.DUPLICATE_ID)) { + mergeFeedsResult.failureReasons + .add("Trip ID conflict caused merge failure."); + mergeFeedsResult.idConflicts.add(error.badValue); + mergeFeedsResult.errorCount++; + if (failOnDuplicateTripId) + mergeFeedsResult.failed = true; + skipRecord = true; + } + } + break; + case "stops": + // When stop_code is included, stop merging will be based on that. If stop_code is not + // included, it will be based on stop_id. All stops in future data will be carried + // forward and any stops found in active data that are not in the future data shall be + // appended. If one of the feed is missing stop_code, merge fails with a notification to + // the user with suggestion that the feed with missing stop_code must be fixed with + // stop_code. + // NOTE: route case is also used by the stops case, so the route + // case must follow this block. + case "routes": + boolean useAltKey = + keyField.equals("stop_code") || keyField.equals("route_short_name"); + // First, check uniqueness of primary key value (i.e., stop or route ID) + // in case the stop_code or route_short_name are being used. This + // must occur unconditionally because each record must be tracked + // by the reference tracker. + String primaryKeyValue = + csvReader.get(table.getKeyFieldIndex(fieldsFoundInZip)); + Set primaryKeyErrors = referenceTracker + .checkReferencesAndUniqueness(primaryKeyValue, lineNumber, + field, val, table); + // Merging will be based on route_short_name/stop_code in the current and future datasets. All + // matching route_short_names/stop_codes between the datasets shall be considered same route/stop. Any + // route_short_name/stop_code in active data not present in the future will be appended to the + // future routes/stops file. + if (useAltKey) { + if ("".equals(keyValue) && field.name.equals(table.getKeyFieldName())) { + // If alt key is empty (which is permitted), skip + // checking of alt key dupe errors/re-mapping values and + // simply use the primary key (route_id/stop_id). + if (hasDuplicateError(primaryKeyErrors)) { + skipRecord = true; + } + } else if (hasDuplicateError(idErrors)) { + // If we encounter a route/stop that shares its alt. + // ID with a previous route/stop, we need to + // remap its route_id/stop_id field so that + // references point to the previous + // route_id/stop_id. For example, + // route_short_name in both feeds is "ABC" but + // each route has a different route_id (123 and + // 456). This block will map references to 456 to + // 123 so that ABC/123 is the route of record. + //////////////////////////////////////////////////////// + // Get current route/stop ID. (Note: primary + // ID index is always zero because we're + // iterating over the spec fields). 
+ String currentPrimaryKey = rowValues[0]; + // Get unique key to check for remapped ID when + // writing values to file. + String key = + getTableScopedValue(table, idScope, currentPrimaryKey); + // Extract the route/stop ID value used for the + // route/stop with already encountered matching + // short name/stop code. + String[] strings = + rowValuesForStopOrRouteId.get(String.join( + ":", keyField, val)); + String keyForMatchingAltId = strings[0]; + if (!keyForMatchingAltId.equals(currentPrimaryKey)) { + // Remap this row's route_id/stop_id to ensure + // that referencing entities (trips, stop_times) + // have their references updated. + mergeFeedsResult.remappedIds.put(key, keyForMatchingAltId); + } + skipRecord = true; + } + // Next check for regular ID conflicts (e.g., on route_id or stop_id) because any + // conflicts here will actually break the feed. This essentially handles the case + // where two routes have different short_names, but share the same route_id. We want + // both of these routes to end up in the merged feed in this case because we're + // matching on short name, so we must modify the route_id. + if (!skipRecord && !referenceTracker.transitIds + .contains(String.join(":", keyField, keyValue))) { + if (hasDuplicateError(primaryKeyErrors)) { + String key = getTableScopedValue(table, idScope, val); + // Modify route_id and ensure that referencing trips + // have route_id updated. + valueToWrite = String.join(":", idScope, val); + mergeFeedsResult.remappedIds.put(key, valueToWrite); + } + } + } else { + // Key field has defaulted to the standard primary key field + // (stop_id or route_id), which makes the check much + // simpler (just skip the duplicate record). + if (hasDuplicateError(idErrors)) skipRecord = true; + } + + if (newAgencyId != null && field.name.equals("agency_id")) { + LOG.info( + "Updating route#agency_id to (auto-generated) {} for route={}", + newAgencyId, keyValue); + val = newAgencyId; + } + break; + default: + // For any other table, skip any duplicate record. + if (hasDuplicateError(idErrors)) skipRecord = true; + break; + } + } + + if (field.isForeignReference()) { + String key = getTableScopedValue(field.referenceTable, idScope, val); + // If the current foreign ref points to another record that has been skipped, skip this + // record and add its primary key to the list of skipped IDs (so that other references can + // be properly omitted). + if (mergeFeedsResult.skippedIds.contains(key)) { + String skippedKey = getTableScopedValue(table, idScope, keyValue); + if (orderField != null) { + skippedKey = String.join(":", skippedKey, + csvReader.get(getFieldIndex(fieldsFoundInZip, orderField))); + } + mergeFeedsResult.skippedIds.add(skippedKey); + skipRecord = true; + continue; + } + // If the field is a foreign reference, check to see whether the reference has been + // remapped due to a conflicting ID from another feed (e.g., calendar#service_id). + if (mergeFeedsResult.remappedIds.containsKey(key)) { + mergeFeedsResult.remappedReferences++; + // If the value has been remapped update the value to write. + valueToWrite = mergeFeedsResult.remappedIds.get(key); + } + } + rowValues[specFieldIndex] = valueToWrite; + } // End of iteration over each field for a row. + // Do not write rows that are designated to be skipped. 
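+                    // (Records are only dropped for MTC merges; the REGIONAL merge scopes identifiers rather than skipping rows.)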
+ if (skipRecord && this.mergeType.equals(MTC)) { + mergeFeedsResult.recordsSkipCount++; + continue; + } + String newLine = String.join(",", rowValues); + switch (table.name) { + // Store row values for route or stop ID (or alternative ID field) in order + // to check for ID conflicts. NOTE: This is only intended to be used for + // routes and stops. Otherwise, this might (will) consume too much memory. + case "stops": + case "routes": + // FIXME: This should be revised for tables with order fields, but it should work fine for its + // primary purposes: to detect exact copy rows and to temporarily hold the data in case a reference + // needs to be looked up in order to remap an entity to that key. + // Here we need to get the key field index according to the spec + // table definition. Otherwise, if we use the keyFieldIndex variable + // defined above, we will be using the found fields index, which will + // cause major issues when trying to put and get values into the + // below map. + String key = String.join( + ":", keyField, rowValues[table.getFieldIndex(keyField)]); + rowValuesForStopOrRouteId.put(key, rowValues); + break; + case "transfers": + case "fare_rules": + case "directions": // GTFS+ table + if (!rowStrings.add(newLine)) { + // The line already exists in the output file, do not append it again. This prevents duplicate + // entries for certain files that do not contain primary keys (e.g., fare_rules and transfers) and + // do not otherwise have convenient ways to track uniqueness (like an order field). + // FIXME: add ordinal field/compound keys for transfers (from/to_stop_id) and fare_rules (?). + // Perhaps it makes sense to include all unique fare rules rows, but transfers that share the + // same from/to stop IDs but different transfer times or other values should not both be + // included in the merged feed (yet this strategy would fail to filter those out). + mergeFeedsResult.recordsSkipCount++; + continue; + } + break; + default: + // Do nothing. + break; + + } + // Finally, handle writing lines to zip entry. + if (mergedLineNumber == 0) { + // Create entry for zip file. + ZipEntry tableEntry = new ZipEntry(table.name + ".txt"); + out.putNextEntry(tableEntry); + // Write headers to table. + String[] headers = specFields.stream() + .map(field -> field.name) + .toArray(String[]::new); + writer.write(headers); + } + // Write line to table (plus new line char). + writer.write(rowValues); + lineNumber++; + mergedLineNumber++; + } // End of iteration over each row. + } + writer.flush(); + out.closeEntry(); + } catch (Exception e) { + LOG.error("Error merging feed sources: {}", + feedVersions.stream().map(version -> version.parentFeedSource().name) + .collect(Collectors.toList()).toString()); + e.printStackTrace(); + throw e; + } + // Track the number of lines in the merged table and return final number. + mergeFeedsResult.linesPerTable.put(table.name, mergedLineNumber); + return mergedLineNumber; + } + + /** Checks that any of a set of errors is of the type {@link NewGTFSErrorType#DUPLICATE_ID}. */ + private boolean hasDuplicateError(Set errors) { + for (NewGTFSError error : errors) { + if (error.errorType.equals(NewGTFSErrorType.DUPLICATE_ID)) return true; + } + return false; + } + + /** Get table-scoped value used for key when remapping references for a particular feed. 
*/ + private static String getTableScopedValue(Table table, String prefix, String id) { + return String.join(":", + table.name, + prefix, + id); + } + + /** + * Helper class that collects the feed version and its zip file. Note: this class helps with sorting versions to + * merge in a list collection. + */ + private class FeedToMerge { + public FeedVersion version; + public ZipFile zipFile; + + FeedToMerge(FeedVersion version) throws IOException { + this.version = version; + this.zipFile = new ZipFile(version.retrieveGtfsFile()); + } + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsResult.java b/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsResult.java new file mode 100644 index 000000000..e971e4fd2 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsResult.java @@ -0,0 +1,40 @@ +package com.conveyal.datatools.manager.jobs; + +import java.io.Serializable; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * Contains the result of {@link MergeFeedsJob}. + */ +public class MergeFeedsResult implements Serializable { + private static final long serialVersionUID = 1L; + + /** Number of feeds merged */ + public int feedCount; + public int errorCount; + /** Type of merge operation performed */ + public MergeFeedsType type; + /** Contains a set of strings for which there were error-causing duplicate values */ + public Set idConflicts = new HashSet<>(); + /** Contains the set of IDs for records that were excluded in the merged feed */ + public Set skippedIds = new HashSet<>(); + /** Contains the set of IDs that had their values remapped during the merge */ + public Map remappedIds = new HashMap<>(); + /** Mapping of table name to line count in merged file */ + public Map linesPerTable = new HashMap<>(); + public int remappedReferences; + public int recordsSkipCount; + public Date startTime; + public boolean failed; + /** Set of reasons explaining why merge operation failed */ + public Set failureReasons = new HashSet<>(); + + public MergeFeedsResult (MergeFeedsType type) { + this.type = type; + this.startTime = new Date(); + } +} diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsType.java b/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsType.java new file mode 100644 index 000000000..f827c7f96 --- /dev/null +++ b/src/main/java/com/conveyal/datatools/manager/jobs/MergeFeedsType.java @@ -0,0 +1,6 @@ +package com.conveyal.datatools.manager.jobs; + +public enum MergeFeedsType { + REGIONAL, + MTC +} diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/MergeProjectFeedsJob.java b/src/main/java/com/conveyal/datatools/manager/jobs/MergeProjectFeedsJob.java deleted file mode 100644 index 4cf329662..000000000 --- a/src/main/java/com/conveyal/datatools/manager/jobs/MergeProjectFeedsJob.java +++ /dev/null @@ -1,258 +0,0 @@ -package com.conveyal.datatools.manager.jobs; - -import com.conveyal.datatools.common.status.MonitorableJob; -import com.conveyal.datatools.common.utils.Consts; -import com.conveyal.datatools.manager.DataManager; -import com.conveyal.datatools.manager.models.FeedSource; -import com.conveyal.datatools.manager.models.FeedVersion; -import com.conveyal.datatools.manager.models.Project; -import com.conveyal.datatools.manager.persistence.FeedStore; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import org.slf4j.Logger; -import 
org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import java.util.zip.ZipEntry; -import java.util.zip.ZipFile; -import java.util.zip.ZipOutputStream; - -/** - * Created by landon on 9/19/17. - */ -public class MergeProjectFeedsJob extends MonitorableJob { - - private static final Logger LOG = LoggerFactory.getLogger(MergeProjectFeedsJob.class); - public final Project project; - - public MergeProjectFeedsJob(Project project, String owner) { - super(owner, "Merging project feeds for " + project.name, JobType.MERGE_PROJECT_FEEDS); - this.project = project; - status.message = "Merging feeds..."; - } - - @Override - public void jobLogic () throws IOException { - // get feed sources in project - Collection feeds = project.retrieveProjectFeedSources(); - - // create temp merged zip file to add feed content to - File mergedFile = null; - try { - mergedFile = File.createTempFile(project.id + "-merged", ".zip"); - mergedFile.deleteOnExit(); - } catch (IOException e) { - LOG.error("Could not create temp file"); - e.printStackTrace(); - throw e; - } - - // create the zipfile - ZipOutputStream out = new ZipOutputStream(new FileOutputStream(mergedFile)); - - LOG.info("Created project merge file: " + mergedFile.getAbsolutePath()); - - // map of feed versions to table entries contained within version's GTFS - Map feedSourceMap = new HashMap<>(); - - // collect zipFiles for each feedSource before merging tables - for (FeedSource fs : feeds) { - // check if feed source has version (use latest) - FeedVersion version = fs.retrieveLatest(); - if (version == null) { - LOG.info("Skipping {} because it has no feed versions", fs.name); - continue; - } - // modify feed version to use prepended feed id - LOG.info("Adding {} feed to merged zip", fs.name); - try { - File file = version.retrieveGtfsFile(); - if (file == null) { - LOG.error("No file exists for {}", version.id); - continue; - } - ZipFile zipFile = new ZipFile(file); - feedSourceMap.put(fs, zipFile); - } catch(Exception e) { - e.printStackTrace(); - LOG.error("Zipfile for version {} not found", version.id); - } - } - - // loop through GTFS tables - int numberOfTables = DataManager.gtfsConfig.size(); - for(int i = 0; i < numberOfTables; i++) { - JsonNode tableNode = DataManager.gtfsConfig.get(i); - byte[] tableOut = mergeTables(tableNode, feedSourceMap); - - // if at least one feed has the table, include it - if (tableOut != null) { - - String tableName = tableNode.get("name").asText(); - synchronized (status) { - status.message = "Merging " + tableName; - status.percentComplete = Math.round((double) i / numberOfTables * 10000d) / 100d; - } - // create entry for zip file - ZipEntry tableEntry = new ZipEntry(tableName); - try { - out.putNextEntry(tableEntry); - LOG.info("Writing {} to merged feed", tableName); - out.write(tableOut); - out.closeEntry(); - } catch (IOException e) { - LOG.error("Error writing to table {}", tableName); - e.printStackTrace(); - } - } - } - try { - out.close(); - } catch (IOException e) { - LOG.error("Error closing zip file"); - e.printStackTrace(); - } - synchronized (status) { 
- status.message = "Saving merged feed."; - status.percentComplete = 95.0; - } - // Store the project merged zip locally or on s3 - if (DataManager.useS3) { - String s3Key = "project/" + project.id + ".zip"; - FeedStore.s3Client.putObject(DataManager.feedBucket, s3Key, mergedFile); - LOG.info("Storing merged project feed at s3://{}/{}", DataManager.feedBucket, s3Key); - } else { - try { - FeedVersion.feedStore.newFeed(project.id + ".zip", new FileInputStream(mergedFile), null); - } catch (IOException e) { - LOG.error("Could not store feed for project {}", project.id); - e.printStackTrace(); - } - } - // delete temp file - mergedFile.delete(); - - synchronized (status) { - status.message = "Merged feed created successfully."; - status.completed = true; - status.percentComplete = 100.0; - } - } - - /** - * Merge the specified table for multiple GTFS feeds. - * @param tableNode tableNode to merge - * @param feedSourceMap map of feedSources to zipFiles from which to extract the .txt tables - * @return single merged table for feeds - */ - private static byte[] mergeTables(JsonNode tableNode, Map feedSourceMap) throws IOException { - - String tableName = tableNode.get("name").asText(); - ByteArrayOutputStream tableOut = new ByteArrayOutputStream(); - - ArrayNode fieldsNode = (ArrayNode) tableNode.get("fields"); - List headers = new ArrayList<>(); - for (int i = 0; i < fieldsNode.size(); i++) { - JsonNode fieldNode = fieldsNode.get(i); - String fieldName = fieldNode.get("name").asText(); - Boolean notInSpec = fieldNode.has("datatools") && fieldNode.get("datatools").asBoolean(); - if (notInSpec) { - fieldsNode.remove(i); - } - headers.add(fieldName); - } - - try { - // write headers to table - tableOut.write(String.join(",", headers).getBytes()); - tableOut.write("\n".getBytes()); - - // iterate over feed source to zipfile map - for ( Map.Entry mapEntry : feedSourceMap.entrySet()) { - FeedSource fs = mapEntry.getKey(); - ZipFile zipFile = mapEntry.getValue(); - final Enumeration entries = zipFile.entries(); - while (entries.hasMoreElements()) { - final ZipEntry entry = entries.nextElement(); - if(tableName.equals(entry.getName())) { - LOG.info("Adding {} table for {}", entry.getName(), fs.name); - - InputStream inputStream = zipFile.getInputStream(entry); - - BufferedReader in = new BufferedReader(new InputStreamReader(inputStream)); - String line = in.readLine(); - String[] fields = line.split(","); - - List fieldList = Arrays.asList(fields); - - - // iterate over rows in table - while((line = in.readLine()) != null) { - String[] newValues = new String[fieldsNode.size()]; - String[] values = line.split(Consts.COLUMN_SPLIT, -1); - if (values.length == 1) { - LOG.warn("Found blank line. 
Skipping..."); - continue; - } - for(int v = 0; v < fieldsNode.size(); v++) { - JsonNode fieldNode = fieldsNode.get(v); - String fieldName = fieldNode.get("name").asText(); - - // get index of field from GTFS spec as it appears in feed - int index = fieldList.indexOf(fieldName); - String val = ""; - try { - index = fieldList.indexOf(fieldName); - if(index != -1) { - val = values[index]; - } - } catch (ArrayIndexOutOfBoundsException e) { - LOG.warn("Index {} out of bounds for file {} and feed {}", index, entry.getName(), fs.name); - continue; - } - - String fieldType = fieldNode.get("inputType").asText(); - - // if field is a gtfs identifier, prepend with feed id/name - if (fieldType.contains("GTFS") && !val.isEmpty()) { - newValues[v] = fs.name + ":" + val; - } - else { - newValues[v] = val; - } - } - String newLine = String.join(",", newValues); - - // write line to table (plus new line char) - tableOut.write(newLine.getBytes()); - tableOut.write("\n".getBytes()); - } - } - } - } - } catch (IOException e) { - e.printStackTrace(); - LOG.error( - "Error merging feed sources: {}", - feedSourceMap.keySet().stream().map(fs -> fs.name).collect(Collectors.toList()).toString() - ); - throw e; - } - return tableOut.toByteArray(); - } -} diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/ProcessSingleFeedJob.java b/src/main/java/com/conveyal/datatools/manager/jobs/ProcessSingleFeedJob.java index 4c1a11182..a5ed1a062 100644 --- a/src/main/java/com/conveyal/datatools/manager/jobs/ProcessSingleFeedJob.java +++ b/src/main/java/com/conveyal/datatools/manager/jobs/ProcessSingleFeedJob.java @@ -50,7 +50,7 @@ public String getFeedSourceId () { public void jobLogic () { LOG.info("Processing feed for {}", feedVersion.id); - // First, load the feed into database. + // First, load the feed into database. During this stage, the GTFS file will be uploaded to S3 (and deleted locally). addNextJob(new LoadFeedJob(feedVersion, owner, isNewVersion)); // Next, validate the feed. diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/ValidateFeedJob.java b/src/main/java/com/conveyal/datatools/manager/jobs/ValidateFeedJob.java index 25d5091fd..a48876acd 100644 --- a/src/main/java/com/conveyal/datatools/manager/jobs/ValidateFeedJob.java +++ b/src/main/java/com/conveyal/datatools/manager/jobs/ValidateFeedJob.java @@ -4,6 +4,7 @@ import com.conveyal.datatools.common.utils.Scheduler; import com.conveyal.datatools.manager.models.FeedVersion; import com.conveyal.datatools.manager.persistence.Persistence; +import com.conveyal.gtfs.validator.ValidationResult; import com.fasterxml.jackson.annotation.JsonProperty; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -73,4 +74,14 @@ public String getFeedSourceId () { return feedVersion.parentFeedSource().id; } + /** + * Getter that returns the validationResult so that once the job finishes, the client can optionally provide + * directions to users based on the success of the validation or other validation data (e.g., "The feed you have + * loaded is only valid for future dates."). 
+ */ + @JsonProperty + public ValidationResult getValidationResult () { + return feedVersion.validationResult; + } + } diff --git a/src/main/java/com/conveyal/datatools/manager/models/Deployment.java b/src/main/java/com/conveyal/datatools/manager/models/Deployment.java index 312046628..94156e21f 100644 --- a/src/main/java/com/conveyal/datatools/manager/models/Deployment.java +++ b/src/main/java/com/conveyal/datatools/manager/models/Deployment.java @@ -448,7 +448,7 @@ public static class SummarizedFeedVersion { public int version; public SummarizedFeedVersion (FeedVersion version) { - this.validationResult = new FeedValidationResultSummary(version.validationResult, version.feedLoadResult); + this.validationResult = new FeedValidationResultSummary(version); this.feedSource = version.parentFeedSource(); this.updated = version.updated; this.id = version.id; diff --git a/src/main/java/com/conveyal/datatools/manager/models/FeedDownloadToken.java b/src/main/java/com/conveyal/datatools/manager/models/FeedDownloadToken.java index 23c9c12f1..3572f180f 100644 --- a/src/main/java/com/conveyal/datatools/manager/models/FeedDownloadToken.java +++ b/src/main/java/com/conveyal/datatools/manager/models/FeedDownloadToken.java @@ -1,10 +1,10 @@ package com.conveyal.datatools.manager.models; -import com.conveyal.datatools.manager.models.Snapshot; +import com.conveyal.datatools.common.status.MonitorableJob; import com.conveyal.datatools.manager.persistence.Persistence; import com.fasterxml.jackson.annotation.JsonProperty; -import org.mapdb.Fun; +import java.io.File; import java.util.Date; /** @@ -16,6 +16,8 @@ public class FeedDownloadToken extends Model { private static final long serialVersionUID = 1L; + public String jobId; + public String filePath; public String feedVersionId; public String snapshotId; @@ -23,6 +25,15 @@ public class FeedDownloadToken extends Model { public FeedDownloadToken () { } + /** Generic download token for file generated by a server job. */ + public FeedDownloadToken (MonitorableJob job) { + userId = job.owner; + jobId = job.jobId; + File file = job.retrieveFile(); + filePath = file != null ? file.getAbsolutePath() : null; + timestamp = new Date(); + } + public FeedDownloadToken (FeedVersion feedVersion) { feedVersionId = feedVersion.id; timestamp = new Date(); diff --git a/src/main/java/com/conveyal/datatools/manager/models/FeedSource.java b/src/main/java/com/conveyal/datatools/manager/models/FeedSource.java index 8eb2e8d68..fa18e3aae 100644 --- a/src/main/java/com/conveyal/datatools/manager/models/FeedSource.java +++ b/src/main/java/com/conveyal/datatools/manager/models/FeedSource.java @@ -9,7 +9,6 @@ import com.conveyal.datatools.manager.persistence.FeedStore; import com.conveyal.datatools.manager.persistence.Persistence; import com.conveyal.datatools.manager.utils.HashUtils; -import com.conveyal.gtfs.validator.ValidationResult; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; @@ -31,6 +30,7 @@ import java.util.Map; import static com.conveyal.datatools.manager.utils.StringUtils.getCleanName; +import static com.mongodb.client.model.Filters.and; import static com.mongodb.client.model.Filters.eq; /** @@ -111,6 +111,15 @@ public String organizationId () { */ public String snapshotVersion; + /** + * The SQL namespace for the most recently verified published {@link FeedVersion}. 
+ * + * FIXME During migration to RDBMS for GTFS data, this field changed to map to the SQL unique ID, + * however the name of the field suggests it maps to the feed version ID stored in MongoDB. Now + * that both published namespace/version ID are available in {@link #publishedValidationSummary()} + * it might make sense to migrate this field back to the versionID for MTC (or rename it to + * publishedNamespace). Both efforts would require some level of db migration + code changes. + */ public String publishedVersionId; public String editorNamespace; @@ -304,6 +313,30 @@ public FeedVersion retrieveLatest() { return newestVersion; } + /** + * Fetches the published {@link FeedVersion} for this feed source according to the + * {@link #publishedVersionId} field (which currently maps to {@link FeedVersion#namespace}. + */ + public FeedVersion retrievePublishedVersion() { + if (this.publishedVersionId == null) return null; + FeedVersion publishedVersion = Persistence.feedVersions + // Sort is unnecessary here. + .getOneFiltered(eq("namespace", this.publishedVersionId), Sorts.descending("version")); + if (publishedVersion == null) { + // Is this what happens if there are none? + return null; + } + return publishedVersion; + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonView(JsonViews.UserInterface.class) + @JsonProperty("publishedValidationSummary") + private FeedValidationResultSummary publishedValidationSummary() { + FeedVersion publishedVersion = retrievePublishedVersion(); + return publishedVersion != null ? new FeedValidationResultSummary(publishedVersion) : null; + } + @JsonInclude(JsonInclude.Include.NON_NULL) @JsonView(JsonViews.UserInterface.class) @JsonProperty("latestVersionId") @@ -332,8 +365,7 @@ public Date lastUpdated() { @JsonProperty("latestValidation") public FeedValidationResultSummary latestValidation() { FeedVersion latest = retrieveLatest(); - ValidationResult result = latest != null ? latest.validationResult : null; - return result != null ?new FeedValidationResultSummary(result, latest.feedLoadResult) : null; + return latest != null ? new FeedValidationResultSummary(latest) : null; } // TODO: figure out some way to indicate whether feed has been edited since last snapshot (i.e, there exist changes) @@ -360,10 +392,10 @@ public Map> externalProperties() { for(String resourceType : DataManager.feedResources.keySet()) { Map propTable = new HashMap<>(); - // FIXME: use mongo filters instead - Persistence.externalFeedSourceProperties.getAll().stream() - .filter(prop -> prop.feedSourceId.equals(this.id)) - .forEach(prop -> propTable.put(prop.name, prop.value)); + // Get all external properties for the feed source/resource type and fill prop table. 
+ Persistence.externalFeedSourceProperties + .getFiltered(and(eq("feedSourceId", this.id), eq("resourceType", resourceType))) + .forEach(prop -> propTable.put(prop.name, prop.value)); resourceTable.put(resourceType, propTable); } diff --git a/src/main/java/com/conveyal/datatools/manager/models/FeedValidationResultSummary.java b/src/main/java/com/conveyal/datatools/manager/models/FeedValidationResultSummary.java index a85fd2590..1862efa9f 100644 --- a/src/main/java/com/conveyal/datatools/manager/models/FeedValidationResultSummary.java +++ b/src/main/java/com/conveyal/datatools/manager/models/FeedValidationResultSummary.java @@ -15,7 +15,10 @@ */ public class FeedValidationResultSummary implements Serializable { private static final long serialVersionUID = 1L; - + // Include feed ID and namespace here so the client can trace back to the full feed version if this is nested under + // a feed source. + public String feedVersionId; + public String namespace; public LoadStatus loadStatus; @JsonInclude(Include.ALWAYS) @@ -45,27 +48,29 @@ public class FeedValidationResultSummary implements Serializable { /** * Construct a summarized version of the given FeedValidationResult. - * @param validationResult */ - public FeedValidationResultSummary (ValidationResult validationResult, FeedLoadResult feedLoadResult) { - if (validationResult != null) { - this.loadStatus = validationResult.fatalException == null + public FeedValidationResultSummary (FeedVersion version) { + this.feedVersionId = version.id; + this.namespace = version.namespace; + // If feed load failed (and is null), construct an empty result to avoid NPEs. + if (version.feedLoadResult == null) { + version.feedLoadResult = new FeedLoadResult(true); + } + if (version.validationResult != null) { + this.loadStatus = version.validationResult.fatalException == null ? 
LoadStatus.SUCCESS : LoadStatus.OTHER_FAILURE; - this.loadFailureReason = validationResult.fatalException; + this.loadFailureReason = version.validationResult.fatalException; if (loadStatus == LoadStatus.SUCCESS) { - if (feedLoadResult == null) { - feedLoadResult = new FeedLoadResult(true); - } - this.errorCount = validationResult.errorCount; - this.agencyCount = feedLoadResult.agency.rowCount; - this.routeCount = feedLoadResult.routes.rowCount; - this.stopCount = feedLoadResult.stops.rowCount; - this.tripCount = feedLoadResult.trips.rowCount; - this.stopTimesCount = feedLoadResult.stopTimes.rowCount; - this.startDate = validationResult.firstCalendarDate; - this.endDate = validationResult.lastCalendarDate; - this.bounds = boundsFromValidationResult(validationResult); + this.errorCount = version.validationResult.errorCount; + this.agencyCount = version.feedLoadResult.agency.rowCount; + this.routeCount = version.feedLoadResult.routes.rowCount; + this.stopCount = version.feedLoadResult.stops.rowCount; + this.tripCount = version.feedLoadResult.trips.rowCount; + this.stopTimesCount = version.feedLoadResult.stopTimes.rowCount; + this.startDate = version.validationResult.firstCalendarDate; + this.endDate = version.validationResult.lastCalendarDate; + this.bounds = boundsFromValidationResult(version.validationResult); // FIXME: compute avg revenue time // this.avgDailyRevenueTime = validationResult.avgDailyRevenueTime; } diff --git a/src/main/java/com/conveyal/datatools/manager/models/FeedVersion.java b/src/main/java/com/conveyal/datatools/manager/models/FeedVersion.java index f40711036..d4879056d 100644 --- a/src/main/java/com/conveyal/datatools/manager/models/FeedVersion.java +++ b/src/main/java/com/conveyal/datatools/manager/models/FeedVersion.java @@ -168,7 +168,7 @@ public Feed retrieveFeed() { @JsonView(JsonViews.UserInterface.class) @BsonProperty("validationSummary") public FeedValidationResultSummary validationSummary() { - return new FeedValidationResultSummary(validationResult, feedLoadResult); + return new FeedValidationResultSummary(this); } diff --git a/src/main/java/com/conveyal/datatools/manager/models/Model.java b/src/main/java/com/conveyal/datatools/manager/models/Model.java index d9e073c60..14c5a1b7f 100644 --- a/src/main/java/com/conveyal/datatools/manager/models/Model.java +++ b/src/main/java/com/conveyal/datatools/manager/models/Model.java @@ -14,6 +14,8 @@ import java.util.UUID; import com.conveyal.datatools.manager.auth.Auth0UserProfile; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.persistence.MappedSuperclass; @@ -25,6 +27,7 @@ @MappedSuperclass // applies mapping information to the subclassed entities FIXME remove? public abstract class Model implements Serializable { private static final long serialVersionUID = 1L; + private static final Logger LOG = LoggerFactory.getLogger(Model.class); public Model () { // This autogenerates an ID @@ -96,7 +99,17 @@ public void storeUser(Auth0UserProfile profile) { public void storeUser(String id) { userId = id; if (!Auth0Connection.authDisabled()) { - Auth0UserProfile profile = Auth0Users.getUserById(userId); + Auth0UserProfile profile = null; + // Try to fetch Auth0 user to store email address. This is surrounded by a try/catch because in the event of + // a failure we do not want to cause issues from this low-level operation. + try { + profile = Auth0Users.getUserById(userId); + } catch (Exception e) { + LOG.warn( + "Could not find user profile {} from Auth0. 
This may be due to testing conditions or simply a bad user ID.", + id); + e.printStackTrace(); + } userEmail = profile != null ? profile.getEmail() : null; } else { userEmail = "no_auth@conveyal.com"; diff --git a/src/main/java/com/conveyal/datatools/manager/models/Project.java b/src/main/java/com/conveyal/datatools/manager/models/Project.java index dd65d9850..e3b849db1 100644 --- a/src/main/java/com/conveyal/datatools/manager/models/Project.java +++ b/src/main/java/com/conveyal/datatools/manager/models/Project.java @@ -63,6 +63,12 @@ public OtpServer retrieveServer(String name) { // and/or for applying a geographic filter when syncing with external feed registries. public Bounds bounds; + // Identifies a specific "pinned" deployment for the project. This is used in datatools-ui in 2 places: + // 1. In the list of project deployments, a "pinned" deployment is shown first and highlighted. + // 2. In the project feed source table, if a "pinned" deployment exists, the status of the versions that were in + // the "pinned" deployment are shown and compared to the most recent version in the feed sources. + public String pinnedDeploymentId; + public Project() { this.buildConfig = new OtpBuildConfig(); this.routerConfig = new OtpRouterConfig(); diff --git a/src/main/java/com/conveyal/datatools/manager/persistence/TypedPersistence.java b/src/main/java/com/conveyal/datatools/manager/persistence/TypedPersistence.java index 71a426afc..2b1231322 100644 --- a/src/main/java/com/conveyal/datatools/manager/persistence/TypedPersistence.java +++ b/src/main/java/com/conveyal/datatools/manager/persistence/TypedPersistence.java @@ -166,6 +166,11 @@ public T getOneFiltered (Bson filter, Bson sortBy) { return mongoCollection.find(filter).first(); } + /** Convenience wrapper for #getOneFiltered that supplies null for sortBy arg. */ + public T getOneFiltered (Bson filter) { + return getOneFiltered(filter, null); + } + public boolean removeById (String id) { DeleteResult result = mongoCollection.deleteOne(eq(id)); if (result.getDeletedCount() == 1) { diff --git a/src/main/resources/gtfs/gtfs.yml b/src/main/resources/gtfs/gtfs.yml index 5d86bca60..acc45edf0 100644 --- a/src/main/resources/gtfs/gtfs.yml +++ b/src/main/resources/gtfs/gtfs.yml @@ -318,6 +318,27 @@ columnWidth: 12 helpContent: +- id: shape + name: shapes.txt + helpContent: Shapes describe the physical path that a vehicle takes, and are defined in the file shapes.txt. Shapes belong to Trips, and consist of a sequence of points. Tracing the points in order provides the path of the vehicle. The points do not need to match stop locations. + fields: + - name: shape_id + required: true + inputType: GTFS_ID + helpContent: The shape_id field contains an ID that uniquely identifies a shape. + - name: shape_pt_lat + required: true + inputType: LATITUDE + - name: shape_pt_lon + required: true + inputType: LONGITUDE + - name: shape_pt_sequence + required: true + inputType: POSITIVE_INT + - name: shape_dist_traveled + inputType: POSITIVE_NUM + required: false + - id: trip name: trips.txt helpContent: Trips for each route. A trip is a sequence of two or more stops that occurs at specific time. 
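A note on the Mongo filter helpers touched above: FeedSource#externalProperties now queries with TypedPersistence#getFiltered and a composed Bson filter, and TypedPersistence gains a single-argument getOneFiltered convenience wrapper. The short sketch below only illustrates how these helpers might be called from application code; the example class, its method names, the element type of the returned list, and the "MTC" resource type value are assumptions for illustration and are not code contained in this changeset.

import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;

import com.conveyal.datatools.manager.models.ExternalFeedSourceProperty;
import com.conveyal.datatools.manager.persistence.Persistence;

import java.util.List;

/** Hypothetical caller demonstrating the filter-based persistence helpers (not part of this PR). */
public class ExternalPropertyLookupExample {
    /** Fetch all properties stored for one feed source and resource type (e.g., "MTC"). */
    public static List<ExternalFeedSourceProperty> propertiesFor(String feedSourceId, String resourceType) {
        // getFiltered applies the Bson filter in MongoDB rather than streaming the whole collection.
        return Persistence.externalFeedSourceProperties.getFiltered(
            and(eq("feedSourceId", feedSourceId), eq("resourceType", resourceType))
        );
    }

    /** Fetch a single property by name using the new single-argument getOneFiltered wrapper. */
    public static ExternalFeedSourceProperty propertyNamed(String feedSourceId, String name) {
        return Persistence.externalFeedSourceProperties.getOneFiltered(
            and(eq("feedSourceId", feedSourceId), eq("name", name))
        );
    }
}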
diff --git a/src/test/java/com/conveyal/datatools/DatatoolsTest.java b/src/test/java/com/conveyal/datatools/DatatoolsTest.java index 51caf6afc..a62475e2c 100644 --- a/src/test/java/com/conveyal/datatools/DatatoolsTest.java +++ b/src/test/java/com/conveyal/datatools/DatatoolsTest.java @@ -1,11 +1,13 @@ package com.conveyal.datatools; import com.conveyal.datatools.manager.DataManager; -import org.junit.jupiter.api.BeforeAll; +import org.junit.BeforeClass; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; /** * Created by landon on 2/24/17. @@ -14,7 +16,7 @@ public abstract class DatatoolsTest { private static final Logger LOG = LoggerFactory.getLogger(DatatoolsTest.class); private static boolean setUpIsDone = false; - @BeforeAll + @BeforeClass public static void setUp() { if (setUpIsDone) { return; diff --git a/src/test/java/com/conveyal/datatools/LoadFeedTest.java b/src/test/java/com/conveyal/datatools/LoadFeedTest.java index 60e28aaad..e3878088b 100644 --- a/src/test/java/com/conveyal/datatools/LoadFeedTest.java +++ b/src/test/java/com/conveyal/datatools/LoadFeedTest.java @@ -2,7 +2,7 @@ import com.conveyal.datatools.manager.models.FeedSource; import com.conveyal.datatools.manager.models.FeedVersion; -import org.junit.jupiter.api.BeforeAll; +import org.junit.BeforeClass; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -14,7 +14,7 @@ public abstract class LoadFeedTest { public static FeedSource source; public static FeedVersion version; - @BeforeAll + @BeforeClass public void setUp() { DatatoolsTest.setUp(); LOG.info("ProcessGtfsSnapshotMergeTest setup"); diff --git a/src/test/java/com/conveyal/datatools/TestUtils.java b/src/test/java/com/conveyal/datatools/TestUtils.java new file mode 100644 index 000000000..cb73e9aa1 --- /dev/null +++ b/src/test/java/com/conveyal/datatools/TestUtils.java @@ -0,0 +1,85 @@ +package com.conveyal.datatools; + +import com.conveyal.datatools.manager.jobs.ProcessSingleFeedJob; +import com.conveyal.datatools.manager.models.FeedSource; +import com.conveyal.datatools.manager.models.FeedVersion; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.stream.Collectors; + +import static com.conveyal.datatools.manager.DataManager.GTFS_DATA_SOURCE; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertThat; + +public class TestUtils { + + private static final Logger LOG = LoggerFactory.getLogger(TestUtils.class); + + /** + * Parse a json string into an unmapped JsonNode object + */ + public static JsonNode parseJson(String jsonString) throws IOException { + ObjectMapper mapper = new ObjectMapper(); + return mapper.readTree(jsonString); + } + + /** + * Utility function to create a feed version during tests. Note: this is intended to run the job in the same thread, + * so that tasks can run synchronously. + */ + public static FeedVersion createFeedVersion(FeedSource source, String gtfsFileName) { + File gtfsFile = new File(TestUtils.class.getResource(gtfsFileName).getFile()); + return createFeedVersion(source, gtfsFile); + } + + /** + * Utility function to create a feed version during tests. 
Note: this is intended to run the job in the same thread, + * so that tasks can run synchronously. + */ + public static FeedVersion createFeedVersion(FeedSource source, File gtfsFile) { + FeedVersion version = new FeedVersion(source); + InputStream is; + try { + is = new FileInputStream(gtfsFile); + version.newGtfsFile(is); + } catch (IOException e) { + e.printStackTrace(); + } + ProcessSingleFeedJob processSingleFeedJob = new ProcessSingleFeedJob(version, "test", true); + // Run in same thread to keep things synchronous. + processSingleFeedJob.run(); + return version; + } + + public static void assertThatSqlQueryYieldsRowCount(String sql, int expectedRowCount) throws + SQLException { + LOG.info(sql); + int recordCount = 0; + ResultSet rs = GTFS_DATA_SOURCE.getConnection().prepareStatement(sql).executeQuery(); + while (rs.next()) recordCount++; + assertThat("Records matching query should equal expected count.", recordCount, equalTo(expectedRowCount)); + } + + public static void assertThatFeedHasNoErrorsOfType (String namespace, String... errorTypes) throws SQLException { + assertThatSqlQueryYieldsRowCount( + String.format( + "select * from %s.errors where error_type in (%s)", + namespace, + Arrays.stream(errorTypes) + .map(error -> String.format("'%s'", error)) + .collect(Collectors.joining(",")) + ), + 0 + ); + } +} diff --git a/src/test/java/com/conveyal/datatools/manager/controllers/api/AppInfoControllerTest.java b/src/test/java/com/conveyal/datatools/manager/controllers/api/AppInfoControllerTest.java index 6e9231290..cfbfa3a70 100644 --- a/src/test/java/com/conveyal/datatools/manager/controllers/api/AppInfoControllerTest.java +++ b/src/test/java/com/conveyal/datatools/manager/controllers/api/AppInfoControllerTest.java @@ -1,10 +1,11 @@ package com.conveyal.datatools.manager.controllers.api; import com.conveyal.datatools.DatatoolsTest; +import com.conveyal.datatools.manager.DataManager; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; +import org.junit.BeforeClass; +import org.junit.Test; import java.io.IOException; @@ -16,7 +17,7 @@ public class AppInfoControllerTest { /** * Prepare and start a testing-specific web server */ - @BeforeAll + @BeforeClass public static void setUp() { // start server if it isn't already running DatatoolsTest.setUp(); @@ -26,9 +27,9 @@ public static void setUp() { * Make sure the app info endpoint can load and return expected data. 
*/ @Test - public void canReturnApprInfo() throws IOException { + public void canReturnAppInfo() throws IOException { String jsonString = given() - .port(4000) + .port(DataManager.PORT) .get("/api/manager/public/appinfo") .then() // make sure the repoUrl matches what is found in the pom.xml diff --git a/src/test/java/com/conveyal/datatools/manager/controllers/api/UserControllerTest.java b/src/test/java/com/conveyal/datatools/manager/controllers/api/UserControllerTest.java new file mode 100644 index 000000000..f57b5a4b0 --- /dev/null +++ b/src/test/java/com/conveyal/datatools/manager/controllers/api/UserControllerTest.java @@ -0,0 +1,267 @@ +package com.conveyal.datatools.manager.controllers.api; + +import com.conveyal.datatools.DatatoolsTest; +import com.conveyal.datatools.manager.DataManager; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.github.tomakehurst.wiremock.junit.WireMockRule; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; + +import java.io.IOException; + +import static com.conveyal.datatools.TestUtils.parseJson; +import static com.conveyal.datatools.manager.controllers.api.UserController.TEST_AUTH0_DOMAIN; +import static com.conveyal.datatools.manager.controllers.api.UserController.TEST_AUTH0_PORT; +import static com.conveyal.datatools.manager.controllers.api.UserController.USERS_PATH; +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.delete; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.matchingJsonPath; +import static com.github.tomakehurst.wiremock.client.WireMock.patch; +import static com.github.tomakehurst.wiremock.client.WireMock.post; +import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; +import static com.zenika.snapshotmatcher.SnapshotMatcher.matchesSnapshot; +import static io.restassured.RestAssured.given; +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * These tests verify that various Auth0 API calls behave as expected. The Auth0 server is mocked in order to return + * certain responses needed to verify functionality. + */ +public class UserControllerTest { + private String emailForExistingAccount = "test-existing-user@test.com"; + private ObjectMapper mapper = new ObjectMapper(); + + /** + * This sets up a mock server that accepts requests and sends predefined responses to mock an Auth0 server. + */ + @Rule + public WireMockRule wireMockRule = new WireMockRule( + options() + .port(TEST_AUTH0_PORT) + .usingFilesUnderDirectory("src/test/resources/com/conveyal/datatools/auth0-mock-responses/") + ); + + /** + * Prepare and start a testing-specific web server + */ + @BeforeClass + public static void setUp() { + // start server if it isn't already running + DatatoolsTest.setUp(); + // Set users URL to test domain used by wiremock. + UserController.setBaseUsersUrl("http://" + TEST_AUTH0_DOMAIN + USERS_PATH); + } + + /** + * Reset some Auth0 stuff to non-testing values. 
+ */ + @AfterClass + public static void tearDown() { + UserController.setBaseUsersUrl(UserController.DEFAULT_BASE_USERS_URL); + } + + /** + * Make sure the user endpoint can return a list of users + */ + @Test + public void canListFirstTenUsers() throws IOException { + // create wiremock stub for get users endpoint + stubFor( + get(urlPathEqualTo("/api/v2/users")) + .withQueryParam("page", equalTo("1")) + .willReturn( + aResponse() + .withBodyFile("getFirstTenUsersResponse.json") + ) + ); + + + // make request and parse the json response + JsonNode userResponse = parseJson( + given() + .port(4000) + .get("/api/manager/secure/user?page=1") + .then() + .extract() + .response() + .asString() + ); + + // make sure the response matches the saved snapshot + assertThat(userResponse, matchesSnapshot()); + } + + /** + * Make sure a user can be created + */ + @Test + public void canCreateUser() throws IOException { + String newUserEmail = "test-new-user@test.com"; + + // create wiremock stub for create users endpoint + stubFor( + post(urlPathEqualTo("/api/v2/users")) + .withRequestBody(matchingJsonPath("$.email", equalTo(newUserEmail))) + .willReturn( + aResponse() + .withBodyFile("createNewUserResponse.json") + ) + ); + + ObjectNode requestJson = getBaseUserObject(); + requestJson.put("email", newUserEmail); + + // make request and parse the json response + JsonNode createUserResponse = parseJson( + given() + .port(4000) + .body(requestJson) + .post("/api/manager/secure/user") + .then() + .extract() + .response() + .asString() + ); + + // make sure the response matches the saved snapshot + assertThat(createUserResponse, matchesSnapshot()); + } + + /** + * Make sure a meaningful Auth0 error can be returned when a duplicate user is being created + */ + @Test + public void canReturnMeaningfulAuth0Error() throws IOException { + // create wiremock stub for create users endpoint that responds with a message saying a user with the email + // already exists + stubFor( + post(urlPathEqualTo(USERS_PATH)) + .withRequestBody(matchingJsonPath("$.email", equalTo(emailForExistingAccount))) + .willReturn( + aResponse() + .withStatus(409) + .withBodyFile("createExistingUserResponse.json") + ) + ); + + // make request and parse the json response + JsonNode createUserResponse = parseJson( + given() + .port(DataManager.PORT) + .body(getBaseUserObject()) + .post(DataManager.API_PREFIX + "secure/user") + .then() + .extract() + .response() + .asString() + ); + + // make sure the response matches the saved snapshot + assertThat(createUserResponse, matchesSnapshot()); + } + + /** + * Make sure a user can be updated + */ + @Test + public void canUpdateUser() throws IOException { + // create wiremock stub for update users endpoint + stubFor( + patch(urlPathEqualTo("/api/v2/users/auth0%7Ctest-existing-user")) + .withRequestBody( + matchingJsonPath( + "$.app_metadata.datatools[0].permissions[0].type", + equalTo("administer-application") + ) + ) + .willReturn( + aResponse() + .withBodyFile("updateExistingUserResponse.json") + ) + ); + + // create wiremock stub for get user by id endpoint + stubFor( + get(urlPathEqualTo("/api/v2/users/auth0%7Ctest-existing-user")) + .willReturn( + aResponse() + .withBodyFile("getExistingUserResponse.json") + ) + ); + + ObjectNode requestJson = mapper.createObjectNode(); + requestJson.put("email", emailForExistingAccount); + + ObjectNode testClientPermissions = mapper.createObjectNode(); + testClientPermissions.put("type", "administer-application"); + + ObjectNode testClientData = 
mapper.createObjectNode();
+        testClientData.putArray("permissions").add(testClientPermissions);
+        testClientData.putArray("projects");
+        testClientData.putArray("organizations");
+        testClientData.put("client_id", "testing-client-id");
+
+        requestJson.putArray("data").add(testClientData);
+
+        // make request and parse the json response
+        JsonNode createUserResponse = parseJson(
+            given()
+                .port(4000)
+                .body(requestJson)
+                .put("/api/manager/secure/user/auth0|test-existing-user")
+                .then()
+                .extract()
+                .response()
+                .asString()
+        );
+
+        // make sure the response matches the saved snapshot
+        assertThat(createUserResponse, matchesSnapshot());
+    }
+
+    /**
+     * Make sure a user can be deleted
+     */
+    @Test
+    public void canDeleteUser() throws IOException {
+        // create wiremock stub for the delete users endpoint
+        stubFor(
+            delete(urlPathEqualTo("/api/v2/users/auth0%7Ctest-existing-user"))
+                .willReturn(aResponse())
+        );
+
+
+        // make request and parse the json response
+        JsonNode deleteUserResponse = parseJson(
+            given()
+                .port(4000)
+                .delete("/api/manager/secure/user/auth0|test-existing-user")
+                .then()
+                .extract()
+                .response()
+                .asString()
+        );
+
+        // make sure the response matches the saved snapshot
+        assertThat(deleteUserResponse, matchesSnapshot());
+    }
+
+    /**
+     * Create a request body of a user that the above stub will recognize as an existing user
+     */
+    private ObjectNode getBaseUserObject() {
+        ObjectNode requestJson = mapper.createObjectNode();
+        requestJson.put("email", emailForExistingAccount);
+        requestJson.put("password", "password");
+        requestJson.putObject("permissions");
+        return requestJson;
+    }
+}
diff --git a/src/test/java/com/conveyal/datatools/manager/gtfsplus/GtfsPlusValidationTest.java b/src/test/java/com/conveyal/datatools/manager/gtfsplus/GtfsPlusValidationTest.java
new file mode 100644
index 000000000..b97c61a71
--- /dev/null
+++ b/src/test/java/com/conveyal/datatools/manager/gtfsplus/GtfsPlusValidationTest.java
@@ -0,0 +1,52 @@
+package com.conveyal.datatools.manager.gtfsplus;
+
+import com.conveyal.datatools.DatatoolsTest;
+import com.conveyal.datatools.manager.jobs.MergeFeedsJobTest;
+import com.conveyal.datatools.manager.models.FeedSource;
+import com.conveyal.datatools.manager.models.FeedVersion;
+import com.conveyal.datatools.manager.models.Project;
+import com.conveyal.datatools.manager.persistence.Persistence;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.List;
+
+import static com.conveyal.datatools.TestUtils.createFeedVersion;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/** Runs tests to verify that GTFS+ validation runs as expected. */
+public class GtfsPlusValidationTest {
+    private static final Logger LOG = LoggerFactory.getLogger(GtfsPlusValidationTest.class);
+    private static FeedVersion bartVersion1;
+    private static Project project;
+
+    /**
+     * Create feed version for GTFS+ validation test.
+     */
+    @BeforeClass
+    public static void setUp() {
+        // start server if it isn't already running
+        DatatoolsTest.setUp();
+        // Create a project, feed source, and feed version for the GTFS+ validation test.
+ project = new Project(); + project.name = String.format("Test %s", new Date().toString()); + Persistence.projects.create(project); + FeedSource bart = new FeedSource("BART"); + bart.projectId = project.id; + Persistence.feedSources.create(bart); + bartVersion1 = createFeedVersion(bart, "bart_new.zip"); + } + + @Test + public void canValidateCleanGtfsPlus() throws IOException { + LOG.info("Validation BART GTFS+"); + List issues = GtfsPlusValidation.validateGtfsPlus(bartVersion1.id); + // Expect issues to be zero. + assertThat("Issues count for clean BART feed is zero", issues.size(), equalTo(0)); + } +} diff --git a/src/test/java/com/conveyal/datatools/manager/jobs/GisExportJobTest.java b/src/test/java/com/conveyal/datatools/manager/jobs/GisExportJobTest.java new file mode 100644 index 000000000..aefbf51d2 --- /dev/null +++ b/src/test/java/com/conveyal/datatools/manager/jobs/GisExportJobTest.java @@ -0,0 +1,316 @@ +package com.conveyal.datatools.manager.jobs; + +import com.conveyal.datatools.DatatoolsTest; +import com.conveyal.datatools.manager.DataManager; +import com.conveyal.datatools.manager.models.FeedSource; +import com.conveyal.datatools.manager.models.FeedVersion; +import com.conveyal.datatools.manager.models.Project; +import com.conveyal.datatools.manager.persistence.Persistence; +import com.google.common.io.Files; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.MultiLineString; +import com.vividsolutions.jts.geom.Point; +import org.apache.commons.io.FileUtils; +import org.geotools.data.DataStore; +import org.geotools.data.DataStoreFinder; +import org.geotools.data.FeatureSource; +import org.geotools.feature.FeatureCollection; +import org.geotools.feature.FeatureIterator; +import org.geotools.referencing.CRS; +import org.junit.BeforeClass; +import org.junit.Test; +import org.opengis.feature.Feature; +import org.opengis.feature.Property; +import org.opengis.referencing.FactoryException; +import org.opengis.referencing.crs.CoordinateReferenceSystem; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +import static com.conveyal.datatools.TestUtils.createFeedVersion; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.notNullValue; + +public class GisExportJobTest { + private static final Logger LOG = LoggerFactory.getLogger(GisExportJobTest.class); + private static Project project; + private static FeedVersion calTrainVersion; + private static FeedVersion hawaiiVersion; + + // Bounding box for Caltrain is approximately: + private static final double CALTRAIN_WEST = -122.5918; + private static final double CALTRAIN_EAST = -121.5523; + private static final double CALTRAIN_NORTH = 37.8499; + private static final double CALTRAIN_SOUTH = 37.002; + + @BeforeClass + public static void setUp() { + DatatoolsTest.setUp(); + LOG.info("{} setup", GisExportJobTest.class.getSimpleName()); + + // Create a project, feed sources, and feed 
versions to merge. + project = new Project(); + project.name = String.format("Test %s", new Date().toString()); + Persistence.projects.create(project); + FeedSource caltrain = new FeedSource("Caltrain"); + Persistence.feedSources.create(caltrain); + calTrainVersion = createFeedVersion(caltrain, "caltrain_gtfs.zip"); + FeedSource hawaii = new FeedSource("Hawaii"); + Persistence.feedSources.create(hawaii); + hawaiiVersion = createFeedVersion(hawaii, "hawaii_fake_no_shapes.zip"); + } + + /** + * Ensures that a shapefile containing stop features for a feed version can be exported and + * contains geometry for each stop. + */ + @Test + public void canExportStops () throws IOException { + // Run the GIS export job for stops. + File zipFile = File.createTempFile("stops", ".zip"); + Set ids = new HashSet<>(); + ids.add(calTrainVersion.id); + GisExportJob gisExportJob = new GisExportJob(GisExportJob.ExportType.STOPS, zipFile, ids, "test"); + gisExportJob.run(); + assertThat(gisExportJob.status.error, equalTo(false)); + File[] files = getFilesFromZippedShapefile(zipFile); + FeatureCollection collection = getFeatureCollectionFromZippedShapefile(files); + assertCrsIsNotNull(files); + // Iterate over features. + int featureCount = 0; + try (FeatureIterator iterator = collection.features()) { + while (iterator.hasNext()) { + featureCount++; + Feature feature = iterator.next(); + // GeometryAttribute sourceGeometry = feature.getDefaultGeometryProperty(); + Collection properties = feature.getProperties(); + // Iterate over feature properties and verify everything looks OK. + for (Property property : properties) { + String name = property.getName().toString(); + Object value = property.getValue(); + LOG.info("{}: {}", name, value); + if ("the_geom".equals(name)) { + // Check that the geometry was exported properly. + Point point = (Point) value; + Coordinate coordinate = point.getCoordinate(); + // Check that the geometry was exported properly. + assertThat(point, notNullValue()); + // Check that coordinates are in the right spot. + assertThat(coordinate.x, greaterThan(CALTRAIN_WEST)); + assertThat(coordinate.x, lessThan(CALTRAIN_EAST)); + assertThat(coordinate.y, greaterThan(CALTRAIN_SOUTH)); + assertThat(coordinate.y, lessThan(CALTRAIN_NORTH)); + } + } + } + } + // Ensure that all stops from feed version are present in shapefile. + assertThat(featureCount, equalTo(calTrainVersion.feedLoadResult.stops.rowCount)); + } + + /** Get CRS from unzipped shapefile set of files and ensure it's not null. */ + private void assertCrsIsNotNull(File[] files) throws IOException { + CoordinateReferenceSystem crs = getCRSFromShapefiles(files); + assertThat("Coordinate reference system is not null.", crs, notNullValue()); + } + + /** + * Ensures that a shapefile containing route (pattern) features for a feed version can be + * exported and contains geometry for each pattern. + */ + @Test + public void canExportRoutes () throws IOException, SQLException { + // Run the GIS export job for stops. + File zipFile = File.createTempFile("routes", ".zip"); + Set ids = new HashSet<>(); + ids.add(calTrainVersion.id); + GisExportJob gisExportJob = new GisExportJob(GisExportJob.ExportType.ROUTES, zipFile, ids, "test"); + gisExportJob.run(); + assertThat(gisExportJob.status.error, equalTo(false)); + File[] files = getFilesFromZippedShapefile(zipFile); + FeatureCollection collection = getFeatureCollectionFromZippedShapefile(files); + assertCrsIsNotNull(files); + // Iterate over features. 
+ int featureCount = 0; + try (FeatureIterator iterator = collection.features()) { + while (iterator.hasNext()) { + featureCount++; + Feature feature = iterator.next(); + // GeometryAttribute sourceGeometry = feature.getDefaultGeometryProperty(); + Collection properties = feature.getProperties(); + // Iterate over feature properties and verify everything looks OK. + for (Property property : properties) { + String name = property.getName().toString(); + Object value = property.getValue(); + LOG.info("{}: {}", name, value); + if ("the_geom".equals(name)) { + MultiLineString shape = (MultiLineString) value; + // Check that the geometry was exported properly. + assertThat(shape, notNullValue()); + Coordinate[] coordinates = shape.getCoordinates(); + // Check that shape has coordinates and the values are (generally) in the + // right place. + assertThat(coordinates.length, greaterThan(0)); + for (Coordinate coordinate : coordinates) { + assertThat(coordinate.x, greaterThan(CALTRAIN_WEST)); + assertThat(coordinate.x, lessThan(CALTRAIN_EAST)); + assertThat(coordinate.y, greaterThan(CALTRAIN_SOUTH)); + assertThat(coordinate.y, lessThan(CALTRAIN_NORTH)); + } + } + } + } + } + PreparedStatement preparedStatement = DataManager.GTFS_DATA_SOURCE.getConnection() + .prepareStatement( + String.format("select count(*) from %s" + ".patterns", calTrainVersion.namespace)); + ResultSet resultSet = preparedStatement.executeQuery(); + int patternCount = 0; + while (resultSet.next()) { + patternCount = resultSet.getInt(1); + } + // Check that feature count = pattern count from SQL query. + assertThat(featureCount, equalTo(patternCount)); + } + + /** + * Verifies that a route shapefile can be generated from its constituent pattern stops. + */ + @Test + public void canExportRoutesFromPatternStops() throws IOException, SQLException { + // Run the GIS export job for stops. + File zipFile = File.createTempFile("routes", ".zip"); + Set ids = new HashSet<>(); + ids.add(hawaiiVersion.id); + GisExportJob gisExportJob = new GisExportJob(GisExportJob.ExportType.ROUTES, zipFile, ids, "test"); + gisExportJob.run(); + assertThat(gisExportJob.status.error, equalTo(false)); + File[] files = getFilesFromZippedShapefile(zipFile); + FeatureCollection collection = getFeatureCollectionFromZippedShapefile(files); + assertCrsIsNotNull(files); + // Iterate over features. + int featureCount = 0; + try (FeatureIterator iterator = collection.features()) { + while (iterator.hasNext()) { + featureCount++; + Feature feature = iterator.next(); + // GeometryAttribute sourceGeometry = feature.getDefaultGeometryProperty(); + Collection properties = feature.getProperties(); + // Iterate over feature properties and verify everything looks OK. + for (Property property : properties) { + String name = property.getName().toString(); + Object value = property.getValue(); + LOG.info("{}: {}", name, value); + if ("the_geom".equals(name)) { + MultiLineString shape = (MultiLineString) value; + // Check that the geometry was exported properly. + assertThat(shape, notNullValue()); + // Fake Hawaii feed has only 5 stops and each is used in the single pattern + // shape, so we expect the coordinates length to be equal to stops row count. 
+ assertThat( + shape.getCoordinates().length, + equalTo(hawaiiVersion.feedLoadResult.stops.rowCount) + ); + } + } + } + } + PreparedStatement preparedStatement = DataManager.GTFS_DATA_SOURCE.getConnection() + .prepareStatement( + String.format("select count(*) from %s" + ".patterns", hawaiiVersion.namespace)); + ResultSet resultSet = preparedStatement.executeQuery(); + int patternCount = 0; + while (resultSet.next()) { + patternCount = resultSet.getInt(1); + } + // Check that feature count = pattern count from SQL query. + assertThat(featureCount, equalTo(patternCount)); + } + + /** Unzip the shapefile into a temp directory and return a list of its files. */ + private File[] getFilesFromZippedShapefile(File zipFile) throws IOException { + File destDir = Files.createTempDir(); + byte[] buffer = new byte[1024]; + ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile)); + ZipEntry zipEntry = zis.getNextEntry(); + while (zipEntry != null) { + File newFile = new File(destDir, zipEntry.getName()); + FileOutputStream fos = new FileOutputStream(newFile); + int len; + while ((len = zis.read(buffer)) > 0) { + fos.write(buffer, 0, len); + } + fos.close(); + zipEntry = zis.getNextEntry(); + } + zis.closeEntry(); + zis.close(); + return destDir.listFiles(); + } + + /** + * Get the coordinate reference system (contained in the .prj file) from a set of files representing an unzipped + * shapefile. + */ + private CoordinateReferenceSystem getCRSFromShapefiles(File[] files) throws IOException { + for (File file : files) { + if (file.getName().endsWith(".prj")) { + LOG.info("Found projection entry: {}", file.getAbsolutePath()); + String wkt = FileUtils.readFileToString(file, "UTF-8"); + try { + CoordinateReferenceSystem crs = CRS.parseWKT(wkt); + LOG.info("CRS is {}", crs.getCoordinateSystem().toString()); + return crs; + } catch (FactoryException e) { + e.printStackTrace(); + } + } + } + return null; + } + + /** + * Utility method to extract a {@link FeatureCollection} from a zipped shapefile during tests. This also asserts + * that the projection file included in the shapefile (.prj) contains a valid/parseable coordinate reference system. + */ + private FeatureCollection getFeatureCollectionFromZippedShapefile(File[] files) throws IOException { + for (File file : files) { + // Find the shapefile and return its features + if (file.getName().endsWith(".shp")) { + LOG.info("Found shapefile entry: {}", file.getAbsolutePath()); + try { + Map connect = new HashMap<>(); + connect.put("url", file.toURI().toString()); + DataStore dataStore = DataStoreFinder.getDataStore(connect); + String[] typeNames = dataStore.getTypeNames(); + String typeName = typeNames[0]; + LOG.info("Reading content " + typeName); + // Create feature collection from data. 
+ FeatureSource featureSource = dataStore.getFeatureSource(typeName); + return featureSource.getFeatures(); + } catch (Throwable e) { + e.printStackTrace(); + } + } + } + return null; + } +} diff --git a/src/test/java/com/conveyal/datatools/manager/jobs/MergeFeedsJobTest.java b/src/test/java/com/conveyal/datatools/manager/jobs/MergeFeedsJobTest.java new file mode 100644 index 000000000..64b5a4ae6 --- /dev/null +++ b/src/test/java/com/conveyal/datatools/manager/jobs/MergeFeedsJobTest.java @@ -0,0 +1,183 @@ +package com.conveyal.datatools.manager.jobs; + +import com.conveyal.datatools.DatatoolsTest; +import com.conveyal.datatools.TestUtils; +import com.conveyal.datatools.manager.models.FeedSource; +import com.conveyal.datatools.manager.models.FeedVersion; +import com.conveyal.datatools.manager.models.Project; +import com.conveyal.datatools.manager.persistence.Persistence; +import com.conveyal.gtfs.error.NewGTFSErrorType; +import org.junit.BeforeClass; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.sql.SQLException; +import java.util.Date; +import java.util.HashSet; +import java.util.Set; + +import static com.conveyal.datatools.TestUtils.createFeedVersion; +import static org.junit.Assert.assertEquals; + +/** + * Tests for the various {@link MergeFeedsJob} merge types. + */ +public class MergeFeedsJobTest { + private static final Logger LOG = LoggerFactory.getLogger(MergeFeedsJobTest.class); + private static FeedVersion bartVersion1; + private static FeedVersion bartVersion2; + private static FeedVersion calTrainVersion; + private static Project project; + private static FeedVersion napaVersion; + + /** + * Prepare and start a testing-specific web server + */ + @BeforeClass + public static void setUp() { + // start server if it isn't already running + DatatoolsTest.setUp(); + // Create a project, feed sources, and feed versions to merge. + project = new Project(); + project.name = String.format("Test %s", new Date().toString()); + Persistence.projects.create(project); + FeedSource bart = new FeedSource("BART"); + bart.projectId = project.id; + Persistence.feedSources.create(bart); + bartVersion1 = createFeedVersion(bart, "bart_old.zip"); + bartVersion2 = createFeedVersion(bart, "bart_new.zip"); + FeedSource caltrain = new FeedSource("Caltrain"); + caltrain.projectId = project.id; + Persistence.feedSources.create(caltrain); + calTrainVersion = createFeedVersion(caltrain, "caltrain_gtfs.zip"); + FeedSource napa = new FeedSource("Napa"); + napa.projectId = project.id; + Persistence.feedSources.create(napa); + napaVersion = createFeedVersion(napa, "napa-no-agency-id.zip"); + } + + /** + * Ensures that a regional feed merge will produce a feed that includes all entities from each feed. + */ + @Test + public void canMergeRegional() throws SQLException { + // Set up list of feed versions to merge. + Set versions = new HashSet<>(); + versions.add(bartVersion1); + versions.add(calTrainVersion); + versions.add(napaVersion); + MergeFeedsJob mergeFeedsJob = new MergeFeedsJob("test", versions, project.id, MergeFeedsType.REGIONAL); + // Run the job in this thread (we're not concerned about concurrency here). + mergeFeedsJob.run(); + // Create a new feed source/version for the merged feed, so we can easily analyze its contents. 
+ FeedSource source = new FeedSource("Merged feed"); + source.projectId = project.id; + Persistence.feedSources.create(source); + File feed = FeedVersion.feedStore.getFeed(project.id + ".zip"); + LOG.info("Regional merged file: {}", feed.getAbsolutePath()); + FeedVersion mergedVersion = createFeedVersion(source, feed); + // Ensure the feed has the row counts we expect. + assertEquals( + "trips count for merged feed should equal sum of trips for versions merged.", + bartVersion1.feedLoadResult.trips.rowCount + calTrainVersion.feedLoadResult.trips.rowCount + napaVersion.feedLoadResult.trips.rowCount, + mergedVersion.feedLoadResult.trips.rowCount + ); + assertEquals( + "routes count for merged feed should equal sum of routes for versions merged.", + bartVersion1.feedLoadResult.routes.rowCount + calTrainVersion.feedLoadResult.routes.rowCount + napaVersion.feedLoadResult.routes.rowCount, + mergedVersion.feedLoadResult.routes.rowCount + ); + assertEquals( + "stops count for merged feed should equal sum of stops for versions merged.", + mergedVersion.feedLoadResult.stops.rowCount, + bartVersion1.feedLoadResult.stops.rowCount + calTrainVersion.feedLoadResult.stops.rowCount + napaVersion.feedLoadResult.stops.rowCount + ); + assertEquals( + "agency count for merged feed should equal sum of agency for versions merged.", + mergedVersion.feedLoadResult.agency.rowCount, + bartVersion1.feedLoadResult.agency.rowCount + calTrainVersion.feedLoadResult.agency.rowCount + napaVersion.feedLoadResult.agency.rowCount + ); + assertEquals( + "stopTimes count for merged feed should equal sum of stopTimes for versions merged.", + mergedVersion.feedLoadResult.stopTimes.rowCount, + bartVersion1.feedLoadResult.stopTimes.rowCount + calTrainVersion.feedLoadResult.stopTimes.rowCount + napaVersion.feedLoadResult.stopTimes.rowCount + ); + assertEquals( + "calendar count for merged feed should equal sum of calendar for versions merged.", + mergedVersion.feedLoadResult.calendar.rowCount, + bartVersion1.feedLoadResult.calendar.rowCount + calTrainVersion.feedLoadResult.calendar.rowCount + napaVersion.feedLoadResult.calendar.rowCount + ); + assertEquals( + "calendarDates count for merged feed should equal sum of calendarDates for versions merged.", + mergedVersion.feedLoadResult.calendarDates.rowCount, + bartVersion1.feedLoadResult.calendarDates.rowCount + calTrainVersion.feedLoadResult.calendarDates.rowCount + napaVersion.feedLoadResult.calendarDates.rowCount + ); + // Ensure there are no referential integrity errors, duplicate ID, or wrong number of + // fields errors. + TestUtils.assertThatFeedHasNoErrorsOfType( + mergedVersion.namespace, + NewGTFSErrorType.REFERENTIAL_INTEGRITY.toString(), + NewGTFSErrorType.DUPLICATE_ID.toString(), + NewGTFSErrorType.WRONG_NUMBER_OF_FIELDS.toString() + ); + } + + /** + * Ensures that an MTC merge of feeds with duplicate trip IDs will fail. + */ + @Test + public void mergeMTCShouldFailOnDuplicateTrip() { + Set versions = new HashSet<>(); + versions.add(bartVersion1); + versions.add(bartVersion2); + MergeFeedsJob mergeFeedsJob = new MergeFeedsJob("test", versions, "merged_output", MergeFeedsType.MTC); + // Run the job in this thread (we're not concerned about concurrency here). + mergeFeedsJob.run(); + // Result should fail. + assertEquals( + "Merge feeds job should fail due to duplicate trip IDs.", + true, + mergeFeedsJob.mergeFeedsResult.failed + ); + } + + /** + * Tests that the MTC merge strategy will successfully merge BART feeds. 
Note: this test turns off + * {@link MergeFeedsJob#failOnDuplicateTripId} in order to force the merge to succeed even though there are duplicate + * trips contained within. + */ + @Test + public void canMergeBARTFeeds() throws SQLException { + Set versions = new HashSet<>(); + versions.add(bartVersion1); + versions.add(bartVersion2); + MergeFeedsJob mergeFeedsJob = new MergeFeedsJob("test", versions, "merged_output", MergeFeedsType.MTC); + // This time, turn off the failOnDuplicateTripId flag. + mergeFeedsJob.failOnDuplicateTripId = false; + mergeFeedsJob.run(); + // Result should succeed this time. + assertEquals( + "Merged directions count should equal expected value.", + 2, // Magic number represents expected number of lines after merge. + mergeFeedsJob.mergeFeedsResult.linesPerTable.get("directions").intValue() + ); + assertEquals( + "Merged feed trip count should equal expected value.", + 4552, // Magic number represents the number of trips in the merged BART feed. + mergeFeedsJob.mergedVersion.feedLoadResult.trips.rowCount + ); + assertEquals( + "Merged feed route count should equal expected value.", + 9, // Magic number represents the number of routes in the merged BART feed. + mergeFeedsJob.mergedVersion.feedLoadResult.routes.rowCount + ); + // Ensure there are no referential integrity errors or duplicate ID errors. + TestUtils.assertThatFeedHasNoErrorsOfType( + mergeFeedsJob.mergedVersion.namespace, + NewGTFSErrorType.REFERENTIAL_INTEGRITY.toString(), + NewGTFSErrorType.DUPLICATE_ID.toString() + ); + } +} diff --git a/src/test/java/com/conveyal/datatools/manager/persistence/PersistenceTest.java b/src/test/java/com/conveyal/datatools/manager/persistence/PersistenceTest.java index 7eb62c77d..143aa261d 100644 --- a/src/test/java/com/conveyal/datatools/manager/persistence/PersistenceTest.java +++ b/src/test/java/com/conveyal/datatools/manager/persistence/PersistenceTest.java @@ -3,12 +3,12 @@ import com.conveyal.datatools.DatatoolsTest; import com.conveyal.datatools.manager.models.FeedSource; import com.conveyal.datatools.manager.models.Project; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; +import org.junit.BeforeClass; +import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.Assert.assertEquals; /** * Created by landon on 9/6/17. 
@@ -16,7 +16,7 @@ public class PersistenceTest { private static final Logger LOG = LoggerFactory.getLogger(PersistenceTest.class); - @BeforeAll + @BeforeClass public static void setUp() { DatatoolsTest.setUp(); LOG.info("{} setup", PersistenceTest.class.getSimpleName()); @@ -30,7 +30,7 @@ public void createFeedSource() { String id = feedSource.id; Persistence.feedSources.create(feedSource); String retrievedId = Persistence.feedSources.getById(id).id; - assertEquals(retrievedId, id, "Found FeedSource ID should equal inserted ID."); + assertEquals("Found FeedSource ID should equal inserted ID.", id, retrievedId); } // @Test @@ -59,7 +59,7 @@ public void createProject() { String id = project.id; Persistence.projects.create(project); String retrievedId = Persistence.projects.getById(id).id; - assertEquals(retrievedId, id, "Found Project ID should equal inserted ID."); + assertEquals("Found Project ID should equal inserted ID.", id, retrievedId); } // // @Test diff --git a/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/createExistingUserResponse.json b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/createExistingUserResponse.json new file mode 100644 index 000000000..f1beb7703 --- /dev/null +++ b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/createExistingUserResponse.json @@ -0,0 +1,6 @@ +{ + "statusCode": 409, + "error": "Conflict", + "message": "The user already exists.", + "errorCode": "auth0_idp_error" +} \ No newline at end of file diff --git a/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/createNewUserResponse.json b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/createNewUserResponse.json new file mode 100644 index 000000000..6cf399343 --- /dev/null +++ b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/createNewUserResponse.json @@ -0,0 +1,24 @@ +{ + "email": "test-new-user@test.com", + "email_verified": false, + "updated_at": "2019-02-16T01:27:19.810Z", + "user_id": "auth0|test-new-user", + "name": "test-new-user@test.com", + "picture": "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "nickname": "test-new-user", + "identities": [{ + "connection": "Username-Password-Authentication", + "user_id": "test-new-user", + "provider": "auth0", + "isSocial": false + }], + "created_at": "2019-02-16T01:27:19.810Z", + "app_metadata": { + "datatools": [{ + "permissions": [], + "projects": [], + "organizations": [], + "client_id": "testing-client-id" + }] + } +} diff --git a/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/getExistingUserResponse.json b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/getExistingUserResponse.json new file mode 100644 index 000000000..fdf863e3b --- /dev/null +++ b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/getExistingUserResponse.json @@ -0,0 +1,24 @@ +{ + "email": "test-existing-user@test.com", + "email_verified": false, + "updated_at": "2019-02-16T01:27:19.810Z", + "user_id": "auth0|test-existing-user", + "name": "test-existing-user@test.com", + "picture": "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "nickname": "test-existing-user", + "identities": [{ + "connection": "Username-Password-Authentication", + "user_id": "test-existing-user", + "provider": "auth0", + "isSocial": false + }], + "created_at": "2019-02-16T01:27:19.810Z", + "app_metadata": { + "datatools": [{ + "permissions": [], + "projects": [], + "organizations": [], + 
"client_id": "testing-client-id" + }] + } +} diff --git a/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/getFirstTenUsersResponse.json b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/getFirstTenUsersResponse.json new file mode 100644 index 000000000..f16ec1b83 --- /dev/null +++ b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/getFirstTenUsersResponse.json @@ -0,0 +1,66 @@ +[{ + "email": "test1@test.com", + "email_verified": true, + "user_id": "auth0|test1", + "picture": "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "nickname": "test1", + "identities": [{ + "user_id": "test1", + "provider": "auth0", + "connection": "Username-Password-Authentication", + "isSocial": false + }], + "updated_at": "2017-07-12T19:16:11.699Z", + "created_at": "2017-06-22T19:19:41.404Z", + "last_password_reset": "2017-07-12T19:08:52.507Z", + "name": "test1", + "last_login": "2017-07-12T19:09:28.400Z", + "last_ip": "1.1.1.1", + "logins_count": 1, + "app_metadata": { + "datatools": [{ + "permissions": [], + "projects": [{ + "project_id": "test-project-id", + "permissions": [{ + "type": "view-feed", + "feeds": ["*"] + }], + "defaultFeeds": [] + }], + "organizations": [], + "client_id": "your-auth0-client-id", + "subscriptions": [{ + "type": "feed-updated", + "target": ["test-feed-id"] + }] + }] + } +}, { + "email": "test2@test.com", + "email_verified": true, + "updated_at": "2019-02-10T18:24:03.719Z", + "picture": "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "user_id": "auth0|test2", + "nickname": "test2", + "identities": [{ + "user_id": "test2", + "provider": "auth0", + "connection": "Username-Password-Authentication", + "isSocial": false + }], + "created_at": "2018-12-27T16:46:32.864Z", + "name": "test2", + "last_login": "2019-02-10T18:24:03.719Z", + "last_ip": "1.1.1.1", + "logins_count": 49, + "app_metadata": { + "datatools": [{ + "permissions": [{ + "type": "administer-application" + }], + "projects": [], + "client_id": "your-auth0-client-id" + }] + } +}] diff --git a/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/package-info.java b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/package-info.java new file mode 100644 index 000000000..5404ae3cd --- /dev/null +++ b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/package-info.java @@ -0,0 +1,7 @@ +/** + * All of the files in this directory (aside from this one) are mock responses used by a Wiremock server. The Wiremock + * server is used to emulate a 3rd party API (in this case Auth0) and it makes things less cluttered to store the json + * files in here instead of in the test class code. 
+ */ + +package com.conveyal.datatools.auth0-mock-responses.__files; diff --git a/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/updateExistingUserResponse.json b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/updateExistingUserResponse.json new file mode 100644 index 000000000..c41d9087b --- /dev/null +++ b/src/test/resources/com/conveyal/datatools/auth0-mock-responses/__files/updateExistingUserResponse.json @@ -0,0 +1,26 @@ +{ + "email": "test-existing-user@test.com", + "email_verified": false, + "updated_at": "2019-02-16T01:27:19.810Z", + "user_id": "auth0|test-existing-user", + "name": "test-existing-user@test.com", + "picture": "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "nickname": "test-existing-user", + "identities": [{ + "connection": "Username-Password-Authentication", + "user_id": "test-existing-user", + "provider": "auth0", + "isSocial": false + }], + "created_at": "2019-02-16T01:27:19.810Z", + "app_metadata": { + "datatools": [{ + "permissions": [{ + "type": "administer-application" + }], + "projects": [], + "organizations": [], + "client_id": "testing-client-id" + }] + } +} \ No newline at end of file diff --git a/src/test/resources/com/conveyal/datatools/bart_new.zip b/src/test/resources/com/conveyal/datatools/bart_new.zip new file mode 100644 index 000000000..c5ac1373e Binary files /dev/null and b/src/test/resources/com/conveyal/datatools/bart_new.zip differ diff --git a/src/test/resources/com/conveyal/datatools/bart_old.zip b/src/test/resources/com/conveyal/datatools/bart_old.zip new file mode 100644 index 000000000..191c3d2fc Binary files /dev/null and b/src/test/resources/com/conveyal/datatools/bart_old.zip differ diff --git a/src/test/resources/com/conveyal/datatools/hawaii_fake_no_shapes.zip b/src/test/resources/com/conveyal/datatools/hawaii_fake_no_shapes.zip new file mode 100644 index 000000000..219980844 Binary files /dev/null and b/src/test/resources/com/conveyal/datatools/hawaii_fake_no_shapes.zip differ diff --git a/src/test/resources/com/conveyal/datatools/napa-no-agency-id.zip b/src/test/resources/com/conveyal/datatools/napa-no-agency-id.zip new file mode 100644 index 000000000..bfb38c737 Binary files /dev/null and b/src/test/resources/com/conveyal/datatools/napa-no-agency-id.zip differ diff --git a/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canCreateUser-0.json b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canCreateUser-0.json new file mode 100644 index 000000000..b3f89cdf6 --- /dev/null +++ b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canCreateUser-0.json @@ -0,0 +1,24 @@ +{ + "app_metadata" : { + "datatools" : [ { + "client_id" : "testing-client-id", + "organizations" : [ ], + "permissions" : [ ], + "projects" : [ ] + } ] + }, + "created_at" : "2019-02-16T01:27:19.810Z", + "email" : "test-new-user@test.com", + "email_verified" : false, + "identities" : [ { + "connection" : "Username-Password-Authentication", + "isSocial" : false, + "provider" : "auth0", + "user_id" : "test-new-user" + } ], + "name" : "test-new-user@test.com", + "nickname" : "test-new-user", + "picture" : "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "updated_at" : "2019-02-16T01:27:19.810Z", + "user_id" : "auth0|test-new-user" +} \ No newline at end of file diff --git
a/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canDeleteUser-0.json b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canDeleteUser-0.json new file mode 100644 index 000000000..f32a5804e --- /dev/null +++ b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canDeleteUser-0.json @@ -0,0 +1 @@ +true \ No newline at end of file diff --git a/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canListFirstTenUsers-0.json b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canListFirstTenUsers-0.json new file mode 100644 index 000000000..3bf6cc9ef --- /dev/null +++ b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canListFirstTenUsers-0.json @@ -0,0 +1,66 @@ +[ { + "app_metadata" : { + "datatools" : [ { + "client_id" : "your-auth0-client-id", + "organizations" : [ ], + "permissions" : [ ], + "projects" : [ { + "defaultFeeds" : [ ], + "permissions" : [ { + "feeds" : [ "*" ], + "type" : "view-feed" + } ], + "project_id" : "test-project-id" + } ], + "subscriptions" : [ { + "target" : [ "test-feed-id" ], + "type" : "feed-updated" + } ] + } ] + }, + "created_at" : "2017-06-22T19:19:41.404Z", + "email" : "test1@test.com", + "email_verified" : true, + "identities" : [ { + "connection" : "Username-Password-Authentication", + "isSocial" : false, + "provider" : "auth0", + "user_id" : "test1" + } ], + "last_ip" : "1.1.1.1", + "last_login" : "2017-07-12T19:09:28.400Z", + "last_password_reset" : "2017-07-12T19:08:52.507Z", + "logins_count" : 1, + "name" : "test1", + "nickname" : "test1", + "picture" : "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "updated_at" : "2017-07-12T19:16:11.699Z", + "user_id" : "auth0|test1" +}, { + "app_metadata" : { + "datatools" : [ { + "client_id" : "your-auth0-client-id", + "permissions" : [ { + "type" : "administer-application" + } ], + "projects" : [ ] + } ] + }, + "created_at" : "2018-12-27T16:46:32.864Z", + "email" : "test2@test.com", + "email_verified" : true, + "identities" : [ { + "connection" : "Username-Password-Authentication", + "isSocial" : false, + "provider" : "auth0", + "user_id" : "test2" + } ], + "last_ip" : "1.1.1.1", + "last_login" : "2019-02-10T18:24:03.719Z", + "logins_count" : 49, + "name" : "test2", + "nickname" : "test2", + "picture" : "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "updated_at" : "2019-02-10T18:24:03.719Z", + "user_id" : "auth0|test2" +} ] \ No newline at end of file diff --git a/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canReturnMeaningfulAuth0Error-0.json b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canReturnMeaningfulAuth0Error-0.json new file mode 100644 index 000000000..8554b2a1c --- /dev/null +++ b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canReturnMeaningfulAuth0Error-0.json @@ -0,0 +1,5 @@ +{ + "code" : 409, + "message" : "An Auth0 error occurred: The user already exists.", + "result" : "ERR" +} \ No newline at end of file diff --git a/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canUpdateUser-0.json b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canUpdateUser-0.json new file mode 
100644 index 000000000..4265f8044 --- /dev/null +++ b/src/test/resources/snapshots/com/conveyal/datatools/manager/controllers/api/UserControllerTest/canUpdateUser-0.json @@ -0,0 +1,26 @@ +{ + "app_metadata" : { + "datatools" : [ { + "client_id" : "testing-client-id", + "organizations" : [ ], + "permissions" : [ { + "type" : "administer-application" + } ], + "projects" : [ ] + } ] + }, + "created_at" : "2019-02-16T01:27:19.810Z", + "email" : "test-existing-user@test.com", + "email_verified" : false, + "identities" : [ { + "connection" : "Username-Password-Authentication", + "isSocial" : false, + "provider" : "auth0", + "user_id" : "test-existing-user" + } ], + "name" : "test-existing-user@test.com", + "nickname" : "test-existing-user", + "picture" : "https://i0.wp.com/cdn.auth0.com/avatars/tu.png?ssl=1", + "updated_at" : "2019-02-16T01:27:19.810Z", + "user_id" : "auth0|test-existing-user" +} \ No newline at end of file diff --git a/src/test/resources/snapshots/package-info.java b/src/test/resources/snapshots/package-info.java new file mode 100644 index 000000000..c3dc9dc3a --- /dev/null +++ b/src/test/resources/snapshots/package-info.java @@ -0,0 +1,7 @@ +/** + * All of the files in the folders within this directory are snapshot files used to perform assertions on data responses. Each + * file is initially auto-generated, but manually verified for correctness. The snapshot library used is a Conveyal + * fork of an open-source Java snapshot-testing library: https://github.com/conveyal/java-snapshot-matcher. + */ + +package com.conveyal.datatools.snapshots; \ No newline at end of file