Commit

Merge pull request #212 from ibi-group/dev
Release
landonreed authored Jul 17, 2019
2 parents 8a6ad83 + a68aa5f commit bcfc11a
Showing 79 changed files with 3,614 additions and 1,146 deletions.
3 changes: 2 additions & 1 deletion .github/pull_request_template.md
@@ -5,9 +5,10 @@
- [ ] The description lists all applicable issues this PR seeks to resolve
- [ ] The description lists any configuration setting(s) that differ from the default settings
- [ ] All tests and CI builds passing
- [ ] The description lists all relevant PRs included in this release _(remove this if not merging to master)_
- [ ] e2e tests are all passing _(remove this if not merging to master)_
- [ ] Code coverage improves or is at 100% _(remove this if not merging to master)_

### Description

Please explain the changes you made here and, if not immediately obvious from the code, how they resolve any referenced issues. Be sure to include all issues being resolved and any special configuration settings that are needed for the software to run properly with these changes.
Please explain the changes you made here and, if not immediately obvious from the code, how they resolve any referenced issues. Be sure to include all issues being resolved and any special configuration settings that are needed for the software to run properly with these changes. If merging to master, please also list the PRs that are to be included.
22 changes: 16 additions & 6 deletions .travis.yml
@@ -1,21 +1,31 @@
dist: trusty # jdk 8 not available on xenial
language: java
jdk:
- oraclejdk8
java:
- oraclejdk8
install: true
sudo: false
# Install mongoDB to perform persistence tests
services: mongodb
services:
- mongodb
- postgresql
addons:
postgresql: 9.6
cache:
directories:
- "$HOME/.m2"
- $HOME/.m2
- $HOME/.cache/yarn
# Install semantic-release
before_script:
- yarn global add @conveyal/maven-semantic-release semantic-release
- yarn global add @conveyal/maven-semantic-release semantic-release@15
# Create dir for GTFS+ files (used during testing)
- mkdir /tmp/gtfsplus
before_install:
#- sed -i.bak -e 's|https://nexus.codehaus.org/snapshots/|https://oss.sonatype.org/content/repositories/codehaus-snapshots/|g' ~/.m2/settings.xml
# set region in AWS config for S3 setup
- mkdir ~/.aws && printf '%s\n' '[default]' 'aws_access_key_id=foo' 'aws_secret_access_key=bar' 'region=us-east-1' > ~/.aws/config
- cp configurations/default/server.yml.tmp configurations/default/server.yml
# create database for tests
- psql -U postgres -c 'CREATE DATABASE catalogue;'
script:
# package jar
- mvn package
@@ -65,5 +75,5 @@ deploy:
local-dir: deploy
acl: public_read
on:
repo: conveyal/datatools-server
repo: ibi-group/datatools-server
all_branches: true
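The new before_install steps provision a local PostgreSQL database named catalogue (alongside the existing MongoDB service) and stub out AWS credentials so persistence tests can run on CI. Purely as an illustration — assuming the default local port, the passwordless postgres user that Travis creates, and that the PostgreSQL JDBC driver is already on the test classpath — a test could reach that database like this:

import java.sql.Connection;
import java.sql.DriverManager;

public class CatalogueConnectionCheck {
    public static void main(String[] args) throws Exception {
        // "catalogue" matches the database created by the psql step above; host, port, and user are assumptions.
        String url = "jdbc:postgresql://localhost:5432/catalogue";
        try (Connection conn = DriverManager.getConnection(url, "postgres", "")) {
            System.out.println("Test database reachable: " + conn.isValid(2));
        }
    }
}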
6 changes: 6 additions & 0 deletions configurations/default/server.yml.tmp
@@ -15,6 +15,9 @@ modules:
enabled: false
user_admin:
enabled: true
# Enable GTFS+ module for testing purposes
gtfsplus:
enabled: true
gtfsapi:
enabled: true
load_on_fetch: false
@@ -29,3 +32,6 @@ extensions:
enabled: true
api: http://api.transitfeeds.com/v1/getFeeds
key: your-api-key
# Enable MTC for testing purposes
mtc:
enabled: true
85 changes: 57 additions & 28 deletions pom.xml
@@ -51,7 +51,10 @@
<url>https://github.com/conveyal/datatools-server.git</url>
</scm>
<properties>
<jackson.version>2.9.8</jackson.version>
<jackson.version>2.9.9</jackson.version>
<!-- Using the latest version of geotools (e.g, 20) seems to cause issues with the shapefile
plugin where the_geom for each feature is null. -->
<geotools.version>17.5</geotools.version>
</properties>
<build>
<resources>
@@ -158,23 +161,6 @@
</execution>
</executions>
</plugin>
<!-- This plugin makes sure junit 5 tests run -->
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.22.0</version>
<dependencies>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-surefire-provider</artifactId>
<version>1.3.1</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.3.1</version>
</dependency>
</dependencies>
</plugin>
</plugins>
</build>
<repositories>
@@ -218,6 +204,11 @@
<updatePolicy>always</updatePolicy>
</snapshots>
</repository>
<!-- used for importing java projects from github -->
<repository>
<id>jitpack.io</id>
<url>https://jitpack.io</url>
</repository>
</repositories>

<dependencies>
@@ -255,11 +246,22 @@
<version>2.1.0</version>
</dependency>

<!-- Used for loading/fetching/writing GTFS entities (also provides access to commons-io and AWS S3 SDK). -->
<!-- Used for testing (note: this should match the version in gtfs-lib). -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>

<!-- Used for loading/fetching/writing GTFS entities. gtfs-lib also provides access to:
- commons-io - generic utilities
- AWS S3 SDK - putting/getting objects into/out of S3.
-->
<dependency>
<groupId>com.conveyal</groupId>
<artifactId>gtfs-lib</artifactId>
<version>4.2.5</version>
<version>4.3.6</version>
</dependency>

<!-- Used for data-tools application database -->
@@ -322,15 +324,18 @@
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-shapefile</artifactId>
<version>19.2</version>
<version>${geotools.version}</version>
</dependency>

<!-- Unit testing -->
<!-- gt-metadata and gt-api contains some dependencies required by gt-shapefile -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.3.1</version>
<scope>test</scope>
<groupId>org.geotools</groupId>
<artifactId>gt-metadata</artifactId>
<version>${geotools.version}</version>
</dependency>
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-api</artifactId>
<version>${geotools.version}</version>
</dependency>

<!-- Error reporting -->
@@ -359,8 +364,32 @@
<groupId>org.hamcrest</groupId>
<artifactId>java-hamcrest</artifactId>
<version>2.0.0.0</version>
<scope>test</scope>
</dependency>
<!-- Snapshotting library for testing -->
<dependency>
<groupId>com.github.conveyal</groupId>
<artifactId>java-snapshot-matcher</artifactId>
<version>3495b32f7b4d3f82590e0a2284029214070b6984</version>
<scope>test</scope>
</dependency>
<!-- wiremock is used to mock http requests -->
<dependency>
<groupId>com.github.tomakehurst</groupId>
<artifactId>wiremock-standalone</artifactId>
<version>2.14.0</version>
<scope>test</scope>
</dependency>
<!-- Used for writing csv for merged feeds. Note: this appears to be one of the only
CSV libraries that will only quote values when necessary (e.g., there is a comma character
contained within the value) and that will work with an output stream writer when writing
directly to a zip output stream.
-->
<dependency>
<groupId>net.sf.supercsv</groupId>
<artifactId>super-csv</artifactId>
<version>2.4.0</version>
</dependency>

</dependencies>

</project>
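The comment on the new super-csv dependency explains the selection criteria: it quotes values only when necessary and it can write through an OutputStreamWriter placed directly over a zip output stream. A minimal sketch of that pattern follows (not the project's actual merge code; the entry name and values are made up):

import org.supercsv.io.CsvListWriter;
import org.supercsv.prefs.CsvPreference;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class MergedFeedCsvSketch {
    public static void main(String[] args) throws Exception {
        try (ZipOutputStream zipOut = new ZipOutputStream(new FileOutputStream("merged-feed.zip"))) {
            zipOut.putNextEntry(new ZipEntry("stops.txt"));
            // Write CSV straight into the current zip entry; flush (rather than close) the CSV writer
            // so the underlying zip stream stays open for further entries.
            OutputStreamWriter writer = new OutputStreamWriter(zipOut, StandardCharsets.UTF_8);
            CsvListWriter csv = new CsvListWriter(writer, CsvPreference.STANDARD_PREFERENCE);
            csv.write("stop_id", "stop_name", "stop_desc");
            // Only the value containing a comma is quoted in the output.
            csv.write("123", "Main St & 1st Ave, far side", "");
            csv.flush();
            zipOut.closeEntry();
        }
    }
}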
MonitorableJob.java
@@ -6,6 +6,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
@@ -19,10 +20,11 @@
*/
public abstract class MonitorableJob implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(MonitorableJob.class);
protected final String owner;
public final String owner;

// Public fields will be serialized over HTTP API and visible to the web client
public final JobType type;
public File file;
public String parentJobId;
public JobType parentJobType;
// Status is not final to allow some jobs to have extra status fields.
@@ -48,6 +50,7 @@ public enum JobType {
LOAD_FEED,
VALIDATE_FEED,
DEPLOY_TO_OTP,
EXPORT_GIS,
FETCH_PROJECT_FEEDS,
FETCH_SINGLE_FEED,
MAKE_PROJECT_PUBLIC,
@@ -57,7 +60,7 @@ public enum JobType {
EXPORT_SNAPSHOT_TO_GTFS,
CONVERT_EDITOR_MAPDB_TO_SQL,
VALIDATE_ALL_FEEDS,
MERGE_PROJECT_FEEDS
MERGE_FEED_VERSIONS
}

public MonitorableJob(String owner, String name, JobType type) {
@@ -90,6 +93,10 @@ private void registerJob() {
DataManager.userJobsMap.put(this.owner, userJobs);
}

public File retrieveFile () {
return file;
}

/**
* This method should never be called directly or overridden. It is a standard clean up stage for all
* monitorable jobs.
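The MonitorableJob changes add a public file field, a retrieveFile() accessor, and an EXPORT_GIS job type, so a job can expose a result file (such as an exported shapefile) to the HTTP API. A purely illustrative subclass sketch follows; the jobLogic() hook name, the import path, and the file location are assumptions, since only the constructor and the new members appear in this diff:

import com.conveyal.datatools.common.status.MonitorableJob;  // package path is an assumption
import java.io.File;

public class ExportGisJob extends MonitorableJob {
    public ExportGisJob(String owner) {
        super(owner, "Exporting GIS shapefile", JobType.EXPORT_GIS);
    }

    @Override
    public void jobLogic() {
        // Hypothetical: write the shapefile zip, then expose it through the new public field
        // so clients can fetch it via retrieveFile() once the job completes.
        this.file = new File("/tmp/gis-export.zip");
    }
}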
EditorController.java
@@ -1,6 +1,7 @@
package com.conveyal.datatools.editor.controllers.api;

import com.conveyal.datatools.common.utils.S3Utils;
import com.conveyal.datatools.common.utils.SparkUtils;
import com.conveyal.datatools.editor.controllers.EditorLockController;
import com.conveyal.datatools.manager.auth.Auth0UserProfile;
import com.conveyal.datatools.manager.models.FeedSource;
@@ -86,6 +87,7 @@ private void registerRoutes() {
// Handle update useFrequency field. Hitting this endpoint will delete all trips for a pattern and update the
// useFrequency field.
if ("pattern".equals(classToLowercase)) {
put(ROOT_ROUTE + ID_PARAM + "/stop_times", this::updateStopTimesFromPatternStops, json::write);
delete(ROOT_ROUTE + ID_PARAM + "/trips", this::deleteTripsForPattern, json::write);
}
}
@@ -112,7 +114,7 @@ private String deleteTripsForPattern(Request req, Response res) {
logMessageAndHalt(req, 500, "Error deleting entity", e);
return null;
} finally {
LOG.info("Delete operation took {} msec", System.currentTimeMillis() - startTime);
LOG.info("Delete trips for pattern operation took {} msec", System.currentTimeMillis() - startTime);
}
}

@@ -124,8 +126,9 @@ private String deleteMultipleTrips(Request req, Response res) {
long startTime = System.currentTimeMillis();
String namespace = getNamespaceAndValidateSession(req);
String[] tripIds = req.queryParams("tripIds").split(",");
JdbcTableWriter tableWriter = null;
try {
JdbcTableWriter tableWriter = new JdbcTableWriter(table, datasource, namespace);
tableWriter = new JdbcTableWriter(table, datasource, namespace);
for (String tripId: tripIds) {
// Delete each trip ID found in query param WITHOUT auto-committing.
int result = tableWriter.delete(Integer.parseInt(tripId), false);
@@ -135,14 +138,15 @@ private String deleteMultipleTrips(Request req, Response res) {
throw new SQLException(message);
}
}
// Commit the transaction after iterating over trip IDs (because the deletes where made without autocommit).
// Commit the transaction after iterating over trip IDs (because the deletes were made without autocommit).
tableWriter.commit();
LOG.info("Deleted {} trips", tripIds.length);
} catch (InvalidNamespaceException e) {
logMessageAndHalt(req, 400, "Invalid namespace");
} catch (Exception e) {
logMessageAndHalt(req, 500, "Error deleting entity", e);
} finally {
if (tableWriter != null) tableWriter.close();
LOG.info("Delete operation took {} msec", System.currentTimeMillis() - startTime);
}
return formatJSON(String.format("Deleted %d.", tripIds.length), 200);
@@ -159,7 +163,7 @@ private String deleteOne(Request req, Response res) {
JdbcTableWriter tableWriter = new JdbcTableWriter(table, datasource, namespace);
if (tableWriter.delete(id, true) == 1) {
// FIXME: change return message based on result value
return formatJSON(String.valueOf("Deleted one."), 200);
return formatJSON("Deleted one.", 200);
}
} catch (Exception e) {
logMessageAndHalt(req, 400, "Error deleting entity", e);
@@ -169,17 +173,37 @@ private String deleteOne(Request req, Response res) {
return null;
}

/**
* For a given pattern ID, update all its trips' stop times to conform to the default travel and dwell times. This
* is used, for instance, when a new pattern stop is added or inserted into an existing pattern that has trips which
* need the updated travel times applied in bulk.
*/
private String updateStopTimesFromPatternStops (Request req, Response res) {
long startTime = System.currentTimeMillis();
String namespace = getNamespaceAndValidateSession(req);
int patternId = getIdFromRequest(req);
try {
int beginStopSequence = Integer.parseInt(req.queryParams("stopSequence"));
JdbcTableWriter tableWriter = new JdbcTableWriter(table, datasource, namespace);
int stopTimesUpdated = tableWriter.normalizeStopTimesForPattern(patternId, beginStopSequence);
return SparkUtils.formatJSON("updateResult", stopTimesUpdated + " stop times updated.");
} catch (Exception e) {
logMessageAndHalt(req, 400, "Error normalizing stop times", e);
return null;
} finally {
LOG.info("Normalize stop times operation took {} msec", System.currentTimeMillis() - startTime);
}
}

/**
* HTTP endpoint to upload branding image to S3 for either agency or route entities. The endpoint also handles
* updating the branding URL field to match the S3 URL.
*/
private String uploadEntityBranding (Request req, Response res) {
int id = getIdFromRequest(req);
String url = null;
String url;
try {
// FIXME: remove cast to string.
String idAsString = String.valueOf(id);
url = S3Utils.uploadBranding(req, String.join("_", classToLowercase, idAsString));
url = S3Utils.uploadBranding(req, String.format("%s_%d", classToLowercase, id));
} catch (HaltException e) {
// Do not re-catch halts thrown for exceptions that have already been caught.
throw e;
@@ -300,10 +324,4 @@ private Integer getIdFromRequest(Request req) {
}
return id;
}

// TODO add hooks
abstract void getEntityHook(T entity);
abstract void createEntityHook(T entity);
abstract void updateEntityHook(T entity);
abstract void deleteEntityHook(T entity);
}
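The new normalization handler is registered as a PUT on the pattern's /stop_times sub-route and reads a stopSequence query parameter marking the first pattern stop to normalize from. A hedged sketch of calling it from a client follows, with the host, port, URL prefix, pattern id, and auth header all assumed (the real prefix comes from apiPrefix/ROOT_ROUTE, which are not shown in this diff):

import java.net.HttpURLConnection;
import java.net.URL;

public class NormalizeStopTimesCall {
    public static void main(String[] args) throws Exception {
        // Assumed URL shape: PUT {ROOT_ROUTE}{ID_PARAM}/stop_times?stopSequence=N
        URL url = new URL("http://localhost:4000/api/editor/secure/pattern/42/stop_times?stopSequence=3");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("PUT");
        conn.setRequestProperty("Authorization", "Bearer <token>");  // auth scheme is an assumption
        // Response body is the JSON produced by SparkUtils.formatJSON in the handler above.
        System.out.println("HTTP " + conn.getResponseCode());
    }
}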
EditorControllerImpl.java
@@ -9,24 +9,4 @@ public class EditorControllerImpl extends EditorController {
public EditorControllerImpl(String apiPrefix, Table table, DataSource dataSource){
super(apiPrefix, table, dataSource);
}

@Override
void getEntityHook(Entity entity) {

}

@Override
void createEntityHook(Entity entity) {

}

@Override
void updateEntityHook(Entity entity) {

}

@Override
void deleteEntityHook(Entity entity) {

}
}