Merge pull request #225 from ibi-group/deploy-to-ec2
Deploy OTP to AWS load balancer and manage OTP servers in separate collection (and misc other fixes)
landonreed authored Oct 9, 2019
2 parents 0ea4243 + 240a6e0 commit 00e5c88
Showing 38 changed files with 1,883 additions and 1,087 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -16,6 +16,7 @@ deploy/
# Configurations
configurations/*
!configurations/default
!configurations/test

# Secret config files
.env
45 changes: 14 additions & 31 deletions configurations/default/server.yml.tmp
@@ -1,61 +1,44 @@
application:
  title: Data Tools
  logo: https://d2tyb7byn1fef9.cloudfront.net/ibi_group-128x128.png
  logo_large: https://d2tyb7byn1fef9.cloudfront.net/ibi_group_black-512x512.png
  client_assets_url: https://example.com
  shortcut_icon_url: https://d2tyb7byn1fef9.cloudfront.net/ibi-logo-original%402x.png
  public_url: http://localhost:9966
  notifications_enabled: true
  cors:
    enabled: true
    origins: https://google.com
    methods:
    headers:
  notifications_enabled: false
  docs_url: http://conveyal-data-tools.readthedocs.org
  support_email: [email protected]
  port: 4000
  data:
    editor_mapdb: /tmp/editor/mapdb
    mapdb: /tmp/mapdb
    gtfs: /tmp
    use_s3_storage: false
    s3_region: us-east-1
    gtfs_s3_bucket: bucket-name
modules:
  dump:
    enabled: true
  enterprise:
    enabled: false
  deployment:
    enabled: true
  editor:
    enabled: true
    url: http://localhost:9001
  alerts:
    enabled: true
    use_extension: mtc
    url: /alerts
  sign_config:
  deployment:
    enabled: true
    use_extension: mtc
    url: /signs # eventually remove this
    ec2:
      enabled: false
      default_ami: ami-your-ami-id
      # Note: using a cloudfront URL for these download URLs will greatly
      # increase download/deploy speed.
      otp_download_url: https://optional-otp-repo.com
      r5_download_url: https://optional-r5-repo.com
  user_admin:
    enabled: true
  # Enable GTFS+ module for testing purposes
  gtfsplus:
    enabled: true
  gtfsapi:
    enabled: true
    load_on_fetch: false
    # use_extension: mtc
    # update_frequency: 30 # in seconds

extensions:
  mtc:
    enabled: true
    rtd_api: http://localhost:9876/
    s3_bucket: bucket-name
    s3_prefix: waiting/
    s3_download_prefix: waiting/
  transitland:
    enabled: true
    api: https://transit.land/api/v1/feeds
  transitfeeds:
    enabled: true
    api: http://api.transitfeeds.com/v1/getFeeds
    key: your-api-key
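
The diff above adds an `ec2` block under the `deployment` module (`default_ami`, plus optional `otp_download_url` and `r5_download_url`). The deployment code itself is not shown in this excerpt; the following is only a rough sketch, assuming the aws-java-sdk-ec2 dependency added to pom.xml further down, of how such settings could feed an EC2 launch request. The region, instance type, and user-data script are illustrative assumptions, not values from this commit:

```java
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2ClientBuilder;
import com.amazonaws.services.ec2.model.Instance;
import com.amazonaws.services.ec2.model.RunInstancesRequest;
import com.amazonaws.services.ec2.model.RunInstancesResult;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Ec2LaunchSketch {
    public static void main(String[] args) {
        // Values that would come from the ec2 block in server.yml (hypothetical literals here).
        String defaultAmi = "ami-your-ami-id";
        String otpDownloadUrl = "https://optional-otp-repo.com";

        // User data runs on first boot; here it simply downloads an OTP jar.
        String userData = "#!/bin/bash\nwget -O /opt/otp.jar " + otpDownloadUrl + "\n";

        AmazonEC2 ec2 = AmazonEC2ClientBuilder.standard()
            .withRegion("us-east-1") // assumed region
            .build();

        RunInstancesRequest request = new RunInstancesRequest()
            .withImageId(defaultAmi)
            .withInstanceType("t2.medium") // assumed instance type
            .withMinCount(1)
            .withMaxCount(1)
            .withUserData(Base64.getEncoder()
                .encodeToString(userData.getBytes(StandardCharsets.UTF_8)));

        RunInstancesResult result = ec2.runInstances(request);
        for (Instance instance : result.getReservation().getInstances()) {
            System.out.println("Launched instance " + instance.getInstanceId());
        }
    }
}
```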
19 changes: 19 additions & 0 deletions configurations/test/env.yml.tmp
@@ -0,0 +1,19 @@
# This client ID refers to the UI client in Auth0.
AUTH0_CLIENT_ID: your-auth0-client-id
AUTH0_DOMAIN: your-auth0-domain
# Note: One of AUTH0_SECRET or AUTH0_PUBLIC_KEY should be used depending on the signing algorithm set on the client.
# It seems that newer Auth0 accounts (2017 and later) might default to RS256 (public key).
AUTH0_SECRET: your-auth0-secret # uses HS256 signing algorithm
# AUTH0_PUBLIC_KEY: /path/to/auth0.pem # uses RS256 signing algorithm
# This client/secret pair refer to a machine-to-machine Auth0 application used to access the Management API.
AUTH0_API_CLIENT: your-api-client-id
AUTH0_API_SECRET: your-api-secret-id
DISABLE_AUTH: false
OSM_VEX: http://localhost:1000
SPARKPOST_KEY: your-sparkpost-key
SPARKPOST_EMAIL: [email protected]
GTFS_DATABASE_URL: jdbc:postgresql://localhost/catalogue
# GTFS_DATABASE_USER:
# GTFS_DATABASE_PASSWORD:
#MONGO_URI: mongodb://mongo-host:27017
MONGO_DB_NAME: catalogue
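
The comments above note that either AUTH0_SECRET (HS256) or AUTH0_PUBLIC_KEY (RS256) is used, depending on the signing algorithm configured on the Auth0 client. As a hedged illustration of the HS256 case using the auth0 java-jwt library (not necessarily the exact verification path in this project; the issuer and token are placeholders):

```java
import com.auth0.jwt.JWT;
import com.auth0.jwt.JWTVerifier;
import com.auth0.jwt.algorithms.Algorithm;
import com.auth0.jwt.interfaces.DecodedJWT;

public class Auth0VerifySketch {
    public static void main(String[] args) throws Exception {
        String auth0Secret = System.getenv("AUTH0_SECRET"); // HS256 shared secret from env.yml
        String auth0Domain = "your-auth0-domain";           // placeholder
        String token = args.length > 0 ? args[0] : "";      // raw JWT from the Authorization header

        // HS256: the same secret that signed the token also verifies it.
        // For RS256, Algorithm.RSA256(publicKey, null) with a key loaded from AUTH0_PUBLIC_KEY
        // would be used instead.
        JWTVerifier verifier = JWT.require(Algorithm.HMAC256(auth0Secret))
            .withIssuer("https://" + auth0Domain + "/")
            .build();

        DecodedJWT decoded = verifier.verify(token);
        System.out.println("Token subject (user id): " + decoded.getSubject());
    }
}
```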
50 changes: 50 additions & 0 deletions configurations/test/server.yml.tmp
@@ -0,0 +1,50 @@
application:
  title: Data Tools
  logo: https://d2tyb7byn1fef9.cloudfront.net/ibi_group-128x128.png
  logo_large: https://d2tyb7byn1fef9.cloudfront.net/ibi_group_black-512x512.png
  client_assets_url: https://example.com
  shortcut_icon_url: https://d2tyb7byn1fef9.cloudfront.net/ibi-logo-original%402x.png
  public_url: http://localhost:9966
  notifications_enabled: false
  docs_url: http://conveyal-data-tools.readthedocs.org
  support_email: [email protected]
  port: 4000
  data:
    gtfs: /tmp
    use_s3_storage: false
    s3_region: us-east-1
    gtfs_s3_bucket: bucket-name
modules:
  enterprise:
    enabled: false
  editor:
    enabled: true
  deployment:
    enabled: true
    ec2:
      enabled: false
      default_ami: ami-your-ami-id
  user_admin:
    enabled: true
  # Enable GTFS+ module for testing purposes
  gtfsplus:
    enabled: true
  gtfsapi:
    enabled: true
    load_on_fetch: false
    # use_extension: mtc
    # update_frequency: 30 # in seconds
extensions:
  # Enable MTC extension so that MTC-specific feed merge tests can run
  mtc:
    enabled: true
    rtd_api: http://localhost:9876/
    s3_bucket: bucket-name
    s3_prefix: waiting/
    s3_download_prefix: waiting/
  transitland:
    enabled: true
    api: https://transit.land/api/v1/feeds
  transitfeeds:
    enabled: true
    api: http://api.transitfeeds.com/v1/getFeeds
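
These test templates mirror the default configuration above and are un-ignored via the .gitignore change at the top of this commit. As a hedged sketch (file path and key lookups assumed for illustration, not taken from this commit's code), a test could load such a YAML file with Jackson's YAML support and read nested values like this:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;

import java.io.File;

public class ConfigLoadSketch {
    public static void main(String[] args) throws Exception {
        // Path assumed for illustration; the template would first be copied to server.yml.
        File configFile = new File("configurations/test/server.yml");

        ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory());
        JsonNode config = yamlMapper.readTree(configFile);

        // Walk the nested keys shown in the template above.
        boolean ec2Enabled = config.path("modules").path("deployment").path("ec2").path("enabled").asBoolean();
        String gtfsDir = config.path("application").path("data").path("gtfs").asText();

        System.out.println("ec2 enabled: " + ec2Enabled + ", gtfs dir: " + gtfsDir);
    }
}
```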
17 changes: 17 additions & 0 deletions pom.xml
@@ -43,6 +43,7 @@
        <!-- Using the latest version of geotools (e.g., 20) seems to cause issues with the shapefile
        plugin where the_geom for each feature is null. -->
        <geotools.version>17.5</geotools.version>
        <awsjavasdk.version>1.11.625</awsjavasdk.version>
    </properties>
    <build>
        <resources>
@@ -393,6 +394,22 @@
            <artifactId>super-csv</artifactId>
            <version>2.4.0</version>
        </dependency>
        <!-- AWS individual module imports (S3 imported by gtfs-lib) -->
        <dependency>
            <groupId>com.amazonaws</groupId>
            <artifactId>aws-java-sdk-ec2</artifactId>
            <version>${awsjavasdk.version}</version>
        </dependency>
        <dependency>
            <groupId>com.amazonaws</groupId>
            <artifactId>aws-java-sdk-iam</artifactId>
            <version>${awsjavasdk.version}</version>
        </dependency>
        <dependency>
            <groupId>com.amazonaws</groupId>
            <artifactId>aws-java-sdk-elasticloadbalancingv2</artifactId>
            <version>${awsjavasdk.version}</version>
        </dependency>
    </dependencies>

</project>
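
The three new AWS SDK modules cover EC2 instance management, IAM lookups, and the v2 Elastic Load Balancing API that the PR title refers to. The deployment jobs that use them are not part of this excerpt; as a rough sketch of what the elasticloadbalancingv2 dependency enables, registering a newly launched instance with a target group might look something like this (region, target group ARN, instance ID, and port are placeholders, not values from this commit):

```java
import com.amazonaws.services.elasticloadbalancingv2.AmazonElasticLoadBalancing;
import com.amazonaws.services.elasticloadbalancingv2.AmazonElasticLoadBalancingClientBuilder;
import com.amazonaws.services.elasticloadbalancingv2.model.RegisterTargetsRequest;
import com.amazonaws.services.elasticloadbalancingv2.model.TargetDescription;

public class ElbRegisterSketch {
    public static void main(String[] args) {
        AmazonElasticLoadBalancing elb = AmazonElasticLoadBalancingClientBuilder.standard()
            .withRegion("us-east-1") // assumed region
            .build();

        // Placeholders: in practice these would come from the deployment configuration and the
        // EC2 instance launched for the OTP server.
        String targetGroupArn = "arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/otp/abc123";
        String instanceId = "i-0123456789abcdef0";

        RegisterTargetsRequest request = new RegisterTargetsRequest()
            .withTargetGroupArn(targetGroupArn)
            .withTargets(new TargetDescription().withId(instanceId).withPort(80));

        elb.registerTargets(request);
        System.out.println("Registered " + instanceId + " with target group.");
    }
}
```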
MonitorableJob.java
@@ -7,6 +7,7 @@
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.Serializable;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
@@ -18,7 +19,8 @@
/**
 * Created by landon on 6/13/16.
 */
public abstract class MonitorableJob implements Runnable {
public abstract class MonitorableJob implements Runnable, Serializable {
    private static final long serialVersionUID = 1L;
    private static final Logger LOG = LoggerFactory.getLogger(MonitorableJob.class);
    public final String owner;

@@ -60,6 +62,7 @@ public enum JobType {
        EXPORT_SNAPSHOT_TO_GTFS,
        CONVERT_EDITOR_MAPDB_TO_SQL,
        VALIDATE_ALL_FEEDS,
        MONITOR_SERVER_STATUS,
        MERGE_FEED_VERSIONS
    }

@@ -128,7 +131,6 @@ public void run () {
        boolean parentJobErrored = false;
        boolean subTaskErrored = false;
        String cancelMessage = "";
        long startTimeNanos = System.nanoTime();
        try {
            // First execute the core logic of the specific MonitorableJob subclass
            jobLogic();
@@ -187,8 +189,7 @@ public void run () {
            LOG.error("Job failed", ex);
            status.update(true, ex.getMessage(), 100, true);
        }
        status.startTime = TimeUnit.NANOSECONDS.toMillis(startTimeNanos);
        status.duration = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNanos);
        status.duration = System.currentTimeMillis() - status.startTime;
        LOG.info("{} {} {} in {} ms", type, jobId, status.error ? "errored" : "completed", status.duration);
    }

@@ -242,7 +243,7 @@ public static class Status {
        /** How much of task is complete? */
        public double percentComplete;

        public long startTime;
        public long startTime = System.currentTimeMillis();
        public long duration;

        // When was the job initialized?
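
The run() changes above drop the separate System.nanoTime() bookkeeping: Status.startTime now defaults to System.currentTimeMillis() when the Status object is created, and duration is simply the wall-clock difference when the job finishes. A minimal standalone sketch of that pattern (a stand-in, not the actual MonitorableJob, which carries far more state):

```java
public class TimedJobSketch implements Runnable {

    /** Trimmed-down stand-in for MonitorableJob.Status: start time captured at construction. */
    static class Status {
        long startTime = System.currentTimeMillis();
        long duration;
        boolean error;
    }

    final Status status = new Status();

    @Override
    public void run() {
        try {
            // Core job logic would run here.
            Thread.sleep(250);
        } catch (InterruptedException e) {
            status.error = true;
            Thread.currentThread().interrupt();
        } finally {
            // Duration derived from the status' own start time, mirroring the formula in the diff above.
            status.duration = System.currentTimeMillis() - status.startTime;
            System.out.println((status.error ? "errored" : "completed") + " in " + status.duration + " ms");
        }
    }

    public static void main(String[] args) {
        new TimedJobSketch().run();
    }
}
```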
@@ -112,7 +112,7 @@ public static void logMessageAndHalt(
    ) throws HaltException {
        // Note that halting occurred, also print error stacktrace if applicable
        if (e != null) e.printStackTrace();
        LOG.info("Halting with status code {}. Error message: {}.", statusCode, message);
        LOG.info("Halting with status code {}. Error message: {}", statusCode, message);

        if (statusCode >= 500) {
            LOG.error(message);
@@ -122,7 +122,7 @@ public static void logMessageAndHalt(
        if (bugsnag != null && e != null) {
            // create report to send to bugsnag
            Report report = bugsnag.buildReport(e);
            Auth0UserProfile userProfile = request.attribute("user");
            Auth0UserProfile userProfile = request != null ? request.attribute("user") : null;
            String userEmail = userProfile != null ? userProfile.getEmail() : "no-auth";
            report.setUserEmail(userEmail);
            bugsnag.notify(report);
@@ -218,11 +218,16 @@ public static void logRequestOrResponse(
        String bodyString,
        int statusCode
    ) {
        // If request is null, log warning and exit. We do not want to hit an NPE in this method.
        if (request == null) {
            LOG.warn("Request object is null. Cannot log.");
            return;
        }
        Auth0UserProfile userProfile = request.attribute("user");
        String userEmail = userProfile != null ? userProfile.getEmail() : "no-auth";
        String queryString = request.queryParams().size() > 0 ? "?" + request.queryString() : "";
        LOG.info(
            "{} {} {}: {}{}{}{}",
            "{} {} {}: {}{}{} {}",
            logRequest ? "req" : String.format("res (%s)", statusCode),
            userEmail,
            request.requestMethod(),
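
The two edits in this file guard the logging and halting helpers against a null Request, e.g. when they are reached from a background job rather than an HTTP handler. A condensed, hypothetical illustration of that guard using Spark's Request type (not this project's actual helper):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import spark.Request;

public class RequestLoggingSketch {
    private static final Logger LOG = LoggerFactory.getLogger(RequestLoggingSketch.class);

    /** Log a request summary, tolerating a null request instead of throwing an NPE. */
    public static void logRequest(Request request) {
        if (request == null) {
            // Bail out early: there is nothing meaningful to log without a request.
            LOG.warn("Request object is null. Cannot log.");
            return;
        }
        String queryString = request.queryParams().size() > 0 ? "?" + request.queryString() : "";
        LOG.info("req {} {}{}", request.requestMethod(), request.uri(), queryString);
    }
}
```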
@@ -219,7 +219,7 @@ private static Snapshot deleteSnapshot(Request req, Response res) {
        if (snapshot == null) logMessageAndHalt(req, 400, "Must provide valid snapshot ID.");
        try {
            // Remove the snapshot and then renumber the snapshots
            Persistence.snapshots.removeById(snapshot.id);
            snapshot.delete();
            feedSource.renumberSnapshots();
            // FIXME Are there references that need to be removed? E.g., what if the active buffer snapshot is deleted?
            // FIXME delete tables from database?
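
Swapping Persistence.snapshots.removeById(snapshot.id) for snapshot.delete() moves removal behind the model, so the controller no longer needs to know how a snapshot and its related resources get cleaned up. The diff does not show what Snapshot.delete() actually does; purely as an illustration of that encapsulation, with hypothetical names:

```java
/** Hypothetical model/store pair showing delete logic owned by the model rather than the controller. */
public class DeleteEncapsulationSketch {

    interface Store<T> {
        void removeById(String id);
    }

    static class SnapshotLike {
        final String id;
        final Store<SnapshotLike> store;

        SnapshotLike(String id, Store<SnapshotLike> store) {
            this.id = id;
            this.store = store;
        }

        /** The model decides what cleanup happens before the record disappears. */
        void delete() {
            // e.g., release any derived artifacts here before removing the record itself.
            store.removeById(id);
        }
    }

    public static void main(String[] args) {
        Store<SnapshotLike> store = id -> System.out.println("removed snapshot " + id);
        new SnapshotLike("snapshot-1", store).delete();
    }
}
```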
FeedTx.java
@@ -32,8 +32,6 @@
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;

import static com.conveyal.datatools.editor.jobs.ProcessGtfsSnapshotExport.toGtfsDate;

/** a transaction in an agency database */
public class FeedTx extends DatabaseTx {
    private static final Logger LOG = LoggerFactory.getLogger(FeedTx.class);
@@ -124,6 +122,10 @@ public FeedTx(DB tx, boolean buildSecondaryIndices) {
        // editedSinceSnapshot = tx.getAtomicBoolean("editedSinceSnapshot") == null ? tx.createAtomicBoolean("editedSinceSnapshot", false) : editedSinceSnapshot;
    }

    private static int toGtfsDate (LocalDate date) {
        return date.getYear() * 10000 + date.getMonthValue() * 100 + date.getDayOfMonth();
    }

    public void commit () {
        try {
            // editedSinceSnapshot.set(true);
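
The private toGtfsDate helper added to FeedTx above packs a LocalDate into the integer YYYYMMDD form used by GTFS: 2019-10-09 becomes 2019 * 10000 + 10 * 100 + 9 = 20191009. A tiny standalone check of that arithmetic (same formula, hypothetical class name):

```java
import java.time.LocalDate;

public class GtfsDateSketch {
    // Same formula as the toGtfsDate helper added in FeedTx above.
    static int toGtfsDate(LocalDate date) {
        return date.getYear() * 10000 + date.getMonthValue() * 100 + date.getDayOfMonth();
    }

    public static void main(String[] args) {
        System.out.println(toGtfsDate(LocalDate.of(2019, 10, 9))); // prints 20191009
    }
}
```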

This file was deleted.
