Merge pull request #15 from usdot-jpo-ode/release/1.3.0
Merge release 1.3.0 into master
codygarver authored Mar 31, 2023
2 parents 6596cb9 + f8e1df4 commit 3a454be
Showing 20 changed files with 423 additions and 103 deletions.
28 changes: 28 additions & 0 deletions .devcontainer/Dockerfile
@@ -0,0 +1,28 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.217.4/containers/java/.devcontainer/base.Dockerfile

# [Choice] Java version (use -bullseye variants on local arm64/Apple Silicon): 11, 17, 11-bullseye, 17-bullseye, 11-buster, 17-buster
ARG VARIANT="17"
FROM mcr.microsoft.com/vscode/devcontainers/java:0-${VARIANT}

# [Option] Install Maven
ARG INSTALL_MAVEN="true"
ARG MAVEN_VERSION="3.6.3"
# [Option] Install Gradle
ARG INSTALL_GRADLE="false"
ARG GRADLE_VERSION=""
RUN if [ "${INSTALL_MAVEN}" = "true" ]; then su vscode -c "umask 0002 && . /usr/local/sdkman/bin/sdkman-init.sh && sdk install maven \"${MAVEN_VERSION}\""; fi \
&& if [ "${INSTALL_GRADLE}" = "true" ]; then su vscode -c "umask 0002 && . /usr/local/sdkman/bin/sdkman-init.sh && sdk install gradle \"${GRADLE_VERSION}\""; fi

# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
ARG NODE_VERSION="none"
RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi

# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>

# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1

# install kafkacat for testing purposes
RUN apt-get update && apt-get install -y kafkacat
37 changes: 37 additions & 0 deletions .devcontainer/devcontainer.json
@@ -0,0 +1,37 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.217.4/containers/java
{
"name": "Java",
"build": {
"dockerfile": "Dockerfile",
"args": {
// Update the VARIANT arg to pick a Java version: 11, 17
// Append -bullseye or -buster to pin to an OS version.
// Use the -bullseye variants on local arm64/Apple Silicon.
"VARIANT": "11",
// Options
"INSTALL_MAVEN": "true",
"INSTALL_GRADLE": "false",
"NODE_VERSION": "none"
}
},

// Set *default* container specific settings.json values on container create.
"settings": {
"java.home": "/docker-java-home"
},

// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"vscjava.vscode-java-pack"
],

// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],

// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "java -version",

// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode"
}
15 changes: 15 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,15 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "java",
"name": "Launch Application",
"request": "launch",
"mainClass": "jpo.sdw.depositor.Application",
"projectName": "jpo-sdw-depositor"
}
]
}
8 changes: 8 additions & 0 deletions .vscode/settings.json
@@ -0,0 +1,8 @@
{
"java.configuration.updateBuildConfiguration": "automatic",
"java.test.config": {
"name": "testConfig",
"vmArgs": ["-javaagent:/home/vscode/.m2/repository/org/jmockit/jmockit/1.49/jmockit-1.49.jar"]
},
"java.test.defaultConfig": "testConfig"
}
6 changes: 3 additions & 3 deletions Dockerfile
@@ -11,11 +11,11 @@ COPY ./src ./src
RUN mvn clean package -DskipTests

# Run container
FROM openjdk:8u171-jre-alpine
FROM eclipse-temurin:11-jre-alpine

WORKDIR /home
COPY --from=builder /home/target/jpo-sdw-depositor-0.0.1-SNAPSHOT.jar /home
COPY --from=builder /home/target/jpo-sdw-depositor-1.3.0.jar /home

ENTRYPOINT ["java", \
"-jar", \
"/home/jpo-sdw-depositor-0.0.1-SNAPSHOT.jar"]
"/home/jpo-sdw-depositor-1.3.0.jar"]
33 changes: 33 additions & 0 deletions README.md
@@ -6,6 +6,10 @@ Subscribes to a Kafka topic and deposits messages to the Situation Data Warehouse

This is a submodule of the [jpo-ode](https://github.com/usdot-jpo-ode/jpo-ode) repository. It subscribes to a Kafka topic and listens for incoming messages. Upon message arrival, this application deposits it over REST to the SDX.


## Release Notes
The current version and release history of the Jpo-sdw-depositor: [Jpo-sdw-depositor Release Notes](<docs/Release_notes.md>)

# Installation and Operation

### Requirements
@@ -53,3 +57,32 @@ You may configure these values in `jpo-sdw-depositor/src/main/resources/applicat
| sdw.apikey | SDW_API_KEY | SDX API Key (generated by [SDX](https://sdx.trihydro.com)) | (n/a)
| sdw.emailList | SDW_EMAIL_LIST | Comma-delimited email list to send error emails to | [email protected],[email protected]
| sdw.emailFrom | SDW_EMAIL_FROM | Support email to send from | [email protected]

# Confluent Cloud Integration
Rather than using a local Kafka instance, this project can utilize a Kafka instance hosted by Confluent Cloud via SASL.

## Environment variables
### Purpose & Usage
- The DOCKER_HOST_IP environment variable identifies the bootstrap server that the Kafka instance is running on.
- The KAFKA_TYPE environment variable specifies what type of Kafka connection will be attempted and is used to check whether Confluent should be utilized.
- The CONFLUENT_KEY and CONFLUENT_SECRET environment variables are used to authenticate with the bootstrap server.

### Values
- DOCKER_HOST_IP must be set to the bootstrap server address (excluding the port)
- KAFKA_TYPE must be set to "CONFLUENT"
- CONFLUENT_KEY must be set to the API key being utilized for CC
- CONFLUENT_SECRET must be set to the API secret being utilized for CC

## CC Docker Compose File
A docker-compose file (docker-compose-confluent-cloud.yml) is provided that passes the above environment variables into the container it creates. This file does not spin up a local Kafka instance, since one is not required.

## Note
This has only been tested with Confluent Cloud, but any SASL-authenticated Kafka broker should be reachable using this method.
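
For reference, here is a minimal sketch of how such a SASL connection is typically configured with the standard Kafka client properties and the environment variables listed above. This is illustrative only (the port, group id, and `CONFLUENT` check are assumptions), not the depositor's actual implementation:

```java
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ConfluentConsumerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Bootstrap server comes from DOCKER_HOST_IP; port 9092 is assumed here
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, System.getenv("DOCKER_HOST_IP") + ":9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "usdot.jpo.sdw");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");

        // Only attach the SASL settings when KAFKA_TYPE indicates Confluent Cloud
        if ("CONFLUENT".equals(System.getenv("KAFKA_TYPE"))) {
            props.put("security.protocol", "SASL_SSL");
            props.put("sasl.mechanism", "PLAIN");
            props.put("sasl.jaas.config",
                    "org.apache.kafka.common.security.plain.PlainLoginModule required"
                            + " username=\"" + System.getenv("CONFLUENT_KEY") + "\""
                            + " password=\"" + System.getenv("CONFLUENT_SECRET") + "\";");
        }

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        // ... subscribe to the configured topic and poll, as KafkaConsumerRestDepositor does
    }
}
```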

# Unit Testing
The unit tests can be run by executing the following command from the root directory of the project:
```
mvn test
```

Note that Maven and Java are required to run the unit tests. If you do not have Maven or Java installed, you can reopen the project in the provided dev container and run the tests from there.
21 changes: 21 additions & 0 deletions docker-compose-confluent-cloud.yml
@@ -0,0 +1,21 @@
version: '2'
services:
sdw_depositor:
build:
context: .
dockerfile: Dockerfile
environment:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
SDW_EMAIL_LIST: ${SDW_EMAIL_LIST}
SDW_EMAIL_FROM: ${SDW_EMAIL_FROM}
SDW_API_KEY: ${SDW_API_KEY}
SDW_DESTINATION_URL: ${SDW_DESTINATION_URL}
SPRING_MAIL_HOST: ${SPRING_MAIL_HOST}
SPRING_MAIL_PORT: ${SPRING_MAIL_PORT}
KAFKA_TYPE: ${KAFKA_TYPE}
CONFLUENT_KEY: ${CONFLUENT_KEY}
CONFLUENT_SECRET: ${CONFLUENT_SECRET}
logging:
options:
max-size: "10m"
max-file: "5"
35 changes: 35 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,35 @@
version: '2'
services:
zookeeper:
image: wurstmeister/zookeeper
ports:
- "2181:2181"

kafka:
image: wurstmeister/kafka
ports:
- "9092:9092"
environment:
KAFKA_ADVERTISED_HOST_NAME: ${DOCKER_HOST_IP}
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_CREATE_TOPICS: "test:1:1"
volumes:
- /var/run/docker.sock:/var/run/docker.sock

sdw_depositor:
build:
context: .
dockerfile: Dockerfile
environment:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
SDW_EMAIL_LIST: ${SDW_EMAIL_LIST}
SDW_EMAIL_FROM: ${SDW_EMAIL_FROM}
SDW_API_KEY: ${SDW_API_KEY}
SDW_DESTINATION_URL: ${SDW_DESTINATION_URL}
SPRING_MAIL_HOST: ${SPRING_MAIL_HOST}
SPRING_MAIL_PORT: ${SPRING_MAIL_PORT}
SDW_SUBSCRIPTION_TOPIC: ${SDW_SUBSCRIPTION_TOPIC}
logging:
options:
max-size: "10m"
max-file: "5"
23 changes: 23 additions & 0 deletions docs/Release_notes.md
@@ -0,0 +1,23 @@
Jpo-sdw-depositor Release Notes
----------------------------

Version 1.0.0, released Mar 30th 2023
----------------------------------------

### **Summary**
The updates for jpo-sdw-depositor 1.0.0 include Confluent Cloud Integration, some fixes, multiple record deposit functionality and some documentation improvements.

Enhancements in this release:
- Allowed the project to work with an instance of kafka hosted by Confluent Cloud.
- Added CC integration info to README.
- Introduced some new environment variables.
- Added docker and dev container files.
- Added updates to allow unit tests to run.
- Transitioned to depositing multiple records into the SDX rather than a single record at a time.
- Updated base image to eclipse-temurin:11-jre-alpine instead of the deprecated openjdk:8u171-jre-alpine image.

Fixes in this release:
- Fixed some unit tests.
- Swapped over to using the kafka_2.11 library instead of the kafka-clients library.
- Added a fix for tests run via Maven.

40 changes: 34 additions & 6 deletions pom.xml
@@ -1,18 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.8.RELEASE</version>
<version>2.5.0</version>
<relativePath />
<!-- lookup parent from repository -->
</parent>
<groupId>usdot.jpo.ode</groupId>
<artifactId>jpo-sdw-depositor</artifactId>
<version>0.0.1-SNAPSHOT</version>
<version>1.3.0</version>
<packaging>jar</packaging>
<name>JPO SDW Depositor</name>
<name>jpo-sdw-depositor</name>

<properties>
<jmockit.version>1.49</jmockit.version>
</properties>

<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -35,18 +41,19 @@
<dependency>
<groupId>org.jmockit</groupId>
<artifactId>jmockit</artifactId>
<version>1.40</version>
<version>${jmockit.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>0.10.2.2</version>
<artifactId>kafka_2.11</artifactId>
<version>2.4.1</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
@@ -67,12 +74,33 @@
<version>1.0.1.RELEASE</version>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M5</version>
<configuration>
<argLine>-javaagent:${user.home}/.m2/repository/org/jmockit/jmockit/${jmockit.version}/jmockit-${jmockit.version}.jar</argLine>
<!-- <testFailureIgnore>true</testFailureIgnore> -->
<systemPropertyVariables>
<loader.path>${loader.path}</loader.path>
<buildDirectory>${project.build.directory}</buildDirectory>
</systemPropertyVariables>
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.surefire</groupId>
<artifactId>surefire-junit47</artifactId>
<version>3.0.0-M5</version>
</dependency>
</dependencies>
</plugin>
</plugins>
</build>
</project>
11 changes: 9 additions & 2 deletions sample.env
@@ -1,6 +1,13 @@
DOCKER_HOST_IP=10.1.2.3
DOCKER_HOST_IP=
#SDW_GROUP_ID=usdot.jpo.sdw
#SDW_KAFKA_PORT=9092
#SDW_DESTINATION_URL=https://webapp-integration.cvmvp.com/whtools/rest/v2/
SDW_SUBSCRIPTION_TOPIC=<your topic to subscribe to for deposit>
SDW_API_KEY <=your api key>
SDW_API_KEY=<your api key>
SDW_EMAIL_LIST=
SDW_EMAIL_FROM=
SPRING_MAIL_HOST=
SPRING_MAIL_PORT=
KAFKA_TYPE=
CONFLUENT_KEY=
CONFLUENT_SECRET=
@@ -2,6 +2,6 @@

public interface ConsumerDepositor<T> {

public void run(T... t);
void run(T... t);

}
@@ -1,10 +1,12 @@
package jpo.sdw.depositor.consumerdepositors;

import java.time.Duration;
import java.util.Arrays;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -27,12 +29,14 @@ public static boolean loop() {

private RestDepositor<String> restDepositor;
private KafkaConsumer<String, String> kafkaConsumer;
private JSONObject jsonMsgList;
private JSONObject jsonMsg;

public KafkaConsumerRestDepositor(KafkaConsumer<String, String> kafkaConsumer, RestDepositor<String> restDepositor,
String encodeType) {
this.setKafkaConsumer(kafkaConsumer);
this.setRestDepositor(restDepositor);
this.jsonMsgList = new JSONObject();
this.jsonMsg = new JSONObject();
this.jsonMsg.put("EncodeType", encodeType);
}
@@ -41,11 +45,16 @@ public KafkaConsumerRestDepositor(KafkaConsumer<String, String> kafkaConsumer, R
public void run(String... topics) {
this.getKafkaConsumer().subscribe(Arrays.asList(topics));
while (LoopController.loop()) { // NOSONAR (used for unit testing)
ConsumerRecords<String, String> records = this.getKafkaConsumer().poll(100);
ConsumerRecords<String, String> records = this.getKafkaConsumer().poll(Duration.ofMillis(100));
JSONArray jsonRequests = new JSONArray();
for (ConsumerRecord<String, String> record : records) {
logger.info("Depositing message {}", record);
this.jsonMsg.put("EncodedMsg", record.value());
this.getRestDepositor().deposit(jsonMsg.toString());
jsonRequests.put(jsonMsg);
}
if (records.count() != 0) {
this.jsonMsgList.put("depositRequests", jsonRequests);
this.getRestDepositor().deposit(jsonMsgList.toString());
}
}
}
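
For clarity, a minimal standalone sketch of the batched payload this loop now builds. The `EncodeType`, `EncodedMsg`, and `depositRequests` field names come from the diff above; the record values and encode type are placeholders:

```java
import org.json.JSONArray;
import org.json.JSONObject;

public class DepositPayloadSketch {
    public static void main(String[] args) {
        // Placeholder values standing in for polled Kafka record values
        String[] encodedMsgs = { "<encoded msg 1>", "<encoded msg 2>" };

        JSONArray jsonRequests = new JSONArray();
        for (String value : encodedMsgs) {
            // One request object per record, mirroring the fields used above
            JSONObject jsonMsg = new JSONObject();
            jsonMsg.put("EncodeType", "hex"); // placeholder encode type
            jsonMsg.put("EncodedMsg", value);
            jsonRequests.put(jsonMsg);
        }

        // Every request from the poll is wrapped in a single "depositRequests"
        // array and deposited to the SDX in one REST call
        JSONObject jsonMsgList = new JSONObject();
        jsonMsgList.put("depositRequests", jsonRequests);
        System.out.println(jsonMsgList.toString());
    }
}
```

Note that this sketch constructs a fresh `JSONObject` per record for illustration, whereas the committed code reuses a single `jsonMsg` instance across iterations.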
