Skip to content

Commit

Permalink
Fix jitpack (#16)
Browse files Browse the repository at this point in the history
* Bump gradle wrapper

* Clean up cache key

* Fix non-nullable field

* Upgrade spock and groovy

* Use java 17

* Remove toolchain config (cannot be set together with source/target compatibility)

* Set java version for all projects

* Try toolchain in base

* Try toolchain in projects

* Revert build.gradle changes

* Avoid using java 17 method
  • Loading branch information
tuliren authored May 3, 2022
1 parent cd6cfb2 commit f1b0036
Show file tree
Hide file tree
Showing 6 changed files with 69 additions and 38 deletions.
4 changes: 1 addition & 3 deletions .github/workflows/java_ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jobs:
- name: Set up JDK
uses: actions/setup-java@v1
with:
java-version: '14'
java-version: '17'
- name: Validate Gradle wrapper
uses: gradle/wrapper-validation-action@v1
- name: Set up cache
Expand All @@ -25,8 +25,6 @@ jobs:
~/.gradle/caches
~/.gradle/wrapper
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
restore-keys: |
${{ runner.os }}-gradle-
- name: Build with Gradle
run: ./gradlew clean build
- name: Cleanup Gradle Cache
Expand Down
14 changes: 5 additions & 9 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -15,22 +15,18 @@ scmVersion {
}
}

java {
toolchain {
languageVersion.set(JavaLanguageVersion.of(17))
}
}

allprojects {
project.group = 'tech.allegro.schema.json2avro'
project.version = scmVersion.version
sourceCompatibility = JavaVersion.VERSION_17
targetCompatibility = JavaVersion.VERSION_17
}

subprojects {

project.ext.versions = [
spock: '1.3-groovy-2.5',
groovy: '2.5.10'
spock: '2.1-groovy-3.0',
groovy: '3.0.9'
]

repositories {
Expand All @@ -49,5 +45,5 @@ subprojects {
}

wrapper {
gradleVersion = '7.2'
gradleVersion = '7.4.2'
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
Expand All @@ -12,7 +13,7 @@
public final class AdditionalPropertyField {

public static final String DEFAULT_AVRO_FIELD_NAME = "_airbyte_additional_properties";
public static final Set<String> DEFAULT_JSON_FIELD_NAMES = Set.of("_ab_additional_properties", DEFAULT_AVRO_FIELD_NAME);
public static final Set<String> DEFAULT_JSON_FIELD_NAMES = ImmutableSet.of("_ab_additional_properties", DEFAULT_AVRO_FIELD_NAME);
public static final Schema FIELD_SCHEMA = Schema.createUnion(
Schema.create(Schema.Type.NULL),
Schema.createMap(Schema.create(Type.STRING)));
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,25 @@
package tech.allegro.schema.json2avro.converter;

import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static tech.allegro.schema.json2avro.converter.AdditionalPropertyField.DEFAULT_AVRO_FIELD_NAME;
import static tech.allegro.schema.json2avro.converter.AdditionalPropertyField.DEFAULT_JSON_FIELD_NAMES;
import static tech.allegro.schema.json2avro.converter.AvroTypeExceptions.enumException;
import static tech.allegro.schema.json2avro.converter.AvroTypeExceptions.typeException;
import static tech.allegro.schema.json2avro.converter.AvroTypeExceptions.unionException;

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.AvroTypeException;
Expand All @@ -13,25 +32,6 @@
import org.apache.avro.generic.GenericRecordBuilder;
import tech.allegro.schema.json2avro.converter.util.DateTimeUtils;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;

import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static tech.allegro.schema.json2avro.converter.AdditionalPropertyField.DEFAULT_AVRO_FIELD_NAME;
import static tech.allegro.schema.json2avro.converter.AdditionalPropertyField.DEFAULT_JSON_FIELD_NAMES;
import static tech.allegro.schema.json2avro.converter.AvroTypeExceptions.enumException;
import static tech.allegro.schema.json2avro.converter.AvroTypeExceptions.typeException;
import static tech.allegro.schema.json2avro.converter.AvroTypeExceptions.unionException;

public class JsonGenericRecordReader {

private static final Object INCOMPATIBLE = new Object();
Expand Down Expand Up @@ -254,11 +254,16 @@ private Object read(Schema.Field field, Schema schema, Object value, Deque<Strin
private List<Object> readArray(Schema.Field field, Schema schema, List<Object> items, Deque<String> path) {
// When all array elements are supposed to be null or string, we enforce array values to be string.
// This is to properly handle Json arrays that do not follow the schema.
Set<Type> nonNullElementTypes = schema.getElementType()
.getTypes().stream()
.map(Schema::getType)
.filter(t -> t != Type.NULL)
.collect(Collectors.toSet());
Set<Type> nonNullElementTypes;
if (schema.getElementType().isUnion()) {
nonNullElementTypes = schema.getElementType()
.getTypes().stream()
.map(Schema::getType)
.filter(t -> t != Type.NULL)
.collect(Collectors.toSet());
} else {
nonNullElementTypes = Collections.singleton(schema.getElementType().getType());
}
boolean enforceString = nonNullElementTypes.size() == 1 && nonNullElementTypes.contains(Type.STRING);
return items.stream()
.map(item -> read(field, schema.getElementType(), item, path, false, enforceString))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ class JsonAvroConverterSpec extends Specification {

then:
def e = thrown AvroConversionException
e.message == "Failed to convert JSON to Avro: Field field_integer is expected to be type: java.lang.Number"
e.message == "Failed to convert JSON to Avro: Field field_integer is expected to be type: java.lang.Number, but it is: foobar"
}

def "should ignore unknown fields"() {
Expand Down
31 changes: 31 additions & 0 deletions converter/src/test/resources/json_avro_converter.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,37 @@
[
{
"testCase": "simple_schema",
"avroSchema": {
"type": "record",
"name": "test_schema",
"fields": [
{
"name": "username",
"type": "string"
},
{
"name": "active",
"type": "boolean"
},
{
"name": "age",
"type": "int"
}
]
},
"jsonObject": {
"username": "airbyte_developer",
"active": true,
"age": 150
},
"avroObject": {
"username": "airbyte_developer",
"active": true,
"age": 150
}
},
{
"testCase": "nullable_schema",
"avroSchema": {
"type": "record",
"name": "test_schema",
Expand Down

0 comments on commit f1b0036

Please sign in to comment.