diff --git a/RELEASE.md b/RELEASE.md index 7dcf8aaf..ae0b9429 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,6 +1,7 @@ # Release Notes ## [4.3.7] - 2024-09-03 - Added property `spark.cdm.transform.custom.ttl` to allow a custom constant value to be set for TTL instead of using the values from `origin` rows. +- Repo-wide code formatting & import organization ## [4.3.6] - 2024-08-29 - Added `overwrite` option to conditionally check or skip `Validation` when it has a non-null value in `target` for the `spark.cdm.feature.extractJson` feature. diff --git a/pom.xml b/pom.xml index d5d260ac..6fba6e14 100644 --- a/pom.xml +++ b/pom.xml @@ -1,4 +1,6 @@ -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> <groupId>datastax.cdm</groupId> @@ -288,7 +290,7 @@ <counter>LINE</counter> <value>MISSEDCOUNT</value> - <maximum>1400</maximum> + <maximum>1500</maximum> @@ -313,6 +315,39 @@ + <plugin> + <groupId>net.revelc.code</groupId> + <artifactId>impsort-maven-plugin</artifactId> + <version>1.9.0</version> + <configuration> + <groups>java.,javax.,org.,com.</groups> + <staticGroups>java,*</staticGroups> + </configuration> + <executions> + <execution> + <goals> + <goal>sort</goal> + </goals> + </execution> + </executions> + </plugin> + <plugin> + <groupId>net.revelc.code.formatter</groupId> + <artifactId>formatter-maven-plugin</artifactId> + <version>2.23.0</version> + <configuration> + <compilerSource>11</compilerSource> + <compilerCompliance>11</compilerCompliance> + <compilerTargetPlatform>11</compilerTargetPlatform> + </configuration> + <executions> + <execution> + <goals> + <goal>format</goal> + </goals> + </execution> + </executions> + </plugin> diff --git a/src/main/java/com/datastax/cdm/cql/EnhancedSession.java b/src/main/java/com/datastax/cdm/cql/EnhancedSession.java index 40634fef..7aeb7175 100644 --- a/src/main/java/com/datastax/cdm/cql/EnhancedSession.java +++ b/src/main/java/com/datastax/cdm/cql/EnhancedSession.java @@ -15,7 +15,17 @@ */ package com.datastax.cdm.cql; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.datastax.cdm.cql.codec.CodecFactory; +import com.datastax.cdm.cql.codec.Codecset; import com.datastax.cdm.cql.statement.*; +import com.datastax.cdm.data.PKFactory; +import com.datastax.cdm.properties.KnownProperties; +import com.datastax.cdm.properties.PropertyHelper; import com.datastax.cdm.schema.CqlTable; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.type.DataType; @@ -23,15 +33,6 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.cql.codec.CodecFactory; -import com.datastax.cdm.cql.codec.Codecset; -import com.datastax.cdm.data.PKFactory; -import com.datastax.cdm.properties.KnownProperties; -import com.datastax.cdm.properties.PropertyHelper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; public class EnhancedSession { public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); @@ -54,32 +55,40 @@ public EnhancedSession(PropertyHelper propertyHelper, CqlSession cqlSession, boo public void setPKFactory(PKFactory pkFactory) { this.pkFactory = pkFactory; } - public PKFactory getPKFactory() { return this.pkFactory; } + + public PKFactory getPKFactory() { + return this.pkFactory; + } public CqlSession getCqlSession() { return cqlSession; } + public CqlTable getCqlTable() { return cqlTable; } public OriginSelectByPartitionRangeStatement getOriginSelectByPartitionRangeStatement() { - if (!isOrigin) throw new RuntimeException("This is not an origin session"); + if (!isOrigin) + throw new RuntimeException("This is not an origin session"); return new OriginSelectByPartitionRangeStatement(propertyHelper, this); } public OriginSelectByPKStatement getOriginSelectByPKStatement() { - if (!isOrigin) throw new RuntimeException("This is not an origin session"); + if (!isOrigin) + throw new RuntimeException("This is not an origin session"); return new OriginSelectByPKStatement(propertyHelper, this); } public TargetSelectByPKStatement
getTargetSelectByPKStatement() { - if (isOrigin) throw new RuntimeException("This is not a target session"); + if (isOrigin) + throw new RuntimeException("This is not a target session"); return new TargetSelectByPKStatement(propertyHelper, this); } public TargetUpsertStatement getTargetUpsertStatement() { - if (isOrigin) throw new RuntimeException("This is not a target session"); + if (isOrigin) + throw new RuntimeException("This is not a target session"); if (cqlTable.isCounterTable()) return new TargetUpdateStatement(propertyHelper, this); else @@ -88,7 +97,7 @@ public TargetUpsertStatement getTargetUpsertStatement() { private CqlSession initSession(PropertyHelper propertyHelper, CqlSession session) { List<String> codecList = propertyHelper.getStringList(KnownProperties.TRANSFORM_CODECS); - if (null!=codecList && !codecList.isEmpty()) { + if (null != codecList && !codecList.isEmpty()) { MutableCodecRegistry registry = (MutableCodecRegistry) session.getContext().getCodecRegistry(); for (String codecString : codecList) { @@ -96,7 +105,9 @@ private CqlSession initSession(PropertyHelper propertyHelper, CqlSession session for (TypeCodec<?> codec : CodecFactory.getCodecPair(propertyHelper, codecEnum)) { DataType dataType = codec.getCqlType(); GenericType<?> javaType = codec.getJavaType(); - if (logDebug) logger.debug("Registering Codec {} for CQL type {} and Java type {}", codec.getClass().getSimpleName(), dataType, javaType); + if (logDebug) + logger.debug("Registering Codec {} for CQL type {} and Java type {}", + codec.getClass().getSimpleName(), dataType, javaType); try { registry.codecFor(dataType, javaType); } catch (CodecNotFoundException e) { @@ -108,6 +119,4 @@ private CqlSession initSession(PropertyHelper propertyHelper, CqlSession session return session; } - - }
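For context on the block above: `initSession` resolves each entry of the codec-list property (`KnownProperties.TRANSFORM_CODECS`) to a `Codecset` value, asks `CodecFactory` for the matching codec pair, and registers any codec the driver's registry does not already know. A minimal sketch of the equivalent manual registration, assuming an open `CqlSession`; `INT_STRING` is chosen purely for illustration:

```java
import com.datastax.cdm.cql.codec.CodecFactory;
import com.datastax.cdm.cql.codec.Codecset;
import com.datastax.cdm.properties.PropertyHelper;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry;

public class CodecRegistrationSketch {
    // Registers both directions of one codec pair, mirroring the loop in initSession().
    public static void registerIntString(CqlSession session, PropertyHelper propertyHelper) {
        MutableCodecRegistry registry = (MutableCodecRegistry) session.getContext().getCodecRegistry();
        for (TypeCodec<?> codec : CodecFactory.getCodecPair(propertyHelper, Codecset.INT_STRING)) {
            // The production code first probes registry.codecFor(cqlType, javaType) and only
            // registers when that throws CodecNotFoundException; register() is shown for brevity.
            registry.register(codec);
        }
    }
}
```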
diff --git a/src/main/java/com/datastax/cdm/cql/codec/BIGINT_StringCodec.java b/src/main/java/com/datastax/cdm/cql/codec/BIGINT_StringCodec.java index 450eb2ab..777e3525 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/BIGINT_StringCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/BIGINT_StringCodec.java @@ -15,15 +15,16 @@ */ package com.datastax.cdm.cql.codec; +import java.nio.ByteBuffer; + +import org.jetbrains.annotations.NotNull; + +import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.properties.PropertyHelper; -import org.jetbrains.annotations.NotNull; - -import java.nio.ByteBuffer; public class BIGINT_StringCodec extends AbstractBaseCodec<String> { @@ -70,4 +71,3 @@ public String parse(String value) { } } - diff --git a/src/main/java/com/datastax/cdm/cql/codec/CodecFactory.java b/src/main/java/com/datastax/cdm/cql/codec/CodecFactory.java index 3c7cf506..dd2407fc 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/CodecFactory.java +++ b/src/main/java/com/datastax/cdm/cql/codec/CodecFactory.java @@ -15,6 +15,9 @@ */ package com.datastax.cdm.cql.codec; +import java.util.Arrays; +import java.util.List; + import com.datastax.cdm.properties.PropertyHelper; import com.datastax.dse.driver.internal.core.type.codec.geometry.LineStringCodec; import com.datastax.dse.driver.internal.core.type.codec.geometry.PointCodec; @@ -22,25 +25,34 @@ import com.datastax.dse.driver.internal.core.type.codec.time.DateRangeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; -import java.util.Arrays; -import java.util.List; - public class CodecFactory { public static List<TypeCodec<?>> getCodecPair(PropertyHelper propertyHelper, Codecset codec) { switch (codec) { - case INT_STRING: return Arrays.asList(new INT_StringCodec(propertyHelper), new TEXT_IntegerCodec(propertyHelper)); - case DOUBLE_STRING: return Arrays.asList(new DOUBLE_StringCodec(propertyHelper), new TEXT_DoubleCodec(propertyHelper)); - case BIGINT_STRING: return Arrays.asList(new BIGINT_StringCodec(propertyHelper), new TEXT_LongCodec(propertyHelper)); - case DECIMAL_STRING: return Arrays.asList(new DECIMAL_StringCodec(propertyHelper), new TEXT_BigDecimalCodec(propertyHelper)); - case TIMESTAMP_STRING_MILLIS: return Arrays.asList(new TIMESTAMP_StringMillisCodec(propertyHelper), new TEXTMillis_InstantCodec(propertyHelper)); - case TIMESTAMP_STRING_FORMAT: return Arrays.asList(new TIMESTAMP_StringFormatCodec(propertyHelper), new TEXTFormat_InstantCodec(propertyHelper)); - case POLYGON_TYPE: return Arrays.asList(new PolygonCodec()); - case POINT_TYPE: return Arrays.asList(new PointCodec()); - case DATE_RANGE: return Arrays.asList(new DateRangeCodec()); - case LINE_STRING: return Arrays.asList(new LineStringCodec()); + case INT_STRING: + return Arrays.asList(new INT_StringCodec(propertyHelper), new TEXT_IntegerCodec(propertyHelper)); + case DOUBLE_STRING: + return Arrays.asList(new DOUBLE_StringCodec(propertyHelper), new TEXT_DoubleCodec(propertyHelper)); + case BIGINT_STRING: + return Arrays.asList(new BIGINT_StringCodec(propertyHelper), new TEXT_LongCodec(propertyHelper)); + case DECIMAL_STRING: + return Arrays.asList(new DECIMAL_StringCodec(propertyHelper), new TEXT_BigDecimalCodec(propertyHelper)); + case TIMESTAMP_STRING_MILLIS: + return Arrays.asList(new TIMESTAMP_StringMillisCodec(propertyHelper), + new TEXTMillis_InstantCodec(propertyHelper)); + case TIMESTAMP_STRING_FORMAT: + return Arrays.asList(new TIMESTAMP_StringFormatCodec(propertyHelper), + new TEXTFormat_InstantCodec(propertyHelper)); + case POLYGON_TYPE: + return Arrays.asList(new PolygonCodec()); + case POINT_TYPE: + return Arrays.asList(new PointCodec()); + case DATE_RANGE: + return Arrays.asList(new DateRangeCodec()); + case LINE_STRING: + return Arrays.asList(new LineStringCodec()); - default: - throw new IllegalArgumentException("Unknown codec: " + codec); + default: + throw new IllegalArgumentException("Unknown codec: " + codec); } } }
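A usage note on the factory above: the string-mapping entries return two codecs, one for each direction of the conversion, while the DSE geometry/date-range entries return a single codec. A hedged sketch (`PropertyHelper.getInstance()` is assumed to be an available accessor):

```java
import java.util.List;

import com.datastax.cdm.cql.codec.CodecFactory;
import com.datastax.cdm.cql.codec.Codecset;
import com.datastax.cdm.properties.PropertyHelper;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;

public class CodecPairSketch {
    public static void main(String[] args) {
        PropertyHelper propertyHelper = PropertyHelper.getInstance(); // assumed accessor
        List<TypeCodec<?>> pair = CodecFactory.getCodecPair(propertyHelper, Codecset.INT_STRING);
        // Element order follows the Arrays.asList call above:
        // pair.get(0) -> INT_StringCodec   (CQL int column, Java String values)
        // pair.get(1) -> TEXT_IntegerCodec (CQL text column, Java Integer values)
        pair.forEach(codec -> System.out.println(codec.getClass().getSimpleName()));
    }
}
```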
diff --git a/src/main/java/com/datastax/cdm/cql/codec/Codecset.java b/src/main/java/com/datastax/cdm/cql/codec/Codecset.java index ea9851e8..5f8bafaa 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/Codecset.java +++ b/src/main/java/com/datastax/cdm/cql/codec/Codecset.java @@ -16,14 +16,6 @@ package com.datastax.cdm.cql.codec; public enum Codecset { - INT_STRING, - DOUBLE_STRING, - BIGINT_STRING, - DECIMAL_STRING, - TIMESTAMP_STRING_MILLIS, - TIMESTAMP_STRING_FORMAT, - POINT_TYPE, - POLYGON_TYPE, - DATE_RANGE, - LINE_STRING + INT_STRING, DOUBLE_STRING, BIGINT_STRING, DECIMAL_STRING, TIMESTAMP_STRING_MILLIS, TIMESTAMP_STRING_FORMAT, + POINT_TYPE, POLYGON_TYPE, DATE_RANGE, LINE_STRING } diff --git a/src/main/java/com/datastax/cdm/cql/codec/DECIMAL_StringCodec.java b/src/main/java/com/datastax/cdm/cql/codec/DECIMAL_StringCodec.java index f0cae498..c806cffd 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/DECIMAL_StringCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/DECIMAL_StringCodec.java @@ -15,16 +15,17 @@ */ package com.datastax.cdm.cql.codec; +import java.math.BigDecimal; +import java.nio.ByteBuffer; + +import org.jetbrains.annotations.NotNull; + import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import org.jetbrains.annotations.NotNull; - -import java.math.BigDecimal; -import java.nio.ByteBuffer; public class DECIMAL_StringCodec extends AbstractBaseCodec<String> { @@ -70,4 +71,3 @@ public String parse(String value) { return decimalValue == null ? null : decimalValue.toString(); } } - diff --git a/src/main/java/com/datastax/cdm/cql/codec/DOUBLE_StringCodec.java b/src/main/java/com/datastax/cdm/cql/codec/DOUBLE_StringCodec.java index f9b38139..3f755c6d 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/DOUBLE_StringCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/DOUBLE_StringCodec.java @@ -15,17 +15,18 @@ */ package com.datastax.cdm.cql.codec; +import java.math.RoundingMode; +import java.nio.ByteBuffer; +import java.text.DecimalFormat; + +import org.jetbrains.annotations.NotNull; + import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import org.jetbrains.annotations.NotNull; - -import java.math.RoundingMode; -import java.nio.ByteBuffer; -import java.text.DecimalFormat; // This works with decimal-formatted doubles in strings, but not // with the default scientific notation. A separate codec is needed diff --git a/src/main/java/com/datastax/cdm/cql/codec/INT_StringCodec.java b/src/main/java/com/datastax/cdm/cql/codec/INT_StringCodec.java index 093d7448..fce5ad2e 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/INT_StringCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/INT_StringCodec.java @@ -15,15 +15,16 @@ */ package com.datastax.cdm.cql.codec; +import java.nio.ByteBuffer; + +import org.jetbrains.annotations.NotNull; + import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import org.jetbrains.annotations.NotNull; - -import java.nio.ByteBuffer; public class INT_StringCodec extends AbstractBaseCodec<String> { @@ -69,4 +70,3 @@ public String parse(String value) { return intValue == null ? null : intValue.toString(); } } -
diff --git a/src/main/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodec.java b/src/main/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodec.java index 942a8488..10bd0cfd 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodec.java @@ -15,17 +15,6 @@ */ package com.datastax.cdm.cql.codec; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.properties.KnownProperties; -import com.datastax.cdm.properties.PropertyHelper; -import org.jetbrains.annotations.NotNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.nio.ByteBuffer; import java.time.Instant; import java.time.LocalDateTime; @@ -34,6 +23,18 @@ import java.time.format.DateTimeFormatter; import java.time.zone.ZoneRulesProvider; +import org.jetbrains.annotations.NotNull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.datastax.cdm.properties.KnownProperties; +import com.datastax.cdm.properties.PropertyHelper; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; + public class TEXTFormat_InstantCodec extends AbstractBaseCodec<Instant> { public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); @@ -45,13 +46,16 @@ public TEXTFormat_InstantCodec(PropertyHelper propertyHelper) { String formatString = propertyHelper.getString(KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT); if (formatString == null || formatString.isEmpty()) { - throw new IllegalArgumentException("Property " + KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT + " is required and cannot be empty."); + throw new IllegalArgumentException("Property " + KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT + + " is required and cannot be empty."); } this.formatter = DateTimeFormatter.ofPattern(formatString); String zone = propertyHelper.getString(KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE); if (zone == null || !ZoneRulesProvider.getAvailableZoneIds().contains(zone)) { - throw new IllegalArgumentException("Property " + KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE + " is required and must be a valid ZoneOffset."); + throw new IllegalArgumentException( + "Property " + KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE + + " is required and must be a valid ZoneOffset."); } this.zoneOffset = ZoneId.of(zone).getRules().getOffset(Instant.now()); }
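Both timestamp-string codecs validate their configuration the same way: the format property must be a non-empty `DateTimeFormatter` pattern, and the zone property must be a known zone ID, from which a fixed `ZoneOffset` is captured at startup. A self-contained sketch of the round trip the codec performs; the pattern `yyyyMMddHHmmss` and zone `UTC` are stand-ins for the two configured properties:

```java
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class TimestampFormatSketch {
    public static void main(String[] args) {
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMddHHmmss"); // stand-in pattern
        ZoneOffset zoneOffset = ZoneId.of("UTC").getRules().getOffset(Instant.now()); // stand-in zone

        // text -> Instant, as done when reading a formatted string value
        Instant parsed = LocalDateTime.parse("20240903120000", formatter).toInstant(zoneOffset);

        // Instant -> text, the reverse direction
        String formatted = formatter.format(parsed.atOffset(zoneOffset));

        System.out.println(parsed + " <-> " + formatted); // 2024-09-03T12:00:00Z <-> 20240903120000
    }
}
```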
diff --git a/src/main/java/com/datastax/cdm/cql/codec/TEXTMillis_InstantCodec.java b/src/main/java/com/datastax/cdm/cql/codec/TEXTMillis_InstantCodec.java index d985b1a4..22494f65 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/TEXTMillis_InstantCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/TEXTMillis_InstantCodec.java @@ -15,16 +15,17 @@ */ package com.datastax.cdm.cql.codec; +import java.nio.ByteBuffer; +import java.time.Instant; + +import org.jetbrains.annotations.NotNull; + +import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.properties.PropertyHelper; -import org.jetbrains.annotations.NotNull; - -import java.nio.ByteBuffer; -import java.time.Instant; public class TEXTMillis_InstantCodec extends AbstractBaseCodec<Instant> { @@ -69,4 +70,3 @@ public Instant parse(String value) { } } - diff --git a/src/main/java/com/datastax/cdm/cql/codec/TEXT_BigDecimalCodec.java b/src/main/java/com/datastax/cdm/cql/codec/TEXT_BigDecimalCodec.java index b5f94769..43f81b1f 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/TEXT_BigDecimalCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/TEXT_BigDecimalCodec.java @@ -15,16 +15,17 @@ */ package com.datastax.cdm.cql.codec; +import java.math.BigDecimal; +import java.nio.ByteBuffer; + +import org.jetbrains.annotations.NotNull; + +import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.properties.PropertyHelper; -import org.jetbrains.annotations.NotNull; - -import java.math.BigDecimal; -import java.nio.ByteBuffer; public class TEXT_BigDecimalCodec extends AbstractBaseCodec<BigDecimal> {
diff --git a/src/main/java/com/datastax/cdm/cql/codec/TEXT_DoubleCodec.java b/src/main/java/com/datastax/cdm/cql/codec/TEXT_DoubleCodec.java index 393cfd72..18f4ad88 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/TEXT_DoubleCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/TEXT_DoubleCodec.java @@ -15,19 +15,20 @@ */ package com.datastax.cdm.cql.codec; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.properties.PropertyHelper; -import org.jetbrains.annotations.NotNull; +import static com.datastax.cdm.cql.codec.DOUBLE_StringCodec.DOUBLE_FORMAT; import java.math.RoundingMode; import java.nio.ByteBuffer; import java.text.DecimalFormat; -import static com.datastax.cdm.cql.codec.DOUBLE_StringCodec.DOUBLE_FORMAT; +import org.jetbrains.annotations.NotNull; + +import com.datastax.cdm.properties.PropertyHelper; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; // This works with decimal-formatted doubles in strings, but not // with the default scientific notation. A separate codec is needed @@ -36,10 +37,10 @@ public class TEXT_DoubleCodec extends AbstractBaseCodec<Double> { private final DecimalFormat decimalFormat; public TEXT_DoubleCodec(PropertyHelper propertyHelper) { - super(propertyHelper); - decimalFormat = new DecimalFormat(DOUBLE_FORMAT); - decimalFormat.setGroupingUsed(false); - decimalFormat.setRoundingMode(RoundingMode.FLOOR); + super(propertyHelper); + decimalFormat = new DecimalFormat(DOUBLE_FORMAT); + decimalFormat.setGroupingUsed(false); + decimalFormat.setRoundingMode(RoundingMode.FLOOR); } @Override
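The comment carried through both double codecs is the motivation for this `DecimalFormat` setup: `Double.toString` switches to scientific notation for large magnitudes, which would round-trip badly through a text column. A quick illustration; the pattern below is only a stand-in for `DOUBLE_FORMAT`, whose actual value is defined in `DOUBLE_StringCodec`:

```java
import java.math.RoundingMode;
import java.text.DecimalFormat;

public class DoubleFormatSketch {
    public static void main(String[] args) {
        double value = 12345678901234.5;

        DecimalFormat decimalFormat = new DecimalFormat("0.###############"); // stand-in for DOUBLE_FORMAT
        decimalFormat.setGroupingUsed(false);
        decimalFormat.setRoundingMode(RoundingMode.FLOOR);

        System.out.println(Double.toString(value));      // 1.23456789012345E13 (scientific notation)
        System.out.println(decimalFormat.format(value)); // 12345678901234.5 (plain decimal text)
    }
}
```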
diff --git a/src/main/java/com/datastax/cdm/cql/codec/TEXT_IntegerCodec.java b/src/main/java/com/datastax/cdm/cql/codec/TEXT_IntegerCodec.java index e1d15798..f2e97bee 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/TEXT_IntegerCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/TEXT_IntegerCodec.java @@ -15,15 +15,16 @@ */ package com.datastax.cdm.cql.codec; +import java.nio.ByteBuffer; + +import org.jetbrains.annotations.NotNull; + +import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.properties.PropertyHelper; -import org.jetbrains.annotations.NotNull; - -import java.nio.ByteBuffer; public class TEXT_IntegerCodec extends AbstractBaseCodec<Integer> { diff --git a/src/main/java/com/datastax/cdm/cql/codec/TEXT_LongCodec.java b/src/main/java/com/datastax/cdm/cql/codec/TEXT_LongCodec.java index 293704bc..3d638e33 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/TEXT_LongCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/TEXT_LongCodec.java @@ -15,15 +15,16 @@ */ package com.datastax.cdm.cql.codec; +import java.nio.ByteBuffer; + +import org.jetbrains.annotations.NotNull; + +import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.properties.PropertyHelper; -import org.jetbrains.annotations.NotNull; - -import java.nio.ByteBuffer; public class TEXT_LongCodec extends AbstractBaseCodec<Long> { diff --git a/src/main/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringFormatCodec.java b/src/main/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringFormatCodec.java index 2e133770..3fe99220 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringFormatCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringFormatCodec.java @@ -15,17 +15,6 @@ */ package com.datastax.cdm.cql.codec; -import com.datastax.cdm.properties.KnownProperties; -import com.datastax.cdm.properties.PropertyHelper; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import org.jetbrains.annotations.NotNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.nio.ByteBuffer; import java.time.Instant; import java.time.LocalDateTime; @@ -34,6 +23,18 @@ import java.time.format.DateTimeFormatter; import java.time.zone.ZoneRulesProvider; +import org.jetbrains.annotations.NotNull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.datastax.cdm.properties.KnownProperties; +import com.datastax.cdm.properties.PropertyHelper; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; + /** * This codec converts a TIMESTAMP to a Java String with format specified at * KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT using the zone specified at @@ -50,13 +51,16 @@ public TIMESTAMP_StringFormatCodec(PropertyHelper propertyHelper) { String formatString = propertyHelper.getString(KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT); if (formatString == null || formatString.isEmpty()) { - throw new IllegalArgumentException("Property " + KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT + " is required and cannot be empty."); + throw new IllegalArgumentException("Property " + KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT + + " is required and cannot be empty."); } this.formatter = DateTimeFormatter.ofPattern(formatString); String zone = propertyHelper.getString(KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE); if (zone == null || !ZoneRulesProvider.getAvailableZoneIds().contains(zone)) { - throw new IllegalArgumentException("Property " + KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE + " is required and must be a valid ZoneOffset."); + throw new IllegalArgumentException( + "Property " + KnownProperties.TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE + + " is required and must be a valid ZoneOffset."); } this.zoneOffset = ZoneId.of(zone).getRules().getOffset(Instant.now()); } @@ -101,4 +105,3 @@ public String parse(String value) { return formatter.format(instantValue.atOffset(zoneOffset)); } } - diff --git a/src/main/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringMillisCodec.java b/src/main/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringMillisCodec.java index 2690ec4e..527700e5 100644 --- a/src/main/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringMillisCodec.java +++ b/src/main/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringMillisCodec.java @@ -15,16 +15,17 @@ */ package com.datastax.cdm.cql.codec; +import java.nio.ByteBuffer; +import java.time.Instant; + +import org.jetbrains.annotations.NotNull; + +import com.datastax.cdm.properties.PropertyHelper; import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.cdm.properties.PropertyHelper; -import org.jetbrains.annotations.NotNull; - -import java.nio.ByteBuffer; -import java.time.Instant; public class TIMESTAMP_StringMillisCodec extends AbstractBaseCodec<String> { diff --git a/src/main/java/com/datastax/cdm/cql/statement/BaseCdmStatement.java b/src/main/java/com/datastax/cdm/cql/statement/BaseCdmStatement.java index f0c74c5d..151ce7d2 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/BaseCdmStatement.java +++
b/src/main/java/com/datastax/cdm/cql/statement/BaseCdmStatement.java @@ -15,14 +15,14 @@ */ package com.datastax.cdm.cql.statement; +import java.util.ArrayList; +import java.util.List; + import com.datastax.cdm.cql.EnhancedSession; import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.cdm.schema.CqlTable; import com.datastax.oss.driver.api.core.cql.*; -import java.util.ArrayList; -import java.util.List; - public class BaseCdmStatement { protected IPropertyHelper propertyHelper; @@ -33,7 +33,7 @@ public class BaseCdmStatement { protected List<String> resultColumns = new ArrayList<>(); public BaseCdmStatement(IPropertyHelper propertyHelper, EnhancedSession session) { - if (null==propertyHelper || null==session || null==session.getCqlTable()) + if (null == propertyHelper || null == session || null == session.getCqlTable()) throw new RuntimeException("PropertyHelper or EnhancedSession or EnhancedSession.getCqlTable() is not set"); this.propertyHelper = propertyHelper; this.cqlTable = session.getCqlTable(); @@ -41,7 +41,7 @@ public BaseCdmStatement(IPropertyHelper propertyHelper, EnhancedSession session) } public PreparedStatement prepareStatement() { - if (null==session || null==session.getCqlSession()) + if (null == session || null == session.getCqlSession()) throw new RuntimeException("Session is not set"); if (null == statement || statement.isEmpty()) throw new RuntimeException("Statement is not set"); @@ -52,4 +52,4 @@ public String getCQL() { return statement; } -} \ No newline at end of file +} diff --git a/src/main/java/com/datastax/cdm/cql/statement/OriginSelectByPKStatement.java b/src/main/java/com/datastax/cdm/cql/statement/OriginSelectByPKStatement.java index 14dbb8e0..347b61c3 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/OriginSelectByPKStatement.java +++ b/src/main/java/com/datastax/cdm/cql/statement/OriginSelectByPKStatement.java @@ -31,15 +31,15 @@ public OriginSelectByPKStatement(IPropertyHelper propertyHelper, EnhancedSession public Record getRecord(EnhancedPK pk) { BoundStatement boundStatement = bind(pk); - if (null==boundStatement) + if (null == boundStatement) return null; ResultSet resultSet = session.getCqlSession().execute(boundStatement); - if (null==resultSet) + if (null == resultSet) return null; Row row = resultSet.one(); - if (null==row) + if (null == row) return null; return new Record(pk, row, null); @@ -47,10 +47,7 @@ public Record getRecord(EnhancedPK pk) { @Override public BoundStatement bind(Object... binds) { - if (null==binds - || binds.length != 1 - || null==binds[0] - || !(binds[0] instanceof EnhancedPK)) + if (null == binds || binds.length != 1 || null == binds[0] || !(binds[0] instanceof EnhancedPK)) throw new RuntimeException("Expected 1 nullable bind of type EnhancedPK, got " + binds.length); EnhancedPK pk = (EnhancedPK) binds[0]; @@ -58,9 +55,8 @@ public BoundStatement bind(Object...
binds) { BoundStatement boundStatement = prepareStatement().bind(); boundStatement = session.getPKFactory().bindWhereClause(PKFactory.Side.ORIGIN, pk, boundStatement, 0); - return boundStatement - .setConsistencyLevel(cqlTable.getReadConsistencyLevel()) - .setPageSize(cqlTable.getFetchSizeInRows()); + return boundStatement.setConsistencyLevel(cqlTable.getReadConsistencyLevel()) + .setPageSize(cqlTable.getFetchSizeInRows()); } @Override diff --git a/src/main/java/com/datastax/cdm/cql/statement/OriginSelectByPartitionRangeStatement.java b/src/main/java/com/datastax/cdm/cql/statement/OriginSelectByPartitionRangeStatement.java index 1c8ea04e..85b8d02c 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/OriginSelectByPartitionRangeStatement.java +++ b/src/main/java/com/datastax/cdm/cql/statement/OriginSelectByPartitionRangeStatement.java @@ -15,6 +15,8 @@ */ package com.datastax.cdm.cql.statement; +import java.math.BigInteger; + import com.datastax.cdm.cql.EnhancedSession; import com.datastax.cdm.feature.Featureset; import com.datastax.cdm.feature.OriginFilterCondition; @@ -24,8 +26,6 @@ import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.PreparedStatement; -import java.math.BigInteger; - public class OriginSelectByPartitionRangeStatement extends OriginSelectStatement { public OriginSelectByPartitionRangeStatement(IPropertyHelper propertyHelper, EnhancedSession session) { super(propertyHelper, session); @@ -33,9 +33,7 @@ public OriginSelectByPartitionRangeStatement(IPropertyHelper propertyHelper, Enh @Override public BoundStatement bind(Object... binds) { - if (null==binds - || binds.length != 2 - || !(binds[0] instanceof BigInteger) + if (null == binds || binds.length != 2 || !(binds[0] instanceof BigInteger) || !(binds[1] instanceof BigInteger)) throw new RuntimeException("Expected 2 not-null binds of type BigInteger, got " + binds.length); @@ -44,16 +42,16 @@ public BoundStatement bind(Object... binds) { PreparedStatement preparedStatement = prepareStatement(); // random partitioner uses BigInteger, the normal partitioner uses long - return preparedStatement.bind( - cqlTable.hasRandomPartitioner() ? min : min.longValueExact(), - cqlTable.hasRandomPartitioner() ? max : max.longValueExact()) - .setConsistencyLevel(cqlTable.getReadConsistencyLevel()) - .setPageSize(cqlTable.getFetchSizeInRows()); + return preparedStatement + .bind(cqlTable.hasRandomPartitioner() ? min : min.longValueExact(), + cqlTable.hasRandomPartitioner() ? max : max.longValueExact()) + .setConsistencyLevel(cqlTable.getReadConsistencyLevel()).setPageSize(cqlTable.getFetchSizeInRows()); } @Override protected String whereBinds() { - String partitionKey = PropertyHelper.asString(cqlTable.getPartitionKeyNames(true), KnownProperties.PropertyType.STRING_LIST).trim(); + String partitionKey = PropertyHelper + .asString(cqlTable.getPartitionKeyNames(true), KnownProperties.PropertyType.STRING_LIST).trim(); return "TOKEN(" + partitionKey + ") >= ? 
AND TOKEN(" + partitionKey + ") <= ?"; } diff --git a/src/main/java/com/datastax/cdm/cql/statement/OriginSelectStatement.java b/src/main/java/com/datastax/cdm/cql/statement/OriginSelectStatement.java index 1e073045..637ca111 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/OriginSelectStatement.java +++ b/src/main/java/com/datastax/cdm/cql/statement/OriginSelectStatement.java @@ -15,6 +15,11 @@ */ package com.datastax.cdm.cql.statement; +import java.time.Instant; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.datastax.cdm.cql.EnhancedSession; import com.datastax.cdm.data.Record; import com.datastax.cdm.feature.Featureset; @@ -25,10 +30,6 @@ import com.datastax.cdm.schema.CqlTable; import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.time.Instant; public abstract class OriginSelectStatement extends BaseCdmStatement { public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); @@ -45,12 +46,13 @@ public OriginSelectStatement(IPropertyHelper propertyHelper, EnhancedSession ses super(propertyHelper, session); resultColumns.addAll(cqlTable.getColumnNames(false)); - if (null==resultColumns || resultColumns.isEmpty()) { + if (null == resultColumns || resultColumns.isEmpty()) { throw new RuntimeException("No columns found in table " + cqlTable.getTableName()); } WritetimeTTL writetimeTTLFeature = (WritetimeTTL) cqlTable.getFeature(Featureset.WRITETIME_TTL); - if (null!= writetimeTTLFeature && writetimeTTLFeature.isEnabled() && writetimeTTLFeature.hasWriteTimestampFilter()) { + if (null != writetimeTTLFeature && writetimeTTLFeature.isEnabled() + && writetimeTTLFeature.hasWriteTimestampFilter()) { writeTimestampFilterEnabled = true; minWriteTimeStampFilter = writetimeTTLFeature.getMinWriteTimeStampFilter(); maxWriteTimeStampFilter = writetimeTTLFeature.getMaxWriteTimeStampFilter(); @@ -66,7 +68,8 @@ public OriginSelectStatement(IPropertyHelper propertyHelper, EnhancedSession ses filterColumnString = getFilterColumnString(); filterColumnIndex = getFilterColumnIndex(); - filterColumnEnabled = (null != filterColumnIndex && filterColumnIndex >= 0 && null != filterColumnString && !filterColumnString.isEmpty()); + filterColumnEnabled = (null != filterColumnIndex && filterColumnIndex >= 0 && null != filterColumnString + && !filterColumnString.isEmpty()); if (filterColumnEnabled) { logger.info("PARAM -- {}: {} ", KnownProperties.FILTER_COLUMN_NAME, filterColumnString); logger.info("PARAM -- {}: {} ", KnownProperties.FILTER_COLUMN_VALUE, filterColumnString); @@ -94,16 +97,18 @@ protected boolean isRecordValid(Record record) { } public abstract BoundStatement bind(Object... 
binds); + protected abstract String whereBinds(); public boolean shouldFilterRecord(Record record) { - if (null==record || !isRecordValid(record)) + if (null == record || !isRecordValid(record)) return true; if (this.filterColumnEnabled) { String col = (String) cqlTable.getData(this.filterColumnIndex, record.getOriginRow()); - if (null!=col && this.filterColumnString.equalsIgnoreCase(col.trim())) { - if (logger.isInfoEnabled()) logger.info("Filter Column removing: {}", record.getPk()); + if (null != col && this.filterColumnString.equalsIgnoreCase(col.trim())) { + if (logger.isInfoEnabled()) + logger.info("Filter Column removing: {}", record.getPk()); return true; } } @@ -111,12 +116,12 @@ public boolean shouldFilterRecord(Record record) { if (this.writeTimestampFilterEnabled) { // only process rows greater than writeTimeStampFilter Long originWriteTimeStamp = record.getPk().getWriteTimestamp(); - if (null==originWriteTimeStamp) { + if (null == originWriteTimeStamp) { return false; } - if (originWriteTimeStamp < minWriteTimeStampFilter - || originWriteTimeStamp > maxWriteTimeStampFilter) { - if (logger.isInfoEnabled()) logger.info("Timestamp filter removing: {}", record.getPk()); + if (originWriteTimeStamp < minWriteTimeStampFilter || originWriteTimeStamp > maxWriteTimeStampFilter) { + if (logger.isInfoEnabled()) + logger.info("Timestamp filter removing: {}", record.getPk()); return true; } } @@ -133,7 +138,7 @@ protected String buildStatement() { private String getFilterColumnString() { String rtn = propertyHelper.getString(KnownProperties.FILTER_COLUMN_VALUE); - if (null!=rtn) + if (null != rtn) return rtn.trim(); return null; } diff --git a/src/main/java/com/datastax/cdm/cql/statement/TargetInsertStatement.java b/src/main/java/com/datastax/cdm/cql/statement/TargetInsertStatement.java index 122b991e..24f3b489 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/TargetInsertStatement.java +++ b/src/main/java/com/datastax/cdm/cql/statement/TargetInsertStatement.java @@ -41,7 +41,8 @@ public TargetInsertStatement(IPropertyHelper propertyHelper, EnhancedSession ses } @Override - protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long writeTime, Object explodeMapKey, Object explodeMapValue) { + protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long writeTime, Object explodeMapKey, + Object explodeMapValue) { if (null == originRow) throw new RuntimeException("Origin row is null"); if (usingCounter) @@ -53,16 +54,17 @@ protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long wr int currentBindIndex = 0; Object bindValue = null; - if (logDebug) logger.debug("bind using conversions: {}",cqlTable.getOtherCqlTable().getConversions()); + if (logDebug) + logger.debug("bind using conversions: {}", cqlTable.getOtherCqlTable().getConversions()); for (int targetIndex = 0; targetIndex < targetColumnTypes.size(); targetIndex++) { if (!bindColumnIndexes.contains(targetIndex)) { // this happens with constant columns, for example continue; } try { - if (targetIndex== explodeMapKeyIndex) { + if (targetIndex == explodeMapKeyIndex) { bindValue = explodeMapKey; - } else if (targetIndex== explodeMapValueIndex) { + } else if (targetIndex == explodeMapValueIndex) { bindValue = explodeMapValue; } else if (targetIndex == extractJsonFeature.getTargetColumnIndex()) { int originIndex = extractJsonFeature.getOriginColumnIndex(); @@ -76,12 +78,12 @@ protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long wr boundStatement = 
boundStatement.set(currentBindIndex++, bindValue, cqlTable.getBindClass(targetIndex)); } catch (Exception e) { - logger.error( - "Error trying to bind value: {} of class: {} to column: {} of targetDataType: {}/{} at column index: {} and bind index: {} of statement: {}", - bindValue, (null == bindValue ? "unknown" : bindValue.getClass().getName()), - targetColumnNames.get(targetIndex), targetColumnTypes.get(targetIndex), - cqlTable.getBindClass(targetIndex).getName(), targetIndex, (currentBindIndex - 1), - this.getCQL()); + logger.error( + "Error trying to bind value: {} of class: {} to column: {} of targetDataType: {}/{} at column index: {} and bind index: {} of statement: {}", + bindValue, (null == bindValue ? "unknown" : bindValue.getClass().getName()), + targetColumnNames.get(targetIndex), targetColumnTypes.get(targetIndex), + cqlTable.getBindClass(targetIndex).getName(), targetIndex, (currentBindIndex - 1), + this.getCQL()); throw new RuntimeException("Error trying to bind value: ", e); } } @@ -93,8 +95,7 @@ protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long wr boundStatement = boundStatement.set(currentBindIndex++, writeTime, Long.class); } - return boundStatement - .setConsistencyLevel(cqlTable.getWriteConsistencyLevel()) + return boundStatement.setConsistencyLevel(cqlTable.getWriteConsistencyLevel()) .setTimeout(Duration.ofSeconds(10)); } @@ -114,15 +115,17 @@ protected String buildStatement() { bindIndex++; } - if (null!=constantColumnValues && !constantColumnValues.isEmpty()) { + if (null != constantColumnValues && !constantColumnValues.isEmpty()) { // constants are not bound, so no need to increment the bind index valuesList += "," + PropertyHelper.asString(constantColumnValues, KnownProperties.PropertyType.STRING_LIST); } - targetUpdateCQL = "INSERT INTO " + cqlTable.getKeyspaceTable() + - " (" + PropertyHelper.asString(bindColumnNames, KnownProperties.PropertyType.STRING_LIST) + - (null!=constantColumnNames && !constantColumnNames.isEmpty() ? "," + PropertyHelper.asString(constantColumnNames, KnownProperties.PropertyType.STRING_LIST) : "") + - ") VALUES (" + valuesList + ")"; + targetUpdateCQL = "INSERT INTO " + cqlTable.getKeyspaceTable() + " (" + + PropertyHelper.asString(bindColumnNames, KnownProperties.PropertyType.STRING_LIST) + + (null != constantColumnNames && !constantColumnNames.isEmpty() + ? 
"," + PropertyHelper.asString(constantColumnNames, KnownProperties.PropertyType.STRING_LIST) + : "") + + ") VALUES (" + valuesList + ")"; targetUpdateCQL += usingTTLTimestamp(); @@ -134,7 +137,7 @@ private void setBindColumnNamesAndIndexes() { this.bindColumnIndexes = new ArrayList<>(); for (String targetColumnName : this.targetColumnNames) { - if (null==constantColumnNames || !constantColumnNames.contains(targetColumnName)) { + if (null == constantColumnNames || !constantColumnNames.contains(targetColumnName)) { this.bindColumnNames.add(targetColumnName); this.bindColumnIndexes.add(this.targetColumnNames.indexOf(targetColumnName)); } diff --git a/src/main/java/com/datastax/cdm/cql/statement/TargetSelectByPKStatement.java b/src/main/java/com/datastax/cdm/cql/statement/TargetSelectByPKStatement.java index 96673d6e..e81bb05f 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/TargetSelectByPKStatement.java +++ b/src/main/java/com/datastax/cdm/cql/statement/TargetSelectByPKStatement.java @@ -15,6 +15,8 @@ */ package com.datastax.cdm.cql.statement; +import java.util.concurrent.CompletionStage; + import com.datastax.cdm.cql.EnhancedSession; import com.datastax.cdm.data.EnhancedPK; import com.datastax.cdm.data.PKFactory; @@ -27,8 +29,6 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; -import java.util.concurrent.CompletionStage; - public class TargetSelectByPKStatement extends BaseCdmStatement { public TargetSelectByPKStatement(IPropertyHelper propertyHelper, EnhancedSession session) { super(propertyHelper, session); @@ -37,15 +37,15 @@ public TargetSelectByPKStatement(IPropertyHelper propertyHelper, EnhancedSession public Record getRecord(EnhancedPK pk) { BoundStatement boundStatement = bind(pk); - if (null==boundStatement) + if (null == boundStatement) return null; ResultSet resultSet = session.getCqlSession().execute(boundStatement); - if (null==resultSet) + if (null == resultSet) return null; Row row = resultSet.one(); - if (null==row) + if (null == row) return null; return new Record(pk, null, row); @@ -53,7 +53,7 @@ public Record getRecord(EnhancedPK pk) { public CompletionStage getAsyncResult(EnhancedPK pk) { BoundStatement boundStatement = bind(pk); - if (null==boundStatement) + if (null == boundStatement) return null; return session.getCqlSession().executeAsync(boundStatement); } @@ -67,8 +67,9 @@ private BoundStatement bind(EnhancedPK pk) { } private String buildStatement() { - return "SELECT " + PropertyHelper.asString(cqlTable.getColumnNames(true), KnownProperties.PropertyType.STRING_LIST) - + " FROM " + cqlTable.getKeyspaceTable() - + " WHERE " + session.getPKFactory().getWhereClause(PKFactory.Side.TARGET); + return "SELECT " + + PropertyHelper.asString(cqlTable.getColumnNames(true), KnownProperties.PropertyType.STRING_LIST) + + " FROM " + cqlTable.getKeyspaceTable() + " WHERE " + + session.getPKFactory().getWhereClause(PKFactory.Side.TARGET); } -} \ No newline at end of file +} diff --git a/src/main/java/com/datastax/cdm/cql/statement/TargetUpdateStatement.java b/src/main/java/com/datastax/cdm/cql/statement/TargetUpdateStatement.java index ea33d8bd..a5037fd3 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/TargetUpdateStatement.java +++ b/src/main/java/com/datastax/cdm/cql/statement/TargetUpdateStatement.java @@ -15,18 +15,19 @@ */ package com.datastax.cdm.cql.statement; +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
+ import com.datastax.cdm.cql.EnhancedSession; import com.datastax.cdm.data.EnhancedPK; import com.datastax.cdm.data.PKFactory; import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.Row; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.time.Duration; -import java.util.ArrayList; -import java.util.List; public class TargetUpdateStatement extends TargetUpsertStatement { public final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); @@ -39,7 +40,8 @@ public TargetUpdateStatement(IPropertyHelper propertyHelper, EnhancedSession ses } @Override - protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long writeTime, Object explodeMapKey, Object explodeMapValue) { + protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long writeTime, Object explodeMapKey, + Object explodeMapValue) { // We reference the originRow and convert it to the target type. // We need the targetRow if (null == originRow) @@ -71,10 +73,9 @@ protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long wr } targetValue = (null == targetRow ? 0L : cqlTable.getData(targetIndex, targetRow)); bindValueTarget = ((Long) originValue - (null == targetValue ? 0L : (Long) targetValue)); - } - else if (targetIndex== explodeMapKeyIndex) { + } else if (targetIndex == explodeMapKeyIndex) { bindValueTarget = explodeMapKey; - } else if (targetIndex== explodeMapValueIndex) { + } else if (targetIndex == explodeMapValueIndex) { bindValueTarget = explodeMapValue; } else if (targetIndex == extractJsonFeature.getTargetColumnIndex()) { originIndex = extractJsonFeature.getOriginColumnIndex(); @@ -86,11 +87,13 @@ else if (targetIndex== explodeMapKeyIndex) { bindValueTarget = cqlTable.getOtherCqlTable().getAndConvertData(originIndex, originRow); } - boundStatement = boundStatement.set(currentBindIndex++, bindValueTarget, cqlTable.getBindClass(targetIndex)); + boundStatement = boundStatement.set(currentBindIndex++, bindValueTarget, + cqlTable.getBindClass(targetIndex)); } catch (Exception e) { - logger.error("Error trying to bind value:" + bindValueTarget + " to column:" + - targetColumnNames.get(targetIndex) + " of targetDataType:" + targetColumnTypes.get(targetIndex) + "/" - + cqlTable.getBindClass(targetIndex).getName() + " at column index:" + targetIndex); + logger.error("Error trying to bind value:" + bindValueTarget + " to column:" + + targetColumnNames.get(targetIndex) + " of targetDataType:" + + targetColumnTypes.get(targetIndex) + "/" + cqlTable.getBindClass(targetIndex).getName() + + " at column index:" + targetIndex); throw new RuntimeException("Error trying to bind value: ", e); } } @@ -99,8 +102,7 @@ else if (targetIndex== explodeMapKeyIndex) { EnhancedPK pk = pkFactory.getTargetPK(originRow); boundStatement = pkFactory.bindWhereClause(PKFactory.Side.TARGET, pk, boundStatement, currentBindIndex); - return boundStatement - .setConsistencyLevel(cqlTable.getWriteConsistencyLevel()) + return boundStatement.setConsistencyLevel(cqlTable.getWriteConsistencyLevel()) .setTimeout(Duration.ofSeconds(10)); } diff --git a/src/main/java/com/datastax/cdm/cql/statement/TargetUpsertRunDetailsStatement.java b/src/main/java/com/datastax/cdm/cql/statement/TargetUpsertRunDetailsStatement.java index 452b6f1a..13d5f337 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/TargetUpsertRunDetailsStatement.java +++ 
b/src/main/java/com/datastax/cdm/cql/statement/TargetUpsertRunDetailsStatement.java @@ -29,97 +29,97 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; public class TargetUpsertRunDetailsStatement { - private CqlSession session; - private String keyspaceName; - private String tableName; - private long runId; - private long prevRunId; - private BoundStatement boundInitInfoStatement; - private BoundStatement boundInitStatement; - private BoundStatement boundUpdateInfoStatement; - private BoundStatement boundUpdateStatement; - private BoundStatement boundUpdateStartStatement; - private BoundStatement boundSelectStatement; + private CqlSession session; + private String keyspaceName; + private String tableName; + private long runId; + private long prevRunId; + private BoundStatement boundInitInfoStatement; + private BoundStatement boundInitStatement; + private BoundStatement boundUpdateInfoStatement; + private BoundStatement boundUpdateStatement; + private BoundStatement boundUpdateStartStatement; + private BoundStatement boundSelectStatement; - public TargetUpsertRunDetailsStatement(CqlSession session, String keyspaceTable) { - this.session = session; - String[] ksTab = keyspaceTable.split("\\."); - this.keyspaceName = ksTab[0]; - this.tableName = ksTab[1]; - String cdmKsTabInfo = this.keyspaceName + ".cdm_run_info"; - String cdmKsTabDetails = this.keyspaceName + ".cdm_run_details"; + public TargetUpsertRunDetailsStatement(CqlSession session, String keyspaceTable) { + this.session = session; + String[] ksTab = keyspaceTable.split("\\."); + this.keyspaceName = ksTab[0]; + this.tableName = ksTab[1]; + String cdmKsTabInfo = this.keyspaceName + ".cdm_run_info"; + String cdmKsTabDetails = this.keyspaceName + ".cdm_run_details"; - this.session.execute("create table if not exists " + cdmKsTabInfo - + " (table_name text, run_id bigint, run_type text, prev_run_id bigint, start_time timestamp, end_time timestamp, run_info text, primary key (table_name, run_id))"); - this.session.execute("create table if not exists " + cdmKsTabDetails - + " (table_name text, run_id bigint, start_time timestamp, token_min bigint, token_max bigint, status text, primary key ((table_name, run_id), token_min))"); + this.session.execute("create table if not exists " + cdmKsTabInfo + + " (table_name text, run_id bigint, run_type text, prev_run_id bigint, start_time timestamp, end_time timestamp, run_info text, primary key (table_name, run_id))"); + this.session.execute("create table if not exists " + cdmKsTabDetails + + " (table_name text, run_id bigint, start_time timestamp, token_min bigint, token_max bigint, status text, primary key ((table_name, run_id), token_min))"); - boundInitInfoStatement = bindStatement("INSERT INTO " + cdmKsTabInfo - + " (table_name, run_id, run_type, prev_run_id, start_time) VALUES (?, ?, ?, ?, dateof(now()))"); - boundInitStatement = bindStatement("INSERT INTO " + cdmKsTabDetails - + " (table_name, run_id, token_min, token_max, status) VALUES (?, ?, ?, ?, ?)"); - boundUpdateInfoStatement = bindStatement("UPDATE " + cdmKsTabInfo - + " SET end_time = dateof(now()), run_info = ? WHERE table_name = ? AND run_id = ?"); - boundUpdateStatement = bindStatement( - "UPDATE " + cdmKsTabDetails + " SET status = ? WHERE table_name = ? AND run_id = ? AND token_min = ?"); - boundUpdateStartStatement = bindStatement("UPDATE " + cdmKsTabDetails - + " SET start_time = dateof(now()), status = ? WHERE table_name = ? AND run_id = ? 
AND token_min = ?"); - boundSelectStatement = bindStatement("SELECT token_min, token_max FROM " + cdmKsTabDetails - + " WHERE table_name = ? AND run_id = ? and status in ('NOT_STARTED', 'STARTED', 'FAIL', 'DIFF') ALLOW FILTERING"); - } + boundInitInfoStatement = bindStatement("INSERT INTO " + cdmKsTabInfo + + " (table_name, run_id, run_type, prev_run_id, start_time) VALUES (?, ?, ?, ?, dateof(now()))"); + boundInitStatement = bindStatement("INSERT INTO " + cdmKsTabDetails + + " (table_name, run_id, token_min, token_max, status) VALUES (?, ?, ?, ?, ?)"); + boundUpdateInfoStatement = bindStatement("UPDATE " + cdmKsTabInfo + + " SET end_time = dateof(now()), run_info = ? WHERE table_name = ? AND run_id = ?"); + boundUpdateStatement = bindStatement( + "UPDATE " + cdmKsTabDetails + " SET status = ? WHERE table_name = ? AND run_id = ? AND token_min = ?"); + boundUpdateStartStatement = bindStatement("UPDATE " + cdmKsTabDetails + + " SET start_time = dateof(now()), status = ? WHERE table_name = ? AND run_id = ? AND token_min = ?"); + boundSelectStatement = bindStatement("SELECT token_min, token_max FROM " + cdmKsTabDetails + + " WHERE table_name = ? AND run_id = ? and status in ('NOT_STARTED', 'STARTED', 'FAIL', 'DIFF') ALLOW FILTERING"); + } - public Collection getPendingPartitions(long prevRunId) { - this.prevRunId = prevRunId; - if (prevRunId == 0) { - return new ArrayList(); - } + public Collection getPendingPartitions(long prevRunId) { + this.prevRunId = prevRunId; + if (prevRunId == 0) { + return new ArrayList(); + } - final Collection pendingParts = new ArrayList(); - ResultSet rs = session - .execute(boundSelectStatement.setString("table_name", tableName).setLong("run_id", prevRunId)); - rs.forEach(row -> { - Partition part = new Partition(BigInteger.valueOf(row.getLong("token_min")), - BigInteger.valueOf(row.getLong("token_max"))); - pendingParts.add(part); - }); + final Collection pendingParts = new ArrayList(); + ResultSet rs = session + .execute(boundSelectStatement.setString("table_name", tableName).setLong("run_id", prevRunId)); + rs.forEach(row -> { + Partition part = new Partition(BigInteger.valueOf(row.getLong("token_min")), + BigInteger.valueOf(row.getLong("token_max"))); + pendingParts.add(part); + }); - return pendingParts; - } + return pendingParts; + } - public long initCdmRun(Collection parts, RUN_TYPE runType) { - runId = System.currentTimeMillis(); - session.execute(boundInitInfoStatement.setString("table_name", tableName).setLong("run_id", runId) - .setString("run_type", runType.toString()).setLong("prev_run_id", prevRunId)); - parts.forEach(part -> initCdmRun(part)); - return runId; - } + public long initCdmRun(Collection parts, RUN_TYPE runType) { + runId = System.currentTimeMillis(); + session.execute(boundInitInfoStatement.setString("table_name", tableName).setLong("run_id", runId) + .setString("run_type", runType.toString()).setLong("prev_run_id", prevRunId)); + parts.forEach(part -> initCdmRun(part)); + return runId; + } - private void initCdmRun(Partition partition) { - session.execute(boundInitStatement.setString("table_name", tableName).setLong("run_id", runId) - .setLong("token_min", partition.getMin().longValue()) - .setLong("token_max", partition.getMax().longValue()) - .setString("status", TrackRun.RUN_STATUS.NOT_STARTED.toString())); - } + private void initCdmRun(Partition partition) { + session.execute(boundInitStatement.setString("table_name", tableName).setLong("run_id", runId) + .setLong("token_min", partition.getMin().longValue()) + 
.setLong("token_max", partition.getMax().longValue()) + .setString("status", TrackRun.RUN_STATUS.NOT_STARTED.toString())); + } - public void updateCdmRunInfo(String runInfo) { - session.execute(boundUpdateInfoStatement.setString("table_name", tableName).setLong("run_id", runId) - .setString("run_info", runInfo)); - } + public void updateCdmRunInfo(String runInfo) { + session.execute(boundUpdateInfoStatement.setString("table_name", tableName).setLong("run_id", runId) + .setString("run_info", runInfo)); + } - public void updateCdmRun(BigInteger min, TrackRun.RUN_STATUS status) { - if (TrackRun.RUN_STATUS.STARTED.equals(status)) { - session.execute(boundUpdateStartStatement.setString("table_name", tableName).setLong("run_id", runId) - .setLong("token_min", min.longValue()).setString("status", status.toString())); - } else { - session.execute(boundUpdateStatement.setString("table_name", tableName).setLong("run_id", runId) - .setLong("token_min", min.longValue()).setString("status", status.toString())); - } - } + public void updateCdmRun(BigInteger min, TrackRun.RUN_STATUS status) { + if (TrackRun.RUN_STATUS.STARTED.equals(status)) { + session.execute(boundUpdateStartStatement.setString("table_name", tableName).setLong("run_id", runId) + .setLong("token_min", min.longValue()).setString("status", status.toString())); + } else { + session.execute(boundUpdateStatement.setString("table_name", tableName).setLong("run_id", runId) + .setLong("token_min", min.longValue()).setString("status", status.toString())); + } + } - private BoundStatement bindStatement(String stmt) { - if (null == session) - throw new RuntimeException("Session is not set"); - return session.prepare(stmt).bind().setTimeout(Duration.ofSeconds(10)); - } + private BoundStatement bindStatement(String stmt) { + if (null == session) + throw new RuntimeException("Session is not set"); + return session.prepare(stmt).bind().setTimeout(Duration.ofSeconds(10)); + } } diff --git a/src/main/java/com/datastax/cdm/cql/statement/TargetUpsertStatement.java b/src/main/java/com/datastax/cdm/cql/statement/TargetUpsertStatement.java index 14d58a01..dcbeca8f 100644 --- a/src/main/java/com/datastax/cdm/cql/statement/TargetUpsertStatement.java +++ b/src/main/java/com/datastax/cdm/cql/statement/TargetUpsertStatement.java @@ -15,6 +15,10 @@ */ package com.datastax.cdm.cql.statement; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletionStage; + import com.datastax.cdm.cql.EnhancedSession; import com.datastax.cdm.data.EnhancedPK; import com.datastax.cdm.data.Record; @@ -28,10 +32,6 @@ import com.datastax.oss.driver.api.core.cql.*; import com.datastax.oss.driver.api.core.type.DataType; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.CompletionStage; - public abstract class TargetUpsertStatement extends BaseCdmStatement { protected final List targetColumnNames = new ArrayList<>(); protected final List originColumnNames = new ArrayList<>(); @@ -57,7 +57,9 @@ public abstract class TargetUpsertStatement extends BaseCdmStatement { protected ExtractJson extractJsonFeature; protected abstract String buildStatement(); - protected abstract BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long writeTime, Object explodeMapKey, Object explodeMapValue); + + protected abstract BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long writeTime, + Object explodeMapKey, Object explodeMapValue); public TargetUpsertStatement(IPropertyHelper propertyHelper, EnhancedSession session) 
{ super(propertyHelper, session); @@ -72,7 +74,7 @@ public TargetUpsertStatement(IPropertyHelper propertyHelper, EnhancedSession ses originColumnNames.addAll(cqlTable.getOtherCqlTable().getColumnNames(true)); originColumnTypes.addAll(cqlTable.getOtherCqlTable().getColumnCqlTypes()); setConstantColumns(); - if (null!=explodeMapFeature && explodeMapFeature.isEnabled()) { + if (null != explodeMapFeature && explodeMapFeature.isEnabled()) { this.explodeMapKeyIndex = explodeMapFeature.getKeyColumnIndex(); this.explodeMapValueIndex = explodeMapFeature.getValueColumnIndex(); } @@ -83,14 +85,15 @@ public TargetUpsertStatement(IPropertyHelper propertyHelper, EnhancedSession ses } public BoundStatement bindRecord(Record record) { - if (null==record) + if (null == record) throw new RuntimeException("record is null"); EnhancedPK pk = record.getPk(); Row originRow = record.getOriginRow(); Row targetRow = record.getTargetRow(); - return bind(originRow, targetRow, pk.getTTL(), pk.getWriteTimestamp(), pk.getExplodeMapKey(), pk.getExplodeMapValue()); + return bind(originRow, targetRow, pk.getTTL(), pk.getWriteTimestamp(), pk.getExplodeMapKey(), + pk.getExplodeMapValue()); } public CompletionStage executeAsync(Statement statement) { @@ -122,7 +125,7 @@ protected String usingTTLTimestamp() { } private void setConstantColumns() { - if (null!=constantColumnFeature && constantColumnFeature.isEnabled()) { + if (null != constantColumnFeature && constantColumnFeature.isEnabled()) { constantColumnNames.addAll(constantColumnFeature.getNames()); constantColumnValues.addAll(constantColumnFeature.getValues()); } @@ -133,7 +136,7 @@ private void setTTLAndWriteTimeBooleans() { usingWriteTime = false; WritetimeTTL wtFeature = (WritetimeTTL) cqlTable.getFeature(Featureset.WRITETIME_TTL); - if (null!=wtFeature && wtFeature.isEnabled()) { + if (null != wtFeature && wtFeature.isEnabled()) { usingTTL = wtFeature.hasTTLColumns(); usingWriteTime = wtFeature.hasWritetimeColumns(); } @@ -143,29 +146,34 @@ protected void checkBindInputs(Integer ttl, Long writeTime, Object explodeMapKey if (haveCheckedBindInputsOnce) return; - if (usingTTL && null==ttl) - throw new RuntimeException(KnownProperties.ORIGIN_TTL_NAMES +" specified, but no TTL value was provided"); + if (usingTTL && null == ttl) + throw new RuntimeException(KnownProperties.ORIGIN_TTL_NAMES + " specified, but no TTL value was provided"); - if (usingWriteTime && null==writeTime) - throw new RuntimeException(KnownProperties.ORIGIN_WRITETIME_NAMES + " specified, but no WriteTime value was provided"); + if (usingWriteTime && null == writeTime) + throw new RuntimeException( + KnownProperties.ORIGIN_WRITETIME_NAMES + " specified, but no WriteTime value was provided"); - if (null!=explodeMapFeature && explodeMapFeature.isEnabled()) { - if (null==explodeMapKey) + if (null != explodeMapFeature && explodeMapFeature.isEnabled()) { + if (null == explodeMapKey) throw new RuntimeException("ExplodeMap is enabled, but no map key was provided"); else if (!cqlTable.getBindClass(explodeMapKeyIndex).isAssignableFrom(explodeMapKey.getClass())) - throw new RuntimeException("ExplodeMap is enabled, but the map key type provided "+explodeMapKey.getClass().getName()+" is not compatible with "+cqlTable.getBindClass(explodeMapKeyIndex).getName()); + throw new RuntimeException( + "ExplodeMap is enabled, but the map key type provided " + explodeMapKey.getClass().getName() + + " is not compatible with " + cqlTable.getBindClass(explodeMapKeyIndex).getName()); - if (null==explodeMapValue) + if (null == 
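The checkBindInputs guards above reject a bind value whose runtime class cannot satisfy the target column's bind class; the check reduces to Class.isAssignableFrom. A self-contained illustration (the bind classes and messages are invented for the example):

```java
public class BindCompatibilityDemo {

    // Same shape as the checkBindInputs guard: the value's runtime class must
    // be assignable to the class the target column binds to.
    static void requireCompatible(Class<?> bindClass, Object value, String what) {
        if (value == null)
            throw new RuntimeException("ExplodeMap is enabled, but no " + what + " was provided");
        if (!bindClass.isAssignableFrom(value.getClass()))
            throw new RuntimeException("ExplodeMap is enabled, but the " + what + " type provided "
                    + value.getClass().getName() + " is not compatible with " + bindClass.getName());
    }

    public static void main(String[] args) {
        requireCompatible(Number.class, 42, "map key");      // passes: Integer is a Number
        requireCompatible(Integer.class, "oops", "map key"); // throws: String is not an Integer
    }
}
```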
explodeMapValue) throw new RuntimeException("ExplodeMap is enabled, but no map value was provided"); else if (!cqlTable.getBindClass(explodeMapValueIndex).isAssignableFrom(explodeMapValue.getClass())) - throw new RuntimeException("ExplodeMap is enabled, but the map value type provided "+explodeMapValue.getClass().getName()+" is not compatible with "+cqlTable.getBindClass(explodeMapValueIndex).getName()); + throw new RuntimeException( + "ExplodeMap is enabled, but the map value type provided " + explodeMapValue.getClass().getName() + + " is not compatible with " + cqlTable.getBindClass(explodeMapValueIndex).getName()); } // this is the only place this variable is modified, so suppress the warning - //noinspection SynchronizeOnNonFinalField + // noinspection SynchronizeOnNonFinalField synchronized (this.haveCheckedBindInputsOnce) { this.haveCheckedBindInputsOnce = true; } } -} \ No newline at end of file +} diff --git a/src/main/java/com/datastax/cdm/data/CqlConversion.java b/src/main/java/com/datastax/cdm/data/CqlConversion.java index 1e278200..fc52c458 100644 --- a/src/main/java/com/datastax/cdm/data/CqlConversion.java +++ b/src/main/java/com/datastax/cdm/data/CqlConversion.java @@ -15,31 +15,27 @@ */ package com.datastax.cdm.data; +import java.nio.ByteBuffer; +import java.util.*; +import java.util.stream.Collectors; + +import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.datastax.cdm.schema.CqlTable; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.*; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; -import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.nio.ByteBuffer; -import java.util.*; -import java.util.stream.Collectors; public class CqlConversion { public static final Logger logger = LoggerFactory.getLogger(CqlConversion.class); public static final ProtocolVersion PROTOCOL_VERSION = ProtocolVersion.DEFAULT; enum Type { - NONE, - CODEC, - UDT, - LIST, - SET, - MAP, + NONE, CODEC, UDT, LIST, SET, MAP, // TODO: add TUPLE to this list if we want to convert element types within a tuple UNSUPPORTED } @@ -50,8 +46,9 @@ enum Type { private final CodecRegistry codecRegistry; public CqlConversion(DataType fromDataType, DataType toDataType, CodecRegistry codecRegistry) { - if (null==fromDataType || null==toDataType || null==codecRegistry) - throw new IllegalArgumentException("CqlConversion() - fromDataType, toDataType, and codecRegistry must be non-null"); + if (null == fromDataType || null == toDataType || null == codecRegistry) + throw new IllegalArgumentException( + "CqlConversion() - fromDataType, toDataType, and codecRegistry must be non-null"); CqlData.Type fromCqlDataType = CqlData.toType(fromDataType); CqlData.Type toCqlDataType = CqlData.toType(toDataType); @@ -61,21 +58,22 @@ public CqlConversion(DataType fromDataType, DataType toDataType, CodecRegistry c this.conversionTypeList = new ArrayList<>(); this.codecRegistry = codecRegistry; - if (logger.isDebugEnabled()) logger.debug("CqlConversion() - fromDataType: {}/{} toDataType: {}/{}", fromDataType, fromCqlDataType, toDataType, toCqlDataType); + if (logger.isDebugEnabled()) + logger.debug("CqlConversion() - fromDataType: {}/{} toDataType: {}/{}", fromDataType, fromCqlDataType, + toDataType, 
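The run-once epilogue of checkBindInputs synchronizes on a non-final Boolean field, which is why the inspection has to be suppressed. For comparison only (this is not CDM's code), the same run-once semantics are commonly written with an AtomicBoolean:

```java
import java.util.concurrent.atomic.AtomicBoolean;

public class OnceOnlyCheck {

    private final AtomicBoolean checkedOnce = new AtomicBoolean(false);

    void checkBindInputs() {
        // compareAndSet flips false -> true exactly once across all threads;
        // every later call returns immediately.
        if (!checkedOnce.compareAndSet(false, true))
            return;
        System.out.println("validating bind inputs (runs once)");
    }

    public static void main(String[] args) {
        OnceOnlyCheck c = new OnceOnlyCheck();
        c.checkBindInputs();
        c.checkBindInputs(); // no-op
    }
}
```

Note the trade-off: the diff's version sets the flag only after validation succeeds, so a failing configuration is re-checked on the next call, while this CAS-first sketch validates at most once.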
toCqlDataType); if (fromCqlDataType == toCqlDataType && fromCqlDataType == CqlData.Type.PRIMITIVE) { fromDataTypeList.add(fromDataType); toDataTypeList.add(toDataType); this.conversionTypeList.add(calcConversionTypeForPrimitives(fromDataType, toDataType, codecRegistry)); - } - else if (CqlData.isCollection(fromDataType) && CqlData.isCollection(toDataType) && - CqlData.toType(fromDataType) == CqlData.toType(toDataType)) { + } else if (CqlData.isCollection(fromDataType) && CqlData.isCollection(toDataType) + && CqlData.toType(fromDataType) == CqlData.toType(toDataType)) { fromDataTypeList.addAll(CqlData.extractDataTypesFromCollection(fromDataType)); toDataTypeList.addAll(CqlData.extractDataTypesFromCollection(toDataType)); conversionTypeList.addAll(calcConversionTypeForCollections(fromDataType, toDataType, codecRegistry)); - } - else { - logger.warn("Conversion does not currently know how to convert between {} and {}",fromDataType.asCql(true,true),toDataType.asCql(true,true)); + } else { + logger.warn("Conversion does not currently know how to convert between {} and {}", + fromDataType.asCql(true, true), toDataType.asCql(true, true)); fromDataTypeList.add(fromDataType); toDataTypeList.add(toDataType); conversionTypeList.add(Type.UNSUPPORTED); @@ -83,68 +81,82 @@ else if (CqlData.isCollection(fromDataType) && CqlData.isCollection(toDataType) } public Object convert(Object inputData) { - if (null==conversionTypeList || conversionTypeList.isEmpty()) + if (null == conversionTypeList || conversionTypeList.isEmpty()) return inputData; - if (logger.isTraceEnabled()) logger.trace("convert() - inputData: {}, converter: {}",inputData,this); + if (logger.isTraceEnabled()) + logger.trace("convert() - inputData: {}, converter: {}", inputData, this); // The first element on the conversionTypeList tells us what conversion the top-level object requires Type conversionType = conversionTypeList.get(0); switch (conversionType) { - case NONE: - case UNSUPPORTED: - return inputData; - case CODEC: - case UDT: - return convert_ONE(conversionType, inputData, fromDataTypeList.get(0), toDataTypeList.get(0), codecRegistry); - case LIST: - case SET: - case MAP: - return convert_COLLECTION(conversionType, inputData, conversionTypeList.subList(1,conversionTypeList.size()), fromDataTypeList, toDataTypeList, codecRegistry); + case NONE: + case UNSUPPORTED: + return inputData; + case CODEC: + case UDT: + return convert_ONE(conversionType, inputData, fromDataTypeList.get(0), toDataTypeList.get(0), + codecRegistry); + case LIST: + case SET: + case MAP: + return convert_COLLECTION(conversionType, inputData, + conversionTypeList.subList(1, conversionTypeList.size()), fromDataTypeList, toDataTypeList, + codecRegistry); } logger.warn("Conversion.convert() - Unknown conversion type: {}", conversionType); return inputData; } public static List getConversions(CqlTable fromTable, CqlTable toTable) { - if (null==fromTable || null==toTable) + if (null == fromTable || null == toTable) throw new IllegalArgumentException("fromTable and/or toTable is null"); List cqlConversions = new ArrayList<>(); List fromDataTypes = fromTable.getColumnCqlTypes(); List toDataTypes = toTable.getColumnCqlTypes(); - if (logger.isDebugEnabled()) logger.debug("getConversions() - From {} columns {} of types {}",fromTable.isOrigin()?"origin":"target",fromTable.getColumnNames(false),fromDataTypes); - if (logger.isDebugEnabled())logger.debug("getConversions() - To {} columns {} of types {}", 
toTable.isOrigin()?"origin":"target",toTable.getColumnNames(false),toDataTypes); + if (logger.isDebugEnabled()) + logger.debug("getConversions() - From {} columns {} of types {}", + fromTable.isOrigin() ? "origin" : "target", fromTable.getColumnNames(false), fromDataTypes); + if (logger.isDebugEnabled()) + logger.debug("getConversions() - To {} columns {} of types {}", toTable.isOrigin() ? "origin" : "target", + toTable.getColumnNames(false), toDataTypes); - for (int i=0; i= toDataTypes.size()) { - if (logger.isTraceEnabled()) logger.trace("At fromIndex {}, correspondingIndex is {}, setting null conversion",i,correspondingIndex); + if (logger.isTraceEnabled()) + logger.trace("At fromIndex {}, correspondingIndex is {}, setting null conversion", i, + correspondingIndex); cqlConversions.add(null); continue; } DataType toDataType = toDataTypes.get(correspondingIndex); - if (null==toDataType) { - if (logger.isTraceEnabled()) logger.trace("At fromIndex {}, toDataType is null, setting null conversion",i); + if (null == toDataType) { + if (logger.isTraceEnabled()) + logger.trace("At fromIndex {}, toDataType is null, setting null conversion", i); cqlConversions.add(null); - } - else { + } else { cqlConversions.add(new CqlConversion(fromDataType, toDataType, fromTable.getCodecRegistry())); - if (logger.isTraceEnabled()) logger.trace("At fromIndex {} (correspondingIndex {}), have added {}",i, correspondingIndex, cqlConversions.get(cqlConversions.size()-1)); + if (logger.isTraceEnabled()) + logger.trace("At fromIndex {} (correspondingIndex {}), have added {}", i, correspondingIndex, + cqlConversions.get(cqlConversions.size() - 1)); } } return cqlConversions; } - private static Type calcConversionTypeForPrimitives(DataType fromDataType, DataType toDataType, CodecRegistry codecRegistry) { + private static Type calcConversionTypeForPrimitives(DataType fromDataType, DataType toDataType, + CodecRegistry codecRegistry) { if (CqlData.isPrimitive(fromDataType) && CqlData.isPrimitive(toDataType)) { if (fromDataType.equals(toDataType)) return Type.NONE; @@ -157,15 +169,18 @@ private static Type calcConversionTypeForPrimitives(DataType fromDataType, DataT return Type.CODEC; } } - logger.warn("calcConversionTypeForPrimitives requires both types be primitive types: {} and {}",fromDataType.asCql(true,true),toDataType.asCql(true,true)); + logger.warn("calcConversionTypeForPrimitives requires both types be primitive types: {} and {}", + fromDataType.asCql(true, true), toDataType.asCql(true, true)); return Type.UNSUPPORTED; } - private static List calcConversionTypeForCollections(DataType fromDataType, DataType toDataType, CodecRegistry codecRegistry) { + private static List calcConversionTypeForCollections(DataType fromDataType, DataType toDataType, + CodecRegistry codecRegistry) { CqlData.Type fromType = CqlData.toType(fromDataType); CqlData.Type toType = CqlData.toType(toDataType); - if (logger.isTraceEnabled()) logger.trace("calcConversionTypeForCollections() - fromType: {}, toType: {}",fromType,toType); + if (logger.isTraceEnabled()) + logger.trace("calcConversionTypeForCollections() - fromType: {}, toType: {}", fromType, toType); if (CqlData.isCollection(fromDataType) && fromType.equals(toType)) { // If the collection is a UDT, then we are done - no need to review elements as convert_UDT will handle it @@ -174,8 +189,9 @@ private static List calcConversionTypeForCollections(DataType fromDataType List fromElementTypes = CqlData.extractDataTypesFromCollection(fromDataType); List toElementTypes = 
CqlData.extractDataTypesFromCollection(toDataType); - if (fromElementTypes.size()!=toElementTypes.size()) { - logger.warn("Collections must have same number of elements: {} and {}",fromDataType.asCql(true,true),toDataType.asCql(true,true)); + if (fromElementTypes.size() != toElementTypes.size()) { + logger.warn("Collections must have same number of elements: {} and {}", fromDataType.asCql(true, true), + toDataType.asCql(true, true)); return Collections.singletonList(Type.UNSUPPORTED); } @@ -183,21 +199,22 @@ private static List calcConversionTypeForCollections(DataType fromDataType // The rest will be the conversion types for the elements of the collection List rtn = new ArrayList<>(); switch (fromType) { - case LIST: - rtn.add(Type.LIST); - break; - case SET: - rtn.add(Type.SET); - break; - case MAP: - rtn.add(Type.MAP); - break; - default: - logger.warn("calcConversionTypeForCollections requires collection type to be LIST, SET, or MAP: {}",fromDataType.asCql(true,true)); - return Collections.singletonList(Type.UNSUPPORTED); + case LIST: + rtn.add(Type.LIST); + break; + case SET: + rtn.add(Type.SET); + break; + case MAP: + rtn.add(Type.MAP); + break; + default: + logger.warn("calcConversionTypeForCollections requires collection type to be LIST, SET, or MAP: {}", + fromDataType.asCql(true, true)); + return Collections.singletonList(Type.UNSUPPORTED); } - for (int i=0; i calcConversionTypeForCollections(DataType fromDataType } else if (fromElementType instanceof UserDefinedType && toElementType instanceof UserDefinedType) { rtn.add(Type.UDT); } else { - logger.warn("Within {}, do not know how to convert between element types {} and {}",fromDataType.asCql(true,true),fromElementType.asCql(true,true),toElementType.asCql(true,true)); + logger.warn("Within {}, do not know how to convert between element types {} and {}", + fromDataType.asCql(true, true), fromElementType.asCql(true, true), + toElementType.asCql(true, true)); rtn.add(Type.UNSUPPORTED); } } return rtn; } - logger.warn("calcConversionTypeForCollections requires both types be collections of the same type: {} and {}",fromDataType.asCql(true,true),toDataType.asCql(true,true)); + logger.warn("calcConversionTypeForCollections requires both types be collections of the same type: {} and {}", + fromDataType.asCql(true, true), toDataType.asCql(true, true)); return Collections.singletonList(Type.UNSUPPORTED); } - protected static Object convert_ONE(Type conversionType, Object inputData, DataType fromDataType, DataType toDataType, CodecRegistry codecRegistry) { - if (logger.isDebugEnabled()) logger.debug("convert_ONE conversionType {} inputData {} fromDataType {} toDataType {}",conversionType,inputData,fromDataType,toDataType); + protected static Object convert_ONE(Type conversionType, Object inputData, DataType fromDataType, + DataType toDataType, CodecRegistry codecRegistry) { + if (logger.isDebugEnabled()) + logger.debug("convert_ONE conversionType {} inputData {} fromDataType {} toDataType {}", conversionType, + inputData, fromDataType, toDataType); switch (conversionType) { - case NONE: - case UNSUPPORTED: - return inputData; - case CODEC: - return convert_CODEC(inputData, fromDataType, toDataType, codecRegistry); - case UDT: - return convert_UDT((UdtValue) inputData, (UserDefinedType) fromDataType, (UserDefinedType) toDataType); + case NONE: + case UNSUPPORTED: + return inputData; + case CODEC: + return convert_CODEC(inputData, fromDataType, toDataType, codecRegistry); + case UDT: + return convert_UDT((UdtValue) inputData, 
(UserDefinedType) fromDataType, (UserDefinedType) toDataType); } return inputData; } @SuppressWarnings("unchecked") - protected static Object convert_CODEC(Object value, DataType fromDataType, DataType toDataType, CodecRegistry codecRegistry) { + protected static Object convert_CODEC(Object value, DataType fromDataType, DataType toDataType, + CodecRegistry codecRegistry) { Class fromClass = CqlData.getBindClass(fromDataType); Class toClass = CqlData.getBindClass(toDataType); - if (logger.isDebugEnabled()) logger.debug("convert_CODEC value {} from {} to {}",value,fromClass,toClass); + if (logger.isDebugEnabled()) + logger.debug("convert_CODEC value {} from {} to {}", value, fromClass, toClass); if (!fromClass.isAssignableFrom(value.getClass())) { - throw new IllegalArgumentException("Value is not of type " + fromClass.getName() + " but of type " + value.getClass().getName()); + throw new IllegalArgumentException( + "Value is not of type " + fromClass.getName() + " but of type " + value.getClass().getName()); } TypeCodec fromCodec = (TypeCodec) codecRegistry.codecFor(toDataType, fromClass); if (fromCodec == null) { - throw new IllegalArgumentException("No codec found in codecRegistry for Java type " + fromClass.getName() + " to CQL type " + toDataType); + throw new IllegalArgumentException("No codec found in codecRegistry for Java type " + fromClass.getName() + + " to CQL type " + toDataType); } TypeCodec toCodec = (TypeCodec) codecRegistry.codecFor(toDataType, toClass); if (toCodec == null) { - throw new IllegalArgumentException("No codec found in codecRegistry for Java type " + toClass.getName() + " to CQL type " + toDataType); + throw new IllegalArgumentException("No codec found in codecRegistry for Java type " + toClass.getName() + + " to CQL type " + toDataType); } ByteBuffer encoded = fromCodec.encode(value, PROTOCOL_VERSION); return toCodec.decode(encoded, PROTOCOL_VERSION); } protected static UdtValue convert_UDT(UdtValue fromUDTValue, UserDefinedType fromUDT, UserDefinedType toUDT) { - if (logger.isDebugEnabled()) logger.debug("convert_UDT fromUDTValue {} of class {} and type {}, converting fromUDT {} toUDT {}", CqlData.getFormattedContent(CqlData.toType(fromUDT),fromUDTValue),fromUDTValue.getClass().getName(),fromUDTValue.getType(),fromUDT,toUDT); - if (null==fromUDTValue) + if (logger.isDebugEnabled()) + logger.debug("convert_UDT fromUDTValue {} of class {} and type {}, converting fromUDT {} toUDT {}", + CqlData.getFormattedContent(CqlData.toType(fromUDT), fromUDTValue), + fromUDTValue.getClass().getName(), fromUDTValue.getType(), fromUDT, toUDT); + if (null == fromUDTValue) return null; List fromFieldTypes = fromUDT.getFieldTypes(); List toFieldTypes = toUDT.getFieldTypes(); - if (null==fromFieldTypes || null==toFieldTypes || fromFieldTypes.size() != toFieldTypes.size()) { + if (null == fromFieldTypes || null == toFieldTypes || fromFieldTypes.size() != toFieldTypes.size()) { throw new IllegalArgumentException("fromUDT and toUDT must not be null and must have the same number of fields"); } if (!fromUDTValue.getType().getClass().equals(fromUDT.getClass())) { @@ -284,24 +315,35 @@ protected static UdtValue convert_UDT(UdtValue fromUDTValue, UserDefinedType fro toUDTValue.set(i, toFieldValue, toCodec); } - if (logger.isDebugEnabled()) logger.debug("convert_UDT returning {} of type {}", CqlData.getFormattedContent(CqlData.toType(toUDT),toUDTValue), toUDTValue.getType()); + if (logger.isDebugEnabled()) + logger.debug("convert_UDT returning {} of type {}", +
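convert_CODEC's core move is a ByteBuffer round-trip: encode with a codec that accepts the origin's Java type, decode with the codec of the target's Java type. The driver's built-in codecs show the mechanics without a cluster (a plain TEXT round-trip here; CDM pairs two different codecs over the same CQL type):

```java
import java.nio.ByteBuffer;

import com.datastax.oss.driver.api.core.ProtocolVersion;
import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;

public class CodecRoundTrip {
    public static void main(String[] args) {
        // Encode a Java value to the wire format, then decode it back.
        ByteBuffer encoded = TypeCodecs.TEXT.encode("hello", ProtocolVersion.DEFAULT);
        String decoded = TypeCodecs.TEXT.decode(encoded, ProtocolVersion.DEFAULT);
        System.out.println(decoded); // hello
    }
}
```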
CqlData.getFormattedContent(CqlData.toType(toUDT), toUDTValue), toUDTValue.getType()); return toUDTValue; } - protected static Object convert_COLLECTION(Type collectionType, Object value, List conversionTypeList, List fromDataTypeList, List toDataTypeList, CodecRegistry codecRegistry) { - CqlData.Type firstDataType = (null==fromDataTypeList || fromDataTypeList.isEmpty()) ? CqlData.Type.UNKNOWN : CqlData.toType(fromDataTypeList.get(0)); - if (logger.isDebugEnabled()) logger.debug("convert_COLLECTION collectionType {} value {} conversionTypeList {} fromDataTypeList {} toDataTypeList {}",collectionType, CqlData.getFormattedContent(firstDataType,value),conversionTypeList,fromDataTypeList,toDataTypeList); - if (null==value) { + protected static Object convert_COLLECTION(Type collectionType, Object value, List conversionTypeList, + List fromDataTypeList, List toDataTypeList, CodecRegistry codecRegistry) { + CqlData.Type firstDataType = (null == fromDataTypeList || fromDataTypeList.isEmpty()) ? CqlData.Type.UNKNOWN + : CqlData.toType(fromDataTypeList.get(0)); + if (logger.isDebugEnabled()) + logger.debug( + "convert_COLLECTION collectionType {} value {} conversionTypeList {} fromDataTypeList {} toDataTypeList {}", + collectionType, CqlData.getFormattedContent(firstDataType, value), conversionTypeList, + fromDataTypeList, toDataTypeList); + if (null == value) { return null; } - if (null==collectionType || null==conversionTypeList || null==fromDataTypeList || null==toDataTypeList + if (null == collectionType || null == conversionTypeList || null == fromDataTypeList || null == toDataTypeList || conversionTypeList.isEmpty() || fromDataTypeList.isEmpty() || toDataTypeList.isEmpty()) { - throw new IllegalArgumentException("conversionType, conversionTypeList, fromDataTypeList, and toDataTypeList must not be null and must not be empty"); + throw new IllegalArgumentException( + "conversionType, conversionTypeList, fromDataTypeList, and toDataTypeList must not be null and must not be empty"); } - if (conversionTypeList.size() != fromDataTypeList.size() || conversionTypeList.size() != toDataTypeList.size()) { - throw new IllegalArgumentException("conversionTypeList, fromDataTypeList, and toDataTypeList must be the same size"); + if (conversionTypeList.size() != fromDataTypeList.size() + || conversionTypeList.size() != toDataTypeList.size()) { + throw new IllegalArgumentException( + "conversionTypeList, fromDataTypeList, and toDataTypeList must be the same size"); } - if (null==codecRegistry) { + if (null == codecRegistry) { throw new IllegalArgumentException("codecRegistry must not be null"); } @@ -310,17 +352,20 @@ protected static Object convert_COLLECTION(Type collectionType, Object value, Li return value; switch (collectionType) { - case LIST: - return ((List) value).stream().map(v -> convert_ONE(conversionTypeList.get(0), v, fromDataTypeList.get(0), toDataTypeList.get(0), codecRegistry)).collect(Collectors.toList()); - case SET: - return ((Set) value).stream().map(v -> convert_ONE(conversionTypeList.get(0), v, fromDataTypeList.get(0), toDataTypeList.get(0), codecRegistry)).collect(Collectors.toSet()); - case MAP: - // There are two conversion types in the element list: one for keys and one for values - return ((Map) value).entrySet().stream() - .collect(Collectors.toMap( - entry -> convert_ONE(conversionTypeList.get(0), entry.getKey(), fromDataTypeList.get(0), toDataTypeList.get(0), codecRegistry), - entry -> convert_ONE(conversionTypeList.get(1), entry.getValue(), fromDataTypeList.get(1), 
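For the MAP case above, the element conversion list carries exactly two entries, index 0 for keys and index 1 for values, and every map entry is rebuilt through that pair. The stream shape, reduced to plain functions:

```java
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class MapRebuildSketch {

    // Rebuild a map by pushing keys and values through separate converters,
    // mirroring the two-entry conversion list used for the MAP case.
    static <K1, V1, K2, V2> Map<K2, V2> convertMap(Map<K1, V1> in,
            Function<K1, K2> keyConv, Function<V1, V2> valConv) {
        return in.entrySet().stream().collect(Collectors.toMap(
                e -> keyConv.apply(e.getKey()),
                e -> valConv.apply(e.getValue())));
    }

    public static void main(String[] args) {
        Map<String, Long> converted = convertMap(Map.of("a", 1, "b", 2),
                String::toUpperCase, Long::valueOf);
        System.out.println(converted); // {A=1, B=2} (iteration order may vary)
    }
}
```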
toDataTypeList.get(1), codecRegistry) - )); + case LIST: + return ((List) value).stream().map(v -> convert_ONE(conversionTypeList.get(0), v, + fromDataTypeList.get(0), toDataTypeList.get(0), codecRegistry)).collect(Collectors.toList()); + case SET: + return ((Set) value).stream().map(v -> convert_ONE(conversionTypeList.get(0), v, fromDataTypeList.get(0), + toDataTypeList.get(0), codecRegistry)).collect(Collectors.toSet()); + case MAP: + // There are two conversion types in the element list: one for keys and one for values + return ((Map) value).entrySet().stream() + .collect(Collectors.toMap( + entry -> convert_ONE(conversionTypeList.get(0), entry.getKey(), fromDataTypeList.get(0), + toDataTypeList.get(0), codecRegistry), + entry -> convert_ONE(conversionTypeList.get(1), entry.getValue(), fromDataTypeList.get(1), + toDataTypeList.get(1), codecRegistry))); } return value; } @@ -339,13 +384,8 @@ List getToDataTypeList() { @Override public String toString() { - return "CqlData{" + - "fromDataTypeList=" + fromDataTypeList + - ", toDataTypeList=" + toDataTypeList + - ", conversionTypeList=" + conversionTypeList + - '}'; + return "CqlData{" + "fromDataTypeList=" + fromDataTypeList + ", toDataTypeList=" + toDataTypeList + + ", conversionTypeList=" + conversionTypeList + '}'; } } - - diff --git a/src/main/java/com/datastax/cdm/data/CqlData.java b/src/main/java/com/datastax/cdm/data/CqlData.java index 1e650e6b..ee651561 100644 --- a/src/main/java/com/datastax/cdm/data/CqlData.java +++ b/src/main/java/com/datastax/cdm/data/CqlData.java @@ -15,25 +15,18 @@ */ package com.datastax.cdm.data; +import java.util.*; + import com.datastax.dse.driver.api.core.type.DseDataTypes; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.*; -import java.util.*; - public class CqlData { public enum Type { - PRIMITIVE, - UDT, - LIST, - SET, - MAP, - TUPLE, - VECTOR, - UNKNOWN + PRIMITIVE, UDT, LIST, SET, MAP, TUPLE, VECTOR, UNKNOWN } - private static final Map> primitiveDataTypeToJavaClassMap = new HashMap<>(); + private static final Map> primitiveDataTypeToJavaClassMap = new HashMap<>(); static { primitiveDataTypeToJavaClassMap.put(DataTypes.TEXT, String.class); primitiveDataTypeToJavaClassMap.put(DataTypes.ASCII, String.class); @@ -54,21 +47,33 @@ public enum Type { primitiveDataTypeToJavaClassMap.put(DataTypes.DECIMAL, java.math.BigDecimal.class); primitiveDataTypeToJavaClassMap.put(DataTypes.TIMEUUID, java.util.UUID.class); primitiveDataTypeToJavaClassMap.put(DataTypes.COUNTER, Long.class); - primitiveDataTypeToJavaClassMap.put(DataTypes.DURATION, com.datastax.oss.driver.api.core.data.CqlDuration.class); - primitiveDataTypeToJavaClassMap.put(DseDataTypes.POLYGON, com.datastax.dse.driver.api.core.data.geometry.Polygon.class); - primitiveDataTypeToJavaClassMap.put(DseDataTypes.POINT, com.datastax.dse.driver.api.core.data.geometry.Point.class); - primitiveDataTypeToJavaClassMap.put(DseDataTypes.LINE_STRING, com.datastax.dse.driver.api.core.data.geometry.LineString.class); - primitiveDataTypeToJavaClassMap.put(DseDataTypes.DATE_RANGE, com.datastax.dse.driver.api.core.data.time.DateRange.class); + primitiveDataTypeToJavaClassMap.put(DataTypes.DURATION, + com.datastax.oss.driver.api.core.data.CqlDuration.class); + primitiveDataTypeToJavaClassMap.put(DseDataTypes.POLYGON, + com.datastax.dse.driver.api.core.data.geometry.Polygon.class); + primitiveDataTypeToJavaClassMap.put(DseDataTypes.POINT, + com.datastax.dse.driver.api.core.data.geometry.Point.class); + 
primitiveDataTypeToJavaClassMap.put(DseDataTypes.LINE_STRING, + com.datastax.dse.driver.api.core.data.geometry.LineString.class); + primitiveDataTypeToJavaClassMap.put(DseDataTypes.DATE_RANGE, + com.datastax.dse.driver.api.core.data.time.DateRange.class); } public static Type toType(DataType dataType) { - if (isPrimitive(dataType)) return Type.PRIMITIVE; - if (dataType instanceof ListType) return Type.LIST; - if (dataType instanceof SetType) return Type.SET; - if (dataType instanceof MapType) return Type.MAP; - if (dataType instanceof TupleType) return Type.TUPLE; - if (dataType instanceof UserDefinedType) return Type.UDT; - if (dataType instanceof VectorType) return Type.VECTOR; + if (isPrimitive(dataType)) + return Type.PRIMITIVE; + if (dataType instanceof ListType) + return Type.LIST; + if (dataType instanceof SetType) + return Type.SET; + if (dataType instanceof MapType) + return Type.MAP; + if (dataType instanceof TupleType) + return Type.TUPLE; + if (dataType instanceof UserDefinedType) + return Type.UDT; + if (dataType instanceof VectorType) + return Type.VECTOR; throw new RuntimeException("Unsupported data type: " + dataType); } @@ -77,35 +82,54 @@ public static boolean isPrimitive(DataType dataType) { } public static boolean isCollection(DataType dataType) { - if (dataType instanceof UserDefinedType) return true; - if (dataType instanceof ListType) return true; - if (dataType instanceof SetType) return true; - if (dataType instanceof MapType) return true; - if (dataType instanceof TupleType) return true; - if (dataType instanceof VectorType) return true; + if (dataType instanceof UserDefinedType) + return true; + if (dataType instanceof ListType) + return true; + if (dataType instanceof SetType) + return true; + if (dataType instanceof MapType) + return true; + if (dataType instanceof TupleType) + return true; + if (dataType instanceof VectorType) + return true; return false; } public static boolean isFrozen(DataType dataType) { - if (isPrimitive(dataType)) return false; - if (dataType instanceof UserDefinedType) return ((UserDefinedType) dataType).isFrozen(); - if (dataType instanceof ListType) return ((ListType) dataType).isFrozen(); - if (dataType instanceof SetType) return ((SetType) dataType).isFrozen(); - if (dataType instanceof MapType) return ((MapType) dataType).isFrozen(); - if (dataType instanceof TupleType) return dataType.asCql(true, false).toLowerCase().contains("frozen<"); + if (isPrimitive(dataType)) + return false; + if (dataType instanceof UserDefinedType) + return ((UserDefinedType) dataType).isFrozen(); + if (dataType instanceof ListType) + return ((ListType) dataType).isFrozen(); + if (dataType instanceof SetType) + return ((SetType) dataType).isFrozen(); + if (dataType instanceof MapType) + return ((MapType) dataType).isFrozen(); + if (dataType instanceof TupleType) + return dataType.asCql(true, false).toLowerCase().contains("frozen<"); // vector CQL data type doesn't support frozen return false; } public static Class getBindClass(DataType dataType) { Class primitiveClass = primitiveDataTypeToJavaClassMap.get(dataType); - if (primitiveClass != null) return primitiveClass; - if (dataType instanceof ListType) return java.util.List.class; - if (dataType instanceof SetType) return java.util.Set.class; - if (dataType instanceof MapType) return java.util.Map.class; - if (dataType instanceof UserDefinedType) return com.datastax.oss.driver.api.core.data.UdtValue.class; - if (dataType instanceof TupleType) return com.datastax.oss.driver.api.core.data.TupleValue.class; 
- if (dataType instanceof VectorType) return com.datastax.oss.driver.api.core.data.CqlVector.class; + if (primitiveClass != null) + return primitiveClass; + if (dataType instanceof ListType) + return java.util.List.class; + if (dataType instanceof SetType) + return java.util.Set.class; + if (dataType instanceof MapType) + return java.util.Map.class; + if (dataType instanceof UserDefinedType) + return com.datastax.oss.driver.api.core.data.UdtValue.class; + if (dataType instanceof TupleType) + return com.datastax.oss.driver.api.core.data.TupleValue.class; + if (dataType instanceof VectorType) + return com.datastax.oss.driver.api.core.data.CqlVector.class; throw new IllegalArgumentException("Unsupported data type: " + dataType); } @@ -113,20 +137,20 @@ public static Class getBindClass(DataType dataType) { public static List extractDataTypesFromCollection(DataType dataType) { CqlData.Type type = CqlData.toType(dataType); switch (type) { - case UDT: - return Collections.singletonList(dataType); - case LIST: - return Collections.singletonList(((ListType) dataType).getElementType()); - case SET: - return Collections.singletonList(((SetType) dataType).getElementType()); - case MAP: - return Arrays.asList(((MapType) dataType).getKeyType(), ((MapType) dataType).getValueType()); - case TUPLE: - return ((TupleType) dataType).getComponentTypes(); - case VECTOR: - return Collections.singletonList(((VectorType) dataType).getElementType()); - default: - return null; + case UDT: + return Collections.singletonList(dataType); + case LIST: + return Collections.singletonList(((ListType) dataType).getElementType()); + case SET: + return Collections.singletonList(((SetType) dataType).getElementType()); + case MAP: + return Arrays.asList(((MapType) dataType).getKeyType(), ((MapType) dataType).getValueType()); + case TUPLE: + return ((TupleType) dataType).getComponentTypes(); + case VECTOR: + return Collections.singletonList(((VectorType) dataType).getElementType()); + default: + return null; } } @@ -138,34 +162,37 @@ public static String getFormattedContent(Type type, Object value) { String closeBracket; try { switch (type) { - case UDT: - return ((UdtValue) value).getFormattedContents(); - case LIST: - case VECTOR: - openBracket = "["; - closeBracket = "]"; - break; - case SET: - case MAP: - openBracket = "{"; - closeBracket = "}"; - break; - case PRIMITIVE: - case UNKNOWN: - case TUPLE: - default: - return value.toString(); + case UDT: + return ((UdtValue) value).getFormattedContents(); + case LIST: + case VECTOR: + openBracket = "["; + closeBracket = "]"; + break; + case SET: + case MAP: + openBracket = "{"; + closeBracket = "}"; + break; + case PRIMITIVE: + case UNKNOWN: + case TUPLE: + default: + return value.toString(); } List objects = DataUtility.extractObjectsFromCollection(value); StringBuilder sb = new StringBuilder(openBracket); for (Object obj : objects) { - if (obj instanceof UdtValue) sb.append(((UdtValue) obj).getFormattedContents()); + if (obj instanceof UdtValue) + sb.append(((UdtValue) obj).getFormattedContents()); else if (obj instanceof Map.Entry) { Object mapKey = ((Map.Entry) obj).getKey(); Object mapValue = ((Map.Entry) obj).getValue(); - String mapKeyStr = mapKey instanceof UdtValue ? ((UdtValue) mapKey).getFormattedContents() : mapKey.toString(); - String mapValueStr = mapValue instanceof UdtValue ? ((UdtValue) mapValue).getFormattedContents() : mapValue.toString(); + String mapKeyStr = mapKey instanceof UdtValue ? 
((UdtValue) mapKey).getFormattedContents() + : mapKey.toString(); + String mapValueStr = mapValue instanceof UdtValue ? ((UdtValue) mapValue).getFormattedContents() + : mapValue.toString(); sb.append(mapKeyStr).append("=").append(mapValueStr); } else { sb.append(obj.toString()); diff --git a/src/main/java/com/datastax/cdm/data/DataUtility.java b/src/main/java/com/datastax/cdm/data/DataUtility.java index e8a27ba6..87a475c6 100644 --- a/src/main/java/com/datastax/cdm/data/DataUtility.java +++ b/src/main/java/com/datastax/cdm/data/DataUtility.java @@ -15,13 +15,14 @@ */ package com.datastax.cdm.data; -import com.datastax.cdm.schema.CqlTable; -import com.datastax.cdm.properties.IPropertyHelper; -import com.datastax.cdm.properties.KnownProperties; +import java.util.*; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.*; +import com.datastax.cdm.properties.IPropertyHelper; +import com.datastax.cdm.properties.KnownProperties; +import com.datastax.cdm.schema.CqlTable; public class DataUtility { public static final Logger logger = LoggerFactory.getLogger(CqlConversion.class); @@ -50,39 +51,46 @@ public static List extractObjectsFromCollection(Object collection) { return objects; } - public static Map getThisToThatColumnNameMap(IPropertyHelper propertyHelper, CqlTable thisCqlTable, CqlTable thatCqlTable) { + public static Map getThisToThatColumnNameMap(IPropertyHelper propertyHelper, CqlTable thisCqlTable, + CqlTable thatCqlTable) { // Property ORIGIN_COLUMN_NAMES_TO_TARGET is a list of origin column name to target column name mappings // Use that as the starting point for the return map - List originColumnNames = thisCqlTable.isOrigin() ? thisCqlTable.getColumnNames(false) : thatCqlTable.getColumnNames(false); - List targetColumnNames = thisCqlTable.isOrigin() ? thatCqlTable.getColumnNames(false) : thisCqlTable.getColumnNames(false); + List originColumnNames = thisCqlTable.isOrigin() ? thisCqlTable.getColumnNames(false) + : thatCqlTable.getColumnNames(false); + List targetColumnNames = thisCqlTable.isOrigin() ? 
thatCqlTable.getColumnNames(false) + : thisCqlTable.getColumnNames(false); if (logger.isDebugEnabled()) { logger.debug("originColumnNames: " + originColumnNames); logger.debug("targetColumnNames: " + targetColumnNames); } - List originColumnNamesToTarget = propertyHelper.getStringList(KnownProperties.ORIGIN_COLUMN_NAMES_TO_TARGET); - Map originToTargetNameMap = new HashMap<>(); - if (null!=originColumnNamesToTarget && !originColumnNamesToTarget.isEmpty()) { + List originColumnNamesToTarget = propertyHelper + .getStringList(KnownProperties.ORIGIN_COLUMN_NAMES_TO_TARGET); + Map originToTargetNameMap = new HashMap<>(); + if (null != originColumnNamesToTarget && !originColumnNamesToTarget.isEmpty()) { for (String pair : originColumnNamesToTarget) { String[] parts = pair.split(":"); - if (parts.length != 2 || null == parts[0] || null == parts[1] || - parts[0].isEmpty() || parts[1].isEmpty()) { - throw new RuntimeException(KnownProperties.ORIGIN_COLUMN_NAMES_TO_TARGET + " pair is mis-configured, either a missing ':' separator or one/both sides are empty: " + pair); + if (parts.length != 2 || null == parts[0] || null == parts[1] || parts[0].isEmpty() + || parts[1].isEmpty()) { + throw new RuntimeException(KnownProperties.ORIGIN_COLUMN_NAMES_TO_TARGET + + " pair is mis-configured, either a missing ':' separator or one/both sides are empty: " + + pair); } String originColumnName = CqlTable.unFormatName(parts[0]); String targetColumnName = CqlTable.unFormatName(parts[1]); if (originColumnNames.contains(originColumnName) && targetColumnNames.contains(targetColumnName)) { originToTargetNameMap.put(originColumnName, targetColumnName); - } - else { - throw new RuntimeException(KnownProperties.ORIGIN_COLUMN_NAMES_TO_TARGET + " one or both columns are not found on the table: " + pair); + } else { + throw new RuntimeException(KnownProperties.ORIGIN_COLUMN_NAMES_TO_TARGET + + " one or both columns are not found on the table: " + pair); } } } - // Next, add any origin column names that are not on the map, and add them if there is a matching target column name + // Next, add any origin column names that are not on the map, and add them if there is a matching target column + // name for (String originColumnName : originColumnNames) { if (!originToTargetNameMap.containsKey(originColumnName)) { if (targetColumnNames.contains(originColumnName)) { @@ -98,7 +106,7 @@ public static Map getThisToThatColumnNameMap(IPropertyHelper prop if (thisCqlTable.isOrigin()) { return originToTargetNameMap; } else { - Map targetToOriginNameMap = new HashMap<>(); + Map targetToOriginNameMap = new HashMap<>(); for (String originColumnName : originToTargetNameMap.keySet()) { String targetColumnName = originToTargetNameMap.get(originColumnName); targetToOriginNameMap.put(targetColumnName, originColumnName); diff --git a/src/main/java/com/datastax/cdm/data/EnhancedPK.java b/src/main/java/com/datastax/cdm/data/EnhancedPK.java index 3afcda2e..6cdef29c 100644 --- a/src/main/java/com/datastax/cdm/data/EnhancedPK.java +++ b/src/main/java/com/datastax/cdm/data/EnhancedPK.java @@ -14,13 +14,15 @@ * limitations under the License. 
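The ORIGIN_COLUMN_NAMES_TO_TARGET parsing above accepts entries of the form origin:target and rejects anything without exactly two non-empty sides. That validation in isolation (table-membership checks omitted):

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ColumnPairParser {

    // Parse "origin:target" pairs, failing loudly on a missing separator or an empty side.
    static Map<String, String> parse(List<String> pairs) {
        Map<String, String> map = new HashMap<>();
        for (String pair : pairs) {
            String[] parts = pair.split(":");
            if (parts.length != 2 || parts[0].isEmpty() || parts[1].isEmpty())
                throw new RuntimeException("pair is mis-configured, either a missing ':' separator "
                        + "or one/both sides are empty: " + pair);
            map.put(parts[0], parts[1]);
        }
        return map;
    }

    public static void main(String[] args) {
        System.out.println(parse(List.of("user_id:uid", "created:created"))); // {user_id=uid, created=created} (order may vary)
    }
}
```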
*/ package com.datastax.cdm.data; -import com.datastax.cdm.feature.ExplodeMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.util.*; import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.datastax.cdm.feature.ExplodeMap; + public class EnhancedPK { public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); boolean logDebug = logger.isDebugEnabled(); @@ -34,14 +36,19 @@ public class EnhancedPK { private boolean warningState = false; private List messages; - private Map explodeMap; + private Map explodeMap; private final Object explodeMapKey; private final Object explodeMapValue; - public EnhancedPK(PKFactory factory, List values, List classes, Integer ttl, Long writeTimestamp, Object explodeMapKey, Object explodeMapValue) { - if (logDebug) {logger.debug("EnhancedPK: values={}, ttl={}, writeTimestamp={}, explodeMapKey={}, explodeMapValue={}", values, ttl, writeTimestamp, explodeMapKey, explodeMapValue);} + public EnhancedPK(PKFactory factory, List values, List classes, Integer ttl, Long writeTimestamp, + Object explodeMapKey, Object explodeMapValue) { + if (logDebug) { + logger.debug("EnhancedPK: values={}, ttl={}, writeTimestamp={}, explodeMapKey={}, explodeMapValue={}", + values, ttl, writeTimestamp, explodeMapKey, explodeMapValue); + } this.factory = factory; - this.values = (null==explodeMapValue? values : new ArrayList<>(values)); // copy the list when we will modify it + this.values = (null == explodeMapValue ? values : new ArrayList<>(values)); // copy the list when we will modify + // it this.classes = classes; this.messages = null; this.writeTimestamp = writeTimestamp; @@ -49,7 +56,9 @@ public EnhancedPK(PKFactory factory, List values, List classes, I this.explodeMapKey = explodeMapKey; this.explodeMapValue = explodeMapValue; - if (null!=explodeMapKey) {this.values.set(factory.getExplodeMapTargetPKIndex(), explodeMapKey);} + if (null != explodeMapKey) { + this.values.set(factory.getExplodeMapTargetPKIndex(), explodeMapKey); + } validate(); } @@ -58,7 +67,8 @@ public EnhancedPK(PKFactory factory, List values, List classes, I this(factory, values, classes, ttl, writeTimestamp, null, null); } - public EnhancedPK(PKFactory factory, List values, List classes, Integer ttl, Long writeTimestamp, Map explodeMap) { + public EnhancedPK(PKFactory factory, List values, List classes, Integer ttl, Long writeTimestamp, + Map explodeMap) { this(factory, values, classes, ttl, writeTimestamp, null, null); this.explodeMap = explodeMap; } @@ -67,41 +77,61 @@ public List explode(ExplodeMap explodeMapFeature) { if (null == explodeMap || explodeMap.isEmpty()) { return Collections.singletonList(this); } - return explodeMapFeature.explode(explodeMap).stream() - .map(entry -> new EnhancedPK(factory, values, classes, ttl, writeTimestamp, entry.getKey(), entry.getValue())) - .collect(Collectors.toList()); + return explodeMapFeature.explode(explodeMap).stream().map(entry -> new EnhancedPK(factory, values, classes, ttl, + writeTimestamp, entry.getKey(), entry.getValue())).collect(Collectors.toList()); + } + + public boolean isError() { + return errorState; + } + + public boolean isWarning() { + return warningState; + } + + public List getPKValues() { + return values; + } + + public String getMessages() { + return (null == messages) ? 
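EnhancedPK.explode above mints one PK per entry of the origin map column. Stripped of the PK machinery, the shape is a one-entry-to-one-element stream map:

```java
import java.util.AbstractMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ExplodeSketch {

    // One output element per map entry, mirroring how explode() mints a new
    // EnhancedPK from each (key, value) pair of the origin map column.
    static <K, V> List<Map.Entry<K, V>> explode(Map<K, V> map) {
        return map.entrySet().stream()
                .<Map.Entry<K, V>>map(AbstractMap.SimpleEntry::new)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(explode(Map.of("a", 1, "b", 2)).size()); // 2 rows from one origin row
    }
}
```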
"" : String.join("; ", messages); + } + + public boolean canExplode() { + return null != explodeMap && !explodeMap.isEmpty(); } - public boolean isError() {return errorState;} - public boolean isWarning() {return warningState;} - public List getPKValues() {return values;} - public String getMessages() {return (null==messages)? "" : String.join("; ", messages);} - public boolean canExplode() {return null != explodeMap && !explodeMap.isEmpty();} public Object getExplodeMapKey() { return this.explodeMapKey; } + public Object getExplodeMapValue() { return this.explodeMapValue; } + public Long getWriteTimestamp() { return this.writeTimestamp; } + public Integer getTTL() { return this.ttl; } private void validate() { - if (null==values || null== classes || values.isEmpty() || values.size() != classes.size()) { - if (null==this.messages) this.messages = new ArrayList<>(); + if (null == values || null == classes || values.isEmpty() || values.size() != classes.size()) { + if (null == this.messages) + this.messages = new ArrayList<>(); this.messages.add("ERROR: types and/or values are null and/or empty, or are not the same size"); this.errorState = true; return; } - for (int i=0; i0) sb.append(" %% "); + for (int i = 0; i < values.size(); i++) { + if (i > 0) + sb.append(" %% "); sb.append((null == values.get(i)) ? "(null)" : values.get(i)); } String rawPK = sb.toString(); diff --git a/src/main/java/com/datastax/cdm/data/PKFactory.java b/src/main/java/com/datastax/cdm/data/PKFactory.java index b31ccf3a..cc9fc693 100644 --- a/src/main/java/com/datastax/cdm/data/PKFactory.java +++ b/src/main/java/com/datastax/cdm/data/PKFactory.java @@ -15,30 +15,28 @@ */ package com.datastax.cdm.data; +import java.util.*; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.datastax.cdm.feature.*; +import com.datastax.cdm.properties.PropertyHelper; import com.datastax.cdm.schema.CqlTable; import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.cdm.properties.PropertyHelper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; -import java.util.stream.Collectors; public class PKFactory { public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); boolean logDebug = logger.isDebugEnabled(); private enum LookupMethod { - ORIGIN_COLUMN, - CONSTANT_COLUMN, - EXPLODE_MAP + ORIGIN_COLUMN, CONSTANT_COLUMN, EXPLODE_MAP } public enum Side { - ORIGIN, - TARGET + ORIGIN, TARGET } private final CqlTable originTable; @@ -59,7 +57,6 @@ public enum Side { private final Integer explodeMapTargetPKIndex; private final ExplodeMap explodeMapFeature; - public PKFactory(PropertyHelper propertyHelper, CqlTable originTable, CqlTable targetTable) { this.originTable = originTable; @@ -68,13 +65,13 @@ public PKFactory(PropertyHelper propertyHelper, CqlTable originTable, CqlTable t this.targetPKLookupMethods = new ArrayList<>(); this.targetDefaultValues = new ArrayList<>(); - for (int i = 0; i< targetTable.getPKNames(false).size(); i++) { + for (int i = 0; i < targetTable.getPKNames(false).size(); i++) { targetPKLookupMethods.add(null); targetDefaultValues.add(null); } this.originPKLookupMethods = new ArrayList<>(); - for (int i = 0; i< originTable.getPKNames(false).size(); i++) { + for (int i = 0; i < originTable.getPKNames(false).size(); i++) { originPKLookupMethods.add(null); } @@ -95,7 +92,10 @@ public PKFactory(PropertyHelper propertyHelper, CqlTable originTable, 
CqlTable t public EnhancedPK getTargetPK(Row originRow) { List newValues = getTargetPKValuesFromOriginColumnLookupMethod(originRow, targetDefaultValues); - if (logDebug) logger.debug("getTargetPK: newValues: {}; {}; explodeMapTargetKeyColumnIndex={}", newValues, null==writetimeTTLFeature?WritetimeTTL.class.getSimpleName()+":null":writetimeTTLFeature, explodeMapTargetKeyColumnIndex); + if (logDebug) + logger.debug("getTargetPK: newValues: {}; {}; explodeMapTargetKeyColumnIndex={}", newValues, + null == writetimeTTLFeature ? WritetimeTTL.class.getSimpleName() + ":null" : writetimeTTLFeature, + explodeMapTargetKeyColumnIndex); Long originWriteTimeStamp = null; Integer originTTL = null; if (FeatureFactory.isEnabled(writetimeTTLFeature)) { @@ -104,10 +104,10 @@ public EnhancedPK getTargetPK(Row originRow) { } if (explodeMapTargetKeyColumnIndex < 0) { return new EnhancedPK(this, newValues, getPKClasses(Side.TARGET), originTTL, originWriteTimeStamp); - } - else { + } else { Map explodeMap = getExplodeMap(originRow); - return new EnhancedPK(this, newValues, getPKClasses(Side.TARGET), originTTL, originWriteTimeStamp, explodeMap); + return new EnhancedPK(this, newValues, getPKClasses(Side.TARGET), originTTL, originWriteTimeStamp, + explodeMap); } } @@ -119,72 +119,81 @@ public String getWhereClause(Side side) { StringBuilder sb; List pkNames; switch (side) { - case ORIGIN: - if (null!=originWhereClause && !originWhereClause.isEmpty()) return originWhereClause; - sb = new StringBuilder(); - pkNames = originTable.getPKNames(true); - for (int i=0; i 0) sb.append(" AND "); - sb.append(name).append("=?"); - } + case ORIGIN: + if (null != originWhereClause && !originWhereClause.isEmpty()) + return originWhereClause; + sb = new StringBuilder(); + pkNames = originTable.getPKNames(true); + for (int i = 0; i < pkNames.size(); i++) { + LookupMethod method = originPKLookupMethods.get(i); + String name = pkNames.get(i); + + // On origin PK, we don't bind anything other than ORIGIN_COLUMN + if (method == LookupMethod.ORIGIN_COLUMN) { + if (sb.length() > 0) + sb.append(" AND "); + sb.append(name).append("=?"); } - return sb.toString(); - case TARGET: - if (null!=targetWhereClause && !targetWhereClause.isEmpty()) return targetWhereClause; - sb = new StringBuilder(); - pkNames = targetTable.getPKNames(true); - for (int i=0; i0) sb.append(" AND "); - sb.append(name).append("=?"); - break; - case CONSTANT_COLUMN: - if (null!=defaultValue) { - if (sb.length() > 0) sb.append(" AND "); - sb.append(name).append("=").append(defaultValue); - } - break; + } + return sb.toString(); + case TARGET: + if (null != targetWhereClause && !targetWhereClause.isEmpty()) + return targetWhereClause; + sb = new StringBuilder(); + pkNames = targetTable.getPKNames(true); + for (int i = 0; i < pkNames.size(); i++) { + LookupMethod method = targetPKLookupMethods.get(i); + String name = pkNames.get(i); + Object defaultValue = targetDefaultValues.get(i); + + if (null == method) + continue; + switch (method) { + case ORIGIN_COLUMN: + case EXPLODE_MAP: + if (sb.length() > 0) + sb.append(" AND "); + sb.append(name).append("=?"); + break; + case CONSTANT_COLUMN: + if (null != defaultValue) { + if (sb.length() > 0) + sb.append(" AND "); + sb.append(name).append("=").append(defaultValue); } + break; } - return sb.toString(); + } + return sb.toString(); } return null; } - public BoundStatement bindWhereClause(Side side, EnhancedPK pk, BoundStatement boundStatement, int startingBindIndex) { + public BoundStatement bindWhereClause(Side side, EnhancedPK 
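getWhereClause above emits name=? for bindable PK components and inlines constant-column values as literals into the CQL text. A compact rendition of that assembly (LookupMethod and the inputs are simplified stand-ins):

```java
import java.util.Arrays;
import java.util.List;

public class WhereClauseSketch {

    enum Lookup { ORIGIN_COLUMN, CONSTANT_COLUMN, EXPLODE_MAP }

    // Bindable components become "name=?", constants are inlined as literals.
    static String whereClause(List<String> names, List<Lookup> methods, List<Object> constants) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < names.size(); i++) {
            String piece;
            switch (methods.get(i)) {
            case ORIGIN_COLUMN:
            case EXPLODE_MAP:
                piece = names.get(i) + "=?";
                break;
            case CONSTANT_COLUMN:
                piece = names.get(i) + "=" + constants.get(i);
                break;
            default:
                continue;
            }
            if (sb.length() > 0)
                sb.append(" AND ");
            sb.append(piece);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(whereClause(
                List.of("pk", "ck", "tenant"),
                List.of(Lookup.ORIGIN_COLUMN, Lookup.EXPLODE_MAP, Lookup.CONSTANT_COLUMN),
                Arrays.<Object>asList(null, null, "'acme'")));
        // pk=? AND ck=? AND tenant='acme'
    }
}
```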
pk, BoundStatement boundStatement, + int startingBindIndex) { List indexesToBind; CqlTable table; switch (side) { - case ORIGIN: - indexesToBind = originPKIndexesToBind; - table = originTable; - break; - case TARGET: - indexesToBind = targetPKIndexesToBind; - table=targetTable; - break; - default: - throw new RuntimeException("Unknown side: "+side); + case ORIGIN: + indexesToBind = originPKIndexesToBind; + table = originTable; + break; + case TARGET: + indexesToBind = targetPKIndexesToBind; + table = targetTable; + break; + default: + throw new RuntimeException("Unknown side: " + side); } if (pk.isError() || pk.getPKValues().size() != table.getPKClasses().size()) - throw new RuntimeException("PK is in Error state, or the number of values does not match the number of bind types"); + throw new RuntimeException( + "PK is in Error state, or the number of values does not match the number of bind types"); - for (int i=0; i getPKNames(Side side, boolean pretty) { switch (side) { - case ORIGIN: - return originTable.getPKNames(pretty); - case TARGET: - return targetTable.getPKNames(pretty); - default: - throw new RuntimeException("Unknown side: "+side); + case ORIGIN: + return originTable.getPKNames(pretty); + case TARGET: + return targetTable.getPKNames(pretty); + default: + throw new RuntimeException("Unknown side: " + side); } } public List getPKClasses(Side side) { switch (side) { - case ORIGIN: - return originTable.getPKClasses(); - case TARGET: - return targetTable.getPKClasses(); - default: - throw new RuntimeException("Unknown side: "+side); + case ORIGIN: + return originTable.getPKClasses(); + case TARGET: + return targetTable.getPKClasses(); + default: + throw new RuntimeException("Unknown side: " + side); } } public List toValidRecordList(Record record) { - if (null==record || !record.isValid()) + if (null == record || !record.isValid()) return new ArrayList<>(0); List recordSet; if (record.getPk().canExplode()) { - recordSet = record.getPk().explode(explodeMapFeature).stream() - .filter(pk -> !pk.isError()) + recordSet = record.getPk().explode(explodeMapFeature).stream().filter(pk -> !pk.isError()) .map(pk -> new Record(pk, record.getOriginRow(), record.getTargetRow())) .collect(Collectors.toList()); } else { @@ -228,26 +236,29 @@ public List toValidRecordList(Record record) { return recordSet; } - public Integer getExplodeMapTargetPKIndex() {return explodeMapTargetPKIndex;} + public Integer getExplodeMapTargetPKIndex() { + return explodeMapTargetPKIndex; + } private List getTargetPKValuesFromOriginColumnLookupMethod(Row originRow, List defaultValues) { List newValues = new ArrayList<>(defaultValues); - for (int i = 0; i< targetPKLookupMethods.size(); i++) { + for (int i = 0; i < targetPKLookupMethods.size(); i++) { if (targetPKLookupMethods.get(i) != LookupMethod.ORIGIN_COLUMN) continue; - int originIndex = targetTable.getCorrespondingIndex(targetTable.indexOf(targetTable.getPKNames(false).get(i))); - Object value = originTable.getAndConvertData(originIndex,originRow); + int originIndex = targetTable + .getCorrespondingIndex(targetTable.indexOf(targetTable.getPKNames(false).get(i))); + Object value = originTable.getAndConvertData(originIndex, originRow); newValues.set(i, value); } return newValues; } - private Map getExplodeMap(Row originRow) { + private Map getExplodeMap(Row originRow) { if (explodeMapTargetKeyColumnIndex < 0) { return null; } - return (Map) originTable.getData(explodeMapOriginColumnIndex,originRow); + return (Map) originTable.getData(explodeMapOriginColumnIndex, 
originRow); } // This fills the PKLookupMethods lists with either ORIGIN_COLUMN or null. @@ -255,25 +266,25 @@ private void setOriginColumnLookupMethod(PropertyHelper propertyHelper) { // Origin PK columns are expected to be found on originColumnNames; if not, it could be because // the origin PK defaulted from the target PK, and the column is added as part of a feature // (e.g. explode map). In that case, we will set the lookup to null. - for (int i=0; i=0) - this.originPKLookupMethods.set(i,LookupMethod.ORIGIN_COLUMN); + for (int i = 0; i < originTable.getPKNames(false).size(); i++) { + if (originTable.indexOf(originTable.getPKNames(false).get(i)) >= 0) + this.originPKLookupMethods.set(i, LookupMethod.ORIGIN_COLUMN); } // Target PK columns may or may not be found on the originColumnNames. - for (int i=0; i= 0) - this.targetPKLookupMethods.set(i,LookupMethod.ORIGIN_COLUMN); + this.targetPKLookupMethods.set(i, LookupMethod.ORIGIN_COLUMN); } } private void setConstantColumns() { ConstantColumns feature = (ConstantColumns) targetTable.getFeature(Featureset.CONSTANT_COLUMNS); - if (null!=feature && feature.isEnabled()) { + if (null != feature && feature.isEnabled()) { List constantColumnNames = feature.getNames(); List constantColumnValues = feature.getValues(); List constantColumnBindClasses = feature.getBindClasses(); - for (int i=0; i= 0) { @@ -287,7 +298,7 @@ private void setConstantColumns() { private Integer setExplodeMapMethods_getTargetKeyColumnIndex() { ExplodeMap feature = (ExplodeMap) targetTable.getFeature(Featureset.EXPLODE_MAP); - if (null!=feature && feature.isEnabled()) { + if (null != feature && feature.isEnabled()) { String explodeMapKeyColumn = feature.getKeyColumnName(); int targetPKIndex = targetTable.getPKNames(false).indexOf(explodeMapKeyColumn); if (targetPKIndex >= 0) { @@ -300,7 +311,7 @@ private Integer setExplodeMapMethods_getTargetKeyColumnIndex() { private Integer getExplodeMapOriginColumnIndex() { ExplodeMap feature = (ExplodeMap) originTable.getFeature(Featureset.EXPLODE_MAP); - if (null!=feature && feature.isEnabled()) { + if (null != feature && feature.isEnabled()) { return feature.getOriginColumnIndex(); } return -1; @@ -309,7 +320,7 @@ private Integer getExplodeMapOriginColumnIndex() { private List getIndexesToBind(Side side) { List indexesToBind = new ArrayList<>(); List lookupMethods = (side == Side.ORIGIN) ? 
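getIndexesToBind pairs with that where-clause logic: constant columns are inlined rather than bound, so only the remaining PK positions receive bind markers and therefore an index to bind. In isolation:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class BindIndexSketch {

    enum Lookup { ORIGIN_COLUMN, CONSTANT_COLUMN, EXPLODE_MAP }

    // Skip nulls and constant columns; everything else gets a bind index.
    static List<Integer> indexesToBind(List<Lookup> methods) {
        List<Integer> out = new ArrayList<>();
        for (int i = 0; i < methods.size(); i++) {
            Lookup m = methods.get(i);
            if (m != null && m != Lookup.CONSTANT_COLUMN)
                out.add(i);
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(indexesToBind(Arrays.asList(
                Lookup.ORIGIN_COLUMN, Lookup.CONSTANT_COLUMN, Lookup.EXPLODE_MAP))); // [0, 2]
    }
}
```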
originPKLookupMethods : targetPKLookupMethods; - for (int i = 0; i< lookupMethods.size(); i++) { + for (int i = 0; i < lookupMethods.size(); i++) { LookupMethod method = lookupMethods.get(i); if (null != method && method != LookupMethod.CONSTANT_COLUMN) indexesToBind.add(i); diff --git a/src/main/java/com/datastax/cdm/data/Record.java b/src/main/java/com/datastax/cdm/data/Record.java index b27e6f6f..7fe932e1 100644 --- a/src/main/java/com/datastax/cdm/data/Record.java +++ b/src/main/java/com/datastax/cdm/data/Record.java @@ -15,17 +15,14 @@ */ package com.datastax.cdm.data; +import java.util.concurrent.CompletionStage; + import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.Row; -import java.util.concurrent.CompletionStage; - public class Record { public enum Diff { - UNKNOWN, - NONE, - MISSING_TARGET, - FIELD_DIFF + UNKNOWN, NONE, MISSING_TARGET, FIELD_DIFF } private EnhancedPK pk; @@ -51,8 +48,14 @@ public Record(EnhancedPK pk, CompletionStage targetFutureRow) { this(pk, null, null, targetFutureRow); } - public EnhancedPK getPk() {return pk;} - public Row getOriginRow() {return originRow;} + public EnhancedPK getPk() { + return pk; + } + + public Row getOriginRow() { + return originRow; + } + public Row getTargetRow() { if (null == targetRow && null != targetFutureRow) { AsyncResultSet asyncResultSet = targetFutureRow.toCompletableFuture().join(); @@ -61,7 +64,10 @@ public Row getTargetRow() { return targetRow; } - public void setTargetRow(Row targetRow) {this.targetRow = targetRow;} + public void setTargetRow(Row targetRow) { + this.targetRow = targetRow; + } + public void setAsyncTargetRow(CompletionStage targetFutureRow) { this.targetRow = null; this.targetFutureRow = targetFutureRow; @@ -73,11 +79,8 @@ public boolean isValid() { @Override public String toString() { - return "Record{" + - "pk=" + pk + - ", originRow is " + ((null==originRow) ? "not set" : "set") + - ", targetRow is " + ((null==targetRow) ? "not set" : "set") + - '}'; + return "Record{" + "pk=" + pk + ", originRow is " + ((null == originRow) ? "not set" : "set") + + ", targetRow is " + ((null == targetRow) ? "not set" : "set") + '}'; } } diff --git a/src/main/java/com/datastax/cdm/feature/AbstractFeature.java b/src/main/java/com/datastax/cdm/feature/AbstractFeature.java index 8cf85b57..23ef5fab 100644 --- a/src/main/java/com/datastax/cdm/feature/AbstractFeature.java +++ b/src/main/java/com/datastax/cdm/feature/AbstractFeature.java @@ -23,17 +23,20 @@ public abstract class AbstractFeature implements Feature { protected boolean isValid = true; protected boolean isLoaded = false; - public AbstractFeature() { } + public AbstractFeature() { + } @Override public boolean isEnabled() { - if (!isLoaded) throw new RuntimeException("Feature not initialized"); + if (!isLoaded) + throw new RuntimeException("Feature not initialized"); return isEnabled; } @Override public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) { - if (!isLoaded) throw new RuntimeException("Feature not initialized"); + if (!isLoaded) + throw new RuntimeException("Feature not initialized"); if (!validateProperties()) { isEnabled = false; return false; @@ -42,9 +45,10 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) } /** - * Validate the properties of the feature typically called by loadProperties as well as initializeAndValidate. 
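
// Record.getTargetRow() above lazily joins the async target lookup: the read is issued eagerly as a
// CompletionStage, but only materialized (and cached) on first access. A self-contained sketch with a
// hypothetical String payload standing in for the driver's Row/AsyncResultSet:
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;

public class LazyTargetRowSketch {
    private String targetRow;
    private CompletionStage<String> targetFutureRow;

    LazyTargetRowSketch(CompletionStage<String> future) {
        this.targetFutureRow = future;
    }

    String getTargetRow() {
        if (targetRow == null && targetFutureRow != null) {
            targetRow = targetFutureRow.toCompletableFuture().join(); // block once, cache thereafter
        }
        return targetRow;
    }

    public static void main(String[] args) {
        LazyTargetRowSketch r = new LazyTargetRowSketch(CompletableFuture.supplyAsync(() -> "target-row-1"));
        System.out.println(r.getTargetRow()); // target-row-1
    }
}
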
-     * It should set isValid to false if any properties are invalid, and ideally uses logger to inform the
-     * user of the problems found in property configuration.
+     * Validate the properties of the feature, typically called by loadProperties as well as initializeAndValidate. It
+     * should set isValid to false if any properties are invalid, and should ideally use the logger to inform the user
+     * of the problems found in property configuration.
+     *
      * @return true if the properties are valid, false otherwise
      */
     protected boolean validateProperties() {
@@ -53,7 +57,8 @@ protected boolean validateProperties() {
     @Override
     public String toString() {
-        return String.format("%s{loaded:%s/valid:%s/enabled:%s}", this.getClass().getSimpleName(), isLoaded, isValid, isEnabled);
+        return String.format("%s{loaded:%s/valid:%s/enabled:%s}", this.getClass().getSimpleName(), isLoaded, isValid,
+                isEnabled);
     }
 }
diff --git a/src/main/java/com/datastax/cdm/feature/ConstantColumns.java b/src/main/java/com/datastax/cdm/feature/ConstantColumns.java
index b96b6082..f7cd5dfe 100644
--- a/src/main/java/com/datastax/cdm/feature/ConstantColumns.java
+++ b/src/main/java/com/datastax/cdm/feature/ConstantColumns.java
@@ -43,19 +43,17 @@ public boolean loadProperties(IPropertyHelper propertyHelper) {
         isLoaded = true;
         isValid = validateProperties();
-        isEnabled = (null!=names && names.size() > 0);
+        isEnabled = (null != names && names.size() > 0);
         return isLoaded && isValid;
     }

     @Override
     protected boolean validateProperties() {
-        if ((null == names || names.isEmpty()) &&
-                (null == values || values.isEmpty())) {
+        if ((null == names || names.isEmpty()) && (null == values || values.isEmpty())) {
             return true; // feature is disabled, which is valid
         }
         // both names and values must be set, not empty, and of the same size
-        if (null == names || null == values || names.size() == 0 ||
-                names.size() != values.size()) {
+        if (null == names || null == values || names.size() == 0 || names.size() != values.size()) {
             logger.error("Constant column names ({}) and values ({}) are of different sizes", names, values);
             return false;
         }
@@ -65,7 +63,7 @@ protected boolean validateProperties() {
     @Override
     public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) {
-        if (null==targetTable) {
+        if (null == targetTable) {
             throw new IllegalArgumentException("targetTable is null");
         }
         if (targetTable.isOrigin()) {
@@ -77,12 +75,14 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable)
             isEnabled = false;
             return false;
         }
-        if (!isEnabled) return true;
+        if (!isEnabled)
+            return true;

         this.bindClasses = targetTable.extendColumns(this.names);
-        for (int i=0; i getNames() { return isEnabled ? names : Collections.emptyList(); }
-    public List getBindClasses() { return isEnabled ? bindClasses : Collections.emptyList(); }
-    public List getValues() { return isEnabled ? values : Collections.emptyList(); }
+    public List getNames() {
+        return isEnabled ? names : Collections.emptyList();
+    }
+
+    public List getBindClasses() {
+        return isEnabled ? bindClasses : Collections.emptyList();
+    }
+
+    public List getValues() {
+        return isEnabled ?
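
// The contract enforced by ConstantColumns above (and by the value-splitting helper that follows): the
// single values string is split on the configured regex, and the result must line up one-for-one with
// the configured column names. The literal values below are illustrative only.
import java.util.Arrays;
import java.util.List;

public class ConstantColumnsSketch {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("region", "tenant"); // KnownProperties.CONSTANT_COLUMN_NAMES
        String valueString = "'US'|'acme'";                     // KnownProperties.CONSTANT_COLUMN_VALUES
        String splitRegex = "\\|";                              // KnownProperties.CONSTANT_COLUMN_SPLIT_REGEX

        List<String> values = Arrays.asList(valueString.split(splitRegex));
        if (values.size() != names.size()) {
            throw new IllegalStateException("names and values are of different sizes");
        }
        System.out.println(names + " -> " + values); // [region, tenant] -> ['US', 'acme']
    }
}
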
values : Collections.emptyList(); + } private static List getConstantColumnNames(IPropertyHelper propertyHelper) { return CqlTable.unFormatNames(propertyHelper.getStringList(KnownProperties.CONSTANT_COLUMN_NAMES)); @@ -125,9 +135,11 @@ private static List getConstantColumnValues(IPropertyHelper propertyHelp String columnValueString = propertyHelper.getString(KnownProperties.CONSTANT_COLUMN_VALUES); String regexString = propertyHelper.getString(KnownProperties.CONSTANT_COLUMN_SPLIT_REGEX); - if (null!=columnValueString && !columnValueString.isEmpty()) { - if (null==regexString || regexString.isEmpty()) { - throw new RuntimeException("Constant column values are specified [" + columnValueString + "], but no split regex is provided in property " + KnownProperties.CONSTANT_COLUMN_SPLIT_REGEX); + if (null != columnValueString && !columnValueString.isEmpty()) { + if (null == regexString || regexString.isEmpty()) { + throw new RuntimeException("Constant column values are specified [" + columnValueString + + "], but no split regex is provided in property " + + KnownProperties.CONSTANT_COLUMN_SPLIT_REGEX); } else { return Arrays.asList(columnValueString.split(regexString)); } diff --git a/src/main/java/com/datastax/cdm/feature/ExplodeMap.java b/src/main/java/com/datastax/cdm/feature/ExplodeMap.java index 338b8003..6886c479 100644 --- a/src/main/java/com/datastax/cdm/feature/ExplodeMap.java +++ b/src/main/java/com/datastax/cdm/feature/ExplodeMap.java @@ -15,17 +15,18 @@ */ package com.datastax.cdm.feature; +import java.util.*; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.datastax.cdm.data.CqlConversion; import com.datastax.cdm.data.CqlData; import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.cdm.properties.KnownProperties; import com.datastax.cdm.schema.CqlTable; import com.datastax.oss.driver.api.core.type.DataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; -import java.util.stream.Collectors; public class ExplodeMap extends AbstractFeature { public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); @@ -44,7 +45,9 @@ public class ExplodeMap extends AbstractFeature { @Override public boolean loadProperties(IPropertyHelper helper) { - if (null == helper) { throw new IllegalArgumentException("helper is null");} + if (null == helper) { + throw new IllegalArgumentException("helper is null"); + } this.originColumnName = getOriginColumnName(helper); this.keyColumnName = getKeyColumnName(helper); @@ -52,10 +55,7 @@ public boolean loadProperties(IPropertyHelper helper) { isValid = validateProperties(); - isEnabled = isValid - && !originColumnName.isEmpty() - && !keyColumnName.isEmpty() - && !valueColumnName.isEmpty(); + isEnabled = isValid && !originColumnName.isEmpty() && !keyColumnName.isEmpty() && !valueColumnName.isEmpty(); isLoaded = true; return isLoaded && isValid; @@ -64,23 +64,26 @@ public boolean loadProperties(IPropertyHelper helper) { @Override protected boolean validateProperties() { isValid = true; - if ((null == originColumnName || originColumnName.isEmpty()) && - (null == keyColumnName || keyColumnName.isEmpty()) && - (null == valueColumnName || valueColumnName.isEmpty())) + if ((null == originColumnName || originColumnName.isEmpty()) + && (null == keyColumnName || keyColumnName.isEmpty()) + && (null == valueColumnName || valueColumnName.isEmpty())) return true; - if (null==originColumnName || originColumnName.isEmpty()) { - logger.error("Origin column 
name is not set when Key ({}) and/or Value ({}) are set", keyColumnName, valueColumnName); + if (null == originColumnName || originColumnName.isEmpty()) { + logger.error("Origin column name is not set when Key ({}) and/or Value ({}) are set", keyColumnName, + valueColumnName); isValid = false; } - if (null==keyColumnName || keyColumnName.isEmpty()) { - logger.error("Key column name is not set when Origin ({}) and/or Value ({}) are set", originColumnName, valueColumnName); + if (null == keyColumnName || keyColumnName.isEmpty()) { + logger.error("Key column name is not set when Origin ({}) and/or Value ({}) are set", originColumnName, + valueColumnName); isValid = false; } - if (null==valueColumnName || valueColumnName.isEmpty()) { - logger.error("Value column name is not set when Origin ({}) and/or Key ({}) are set", originColumnName, keyColumnName); + if (null == valueColumnName || valueColumnName.isEmpty()) { + logger.error("Value column name is not set when Origin ({}) and/or Key ({}) are set", originColumnName, + keyColumnName); isValid = false; } @@ -89,7 +92,7 @@ protected boolean validateProperties() { @Override public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) { - if (null==originTable || null==targetTable) { + if (null == originTable || null == targetTable) { throw new IllegalArgumentException("originTable and/or targetTable is null"); } if (!originTable.isOrigin()) { @@ -104,16 +107,19 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) isEnabled = false; return false; } - if (!isEnabled) return true; + if (!isEnabled) + return true; // Initialize Origin variables List originBindClasses = originTable.extendColumns(Collections.singletonList(originColumnName)); if (null == originBindClasses || originBindClasses.size() != 1 || null == originBindClasses.get(0)) { - logger.error("Origin column {} is not found on the origin table {}", originColumnName, originTable.getKeyspaceTable()); + logger.error("Origin column {} is not found on the origin table {}", originColumnName, + originTable.getKeyspaceTable()); isValid = false; } else { if (!CqlData.Type.MAP.equals(CqlData.toType(originTable.getDataType(originColumnName)))) { - logger.error("Origin column {} is not a map, it is {}", originColumnName, originBindClasses.get(0).getName()); + logger.error("Origin column {} is not a map, it is {}", originColumnName, + originBindClasses.get(0).getName()); isValid = false; } else { this.originColumnIndex = originTable.indexOf(originColumnName); @@ -122,11 +128,14 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) // Initialize Target variables List targetBindClasses = targetTable.extendColumns(Arrays.asList(keyColumnName, valueColumnName)); - if (null == targetBindClasses || targetBindClasses.size() != 2 || null == targetBindClasses.get(0) || null == targetBindClasses.get(1)) { + if (null == targetBindClasses || targetBindClasses.size() != 2 || null == targetBindClasses.get(0) + || null == targetBindClasses.get(1)) { if (null == targetBindClasses.get(0)) - logger.error("Target key column {} is not found on the target table {}", keyColumnName, targetTable.getKeyspaceTable()); + logger.error("Target key column {} is not found on the target table {}", keyColumnName, + targetTable.getKeyspaceTable()); if (null == targetBindClasses.get(1)) - logger.error("Target value column {} is not found on the target table {}", valueColumnName, targetTable.getKeyspaceTable()); + logger.error("Target value column {} is not 
found on the target table {}", valueColumnName, + targetTable.getKeyspaceTable()); isValid = false; } else { this.keyColumnIndex = targetTable.indexOf(keyColumnName); @@ -152,20 +161,23 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) if (isEnabled && logger.isTraceEnabled()) { logger.trace("Origin column {} is at index {}", originColumnName, originColumnIndex); - logger.trace("Target key column {} is at index {} with conversion {}", keyColumnName, keyColumnIndex, keyConversion); - logger.trace("Target value column {} is at index {} with conversion {}", valueColumnName, valueColumnIndex, valueConversion); + logger.trace("Target key column {} is at index {} with conversion {}", keyColumnName, keyColumnIndex, + keyConversion); + logger.trace("Target value column {} is at index {} with conversion {}", valueColumnName, valueColumnIndex, + valueConversion); } - if (!isValid) isEnabled = false; - logger.info("Feature {} is {}", this.getClass().getSimpleName(), isEnabled?"enabled":"disabled"); + if (!isValid) + isEnabled = false; + logger.info("Feature {} is {}", this.getClass().getSimpleName(), isEnabled ? "enabled" : "disabled"); return isValid; } public Set> explode(Map map) { - if (map == null) { return null; } - return map.entrySet().stream() - .map(this::applyConversions) - .collect(Collectors.toSet()); + if (map == null) { + return null; + } + return map.entrySet().stream().map(this::applyConversions).collect(Collectors.toSet()); } private Map.Entry applyConversions(Map.Entry entry) { @@ -183,30 +195,52 @@ private Map.Entry applyConversions(Map.Entry ent return new AbstractMap.SimpleEntry<>(key, value); } - public String getOriginColumnName() { return isEnabled ? originColumnName : ""; } - public Integer getOriginColumnIndex() { return isEnabled ? originColumnIndex : -1; } + public String getOriginColumnName() { + return isEnabled ? originColumnName : ""; + } - public String getKeyColumnName() { return isEnabled ? keyColumnName : ""; } - public Integer getKeyColumnIndex() { return isEnabled ? keyColumnIndex : -1; } + public Integer getOriginColumnIndex() { + return isEnabled ? originColumnIndex : -1; + } - public String getValueColumnName() { return isEnabled ? valueColumnName : ""; } - public Integer getValueColumnIndex() { return isEnabled ? valueColumnIndex : -1; } + public String getKeyColumnName() { + return isEnabled ? keyColumnName : ""; + } + + public Integer getKeyColumnIndex() { + return isEnabled ? keyColumnIndex : -1; + } + + public String getValueColumnName() { + return isEnabled ? valueColumnName : ""; + } + + public Integer getValueColumnIndex() { + return isEnabled ? valueColumnIndex : -1; + } public static String getOriginColumnName(IPropertyHelper helper) { - if (null == helper) { throw new IllegalArgumentException("helper is null");} + if (null == helper) { + throw new IllegalArgumentException("helper is null"); + } String columnName = CqlTable.unFormatName(helper.getString(KnownProperties.EXPLODE_MAP_ORIGIN_COLUMN_NAME)); return (null == columnName) ? "" : columnName; } public static String getKeyColumnName(IPropertyHelper helper) { - if (null == helper) { throw new IllegalArgumentException("helper is null");} + if (null == helper) { + throw new IllegalArgumentException("helper is null"); + } String columnName = CqlTable.unFormatName(helper.getString(KnownProperties.EXPLODE_MAP_TARGET_KEY_COLUMN_NAME)); return (null == columnName) ? 
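
// ExplodeMap.explode(...) above turns each entry of the origin map column into one converted key/value
// pair. A sketch with hypothetical Function-based conversions standing in for CqlConversion:
import java.util.AbstractMap;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ExplodeEntriesSketch {
    static Set<Map.Entry<Object, Object>> explode(Map<String, Integer> map,
            Function<Object, Object> keyConversion, Function<Object, Object> valueConversion) {
        return map.entrySet().stream()
                .map(e -> new AbstractMap.SimpleEntry<>(keyConversion.apply(e.getKey()),
                        valueConversion.apply(e.getValue())))
                .collect(Collectors.toSet());
    }

    public static void main(String[] args) {
        // identity conversions: two map entries become two target key/value pairs
        System.out.println(explode(Map.of("a", 1, "b", 2), k -> k, v -> v));
    }
}
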
"" : columnName; } public static String getValueColumnName(IPropertyHelper helper) { - if (null == helper) { throw new IllegalArgumentException("helper is null");} - String columnName = CqlTable.unFormatName(helper.getString(KnownProperties.EXPLODE_MAP_TARGET_VALUE_COLUMN_NAME)); + if (null == helper) { + throw new IllegalArgumentException("helper is null"); + } + String columnName = CqlTable + .unFormatName(helper.getString(KnownProperties.EXPLODE_MAP_TARGET_VALUE_COLUMN_NAME)); return (null == columnName) ? "" : columnName; } } diff --git a/src/main/java/com/datastax/cdm/feature/ExtractJson.java b/src/main/java/com/datastax/cdm/feature/ExtractJson.java index 6dcc5d12..ebb3e956 100644 --- a/src/main/java/com/datastax/cdm/feature/ExtractJson.java +++ b/src/main/java/com/datastax/cdm/feature/ExtractJson.java @@ -31,130 +31,130 @@ import com.fasterxml.jackson.databind.ObjectMapper; public class ExtractJson extends AbstractFeature { - public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - private ObjectMapper mapper = new ObjectMapper(); - - private String originColumnName = ""; - private String originJsonFieldName = ""; - private Integer originColumnIndex = -1; - - private String targetColumnName = ""; - private Integer targetColumnIndex = -1; - private boolean overwriteTarget = false; - - @Override - public boolean loadProperties(IPropertyHelper helper) { - if (null == helper) { - throw new IllegalArgumentException("helper is null"); - } - - originColumnName = getColumnName(helper, KnownProperties.EXTRACT_JSON_ORIGIN_COLUMN_NAME); - targetColumnName = getColumnName(helper, KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING); - overwriteTarget = helper.getBoolean(KnownProperties.EXTRACT_JSON_TARGET_OVERWRITE); - - // Convert columnToFieldMapping to targetColumnName and originJsonFieldName - if (!targetColumnName.isBlank()) { - String[] parts = targetColumnName.split("\\:"); - if (parts.length == 2) { - originJsonFieldName = parts[0]; - targetColumnName = parts[1]; - } else { - originJsonFieldName = targetColumnName; - } - } - - isValid = validateProperties(); - isEnabled = isValid && !originColumnName.isEmpty() && !targetColumnName.isEmpty(); - isLoaded = true; - - return isLoaded && isValid; - } - - @Override - protected boolean validateProperties() { - if (StringUtils.isBlank(originColumnName) && StringUtils.isBlank(targetColumnName)) - return true; - - if (StringUtils.isBlank(originColumnName)) { - logger.error("Origin column name is not set when Target ({}) is set", targetColumnName); - return false; - } - - if (StringUtils.isBlank(targetColumnName)) { - logger.error("Target column name is not set when Origin ({}) is set", originColumnName); - return false; - } - - return true; - } - - @Override - public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) { - if (null == originTable || null == targetTable) { - throw new IllegalArgumentException("Origin table and/or Target table is null"); - } - if (!originTable.isOrigin()) { - throw new IllegalArgumentException(originTable.getKeyspaceTable() + " is not an origin table"); - } - if (targetTable.isOrigin()) { - throw new IllegalArgumentException(targetTable.getKeyspaceTable() + " is not a target table"); - } - - if (!validateProperties()) { - isEnabled = false; - return false; - } - if (!isEnabled) - return true; - - // Initialize Origin variables - List originBindClasses = originTable.extendColumns(Collections.singletonList(originColumnName)); - if (null == originBindClasses || 
originBindClasses.size() != 1 || null == originBindClasses.get(0)) { - throw new IllegalArgumentException("Origin column " + originColumnName - + " is not found on the origin table " + originTable.getKeyspaceTable()); - } else { - this.originColumnIndex = originTable.indexOf(originColumnName); - } - - // Initialize Target variables - List targetBindClasses = targetTable.extendColumns(Collections.singletonList(targetColumnName)); - if (null == targetBindClasses || targetBindClasses.size() != 1 || null == targetBindClasses.get(0)) { - throw new IllegalArgumentException("Target column " + targetColumnName - + " is not found on the target table " + targetTable.getKeyspaceTable()); - } else { - this.targetColumnIndex = targetTable.indexOf(targetColumnName); - } - - logger.info("Feature {} is {}", this.getClass().getSimpleName(), isEnabled ? "enabled" : "disabled"); - return true; - } - - public Object extract(String jsonString) throws JsonMappingException, JsonProcessingException { - if (StringUtils.isNotBlank(jsonString)) { - return mapper.readValue(jsonString, Map.class).get(originJsonFieldName); - } - - return null; - } - - public Integer getOriginColumnIndex() { - return isEnabled ? originColumnIndex : -1; - } - - public Integer getTargetColumnIndex() { - return isEnabled ? targetColumnIndex : -1; - } - - public String getTargetColumnName() { - return isEnabled ? targetColumnName : ""; - } - - public boolean overwriteTarget() { - return overwriteTarget; - } - - private String getColumnName(IPropertyHelper helper, String colName) { - String columnName = CqlTable.unFormatName(helper.getString(colName)); - return (null == columnName) ? "" : columnName; - } + public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + private ObjectMapper mapper = new ObjectMapper(); + + private String originColumnName = ""; + private String originJsonFieldName = ""; + private Integer originColumnIndex = -1; + + private String targetColumnName = ""; + private Integer targetColumnIndex = -1; + private boolean overwriteTarget = false; + + @Override + public boolean loadProperties(IPropertyHelper helper) { + if (null == helper) { + throw new IllegalArgumentException("helper is null"); + } + + originColumnName = getColumnName(helper, KnownProperties.EXTRACT_JSON_ORIGIN_COLUMN_NAME); + targetColumnName = getColumnName(helper, KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING); + overwriteTarget = helper.getBoolean(KnownProperties.EXTRACT_JSON_TARGET_OVERWRITE); + + // Convert columnToFieldMapping to targetColumnName and originJsonFieldName + if (!targetColumnName.isBlank()) { + String[] parts = targetColumnName.split("\\:"); + if (parts.length == 2) { + originJsonFieldName = parts[0]; + targetColumnName = parts[1]; + } else { + originJsonFieldName = targetColumnName; + } + } + + isValid = validateProperties(); + isEnabled = isValid && !originColumnName.isEmpty() && !targetColumnName.isEmpty(); + isLoaded = true; + + return isLoaded && isValid; + } + + @Override + protected boolean validateProperties() { + if (StringUtils.isBlank(originColumnName) && StringUtils.isBlank(targetColumnName)) + return true; + + if (StringUtils.isBlank(originColumnName)) { + logger.error("Origin column name is not set when Target ({}) is set", targetColumnName); + return false; + } + + if (StringUtils.isBlank(targetColumnName)) { + logger.error("Target column name is not set when Origin ({}) is set", originColumnName); + return false; + } + + return true; + } + + @Override + public boolean initializeAndValidate(CqlTable 
originTable, CqlTable targetTable) { + if (null == originTable || null == targetTable) { + throw new IllegalArgumentException("Origin table and/or Target table is null"); + } + if (!originTable.isOrigin()) { + throw new IllegalArgumentException(originTable.getKeyspaceTable() + " is not an origin table"); + } + if (targetTable.isOrigin()) { + throw new IllegalArgumentException(targetTable.getKeyspaceTable() + " is not a target table"); + } + + if (!validateProperties()) { + isEnabled = false; + return false; + } + if (!isEnabled) + return true; + + // Initialize Origin variables + List originBindClasses = originTable.extendColumns(Collections.singletonList(originColumnName)); + if (null == originBindClasses || originBindClasses.size() != 1 || null == originBindClasses.get(0)) { + throw new IllegalArgumentException("Origin column " + originColumnName + + " is not found on the origin table " + originTable.getKeyspaceTable()); + } else { + this.originColumnIndex = originTable.indexOf(originColumnName); + } + + // Initialize Target variables + List targetBindClasses = targetTable.extendColumns(Collections.singletonList(targetColumnName)); + if (null == targetBindClasses || targetBindClasses.size() != 1 || null == targetBindClasses.get(0)) { + throw new IllegalArgumentException("Target column " + targetColumnName + + " is not found on the target table " + targetTable.getKeyspaceTable()); + } else { + this.targetColumnIndex = targetTable.indexOf(targetColumnName); + } + + logger.info("Feature {} is {}", this.getClass().getSimpleName(), isEnabled ? "enabled" : "disabled"); + return true; + } + + public Object extract(String jsonString) throws JsonMappingException, JsonProcessingException { + if (StringUtils.isNotBlank(jsonString)) { + return mapper.readValue(jsonString, Map.class).get(originJsonFieldName); + } + + return null; + } + + public Integer getOriginColumnIndex() { + return isEnabled ? originColumnIndex : -1; + } + + public Integer getTargetColumnIndex() { + return isEnabled ? targetColumnIndex : -1; + } + + public String getTargetColumnName() { + return isEnabled ? targetColumnName : ""; + } + + public boolean overwriteTarget() { + return overwriteTarget; + } + + private String getColumnName(IPropertyHelper helper, String colName) { + String columnName = CqlTable.unFormatName(helper.getString(colName)); + return (null == columnName) ? "" : columnName; + } } diff --git a/src/main/java/com/datastax/cdm/feature/Feature.java b/src/main/java/com/datastax/cdm/feature/Feature.java index 3f0153e0..27f8f83e 100644 --- a/src/main/java/com/datastax/cdm/feature/Feature.java +++ b/src/main/java/com/datastax/cdm/feature/Feature.java @@ -23,25 +23,36 @@ public interface Feature { /** * Initializes the feature based on properties * - * @param propertyHelper propertyHelper containing initialized properties + * @param propertyHelper + * propertyHelper containing initialized properties + * * @return true if the properties appear to be valid, false otherwise */ public boolean loadProperties(IPropertyHelper propertyHelper); /** * Indicates if feature is enabled. + * * @return true if the feature is enabled, false otherwise - * @throws RuntimeException if the feature is not loaded + * + * @throws RuntimeException + * if the feature is not loaded */ public boolean isEnabled(); /** - * Using the loaded properties, initializes the feature and validates it against the origin and target tables. - * This method should be called after loadProperties() and before any other method. 
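
// The lifecycle documented above is: loadProperties(...) first, then initializeAndValidate(...), and only
// then isEnabled(). A self-contained toy mirroring the guard in AbstractFeature.isEnabled():
public class FeatureLifecycleSketch {
    static class ToyFeature {
        private boolean isLoaded = false;
        private boolean isEnabled = false;

        boolean loadProperties(String configValue) {
            isEnabled = (configValue != null && !configValue.isEmpty());
            isLoaded = true;
            return true;
        }

        boolean isEnabled() {
            if (!isLoaded)
                throw new RuntimeException("Feature not initialized");
            return isEnabled;
        }
    }

    public static void main(String[] args) {
        ToyFeature f = new ToyFeature();
        f.loadProperties("some.value"); // must precede isEnabled()
        System.out.println(f.isEnabled()); // true
    }
}
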
- * @param originCqlTable origin CqlTable - * @param targetCqlTable target CqlTable + * Using the loaded properties, initializes the feature and validates it against the origin and target tables. This + * method should be called after loadProperties() and before any other method. + * + * @param originCqlTable + * origin CqlTable + * @param targetCqlTable + * target CqlTable + * * @return true if the feature is valid, false otherwise - * @throws RuntimeException if the feature is not loaded, or there is a problem with the properties relative to the tables. + * + * @throws RuntimeException + * if the feature is not loaded, or there is a problem with the properties relative to the tables. */ public boolean initializeAndValidate(CqlTable originCqlTable, CqlTable targetCqlTable); diff --git a/src/main/java/com/datastax/cdm/feature/FeatureFactory.java b/src/main/java/com/datastax/cdm/feature/FeatureFactory.java index 83a19ca6..284ec0ac 100644 --- a/src/main/java/com/datastax/cdm/feature/FeatureFactory.java +++ b/src/main/java/com/datastax/cdm/feature/FeatureFactory.java @@ -18,14 +18,20 @@ public class FeatureFactory { public static Feature getFeature(Featureset feature) { switch (feature) { - case ORIGIN_FILTER: return new OriginFilterCondition(); - case CONSTANT_COLUMNS: return new ConstantColumns(); - case EXPLODE_MAP: return new ExplodeMap(); - case EXTRACT_JSON: return new ExtractJson(); - case WRITETIME_TTL: return new WritetimeTTL(); - case GUARDRAIL_CHECK: return new Guardrail(); - default: - throw new IllegalArgumentException("Unknown feature: " + feature); + case ORIGIN_FILTER: + return new OriginFilterCondition(); + case CONSTANT_COLUMNS: + return new ConstantColumns(); + case EXPLODE_MAP: + return new ExplodeMap(); + case EXTRACT_JSON: + return new ExtractJson(); + case WRITETIME_TTL: + return new WritetimeTTL(); + case GUARDRAIL_CHECK: + return new Guardrail(); + default: + throw new IllegalArgumentException("Unknown feature: " + feature); } } diff --git a/src/main/java/com/datastax/cdm/feature/Featureset.java b/src/main/java/com/datastax/cdm/feature/Featureset.java index 14995d90..4609f868 100644 --- a/src/main/java/com/datastax/cdm/feature/Featureset.java +++ b/src/main/java/com/datastax/cdm/feature/Featureset.java @@ -16,11 +16,6 @@ package com.datastax.cdm.feature; public enum Featureset { - ORIGIN_FILTER, - CONSTANT_COLUMNS, - EXPLODE_MAP, - EXTRACT_JSON, - WRITETIME_TTL, - GUARDRAIL_CHECK, + ORIGIN_FILTER, CONSTANT_COLUMNS, EXPLODE_MAP, EXTRACT_JSON, WRITETIME_TTL, GUARDRAIL_CHECK, TEST_UNIMPLEMENTED_FEATURE } diff --git a/src/main/java/com/datastax/cdm/feature/Guardrail.java b/src/main/java/com/datastax/cdm/feature/Guardrail.java index ed72729d..75923f42 100644 --- a/src/main/java/com/datastax/cdm/feature/Guardrail.java +++ b/src/main/java/com/datastax/cdm/feature/Guardrail.java @@ -15,24 +15,25 @@ */ package com.datastax.cdm.feature; +import java.text.DecimalFormat; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.datastax.cdm.data.Record; import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.cdm.properties.KnownProperties; import com.datastax.cdm.schema.CqlTable; import com.datastax.oss.driver.api.core.cql.Row; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.text.DecimalFormat; -import java.util.HashMap; -import java.util.Map; - -public class Guardrail extends AbstractFeature { +public class Guardrail extends AbstractFeature { public final Logger logger = 
LoggerFactory.getLogger(this.getClass().getName());
     private final boolean logDebug = logger.isDebugEnabled();
     private final boolean logTrace = logger.isTraceEnabled();
-    public static final String CLEAN_CHECK="";
+    public static final String CLEAN_CHECK = "";
     public static final int BASE_FACTOR = 1000;
     private DecimalFormat decimalFormat = new DecimalFormat("0.###");
@@ -49,14 +50,14 @@ public class Guardrail extends AbstractFeature {
     @Override
     public boolean loadProperties(IPropertyHelper propertyHelper) {
         Number property = propertyHelper.getNumber(KnownProperties.GUARDRAIL_COLSIZE_KB);
-        if (null==property)
+        if (null == property)
             this.colSizeInKB = 0.0;
         else
             this.colSizeInKB = property.doubleValue();
         isValid = validateProperties();
         isLoaded = true;
-        isEnabled=(isValid && colSizeInKB >0);
+        isEnabled = (isValid && colSizeInKB > 0);
         return isValid;
     }
@@ -64,7 +65,8 @@ public boolean loadProperties(IPropertyHelper propertyHelper) {
     protected boolean validateProperties() {
         isValid = true;
         if (this.colSizeInKB < 0) {
-            logger.error("{} must be greater than equal to zero, but is {}", KnownProperties.GUARDRAIL_COLSIZE_KB, this.colSizeInKB);
+            logger.error("{} must be greater than or equal to zero, but is {}", KnownProperties.GUARDRAIL_COLSIZE_KB,
+                    this.colSizeInKB);
             isValid = false;
         }
@@ -73,11 +75,11 @@ protected boolean validateProperties() {
     @Override
     public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) {
-        if (null==originTable || !originTable.isOrigin()) {
+        if (null == originTable || !originTable.isOrigin()) {
            logger.error("originTable is null, or is not an origin table");
            return false;
        }
-        if (null==targetTable || targetTable.isOrigin()) {
+        if (null == targetTable || targetTable.isOrigin()) {
            logger.error("targetTable is null, or is an origin table");
            return false;
        }
@@ -91,16 +93,20 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable)
             return false;
         }
-        if (logDebug) logger.debug("Guardrail is {}. colSizeInKB={}", isEnabled ? "enabled" : "disabled", colSizeInKB);
+        if (logDebug)
+            logger.debug("Guardrail is {}. colSizeInKB={}", isEnabled ?
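
// With BASE_FACTOR = 1000, the check(...) method that follows flags any column value whose serialized
// size exceeds colSizeInKB * 1000 bytes, and the report prints the size scaled back down. A sketch of
// that arithmetic (double division used here for clarity; values are illustrative):
import java.text.DecimalFormat;

public class GuardrailThresholdSketch {
    public static void main(String[] args) {
        double colSizeInKB = 10;  // KnownProperties.GUARDRAIL_COLSIZE_KB
        int baseFactor = 1000;    // Guardrail.BASE_FACTOR
        int colSize = 12_345;     // serialized bytes of one column value

        if (colSize > colSizeInKB * baseFactor) {
            System.out.println("my_column(" + new DecimalFormat("0.###").format((double) colSize / baseFactor) + ")");
            // prints: my_column(12.345)
        }
    }
}
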
"enabled" : "disabled", colSizeInKB); return isValid; } - private Map check(Map currentChecks, int targetIndex, Object targetValue) { + private Map check(Map currentChecks, int targetIndex, Object targetValue) { int colSize = targetTable.byteCount(targetIndex, targetValue); - if (logTrace) logger.trace("Column {} at targetIndex {} has size {} bytes", targetTable.getColumnNames(false).get(targetIndex), targetIndex, colSize); + if (logTrace) + logger.trace("Column {} at targetIndex {} has size {} bytes", + targetTable.getColumnNames(false).get(targetIndex), targetIndex, colSize); if (colSize > colSizeInKB * BASE_FACTOR) { - if (null==currentChecks) currentChecks = new HashMap(); + if (null == currentChecks) + currentChecks = new HashMap(); currentChecks.put(targetTable.getColumnNames(false).get(targetIndex), colSize); } return currentChecks; @@ -109,37 +115,43 @@ private Map check(Map currentChecks, int targetI public String guardrailChecks(Record record) { if (!isEnabled) return null; - if (null==record) + if (null == record) return CLEAN_CHECK; - if (null==record.getOriginRow()) + if (null == record.getOriginRow()) return CLEAN_CHECK; - Map largeColumns = null; + Map largeColumns = null; - // As the order of feature loading is not guaranteed, we wait until the first record to figure out the explodeMap - if (null==explodeMap) calcExplodeMap(); + // As the order of feature loading is not guaranteed, we wait until the first record to figure out the + // explodeMap + if (null == explodeMap) + calcExplodeMap(); Row row = record.getOriginRow(); - for (int i=0; i entry : largeColumns.entrySet()) { - if (colCount++>0) sb.append(","); - sb.append(entry.getKey()).append("(").append(decimalFormat.format(entry.getValue()/BASE_FACTOR)).append(")"); + int colCount = 0; + for (Map.Entry entry : largeColumns.entrySet()) { + if (colCount++ > 0) + sb.append(","); + sb.append(entry.getKey()).append("(").append(decimalFormat.format(entry.getValue() / BASE_FACTOR)) + .append(")"); } return sb.toString(); @@ -147,11 +159,14 @@ public String guardrailChecks(Record record) { private void calcExplodeMap() { this.explodeMap = (ExplodeMap) originTable.getFeature(Featureset.EXPLODE_MAP); - if (null!=explodeMap && explodeMap.isEnabled()) { + if (null != explodeMap && explodeMap.isEnabled()) { explodeMapIndex = explodeMap.getOriginColumnIndex(); explodeMapKeyIndex = explodeMap.getKeyColumnIndex(); explodeMapValueIndex = explodeMap.getValueColumnIndex(); - if (logDebug) logger.debug("ExplodeMap is enabled. explodeMapIndex={}, explodeMapKeyIndex={}, explodeMapValueIndex={}", explodeMapIndex, explodeMapKeyIndex, explodeMapValueIndex); + if (logDebug) + logger.debug( + "ExplodeMap is enabled. 
explodeMapIndex={}, explodeMapKeyIndex={}, explodeMapValueIndex={}", + explodeMapIndex, explodeMapKeyIndex, explodeMapValueIndex); } } } diff --git a/src/main/java/com/datastax/cdm/feature/OriginFilterCondition.java b/src/main/java/com/datastax/cdm/feature/OriginFilterCondition.java index 27112c78..afdd8590 100644 --- a/src/main/java/com/datastax/cdm/feature/OriginFilterCondition.java +++ b/src/main/java/com/datastax/cdm/feature/OriginFilterCondition.java @@ -15,13 +15,14 @@ */ package com.datastax.cdm.feature; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.cdm.properties.KnownProperties; import com.datastax.cdm.schema.CqlTable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -public class OriginFilterCondition extends AbstractFeature { +public class OriginFilterCondition extends AbstractFeature { public final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private String filterCondition; @@ -30,15 +31,16 @@ public class OriginFilterCondition extends AbstractFeature { public boolean loadProperties(IPropertyHelper propertyHelper) { this.filterCondition = propertyHelper.getString(KnownProperties.FILTER_CQL_WHERE_CONDITION); isValid = validateProperties(); - isLoaded =true; - isEnabled=(isValid && null != filterCondition && !filterCondition.isEmpty()); + isLoaded = true; + isEnabled = (isValid && null != filterCondition && !filterCondition.isEmpty()); return isValid; } @Override protected boolean validateProperties() { isValid = true; - if (null == filterCondition || filterCondition.isEmpty()) return isValid; + if (null == filterCondition || filterCondition.isEmpty()) + return isValid; String trimmedFilter = filterCondition.trim(); if (trimmedFilter.isEmpty()) { @@ -65,9 +67,11 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) } // TODO: in future, we may want to validate the condition against the origin table via initializeAndValidate - logger.info("Feature {} is {}", this.getClass().getSimpleName(), isEnabled?"enabled":"disabled"); + logger.info("Feature {} is {}", this.getClass().getSimpleName(), isEnabled ? "enabled" : "disabled"); return isValid; } - public String getFilterCondition() { return null == filterCondition ? "" : filterCondition; } + public String getFilterCondition() { + return null == filterCondition ? 
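
// getFilterCondition() above returns the raw condition string (or "" when unset); a sketch of how such a
// condition would typically be appended to the origin SELECT. The actual statement assembly lives in the
// statement classes, so treat this as an assumption about usage, not CDM's exact code:
public class FilterConditionSketch {
    static String applyFilter(String selectCql, String filterCondition) {
        if (filterCondition == null || filterCondition.trim().isEmpty()) {
            return selectCql; // feature disabled: query unchanged
        }
        return selectCql + " " + filterCondition.trim();
    }

    public static void main(String[] args) {
        System.out.println(applyFilter("SELECT * FROM ks.tbl WHERE token(pk) >= :min AND token(pk) <= :max",
                "AND last_updated > '2024-01-01' ALLOW FILTERING"));
    }
}
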
"" : filterCondition; + } } diff --git a/src/main/java/com/datastax/cdm/feature/TrackRun.java b/src/main/java/com/datastax/cdm/feature/TrackRun.java index 0debe375..b0a4aa84 100644 --- a/src/main/java/com/datastax/cdm/feature/TrackRun.java +++ b/src/main/java/com/datastax/cdm/feature/TrackRun.java @@ -26,39 +26,40 @@ import com.datastax.oss.driver.api.core.CqlSession; public class TrackRun { - public enum RUN_TYPE { - MIGRATE, DIFF_DATA - } - public enum RUN_STATUS { - NOT_STARTED, STARTED, PASS, FAIL, DIFF - } + public enum RUN_TYPE { + MIGRATE, DIFF_DATA + } - public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - private TargetUpsertRunDetailsStatement runStatement; + public enum RUN_STATUS { + NOT_STARTED, STARTED, PASS, FAIL, DIFF + } - public TrackRun(CqlSession session, String keyspaceTable) { - this.runStatement = new TargetUpsertRunDetailsStatement(session, keyspaceTable); - } + public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + private TargetUpsertRunDetailsStatement runStatement; - public Collection getPendingPartitions(long prevRunId) { - Collection pendingParts = runStatement.getPendingPartitions(prevRunId); - logger.info("###################### {} partitions pending from previous run id {} ######################", - pendingParts.size(), prevRunId); - return pendingParts; - } + public TrackRun(CqlSession session, String keyspaceTable) { + this.runStatement = new TargetUpsertRunDetailsStatement(session, keyspaceTable); + } - public long initCdmRun(Collection parts, RUN_TYPE runType) { - long runId = runStatement.initCdmRun(parts, runType); - logger.info("###################### Run Id for this job is: {} ######################", runId); + public Collection getPendingPartitions(long prevRunId) { + Collection pendingParts = runStatement.getPendingPartitions(prevRunId); + logger.info("###################### {} partitions pending from previous run id {} ######################", + pendingParts.size(), prevRunId); + return pendingParts; + } - return runId; - } + public long initCdmRun(Collection parts, RUN_TYPE runType) { + long runId = runStatement.initCdmRun(parts, runType); + logger.info("###################### Run Id for this job is: {} ######################", runId); - public void updateCdmRun(BigInteger min, RUN_STATUS status) { - runStatement.updateCdmRun(min, status); - } + return runId; + } - public void endCdmRun(String runInfo) { - runStatement.updateCdmRunInfo(runInfo); - } + public void updateCdmRun(BigInteger min, RUN_STATUS status) { + runStatement.updateCdmRun(min, status); + } + + public void endCdmRun(String runInfo) { + runStatement.updateCdmRunInfo(runInfo); + } } diff --git a/src/main/java/com/datastax/cdm/feature/WritetimeTTL.java b/src/main/java/com/datastax/cdm/feature/WritetimeTTL.java index 78a94a8f..3ef787cf 100644 --- a/src/main/java/com/datastax/cdm/feature/WritetimeTTL.java +++ b/src/main/java/com/datastax/cdm/feature/WritetimeTTL.java @@ -15,20 +15,21 @@ */ package com.datastax.cdm.feature; -import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.cdm.properties.IPropertyHelper; -import com.datastax.cdm.properties.KnownProperties; -import com.datastax.cdm.schema.CqlTable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.time.Instant; import java.util.*; import java.util.stream.Collectors; -public class WritetimeTTL extends AbstractFeature { +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.datastax.cdm.properties.IPropertyHelper; +import com.datastax.cdm.properties.KnownProperties; +import com.datastax.cdm.schema.CqlTable; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; + +public class WritetimeTTL extends AbstractFeature { public final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); private final boolean logDebug = logger.isDebugEnabled();; @@ -49,21 +50,22 @@ public class WritetimeTTL extends AbstractFeature { public boolean loadProperties(IPropertyHelper propertyHelper) { this.autoTTLNames = propertyHelper.getBoolean(KnownProperties.ORIGIN_TTL_AUTO); this.ttlNames = getTTLNames(propertyHelper); - if (null!=this.ttlNames && !this.ttlNames.isEmpty()) { + if (null != this.ttlNames && !this.ttlNames.isEmpty()) { logger.info("PARAM -- TTLCols: {}", ttlNames); this.autoTTLNames = false; } this.autoWritetimeNames = propertyHelper.getBoolean(KnownProperties.ORIGIN_WRITETIME_AUTO); this.writetimeNames = getWritetimeNames(propertyHelper); - if (null!=this.writetimeNames && !this.writetimeNames.isEmpty()) { + if (null != this.writetimeNames && !this.writetimeNames.isEmpty()) { logger.info("PARAM -- WriteTimestampCols: {}", writetimeNames); this.autoWritetimeNames = false; } this.customWritetime = getCustomWritetime(propertyHelper); if (this.customWritetime > 0) { - logger.info("PARAM -- {}: {} datetime is {} ", KnownProperties.TRANSFORM_CUSTOM_WRITETIME, customWritetime, Instant.ofEpochMilli(customWritetime / 1000)); + logger.info("PARAM -- {}: {} datetime is {} ", KnownProperties.TRANSFORM_CUSTOM_WRITETIME, customWritetime, + Instant.ofEpochMilli(customWritetime / 1000)); } this.writetimeIncrement = propertyHelper.getLong(KnownProperties.TRANSFORM_CUSTOM_WRITETIME_INCREMENT); @@ -75,19 +77,19 @@ public boolean loadProperties(IPropertyHelper propertyHelper) { this.filterMin = getMinFilter(propertyHelper); this.filterMax = getMaxFilter(propertyHelper); - this.hasWriteTimestampFilter = (null != filterMin && null != filterMax && filterMin > 0 && filterMax > 0 && filterMax > filterMin); + this.hasWriteTimestampFilter = (null != filterMin && null != filterMax && filterMin > 0 && filterMax > 0 + && filterMax > filterMin); if (this.hasWriteTimestampFilter) { - logger.info("PARAM -- {}: {} datetime is {} ", KnownProperties.FILTER_WRITETS_MIN, filterMin, Instant.ofEpochMilli(filterMin / 1000)); - logger.info("PARAM -- {}: {} datetime is {} ", KnownProperties.FILTER_WRITETS_MAX, filterMax, Instant.ofEpochMilli(filterMax / 1000)); + logger.info("PARAM -- {}: {} datetime is {} ", KnownProperties.FILTER_WRITETS_MIN, filterMin, + Instant.ofEpochMilli(filterMin / 1000)); + logger.info("PARAM -- {}: {} datetime is {} ", KnownProperties.FILTER_WRITETS_MAX, filterMax, + Instant.ofEpochMilli(filterMax / 1000)); } - isValid = validateProperties(); - isEnabled = isValid && - ((null != ttlNames && !ttlNames.isEmpty()) - || (null != writetimeNames && !writetimeNames.isEmpty()) - || autoTTLNames || autoWritetimeNames - || customWritetime > 0 || customTTL > 0); + isEnabled = isValid + && ((null != ttlNames && !ttlNames.isEmpty()) || (null != writetimeNames && !writetimeNames.isEmpty()) + || autoTTLNames || autoWritetimeNames || customWritetime > 0 || customTTL > 0); isLoaded = true; return isValid; @@ -101,8 +103,9 @@ protected boolean validateProperties() { validateTTLNames(); validateWritetimeNames(); - if 
(null==this.writetimeIncrement || this.writetimeIncrement < 0L) {
-            logger.error(KnownProperties.TRANSFORM_CUSTOM_WRITETIME_INCREMENT + " must be set to a value greater than or equal to zero");
+        if (null == this.writetimeIncrement || this.writetimeIncrement < 0L) {
+            logger.error(KnownProperties.TRANSFORM_CUSTOM_WRITETIME_INCREMENT
+                    + " must be set to a value greater than or equal to zero");
             isValid = false;
         }
@@ -111,7 +114,7 @@ protected boolean validateProperties() {
     @Override
     public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable) {
-        if (null==originTable) {
+        if (null == originTable) {
             throw new IllegalArgumentException("originTable is null");
         }
         if (!originTable.isOrigin()) {
@@ -125,7 +128,8 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable)
             isEnabled = false;
         }
-        logger.info("Counter table does not support TTL or WriteTimestamp columns as they cannot set on write, so feature is disabled");
+        logger.info(
+                "Counter table does not support TTL or WriteTimestamp columns as they cannot be set on write, so feature is disabled");
         return true;
     }
@@ -148,20 +152,21 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable)
         validateTTLColumns(originTable);
         validateWritetimeColumns(originTable);
-        if (hasWriteTimestampFilter && (null==writetimeNames || writetimeNames.isEmpty())) {
+        if (hasWriteTimestampFilter && (null == writetimeNames || writetimeNames.isEmpty())) {
             logger.error("WriteTimestamp filter is configured but no WriteTimestamp columns are defined");
             isValid = false;
         }
-        if (this.writetimeIncrement == 0L && (null != writetimeNames && !writetimeNames.isEmpty())
-                && originTable.hasUnfrozenList()) {
-            logger.warn("Origin table has at least one unfrozen List, and "
-                    + KnownProperties.TRANSFORM_CUSTOM_WRITETIME_INCREMENT
-                    + " is set to zero; this may result in duplicate list entries on reruns or validation with autocorrect.");
-        }
+        if (this.writetimeIncrement == 0L && (null != writetimeNames && !writetimeNames.isEmpty())
+                && originTable.hasUnfrozenList()) {
+            logger.warn("Origin table has at least one unfrozen List, and "
+                    + KnownProperties.TRANSFORM_CUSTOM_WRITETIME_INCREMENT
+                    + " is set to zero; this may result in duplicate list entries on reruns or validation with autocorrect.");
+        }
-        if (!isValid) isEnabled = false;
-        logger.info("Feature {} is {}", this.getClass().getSimpleName(), isEnabled?"enabled":"disabled");
+        if (!isValid)
+            isEnabled = false;
+        logger.info("Feature {} is {}", this.getClass().getSimpleName(), isEnabled ? "enabled" : "disabled");
         return isValid;
     }
@@ -175,13 +180,13 @@ public static List getWritetimeNames(IPropertyHelper propertyHelper) {
     protected static Long getCustomWritetime(IPropertyHelper propertyHelper) {
         Long cwt = propertyHelper.getLong(KnownProperties.TRANSFORM_CUSTOM_WRITETIME);
-        return null==cwt ? 0L : cwt;
+        return null == cwt ? 0L : cwt;
     }
     protected static Long getCustomTTL(IPropertyHelper propertyHelper) {
-        Long cttl = propertyHelper.getLong(KnownProperties.TRANSFORM_CUSTOM_TTL);
-        return null == cttl ? 0L : cttl;
-    }
+        Long cttl = propertyHelper.getLong(KnownProperties.TRANSFORM_CUSTOM_TTL);
+        return null == cttl ?
0L : cttl; + } public static Long getMinFilter(IPropertyHelper propertyHelper) { return propertyHelper.getLong(KnownProperties.FILTER_WRITETS_MIN); @@ -191,37 +196,56 @@ public static Long getMaxFilter(IPropertyHelper propertyHelper) { return propertyHelper.getLong(KnownProperties.FILTER_WRITETS_MAX); } - public Long getCustomWritetime() { return customWritetime; } - public Long getCustomTTL() { return customTTL; } - public boolean hasWriteTimestampFilter() { return isEnabled && hasWriteTimestampFilter; } - public Long getMinWriteTimeStampFilter() { return (this.hasWriteTimestampFilter && null!=this.filterMin) ? this.filterMin : Long.MIN_VALUE; } - public Long getMaxWriteTimeStampFilter() { return (this.hasWriteTimestampFilter && null!=this.filterMax) ? this.filterMax : Long.MAX_VALUE; } + public Long getCustomWritetime() { + return customWritetime; + } - public boolean hasTTLColumns() { - return customTTL > 0 || null != this.ttlSelectColumnIndexes && !this.ttlSelectColumnIndexes.isEmpty(); - } + public Long getCustomTTL() { + return customTTL; + } - public boolean hasWritetimeColumns() { return customWritetime>0 || null!=this.writetimeSelectColumnIndexes && !this.writetimeSelectColumnIndexes.isEmpty(); } + public boolean hasWriteTimestampFilter() { + return isEnabled && hasWriteTimestampFilter; + } + + public Long getMinWriteTimeStampFilter() { + return (this.hasWriteTimestampFilter && null != this.filterMin) ? this.filterMin : Long.MIN_VALUE; + } + + public Long getMaxWriteTimeStampFilter() { + return (this.hasWriteTimestampFilter && null != this.filterMax) ? this.filterMax : Long.MAX_VALUE; + } + + public boolean hasTTLColumns() { + return customTTL > 0 || null != this.ttlSelectColumnIndexes && !this.ttlSelectColumnIndexes.isEmpty(); + } + + public boolean hasWritetimeColumns() { + return customWritetime > 0 + || null != this.writetimeSelectColumnIndexes && !this.writetimeSelectColumnIndexes.isEmpty(); + } public Long getLargestWriteTimeStamp(Row row) { - if (logDebug) logger.debug("getLargestWriteTimeStamp: customWritetime={}, writetimeSelectColumnIndexes={}", customWritetime,writetimeSelectColumnIndexes); - if (this.customWritetime > 0) return this.customWritetime; - if (null==this.writetimeSelectColumnIndexes || this.writetimeSelectColumnIndexes.isEmpty()) return null; - OptionalLong max = this.writetimeSelectColumnIndexes.stream() - .mapToLong(row::getLong) - .filter(Objects::nonNull) + if (logDebug) + logger.debug("getLargestWriteTimeStamp: customWritetime={}, writetimeSelectColumnIndexes={}", + customWritetime, writetimeSelectColumnIndexes); + if (this.customWritetime > 0) + return this.customWritetime; + if (null == this.writetimeSelectColumnIndexes || this.writetimeSelectColumnIndexes.isEmpty()) + return null; + OptionalLong max = this.writetimeSelectColumnIndexes.stream().mapToLong(row::getLong).filter(Objects::nonNull) .max(); return max.isPresent() ? 
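
// The selection order in getLargestWriteTimeStamp(...) above, whose return completes just below: a
// positive custom writetime short-circuits everything; otherwise the largest per-column WRITETIME
// (microseconds) wins, plus the configured increment. A self-contained sketch:
import java.util.List;
import java.util.OptionalLong;

public class LargestWritetimeSketch {
    static Long largestWritetime(long customWritetime, List<Long> columnWritetimes, long increment) {
        if (customWritetime > 0)
            return customWritetime; // custom value overrides origin columns
        OptionalLong max = columnWritetimes.stream().mapToLong(Long::longValue).max();
        return max.isPresent() ? max.getAsLong() + increment : null;
    }

    public static void main(String[] args) {
        // no custom writetime: max(1_000_001, 1_000_005) + 2 = 1_000_007
        System.out.println(largestWritetime(0L, List.of(1_000_001L, 1_000_005L), 2L));
    }
}
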
max.getAsLong() + this.writetimeIncrement : null; } public Integer getLargestTTL(Row row) { - if (logDebug) logger.debug("getLargestTTL: customTTL={}, ttlSelectColumnIndexes={}", customTTL, ttlSelectColumnIndexes); - if (this.customTTL > 0) return this.customTTL.intValue(); - if (null==this.ttlSelectColumnIndexes || this.ttlSelectColumnIndexes.isEmpty()) return null; - OptionalInt max = this.ttlSelectColumnIndexes.stream() - .mapToInt(row::getInt) - .filter(Objects::nonNull) - .max(); + if (logDebug) + logger.debug("getLargestTTL: customTTL={}, ttlSelectColumnIndexes={}", customTTL, ttlSelectColumnIndexes); + if (this.customTTL > 0) + return this.customTTL.intValue(); + if (null == this.ttlSelectColumnIndexes || this.ttlSelectColumnIndexes.isEmpty()) + return null; + OptionalInt max = this.ttlSelectColumnIndexes.stream().mapToInt(row::getInt).filter(Objects::nonNull).max(); return max.isPresent() ? max.getAsInt() : null; } @@ -240,7 +264,8 @@ private void validateTTLColumns(CqlTable originTable) { return; } else { if (!originTable.isWritetimeTTLColumn(ttlName)) { - logger.error("TTL column {} is not a column which can provide a TTL on origin table {}", ttlName, originTable.getKeyspaceName()); + logger.error("TTL column {} is not a column which can provide a TTL on origin table {}", ttlName, + originTable.getKeyspaceName()); isValid = false; return; } @@ -251,9 +276,7 @@ private void validateTTLColumns(CqlTable originTable) { } originTable.extendColumns(newColumnNames, newColumnDataTypes); - ttlSelectColumnIndexes = newColumnNames.stream() - .mapToInt(originTable::indexOf) - .boxed() + ttlSelectColumnIndexes = newColumnNames.stream().mapToInt(originTable::indexOf).boxed() .collect(Collectors.toList()); } @@ -267,12 +290,14 @@ private void validateWritetimeColumns(CqlTable originTable) { for (String writetimeName : writetimeNames) { int index = originTable.indexOf(writetimeName); if (index < 0) { - logger.error("Writetime column {} is not configured for origin table {}", writetimeName, originTable.getKeyspaceName()); + logger.error("Writetime column {} is not configured for origin table {}", writetimeName, + originTable.getKeyspaceName()); isValid = false; return; } else { if (!originTable.isWritetimeTTLColumn(writetimeName)) { - logger.error("Writetime column {} is not a column which can provide a WRITETIME on origin table {}", writetimeName, originTable.getKeyspaceName()); + logger.error("Writetime column {} is not a column which can provide a WRITETIME on origin table {}", + writetimeName, originTable.getKeyspaceName()); isValid = false; return; } @@ -284,21 +309,19 @@ private void validateWritetimeColumns(CqlTable originTable) { originTable.extendColumns(newColumnNames, newColumnDataTypes); writetimeSelectColumnIndexes = new ArrayList<>(); - writetimeSelectColumnIndexes = newColumnNames.stream() - .mapToInt(originTable::indexOf) - .boxed() + writetimeSelectColumnIndexes = newColumnNames.stream().mapToInt(originTable::indexOf).boxed() .collect(Collectors.toList()); } private void validateTTLNames() { - if (null!=ttlNames && ttlNames.size()==0) { + if (null != ttlNames && ttlNames.size() == 0) { logger.error("must be null or not empty"); isValid = false; } } private void validateWritetimeNames() { - if (null!=writetimeNames && writetimeNames.size()==0) { + if (null != writetimeNames && writetimeNames.size() == 0) { logger.error("must be null or not empty"); isValid = false; } diff --git a/src/main/java/com/datastax/cdm/job/AbstractJobSession.java 
b/src/main/java/com/datastax/cdm/job/AbstractJobSession.java index ebafa6f0..86c630f4 100644 --- a/src/main/java/com/datastax/cdm/job/AbstractJobSession.java +++ b/src/main/java/com/datastax/cdm/job/AbstractJobSession.java @@ -15,6 +15,12 @@ */ package com.datastax.cdm.job; +import java.util.Collection; + +import org.apache.spark.SparkConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.datastax.cdm.cql.EnhancedSession; import com.datastax.cdm.data.PKFactory; import com.datastax.cdm.feature.Feature; @@ -25,91 +31,85 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.shaded.guava.common.util.concurrent.RateLimiter; -import java.util.Collection; - -import org.apache.spark.SparkConf; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public abstract class AbstractJobSession extends BaseJobSession { - public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - protected EnhancedSession originSession; - protected EnhancedSession targetSession; - protected Guardrail guardrailFeature; - protected boolean guardrailEnabled; - protected JobCounter jobCounter; - protected Long printStatsAfter; - protected TrackRun trackRunFeature; - - protected AbstractJobSession(CqlSession originSession, CqlSession targetSession, SparkConf sc) { - this(originSession, targetSession, sc, false); - } - - protected AbstractJobSession(CqlSession originSession, CqlSession targetSession, SparkConf sc, - boolean isJobMigrateRowsFromFile) { - super(sc); - - if (originSession == null) { - return; - } - - this.printStatsAfter = propertyHelper.getLong(KnownProperties.PRINT_STATS_AFTER); - if (!propertyHelper.meetsMinimum(KnownProperties.PRINT_STATS_AFTER, printStatsAfter, 1L)) { - logger.warn(KnownProperties.PRINT_STATS_AFTER + " must be greater than 0. Setting to default value of " - + KnownProperties.getDefaultAsString(KnownProperties.PRINT_STATS_AFTER)); - propertyHelper.setProperty(KnownProperties.PRINT_STATS_AFTER, - KnownProperties.getDefault(KnownProperties.PRINT_STATS_AFTER)); - printStatsAfter = propertyHelper.getLong(KnownProperties.PRINT_STATS_AFTER); - } - this.jobCounter = new JobCounter(printStatsAfter, - propertyHelper.getBoolean(KnownProperties.PRINT_STATS_PER_PART)); - - rateLimiterOrigin = RateLimiter.create(propertyHelper.getInteger(KnownProperties.PERF_RATELIMIT_ORIGIN)); - rateLimiterTarget = RateLimiter.create(propertyHelper.getInteger(KnownProperties.PERF_RATELIMIT_TARGET)); - - logger.info("PARAM -- Origin Rate Limit: {}", rateLimiterOrigin.getRate()); - logger.info("PARAM -- Target Rate Limit: {}", rateLimiterTarget.getRate()); - - this.originSession = new EnhancedSession(propertyHelper, originSession, true); - this.targetSession = new EnhancedSession(propertyHelper, targetSession, false); - this.originSession.getCqlTable().setOtherCqlTable(this.targetSession.getCqlTable()); - this.targetSession.getCqlTable().setOtherCqlTable(this.originSession.getCqlTable()); - this.originSession.getCqlTable().setFeatureMap(featureMap); - this.targetSession.getCqlTable().setFeatureMap(featureMap); - - boolean allFeaturesValid = true; - for (Feature f : featureMap.values()) { - if (!f.initializeAndValidate(this.originSession.getCqlTable(), this.targetSession.getCqlTable())) { - allFeaturesValid = false; - logger.error("Feature {} is not valid. Please check the configuration.", f.getClass().getName()); - } - } - if (!allFeaturesValid) { - throw new RuntimeException("One or more features are not valid. 
Please check the configuration."); - } - - PKFactory pkFactory = new PKFactory(propertyHelper, this.originSession.getCqlTable(), - this.targetSession.getCqlTable()); - this.originSession.setPKFactory(pkFactory); - this.targetSession.setPKFactory(pkFactory); - - // Guardrail is referenced by many jobs, and is evaluated against the target - // table - this.guardrailFeature = (Guardrail) this.targetSession.getCqlTable().getFeature(Featureset.GUARDRAIL_CHECK); - this.guardrailEnabled = this.guardrailFeature.isEnabled(); - } - - public abstract void processSlice(T slice); - - public synchronized void initCdmRun(Collection parts, TrackRun trackRunFeature) { - } - - public synchronized void printCounts(boolean isFinal) { - if (isFinal) { - jobCounter.printFinal(trackRunFeature); - } else { - jobCounter.printProgress(); - } - } + public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + protected EnhancedSession originSession; + protected EnhancedSession targetSession; + protected Guardrail guardrailFeature; + protected boolean guardrailEnabled; + protected JobCounter jobCounter; + protected Long printStatsAfter; + protected TrackRun trackRunFeature; + + protected AbstractJobSession(CqlSession originSession, CqlSession targetSession, SparkConf sc) { + this(originSession, targetSession, sc, false); + } + + protected AbstractJobSession(CqlSession originSession, CqlSession targetSession, SparkConf sc, + boolean isJobMigrateRowsFromFile) { + super(sc); + + if (originSession == null) { + return; + } + + this.printStatsAfter = propertyHelper.getLong(KnownProperties.PRINT_STATS_AFTER); + if (!propertyHelper.meetsMinimum(KnownProperties.PRINT_STATS_AFTER, printStatsAfter, 1L)) { + logger.warn(KnownProperties.PRINT_STATS_AFTER + " must be greater than 0. Setting to default value of " + + KnownProperties.getDefaultAsString(KnownProperties.PRINT_STATS_AFTER)); + propertyHelper.setProperty(KnownProperties.PRINT_STATS_AFTER, + KnownProperties.getDefault(KnownProperties.PRINT_STATS_AFTER)); + printStatsAfter = propertyHelper.getLong(KnownProperties.PRINT_STATS_AFTER); + } + this.jobCounter = new JobCounter(printStatsAfter, + propertyHelper.getBoolean(KnownProperties.PRINT_STATS_PER_PART)); + + rateLimiterOrigin = RateLimiter.create(propertyHelper.getInteger(KnownProperties.PERF_RATELIMIT_ORIGIN)); + rateLimiterTarget = RateLimiter.create(propertyHelper.getInteger(KnownProperties.PERF_RATELIMIT_TARGET)); + + logger.info("PARAM -- Origin Rate Limit: {}", rateLimiterOrigin.getRate()); + logger.info("PARAM -- Target Rate Limit: {}", rateLimiterTarget.getRate()); + + this.originSession = new EnhancedSession(propertyHelper, originSession, true); + this.targetSession = new EnhancedSession(propertyHelper, targetSession, false); + this.originSession.getCqlTable().setOtherCqlTable(this.targetSession.getCqlTable()); + this.targetSession.getCqlTable().setOtherCqlTable(this.originSession.getCqlTable()); + this.originSession.getCqlTable().setFeatureMap(featureMap); + this.targetSession.getCqlTable().setFeatureMap(featureMap); + + boolean allFeaturesValid = true; + for (Feature f : featureMap.values()) { + if (!f.initializeAndValidate(this.originSession.getCqlTable(), this.targetSession.getCqlTable())) { + allFeaturesValid = false; + logger.error("Feature {} is not valid. Please check the configuration.", f.getClass().getName()); + } + } + if (!allFeaturesValid) { + throw new RuntimeException("One or more features are not valid. 
Please check the configuration."); + } + + PKFactory pkFactory = new PKFactory(propertyHelper, this.originSession.getCqlTable(), + this.targetSession.getCqlTable()); + this.originSession.setPKFactory(pkFactory); + this.targetSession.setPKFactory(pkFactory); + + // Guardrail is referenced by many jobs, and is evaluated against the target + // table + this.guardrailFeature = (Guardrail) this.targetSession.getCqlTable().getFeature(Featureset.GUARDRAIL_CHECK); + this.guardrailEnabled = this.guardrailFeature.isEnabled(); + } + + public abstract void processSlice(T slice); + + public synchronized void initCdmRun(Collection parts, TrackRun trackRunFeature) { + } + + public synchronized void printCounts(boolean isFinal) { + if (isFinal) { + jobCounter.printFinal(trackRunFeature); + } else { + jobCounter.printProgress(); + } + } } diff --git a/src/main/java/com/datastax/cdm/job/BaseJobSession.java b/src/main/java/com/datastax/cdm/job/BaseJobSession.java index ca3feb07..6a400d3a 100644 --- a/src/main/java/com/datastax/cdm/job/BaseJobSession.java +++ b/src/main/java/com/datastax/cdm/job/BaseJobSession.java @@ -32,48 +32,48 @@ public abstract class BaseJobSession { - public static final String THREAD_CONTEXT_LABEL = "ThreadLabel"; - protected static final String NEW_LINE = System.lineSeparator(); - private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - protected PropertyHelper propertyHelper = PropertyHelper.getInstance(); - protected Map featureMap; - protected RateLimiter rateLimiterOrigin; - protected RateLimiter rateLimiterTarget; + public static final String THREAD_CONTEXT_LABEL = "ThreadLabel"; + protected static final String NEW_LINE = System.lineSeparator(); + private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + protected PropertyHelper propertyHelper = PropertyHelper.getInstance(); + protected Map featureMap; + protected RateLimiter rateLimiterOrigin; + protected RateLimiter rateLimiterTarget; - protected BaseJobSession(SparkConf sc) { - propertyHelper.initializeSparkConf(sc); - this.featureMap = calcFeatureMap(propertyHelper); - ThreadContext.put(THREAD_CONTEXT_LABEL, getThreadLabel()); - } + protected BaseJobSession(SparkConf sc) { + propertyHelper.initializeSparkConf(sc); + this.featureMap = calcFeatureMap(propertyHelper); + ThreadContext.put(THREAD_CONTEXT_LABEL, getThreadLabel()); + } - private Map calcFeatureMap(PropertyHelper propertyHelper) { - Map rtn = new HashMap<>(); - for (Featureset f : Featureset.values()) { - if (f.toString().startsWith("TEST_")) - continue; // Skip test features - Feature feature = FeatureFactory.getFeature(f); // FeatureFactory throws an RTE if the feature is not - // implemented - if (feature.loadProperties(propertyHelper)) { - rtn.put(f, feature); - } - } - return rtn; - } + private Map calcFeatureMap(PropertyHelper propertyHelper) { + Map rtn = new HashMap<>(); + for (Featureset f : Featureset.values()) { + if (f.toString().startsWith("TEST_")) + continue; // Skip test features + Feature feature = FeatureFactory.getFeature(f); // FeatureFactory throws an RTE if the feature is not + // implemented + if (feature.loadProperties(propertyHelper)) { + rtn.put(f, feature); + } + } + return rtn; + } - protected String getThreadLabel() { - return ThreadContext.get("main"); - } + protected String getThreadLabel() { + return ThreadContext.get("main"); + } - protected String getThreadLabel(BigInteger min, BigInteger max) { - String minString = min.toString(); - String maxString = max.toString(); - int 
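
Both sessions above are throttled with Guava RateLimiter instances created from the PERF_RATELIMIT_ORIGIN and PERF_RATELIMIT_TARGET properties, and every row read or written costs one permit. A minimal sketch of that pattern, using plain Guava rather than the driver-shaded copy CDM imports, and with made-up rates:

    import com.google.common.util.concurrent.RateLimiter;

    // Minimal sketch of the per-row throttling pattern used above
    // (plain Guava here; CDM actually uses the driver-shaded copy).
    public class RateLimitSketch {
        public static void main(String[] args) {
            RateLimiter originLimiter = RateLimiter.create(20000); // read permits/second
            RateLimiter targetLimiter = RateLimiter.create(40000); // write permits/second

            for (int row = 0; row < 5; row++) {
                originLimiter.acquire(1); // blocks until a read permit is available
                // ... read one origin row ...
                targetLimiter.acquire(1); // blocks until a write permit is available
                // ... write one target row ...
            }
            System.out.println("done at ~" + originLimiter.getRate() + " reads/sec max");
        }
    }
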
minWidth = 20; - int formattedMaxWidth = Math.max(Math.max(minString.length(), maxString.length()), minWidth); + protected String getThreadLabel(BigInteger min, BigInteger max) { + String minString = min.toString(); + String maxString = max.toString(); + int minWidth = 20; + int formattedMaxWidth = Math.max(Math.max(minString.length(), maxString.length()), minWidth); - String formattedMin = String.format("%-" + minWidth + "s", minString).trim(); - String formattedMax = String.format("%" + formattedMaxWidth + "s", maxString); + String formattedMin = String.format("%-" + minWidth + "s", minString).trim(); + String formattedMax = String.format("%" + formattedMaxWidth + "s", maxString); - return formattedMin + ":" + formattedMax; - } + return formattedMin + ":" + formattedMax; + } } diff --git a/src/main/java/com/datastax/cdm/job/CopyJobSession.java b/src/main/java/com/datastax/cdm/job/CopyJobSession.java index 8001fdca..67ab5fd3 100644 --- a/src/main/java/com/datastax/cdm/job/CopyJobSession.java +++ b/src/main/java/com/datastax/cdm/job/CopyJobSession.java @@ -42,151 +42,151 @@ public class CopyJobSession extends AbstractJobSession { - private final PKFactory pkFactory; - private final boolean isCounterTable; - private final Integer fetchSize; - private final Integer batchSize; - public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - private TargetUpsertStatement targetUpsertStatement; - private TargetSelectByPKStatement targetSelectByPKStatement; - - protected CopyJobSession(CqlSession originSession, CqlSession targetSession, SparkConf sc) { - super(originSession, targetSession, sc); - this.jobCounter.setRegisteredTypes(JobCounter.CounterType.READ, JobCounter.CounterType.WRITE, - JobCounter.CounterType.SKIPPED, JobCounter.CounterType.ERROR, JobCounter.CounterType.UNFLUSHED); - - pkFactory = this.originSession.getPKFactory(); - isCounterTable = this.originSession.getCqlTable().isCounterTable(); - fetchSize = this.originSession.getCqlTable().getFetchSizeInRows(); - batchSize = this.originSession.getCqlTable().getBatchSize(); - - logger.info("CQL -- origin select: {}", this.originSession.getOriginSelectByPartitionRangeStatement().getCQL()); - logger.info("CQL -- target select: {}", this.targetSession.getTargetSelectByPKStatement().getCQL()); - logger.info("CQL -- target upsert: {}", this.targetSession.getTargetUpsertStatement().getCQL()); - } - - @Override - public void processSlice(SplitPartitions.Partition slice) { - this.getDataAndInsert(slice.getMin(), slice.getMax()); - } - - public synchronized void initCdmRun(Collection parts, TrackRun trackRunFeature) { - this.trackRunFeature = trackRunFeature; - if (null != trackRunFeature) - trackRunFeature.initCdmRun(parts, TrackRun.RUN_TYPE.MIGRATE); - } - - private void getDataAndInsert(BigInteger min, BigInteger max) { - ThreadContext.put(THREAD_CONTEXT_LABEL, getThreadLabel(min, max)); - logger.info("ThreadID: {} Processing min: {} max: {}", Thread.currentThread().getId(), min, max); - if (null != trackRunFeature) - trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.STARTED); - - BatchStatement batch = BatchStatement.newInstance(BatchType.UNLOGGED); - String guardrailCheck; - jobCounter.threadReset(); - - try { - OriginSelectByPartitionRangeStatement originSelectByPartitionRangeStatement = this.originSession - .getOriginSelectByPartitionRangeStatement(); - targetUpsertStatement = this.targetSession.getTargetUpsertStatement(); - targetSelectByPKStatement = this.targetSession.getTargetSelectByPKStatement(); - ResultSet 
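
getThreadLabel above builds the fixed-width "min:max" token-range label that is stored in the logging ThreadContext. A quick standalone check of the formatting it produces (the class name here is illustrative):

    import java.math.BigInteger;

    // Quick check of the "min:max" label formatting used for ThreadContext above.
    public class ThreadLabelSketch {
        static String label(BigInteger min, BigInteger max) {
            String minString = min.toString();
            String maxString = max.toString();
            int minWidth = 20;
            int width = Math.max(Math.max(minString.length(), maxString.length()), minWidth);
            // left side: padded then trimmed (so effectively the raw number);
            // right side: right-aligned to the common width
            String formattedMin = String.format("%-" + minWidth + "s", minString).trim();
            String formattedMax = String.format("%" + width + "s", maxString);
            return formattedMin + ":" + formattedMax;
        }

        public static void main(String[] args) {
            System.out.println(label(BigInteger.valueOf(-9223372036854775808L),
                    BigInteger.valueOf(9223372036854775807L)));
            // prints: -9223372036854775808: 9223372036854775807
        }
    }
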
resultSet = originSelectByPartitionRangeStatement - .execute(originSelectByPartitionRangeStatement.bind(min, max)); - Collection> writeResults = new ArrayList<>(); - - for (Row originRow : resultSet) { - rateLimiterOrigin.acquire(1); - jobCounter.threadIncrement(JobCounter.CounterType.READ); - - Record record = new Record(pkFactory.getTargetPK(originRow), originRow, null); - if (originSelectByPartitionRangeStatement.shouldFilterRecord(record)) { - jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); - continue; - } - - for (Record r : pkFactory.toValidRecordList(record)) { - if (guardrailEnabled) { - guardrailCheck = guardrailFeature.guardrailChecks(r); - if (guardrailCheck != null && guardrailCheck != Guardrail.CLEAN_CHECK) { - logger.error("Guardrails failed for PrimaryKey {}; {}", r.getPk(), guardrailCheck); - jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); - continue; - } - } - - BoundStatement boundUpsert = bind(r); - if (null == boundUpsert) { - jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); - continue; - } - - rateLimiterTarget.acquire(1); - batch = writeAsync(batch, writeResults, boundUpsert); - jobCounter.threadIncrement(JobCounter.CounterType.UNFLUSHED); - - if (jobCounter.getCount(JobCounter.CounterType.UNFLUSHED) > fetchSize) { - flushAndClearWrites(batch, writeResults); - jobCounter.threadIncrement(JobCounter.CounterType.WRITE, - jobCounter.getCount(JobCounter.CounterType.UNFLUSHED)); - jobCounter.threadReset(JobCounter.CounterType.UNFLUSHED); - } - } - } - - flushAndClearWrites(batch, writeResults); - jobCounter.threadIncrement(JobCounter.CounterType.WRITE, - jobCounter.getCount(JobCounter.CounterType.UNFLUSHED)); - jobCounter.threadReset(JobCounter.CounterType.UNFLUSHED); - if (null != trackRunFeature) - trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.PASS); - } catch (Exception e) { - jobCounter.threadIncrement(JobCounter.CounterType.ERROR, - jobCounter.getCount(JobCounter.CounterType.READ) - jobCounter.getCount(JobCounter.CounterType.WRITE) - - jobCounter.getCount(JobCounter.CounterType.SKIPPED)); - if (null != trackRunFeature) - trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.FAIL); - logger.error("Error with PartitionRange -- ThreadID: {} Processing min: {} max: {}", - Thread.currentThread().getId(), min, max, e); - logger.error("Error stats " + jobCounter.getThreadCounters(false)); - } finally { - jobCounter.globalIncrement(); - printCounts(false); - } - } - - private void flushAndClearWrites(BatchStatement batch, Collection> writeResults) { - if (batch.size() > 0) { - writeResults.add(targetUpsertStatement.executeAsync(batch)); - } - writeResults.stream().forEach(writeResult -> writeResult.toCompletableFuture().join().one()); - writeResults.clear(); - } - - private BoundStatement bind(Record r) { - if (isCounterTable) { - rateLimiterTarget.acquire(1); - Record targetRecord = targetSelectByPKStatement.getRecord(r.getPk()); - if (null != targetRecord) { - r.setTargetRow(targetRecord.getTargetRow()); - } - } - return targetUpsertStatement.bindRecord(r); - } - - private BatchStatement writeAsync(BatchStatement batch, Collection> writeResults, - BoundStatement boundUpsert) { - if (batchSize > 1) { - batch = batch.add(boundUpsert); - if (batch.size() >= batchSize) { - writeResults.add(targetUpsertStatement.executeAsync(batch)); - return BatchStatement.newInstance(BatchType.UNLOGGED); - } - return batch; - } else { - writeResults.add(targetUpsertStatement.executeAsync(boundUpsert)); - return batch; - } - } + private final PKFactory 
pkFactory; + private final boolean isCounterTable; + private final Integer fetchSize; + private final Integer batchSize; + public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + private TargetUpsertStatement targetUpsertStatement; + private TargetSelectByPKStatement targetSelectByPKStatement; + + protected CopyJobSession(CqlSession originSession, CqlSession targetSession, SparkConf sc) { + super(originSession, targetSession, sc); + this.jobCounter.setRegisteredTypes(JobCounter.CounterType.READ, JobCounter.CounterType.WRITE, + JobCounter.CounterType.SKIPPED, JobCounter.CounterType.ERROR, JobCounter.CounterType.UNFLUSHED); + + pkFactory = this.originSession.getPKFactory(); + isCounterTable = this.originSession.getCqlTable().isCounterTable(); + fetchSize = this.originSession.getCqlTable().getFetchSizeInRows(); + batchSize = this.originSession.getCqlTable().getBatchSize(); + + logger.info("CQL -- origin select: {}", this.originSession.getOriginSelectByPartitionRangeStatement().getCQL()); + logger.info("CQL -- target select: {}", this.targetSession.getTargetSelectByPKStatement().getCQL()); + logger.info("CQL -- target upsert: {}", this.targetSession.getTargetUpsertStatement().getCQL()); + } + + @Override + public void processSlice(SplitPartitions.Partition slice) { + this.getDataAndInsert(slice.getMin(), slice.getMax()); + } + + public synchronized void initCdmRun(Collection parts, TrackRun trackRunFeature) { + this.trackRunFeature = trackRunFeature; + if (null != trackRunFeature) + trackRunFeature.initCdmRun(parts, TrackRun.RUN_TYPE.MIGRATE); + } + + private void getDataAndInsert(BigInteger min, BigInteger max) { + ThreadContext.put(THREAD_CONTEXT_LABEL, getThreadLabel(min, max)); + logger.info("ThreadID: {} Processing min: {} max: {}", Thread.currentThread().getId(), min, max); + if (null != trackRunFeature) + trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.STARTED); + + BatchStatement batch = BatchStatement.newInstance(BatchType.UNLOGGED); + String guardrailCheck; + jobCounter.threadReset(); + + try { + OriginSelectByPartitionRangeStatement originSelectByPartitionRangeStatement = this.originSession + .getOriginSelectByPartitionRangeStatement(); + targetUpsertStatement = this.targetSession.getTargetUpsertStatement(); + targetSelectByPKStatement = this.targetSession.getTargetSelectByPKStatement(); + ResultSet resultSet = originSelectByPartitionRangeStatement + .execute(originSelectByPartitionRangeStatement.bind(min, max)); + Collection> writeResults = new ArrayList<>(); + + for (Row originRow : resultSet) { + rateLimiterOrigin.acquire(1); + jobCounter.threadIncrement(JobCounter.CounterType.READ); + + Record record = new Record(pkFactory.getTargetPK(originRow), originRow, null); + if (originSelectByPartitionRangeStatement.shouldFilterRecord(record)) { + jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); + continue; + } + + for (Record r : pkFactory.toValidRecordList(record)) { + if (guardrailEnabled) { + guardrailCheck = guardrailFeature.guardrailChecks(r); + if (guardrailCheck != null && guardrailCheck != Guardrail.CLEAN_CHECK) { + logger.error("Guardrails failed for PrimaryKey {}; {}", r.getPk(), guardrailCheck); + jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); + continue; + } + } + + BoundStatement boundUpsert = bind(r); + if (null == boundUpsert) { + jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); + continue; + } + + rateLimiterTarget.acquire(1); + batch = writeAsync(batch, writeResults, boundUpsert); + 
jobCounter.threadIncrement(JobCounter.CounterType.UNFLUSHED); + + if (jobCounter.getCount(JobCounter.CounterType.UNFLUSHED) > fetchSize) { + flushAndClearWrites(batch, writeResults); + jobCounter.threadIncrement(JobCounter.CounterType.WRITE, + jobCounter.getCount(JobCounter.CounterType.UNFLUSHED)); + jobCounter.threadReset(JobCounter.CounterType.UNFLUSHED); + } + } + } + + flushAndClearWrites(batch, writeResults); + jobCounter.threadIncrement(JobCounter.CounterType.WRITE, + jobCounter.getCount(JobCounter.CounterType.UNFLUSHED)); + jobCounter.threadReset(JobCounter.CounterType.UNFLUSHED); + if (null != trackRunFeature) + trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.PASS); + } catch (Exception e) { + jobCounter.threadIncrement(JobCounter.CounterType.ERROR, + jobCounter.getCount(JobCounter.CounterType.READ) - jobCounter.getCount(JobCounter.CounterType.WRITE) + - jobCounter.getCount(JobCounter.CounterType.SKIPPED)); + if (null != trackRunFeature) + trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.FAIL); + logger.error("Error with PartitionRange -- ThreadID: {} Processing min: {} max: {}", + Thread.currentThread().getId(), min, max, e); + logger.error("Error stats " + jobCounter.getThreadCounters(false)); + } finally { + jobCounter.globalIncrement(); + printCounts(false); + } + } + + private void flushAndClearWrites(BatchStatement batch, Collection> writeResults) { + if (batch.size() > 0) { + writeResults.add(targetUpsertStatement.executeAsync(batch)); + } + writeResults.stream().forEach(writeResult -> writeResult.toCompletableFuture().join().one()); + writeResults.clear(); + } + + private BoundStatement bind(Record r) { + if (isCounterTable) { + rateLimiterTarget.acquire(1); + Record targetRecord = targetSelectByPKStatement.getRecord(r.getPk()); + if (null != targetRecord) { + r.setTargetRow(targetRecord.getTargetRow()); + } + } + return targetUpsertStatement.bindRecord(r); + } + + private BatchStatement writeAsync(BatchStatement batch, Collection> writeResults, + BoundStatement boundUpsert) { + if (batchSize > 1) { + batch = batch.add(boundUpsert); + if (batch.size() >= batchSize) { + writeResults.add(targetUpsertStatement.executeAsync(batch)); + return BatchStatement.newInstance(BatchType.UNLOGGED); + } + return batch; + } else { + writeResults.add(targetUpsertStatement.executeAsync(boundUpsert)); + return batch; + } + } } diff --git a/src/main/java/com/datastax/cdm/job/CopyJobSessionFactory.java b/src/main/java/com/datastax/cdm/job/CopyJobSessionFactory.java index 97d9ee0e..323a1bea 100644 --- a/src/main/java/com/datastax/cdm/job/CopyJobSessionFactory.java +++ b/src/main/java/com/datastax/cdm/job/CopyJobSessionFactory.java @@ -15,13 +15,15 @@ */ package com.datastax.cdm.job; -import com.datastax.oss.driver.api.core.CqlSession; import org.apache.spark.SparkConf; +import com.datastax.oss.driver.api.core.CqlSession; + public class CopyJobSessionFactory implements IJobSessionFactory { private static CopyJobSession jobSession = null; - public AbstractJobSession getInstance(CqlSession originSession, CqlSession targetSession, SparkConf sc) { + public AbstractJobSession getInstance(CqlSession originSession, CqlSession targetSession, + SparkConf sc) { if (jobSession == null) { synchronized (CopyJobSession.class) { if (jobSession == null) { diff --git a/src/main/java/com/datastax/cdm/job/DiffJobSession.java b/src/main/java/com/datastax/cdm/job/DiffJobSession.java index 643ff691..9669fedb 100644 --- a/src/main/java/com/datastax/cdm/job/DiffJobSession.java +++ 
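
The write path above accumulates bound statements into an unlogged batch, sends each full batch asynchronously, and joins all in-flight futures when flushing. The same shape, reduced to stand-in types (no driver involved; executeAsync here only fakes the I/O):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.CompletableFuture;

    // Shape of the writeAsync/flushAndClearWrites batching used above,
    // with driver types replaced by simple stand-ins.
    public class BatchWriteSketch {
        static final int BATCH_SIZE = 5;

        static CompletableFuture<Void> executeAsync(List<Integer> batch) {
            System.out.println("flushing " + batch.size() + " statements");
            return CompletableFuture.completedFuture(null); // stands in for real async I/O
        }

        public static void main(String[] args) {
            List<Integer> batch = new ArrayList<>();
            List<CompletableFuture<Void>> inFlight = new ArrayList<>();

            for (int stmt = 0; stmt < 12; stmt++) {
                batch.add(stmt);
                if (batch.size() >= BATCH_SIZE) {      // batch full: send it async
                    inFlight.add(executeAsync(new ArrayList<>(batch)));
                    batch.clear();                     // start a fresh batch
                }
            }
            if (!batch.isEmpty()) {                    // flush the final partial batch
                inFlight.add(executeAsync(new ArrayList<>(batch)));
            }
            inFlight.forEach(CompletableFuture::join); // wait, as flushAndClearWrites does
        }
    }
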
b/src/main/java/com/datastax/cdm/job/DiffJobSession.java @@ -52,300 +52,300 @@ import com.datastax.oss.driver.api.core.type.DataType; public class DiffJobSession extends CopyJobSession { - protected final Boolean autoCorrectMissing; - protected final Boolean autoCorrectMismatch; - private final boolean isCounterTable; - private final boolean forceCounterWhenMissing; - private final List targetColumnNames; - private final List targetColumnTypes; - private final List originColumnTypes; - private final int explodeMapKeyIndex; - private final int explodeMapValueIndex; - private final List constantColumnIndexes; - public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - boolean logDebug = logger.isDebugEnabled(); - boolean logTrace = logger.isTraceEnabled(); - private ExtractJson extractJsonFeature; - private boolean overwriteTarget; - - public DiffJobSession(CqlSession originSession, CqlSession targetSession, SparkConf sc) { - super(originSession, targetSession, sc); - this.jobCounter.setRegisteredTypes(JobCounter.CounterType.READ, JobCounter.CounterType.VALID, - JobCounter.CounterType.MISMATCH, JobCounter.CounterType.CORRECTED_MISMATCH, - JobCounter.CounterType.MISSING, JobCounter.CounterType.CORRECTED_MISSING, - JobCounter.CounterType.SKIPPED); - - autoCorrectMissing = propertyHelper.getBoolean(KnownProperties.AUTOCORRECT_MISSING); - logger.info("PARAM -- Autocorrect Missing: {}", autoCorrectMissing); - - autoCorrectMismatch = propertyHelper.getBoolean(KnownProperties.AUTOCORRECT_MISMATCH); - logger.info("PARAM -- Autocorrect Mismatch: {}", autoCorrectMismatch); - - this.isCounterTable = this.originSession.getCqlTable().isCounterTable(); - this.forceCounterWhenMissing = propertyHelper.getBoolean(KnownProperties.AUTOCORRECT_MISSING_COUNTER); - this.targetColumnNames = this.targetSession.getCqlTable().getColumnNames(false); - this.targetColumnTypes = this.targetSession.getCqlTable().getColumnCqlTypes(); - this.originColumnTypes = this.originSession.getCqlTable().getColumnCqlTypes(); - - ConstantColumns constantColumnsFeature = (ConstantColumns) this.targetSession.getCqlTable() - .getFeature(Featureset.CONSTANT_COLUMNS); - if (null != constantColumnsFeature && constantColumnsFeature.isEnabled()) { - constantColumnIndexes = constantColumnsFeature.getNames().stream().map(targetColumnNames::indexOf) - .collect(Collectors.toList()); - if (logDebug) - logger.debug("Constant Column Indexes {}", this.constantColumnIndexes); - } else { - constantColumnIndexes = Collections.emptyList(); - } - - ExplodeMap explodeMapFeature = (ExplodeMap) this.targetSession.getCqlTable().getFeature(Featureset.EXPLODE_MAP); - if (null != explodeMapFeature && explodeMapFeature.isEnabled()) { - this.explodeMapKeyIndex = this.targetSession.getCqlTable().indexOf(explodeMapFeature.getKeyColumnName()); - this.explodeMapValueIndex = this.targetSession.getCqlTable() - .indexOf(explodeMapFeature.getValueColumnName()); - if (logDebug) - logger.debug("Explode Map KeyIndex={}, ValueIndex={}", this.explodeMapKeyIndex, - this.explodeMapValueIndex); - } else { - this.explodeMapKeyIndex = -1; - this.explodeMapValueIndex = -1; - } - - extractJsonFeature = (ExtractJson) this.targetSession.getCqlTable().getFeature(Featureset.EXTRACT_JSON); - overwriteTarget = extractJsonFeature.isEnabled() && extractJsonFeature.overwriteTarget(); - - logger.info("CQL -- origin select: {}", this.originSession.getOriginSelectByPartitionRangeStatement().getCQL()); - logger.info("CQL -- target select: {}", 
this.targetSession.getTargetSelectByPKStatement().getCQL()); - logger.info("CQL -- target upsert: {}", this.targetSession.getTargetUpsertStatement().getCQL()); - } - - @Override - public void processSlice(SplitPartitions.Partition slice) { - this.getDataAndDiff(slice.getMin(), slice.getMax()); - } - - @Override - public synchronized void initCdmRun(Collection parts, TrackRun trackRunFeature) { - this.trackRunFeature = trackRunFeature; - if (null != trackRunFeature) - trackRunFeature.initCdmRun(parts, TrackRun.RUN_TYPE.DIFF_DATA); - } - - private void getDataAndDiff(BigInteger min, BigInteger max) { - ThreadContext.put(THREAD_CONTEXT_LABEL, getThreadLabel(min, max)); - logger.info("ThreadID: {} Processing min: {} max: {}", Thread.currentThread().getId(), min, max); - if (null != trackRunFeature) - trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.STARTED); - - AtomicBoolean hasDiff = new AtomicBoolean(false); - try { - jobCounter.threadReset(); - - PKFactory pkFactory = originSession.getPKFactory(); - OriginSelectByPartitionRangeStatement originSelectByPartitionRangeStatement = originSession - .getOriginSelectByPartitionRangeStatement(); - ResultSet resultSet = originSelectByPartitionRangeStatement - .execute(originSelectByPartitionRangeStatement.bind(min, max)); - TargetSelectByPKStatement targetSelectByPKStatement = targetSession.getTargetSelectByPKStatement(); - Integer fetchSizeInRows = originSession.getCqlTable().getFetchSizeInRows(); - - List recordsToDiff = new ArrayList<>(fetchSizeInRows); - StreamSupport.stream(resultSet.spliterator(), false).forEach(originRow -> { - rateLimiterOrigin.acquire(1); - Record record = new Record(pkFactory.getTargetPK(originRow), originRow, null); - jobCounter.threadIncrement(JobCounter.CounterType.READ); - - if (originSelectByPartitionRangeStatement.shouldFilterRecord(record)) { - jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); - } else { - for (Record r : pkFactory.toValidRecordList(record)) { - - if (guardrailEnabled) { - String guardrailCheck = guardrailFeature.guardrailChecks(r); - if (guardrailCheck != null && guardrailCheck != Guardrail.CLEAN_CHECK) { - logger.error("Guardrails failed for PrimaryKey {}; {}", r.getPk(), guardrailCheck); - jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); - continue; - } - } - - rateLimiterTarget.acquire(1); - CompletionStage targetResult = targetSelectByPKStatement - .getAsyncResult(r.getPk()); - - if (null == targetResult) { - jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); - } else { - r.setAsyncTargetRow(targetResult); - recordsToDiff.add(r); - if (recordsToDiff.size() > fetchSizeInRows) { - if (diffAndClear(recordsToDiff)) { - hasDiff.set(true); - } - } - } // targetRecord!=null - } // recordSet iterator - } // shouldFilterRecord - }); - if (diffAndClear(recordsToDiff)) { - hasDiff.set(true); - } - - if (hasDiff.get() && null != trackRunFeature) { - trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.DIFF); - } else if (null != trackRunFeature) { - trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.PASS); - } - } catch (Exception e) { - logger.error("Error with PartitionRange -- ThreadID: {} Processing min: {} max: {}", - Thread.currentThread().getId(), min, max, e); - if (null != trackRunFeature) - trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.FAIL); - } finally { - jobCounter.globalIncrement(); - printCounts(false); - } - } - - private boolean diffAndClear(List recordsToDiff) { - boolean isDiff = recordsToDiff.stream().map(r -> diff(r)).filter(b -> b == 
true).count() > 0; - recordsToDiff.clear(); - return isDiff; - } - - private boolean diff(Record record) { - if (record.getTargetRow() == null) { - jobCounter.threadIncrement(JobCounter.CounterType.MISSING); - logger.error("Missing target row found for key: {}", record.getPk()); - if (autoCorrectMissing && isCounterTable && !forceCounterWhenMissing) { - logger.error("{} is true, but not Inserting as {} is not enabled; key : {}", - KnownProperties.AUTOCORRECT_MISSING, KnownProperties.AUTOCORRECT_MISSING_COUNTER, - record.getPk()); - return true; - } - - // correct data - if (autoCorrectMissing) { - rateLimiterTarget.acquire(1); - targetSession.getTargetUpsertStatement().putRecord(record); - jobCounter.threadIncrement(JobCounter.CounterType.CORRECTED_MISSING); - logger.error("Inserted missing row in target: {}", record.getPk()); - } - return true; - } - - String diffData = isDifferent(record); - if (!diffData.isEmpty()) { - jobCounter.threadIncrement(JobCounter.CounterType.MISMATCH); - logger.error("Mismatch row found for key: {} Mismatch: {}", record.getPk(), diffData); - - if (autoCorrectMismatch) { - rateLimiterTarget.acquire(1); - targetSession.getTargetUpsertStatement().putRecord(record); - jobCounter.threadIncrement(JobCounter.CounterType.CORRECTED_MISMATCH); - logger.error("Corrected mismatch row in target: {}", record.getPk()); - } - - return true; - } else { - jobCounter.threadIncrement(JobCounter.CounterType.VALID); - return false; - } - } - - private String isDifferent(Record record) { - EnhancedPK pk = record.getPk(); - Row originRow = record.getOriginRow(); - Row targetRow = record.getTargetRow(); - - StringBuffer diffData = new StringBuffer(); - IntStream.range(0, targetColumnNames.size()).parallel().forEach(targetIndex -> { - String previousLabel = ThreadContext.get(THREAD_CONTEXT_LABEL); - try { - ThreadContext.put(THREAD_CONTEXT_LABEL, pk + ":" + targetColumnNames.get(targetIndex)); - Object origin = null; - int originIndex = -2; // this to distinguish default from indexOf result - Object targetAsOriginType = null; - try { - if (constantColumnIndexes.contains(targetIndex)) { - if (logTrace) - logger.trace("PK {}, targetIndex {} skipping constant column {}", pk, targetIndex, - targetColumnNames.get(targetIndex)); - return; // nothing to compare in origin - } - - targetAsOriginType = targetSession.getCqlTable().getAndConvertData(targetIndex, targetRow); - if (targetIndex == extractJsonFeature.getTargetColumnIndex()) { - if (!overwriteTarget && null != targetAsOriginType) { - return; // skip validation when target has data - } - originIndex = extractJsonFeature.getOriginColumnIndex(); - origin = extractJsonFeature.extract(originRow.getString(originIndex)); - } else { - originIndex = targetSession.getCqlTable().getCorrespondingIndex(targetIndex); - if (originIndex >= 0) { - origin = originSession.getCqlTable().getData(originIndex, originRow); - if (logTrace) - logger.trace( - "PK {}, targetIndex {} column {} using value from origin table at index {}: {}", - pk, targetIndex, targetColumnNames.get(targetIndex), originIndex, origin); - } else if (targetIndex == explodeMapKeyIndex) { - origin = pk.getExplodeMapKey(); - if (logTrace) - logger.trace("PK {}, targetIndex {} column {} using explodeMapKey stored on PK: {}", pk, - targetIndex, targetColumnNames.get(targetIndex), origin); - } else if (targetIndex == explodeMapValueIndex) { - origin = pk.getExplodeMapValue(); - if (logTrace) - logger.trace("PK {}, targetIndex {} column {} using explodeMapValue stored on PK: {}", - pk, 
targetIndex, targetColumnNames.get(targetIndex), origin); - } else if (targetIndex == extractJsonFeature.getTargetColumnIndex()) { - originIndex = extractJsonFeature.getOriginColumnIndex(); - } else { - throw new RuntimeException("Target column \"" + targetColumnNames.get(targetIndex) - + "\" at index " + targetIndex - + " cannot be found on Origin, and is neither a constant column (indexes:" - + constantColumnIndexes + ") nor an explode map column (keyIndex:" - + explodeMapKeyIndex + ", valueIndex:" + explodeMapValueIndex + ")"); - } - } - - if (logDebug) - logger.debug( - "Diff PK {}, target/origin index: {}/{} target/origin column: {}/{} target/origin value: {}/{}", - pk, targetIndex, originIndex, targetColumnNames.get(targetIndex), - originIndex < 0 ? "null" - : originSession.getCqlTable().getColumnNames(false).get(originIndex), - targetAsOriginType, origin); - if (null != origin && DataUtility.diff(origin, targetAsOriginType)) { - String originContent = CqlData - .getFormattedContent(CqlData.toType(originColumnTypes.get(originIndex)), origin); - String targetContent = CqlData.getFormattedContent( - CqlData.toType(targetColumnTypes.get(targetIndex)), targetAsOriginType); - diffData.append("Target column:").append(targetColumnNames.get(targetIndex)).append("-origin[") - .append(originContent).append("]").append("-target[").append(targetContent) - .append("]; "); - } else if (null == origin && null != targetAsOriginType) { - diffData.append("Target column:").append(targetColumnNames.get(targetIndex)) - .append(" origin is null, target is ").append(targetAsOriginType).append("; "); - } - } catch (Exception e) { - String exceptionName; - String myClassMethodLine = DataUtility.getMyClassMethodLine(e); - if (e instanceof ArrayIndexOutOfBoundsException) { - exceptionName = "ArrayIndexOutOfBoundsException@" + myClassMethodLine; - } else { - exceptionName = e + "@" + myClassMethodLine; - } - diffData.append("Target column:").append(targetColumnNames.get(targetIndex)).append(" Exception ") - .append(exceptionName).append(" targetIndex:").append(targetIndex).append(" originIndex:") - .append(originIndex).append("; "); - } - } finally { - ThreadContext.put(THREAD_CONTEXT_LABEL, previousLabel); - } - }); - return diffData.toString(); - } + protected final Boolean autoCorrectMissing; + protected final Boolean autoCorrectMismatch; + private final boolean isCounterTable; + private final boolean forceCounterWhenMissing; + private final List targetColumnNames; + private final List targetColumnTypes; + private final List originColumnTypes; + private final int explodeMapKeyIndex; + private final int explodeMapValueIndex; + private final List constantColumnIndexes; + public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + boolean logDebug = logger.isDebugEnabled(); + boolean logTrace = logger.isTraceEnabled(); + private ExtractJson extractJsonFeature; + private boolean overwriteTarget; + + public DiffJobSession(CqlSession originSession, CqlSession targetSession, SparkConf sc) { + super(originSession, targetSession, sc); + this.jobCounter.setRegisteredTypes(JobCounter.CounterType.READ, JobCounter.CounterType.VALID, + JobCounter.CounterType.MISMATCH, JobCounter.CounterType.CORRECTED_MISMATCH, + JobCounter.CounterType.MISSING, JobCounter.CounterType.CORRECTED_MISSING, + JobCounter.CounterType.SKIPPED); + + autoCorrectMissing = propertyHelper.getBoolean(KnownProperties.AUTOCORRECT_MISSING); + logger.info("PARAM -- Autocorrect Missing: {}", autoCorrectMissing); + + autoCorrectMismatch = 
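
The diff(...) method above reduces to a small decision tree: a null target row is a MISSING (optionally re-inserted, except for counter tables unless the counter override is enabled), a non-empty mismatch string is a MISMATCH (optionally re-upserted), and everything else is VALID. A sketch of just those branches, with stand-in types and a no-op upsert():

    // Decision flow of diff(...) above, reduced to its branches. The row type
    // and the upsert() call are stand-ins, not the CDM API.
    public class DiffDecisionSketch {

        enum Outcome { VALID, MISSING, MISSING_CORRECTED, MISMATCH, MISMATCH_CORRECTED }

        static Outcome diff(Object targetRow, String mismatchDetail,
                boolean autoCorrectMissing, boolean autoCorrectMismatch,
                boolean isCounterTable, boolean forceCounterWhenMissing) {
            if (targetRow == null) {
                // counter tables are only re-inserted when explicitly forced
                if (autoCorrectMissing && !(isCounterTable && !forceCounterWhenMissing)) {
                    upsert();
                    return Outcome.MISSING_CORRECTED;
                }
                return Outcome.MISSING;
            }
            if (!mismatchDetail.isEmpty()) {
                if (autoCorrectMismatch) {
                    upsert();
                    return Outcome.MISMATCH_CORRECTED;
                }
                return Outcome.MISMATCH;
            }
            return Outcome.VALID;
        }

        static void upsert() { /* write the origin row to target */ }

        public static void main(String[] args) {
            System.out.println(diff(null, "", true, true, false, false)); // MISSING_CORRECTED
            System.out.println(diff(null, "", true, true, true, false));  // MISSING (counter guard)
            System.out.println(diff(new Object(), "col x differs", false, false, false, false)); // MISMATCH
            System.out.println(diff(new Object(), "", false, false, false, false)); // VALID
        }
    }
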
propertyHelper.getBoolean(KnownProperties.AUTOCORRECT_MISMATCH); + logger.info("PARAM -- Autocorrect Mismatch: {}", autoCorrectMismatch); + + this.isCounterTable = this.originSession.getCqlTable().isCounterTable(); + this.forceCounterWhenMissing = propertyHelper.getBoolean(KnownProperties.AUTOCORRECT_MISSING_COUNTER); + this.targetColumnNames = this.targetSession.getCqlTable().getColumnNames(false); + this.targetColumnTypes = this.targetSession.getCqlTable().getColumnCqlTypes(); + this.originColumnTypes = this.originSession.getCqlTable().getColumnCqlTypes(); + + ConstantColumns constantColumnsFeature = (ConstantColumns) this.targetSession.getCqlTable() + .getFeature(Featureset.CONSTANT_COLUMNS); + if (null != constantColumnsFeature && constantColumnsFeature.isEnabled()) { + constantColumnIndexes = constantColumnsFeature.getNames().stream().map(targetColumnNames::indexOf) + .collect(Collectors.toList()); + if (logDebug) + logger.debug("Constant Column Indexes {}", this.constantColumnIndexes); + } else { + constantColumnIndexes = Collections.emptyList(); + } + + ExplodeMap explodeMapFeature = (ExplodeMap) this.targetSession.getCqlTable().getFeature(Featureset.EXPLODE_MAP); + if (null != explodeMapFeature && explodeMapFeature.isEnabled()) { + this.explodeMapKeyIndex = this.targetSession.getCqlTable().indexOf(explodeMapFeature.getKeyColumnName()); + this.explodeMapValueIndex = this.targetSession.getCqlTable() + .indexOf(explodeMapFeature.getValueColumnName()); + if (logDebug) + logger.debug("Explode Map KeyIndex={}, ValueIndex={}", this.explodeMapKeyIndex, + this.explodeMapValueIndex); + } else { + this.explodeMapKeyIndex = -1; + this.explodeMapValueIndex = -1; + } + + extractJsonFeature = (ExtractJson) this.targetSession.getCqlTable().getFeature(Featureset.EXTRACT_JSON); + overwriteTarget = extractJsonFeature.isEnabled() && extractJsonFeature.overwriteTarget(); + + logger.info("CQL -- origin select: {}", this.originSession.getOriginSelectByPartitionRangeStatement().getCQL()); + logger.info("CQL -- target select: {}", this.targetSession.getTargetSelectByPKStatement().getCQL()); + logger.info("CQL -- target upsert: {}", this.targetSession.getTargetUpsertStatement().getCQL()); + } + + @Override + public void processSlice(SplitPartitions.Partition slice) { + this.getDataAndDiff(slice.getMin(), slice.getMax()); + } + + @Override + public synchronized void initCdmRun(Collection parts, TrackRun trackRunFeature) { + this.trackRunFeature = trackRunFeature; + if (null != trackRunFeature) + trackRunFeature.initCdmRun(parts, TrackRun.RUN_TYPE.DIFF_DATA); + } + + private void getDataAndDiff(BigInteger min, BigInteger max) { + ThreadContext.put(THREAD_CONTEXT_LABEL, getThreadLabel(min, max)); + logger.info("ThreadID: {} Processing min: {} max: {}", Thread.currentThread().getId(), min, max); + if (null != trackRunFeature) + trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.STARTED); + + AtomicBoolean hasDiff = new AtomicBoolean(false); + try { + jobCounter.threadReset(); + + PKFactory pkFactory = originSession.getPKFactory(); + OriginSelectByPartitionRangeStatement originSelectByPartitionRangeStatement = originSession + .getOriginSelectByPartitionRangeStatement(); + ResultSet resultSet = originSelectByPartitionRangeStatement + .execute(originSelectByPartitionRangeStatement.bind(min, max)); + TargetSelectByPKStatement targetSelectByPKStatement = targetSession.getTargetSelectByPKStatement(); + Integer fetchSizeInRows = originSession.getCqlTable().getFetchSizeInRows(); + + List recordsToDiff = new 
ArrayList<>(fetchSizeInRows); + StreamSupport.stream(resultSet.spliterator(), false).forEach(originRow -> { + rateLimiterOrigin.acquire(1); + Record record = new Record(pkFactory.getTargetPK(originRow), originRow, null); + jobCounter.threadIncrement(JobCounter.CounterType.READ); + + if (originSelectByPartitionRangeStatement.shouldFilterRecord(record)) { + jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); + } else { + for (Record r : pkFactory.toValidRecordList(record)) { + + if (guardrailEnabled) { + String guardrailCheck = guardrailFeature.guardrailChecks(r); + if (guardrailCheck != null && guardrailCheck != Guardrail.CLEAN_CHECK) { + logger.error("Guardrails failed for PrimaryKey {}; {}", r.getPk(), guardrailCheck); + jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); + continue; + } + } + + rateLimiterTarget.acquire(1); + CompletionStage targetResult = targetSelectByPKStatement + .getAsyncResult(r.getPk()); + + if (null == targetResult) { + jobCounter.threadIncrement(JobCounter.CounterType.SKIPPED); + } else { + r.setAsyncTargetRow(targetResult); + recordsToDiff.add(r); + if (recordsToDiff.size() > fetchSizeInRows) { + if (diffAndClear(recordsToDiff)) { + hasDiff.set(true); + } + } + } // targetRecord!=null + } // recordSet iterator + } // shouldFilterRecord + }); + if (diffAndClear(recordsToDiff)) { + hasDiff.set(true); + } + + if (hasDiff.get() && null != trackRunFeature) { + trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.DIFF); + } else if (null != trackRunFeature) { + trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.PASS); + } + } catch (Exception e) { + logger.error("Error with PartitionRange -- ThreadID: {} Processing min: {} max: {}", + Thread.currentThread().getId(), min, max, e); + if (null != trackRunFeature) + trackRunFeature.updateCdmRun(min, TrackRun.RUN_STATUS.FAIL); + } finally { + jobCounter.globalIncrement(); + printCounts(false); + } + } + + private boolean diffAndClear(List recordsToDiff) { + boolean isDiff = recordsToDiff.stream().map(r -> diff(r)).filter(b -> b == true).count() > 0; + recordsToDiff.clear(); + return isDiff; + } + + private boolean diff(Record record) { + if (record.getTargetRow() == null) { + jobCounter.threadIncrement(JobCounter.CounterType.MISSING); + logger.error("Missing target row found for key: {}", record.getPk()); + if (autoCorrectMissing && isCounterTable && !forceCounterWhenMissing) { + logger.error("{} is true, but not Inserting as {} is not enabled; key : {}", + KnownProperties.AUTOCORRECT_MISSING, KnownProperties.AUTOCORRECT_MISSING_COUNTER, + record.getPk()); + return true; + } + + // correct data + if (autoCorrectMissing) { + rateLimiterTarget.acquire(1); + targetSession.getTargetUpsertStatement().putRecord(record); + jobCounter.threadIncrement(JobCounter.CounterType.CORRECTED_MISSING); + logger.error("Inserted missing row in target: {}", record.getPk()); + } + return true; + } + + String diffData = isDifferent(record); + if (!diffData.isEmpty()) { + jobCounter.threadIncrement(JobCounter.CounterType.MISMATCH); + logger.error("Mismatch row found for key: {} Mismatch: {}", record.getPk(), diffData); + + if (autoCorrectMismatch) { + rateLimiterTarget.acquire(1); + targetSession.getTargetUpsertStatement().putRecord(record); + jobCounter.threadIncrement(JobCounter.CounterType.CORRECTED_MISMATCH); + logger.error("Corrected mismatch row in target: {}", record.getPk()); + } + + return true; + } else { + jobCounter.threadIncrement(JobCounter.CounterType.VALID); + return false; + } + } + + private String 
isDifferent(Record record) { + EnhancedPK pk = record.getPk(); + Row originRow = record.getOriginRow(); + Row targetRow = record.getTargetRow(); + + StringBuffer diffData = new StringBuffer(); + IntStream.range(0, targetColumnNames.size()).parallel().forEach(targetIndex -> { + String previousLabel = ThreadContext.get(THREAD_CONTEXT_LABEL); + try { + ThreadContext.put(THREAD_CONTEXT_LABEL, pk + ":" + targetColumnNames.get(targetIndex)); + Object origin = null; + int originIndex = -2; // this to distinguish default from indexOf result + Object targetAsOriginType = null; + try { + if (constantColumnIndexes.contains(targetIndex)) { + if (logTrace) + logger.trace("PK {}, targetIndex {} skipping constant column {}", pk, targetIndex, + targetColumnNames.get(targetIndex)); + return; // nothing to compare in origin + } + + targetAsOriginType = targetSession.getCqlTable().getAndConvertData(targetIndex, targetRow); + if (targetIndex == extractJsonFeature.getTargetColumnIndex()) { + if (!overwriteTarget && null != targetAsOriginType) { + return; // skip validation when target has data + } + originIndex = extractJsonFeature.getOriginColumnIndex(); + origin = extractJsonFeature.extract(originRow.getString(originIndex)); + } else { + originIndex = targetSession.getCqlTable().getCorrespondingIndex(targetIndex); + if (originIndex >= 0) { + origin = originSession.getCqlTable().getData(originIndex, originRow); + if (logTrace) + logger.trace( + "PK {}, targetIndex {} column {} using value from origin table at index {}: {}", + pk, targetIndex, targetColumnNames.get(targetIndex), originIndex, origin); + } else if (targetIndex == explodeMapKeyIndex) { + origin = pk.getExplodeMapKey(); + if (logTrace) + logger.trace("PK {}, targetIndex {} column {} using explodeMapKey stored on PK: {}", pk, + targetIndex, targetColumnNames.get(targetIndex), origin); + } else if (targetIndex == explodeMapValueIndex) { + origin = pk.getExplodeMapValue(); + if (logTrace) + logger.trace("PK {}, targetIndex {} column {} using explodeMapValue stored on PK: {}", + pk, targetIndex, targetColumnNames.get(targetIndex), origin); + } else if (targetIndex == extractJsonFeature.getTargetColumnIndex()) { + originIndex = extractJsonFeature.getOriginColumnIndex(); + } else { + throw new RuntimeException("Target column \"" + targetColumnNames.get(targetIndex) + + "\" at index " + targetIndex + + " cannot be found on Origin, and is neither a constant column (indexes:" + + constantColumnIndexes + ") nor an explode map column (keyIndex:" + + explodeMapKeyIndex + ", valueIndex:" + explodeMapValueIndex + ")"); + } + } + + if (logDebug) + logger.debug( + "Diff PK {}, target/origin index: {}/{} target/origin column: {}/{} target/origin value: {}/{}", + pk, targetIndex, originIndex, targetColumnNames.get(targetIndex), + originIndex < 0 ? 
"null" + : originSession.getCqlTable().getColumnNames(false).get(originIndex), + targetAsOriginType, origin); + if (null != origin && DataUtility.diff(origin, targetAsOriginType)) { + String originContent = CqlData + .getFormattedContent(CqlData.toType(originColumnTypes.get(originIndex)), origin); + String targetContent = CqlData.getFormattedContent( + CqlData.toType(targetColumnTypes.get(targetIndex)), targetAsOriginType); + diffData.append("Target column:").append(targetColumnNames.get(targetIndex)).append("-origin[") + .append(originContent).append("]").append("-target[").append(targetContent) + .append("]; "); + } else if (null == origin && null != targetAsOriginType) { + diffData.append("Target column:").append(targetColumnNames.get(targetIndex)) + .append(" origin is null, target is ").append(targetAsOriginType).append("; "); + } + } catch (Exception e) { + String exceptionName; + String myClassMethodLine = DataUtility.getMyClassMethodLine(e); + if (e instanceof ArrayIndexOutOfBoundsException) { + exceptionName = "ArrayIndexOutOfBoundsException@" + myClassMethodLine; + } else { + exceptionName = e + "@" + myClassMethodLine; + } + diffData.append("Target column:").append(targetColumnNames.get(targetIndex)).append(" Exception ") + .append(exceptionName).append(" targetIndex:").append(targetIndex).append(" originIndex:") + .append(originIndex).append("; "); + } + } finally { + ThreadContext.put(THREAD_CONTEXT_LABEL, previousLabel); + } + }); + return diffData.toString(); + } } diff --git a/src/main/java/com/datastax/cdm/job/DiffJobSessionFactory.java b/src/main/java/com/datastax/cdm/job/DiffJobSessionFactory.java index 68370a50..84434a05 100644 --- a/src/main/java/com/datastax/cdm/job/DiffJobSessionFactory.java +++ b/src/main/java/com/datastax/cdm/job/DiffJobSessionFactory.java @@ -15,13 +15,15 @@ */ package com.datastax.cdm.job; -import com.datastax.oss.driver.api.core.CqlSession; import org.apache.spark.SparkConf; +import com.datastax.oss.driver.api.core.CqlSession; + public class DiffJobSessionFactory implements IJobSessionFactory { private static DiffJobSession jobSession = null; - public AbstractJobSession getInstance(CqlSession originSession, CqlSession targetSession, SparkConf sc) { + public AbstractJobSession getInstance(CqlSession originSession, CqlSession targetSession, + SparkConf sc) { if (jobSession == null) { synchronized (DiffJobSession.class) { if (jobSession == null) { diff --git a/src/main/java/com/datastax/cdm/job/GuardrailCheckJobSession.java b/src/main/java/com/datastax/cdm/job/GuardrailCheckJobSession.java index a0cce186..af22cd41 100644 --- a/src/main/java/com/datastax/cdm/job/GuardrailCheckJobSession.java +++ b/src/main/java/com/datastax/cdm/job/GuardrailCheckJobSession.java @@ -15,17 +15,18 @@ */ package com.datastax.cdm.job; -import com.datastax.cdm.cql.statement.OriginSelectByPartitionRangeStatement; -import com.datastax.cdm.data.PKFactory; -import com.datastax.cdm.data.Record; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.cql.*; +import java.math.BigInteger; + import org.apache.logging.log4j.ThreadContext; import org.apache.spark.SparkConf; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.math.BigInteger; +import com.datastax.cdm.cql.statement.OriginSelectByPartitionRangeStatement; +import com.datastax.cdm.data.PKFactory; +import com.datastax.cdm.data.Record; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.*; public class 
GuardrailCheckJobSession extends AbstractJobSession { @@ -35,7 +36,8 @@ public class GuardrailCheckJobSession extends AbstractJobSession { private static GuardrailCheckJobSession jobSession = null; - public AbstractJobSession getInstance(CqlSession originSession, CqlSession targetSession, SparkConf sc) { + public AbstractJobSession getInstance(CqlSession originSession, CqlSession targetSession, + SparkConf sc) { if (jobSession == null) { synchronized (GuardrailCheckJobSession.class) { if (jobSession == null) { diff --git a/src/main/java/com/datastax/cdm/job/IJobSessionFactory.java b/src/main/java/com/datastax/cdm/job/IJobSessionFactory.java index b3bc25ab..907eec3a 100644 --- a/src/main/java/com/datastax/cdm/job/IJobSessionFactory.java +++ b/src/main/java/com/datastax/cdm/job/IJobSessionFactory.java @@ -15,9 +15,10 @@ */ package com.datastax.cdm.job; -import com.datastax.oss.driver.api.core.CqlSession; import org.apache.spark.SparkConf; +import com.datastax.oss.driver.api.core.CqlSession; + public interface IJobSessionFactory { AbstractJobSession getInstance(CqlSession originSession, CqlSession targetSession, SparkConf sc); } diff --git a/src/main/java/com/datastax/cdm/job/JobCounter.java b/src/main/java/com/datastax/cdm/job/JobCounter.java index 05dd0c56..59d595a6 100644 --- a/src/main/java/com/datastax/cdm/job/JobCounter.java +++ b/src/main/java/com/datastax/cdm/job/JobCounter.java @@ -25,209 +25,209 @@ public class JobCounter { - // Enumeration for counter types - public enum CounterType { - READ, WRITE, VALID, ERROR, MISMATCH, MISSING, CORRECTED_MISSING, CORRECTED_MISMATCH, SKIPPED, UNFLUSHED, LARGE - } - - // Logger instance - private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - - // Internal class to handle atomic counting operations - private static class CounterUnit { - private final AtomicLong globalCounter = new AtomicLong(0); - private final ThreadLocal threadLocalCounter = ThreadLocal.withInitial(() -> 0L); - - public void incrementThreadCounter(long incrementBy) { - threadLocalCounter.set(threadLocalCounter.get() + incrementBy); - } - - public long getThreadCounter() { - return threadLocalCounter.get(); - } - - public void resetThreadCounter() { - threadLocalCounter.set(0L); - } - - public void setGlobalCounter(long value) { - globalCounter.set(value); - } - - public void addThreadToGlobalCounter() { - globalCounter.addAndGet(threadLocalCounter.get()); - } - - public long getGlobalCounter() { - return globalCounter.get(); - } - } - - // Declare individual counters for different operations - private final HashMap counterMap = new HashMap<>(); - - // Variables to hold lock objects and registered types - private final Object globalLock = new Object(); - private final boolean printPerThread; - private final long printStatsAfter; - private final CounterUnit printCounter = new CounterUnit(); - - // Constructor - public JobCounter(long printStatsAfter, boolean printStatsPerPart) { - this.printStatsAfter = printStatsAfter; - this.printPerThread = printStatsPerPart; - } - - // Allows setting the registered counter types. - public void setRegisteredTypes(CounterType... 
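
Each job-session factory above caches a single session behind double-checked locking. The generic shape of that pattern as a sketch; note that the canonical version also marks the field volatile, so the unsynchronized fast path cannot observe a partially constructed object:

    // Generic shape of the factory classes above: one lazily created instance,
    // guarded by double-checked locking. The volatile modifier is what the
    // canonical pattern requires for a safe unsynchronized fast path.
    public class SingletonFactorySketch {
        private static volatile SingletonFactorySketch instance = null;

        private SingletonFactorySketch() {
            System.out.println("constructed once");
        }

        public static SingletonFactorySketch getInstance() {
            if (instance == null) {                       // fast path, no lock
                synchronized (SingletonFactorySketch.class) {
                    if (instance == null) {               // re-check under the lock
                        instance = new SingletonFactorySketch();
                    }
                }
            }
            return instance;
        }

        public static void main(String[] args) {
            getInstance();
            getInstance(); // second call reuses the same instance
        }
    }
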
registeredTypes) { - counterMap.clear(); - for (CounterType type : registeredTypes) { - counterMap.put(type, new CounterUnit()); - } - } - - // Utility method to fetch the appropriate counter unit based on type - private CounterUnit getCounterUnit(CounterType counterType) { - if (!counterMap.containsKey(counterType)) { - throw new IllegalArgumentException("CounterType " + counterType + " is not registered"); - } - return (counterMap.get(counterType)); - } - - // Method to get a counter's value - public long getCount(CounterType counterType, boolean global) { - return global ? getCounterUnit(counterType).getGlobalCounter() : getCounterUnit(counterType).getThreadCounter(); - } - - // Method to get a thread counter's value - public long getCount(CounterType counterType) { - return getCount(counterType, false); - } - - // Method to reset thread-specific counters for given type - public void threadReset(CounterType counterType) { - getCounterUnit(counterType).resetThreadCounter(); - } - - // Method to reset thread-specific counters for all registered types - public void threadReset() { - for (CounterType type : counterMap.keySet()) { - threadReset(type); - } - } - - // Method to increment thread-specific counters by a given value - public void threadIncrement(CounterType counterType, long incrementBy) { - getCounterUnit(counterType).incrementThreadCounter(incrementBy); - } - - // Method to increment thread-specific counters by 1 - public void threadIncrement(CounterType counterType) { - threadIncrement(counterType, 1); - } - - // Method to increment global counters based on thread-specific counters - public void globalIncrement() { - synchronized (globalLock) { - for (CounterType type : counterMap.keySet()) { - getCounterUnit(type).addThreadToGlobalCounter(); - } - } - } - - // Method to get current counts (both thread-specific and global) as a formatted - // string - public String getThreadCounters(boolean global) { - StringBuilder sb = new StringBuilder(); - for (CounterType type : counterMap.keySet()) { - long value = global ? 
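
CounterUnit above pairs a shared AtomicLong with a ThreadLocal tally, so hot-path increments stay thread-local and the shared counter is only touched once per slice. A reduced, runnable version of that idea (the reset-on-fold here is a simplification; CDM resets thread counters in a separate threadReset step):

    import java.util.concurrent.atomic.AtomicLong;

    // Reduced version of CounterUnit above: each thread accumulates locally,
    // then folds its tally into the shared AtomicLong in one step.
    public class CounterUnitSketch {
        private final AtomicLong global = new AtomicLong(0);
        private final ThreadLocal<Long> local = ThreadLocal.withInitial(() -> 0L);

        void threadIncrement(long by) { local.set(local.get() + by); }

        void addThreadToGlobal() {
            global.addAndGet(local.get()); // one atomic op per slice, not per row
            local.set(0L);                 // reset so the same thread can start a new slice
        }

        long globalCount() { return global.get(); }

        public static void main(String[] args) throws InterruptedException {
            CounterUnitSketch counter = new CounterUnitSketch();
            Runnable worker = () -> {
                for (int i = 0; i < 1000; i++) counter.threadIncrement(1);
                counter.addThreadToGlobal();
            };
            Thread a = new Thread(worker), b = new Thread(worker);
            a.start(); b.start(); a.join(); b.join();
            System.out.println(counter.globalCount()); // 2000
        }
    }
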
getCounterUnit(type).getGlobalCounter() : getCounterUnit(type).getThreadCounter(); - sb.append(type.name()).append("=").append(value).append(", "); - } - // Remove the trailing comma and space - if (sb.length() > 2) { - sb.setLength(sb.length() - 2); - } - return sb.toString(); - } - - public void printProgress() { - if (printPerThread) { - printAndLogProgress("Thread Counts: ", false); - } else if (shouldPrintGlobalProgress()) { - printAndLogProgress("Progress Counts: ", true); - } - } - - // Determines if it's the right time to print global progress - protected boolean shouldPrintGlobalProgress() { - if (!counterMap.containsKey(CounterType.READ)) { - return false; - } - long globalReads = counterMap.get(CounterType.READ).getGlobalCounter(); - long expectedPrintCount = globalReads - globalReads % printStatsAfter; - if (expectedPrintCount > printCounter.getGlobalCounter()) { - printCounter.setGlobalCounter(expectedPrintCount); - return true; - } - return false; - } - - // Prints and logs the progress - protected void printAndLogProgress(String message, boolean global) { - String fullMessage = message + getThreadCounters(global); - logger.info(fullMessage); - } - - public void printFinal(TrackRun trackRunFeature) { - if (null != trackRunFeature) { - StringBuilder sb = new StringBuilder(); - if (counterMap.containsKey(CounterType.READ)) - sb.append("Read: " + counterMap.get(CounterType.READ).getGlobalCounter()); - if (counterMap.containsKey(CounterType.MISMATCH)) - sb.append("; Mismatch: " + counterMap.get(CounterType.MISMATCH).getGlobalCounter()); - if (counterMap.containsKey(CounterType.CORRECTED_MISMATCH)) - sb.append("; Corrected Mismatch: " + counterMap.get(CounterType.CORRECTED_MISMATCH).getGlobalCounter()); - if (counterMap.containsKey(CounterType.MISSING)) - sb.append("; Missing: " + counterMap.get(CounterType.MISSING).getGlobalCounter()); - if (counterMap.containsKey(CounterType.CORRECTED_MISSING)) - sb.append("; Corrected Missing: " + counterMap.get(CounterType.CORRECTED_MISSING).getGlobalCounter()); - if (counterMap.containsKey(CounterType.VALID)) - sb.append("; Valid: " + counterMap.get(CounterType.VALID).getGlobalCounter()); - if (counterMap.containsKey(CounterType.SKIPPED)) - sb.append("; Skipped: " + counterMap.get(CounterType.SKIPPED).getGlobalCounter()); - if (counterMap.containsKey(CounterType.WRITE)) - sb.append("; Write: " + counterMap.get(CounterType.WRITE).getGlobalCounter()); - if (counterMap.containsKey(CounterType.ERROR)) - sb.append("; Error: " + counterMap.get(CounterType.ERROR).getGlobalCounter()); - if (counterMap.containsKey(CounterType.LARGE)) - sb.append("; Large: " + counterMap.get(CounterType.LARGE).getGlobalCounter()); - - trackRunFeature.endCdmRun(sb.toString()); - } - logger.info("################################################################################################"); - if (counterMap.containsKey(CounterType.READ)) - logger.info("Final Read Record Count: {}", counterMap.get(CounterType.READ).getGlobalCounter()); - if (counterMap.containsKey(CounterType.MISMATCH)) - logger.info("Final Mismatch Record Count: {}", counterMap.get(CounterType.MISMATCH).getGlobalCounter()); - if (counterMap.containsKey(CounterType.CORRECTED_MISMATCH)) - logger.info("Final Corrected Mismatch Record Count: {}", - counterMap.get(CounterType.CORRECTED_MISMATCH).getGlobalCounter()); - if (counterMap.containsKey(CounterType.MISSING)) - logger.info("Final Missing Record Count: {}", counterMap.get(CounterType.MISSING).getGlobalCounter()); - if 
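
shouldPrintGlobalProgress above rounds the global read count down to the nearest multiple of printStatsAfter and reports only when that bucket advances, so progress is printed roughly once per printStatsAfter reads regardless of thread interleaving. A worked trace of the same arithmetic:

    // Worked trace of the shouldPrintGlobalProgress threshold arithmetic above.
    public class ProgressGateSketch {
        static long lastPrinted = 0;
        static final long PRINT_STATS_AFTER = 100_000;

        static boolean shouldPrint(long globalReads) {
            long bucket = globalReads - globalReads % PRINT_STATS_AFTER; // round down
            if (bucket > lastPrinted) {
                lastPrinted = bucket;
                return true;
            }
            return false;
        }

        public static void main(String[] args) {
            System.out.println(shouldPrint(99_999));  // false: still in bucket 0
            System.out.println(shouldPrint(100_001)); // true: crossed 100000
            System.out.println(shouldPrint(150_000)); // false: same bucket
            System.out.println(shouldPrint(200_000)); // true: crossed 200000
        }
    }
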
(counterMap.containsKey(CounterType.CORRECTED_MISSING)) - logger.info("Final Corrected Missing Record Count: {}", - counterMap.get(CounterType.CORRECTED_MISSING).getGlobalCounter()); - if (counterMap.containsKey(CounterType.VALID)) - logger.info("Final Valid Record Count: {}", counterMap.get(CounterType.VALID).getGlobalCounter()); - if (counterMap.containsKey(CounterType.SKIPPED)) - logger.info("Final Skipped Record Count: {}", counterMap.get(CounterType.SKIPPED).getGlobalCounter()); - if (counterMap.containsKey(CounterType.WRITE)) - logger.info("Final Write Record Count: {}", counterMap.get(CounterType.WRITE).getGlobalCounter()); - if (counterMap.containsKey(CounterType.ERROR)) - logger.info("Final Error Record Count: {}", counterMap.get(CounterType.ERROR).getGlobalCounter()); - if (counterMap.containsKey(CounterType.LARGE)) - logger.info("Final Large Record Count: {}", counterMap.get(CounterType.LARGE).getGlobalCounter()); - logger.info("################################################################################################"); - } + // Enumeration for counter types + public enum CounterType { + READ, WRITE, VALID, ERROR, MISMATCH, MISSING, CORRECTED_MISSING, CORRECTED_MISMATCH, SKIPPED, UNFLUSHED, LARGE + } + + // Logger instance + private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + + // Internal class to handle atomic counting operations + private static class CounterUnit { + private final AtomicLong globalCounter = new AtomicLong(0); + private final ThreadLocal threadLocalCounter = ThreadLocal.withInitial(() -> 0L); + + public void incrementThreadCounter(long incrementBy) { + threadLocalCounter.set(threadLocalCounter.get() + incrementBy); + } + + public long getThreadCounter() { + return threadLocalCounter.get(); + } + + public void resetThreadCounter() { + threadLocalCounter.set(0L); + } + + public void setGlobalCounter(long value) { + globalCounter.set(value); + } + + public void addThreadToGlobalCounter() { + globalCounter.addAndGet(threadLocalCounter.get()); + } + + public long getGlobalCounter() { + return globalCounter.get(); + } + } + + // Declare individual counters for different operations + private final HashMap counterMap = new HashMap<>(); + + // Variables to hold lock objects and registered types + private final Object globalLock = new Object(); + private final boolean printPerThread; + private final long printStatsAfter; + private final CounterUnit printCounter = new CounterUnit(); + + // Constructor + public JobCounter(long printStatsAfter, boolean printStatsPerPart) { + this.printStatsAfter = printStatsAfter; + this.printPerThread = printStatsPerPart; + } + + // Allows setting the registered counter types. + public void setRegisteredTypes(CounterType... registeredTypes) { + counterMap.clear(); + for (CounterType type : registeredTypes) { + counterMap.put(type, new CounterUnit()); + } + } + + // Utility method to fetch the appropriate counter unit based on type + private CounterUnit getCounterUnit(CounterType counterType) { + if (!counterMap.containsKey(counterType)) { + throw new IllegalArgumentException("CounterType " + counterType + " is not registered"); + } + return (counterMap.get(counterType)); + } + + // Method to get a counter's value + public long getCount(CounterType counterType, boolean global) { + return global ? 
getCounterUnit(counterType).getGlobalCounter() : getCounterUnit(counterType).getThreadCounter(); + } + + // Method to get a thread counter's value + public long getCount(CounterType counterType) { + return getCount(counterType, false); + } + + // Method to reset thread-specific counters for given type + public void threadReset(CounterType counterType) { + getCounterUnit(counterType).resetThreadCounter(); + } + + // Method to reset thread-specific counters for all registered types + public void threadReset() { + for (CounterType type : counterMap.keySet()) { + threadReset(type); + } + } + + // Method to increment thread-specific counters by a given value + public void threadIncrement(CounterType counterType, long incrementBy) { + getCounterUnit(counterType).incrementThreadCounter(incrementBy); + } + + // Method to increment thread-specific counters by 1 + public void threadIncrement(CounterType counterType) { + threadIncrement(counterType, 1); + } + + // Method to increment global counters based on thread-specific counters + public void globalIncrement() { + synchronized (globalLock) { + for (CounterType type : counterMap.keySet()) { + getCounterUnit(type).addThreadToGlobalCounter(); + } + } + } + + // Method to get current counts (both thread-specific and global) as a formatted + // string + public String getThreadCounters(boolean global) { + StringBuilder sb = new StringBuilder(); + for (CounterType type : counterMap.keySet()) { + long value = global ? getCounterUnit(type).getGlobalCounter() : getCounterUnit(type).getThreadCounter(); + sb.append(type.name()).append("=").append(value).append(", "); + } + // Remove the trailing comma and space + if (sb.length() > 2) { + sb.setLength(sb.length() - 2); + } + return sb.toString(); + } + + public void printProgress() { + if (printPerThread) { + printAndLogProgress("Thread Counts: ", false); + } else if (shouldPrintGlobalProgress()) { + printAndLogProgress("Progress Counts: ", true); + } + } + + // Determines if it's the right time to print global progress + protected boolean shouldPrintGlobalProgress() { + if (!counterMap.containsKey(CounterType.READ)) { + return false; + } + long globalReads = counterMap.get(CounterType.READ).getGlobalCounter(); + long expectedPrintCount = globalReads - globalReads % printStatsAfter; + if (expectedPrintCount > printCounter.getGlobalCounter()) { + printCounter.setGlobalCounter(expectedPrintCount); + return true; + } + return false; + } + + // Prints and logs the progress + protected void printAndLogProgress(String message, boolean global) { + String fullMessage = message + getThreadCounters(global); + logger.info(fullMessage); + } + + public void printFinal(TrackRun trackRunFeature) { + if (null != trackRunFeature) { + StringBuilder sb = new StringBuilder(); + if (counterMap.containsKey(CounterType.READ)) + sb.append("Read: " + counterMap.get(CounterType.READ).getGlobalCounter()); + if (counterMap.containsKey(CounterType.MISMATCH)) + sb.append("; Mismatch: " + counterMap.get(CounterType.MISMATCH).getGlobalCounter()); + if (counterMap.containsKey(CounterType.CORRECTED_MISMATCH)) + sb.append("; Corrected Mismatch: " + counterMap.get(CounterType.CORRECTED_MISMATCH).getGlobalCounter()); + if (counterMap.containsKey(CounterType.MISSING)) + sb.append("; Missing: " + counterMap.get(CounterType.MISSING).getGlobalCounter()); + if (counterMap.containsKey(CounterType.CORRECTED_MISSING)) + sb.append("; Corrected Missing: " + counterMap.get(CounterType.CORRECTED_MISSING).getGlobalCounter()); + if 
(counterMap.containsKey(CounterType.VALID)) + sb.append("; Valid: " + counterMap.get(CounterType.VALID).getGlobalCounter()); + if (counterMap.containsKey(CounterType.SKIPPED)) + sb.append("; Skipped: " + counterMap.get(CounterType.SKIPPED).getGlobalCounter()); + if (counterMap.containsKey(CounterType.WRITE)) + sb.append("; Write: " + counterMap.get(CounterType.WRITE).getGlobalCounter()); + if (counterMap.containsKey(CounterType.ERROR)) + sb.append("; Error: " + counterMap.get(CounterType.ERROR).getGlobalCounter()); + if (counterMap.containsKey(CounterType.LARGE)) + sb.append("; Large: " + counterMap.get(CounterType.LARGE).getGlobalCounter()); + + trackRunFeature.endCdmRun(sb.toString()); + } + logger.info("################################################################################################"); + if (counterMap.containsKey(CounterType.READ)) + logger.info("Final Read Record Count: {}", counterMap.get(CounterType.READ).getGlobalCounter()); + if (counterMap.containsKey(CounterType.MISMATCH)) + logger.info("Final Mismatch Record Count: {}", counterMap.get(CounterType.MISMATCH).getGlobalCounter()); + if (counterMap.containsKey(CounterType.CORRECTED_MISMATCH)) + logger.info("Final Corrected Mismatch Record Count: {}", + counterMap.get(CounterType.CORRECTED_MISMATCH).getGlobalCounter()); + if (counterMap.containsKey(CounterType.MISSING)) + logger.info("Final Missing Record Count: {}", counterMap.get(CounterType.MISSING).getGlobalCounter()); + if (counterMap.containsKey(CounterType.CORRECTED_MISSING)) + logger.info("Final Corrected Missing Record Count: {}", + counterMap.get(CounterType.CORRECTED_MISSING).getGlobalCounter()); + if (counterMap.containsKey(CounterType.VALID)) + logger.info("Final Valid Record Count: {}", counterMap.get(CounterType.VALID).getGlobalCounter()); + if (counterMap.containsKey(CounterType.SKIPPED)) + logger.info("Final Skipped Record Count: {}", counterMap.get(CounterType.SKIPPED).getGlobalCounter()); + if (counterMap.containsKey(CounterType.WRITE)) + logger.info("Final Write Record Count: {}", counterMap.get(CounterType.WRITE).getGlobalCounter()); + if (counterMap.containsKey(CounterType.ERROR)) + logger.info("Final Error Record Count: {}", counterMap.get(CounterType.ERROR).getGlobalCounter()); + if (counterMap.containsKey(CounterType.LARGE)) + logger.info("Final Large Record Count: {}", counterMap.get(CounterType.LARGE).getGlobalCounter()); + logger.info("################################################################################################"); + } }
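With JobCounter.java now reformatted in full, the rework reads as a two-level tally: each worker accumulates into cheap ThreadLocal counters, and globalIncrement() folds those into the synchronized global totals once per completed unit of work. The sketch below walks that lifecycle once. It is illustrative only: JobCounterDemo is a hypothetical class, not part of this commit, and it assumes JobCounter sits in the com.datastax.cdm.job package (alongside SplitPartitions, next) and compiles as shown in the + lines above.

import com.datastax.cdm.job.JobCounter;
import com.datastax.cdm.job.JobCounter.CounterType;

// Hypothetical driver, not in the repo: exercises the counter lifecycle once.
public class JobCounterDemo {
    public static void main(String[] args) {
        // Log aggregate stats every 100000 reads; false = no per-thread/part printing.
        JobCounter counter = new JobCounter(100000, false);
        counter.setRegisteredTypes(CounterType.READ, CounterType.WRITE, CounterType.ERROR);

        // A worker tallies into its own ThreadLocal counters while processing a partition...
        counter.threadIncrement(CounterType.READ, 500);
        counter.threadIncrement(CounterType.WRITE, 498);
        counter.threadIncrement(CounterType.ERROR, 2);

        // ...then folds them into the global totals (guarded by globalLock) and
        // resets its thread-local view before taking the next partition.
        counter.globalIncrement();
        counter.threadReset();

        System.out.println(counter.getCount(CounterType.READ, true)); // 500 (global)
        System.out.println(counter.getCount(CounterType.READ));       // 0 (thread-local, just reset)
        counter.printFinal(null); // null TrackRun: logs the final counts only
    }
}

Keeping increments thread-local until a single synchronized flush avoids lock contention on the hot read/write path, which is the point of the CounterUnit split above.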
diff --git a/src/main/java/com/datastax/cdm/job/SplitPartitions.java b/src/main/java/com/datastax/cdm/job/SplitPartitions.java index e0150e49..eec89d62 100644 --- a/src/main/java/com/datastax/cdm/job/SplitPartitions.java +++ b/src/main/java/com/datastax/cdm/job/SplitPartitions.java @@ -28,7 +28,8 @@ public class SplitPartitions { public static Logger logger = LoggerFactory.getLogger(SplitPartitions.class.getName()); - public static List getRandomSubPartitions(int numSplits, BigInteger min, BigInteger max, int coveragePercent) { + public static List getRandomSubPartitions(int numSplits, BigInteger min, BigInteger max, + int coveragePercent) { logger.info("ThreadID: {} Splitting min: {} max: {}", Thread.currentThread().getId(), min, max); List partitions = getSubPartitions(numSplits, min, max, coveragePercent); Collections.shuffle(partitions); @@ -38,7 +39,8 @@ public static List getRandomSubPartitions(int numSplits, BigInteger m return partitions; } - private static List getSubPartitions(int numSplits, BigInteger min, BigInteger max, int coveragePercent) { + private static List getSubPartitions(int numSplits, BigInteger min, BigInteger max, + int coveragePercent) { if (coveragePercent < 1 || coveragePercent > 100) { coveragePercent = 100; } } diff --git a/src/main/java/com/datastax/cdm/properties/IPropertyHelper.java b/src/main/java/com/datastax/cdm/properties/IPropertyHelper.java index 02d49d11..d1995282 100644 --- a/src/main/java/com/datastax/cdm/properties/IPropertyHelper.java +++ b/src/main/java/com/datastax/cdm/properties/IPropertyHelper.java @@ -15,9 +15,10 @@ */ package com.datastax.cdm.properties; -import org.apache.spark.SparkConf; import java.util.List; +import org.apache.spark.SparkConf; + public interface IPropertyHelper { void initializeSparkConf(SparkConf sc); diff --git a/src/main/java/com/datastax/cdm/properties/KnownProperties.java b/src/main/java/com/datastax/cdm/properties/KnownProperties.java index a1a743d7..b0573c0f 100644 --- a/src/main/java/com/datastax/cdm/properties/KnownProperties.java +++ b/src/main/java/com/datastax/cdm/properties/KnownProperties.java @@ -25,431 +25,432 @@ public class KnownProperties { - public enum PropertyType { - STRING, NUMBER, BOOLEAN, STRING_LIST, NUMBER_LIST, TEST_UNHANDLED_TYPE - } - - private static Map<String, PropertyType> types = new HashMap<>(); - private static Map<String, String> defaults = new HashMap<>(); - private static Set<String> required = new HashSet<>(); - - // ========================================================================== - // Common connection parameters - // ========================================================================== - public static final String CONNECT_ORIGIN_HOST = "spark.cdm.connect.origin.host"; - public static final String CONNECT_ORIGIN_PORT = "spark.cdm.connect.origin.port"; - public static final String CONNECT_ORIGIN_SCB = "spark.cdm.connect.origin.scb"; - public static final String CONNECT_ORIGIN_USERNAME = "spark.cdm.connect.origin.username"; - public static final String CONNECT_ORIGIN_PASSWORD = "spark.cdm.connect.origin.password"; - - public static final String CONNECT_TARGET_HOST = "spark.cdm.connect.target.host"; - public static final String CONNECT_TARGET_PORT = "spark.cdm.connect.target.port"; - public static final String CONNECT_TARGET_SCB = "spark.cdm.connect.target.scb"; - public static final String CONNECT_TARGET_USERNAME = "spark.cdm.connect.target.username"; - public static final String CONNECT_TARGET_PASSWORD = "spark.cdm.connect.target.password"; - - static { - types.put(CONNECT_ORIGIN_HOST, PropertyType.STRING); - defaults.put(CONNECT_ORIGIN_HOST, "localhost"); - types.put(CONNECT_ORIGIN_PORT, PropertyType.NUMBER); - defaults.put(CONNECT_ORIGIN_PORT, "9042"); - types.put(CONNECT_ORIGIN_SCB, PropertyType.STRING); - types.put(CONNECT_ORIGIN_USERNAME, PropertyType.STRING); - defaults.put(CONNECT_ORIGIN_USERNAME, "cassandra"); - types.put(CONNECT_ORIGIN_PASSWORD, PropertyType.STRING); - defaults.put(CONNECT_ORIGIN_PASSWORD, "cassandra"); - - types.put(CONNECT_TARGET_HOST, PropertyType.STRING); - defaults.put(CONNECT_TARGET_HOST, "localhost"); - types.put(CONNECT_TARGET_PORT, PropertyType.NUMBER); - defaults.put(CONNECT_TARGET_PORT, "9042"); - types.put(CONNECT_TARGET_SCB, PropertyType.STRING); - types.put(CONNECT_TARGET_USERNAME, PropertyType.STRING); - defaults.put(CONNECT_TARGET_USERNAME, "cassandra"); - types.put(CONNECT_TARGET_PASSWORD, PropertyType.STRING); - defaults.put(CONNECT_TARGET_PASSWORD, "cassandra"); - } - - // ========================================================================== - // Properties 
that describe the origin schema - // ========================================================================== - public static final String ORIGIN_KEYSPACE_TABLE = "spark.cdm.schema.origin.keyspaceTable"; - public static final String ORIGIN_TTL_AUTO = "spark.cdm.schema.origin.column.ttl.automatic"; - public static final String ORIGIN_TTL_NAMES = "spark.cdm.schema.origin.column.ttl.names"; - public static final String ORIGIN_WRITETIME_AUTO = "spark.cdm.schema.origin.column.writetime.automatic"; - public static final String ORIGIN_WRITETIME_NAMES = "spark.cdm.schema.origin.column.writetime.names"; - - public static final String ORIGIN_COLUMN_NAMES_TO_TARGET = "spark.cdm.schema.origin.column.names.to.target"; - - static { - types.put(ORIGIN_KEYSPACE_TABLE, PropertyType.STRING); - required.add(ORIGIN_KEYSPACE_TABLE); - types.put(ORIGIN_TTL_NAMES, PropertyType.STRING_LIST); - types.put(ORIGIN_TTL_AUTO, PropertyType.BOOLEAN); - defaults.put(ORIGIN_TTL_AUTO, "true"); - types.put(ORIGIN_WRITETIME_NAMES, PropertyType.STRING_LIST); - types.put(ORIGIN_WRITETIME_AUTO, PropertyType.BOOLEAN); - defaults.put(ORIGIN_WRITETIME_AUTO, "true"); - types.put(ORIGIN_COLUMN_NAMES_TO_TARGET, PropertyType.STRING_LIST); - } - - // ========================================================================== - // Properties that describe the target schema - // ========================================================================== - public static final String TARGET_KEYSPACE_TABLE = "spark.cdm.schema.target.keyspaceTable"; - - static { - types.put(TARGET_KEYSPACE_TABLE, PropertyType.STRING); - } - - // ========================================================================== - // Autocorrection, Performance, and Operations Parameters - // ========================================================================== - public static final String AUTOCORRECT_MISSING = "spark.cdm.autocorrect.missing"; // false - public static final String AUTOCORRECT_MISMATCH = "spark.cdm.autocorrect.mismatch"; // false - public static final String AUTOCORRECT_MISSING_COUNTER = "spark.cdm.autocorrect.missing.counter"; // false - public static final String TRACK_RUN = "spark.cdm.trackRun"; - public static final String PREV_RUN_ID = "spark.cdm.trackRun.previousRunId"; - - public static final String PERF_NUM_PARTS = "spark.cdm.perfops.numParts"; // 5000, was spark.splitSize - public static final String PERF_BATCH_SIZE = "spark.cdm.perfops.batchSize"; // 5 - public static final String PERF_RATELIMIT_ORIGIN = "spark.cdm.perfops.ratelimit.origin"; // 20000 - public static final String PERF_RATELIMIT_TARGET = "spark.cdm.perfops.ratelimit.target"; // 20000 - - public static final String READ_CL = "spark.cdm.perfops.consistency.read"; - public static final String WRITE_CL = "spark.cdm.perfops.consistency.write"; - public static final String PERF_FETCH_SIZE = "spark.cdm.perfops.fetchSizeInRows"; - public static final String PRINT_STATS_AFTER = "spark.cdm.perfops.printStatsAfter"; - public static final String PRINT_STATS_PER_PART = "spark.cdm.perfops.printStatsPerPart"; - - static { - types.put(AUTOCORRECT_MISSING, PropertyType.BOOLEAN); - defaults.put(AUTOCORRECT_MISSING, "false"); - types.put(AUTOCORRECT_MISMATCH, PropertyType.BOOLEAN); - defaults.put(AUTOCORRECT_MISMATCH, "false"); - types.put(AUTOCORRECT_MISSING_COUNTER, PropertyType.BOOLEAN); - defaults.put(AUTOCORRECT_MISSING_COUNTER, "false"); - types.put(TRACK_RUN, PropertyType.BOOLEAN); - defaults.put(TRACK_RUN, "false"); + public enum PropertyType { + STRING, NUMBER, BOOLEAN, STRING_LIST, 
NUMBER_LIST, TEST_UNHANDLED_TYPE + } + + private static Map<String, PropertyType> types = new HashMap<>(); + private static Map<String, String> defaults = new HashMap<>(); + private static Set<String> required = new HashSet<>(); + + // ========================================================================== + // Common connection parameters + // ========================================================================== + public static final String CONNECT_ORIGIN_HOST = "spark.cdm.connect.origin.host"; + public static final String CONNECT_ORIGIN_PORT = "spark.cdm.connect.origin.port"; + public static final String CONNECT_ORIGIN_SCB = "spark.cdm.connect.origin.scb"; + public static final String CONNECT_ORIGIN_USERNAME = "spark.cdm.connect.origin.username"; + public static final String CONNECT_ORIGIN_PASSWORD = "spark.cdm.connect.origin.password"; + + public static final String CONNECT_TARGET_HOST = "spark.cdm.connect.target.host"; + public static final String CONNECT_TARGET_PORT = "spark.cdm.connect.target.port"; + public static final String CONNECT_TARGET_SCB = "spark.cdm.connect.target.scb"; + public static final String CONNECT_TARGET_USERNAME = "spark.cdm.connect.target.username"; + public static final String CONNECT_TARGET_PASSWORD = "spark.cdm.connect.target.password"; + + static { + types.put(CONNECT_ORIGIN_HOST, PropertyType.STRING); + defaults.put(CONNECT_ORIGIN_HOST, "localhost"); + types.put(CONNECT_ORIGIN_PORT, PropertyType.NUMBER); + defaults.put(CONNECT_ORIGIN_PORT, "9042"); + types.put(CONNECT_ORIGIN_SCB, PropertyType.STRING); + types.put(CONNECT_ORIGIN_USERNAME, PropertyType.STRING); + defaults.put(CONNECT_ORIGIN_USERNAME, "cassandra"); + types.put(CONNECT_ORIGIN_PASSWORD, PropertyType.STRING); + defaults.put(CONNECT_ORIGIN_PASSWORD, "cassandra"); + + types.put(CONNECT_TARGET_HOST, PropertyType.STRING); + defaults.put(CONNECT_TARGET_HOST, "localhost"); + types.put(CONNECT_TARGET_PORT, PropertyType.NUMBER); + defaults.put(CONNECT_TARGET_PORT, "9042"); + types.put(CONNECT_TARGET_SCB, PropertyType.STRING); + types.put(CONNECT_TARGET_USERNAME, PropertyType.STRING); + defaults.put(CONNECT_TARGET_USERNAME, "cassandra"); + types.put(CONNECT_TARGET_PASSWORD, PropertyType.STRING); + defaults.put(CONNECT_TARGET_PASSWORD, "cassandra"); + } + + // ========================================================================== + // Properties that describe the origin schema + // ========================================================================== + public static final String ORIGIN_KEYSPACE_TABLE = "spark.cdm.schema.origin.keyspaceTable"; + public static final String ORIGIN_TTL_AUTO = "spark.cdm.schema.origin.column.ttl.automatic"; + public static final String ORIGIN_TTL_NAMES = "spark.cdm.schema.origin.column.ttl.names"; + public static final String ORIGIN_WRITETIME_AUTO = "spark.cdm.schema.origin.column.writetime.automatic"; + public static final String ORIGIN_WRITETIME_NAMES = "spark.cdm.schema.origin.column.writetime.names"; + + public static final String ORIGIN_COLUMN_NAMES_TO_TARGET = "spark.cdm.schema.origin.column.names.to.target"; + + static { + types.put(ORIGIN_KEYSPACE_TABLE, PropertyType.STRING); + required.add(ORIGIN_KEYSPACE_TABLE); + types.put(ORIGIN_TTL_NAMES, PropertyType.STRING_LIST); + types.put(ORIGIN_TTL_AUTO, PropertyType.BOOLEAN); + defaults.put(ORIGIN_TTL_AUTO, "true"); + types.put(ORIGIN_WRITETIME_NAMES, PropertyType.STRING_LIST); + types.put(ORIGIN_WRITETIME_AUTO, PropertyType.BOOLEAN); + defaults.put(ORIGIN_WRITETIME_AUTO, "true"); + types.put(ORIGIN_COLUMN_NAMES_TO_TARGET, PropertyType.STRING_LIST); + } 
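The + side above registers every property key with a PropertyType and, where one makes sense, a string default; the accessors that reappear further down in this file (isKnown, getDefault, asType) then resolve typed values out of that registry. The probe below is a minimal sketch of what that wiring yields: KnownPropertiesDemo is hypothetical, not part of this commit, and it assumes the file compiles as shown in this diff.

import com.datastax.cdm.properties.KnownProperties;

// Hypothetical probe, not in the repo: reads typed defaults out of the registry.
public class KnownPropertiesDemo {
    public static void main(String[] args) {
        // NUMBER defaults are stored as strings and parsed to Long on the way out.
        Long originPort = (Long) KnownProperties.getDefault(KnownProperties.CONNECT_ORIGIN_PORT);
        System.out.println(originPort); // 9042

        // BOOLEAN defaults come back as Boolean.
        Boolean ttlAuto = (Boolean) KnownProperties.getDefault(KnownProperties.ORIGIN_TTL_AUTO);
        System.out.println(ttlAuto); // true

        // A key registered without a default (the secure connect bundle path) yields null,
        // and unregistered keys are simply unknown.
        System.out.println(KnownProperties.getDefault(KnownProperties.CONNECT_ORIGIN_SCB)); // null
        System.out.println(KnownProperties.isKnown("spark.cdm.no.such.property"));          // false
    }
}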
+ + // ========================================================================== + // Properties that describe the target schema + // ========================================================================== + public static final String TARGET_KEYSPACE_TABLE = "spark.cdm.schema.target.keyspaceTable"; + + static { + types.put(TARGET_KEYSPACE_TABLE, PropertyType.STRING); + } + + // ========================================================================== + // Autocorrection, Performance, and Operations Parameters + // ========================================================================== + public static final String AUTOCORRECT_MISSING = "spark.cdm.autocorrect.missing"; // false + public static final String AUTOCORRECT_MISMATCH = "spark.cdm.autocorrect.mismatch"; // false + public static final String AUTOCORRECT_MISSING_COUNTER = "spark.cdm.autocorrect.missing.counter"; // false + public static final String TRACK_RUN = "spark.cdm.trackRun"; + public static final String PREV_RUN_ID = "spark.cdm.trackRun.previousRunId"; + + public static final String PERF_NUM_PARTS = "spark.cdm.perfops.numParts"; // 5000, was spark.splitSize + public static final String PERF_BATCH_SIZE = "spark.cdm.perfops.batchSize"; // 5 + public static final String PERF_RATELIMIT_ORIGIN = "spark.cdm.perfops.ratelimit.origin"; // 20000 + public static final String PERF_RATELIMIT_TARGET = "spark.cdm.perfops.ratelimit.target"; // 20000 + + public static final String READ_CL = "spark.cdm.perfops.consistency.read"; + public static final String WRITE_CL = "spark.cdm.perfops.consistency.write"; + public static final String PERF_FETCH_SIZE = "spark.cdm.perfops.fetchSizeInRows"; + public static final String PRINT_STATS_AFTER = "spark.cdm.perfops.printStatsAfter"; + public static final String PRINT_STATS_PER_PART = "spark.cdm.perfops.printStatsPerPart"; + + static { + types.put(AUTOCORRECT_MISSING, PropertyType.BOOLEAN); + defaults.put(AUTOCORRECT_MISSING, "false"); + types.put(AUTOCORRECT_MISMATCH, PropertyType.BOOLEAN); + defaults.put(AUTOCORRECT_MISMATCH, "false"); + types.put(AUTOCORRECT_MISSING_COUNTER, PropertyType.BOOLEAN); + defaults.put(AUTOCORRECT_MISSING_COUNTER, "false"); + types.put(TRACK_RUN, PropertyType.BOOLEAN); + defaults.put(TRACK_RUN, "false"); types.put(PREV_RUN_ID, PropertyType.NUMBER); defaults.put(PREV_RUN_ID, "0"); - - types.put(PERF_NUM_PARTS, PropertyType.NUMBER); - defaults.put(PERF_NUM_PARTS, "5000"); - types.put(PERF_BATCH_SIZE, PropertyType.NUMBER); - defaults.put(PERF_BATCH_SIZE, "5"); - types.put(PERF_RATELIMIT_ORIGIN, PropertyType.NUMBER); - defaults.put(PERF_RATELIMIT_ORIGIN, "20000"); - types.put(PERF_RATELIMIT_TARGET, PropertyType.NUMBER); - defaults.put(PERF_RATELIMIT_TARGET, "20000"); - - types.put(READ_CL, PropertyType.STRING); - defaults.put(READ_CL, "LOCAL_QUORUM"); - types.put(WRITE_CL, PropertyType.STRING); - defaults.put(WRITE_CL, "LOCAL_QUORUM"); - types.put(PRINT_STATS_AFTER, PropertyType.NUMBER); - defaults.put(PRINT_STATS_AFTER, "100000"); - types.put(PRINT_STATS_PER_PART, PropertyType.BOOLEAN); - defaults.put(PRINT_STATS_PER_PART, "false"); - types.put(PERF_FETCH_SIZE, PropertyType.NUMBER); - defaults.put(PERF_FETCH_SIZE, "1000"); - } - - // ========================================================================== - // Transformations - // ========================================================================== - public static final String TRANSFORM_REPLACE_MISSING_TS = "spark.cdm.transform.missing.key.ts.replace.value"; - public static final String 
TRANSFORM_CUSTOM_WRITETIME = "spark.cdm.transform.custom.writetime"; - public static final String TRANSFORM_CUSTOM_WRITETIME_INCREMENT = "spark.cdm.transform.custom.writetime.incrementBy"; - public static final String TRANSFORM_CUSTOM_TTL = "spark.cdm.transform.custom.ttl"; - public static final String TRANSFORM_CODECS = "spark.cdm.transform.codecs"; - public static final String TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT = "spark.cdm.transform.codecs.timestamp.string.format"; - public static final String TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE = "spark.cdm.transform.codecs.timestamp.string.zone"; - public static final String TRANSFORM_MAP_REMOVE_KEY_WITH_NO_VALUE = "spark.cdm.transform.map.remove.null.value"; - - static { - types.put(TRANSFORM_REPLACE_MISSING_TS, PropertyType.NUMBER); - types.put(TRANSFORM_CUSTOM_WRITETIME, PropertyType.NUMBER); - defaults.put(TRANSFORM_CUSTOM_WRITETIME, "0"); - types.put(TRANSFORM_CUSTOM_TTL, PropertyType.NUMBER); - defaults.put(TRANSFORM_CUSTOM_TTL, "0"); - types.put(TRANSFORM_CUSTOM_WRITETIME_INCREMENT, PropertyType.NUMBER); - defaults.put(TRANSFORM_CUSTOM_WRITETIME_INCREMENT, "0"); - types.put(TRANSFORM_CODECS, PropertyType.STRING_LIST); - types.put(TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT, PropertyType.STRING); - defaults.put(TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT, "yyyyMMddHHmmss"); - types.put(TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE, PropertyType.STRING); - defaults.put(TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE, "UTC"); - types.put(TRANSFORM_MAP_REMOVE_KEY_WITH_NO_VALUE, PropertyType.BOOLEAN); - defaults.put(TRANSFORM_MAP_REMOVE_KEY_WITH_NO_VALUE, "false"); - } - - // ========================================================================== - // Cassandra-side Filters - // ========================================================================== - public static final String PARTITION_MIN = "spark.cdm.filter.cassandra.partition.min"; - public static final String PARTITION_MAX = "spark.cdm.filter.cassandra.partition.max"; - public static final String FILTER_CQL_WHERE_CONDITION = "spark.cdm.filter.cassandra.whereCondition"; - static { - types.put(PARTITION_MIN, PropertyType.STRING); - types.put(PARTITION_MAX, PropertyType.STRING); - types.put(FILTER_CQL_WHERE_CONDITION, PropertyType.STRING); - } - - // ========================================================================== - // Java-side Filters - // ========================================================================== - public static final String TOKEN_COVERAGE_PERCENT = "spark.cdm.filter.java.token.percent"; - public static final String FILTER_WRITETS_MIN = "spark.cdm.filter.java.writetime.min"; - public static final String FILTER_WRITETS_MAX = "spark.cdm.filter.java.writetime.max"; - public static final String FILTER_COLUMN_NAME = "spark.cdm.filter.java.column.name"; - public static final String FILTER_COLUMN_VALUE = "spark.cdm.filter.java.column.value"; - static { - types.put(TOKEN_COVERAGE_PERCENT, PropertyType.NUMBER); - defaults.put(TOKEN_COVERAGE_PERCENT, "100"); - types.put(FILTER_WRITETS_MIN, PropertyType.NUMBER); - types.put(FILTER_WRITETS_MAX, PropertyType.NUMBER); - types.put(FILTER_COLUMN_NAME, PropertyType.STRING); - types.put(FILTER_COLUMN_VALUE, PropertyType.STRING); - } - - // ========================================================================== - // Constant Column Feature - // ========================================================================== - public static final String CONSTANT_COLUMN_NAMES = "spark.cdm.feature.constantColumns.names"; // 
const1,const2 - public static final String CONSTANT_COLUMN_VALUES = "spark.cdm.feature.constantColumns.values"; // 'abcd',1234 - // Regex needed when values have commas - public static final String CONSTANT_COLUMN_SPLIT_REGEX = "spark.cdm.feature.constantColumns.splitRegex"; - static { - types.put(CONSTANT_COLUMN_NAMES, PropertyType.STRING_LIST); - types.put(CONSTANT_COLUMN_VALUES, PropertyType.STRING); - types.put(CONSTANT_COLUMN_SPLIT_REGEX, PropertyType.STRING); - defaults.put(CONSTANT_COLUMN_SPLIT_REGEX, ","); - } - - // ========================================================================== - // Explode Map Feature - // ========================================================================== - public static final String EXPLODE_MAP_ORIGIN_COLUMN_NAME = "spark.cdm.feature.explodeMap.origin.name"; // map_to_explode - public static final String EXPLODE_MAP_TARGET_KEY_COLUMN_NAME = "spark.cdm.feature.explodeMap.target.name.key"; // map_key - public static final String EXPLODE_MAP_TARGET_VALUE_COLUMN_NAME = "spark.cdm.feature.explodeMap.target.name.value"; // map_value - - static { - types.put(EXPLODE_MAP_ORIGIN_COLUMN_NAME, PropertyType.STRING); - types.put(EXPLODE_MAP_TARGET_KEY_COLUMN_NAME, PropertyType.STRING); - types.put(EXPLODE_MAP_TARGET_VALUE_COLUMN_NAME, PropertyType.STRING); - } - - // ========================================================================== - // Extract JsonFeature - // ========================================================================== - public static final String EXTRACT_JSON_EXCLUSIVE = "spark.cdm.feature.extractJson.exclusive"; - public static final String EXTRACT_JSON_ORIGIN_COLUMN_NAME = "spark.cdm.feature.extractJson.originColumn"; - public static final String EXTRACT_JSON_TARGET_COLUMN_MAPPING = "spark.cdm.feature.extractJson.propertyMapping"; - public static final String EXTRACT_JSON_TARGET_OVERWRITE = "spark.cdm.feature.extractJson.overwrite"; - - static { - types.put(EXTRACT_JSON_EXCLUSIVE, PropertyType.BOOLEAN); - defaults.put(EXTRACT_JSON_EXCLUSIVE, "false"); - types.put(EXTRACT_JSON_ORIGIN_COLUMN_NAME, PropertyType.STRING); - types.put(EXTRACT_JSON_TARGET_COLUMN_MAPPING, PropertyType.STRING); - types.put(EXTRACT_JSON_TARGET_OVERWRITE, PropertyType.BOOLEAN); - defaults.put(EXTRACT_JSON_TARGET_OVERWRITE, "false"); } - - // ========================================================================== - // Guardrail Feature - // ========================================================================== - public static final String GUARDRAIL_COLSIZE_KB = "spark.cdm.feature.guardrail.colSizeInKB"; - static { - types.put(GUARDRAIL_COLSIZE_KB, PropertyType.NUMBER); - defaults.put(GUARDRAIL_COLSIZE_KB, "0"); - } - - // ========================================================================== - // Properties that configure origin TLS - // ========================================================================== - public static final String ORIGIN_TLS_ENABLED = "spark.cdm.connect.origin.tls.enabled"; // false - public static final String ORIGIN_TLS_TRUSTSTORE_PATH = "spark.cdm.connect.origin.tls.trustStore.path"; - public static final String ORIGIN_TLS_TRUSTSTORE_PASSWORD = "spark.cdm.connect.origin.tls.trustStore.password"; - public static final String ORIGIN_TLS_TRUSTSTORE_TYPE = "spark.cdm.connect.origin.tls.trustStore.type"; // JKS - public static final String ORIGIN_TLS_KEYSTORE_PATH = "spark.cdm.connect.origin.tls.keyStore.path"; - public static final String ORIGIN_TLS_KEYSTORE_PASSWORD = "spark.cdm.connect.origin.tls.keyStore.password"; - 
public static final String ORIGIN_TLS_ALGORITHMS = "spark.cdm.connect.origin.tls.enabledAlgorithms"; // TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA - static { - types.put(ORIGIN_TLS_ENABLED, PropertyType.BOOLEAN); - defaults.put(ORIGIN_TLS_ENABLED, "false"); - types.put(ORIGIN_TLS_TRUSTSTORE_PATH, PropertyType.STRING); - types.put(ORIGIN_TLS_TRUSTSTORE_PASSWORD, PropertyType.STRING); - types.put(ORIGIN_TLS_TRUSTSTORE_TYPE, PropertyType.STRING); - defaults.put(ORIGIN_TLS_TRUSTSTORE_TYPE, "JKS"); - types.put(ORIGIN_TLS_KEYSTORE_PATH, PropertyType.STRING); - types.put(ORIGIN_TLS_KEYSTORE_PASSWORD, PropertyType.STRING); - types.put(ORIGIN_TLS_ALGORITHMS, PropertyType.STRING); // This is a list but it is handled by Spark - defaults.put(ORIGIN_TLS_ALGORITHMS, "TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA"); - } - - // ========================================================================== - // Properties that configure target TLS - // ========================================================================== - public static final String TARGET_TLS_ENABLED = "spark.cdm.connect.target.tls.enabled"; // false - public static final String TARGET_TLS_TRUSTSTORE_PATH = "spark.cdm.connect.target.tls.trustStore.path"; - public static final String TARGET_TLS_TRUSTSTORE_PASSWORD = "spark.cdm.connect.target.tls.trustStore.password"; - public static final String TARGET_TLS_TRUSTSTORE_TYPE = "spark.cdm.connect.target.tls.trustStore.type"; // JKS - public static final String TARGET_TLS_KEYSTORE_PATH = "spark.cdm.connect.target.tls.keyStore.path"; - public static final String TARGET_TLS_KEYSTORE_PASSWORD = "spark.cdm.connect.target.tls.keyStore.password"; - public static final String TARGET_TLS_ALGORITHMS = "spark.cdm.connect.target.tls.enabledAlgorithms"; // TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA - static { - types.put(TARGET_TLS_ENABLED, PropertyType.BOOLEAN); - defaults.put(TARGET_TLS_ENABLED, "false"); - types.put(TARGET_TLS_TRUSTSTORE_PATH, PropertyType.STRING); - types.put(TARGET_TLS_TRUSTSTORE_PASSWORD, PropertyType.STRING); - types.put(TARGET_TLS_TRUSTSTORE_TYPE, PropertyType.STRING); - defaults.put(TARGET_TLS_TRUSTSTORE_TYPE, "JKS"); - types.put(TARGET_TLS_KEYSTORE_PATH, PropertyType.STRING); - types.put(TARGET_TLS_KEYSTORE_PASSWORD, PropertyType.STRING); - types.put(TARGET_TLS_ALGORITHMS, PropertyType.STRING); // This is a list but it is handled by Spark - defaults.put(TARGET_TLS_ALGORITHMS, "TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA"); - } - - // ========================================================================== - // Properties used for Unit Testing - // ========================================================================== - public static final String TEST_STRING = "test.string"; - protected static final String TEST_STRING_DEFAULT = "text"; - public static final String TEST_STRING_NO_DEFAULT = "test.string.noDefault"; - public static final String TEST_STRING_LIST = "test.stringList"; - protected static final String TEST_STRING_LIST_DEFAULT = "text1,text2"; - public static final String TEST_NUMBER = "test.number"; - protected static final String TEST_NUMBER_DEFAULT = "1"; - public static final String TEST_NUMBER_LIST = "test.numberList"; - protected static final String TEST_NUMBER_LIST_DEFAULT = "1,2"; - public static final String TEST_BOOLEAN = "test.boolean"; - protected static final String TEST_BOOLEAN_DEFAULT = "true"; - public static final String TEST_UNHANDLED_TYPE = "test.unhandled.type"; - static { - 
types.put(TEST_STRING, PropertyType.STRING); - defaults.put(TEST_STRING, TEST_STRING_DEFAULT); - types.put(TEST_STRING_NO_DEFAULT, PropertyType.STRING); - types.put(TEST_STRING_LIST, PropertyType.STRING_LIST); - defaults.put(TEST_STRING_LIST, TEST_STRING_LIST_DEFAULT); - types.put(TEST_NUMBER, PropertyType.NUMBER); - defaults.put(TEST_NUMBER, TEST_NUMBER_DEFAULT); - types.put(TEST_NUMBER_LIST, PropertyType.NUMBER_LIST); - defaults.put(TEST_NUMBER_LIST, TEST_NUMBER_LIST_DEFAULT); - types.put(TEST_BOOLEAN, PropertyType.BOOLEAN); - defaults.put(TEST_BOOLEAN, TEST_BOOLEAN_DEFAULT); - types.put(TEST_UNHANDLED_TYPE, PropertyType.TEST_UNHANDLED_TYPE); - } - - public static Boolean isKnown(String key) { - return types.containsKey(key); - } - - public static Object asType(PropertyType propertyType, String propertyValue) { - switch (propertyType) { - case STRING: - return propertyValue; - case STRING_LIST: - return Arrays.asList(propertyValue.split(",")); - case NUMBER: - try { - return Long.parseLong(propertyValue); - } catch (NumberFormatException e) { - return null; - } - case NUMBER_LIST: - String[] numValues = propertyValue.split(","); - ArrayList numbers = new ArrayList<>(numValues.length); - try { - for (String value : numValues) { - numbers.add(Long.parseLong(value)); - } - return numbers; - } catch (NumberFormatException e) { - return null; - } - case BOOLEAN: - return Boolean.parseBoolean(propertyValue); - default: - throw new IllegalArgumentException("Unhandled property type: " + propertyType); - } - } - - public static Object getDefault(String key) { - PropertyType type = types.get(key); - String value = defaults.get(key); - if (type == null || value == null) { - return null; - } - return asType(type, value); - } - - public static String getDefaultAsString(String key) { - return defaults.get(key); - } - - public static PropertyType getType(String key) { - return types.get(key); - } - - public static Map getTypeMap() { - return types; - } - - public static Set getRequired() { - return required; - } - - public static boolean validateType(PropertyType expectedType, Object value) { - switch (expectedType) { - case STRING: - if (value instanceof String) { - return true; - } - break; - case STRING_LIST: - if (value instanceof List) { - List list = (List) value; - if (list.isEmpty()) { - return false; - } else { - for (Object o : list) { - if (!(o instanceof String)) { - return false; - } - } - return true; - } - } - break; - case NUMBER: - if (value instanceof Number) { - return true; - } - break; - case NUMBER_LIST: - if (value instanceof List) { - List list = (List) value; - if (list.isEmpty()) { - return false; - } else { - for (Object o : list) { - if (!(o instanceof Number)) { - return false; - } - } - return true; - } - } - break; - case BOOLEAN: - if (value instanceof Boolean) { - return true; - } - break; - default: - break; - } - return false; - } + + types.put(PERF_NUM_PARTS, PropertyType.NUMBER); + defaults.put(PERF_NUM_PARTS, "5000"); + types.put(PERF_BATCH_SIZE, PropertyType.NUMBER); + defaults.put(PERF_BATCH_SIZE, "5"); + types.put(PERF_RATELIMIT_ORIGIN, PropertyType.NUMBER); + defaults.put(PERF_RATELIMIT_ORIGIN, "20000"); + types.put(PERF_RATELIMIT_TARGET, PropertyType.NUMBER); + defaults.put(PERF_RATELIMIT_TARGET, "20000"); + + types.put(READ_CL, PropertyType.STRING); + defaults.put(READ_CL, "LOCAL_QUORUM"); + types.put(WRITE_CL, PropertyType.STRING); + defaults.put(WRITE_CL, "LOCAL_QUORUM"); + types.put(PRINT_STATS_AFTER, PropertyType.NUMBER); + defaults.put(PRINT_STATS_AFTER, 
"100000"); + types.put(PRINT_STATS_PER_PART, PropertyType.BOOLEAN); + defaults.put(PRINT_STATS_PER_PART, "false"); + types.put(PERF_FETCH_SIZE, PropertyType.NUMBER); + defaults.put(PERF_FETCH_SIZE, "1000"); + } + + // ========================================================================== + // Transformations + // ========================================================================== + public static final String TRANSFORM_REPLACE_MISSING_TS = "spark.cdm.transform.missing.key.ts.replace.value"; + public static final String TRANSFORM_CUSTOM_WRITETIME = "spark.cdm.transform.custom.writetime"; + public static final String TRANSFORM_CUSTOM_WRITETIME_INCREMENT = "spark.cdm.transform.custom.writetime.incrementBy"; + public static final String TRANSFORM_CUSTOM_TTL = "spark.cdm.transform.custom.ttl"; + public static final String TRANSFORM_CODECS = "spark.cdm.transform.codecs"; + public static final String TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT = "spark.cdm.transform.codecs.timestamp.string.format"; + public static final String TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE = "spark.cdm.transform.codecs.timestamp.string.zone"; + public static final String TRANSFORM_MAP_REMOVE_KEY_WITH_NO_VALUE = "spark.cdm.transform.map.remove.null.value"; + + static { + types.put(TRANSFORM_REPLACE_MISSING_TS, PropertyType.NUMBER); + types.put(TRANSFORM_CUSTOM_WRITETIME, PropertyType.NUMBER); + defaults.put(TRANSFORM_CUSTOM_WRITETIME, "0"); + types.put(TRANSFORM_CUSTOM_TTL, PropertyType.NUMBER); + defaults.put(TRANSFORM_CUSTOM_TTL, "0"); + types.put(TRANSFORM_CUSTOM_WRITETIME_INCREMENT, PropertyType.NUMBER); + defaults.put(TRANSFORM_CUSTOM_WRITETIME_INCREMENT, "0"); + types.put(TRANSFORM_CODECS, PropertyType.STRING_LIST); + types.put(TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT, PropertyType.STRING); + defaults.put(TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT, "yyyyMMddHHmmss"); + types.put(TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE, PropertyType.STRING); + defaults.put(TRANSFORM_CODECS_TIMESTAMP_STRING_FORMAT_ZONE, "UTC"); + types.put(TRANSFORM_MAP_REMOVE_KEY_WITH_NO_VALUE, PropertyType.BOOLEAN); + defaults.put(TRANSFORM_MAP_REMOVE_KEY_WITH_NO_VALUE, "false"); + } + + // ========================================================================== + // Cassandra-side Filters + // ========================================================================== + public static final String PARTITION_MIN = "spark.cdm.filter.cassandra.partition.min"; + public static final String PARTITION_MAX = "spark.cdm.filter.cassandra.partition.max"; + public static final String FILTER_CQL_WHERE_CONDITION = "spark.cdm.filter.cassandra.whereCondition"; + static { + types.put(PARTITION_MIN, PropertyType.STRING); + types.put(PARTITION_MAX, PropertyType.STRING); + types.put(FILTER_CQL_WHERE_CONDITION, PropertyType.STRING); + } + + // ========================================================================== + // Java-side Filters + // ========================================================================== + public static final String TOKEN_COVERAGE_PERCENT = "spark.cdm.filter.java.token.percent"; + public static final String FILTER_WRITETS_MIN = "spark.cdm.filter.java.writetime.min"; + public static final String FILTER_WRITETS_MAX = "spark.cdm.filter.java.writetime.max"; + public static final String FILTER_COLUMN_NAME = "spark.cdm.filter.java.column.name"; + public static final String FILTER_COLUMN_VALUE = "spark.cdm.filter.java.column.value"; + static { + types.put(TOKEN_COVERAGE_PERCENT, PropertyType.NUMBER); + 
defaults.put(TOKEN_COVERAGE_PERCENT, "100"); + types.put(FILTER_WRITETS_MIN, PropertyType.NUMBER); + types.put(FILTER_WRITETS_MAX, PropertyType.NUMBER); + types.put(FILTER_COLUMN_NAME, PropertyType.STRING); + types.put(FILTER_COLUMN_VALUE, PropertyType.STRING); + } + + // ========================================================================== + // Constant Column Feature + // ========================================================================== + public static final String CONSTANT_COLUMN_NAMES = "spark.cdm.feature.constantColumns.names"; // const1,const2 + public static final String CONSTANT_COLUMN_VALUES = "spark.cdm.feature.constantColumns.values"; // 'abcd',1234 + // Regex needed when values have commas + public static final String CONSTANT_COLUMN_SPLIT_REGEX = "spark.cdm.feature.constantColumns.splitRegex"; + static { + types.put(CONSTANT_COLUMN_NAMES, PropertyType.STRING_LIST); + types.put(CONSTANT_COLUMN_VALUES, PropertyType.STRING); + types.put(CONSTANT_COLUMN_SPLIT_REGEX, PropertyType.STRING); + defaults.put(CONSTANT_COLUMN_SPLIT_REGEX, ","); + } + + // ========================================================================== + // Explode Map Feature + // ========================================================================== + public static final String EXPLODE_MAP_ORIGIN_COLUMN_NAME = "spark.cdm.feature.explodeMap.origin.name"; // map_to_explode + public static final String EXPLODE_MAP_TARGET_KEY_COLUMN_NAME = "spark.cdm.feature.explodeMap.target.name.key"; // map_key + public static final String EXPLODE_MAP_TARGET_VALUE_COLUMN_NAME = "spark.cdm.feature.explodeMap.target.name.value"; // map_value + + static { + types.put(EXPLODE_MAP_ORIGIN_COLUMN_NAME, PropertyType.STRING); + types.put(EXPLODE_MAP_TARGET_KEY_COLUMN_NAME, PropertyType.STRING); + types.put(EXPLODE_MAP_TARGET_VALUE_COLUMN_NAME, PropertyType.STRING); + } + + // ========================================================================== + // Extract JsonFeature + // ========================================================================== + public static final String EXTRACT_JSON_EXCLUSIVE = "spark.cdm.feature.extractJson.exclusive"; + public static final String EXTRACT_JSON_ORIGIN_COLUMN_NAME = "spark.cdm.feature.extractJson.originColumn"; + public static final String EXTRACT_JSON_TARGET_COLUMN_MAPPING = "spark.cdm.feature.extractJson.propertyMapping"; + public static final String EXTRACT_JSON_TARGET_OVERWRITE = "spark.cdm.feature.extractJson.overwrite"; + + static { + types.put(EXTRACT_JSON_EXCLUSIVE, PropertyType.BOOLEAN); + defaults.put(EXTRACT_JSON_EXCLUSIVE, "false"); + types.put(EXTRACT_JSON_ORIGIN_COLUMN_NAME, PropertyType.STRING); + types.put(EXTRACT_JSON_TARGET_COLUMN_MAPPING, PropertyType.STRING); + types.put(EXTRACT_JSON_TARGET_OVERWRITE, PropertyType.BOOLEAN); + defaults.put(EXTRACT_JSON_TARGET_OVERWRITE, "false"); + } + + // ========================================================================== + // Guardrail Feature + // ========================================================================== + public static final String GUARDRAIL_COLSIZE_KB = "spark.cdm.feature.guardrail.colSizeInKB"; + static { + types.put(GUARDRAIL_COLSIZE_KB, PropertyType.NUMBER); + defaults.put(GUARDRAIL_COLSIZE_KB, "0"); + } + + // ========================================================================== + // Properties that configure origin TLS + // ========================================================================== + public static final String ORIGIN_TLS_ENABLED = 
"spark.cdm.connect.origin.tls.enabled"; // false + public static final String ORIGIN_TLS_TRUSTSTORE_PATH = "spark.cdm.connect.origin.tls.trustStore.path"; + public static final String ORIGIN_TLS_TRUSTSTORE_PASSWORD = "spark.cdm.connect.origin.tls.trustStore.password"; + public static final String ORIGIN_TLS_TRUSTSTORE_TYPE = "spark.cdm.connect.origin.tls.trustStore.type"; // JKS + public static final String ORIGIN_TLS_KEYSTORE_PATH = "spark.cdm.connect.origin.tls.keyStore.path"; + public static final String ORIGIN_TLS_KEYSTORE_PASSWORD = "spark.cdm.connect.origin.tls.keyStore.password"; + public static final String ORIGIN_TLS_ALGORITHMS = "spark.cdm.connect.origin.tls.enabledAlgorithms"; // TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA + static { + types.put(ORIGIN_TLS_ENABLED, PropertyType.BOOLEAN); + defaults.put(ORIGIN_TLS_ENABLED, "false"); + types.put(ORIGIN_TLS_TRUSTSTORE_PATH, PropertyType.STRING); + types.put(ORIGIN_TLS_TRUSTSTORE_PASSWORD, PropertyType.STRING); + types.put(ORIGIN_TLS_TRUSTSTORE_TYPE, PropertyType.STRING); + defaults.put(ORIGIN_TLS_TRUSTSTORE_TYPE, "JKS"); + types.put(ORIGIN_TLS_KEYSTORE_PATH, PropertyType.STRING); + types.put(ORIGIN_TLS_KEYSTORE_PASSWORD, PropertyType.STRING); + types.put(ORIGIN_TLS_ALGORITHMS, PropertyType.STRING); // This is a list but it is handled by Spark + defaults.put(ORIGIN_TLS_ALGORITHMS, "TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA"); + } + + // ========================================================================== + // Properties that configure target TLS + // ========================================================================== + public static final String TARGET_TLS_ENABLED = "spark.cdm.connect.target.tls.enabled"; // false + public static final String TARGET_TLS_TRUSTSTORE_PATH = "spark.cdm.connect.target.tls.trustStore.path"; + public static final String TARGET_TLS_TRUSTSTORE_PASSWORD = "spark.cdm.connect.target.tls.trustStore.password"; + public static final String TARGET_TLS_TRUSTSTORE_TYPE = "spark.cdm.connect.target.tls.trustStore.type"; // JKS + public static final String TARGET_TLS_KEYSTORE_PATH = "spark.cdm.connect.target.tls.keyStore.path"; + public static final String TARGET_TLS_KEYSTORE_PASSWORD = "spark.cdm.connect.target.tls.keyStore.password"; + public static final String TARGET_TLS_ALGORITHMS = "spark.cdm.connect.target.tls.enabledAlgorithms"; // TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA + static { + types.put(TARGET_TLS_ENABLED, PropertyType.BOOLEAN); + defaults.put(TARGET_TLS_ENABLED, "false"); + types.put(TARGET_TLS_TRUSTSTORE_PATH, PropertyType.STRING); + types.put(TARGET_TLS_TRUSTSTORE_PASSWORD, PropertyType.STRING); + types.put(TARGET_TLS_TRUSTSTORE_TYPE, PropertyType.STRING); + defaults.put(TARGET_TLS_TRUSTSTORE_TYPE, "JKS"); + types.put(TARGET_TLS_KEYSTORE_PATH, PropertyType.STRING); + types.put(TARGET_TLS_KEYSTORE_PASSWORD, PropertyType.STRING); + types.put(TARGET_TLS_ALGORITHMS, PropertyType.STRING); // This is a list but it is handled by Spark + defaults.put(TARGET_TLS_ALGORITHMS, "TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA"); + } + + // ========================================================================== + // Properties used for Unit Testing + // ========================================================================== + public static final String TEST_STRING = "test.string"; + protected static final String TEST_STRING_DEFAULT = "text"; + public static final String TEST_STRING_NO_DEFAULT = "test.string.noDefault"; + public static final 
String TEST_STRING_LIST = "test.stringList"; + protected static final String TEST_STRING_LIST_DEFAULT = "text1,text2"; + public static final String TEST_NUMBER = "test.number"; + protected static final String TEST_NUMBER_DEFAULT = "1"; + public static final String TEST_NUMBER_LIST = "test.numberList"; + protected static final String TEST_NUMBER_LIST_DEFAULT = "1,2"; + public static final String TEST_BOOLEAN = "test.boolean"; + protected static final String TEST_BOOLEAN_DEFAULT = "true"; + public static final String TEST_UNHANDLED_TYPE = "test.unhandled.type"; + static { + types.put(TEST_STRING, PropertyType.STRING); + defaults.put(TEST_STRING, TEST_STRING_DEFAULT); + types.put(TEST_STRING_NO_DEFAULT, PropertyType.STRING); + types.put(TEST_STRING_LIST, PropertyType.STRING_LIST); + defaults.put(TEST_STRING_LIST, TEST_STRING_LIST_DEFAULT); + types.put(TEST_NUMBER, PropertyType.NUMBER); + defaults.put(TEST_NUMBER, TEST_NUMBER_DEFAULT); + types.put(TEST_NUMBER_LIST, PropertyType.NUMBER_LIST); + defaults.put(TEST_NUMBER_LIST, TEST_NUMBER_LIST_DEFAULT); + types.put(TEST_BOOLEAN, PropertyType.BOOLEAN); + defaults.put(TEST_BOOLEAN, TEST_BOOLEAN_DEFAULT); + types.put(TEST_UNHANDLED_TYPE, PropertyType.TEST_UNHANDLED_TYPE); + } + + public static Boolean isKnown(String key) { + return types.containsKey(key); + } + + public static Object asType(PropertyType propertyType, String propertyValue) { + switch (propertyType) { + case STRING: + return propertyValue; + case STRING_LIST: + return Arrays.asList(propertyValue.split(",")); + case NUMBER: + try { + return Long.parseLong(propertyValue); + } catch (NumberFormatException e) { + return null; + } + case NUMBER_LIST: + String[] numValues = propertyValue.split(","); + ArrayList numbers = new ArrayList<>(numValues.length); + try { + for (String value : numValues) { + numbers.add(Long.parseLong(value)); + } + return numbers; + } catch (NumberFormatException e) { + return null; + } + case BOOLEAN: + return Boolean.parseBoolean(propertyValue); + default: + throw new IllegalArgumentException("Unhandled property type: " + propertyType); + } + } + + public static Object getDefault(String key) { + PropertyType type = types.get(key); + String value = defaults.get(key); + if (type == null || value == null) { + return null; + } + return asType(type, value); + } + + public static String getDefaultAsString(String key) { + return defaults.get(key); + } + + public static PropertyType getType(String key) { + return types.get(key); + } + + public static Map getTypeMap() { + return types; + } + + public static Set getRequired() { + return required; + } + + public static boolean validateType(PropertyType expectedType, Object value) { + switch (expectedType) { + case STRING: + if (value instanceof String) { + return true; + } + break; + case STRING_LIST: + if (value instanceof List) { + List list = (List) value; + if (list.isEmpty()) { + return false; + } else { + for (Object o : list) { + if (!(o instanceof String)) { + return false; + } + } + return true; + } + } + break; + case NUMBER: + if (value instanceof Number) { + return true; + } + break; + case NUMBER_LIST: + if (value instanceof List) { + List list = (List) value; + if (list.isEmpty()) { + return false; + } else { + for (Object o : list) { + if (!(o instanceof Number)) { + return false; + } + } + return true; + } + } + break; + case BOOLEAN: + if (value instanceof Boolean) { + return true; + } + break; + default: + break; + } + return false; + } }
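Before the diff moves on to PropertyHelper, asType and validateType above are worth reading as a round trip: asType parses a property's raw string form into a typed object, and validateType vets such an object before it is stored. The sketch below exercises both; PropertyTypeDemo is a hypothetical class, not part of this commit, and assumes the file compiles as shown.

import java.util.List;

import com.datastax.cdm.properties.KnownProperties;
import com.datastax.cdm.properties.KnownProperties.PropertyType;

// Hypothetical demo, not in the repo: parse-then-validate round trip.
public class PropertyTypeDemo {
    public static void main(String[] args) {
        // NUMBER_LIST: "1,2,3" parses to a list of Longs, which then validates.
        Object parsed = KnownProperties.asType(PropertyType.NUMBER_LIST, "1,2,3");
        System.out.println(parsed);                                                         // [1, 2, 3]
        System.out.println(KnownProperties.validateType(PropertyType.NUMBER_LIST, parsed)); // true

        // Malformed numbers yield null rather than throwing...
        System.out.println(KnownProperties.asType(PropertyType.NUMBER, "20k")); // null

        // ...and an empty list never validates, matching the rule in PropertyHelper
        // (next file) that _LIST properties are only set when non-empty.
        System.out.println(KnownProperties.validateType(PropertyType.STRING_LIST, List.of())); // false
    }
}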
diff --git a/src/main/java/com/datastax/cdm/properties/PropertyHelper.java b/src/main/java/com/datastax/cdm/properties/PropertyHelper.java index e7bf2fca..e93d8aa6 100644 --- a/src/main/java/com/datastax/cdm/properties/PropertyHelper.java +++ b/src/main/java/com/datastax/cdm/properties/PropertyHelper.java @@ -15,19 +15,20 @@ */ package com.datastax.cdm.properties; +import java.util.*; + import org.apache.commons.lang3.StringUtils; +import org.apache.spark.SparkConf; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.spark.SparkConf; -import scala.Tuple2; -import java.util.*; +import scala.Tuple2; public final class PropertyHelper extends KnownProperties implements IPropertyHelper { private static PropertyHelper instance = null; public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - private final Map<String,Object> propertyMap; + private final Map<String, Object> propertyMap; private volatile SparkConf sparkConf; private boolean sparkConfFullyLoaded = false; @@ -65,8 +66,7 @@ public static void destroyInstance() { } /** - * Loads the SparkConf into the propertyMap, but only - * if the SparkConf has not already been loaded. + * Loads the SparkConf into the propertyMap, but only if the SparkConf has not already been loaded. * * @param sc */ @@ -85,16 +85,17 @@ public void initializeSparkConf(SparkConf sc) { } /** - * Sets a property value if it is of the correct and known type. - * For _LIST types, the property will only be set if the list is not empty. + * Sets a property value if it is of the correct and known type. For _LIST types, the property will only be set if + * the list is not empty. + * * @param propertyName * @param propertyValue + * * @return propertyValue if it is of the correct type, null otherwise */ @Override public Object setProperty(String propertyName, Object propertyValue) { - if (null == propertyName || - null == propertyValue) + if (null == propertyName || null == propertyValue) return null; PropertyType expectedType = getType(propertyName); if (null == expectedType) { @@ -116,16 +117,14 @@ protected Object get(String propertyName) { return null; Object currentProperty; - synchronized (PropertyHelper.class){ + synchronized (PropertyHelper.class) { currentProperty = propertyMap.get(propertyName); } return currentProperty; } protected Object get(String propertyName, PropertyType expectedType) { - if (null == propertyName - || null == expectedType - || expectedType != getType(propertyName)) { + if (null == propertyName || null == expectedType || expectedType != getType(propertyName)) { return null; } Object currentProperty = get(propertyName); @@ -155,16 +154,14 @@ public Number getNumber(String propertyName) { @Override public Integer getInteger(String propertyName) { - if (null==getNumber(propertyName) - || PropertyType.NUMBER != getType(propertyName)) + if (null == getNumber(propertyName) || PropertyType.NUMBER != getType(propertyName)) return null; return toInteger(getNumber(propertyName)); } @Override public Long getLong(String propertyName) { - if (null==getNumber(propertyName) - || PropertyType.NUMBER != getType(propertyName)) + if (null == getNumber(propertyName) || PropertyType.NUMBER != getType(propertyName)) return null; return getNumber(propertyName).longValue(); } @@ -178,9 +175,8 @@ public List<Number> getNumberList(String propertyName) { public List<Integer> getIntegerList(String propertyName) { List<Integer> intList = new ArrayList<>(); Integer i; - if (null==propertyName - || PropertyType.NUMBER_LIST != getType(propertyName) - || null==getNumberList(propertyName)) + if (null 
== propertyName || PropertyType.NUMBER_LIST != getType(propertyName) + || null == getNumberList(propertyName)) return null; return toIntegerList(getNumberList(propertyName)); } @@ -200,20 +196,21 @@ public String getAsString(String propertyName) { } public static String asString(Object o, PropertyType t) { - if (null==o || null==t) return ""; + if (null == o || null == t) + return ""; String rtn = ""; switch (t) { - case STRING: - rtn = (String) o; - break; - case STRING_LIST: - case NUMBER_LIST: - rtn = StringUtils.join((List) o, ","); - break; - case NUMBER: - case BOOLEAN: - default: - rtn = o.toString(); + case STRING: + rtn = (String) o; + break; + case STRING_LIST: + case NUMBER_LIST: + rtn = StringUtils.join((List) o, ","); + break; + case NUMBER: + case BOOLEAN: + default: + rtn = o.toString(); } return (null == rtn) ? "" : rtn; } @@ -223,19 +220,22 @@ protected void loadSparkConf() { Object setValue; logger.info("Processing explicitly set and known sparkConf properties"); - for (Tuple2 kvp : sparkConf.getAll()) { + for (Tuple2 kvp : sparkConf.getAll()) { String scKey = kvp._1(); String scValue = kvp._2(); -; + ; if (isKnown(scKey)) { PropertyType propertyType = getType(scKey); - setValue = setProperty(scKey, asType(propertyType,scValue)); + setValue = setProperty(scKey, asType(propertyType, scValue)); if (null == setValue) { - logger.error("Unable to set property: [" + scKey + "], value: [" + scValue + "] with type: [" + propertyType +"]"); + logger.error("Unable to set property: [" + scKey + "], value: [" + scValue + "] with type: [" + + propertyType + "]"); fullyLoaded = false; } else { - if (scKey.contains("password")) scValue= "********"; - logger.info("Known property [" + scKey + "] is configured with value [" + scValue + "] and is type [" + propertyType + "]"); + if (scKey.contains("password")) + scValue = "********"; + logger.info("Known property [" + scKey + "] is configured with value [" + scValue + + "] and is type [" + propertyType + "]"); } } } @@ -245,7 +245,8 @@ protected void loadSparkConf() { if (null == get(knownProperty)) { Object defaultValue = getDefault(knownProperty); if (null != defaultValue) { - logger.debug("Setting known property [" + knownProperty + "] with default value [" + getDefaultAsString(knownProperty) + "]"); + logger.debug("Setting known property [" + knownProperty + "] with default value [" + + getDefaultAsString(knownProperty) + "]"); setProperty(knownProperty, defaultValue); } } @@ -270,16 +271,16 @@ protected boolean isValidConfig() { } // Check we have a configured origin connection - if ( (null == get(CONNECT_ORIGIN_HOST) && null == get(CONNECT_ORIGIN_SCB)) || - getAsString(CONNECT_ORIGIN_HOST).isEmpty() && getAsString(CONNECT_ORIGIN_SCB).isEmpty()) { + if ((null == get(CONNECT_ORIGIN_HOST) && null == get(CONNECT_ORIGIN_SCB)) + || getAsString(CONNECT_ORIGIN_HOST).isEmpty() && getAsString(CONNECT_ORIGIN_SCB).isEmpty()) { logger.error("Missing required property: " + CONNECT_ORIGIN_HOST + " or " + CONNECT_ORIGIN_SCB); valid = false; } else { // Validate TLS configuration is set if so-enabled if (null == get(CONNECT_ORIGIN_SCB) && null != get(ORIGIN_TLS_ENABLED) && getBoolean(ORIGIN_TLS_ENABLED)) { - for (String expectedProperty : new String[]{ORIGIN_TLS_TRUSTSTORE_PATH, ORIGIN_TLS_TRUSTSTORE_PASSWORD, - ORIGIN_TLS_TRUSTSTORE_TYPE, ORIGIN_TLS_KEYSTORE_PATH, ORIGIN_TLS_KEYSTORE_PASSWORD, - ORIGIN_TLS_ALGORITHMS}) { + for (String expectedProperty : new String[] { ORIGIN_TLS_TRUSTSTORE_PATH, + ORIGIN_TLS_TRUSTSTORE_PASSWORD, 
ORIGIN_TLS_TRUSTSTORE_TYPE, ORIGIN_TLS_KEYSTORE_PATH, + ORIGIN_TLS_KEYSTORE_PASSWORD, ORIGIN_TLS_ALGORITHMS }) { if (null == get(expectedProperty) || getAsString(expectedProperty).isEmpty()) { logger.error("TLS is enabled, but required value is not set: " + expectedProperty); valid = false; @@ -289,16 +290,16 @@ } // Check we have a configured target connection - if ( (null == get(CONNECT_TARGET_HOST) && null == get(CONNECT_TARGET_SCB)) || - getAsString(CONNECT_TARGET_HOST).isEmpty() && getAsString(CONNECT_TARGET_SCB).isEmpty()) { + if ((null == get(CONNECT_TARGET_HOST) && null == get(CONNECT_TARGET_SCB)) + || getAsString(CONNECT_TARGET_HOST).isEmpty() && getAsString(CONNECT_TARGET_SCB).isEmpty()) { logger.error("Missing required property: " + CONNECT_TARGET_HOST + " or " + CONNECT_TARGET_SCB); valid = false; } else { // Validate TLS configuration is set if so-enabled if (null == get(CONNECT_TARGET_SCB) && null != get(TARGET_TLS_ENABLED) && getBoolean(TARGET_TLS_ENABLED)) { - for (String expectedProperty : new String[]{TARGET_TLS_TRUSTSTORE_PATH, TARGET_TLS_TRUSTSTORE_PASSWORD, - TARGET_TLS_TRUSTSTORE_TYPE, TARGET_TLS_KEYSTORE_PATH, TARGET_TLS_KEYSTORE_PASSWORD, - TARGET_TLS_ALGORITHMS}) { + for (String expectedProperty : new String[] { TARGET_TLS_TRUSTSTORE_PATH, + TARGET_TLS_TRUSTSTORE_PASSWORD, TARGET_TLS_TRUSTSTORE_TYPE, TARGET_TLS_KEYSTORE_PATH, + TARGET_TLS_KEYSTORE_PASSWORD, TARGET_TLS_ALGORITHMS }) { if (null == get(expectedProperty) || getAsString(expectedProperty).isEmpty()) { logger.error("TLS is enabled, but required value is not set: " + expectedProperty); valid = false; @@ -306,9 +307,10 @@ } } } - + // Expecting these to normally be set, but it could be a valid configuration - for (String expectedProperty : new String[]{CONNECT_ORIGIN_USERNAME, CONNECT_ORIGIN_PASSWORD, CONNECT_TARGET_USERNAME, CONNECT_TARGET_PASSWORD}) { + for (String expectedProperty : new String[] { CONNECT_ORIGIN_USERNAME, CONNECT_ORIGIN_PASSWORD, + CONNECT_TARGET_USERNAME, CONNECT_TARGET_PASSWORD }) { if (null == get(expectedProperty) || getAsString(expectedProperty).isEmpty()) { logger.warn("Unusual this is not set: " + expectedProperty); } @@ -318,9 +320,7 @@ } public static Integer toInteger(Number n) { - if (n instanceof Integer - || n instanceof Short - || n instanceof Byte) + if (n instanceof Integer || n instanceof Short || n instanceof Byte) return n.intValue(); else if (n instanceof Long) { if ((Long) n >= Integer.MIN_VALUE && (Long) n <= Integer.MAX_VALUE) { @@ -332,7 +332,8 @@ else if (n instanceof Long) { public static List<Integer> toIntegerList(List<Number> numberList) { List<Integer> intList = new ArrayList<>(); - if (null==numberList) return intList; + if (null == numberList) + return intList; Integer i; for (Number n : numberList) { i = toInteger(n); @@ -343,7 +344,7 @@ public static List<Integer> toIntegerList(List<Number> numberList) { return intList; } - protected Map<String,Object> getPropertyMap() { + protected Map<String, Object> getPropertyMap() { return propertyMap; }
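// A short worked example of the toInteger() narrowing in the hunk above, assuming the
// Long branch falls through to null when the value is outside the Integer range:
//
//     PropertyHelper.toInteger(42L);            // 42
//     PropertyHelper.toInteger(4_000_000_000L); // null: cannot be narrowed safely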
@@ -355,7 +356,8 @@ public boolean isSparkConfFullyLoaded() { public boolean meetsMinimum(String valueName, Long testValue, Long minimumValue) { if (null != minimumValue && null != testValue && testValue >= minimumValue) return true; - logger.warn(valueName + " must be greater than or equal to " + minimumValue + ". Current value does not meet this requirement: " + testValue); + logger.warn(valueName + " must be greater than or equal to " + minimumValue + + ". Current value does not meet this requirement: " + testValue); return false; } diff --git a/src/main/java/com/datastax/cdm/schema/BaseTable.java b/src/main/java/com/datastax/cdm/schema/BaseTable.java index b5cfa4f5..d2071fa6 100644 --- a/src/main/java/com/datastax/cdm/schema/BaseTable.java +++ b/src/main/java/com/datastax/cdm/schema/BaseTable.java @@ -15,14 +15,16 @@ */ package com.datastax.cdm.schema; +import java.util.List; + +import javax.validation.constraints.NotNull; + +import org.apache.commons.lang3.StringUtils; + import com.datastax.cdm.data.CqlConversion; import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.cdm.properties.KnownProperties; import com.datastax.oss.driver.api.core.type.DataType; -import org.apache.commons.lang3.StringUtils; - -import javax.validation.constraints.NotNull; -import java.util.List; public class BaseTable implements Table { protected final IPropertyHelper propertyHelper; @@ -50,15 +52,16 @@ public BaseTable(IPropertyHelper propertyHelper, boolean isOrigin) { @NotNull private String getKeyspaceTableAsString(IPropertyHelper propertyHelper, boolean isOrigin) { - String keyspaceTableString = (isOrigin ? propertyHelper.getString(KnownProperties.ORIGIN_KEYSPACE_TABLE) : - propertyHelper.getString(KnownProperties.TARGET_KEYSPACE_TABLE)); + String keyspaceTableString = (isOrigin ? propertyHelper.getString(KnownProperties.ORIGIN_KEYSPACE_TABLE) + : propertyHelper.getString(KnownProperties.TARGET_KEYSPACE_TABLE)); // Use origin keyspaceTable property if target not specified if (!isOrigin && StringUtils.isBlank(keyspaceTableString)) { keyspaceTableString = propertyHelper.getString(KnownProperties.ORIGIN_KEYSPACE_TABLE); } if (StringUtils.isBlank(keyspaceTableString)) { - throw new RuntimeException("Value for required property " + KnownProperties.ORIGIN_KEYSPACE_TABLE + " not provided!!"); + throw new RuntimeException( + "Value for required property " + KnownProperties.ORIGIN_KEYSPACE_TABLE + " not provided!!"); } return keyspaceTableString.trim(); @@ -91,4 +94,4 @@ public List getConversions() { public boolean isOrigin() { return this.isOrigin; } -} \ No newline at end of file +}
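// A hedged sketch of the fallback in the BaseTable hunk above (property values are
// illustrative): with spark.cdm.schema.origin.keyspaceTable=ks1.tbl1 configured and the
// target keyspaceTable left blank, both tables resolve to ks1.tbl1; the RuntimeException
// is thrown only when origin and target are both blank.
//
//     BaseTable origin = new BaseTable(propertyHelper, true);  // ks1.tbl1
//     BaseTable target = new BaseTable(propertyHelper, false); // falls back to ks1.tbl1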
diff --git a/src/main/java/com/datastax/cdm/schema/CqlTable.java b/src/main/java/com/datastax/cdm/schema/CqlTable.java index 124ddefa..f2dccc42 100644 --- a/src/main/java/com/datastax/cdm/schema/CqlTable.java +++ b/src/main/java/com/datastax/cdm/schema/CqlTable.java @@ -26,7 +26,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import com.datastax.cdm.properties.IPropertyHelper; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,6 +36,7 @@ import com.datastax.cdm.feature.Feature; import com.datastax.cdm.feature.Featureset; import com.datastax.cdm.feature.WritetimeTTL; +import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.cdm.properties.KnownProperties; import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.CqlIdentifier; @@ -54,444 +54,501 @@ import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; public class CqlTable extends BaseTable { - public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - boolean logDebug = logger.isDebugEnabled(); - boolean logTrace = logger.isTraceEnabled(); - - private final CqlSession cqlSession; - private boolean hasRandomPartitioner; - private final List<String> partitionKeyNames; - private final List<String> pkNames; - private final List<Class> pkClasses; - private final List<Integer> pkIndexes; - private boolean isCounterTable; - private final ConsistencyLevel readConsistencyLevel; - private final ConsistencyLevel writeConsistencyLevel; - - private List<ColumnMetadata> cqlPartitionKey; - private List<ColumnMetadata> cqlPrimaryKey; - private List<ColumnMetadata> cqlAllColumns; - private Map<String, DataType> columnNameToCqlTypeMap; - private final List<Class> bindClasses; - private List<String> writetimeTTLColumns; - - private CqlTable otherCqlTable; - private List<Integer> correspondingIndexes; - private final List<Integer> counterIndexes; - protected Map<Featureset, Feature> featureMap; + public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + boolean logDebug = logger.isDebugEnabled(); + boolean logTrace = logger.isTraceEnabled(); + + private final CqlSession cqlSession; + private boolean hasRandomPartitioner; + private final List<String> partitionKeyNames; + private final List<String> pkNames; + private final List<Class> pkClasses; + private final List<Integer> pkIndexes; + private boolean isCounterTable; + private final ConsistencyLevel readConsistencyLevel; + private final ConsistencyLevel writeConsistencyLevel; + + private List<ColumnMetadata> cqlPartitionKey; + private List<ColumnMetadata> cqlPrimaryKey; + private List<ColumnMetadata> cqlAllColumns; + private Map<String, DataType> columnNameToCqlTypeMap; + private final List<Class> bindClasses; + private List<String> writetimeTTLColumns; + + private CqlTable otherCqlTable; + private List<Integer> correspondingIndexes; + private final List<Integer> counterIndexes; + protected Map<Featureset, Feature> featureMap; // These defaults address the problem where we cannot insert null values into a PK column - private final Long defaultForMissingTimestamp; - private final String defaultForMissingString; - - public CqlTable(IPropertyHelper propertyHelper, boolean isOrigin, CqlSession session) { - super(propertyHelper, isOrigin); - this.keyspaceName = unFormatName(keyspaceName); - this.tableName = unFormatName(tableName); - - this.cqlSession = session; - - // setCqlMetadata(session) will set: - // - this.cqlPartitionKey : List of the partition key column(s) - // - this.cqlPrimaryKey : List of the primary key (partition key + clustering columns) - // - this.cqlColumns : List of all columns on the table - // - columnNameToCqlTypeMap : Map of the column name to driver DataType - setCqlMetadata(session); - - if (null == this.columnNames || this.columnNames.isEmpty()) { - if (null == this.cqlAllColumns || this.cqlAllColumns.isEmpty()) { - throw new IllegalArgumentException("No columns defined for table " + this.keyspaceName + "." 
+ this.tableName); - } - this.columnNames = this.cqlAllColumns.stream().map(columnMetadata -> columnMetadata.getName().asInternal()).collect(Collectors.toList()); - } - this.columnCqlTypes = columnNames.stream().map(columnName -> this.columnNameToCqlTypeMap.get(columnName)).collect(Collectors.toList()); - this.bindClasses = columnCqlTypes.stream() - .map(CqlData::getBindClass) - .collect(Collectors.toList()); - - this.partitionKeyNames = cqlPartitionKey.stream().map(columnMetadata -> columnMetadata.getName().asInternal()).collect(Collectors.toList()); - this.pkNames = cqlPrimaryKey.stream().map(columnMetadata -> columnMetadata.getName().asInternal()).collect(Collectors.toList()); - List pkTypes = cqlPrimaryKey.stream().map(ColumnMetadata::getType).collect(Collectors.toList()); - this.pkClasses = pkTypes.stream() - .map(CqlData::getBindClass) - .collect(Collectors.toList()); - this.pkIndexes = pkNames.stream() - .map(columnNames::indexOf) - .collect(Collectors.toList()); - - this.counterIndexes = IntStream.range(0, columnCqlTypes.size()) - .filter(i -> columnCqlTypes.get(i).equals(DataTypes.COUNTER)) - .boxed() + private final Long defaultForMissingTimestamp; + private final String defaultForMissingString; + + public CqlTable(IPropertyHelper propertyHelper, boolean isOrigin, CqlSession session) { + super(propertyHelper, isOrigin); + this.keyspaceName = unFormatName(keyspaceName); + this.tableName = unFormatName(tableName); + + this.cqlSession = session; + + // setCqlMetadata(session) will set: + // - this.cqlPartitionKey : List of the partition key column(s) + // - this.cqlPrimaryKey : List of the primary key (partition key + clustering columns) + // - this.cqlColumns : List of all columns on the table + // - columnNameToCqlTypeMap : Map of the column name to driver DataType + setCqlMetadata(session); + + if (null == this.columnNames || this.columnNames.isEmpty()) { + if (null == this.cqlAllColumns || this.cqlAllColumns.isEmpty()) { + throw new IllegalArgumentException( + "No columns defined for table " + this.keyspaceName + "." 
+ this.tableName); + } + this.columnNames = this.cqlAllColumns.stream().map(columnMetadata -> columnMetadata.getName().asInternal()) .collect(Collectors.toList()); - this.isCounterTable = !this.counterIndexes.isEmpty(); + } + this.columnCqlTypes = columnNames.stream().map(columnName -> this.columnNameToCqlTypeMap.get(columnName)) + .collect(Collectors.toList()); + this.bindClasses = columnCqlTypes.stream().map(CqlData::getBindClass).collect(Collectors.toList()); + + this.partitionKeyNames = cqlPartitionKey.stream().map(columnMetadata -> columnMetadata.getName().asInternal()) + .collect(Collectors.toList()); + this.pkNames = cqlPrimaryKey.stream().map(columnMetadata -> columnMetadata.getName().asInternal()) + .collect(Collectors.toList()); + List pkTypes = cqlPrimaryKey.stream().map(ColumnMetadata::getType).collect(Collectors.toList()); + this.pkClasses = pkTypes.stream().map(CqlData::getBindClass).collect(Collectors.toList()); + this.pkIndexes = pkNames.stream().map(columnNames::indexOf).collect(Collectors.toList()); + + this.counterIndexes = IntStream.range(0, columnCqlTypes.size()) + .filter(i -> columnCqlTypes.get(i).equals(DataTypes.COUNTER)).boxed().collect(Collectors.toList()); + this.isCounterTable = !this.counterIndexes.isEmpty(); + + this.readConsistencyLevel = mapToConsistencyLevel(propertyHelper.getString(KnownProperties.READ_CL)); + this.writeConsistencyLevel = mapToConsistencyLevel(propertyHelper.getString(KnownProperties.WRITE_CL)); - this.readConsistencyLevel = mapToConsistencyLevel(propertyHelper.getString(KnownProperties.READ_CL)); - this.writeConsistencyLevel = mapToConsistencyLevel(propertyHelper.getString(KnownProperties.WRITE_CL)); + this.featureMap = new HashMap<>(); - this.featureMap = new HashMap<>(); + this.defaultForMissingTimestamp = propertyHelper.getLong(KnownProperties.TRANSFORM_REPLACE_MISSING_TS); + this.defaultForMissingString = ""; + } - this.defaultForMissingTimestamp = propertyHelper.getLong(KnownProperties.TRANSFORM_REPLACE_MISSING_TS); - this.defaultForMissingString = ""; - } + @Override + public String getKeyspaceTable() { + return formatName(this.keyspaceName) + "." + formatName(this.tableName); + } - @Override - public String getKeyspaceTable() { - return formatName(this.keyspaceName) + "." 
+ formatName(this.tableName); - } + public void setFeatureMap(Map featureMap) { + this.featureMap = featureMap; + } - public void setFeatureMap(Map featureMap) { this.featureMap = featureMap; } - public Feature getFeature(Featureset featureEnum) { return featureMap.get(featureEnum); } + public Feature getFeature(Featureset featureEnum) { + return featureMap.get(featureEnum); + } - public void setOtherCqlTable(CqlTable otherCqlTable) { - this.otherCqlTable = otherCqlTable; - this.correspondingIndexes = calcCorrespondingIndex(); - this.cqlConversions = CqlConversion.getConversions(this, otherCqlTable); - } - public CqlTable getOtherCqlTable() { return otherCqlTable; } + public void setOtherCqlTable(CqlTable otherCqlTable) { + this.otherCqlTable = otherCqlTable; + this.correspondingIndexes = calcCorrespondingIndex(); + this.cqlConversions = CqlConversion.getConversions(this, otherCqlTable); + } - public boolean isCounterTable() { return isCounterTable; } - public List getCounterIndexes() { return counterIndexes; } + public CqlTable getOtherCqlTable() { + return otherCqlTable; + } - public Class getBindClass(int index) { return bindClasses.get(index); } - public int indexOf(String columnName) { return columnNames.indexOf(columnName); } - public DataType getDataType(String columnName) { return columnNameToCqlTypeMap.get(columnName); } - public DataType getDataType(int index) { return ((index < 0 || index>=columnCqlTypes.size()) ? null : columnCqlTypes.get(index)); } + public boolean isCounterTable() { + return isCounterTable; + } - public MutableCodecRegistry getCodecRegistry() { return (MutableCodecRegistry) cqlSession.getContext().getCodecRegistry(); } + public List getCounterIndexes() { + return counterIndexes; + } - public ConsistencyLevel getReadConsistencyLevel() { return readConsistencyLevel; } - public ConsistencyLevel getWriteConsistencyLevel() { return writeConsistencyLevel; } + public Class getBindClass(int index) { + return bindClasses.get(index); + } - public boolean hasRandomPartitioner() { return hasRandomPartitioner; } - public Integer getFetchSizeInRows() { return propertyHelper.getInteger(KnownProperties.PERF_FETCH_SIZE); } - public Integer getBatchSize() { - Integer prop = propertyHelper.getInteger(KnownProperties.PERF_BATCH_SIZE); - WritetimeTTL f = (WritetimeTTL) getFeature(Featureset.WRITETIME_TTL); - if (isCounterTable || (null != f && f.hasWriteTimestampFilter()) || null == prop || prop < 1) - return 1; - else - return prop; - } + public int indexOf(String columnName) { + return columnNames.indexOf(columnName); + } - private boolean removeMapWithNoValues = propertyHelper - .getBoolean(KnownProperties.TRANSFORM_MAP_REMOVE_KEY_WITH_NO_VALUE); + public DataType getDataType(String columnName) { + return columnNameToCqlTypeMap.get(columnName); + } + + public DataType getDataType(int index) { + return ((index < 0 || index >= columnCqlTypes.size()) ? 
null : columnCqlTypes.get(index)); + } + + public MutableCodecRegistry getCodecRegistry() { + return (MutableCodecRegistry) cqlSession.getContext().getCodecRegistry(); + } + + public ConsistencyLevel getReadConsistencyLevel() { + return readConsistencyLevel; + } + + public ConsistencyLevel getWriteConsistencyLevel() { + return writeConsistencyLevel; + } + + public boolean hasRandomPartitioner() { + return hasRandomPartitioner; + } + + public Integer getFetchSizeInRows() { + return propertyHelper.getInteger(KnownProperties.PERF_FETCH_SIZE); + } + + public Integer getBatchSize() { + Integer prop = propertyHelper.getInteger(KnownProperties.PERF_BATCH_SIZE); + WritetimeTTL f = (WritetimeTTL) getFeature(Featureset.WRITETIME_TTL); + if (isCounterTable || (null != f && f.hasWriteTimestampFilter()) || null == prop || prop < 1) + return 1; + else + return prop; + } + + private boolean removeMapWithNoValues = propertyHelper + .getBoolean(KnownProperties.TRANSFORM_MAP_REMOVE_KEY_WITH_NO_VALUE); // Adds to the current column list based on the name and type of columns already existing in the table // This is useful where a feature is adding a column by name of an existing column. - // If the column is already present, the bind class is added to the return list. - public List extendColumns(List columnNames) { - List columnTypes = columnNames.stream().map(columnName -> this.columnNameToCqlTypeMap.get(columnName)).collect(Collectors.toList()); - return extendColumns(columnNames, columnTypes); - } + // If the column is already present, the bind class is added to the return list. + public List extendColumns(List columnNames) { + List columnTypes = columnNames.stream().map(columnName -> this.columnNameToCqlTypeMap.get(columnName)) + .collect(Collectors.toList()); + return extendColumns(columnNames, columnTypes); + } // Adds to the current column list based on the name and type of columns, where the column may or may not // be on the table. This is useful for adding virtual/computed columns on a SELECT statement. - // If the column is already present, the bind class is added to the return list. - public List extendColumns(List columnNames, List columnTypes) { - List rtn = new ArrayList(); - - if (null==columnNames || null== columnTypes || columnNames.isEmpty() || columnNames.size()!=columnTypes.size()) - throw new IllegalArgumentException("Column name and type must be non-null and non-empty, and must be of the same length"); - - for (int i = 0; i < columnNames.size(); i++) { - String columnName = columnNames.get(i); - DataType columnType = columnTypes.get(i); - - if (this.columnNames.contains(columnName)) { - rtn.add(this.bindClasses.get(this.columnNames.indexOf(columnName))); - continue; - } - - if (null == columnName || columnName.isEmpty() || null == columnType) { - logger.warn("Column name and/or type are null or empty for table " + this.keyspaceName + "." + this.tableName + ". 
Skipping column."); - rtn.add(null); - continue; - } - - this.columnNames.add(columnName); - this.columnCqlTypes.add(columnType); - Class bindClass = CqlData.getBindClass(columnType); - this.bindClasses.add(bindClass); - rtn.add(bindClass); - if (DataTypes.COUNTER.equals(columnType)) { - this.counterIndexes.add(this.columnNames.size() - 1); - this.isCounterTable = true; - } - } - - this.correspondingIndexes = calcCorrespondingIndex(); - this.cqlConversions = CqlConversion.getConversions(this, otherCqlTable); - - return rtn; - } - - @Override - public List getColumnNames(boolean format) { - if (format) return formatNames(this.columnNames); - else return this.columnNames; - } - - public List getPKNames(boolean format) { - if (format) return formatNames(this.pkNames); - else return this.pkNames; - } - public List getPKClasses() {return this.pkClasses;} - - public static List formatNames(List list) { - if (null==list || list.isEmpty()) return list; - return list.stream() - .map(CqlTable::formatName) - .collect(Collectors.toList()); - } - - public static String formatName(String name) { - if (null==name || name.isEmpty()) return name; - if (name.toUpperCase().matches("^[A-Z0-9_]*\\(.*\\)$")) return name; // function - if (name.matches("^\"[^\\s]*\"$")) return name; // already quoted - return CqlIdentifier.fromInternal(name).asCql(true); - } - - public static List unFormatNames(List list) { - if (null==list || list.isEmpty()) return list; - return list.stream() - .map(CqlTable::unFormatName) - .collect(Collectors.toList()); - } - - public static String unFormatName(String name) { - if (null==name || name.isEmpty()) return name; - if (name.matches("^[^\\s\"]+$")) return name; // not quoted, assume unformatted - return CqlIdentifier.fromCql(name).asInternal(); - } - - public List getPartitionKeyNames(boolean format) { - if (format) return formatNames(this.partitionKeyNames); - else return this.partitionKeyNames; - } - - public Object getData(int index, Row row) { - return row.get(index, this.getBindClass(index)); - } - - public int byteCount(int index, Object object) { - if (null==object) return 0; - try { - return getCodecRegistry() - .codecFor(getDataType(index)) - .encode(object, CqlConversion.PROTOCOL_VERSION) - .remaining(); - } catch (IllegalArgumentException | CodecNotFoundException | NullPointerException e) { - throw new IllegalArgumentException("Unable to encode object " + object + " of Class/DataType " + object.getClass().getName() + "/" + getDataType(index) + " for column " + this.columnNames.get(index), e); - } - } - - public Object getAndConvertData(int index, Row row) { - Object thisObject = getData(index, row); - if (null == thisObject) { - return convertNull(index); - } - - if (removeMapWithNoValues && thisObject instanceof Map) { - return removeNullValuesFromMap(thisObject); - } - - CqlConversion cqlConversion = this.cqlConversions.get(index); - if (null == cqlConversion) { - if (logTrace) logger.trace("{} Index:{} not converting:{}",isOrigin?"origin":"target",index,thisObject); - return thisObject; + // If the column is already present, the bind class is added to the return list. 
+ public List extendColumns(List columnNames, List columnTypes) { + List rtn = new ArrayList(); + + if (null == columnNames || null == columnTypes || columnNames.isEmpty() + || columnNames.size() != columnTypes.size()) + throw new IllegalArgumentException( + "Column name and type must be non-null and non-empty, and must be of the same length"); + + for (int i = 0; i < columnNames.size(); i++) { + String columnName = columnNames.get(i); + DataType columnType = columnTypes.get(i); + + if (this.columnNames.contains(columnName)) { + rtn.add(this.bindClasses.get(this.columnNames.indexOf(columnName))); + continue; + } + + if (null == columnName || columnName.isEmpty() || null == columnType) { + logger.warn("Column name and/or type are null or empty for table " + this.keyspaceName + "." + + this.tableName + ". Skipping column."); + rtn.add(null); + continue; + } + + this.columnNames.add(columnName); + this.columnCqlTypes.add(columnType); + Class bindClass = CqlData.getBindClass(columnType); + this.bindClasses.add(bindClass); + rtn.add(bindClass); + if (DataTypes.COUNTER.equals(columnType)) { + this.counterIndexes.add(this.columnNames.size() - 1); + this.isCounterTable = true; + } + } + + this.correspondingIndexes = calcCorrespondingIndex(); + this.cqlConversions = CqlConversion.getConversions(this, otherCqlTable); + + return rtn; + } + + @Override + public List getColumnNames(boolean format) { + if (format) + return formatNames(this.columnNames); + else + return this.columnNames; + } + + public List getPKNames(boolean format) { + if (format) + return formatNames(this.pkNames); + else + return this.pkNames; + } + + public List getPKClasses() { + return this.pkClasses; + } + + public static List formatNames(List list) { + if (null == list || list.isEmpty()) + return list; + return list.stream().map(CqlTable::formatName).collect(Collectors.toList()); + } + + public static String formatName(String name) { + if (null == name || name.isEmpty()) + return name; + if (name.toUpperCase().matches("^[A-Z0-9_]*\\(.*\\)$")) + return name; // function + if (name.matches("^\"[^\\s]*\"$")) + return name; // already quoted + return CqlIdentifier.fromInternal(name).asCql(true); + } + + public static List unFormatNames(List list) { + if (null == list || list.isEmpty()) + return list; + return list.stream().map(CqlTable::unFormatName).collect(Collectors.toList()); + } + + public static String unFormatName(String name) { + if (null == name || name.isEmpty()) + return name; + if (name.matches("^[^\\s\"]+$")) + return name; // not quoted, assume unformatted + return CqlIdentifier.fromCql(name).asInternal(); + } + + public List getPartitionKeyNames(boolean format) { + if (format) + return formatNames(this.partitionKeyNames); + else + return this.partitionKeyNames; + } + + public Object getData(int index, Row row) { + return row.get(index, this.getBindClass(index)); + } + + public int byteCount(int index, Object object) { + if (null == object) + return 0; + try { + return getCodecRegistry().codecFor(getDataType(index)).encode(object, CqlConversion.PROTOCOL_VERSION) + .remaining(); + } catch (IllegalArgumentException | CodecNotFoundException | NullPointerException e) { + throw new IllegalArgumentException( + "Unable to encode object " + object + " of Class/DataType " + object.getClass().getName() + "/" + + getDataType(index) + " for column " + this.columnNames.get(index), + e); + } + } + + public Object getAndConvertData(int index, Row row) { + Object thisObject = getData(index, row); + if (null == thisObject) { + return 
convertNull(index); } - else { - if (logTrace) logger.trace("{} Index:{} converting:{} via CqlConversion:{}",isOrigin?"origin":"target",index,thisObject,cqlConversion); - return cqlConversion.convert(thisObject); - } - } - - private Object removeNullValuesFromMap(Object thisObject) { - Set ms = (((Map) thisObject).entrySet()); - return ms.stream().filter(e -> (e.getValue() != null)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } - - public Object convertNull(int thisIndex) { - // We do not need to convert nulls for non-PK columns - int otherIndex = this.getCorrespondingIndex(thisIndex); - if (!getOtherCqlTable().pkIndexes.contains(otherIndex)) - return null; - - Class c = getOtherCqlTable().bindClasses.get(otherIndex); - if (Objects.equals(c, String.class)) { - return defaultForMissingString; + + if (removeMapWithNoValues && thisObject instanceof Map) { + return removeNullValuesFromMap(thisObject); + } + + CqlConversion cqlConversion = this.cqlConversions.get(index); + if (null == cqlConversion) { + if (logTrace) + logger.trace("{} Index:{} not converting:{}", isOrigin ? "origin" : "target", index, thisObject); + return thisObject; + } else { + if (logTrace) + logger.trace("{} Index:{} converting:{} via CqlConversion:{}", isOrigin ? "origin" : "target", index, + thisObject, cqlConversion); + return cqlConversion.convert(thisObject); } - else if (Objects.equals(c, Instant.class)) { - if (null != defaultForMissingTimestamp) { - return Instant.ofEpochMilli(defaultForMissingTimestamp); - } else { - logger.error("This index {} corresponds to That index {}, which is a primary key column and cannot be null. Consider setting {}.", thisIndex, otherIndex, KnownProperties.TRANSFORM_REPLACE_MISSING_TS); - return null; - } - } - - logger.error("This index {} corresponds to That index {}, which is a primary key column and cannot be null.", thisIndex, otherIndex); - return null; - } - - public Integer getCorrespondingIndex(int index) { - if (index < 0) return index; - return this.correspondingIndexes.get(index); - } - private List calcCorrespondingIndex() { - List rtn = new ArrayList<>(); - List thisColumnNames = this.getColumnNames(false); - List thatColumnNames = this.otherCqlTable.getColumnNames(false); - Map thisToThatNameMap = DataUtility.getThisToThatColumnNameMap(propertyHelper, this, otherCqlTable); - - for (String thisColumnName : thisColumnNames) { + } + + private Object removeNullValuesFromMap(Object thisObject) { + Set ms = (((Map) thisObject).entrySet()); + return ms.stream().filter(e -> (e.getValue() != null)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + public Object convertNull(int thisIndex) { + // We do not need to convert nulls for non-PK columns + int otherIndex = this.getCorrespondingIndex(thisIndex); + if (!getOtherCqlTable().pkIndexes.contains(otherIndex)) + return null; + + Class c = getOtherCqlTable().bindClasses.get(otherIndex); + if (Objects.equals(c, String.class)) { + return defaultForMissingString; + } else if (Objects.equals(c, Instant.class)) { + if (null != defaultForMissingTimestamp) { + return Instant.ofEpochMilli(defaultForMissingTimestamp); + } else { + logger.error( + "This index {} corresponds to That index {}, which is a primary key column and cannot be null. 
Consider setting {}.", + thisIndex, otherIndex, KnownProperties.TRANSFORM_REPLACE_MISSING_TS); + return null; + } + } + + logger.error("This index {} corresponds to That index {}, which is a primary key column and cannot be null.", + thisIndex, otherIndex); + return null; + } + + public Integer getCorrespondingIndex(int index) { + if (index < 0) + return index; + return this.correspondingIndexes.get(index); + } + + private List calcCorrespondingIndex() { + List rtn = new ArrayList<>(); + List thisColumnNames = this.getColumnNames(false); + List thatColumnNames = this.otherCqlTable.getColumnNames(false); + Map thisToThatNameMap = DataUtility.getThisToThatColumnNameMap(propertyHelper, this, + otherCqlTable); + + for (String thisColumnName : thisColumnNames) { // Iterate over the thisColumnNames. If there is an entry on the thisToThatNameMap // then there a corresponding column name, and we place the column index on the list. // Otherwise, we place -1, indicating this column name is not present in the other - if (!thisToThatNameMap.containsKey(thisColumnName)) { - rtn.add(-1); + if (!thisToThatNameMap.containsKey(thisColumnName)) { + rtn.add(-1); + } else { + rtn.add(thatColumnNames.indexOf(thisToThatNameMap.get(thisColumnName))); } - else { - rtn.add(thatColumnNames.indexOf(thisToThatNameMap.get(thisColumnName))); - } - } - if (logDebug) logger.debug("Corresponding index for {}: {}-{}",isOrigin?"origin":"target",columnNames,rtn); - return rtn; - } - - // This facilitates unit testing - protected Metadata fetchMetadataFromSession(CqlSession cqlSession) { - return cqlSession.getMetadata(); - } - - private void setCqlMetadata(CqlSession cqlSession) { - Metadata metadata = fetchMetadataFromSession(cqlSession); - - String partitionerName = metadata.getTokenMap().get().getPartitionerName(); - if (null != partitionerName && partitionerName.endsWith("RandomPartitioner")) - this.hasRandomPartitioner = true; - else - this.hasRandomPartitioner = false; - - Optional keyspaceMetadataOpt = metadata.getKeyspace(formatName(this.keyspaceName)); - if (!keyspaceMetadataOpt.isPresent()) { - throw new IllegalArgumentException("Keyspace not found: " + this.keyspaceName); - } - KeyspaceMetadata keyspaceMetadata = keyspaceMetadataOpt.get(); - - Optional tableMetadataOpt = keyspaceMetadata.getTable(formatName(this.tableName)); - if (!tableMetadataOpt.isPresent()) { - throw new IllegalArgumentException("Table not found: " + tableName); - } - TableMetadata tableMetadata = tableMetadataOpt.get(); - - this.cqlPrimaryKey = new ArrayList<>(); - this.cqlAllColumns = new ArrayList<>(); - - this.cqlPartitionKey = tableMetadata.getPartitionKey(); - this.cqlPrimaryKey.addAll(this.cqlPartitionKey); - this.cqlPrimaryKey.addAll(tableMetadata.getClusteringColumns().keySet()); - this.cqlAllColumns.addAll(this.cqlPrimaryKey); - - boolean extractJsonExclusive = propertyHelper.getBoolean(KnownProperties.EXTRACT_JSON_EXCLUSIVE); - String extractColumnName = ""; - if (extractJsonExclusive) { - String originColumnName = unFormatName( - propertyHelper.getString(KnownProperties.EXTRACT_JSON_ORIGIN_COLUMN_NAME)); - String targetColumnName = unFormatName( - propertyHelper.getString(KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING)); - extractColumnName = isOrigin ? 
originColumnName : targetColumnName; - if (!extractColumnName.isBlank() && extractColumnName.contains(":")) { - extractColumnName = extractColumnName.split("\\:")[1]; - } - } - String columnName = extractColumnName; - this.cqlAllColumns = tableMetadata.getColumns().values().stream().filter(md -> !this.cqlAllColumns.contains(md)) - .filter(md -> !extractJsonExclusive || md.getName().asCql(true).endsWith(columnName)) - .collect(Collectors.toCollection(() -> this.cqlAllColumns)); + } + if (logDebug) + logger.debug("Corresponding index for {}: {}-{}", isOrigin ? "origin" : "target", columnNames, rtn); + return rtn; + } + + // This facilitates unit testing + protected Metadata fetchMetadataFromSession(CqlSession cqlSession) { + return cqlSession.getMetadata(); + } + + private void setCqlMetadata(CqlSession cqlSession) { + Metadata metadata = fetchMetadataFromSession(cqlSession); + + String partitionerName = metadata.getTokenMap().get().getPartitionerName(); + if (null != partitionerName && partitionerName.endsWith("RandomPartitioner")) + this.hasRandomPartitioner = true; + else + this.hasRandomPartitioner = false; + + Optional keyspaceMetadataOpt = metadata.getKeyspace(formatName(this.keyspaceName)); + if (!keyspaceMetadataOpt.isPresent()) { + throw new IllegalArgumentException("Keyspace not found: " + this.keyspaceName); + } + KeyspaceMetadata keyspaceMetadata = keyspaceMetadataOpt.get(); + + Optional tableMetadataOpt = keyspaceMetadata.getTable(formatName(this.tableName)); + if (!tableMetadataOpt.isPresent()) { + throw new IllegalArgumentException("Table not found: " + tableName); + } + TableMetadata tableMetadata = tableMetadataOpt.get(); + + this.cqlPrimaryKey = new ArrayList<>(); + this.cqlAllColumns = new ArrayList<>(); + + this.cqlPartitionKey = tableMetadata.getPartitionKey(); + this.cqlPrimaryKey.addAll(this.cqlPartitionKey); + this.cqlPrimaryKey.addAll(tableMetadata.getClusteringColumns().keySet()); + this.cqlAllColumns.addAll(this.cqlPrimaryKey); + + boolean extractJsonExclusive = propertyHelper.getBoolean(KnownProperties.EXTRACT_JSON_EXCLUSIVE); + String extractColumnName = ""; + if (extractJsonExclusive) { + String originColumnName = unFormatName( + propertyHelper.getString(KnownProperties.EXTRACT_JSON_ORIGIN_COLUMN_NAME)); + String targetColumnName = unFormatName( + propertyHelper.getString(KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING)); + extractColumnName = isOrigin ? 
originColumnName : targetColumnName; + if (!extractColumnName.isBlank() && extractColumnName.contains(":")) { + extractColumnName = extractColumnName.split("\\:")[1]; + } + } + String columnName = extractColumnName; + this.cqlAllColumns = tableMetadata.getColumns().values().stream().filter(md -> !this.cqlAllColumns.contains(md)) + .filter(md -> !extractJsonExclusive || md.getName().asCql(true).endsWith(columnName)) + .collect(Collectors.toCollection(() -> this.cqlAllColumns)); this.writetimeTTLColumns = tableMetadata.getColumns().values().stream() - .filter(columnMetadata -> canColumnHaveTTLorWritetime(tableMetadata, columnMetadata)) - .map(ColumnMetadata::getName) - .map(CqlIdentifier::asInternal) + .filter(columnMetadata -> canColumnHaveTTLorWritetime(tableMetadata, columnMetadata)) + .map(ColumnMetadata::getName).map(CqlIdentifier::asInternal).collect(Collectors.toList()); + + this.columnNameToCqlTypeMap = this.cqlAllColumns.stream().collect( + Collectors.toMap(columnMetadata -> columnMetadata.getName().asInternal(), ColumnMetadata::getType)); + } + + private boolean canColumnHaveTTLorWritetime(TableMetadata tableMetadata, ColumnMetadata columnMetadata) { + DataType dataType = columnMetadata.getType(); + boolean isKeyColumn = tableMetadata.getPartitionKey().contains(columnMetadata) + || tableMetadata.getClusteringColumns().containsKey(columnMetadata); + + if (isKeyColumn) + return false; + if (CqlData.isPrimitive(dataType)) + return true; + if (dataType instanceof TupleType) + return true; // TODO: WRITETIME and TTL functions are very slow on Tuples in cqlsh...should they be + // supported here? + if (CqlData.isFrozen(dataType)) + return true; + return false; + } + + public List getWritetimeTTLColumns() { + return this.writetimeTTLColumns.stream().filter(columnName -> this.columnNames.contains(columnName)) .collect(Collectors.toList()); + } + + public boolean isWritetimeTTLColumn(String columnName) { + return this.writetimeTTLColumns.contains(columnName); + } + + public boolean hasUnfrozenList() { + return this.cqlAllColumns.stream() + .filter(columnMetadata -> columnNames.contains(columnMetadata.getName().asInternal()) + && columnMetadata.getType() instanceof ListType) + .anyMatch(columnMetadata -> !CqlData.isFrozen(columnMetadata.getType())); + } + + protected static ConsistencyLevel mapToConsistencyLevel(String level) { + ConsistencyLevel retVal = ConsistencyLevel.LOCAL_QUORUM; + if (StringUtils.isNotEmpty(level)) { + switch (level.toUpperCase()) { + case "ANY": + retVal = ConsistencyLevel.ANY; + break; + case "ONE": + retVal = ConsistencyLevel.ONE; + break; + case "TWO": + retVal = ConsistencyLevel.TWO; + break; + case "THREE": + retVal = ConsistencyLevel.THREE; + break; + case "QUORUM": + retVal = ConsistencyLevel.QUORUM; + break; + case "LOCAL_ONE": + retVal = ConsistencyLevel.LOCAL_ONE; + break; + case "EACH_QUORUM": + retVal = ConsistencyLevel.EACH_QUORUM; + break; + case "SERIAL": + retVal = ConsistencyLevel.SERIAL; + break; + case "LOCAL_SERIAL": + retVal = ConsistencyLevel.LOCAL_SERIAL; + break; + case "ALL": + retVal = ConsistencyLevel.ALL; + break; + } + } - this.columnNameToCqlTypeMap = this.cqlAllColumns.stream() - .collect(Collectors.toMap( - columnMetadata -> columnMetadata.getName().asInternal(), - ColumnMetadata::getType - )); - } - - private boolean canColumnHaveTTLorWritetime(TableMetadata tableMetadata, ColumnMetadata columnMetadata) { - DataType dataType = columnMetadata.getType(); - boolean isKeyColumn = tableMetadata.getPartitionKey().contains(columnMetadata) || 
- tableMetadata.getClusteringColumns().containsKey(columnMetadata); - - if (isKeyColumn) return false; - if (CqlData.isPrimitive(dataType)) return true; - if (dataType instanceof TupleType) return true; // TODO: WRITETIME and TTL functions are very slow on Tuples in cqlsh...should they be supported here? - if (CqlData.isFrozen(dataType)) return true; - return false; - } - - public List getWritetimeTTLColumns() { - return this.writetimeTTLColumns.stream() - .filter(columnName -> this.columnNames.contains(columnName)) - .collect(Collectors.toList()); - } - - public boolean isWritetimeTTLColumn(String columnName) { - return this.writetimeTTLColumns.contains(columnName); - } - - public boolean hasUnfrozenList() { - return this.cqlAllColumns.stream() - .filter(columnMetadata -> - columnNames.contains(columnMetadata.getName().asInternal()) && - columnMetadata.getType() instanceof ListType) - .anyMatch(columnMetadata -> !CqlData.isFrozen(columnMetadata.getType())); - } - - protected static ConsistencyLevel mapToConsistencyLevel(String level) { - ConsistencyLevel retVal = ConsistencyLevel.LOCAL_QUORUM; - if (StringUtils.isNotEmpty(level)) { - switch (level.toUpperCase()) { - case "ANY": - retVal = ConsistencyLevel.ANY; - break; - case "ONE": - retVal = ConsistencyLevel.ONE; - break; - case "TWO": - retVal = ConsistencyLevel.TWO; - break; - case "THREE": - retVal = ConsistencyLevel.THREE; - break; - case "QUORUM": - retVal = ConsistencyLevel.QUORUM; - break; - case "LOCAL_ONE": - retVal = ConsistencyLevel.LOCAL_ONE; - break; - case "EACH_QUORUM": - retVal = ConsistencyLevel.EACH_QUORUM; - break; - case "SERIAL": - retVal = ConsistencyLevel.SERIAL; - break; - case "LOCAL_SERIAL": - retVal = ConsistencyLevel.LOCAL_SERIAL; - break; - case "ALL": - retVal = ConsistencyLevel.ALL; - break; - } - } - - return retVal; - } + return retVal; + } } diff --git a/src/main/java/com/datastax/cdm/schema/Table.java b/src/main/java/com/datastax/cdm/schema/Table.java index ba6df211..e7f0fdf8 100644 --- a/src/main/java/com/datastax/cdm/schema/Table.java +++ b/src/main/java/com/datastax/cdm/schema/Table.java @@ -15,10 +15,10 @@ */ package com.datastax.cdm.schema; -import com.datastax.oss.driver.api.core.type.DataType; - import java.util.List; +import com.datastax.oss.driver.api.core.type.DataType; + public interface Table { String getKeyspaceName(); diff --git a/src/test/java/com/datastax/cdm/cql/CommonMocks.java b/src/test/java/com/datastax/cdm/cql/CommonMocks.java index 1e975174..75d64455 100644 --- a/src/test/java/com/datastax/cdm/cql/CommonMocks.java +++ b/src/test/java/com/datastax/cdm/cql/CommonMocks.java @@ -15,6 +15,24 @@ */ package com.datastax.cdm.cql; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.when; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.net.InetAddress; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.util.*; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + import com.datastax.cdm.data.*; import com.datastax.cdm.data.Record; import com.datastax.cdm.feature.*; @@ -29,69 +47,82 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; -import org.mockito.Mock; -import 
org.mockito.MockitoAnnotations; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.net.InetAddress; -import java.nio.ByteBuffer; -import java.time.Duration; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalTime; -import java.util.*; -import java.util.concurrent.CompletionStage; -import java.util.stream.Collectors; - -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.when; /** - * This class is a bit bonkers, to be honest. It is basically trying to provide a simulation of - * Cassandra plus some of the other classes in the CDM codebase. It is used by the unit tests, - * and originally designed for statements but it could well be useful in other contexts as well. + * This class is a bit bonkers, to be honest. It is basically trying to provide a simulation of Cassandra plus some of + * the other classes in the CDM codebase. It is used by the unit tests, and originally designed for statements but it + * could well be useful in other contexts as well. */ public class CommonMocks { private boolean hasExplodeMap; private boolean hasConstantColumns; private boolean hasCounters; - @Mock public IPropertyHelper propertyHelper; - - @Mock public EnhancedSession originSession; - @Mock public CqlSession originCqlSession; - @Mock public CqlTable originTable; - @Mock public ResultSet originResultSet; - @Mock public MutableCodecRegistry originCodecRegistry; - @Mock public TypeCodec originCodec; - @Mock public List originConversionList; - @Mock public CqlConversion originCqlConversion; - - @Mock public EnhancedSession targetSession; - @Mock public CqlSession targetCqlSession; - @Mock public CqlTable targetTable; - @Mock public ResultSet targetResultSet; - @Mock public MutableCodecRegistry targetCodecRegistry; - @Mock public TypeCodec targetCodec; - @Mock public List targetConversionList; - @Mock public CqlConversion targetCqlConversion; - - @Mock public ConstantColumns constantColumnsFeature; - @Mock public ExplodeMap explodeMapFeature; - @Mock public ExtractJson extractJsonFeature; - @Mock public WritetimeTTL writetimeTTLFeature; - @Mock public OriginFilterCondition originFilterConditionFeature; - - @Mock public PreparedStatement preparedStatement; - @Mock public BoundStatement boundStatement; - @Mock public CompletionStage completionStage; - - @Mock public EnhancedPK pk; - @Mock public PKFactory pkFactory; - @Mock public Record record; - @Mock public Row originRow; - @Mock public Row targetRow; + @Mock + public IPropertyHelper propertyHelper; + + @Mock + public EnhancedSession originSession; + @Mock + public CqlSession originCqlSession; + @Mock + public CqlTable originTable; + @Mock + public ResultSet originResultSet; + @Mock + public MutableCodecRegistry originCodecRegistry; + @Mock + public TypeCodec originCodec; + @Mock + public List originConversionList; + @Mock + public CqlConversion originCqlConversion; + + @Mock + public EnhancedSession targetSession; + @Mock + public CqlSession targetCqlSession; + @Mock + public CqlTable targetTable; + @Mock + public ResultSet targetResultSet; + @Mock + public MutableCodecRegistry targetCodecRegistry; + @Mock + public TypeCodec targetCodec; + @Mock + public List targetConversionList; + @Mock + public CqlConversion targetCqlConversion; + + @Mock + public ConstantColumns constantColumnsFeature; + @Mock + public ExplodeMap explodeMapFeature; + @Mock + public ExtractJson extractJsonFeature; + @Mock + public WritetimeTTL writetimeTTLFeature; + @Mock + public OriginFilterCondition 
originFilterConditionFeature; + + @Mock + public PreparedStatement preparedStatement; + @Mock + public BoundStatement boundStatement; + @Mock + public CompletionStage completionStage; + + @Mock + public EnhancedPK pk; + @Mock + public PKFactory pkFactory; + @Mock + public Record record; + @Mock + public Row originRow; + @Mock + public Row targetRow; public String originKeyspaceName; public String originTableName; @@ -146,7 +177,7 @@ public class CommonMocks { public List constantColumnTypes; public void commonSetup() { - commonSetup(false,false, false); + commonSetup(false, false, false); } public void commonSetup(boolean hasExplodeMap, boolean hasConstantColumns, boolean hasCounters) { @@ -154,7 +185,8 @@ public void commonSetup(boolean hasExplodeMap, boolean hasConstantColumns, boole commonSetupWithoutDefaultClassVariables(hasExplodeMap, hasConstantColumns, hasCounters); } - public void commonSetupWithoutDefaultClassVariables(boolean hasExplodeMap, boolean hasConstantColumns, boolean hasCounters) { + public void commonSetupWithoutDefaultClassVariables(boolean hasExplodeMap, boolean hasConstantColumns, + boolean hasCounters) { if (hasCounters && (hasExplodeMap || hasConstantColumns)) { throw new IllegalArgumentException("Counters cannot be used with ExplodeMap or ConstantColumns"); } @@ -180,23 +212,23 @@ public void commonSetupWithoutDefaultClassVariables(boolean hasExplodeMap, boole } public void commonSetupWithoutDefaultClassVariables() { - commonSetupWithoutDefaultClassVariables(false,false,false); + commonSetupWithoutDefaultClassVariables(false, false, false); } public void defaultClassVariables() { originKeyspaceName = "origin_ks"; originTableName = "table_name"; - originPartitionKey = Arrays.asList("part_key1","part_key2"); - originPartitionKeyTypes = Arrays.asList(DataTypes.TEXT,DataTypes.TEXT); + originPartitionKey = Arrays.asList("part_key1", "part_key2"); + originPartitionKeyTypes = Arrays.asList(DataTypes.TEXT, DataTypes.TEXT); originClusteringKey = Collections.singletonList("cluster_key"); originClusteringKeyTypes = Collections.singletonList(DataTypes.TEXT); filterCol = "filter_col"; filterColType = DataTypes.TEXT; vectorCol = "vector_col"; - vectorColType = DataTypes.vectorOf(DataTypes.FLOAT,3); + vectorColType = DataTypes.vectorOf(DataTypes.FLOAT, 3); originValueColumns = Arrays.asList("value1", filterCol, vectorCol); originValueColumnTypes = Arrays.asList(DataTypes.TEXT, filterColType, vectorColType); - originCounterColumns = Arrays.asList("counter1","counter2"); + originCounterColumns = Arrays.asList("counter1", "counter2"); originToTargetNameList = Collections.emptyList(); targetKeyspaceName = "target_ks"; @@ -213,8 +245,8 @@ public void defaultClassVariables() { explodeMapValue = "map_value"; explodeMapValueType = DataTypes.TEXT; - constantColumns = Arrays.asList("const1","const2","const3"); - constantColumnValues = Arrays.asList("'abcd'","1234","543"); + constantColumns = Arrays.asList("const1", "const2", "const3"); + constantColumnValues = Arrays.asList("'abcd'", "1234", "543"); constantColumnTypes = Arrays.asList(DataTypes.TEXT, DataTypes.INT, DataTypes.BIGINT); } @@ -222,7 +254,7 @@ public void defaultClassVariables() { public void setCompoundClassVariables() { originKeyspaceTableName = originKeyspaceName + "." + originTableName; targetKeyspaceTableName = targetKeyspaceName + "." 
+ targetTableName; - explodeMapType = DataTypes.mapOf(explodeMapKeyType,explodeMapValueType); + explodeMapType = DataTypes.mapOf(explodeMapKeyType, explodeMapValueType); } public void setOriginVariables() { @@ -232,8 +264,7 @@ public void setOriginVariables() { originColumnNames = new ArrayList<>(originPrimaryKey); if (hasCounters) { originColumnNames.addAll(originCounterColumns); - originCounterIndexes = originCounterColumns.stream() - .map(originColumnNames::indexOf) + originCounterIndexes = originCounterColumns.stream().map(originColumnNames::indexOf) .collect(Collectors.toList()); } else { @@ -244,7 +275,7 @@ public void setOriginVariables() { originColumnTypes = new ArrayList<>(originPartitionKeyTypes); originColumnTypes.addAll(originClusteringKeyTypes); if (hasCounters) { - originColumnTypes.addAll(Collections.nCopies(originCounterColumns.size(),DataTypes.COUNTER)); + originColumnTypes.addAll(Collections.nCopies(originCounterColumns.size(), DataTypes.COUNTER)); } else { originColumnTypes.addAll(originValueColumnTypes); } @@ -316,13 +347,20 @@ public void setOriginTableWhens() { } public void setTargetVariables() { - if (null==targetPartitionKey || targetPartitionKey.isEmpty()) targetPartitionKey = new ArrayList<>(originPartitionKey); - if (null==targetClusteringKey || targetClusteringKey.isEmpty()) targetClusteringKey = new ArrayList<>(originClusteringKey); - if (null==targetValueColumns || targetValueColumns.isEmpty()) targetValueColumns = new ArrayList<>(originValueColumns); - if (null==targetPartitionKeyTypes || targetPartitionKeyTypes.isEmpty()) targetPartitionKeyTypes = new ArrayList<>(originPartitionKeyTypes); - if (null==targetClusteringKeyTypes || targetClusteringKeyTypes.isEmpty()) targetClusteringKeyTypes = new ArrayList<>(originClusteringKeyTypes); - if (null==targetValueColumnTypes || targetValueColumnTypes.isEmpty()) targetValueColumnTypes = new ArrayList<>(originValueColumnTypes); - if (null==targetCounterColumns || targetCounterColumns.isEmpty()) targetCounterColumns = new ArrayList<>(originCounterColumns); + if (null == targetPartitionKey || targetPartitionKey.isEmpty()) + targetPartitionKey = new ArrayList<>(originPartitionKey); + if (null == targetClusteringKey || targetClusteringKey.isEmpty()) + targetClusteringKey = new ArrayList<>(originClusteringKey); + if (null == targetValueColumns || targetValueColumns.isEmpty()) + targetValueColumns = new ArrayList<>(originValueColumns); + if (null == targetPartitionKeyTypes || targetPartitionKeyTypes.isEmpty()) + targetPartitionKeyTypes = new ArrayList<>(originPartitionKeyTypes); + if (null == targetClusteringKeyTypes || targetClusteringKeyTypes.isEmpty()) + targetClusteringKeyTypes = new ArrayList<>(originClusteringKeyTypes); + if (null == targetValueColumnTypes || targetValueColumnTypes.isEmpty()) + targetValueColumnTypes = new ArrayList<>(originValueColumnTypes); + if (null == targetCounterColumns || targetCounterColumns.isEmpty()) + targetCounterColumns = new ArrayList<>(originCounterColumns); targetPrimaryKey = new ArrayList<>(targetPartitionKey); targetPrimaryKey.addAll(targetClusteringKey); @@ -330,8 +368,7 @@ public void setTargetVariables() { targetColumnNames = new ArrayList<>(targetPrimaryKey); if (hasCounters) { targetColumnNames.addAll(targetCounterColumns); - targetCounterIndexes = targetCounterColumns.stream() - .map(targetColumnNames::indexOf) + targetCounterIndexes = targetCounterColumns.stream().map(targetColumnNames::indexOf) .collect(Collectors.toList()); } else { 
targetColumnNames.addAll(targetValueColumns); @@ -341,7 +378,7 @@ public void setTargetVariables() { targetColumnTypes = new ArrayList<>(targetPartitionKeyTypes); targetColumnTypes.addAll(targetClusteringKeyTypes); if (hasCounters) { - targetColumnTypes.addAll(Collections.nCopies(targetCounterColumns.size(),DataTypes.COUNTER)); + targetColumnTypes.addAll(Collections.nCopies(targetCounterColumns.size(), DataTypes.COUNTER)); } else { targetColumnTypes.addAll(targetValueColumnTypes); } @@ -495,40 +532,70 @@ public void setPKAndRecordWhens() { public static Object getSampleData(DataType type) { CqlData.Type cqlDataType = CqlData.toType(type); switch (cqlDataType) { - case PRIMITIVE: - if (type.equals(DataTypes.BOOLEAN)) return true; - if (type.equals(DataTypes.TINYINT)) return (byte)1; - if (type.equals(DataTypes.SMALLINT)) return (short)1; - if (type.equals(DataTypes.INT)) return 1; - if (type.equals(DataTypes.BIGINT)) return 1L; - if (type.equals(DataTypes.FLOAT)) return 1.0f; - if (type.equals(DataTypes.DOUBLE)) return 1.0d; - if (type.equals(DataTypes.DECIMAL)) return new BigDecimal(1); - if (type.equals(DataTypes.UUID)) return UUID.randomUUID(); - if (type.equals(DataTypes.INET)) return InetAddress.getLoopbackAddress(); - if (type.equals(DataTypes.TIMESTAMP)) return Instant.now(); - if (type.equals(DataTypes.TIME)) return LocalTime.now(); - if (type.equals(DataTypes.DATE)) return LocalDate.now(); - if (type.equals(DataTypes.DURATION)) return Duration.ofSeconds(1); - if (type.equals(DataTypes.BLOB)) return ByteBuffer.wrap("sample_data".getBytes()); - if (type.equals(DataTypes.ASCII)) return "sample_data"; - if (type.equals(DataTypes.TEXT)) return "sample_data"; - if (type.equals(DataTypes.VARINT)) return BigInteger.ONE; - if (type.equals(DataTypes.COUNTER)) return 1L; - if (type.equals(DataTypes.TIMEUUID)) return UUID.randomUUID(); - break; - case LIST: return Arrays.asList("1","2","3"); - case SET: return new HashSet(Arrays.asList("1","2","3")); - case MAP: return new HashMap() {{put("1","one");put("2","two");put("3","three");}}; - case VECTOR: return CqlVector.newInstance(1.1,2.2,3.3); + case PRIMITIVE: + if (type.equals(DataTypes.BOOLEAN)) + return true; + if (type.equals(DataTypes.TINYINT)) + return (byte) 1; + if (type.equals(DataTypes.SMALLINT)) + return (short) 1; + if (type.equals(DataTypes.INT)) + return 1; + if (type.equals(DataTypes.BIGINT)) + return 1L; + if (type.equals(DataTypes.FLOAT)) + return 1.0f; + if (type.equals(DataTypes.DOUBLE)) + return 1.0d; + if (type.equals(DataTypes.DECIMAL)) + return new BigDecimal(1); + if (type.equals(DataTypes.UUID)) + return UUID.randomUUID(); + if (type.equals(DataTypes.INET)) + return InetAddress.getLoopbackAddress(); + if (type.equals(DataTypes.TIMESTAMP)) + return Instant.now(); + if (type.equals(DataTypes.TIME)) + return LocalTime.now(); + if (type.equals(DataTypes.DATE)) + return LocalDate.now(); + if (type.equals(DataTypes.DURATION)) + return Duration.ofSeconds(1); + if (type.equals(DataTypes.BLOB)) + return ByteBuffer.wrap("sample_data".getBytes()); + if (type.equals(DataTypes.ASCII)) + return "sample_data"; + if (type.equals(DataTypes.TEXT)) + return "sample_data"; + if (type.equals(DataTypes.VARINT)) + return BigInteger.ONE; + if (type.equals(DataTypes.COUNTER)) + return 1L; + if (type.equals(DataTypes.TIMEUUID)) + return UUID.randomUUID(); + break; + case LIST: + return Arrays.asList("1", "2", "3"); + case SET: + return new HashSet(Arrays.asList("1", "2", "3")); + case MAP: + return new HashMap() { + { + put("1", "one"); + 
put("2", "two"); + put("3", "three"); + } + }; + case VECTOR: + return CqlVector.newInstance(1.1, 2.2, 3.3); } - return "DataType "+type+" is not supported, so returning a String"; + return "DataType " + type + " is not supported, so returning a String"; } public String keyEqualsBindJoinedWithAND(List bindList) { StringBuilder sb = new StringBuilder(); - for (int i=0; i0) { + for (int i = 0; i < bindList.size(); i++) { + if (i > 0) { sb.append(" AND "); } String key = bindList.get(i); diff --git a/src/test/java/com/datastax/cdm/cql/codec/BIGINT_StringCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/BIGINT_StringCodecTest.java index 8c14d0e0..22925e51 100644 --- a/src/test/java/com/datastax/cdm/cql/codec/BIGINT_StringCodecTest.java +++ b/src/test/java/com/datastax/cdm/cql/codec/BIGINT_StringCodecTest.java @@ -15,17 +15,18 @@ */ package com.datastax.cdm.cql.codec; -import com.datastax.cdm.data.CqlConversion; -import com.datastax.cdm.properties.PropertyHelper; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import java.nio.ByteBuffer; + import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import java.nio.ByteBuffer; +import com.datastax.cdm.data.CqlConversion; +import com.datastax.cdm.properties.PropertyHelper; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; class BIGINT_StringCodecTest { @@ -88,9 +89,9 @@ void format_ShouldFormatValueAsString() { } @Test - // The test seems trivial because we are basically sending in a - // number converted to a string, expecting it to convert that to a number - // and return us the number as a string + // The test seems trivial because we are basically sending in a + // number converted to a string, expecting it to convert that to a number + // and return us the number as a string void parse_ShouldParseStringToValueAndReturnAsString() { String valueAsString = "9223372036854775807"; String result = codec.parse(valueAsString); @@ -98,11 +99,10 @@ void parse_ShouldParseStringToValueAndReturnAsString() { } @Test - // Slightly more interesting test, we are sending in a string that is not - // a number, expecting it throw a IllegalArgumentException + // Slightly more interesting test, we are sending in a string that is not + // a number, expecting it throw a IllegalArgumentException void parse_ShouldThrowIllegalArgumentException_WhenValueIsNotANumber() { String valueAsString = "not a number"; Assertions.assertThrows(IllegalArgumentException.class, () -> codec.parse(valueAsString)); } } - diff --git a/src/test/java/com/datastax/cdm/cql/codec/CodecFactoryTest.java b/src/test/java/com/datastax/cdm/cql/codec/CodecFactoryTest.java index 7a92ac65..ddcc8b28 100644 --- a/src/test/java/com/datastax/cdm/cql/codec/CodecFactoryTest.java +++ b/src/test/java/com/datastax/cdm/cql/codec/CodecFactoryTest.java @@ -15,6 +15,16 @@ */ package com.datastax.cdm.cql.codec; +import static org.junit.jupiter.api.Assertions.*; + +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; + import com.datastax.cdm.data.MockitoExtension; import 
 import com.datastax.cdm.properties.PropertyHelper;
 import com.datastax.dse.driver.internal.core.type.codec.geometry.LineStringCodec;
@@ -22,15 +32,6 @@
 import com.datastax.dse.driver.internal.core.type.codec.geometry.PolygonCodec;
 import com.datastax.dse.driver.internal.core.type.codec.time.DateRangeCodec;
 import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-
-import java.util.List;
-
-import static org.junit.jupiter.api.Assertions.*;

 @ExtendWith(MockitoExtension.class)
 class CodecFactoryTest {
@@ -39,7 +40,7 @@ class CodecFactoryTest {

     @BeforeEach
     void setUp() {
-        //Mockito.when(propertyHelper.getString("timestamp.format")).thenReturn("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
+        // Mockito.when(propertyHelper.getString("timestamp.format")).thenReturn("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
     }

     @Test
@@ -89,14 +90,14 @@ void getCodecPair_ShouldReturnCorrectCodecsForTimestampStringMillis() {
         assertTrue(codecs.get(1) instanceof TEXTMillis_InstantCodec);
     }

-// @Test
-// void getCodecPair_ShouldReturnCorrectCodecsForTimestampStringFormat() {
-// Mockito.when(propertyHelper.getString("timestamp.format")).thenReturn("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
-// List<TypeCodec<?>> codecs = CodecFactory.getCodecPair(propertyHelper, Codecset.TIMESTAMP_STRING_FORMAT);
-// assertFalse(codecs.isEmpty());
-// assertTrue(codecs.get(0) instanceof TIMESTAMP_StringFormatCodec);
-// assertTrue(codecs.get(1) instanceof TEXTFormat_InstantCodec);
-// }
+    // @Test
+    // void getCodecPair_ShouldReturnCorrectCodecsForTimestampStringFormat() {
+    // Mockito.when(propertyHelper.getString("timestamp.format")).thenReturn("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
+    // List<TypeCodec<?>> codecs = CodecFactory.getCodecPair(propertyHelper, Codecset.TIMESTAMP_STRING_FORMAT);
+    // assertFalse(codecs.isEmpty());
+    // assertTrue(codecs.get(0) instanceof TIMESTAMP_StringFormatCodec);
+    // assertTrue(codecs.get(1) instanceof TEXTFormat_InstantCodec);
+    // }

     @Test
     void getCodecPair_ShouldReturnCorrectCodecsForPointType() {
diff --git a/src/test/java/com/datastax/cdm/cql/codec/CodecTestHelper.java b/src/test/java/com/datastax/cdm/cql/codec/CodecTestHelper.java
index f7c4c029..011b90f9 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/CodecTestHelper.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/CodecTestHelper.java
@@ -15,14 +15,15 @@
  */
 package com.datastax.cdm.cql.codec;

-import org.junit.jupiter.api.Assertions;
-
 import java.nio.ByteBuffer;

+import org.junit.jupiter.api.Assertions;
+
 public class CodecTestHelper {
     public static void assertByteBufferEquals(ByteBuffer expected, ByteBuffer actual) {
         Assertions.assertEquals(expected.remaining(), actual.remaining(),
-                () -> String.format("ByteBuffers have different remaining bytes:%nExpected byte[]: %s%nActual byte[]: %s",
+                () -> String.format(
+                        "ByteBuffers have different remaining bytes:%nExpected byte[]: %s%nActual byte[]: %s",
                         byteBufferToHexString(expected), byteBufferToHexString(actual)));

         Assertions.assertTrue(expected.equals(actual),
diff --git a/src/test/java/com/datastax/cdm/cql/codec/DATERANGETYPE_CodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/DATERANGETYPE_CodecTest.java
index 807d1495..d0fb7af7 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/DATERANGETYPE_CodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/DATERANGETYPE_CodecTest.java
@@ -15,11 +15,8 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.dse.driver.api.core.data.time.DateRange;
-import com.datastax.dse.driver.internal.core.type.codec.time.DateRangeCodec;
-import com.datastax.oss.driver.api.core.ProtocolVersion;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+
 import java.nio.ByteBuffer;
 import java.text.ParseException;
 import java.time.ZoneOffset;
@@ -28,7 +25,12 @@
 import java.time.format.DateTimeFormatterBuilder;
 import java.time.temporal.ChronoUnit;

-import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.dse.driver.api.core.data.time.DateRange;
+import com.datastax.dse.driver.internal.core.type.codec.time.DateRangeCodec;
+import com.datastax.oss.driver.api.core.ProtocolVersion;

 class DATERANGETYPE_CodecTest {

@@ -108,12 +110,9 @@ void format_ShouldFormatDateRangeToString() throws ParseException {

     @Test
     void parse_ShouldParseStringToDateRange() throws ParseException {
-        DateTimeFormatter df = (new DateTimeFormatterBuilder())
-                .appendInstant(3).toFormatter();
-        String formattedDateTime = ZonedDateTime.now()
-                .withZoneSameInstant(ZoneOffset.UTC)
-                .truncatedTo(ChronoUnit.MILLIS)
-                .format(df);
+        DateTimeFormatter df = (new DateTimeFormatterBuilder()).appendInstant(3).toFormatter();
+        String formattedDateTime = ZonedDateTime.now().withZoneSameInstant(ZoneOffset.UTC)
+                .truncatedTo(ChronoUnit.MILLIS).format(df);

         // Enclose in single quotes as per the error message
         String dateRangeLiteral = "'" + formattedDateTime + "'";
diff --git a/src/test/java/com/datastax/cdm/cql/codec/DECIMAL_StringCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/DECIMAL_StringCodecTest.java
index f9b44060..4c112c25 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/DECIMAL_StringCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/DECIMAL_StringCodecTest.java
@@ -15,16 +15,17 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class DECIMAL_StringCodecTest {

@@ -82,9 +83,9 @@ void format_ShouldFormatValueAsString() {
     }

     @Test
-    // The test seems trivial because we are basically sending in a
-    // number converted to a string, expecting it to convert that to a number
-    // and return us the number as a string
+    // The test seems trivial because we are basically sending in a
+    // number converted to a string, expecting it to convert that to a number
+    // and return us the number as a string
     void parse_ShouldParseStringToValueAndReturnAsString() {
         String valueAsString = "123.456";
         String result = codec.parse(valueAsString);
@@ -92,8 +93,8 @@ void parse_ShouldParseStringToValueAndReturnAsString() {
     }

     @Test
-    // Slightly more interesting test, we are sending in a string that is not
-    // a number, expecting it throw a IllegalArgumentException
+    // Slightly more interesting test, we are sending in a string that is not
+    // a number, expecting it to throw an IllegalArgumentException
     void parse_ShouldThrowIllegalArgumentException_WhenValueIsNotANumber() {
         String valueAsString = "not a number";
         Assertions.assertThrows(IllegalArgumentException.class, () -> codec.parse(valueAsString));
diff --git a/src/test/java/com/datastax/cdm/cql/codec/DOUBLE_StringCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/DOUBLE_StringCodecTest.java
index ed52829c..e6f0d9f1 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/DOUBLE_StringCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/DOUBLE_StringCodecTest.java
@@ -15,15 +15,16 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.nio.ByteBuffer;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.nio.ByteBuffer;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class DOUBLE_StringCodecTest {

@@ -72,9 +73,9 @@ void decode_ShouldDecodeByteBufferToValueAndReturnAsString() {
     }

     @Test
-    // The test seems trivial because we are basically sending in a
-    // number converted to a string, expecting it to convert that to a number
-    // and return us the number as a string
+    // The test seems trivial because we are basically sending in a
+    // number converted to a string, expecting it to convert that to a number
+    // and return us the number as a string
     void parse_ShouldParseStringToValueAndReturnAsString() {
         String valueAsString = "21474836470.7";
         Double value = Double.parseDouble(valueAsString);
@@ -83,8 +84,8 @@ void parse_ShouldParseStringToValueAndReturnAsString() {
     }

     @Test
-    // Slightly more interesting test, we are sending in a string that is not
-    // a number, expecting it throw a IllegalArgumentException
+    // Slightly more interesting test, we are sending in a string that is not
+    // a number, expecting it to throw an IllegalArgumentException
     void parse_ShouldThrowIllegalArgumentException_WhenValueIsNotANumber() {
         String valueAsString = "not a number";
         Assertions.assertThrows(IllegalArgumentException.class, () -> codec.parse(valueAsString));
@@ -97,4 +98,3 @@ void parse_ShouldReturnNull_WhenValueIsNull() {
         Assertions.assertNull(result);
     }
 }
-
diff --git a/src/test/java/com/datastax/cdm/cql/codec/INT_StringCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/INT_StringCodecTest.java
index 41ba9ce0..cbeca4eb 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/INT_StringCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/INT_StringCodecTest.java
@@ -15,15 +15,16 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.nio.ByteBuffer;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.nio.ByteBuffer;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class INT_StringCodecTest {

@@ -82,9 +83,9 @@ void format_ShouldFormatValueAsString() {
     }

     @Test
-    // The test seems trivial because we are basically sending in a
-    // number converted to a string, expecting it to convert that to a number
-    // and return us the number as a string
+    // The test seems trivial because we are basically sending in a
+    // number converted to a string, expecting it to convert that to a number
+    // and return us the number as a string
     void parse_ShouldParseStringToValueAndReturnAsString() {
         String valueAsString = "123";
         String result = codec.parse(valueAsString);
@@ -92,12 +93,11 @@ void parse_ShouldParseStringToValueAndReturnAsString() {
     }

     @Test
-    // Slightly more interesting test, we are sending in a string that is not
-    // a number, expecting it throw a IllegalArgumentException
+    // Slightly more interesting test, we are sending in a string that is not
+    // a number, expecting it to throw an IllegalArgumentException
     void parse_ShouldThrowIllegalArgumentException_WhenValueIsNotANumber() {
         String valueAsString = "not a number";
         Assertions.assertThrows(IllegalArgumentException.class, () -> codec.parse(valueAsString));
     }
 }
-
diff --git a/src/test/java/com/datastax/cdm/cql/codec/LINESTRINGTYPE_CodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/LINESTRINGTYPE_CodecTest.java
index 73270df9..37441c58 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/LINESTRINGTYPE_CodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/LINESTRINGTYPE_CodecTest.java
@@ -15,16 +15,18 @@
  */
 package com.datastax.cdm.cql.codec;

+import static org.junit.jupiter.api.Assertions.*;
+
+import java.nio.ByteBuffer;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
 import com.datastax.cdm.data.CqlConversion;
 import com.datastax.dse.driver.api.core.data.geometry.LineString;
 import com.datastax.dse.driver.internal.core.data.geometry.DefaultLineString;
 import com.datastax.dse.driver.internal.core.type.codec.geometry.LineStringCodec;
 import com.esri.core.geometry.ogc.OGCLineString;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import java.nio.ByteBuffer;
-
-import static org.junit.jupiter.api.Assertions.*;

 class LINESTRINGTYPE_CodecTest {

@@ -37,7 +39,8 @@ void setUp() {

     @Test
     void encode_ShouldEncodeLineStringToByteBuffer() {
-        LineString lineString = new DefaultLineString((OGCLineString) OGCLineString.fromText("LINESTRING (30 10, 10 30, 40 40)"));
+        LineString lineString = new DefaultLineString(
+                (OGCLineString) OGCLineString.fromText("LINESTRING (30 10, 10 30, 40 40)"));
         ByteBuffer encoded = codec.encode(lineString, CqlConversion.PROTOCOL_VERSION);

         assertNotNull(encoded);
diff --git a/src/test/java/com/datastax/cdm/cql/codec/POINTTYPE_CodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/POINTTYPE_CodecTest.java
index f049ae69..af502659 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/POINTTYPE_CodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/POINTTYPE_CodecTest.java
@@ -15,16 +15,18 @@
  */
 package com.datastax.cdm.cql.codec;

+import static org.junit.jupiter.api.Assertions.*;
+
+import java.nio.ByteBuffer;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
 import com.datastax.cdm.data.CqlConversion;
 import com.datastax.dse.driver.api.core.data.geometry.Point;
 import com.datastax.dse.driver.internal.core.data.geometry.DefaultPoint;
 import com.datastax.dse.driver.internal.core.type.codec.geometry.PointCodec;
 import com.esri.core.geometry.ogc.OGCPoint;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import java.nio.ByteBuffer;
-
-import static org.junit.jupiter.api.Assertions.*;

 class POINTTYPE_CodecTest {

diff --git a/src/test/java/com/datastax/cdm/cql/codec/POLYGONTYPE_CodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/POLYGONTYPE_CodecTest.java
index 5736461f..4b221c2e 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/POLYGONTYPE_CodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/POLYGONTYPE_CodecTest.java
@@ -15,16 +15,18 @@
  */
 package com.datastax.cdm.cql.codec;

+import static org.junit.jupiter.api.Assertions.*;
+
+import java.nio.ByteBuffer;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
 import com.datastax.cdm.data.CqlConversion;
 import com.datastax.dse.driver.api.core.data.geometry.Polygon;
 import com.datastax.dse.driver.internal.core.data.geometry.DefaultPolygon;
 import com.datastax.dse.driver.internal.core.type.codec.geometry.PolygonCodec;
 import com.esri.core.geometry.ogc.OGCPolygon;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import java.nio.ByteBuffer;
-
-import static org.junit.jupiter.api.Assertions.*;

 class POLYGONTYPE_CodecTest {

@@ -37,8 +39,11 @@ void setUp() {

     @Test
     void encode_ShouldEncodePolygonToByteBuffer() {
-        Polygon polygon = new DefaultPolygon((OGCPolygon) OGCPolygon.fromText("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"));
-        ByteBuffer encoded = codec.encode(polygon, CqlConversion.PROTOCOL_VERSION); // Assuming protocol version is not needed or a mock version is provided
+        Polygon polygon = new DefaultPolygon(
+                (OGCPolygon) OGCPolygon.fromText("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"));
+        ByteBuffer encoded = codec.encode(polygon, CqlConversion.PROTOCOL_VERSION); // Assuming protocol version is not
+                                                                                    // needed or a mock version is
+                                                                                    // provided

         // Assert that the result is not null
         assertNotNull(encoded);
@@ -67,6 +72,7 @@ void decode_ShouldDecodeByteBufferToPolygon() {
         String actualWkt = actualPolygon.asWellKnownText();
         assertEquals(wkt, actualWkt);
     }
+
     @Test
     void format_ShouldFormatPolygonToWktString() {
         String wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))";
diff --git a/src/test/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodecTest.java
index 9933abfe..160fb705 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodecTest.java
@@ -15,12 +15,13 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.cdm.properties.KnownProperties;
-import com.datastax.cdm.properties.PropertyHelper;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.nio.ByteBuffer;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+
 import org.apache.spark.SparkConf;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Assertions;
@@ -28,12 +29,12 @@
 import org.junit.jupiter.api.Test;
 import org.mockito.Mock;

-import java.nio.ByteBuffer;
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.time.ZoneId;
-import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.cdm.properties.KnownProperties;
+import com.datastax.cdm.properties.PropertyHelper;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class TEXTFormat_InstantCodecTest {

@@ -111,7 +112,7 @@ void parse_ShouldParseTextAndReturnAsInstant() {
         String valueAsString = "220412215715";
         Instant value = LocalDateTime.parse(valueAsString, formatter).toInstant(zoneOffset);
         Instant result = codec.parse(valueAsString);
-        Assertions.assertEquals(value,result);
+        Assertions.assertEquals(value, result);
     }

     @Test
@@ -147,7 +148,6 @@ void constructor_ShouldThrowIllegalArgumentException_WhenInvalidTimeZone() {
         Assertions.assertThrows(IllegalArgumentException.class, () -> new TEXTFormat_InstantCodec(propertyHelper));
     }

-
     @Test
     void constructor_ShouldThrowIllegalArgumentException_WhenEmptyTimeZone() {
         PropertyHelper.destroyInstance();
diff --git a/src/test/java/com/datastax/cdm/cql/codec/TEXTMillis_InstantCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TEXTMillis_InstantCodecTest.java
index 3a4c3470..481901d8 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TEXTMillis_InstantCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TEXTMillis_InstantCodecTest.java
@@ -15,18 +15,19 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.cdm.properties.PropertyHelper;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.nio.ByteBuffer;
+import java.time.Instant;
+
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.nio.ByteBuffer;
-import java.time.Instant;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.cdm.properties.PropertyHelper;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class TEXTMillis_InstantCodecTest {

diff --git a/src/test/java/com/datastax/cdm/cql/codec/TEXT_BigDecimalCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TEXT_BigDecimalCodecTest.java
index abde75c8..4958e5ed 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TEXT_BigDecimalCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TEXT_BigDecimalCodecTest.java
@@ -15,16 +15,17 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class
 TEXT_BigDecimalCodecTest {

@@ -32,7 +33,7 @@ class TEXT_BigDecimalCodecTest {

     @BeforeEach
     void setUp() {
-        codec = new TEXT_BigDecimalCodec( null);
+        codec = new TEXT_BigDecimalCodec(null);
     }

     @Test
@@ -81,7 +82,7 @@ void parse_ShouldParseStringToBigDecimalValue() {

     @Test
     void format_ShouldFormatNumberValueAsText() {
-        BigDecimal value = new BigDecimal("12345.6789");;
+        BigDecimal value = new BigDecimal("12345.6789");
         String expectedValue = TypeCodecs.DECIMAL.format(value);
         String result = codec.format(value);
         Assertions.assertEquals(expectedValue, result);
diff --git a/src/test/java/com/datastax/cdm/cql/codec/TEXT_DoubleCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TEXT_DoubleCodecTest.java
index 2009d513..a6af6de9 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TEXT_DoubleCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TEXT_DoubleCodecTest.java
@@ -15,15 +15,16 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.nio.ByteBuffer;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.nio.ByteBuffer;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class TEXT_DoubleCodecTest {

@@ -32,7 +33,7 @@ class TEXT_DoubleCodecTest {

     @BeforeEach
     void setUp() {
-        codec = new TEXT_DoubleCodec( null);
+        codec = new TEXT_DoubleCodec(null);
     }

     @Test
diff --git a/src/test/java/com/datastax/cdm/cql/codec/TEXT_IntegerCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TEXT_IntegerCodecTest.java
index d3280d89..758cc15f 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TEXT_IntegerCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TEXT_IntegerCodecTest.java
@@ -15,15 +15,16 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.nio.ByteBuffer;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.nio.ByteBuffer;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class TEXT_IntegerCodecTest {

@@ -32,7 +33,7 @@ class TEXT_IntegerCodecTest {

     @BeforeEach
     void setUp() {
-        codec = new TEXT_IntegerCodec( null);
+        codec = new TEXT_IntegerCodec(null);
     }

     @Test
diff --git a/src/test/java/com/datastax/cdm/cql/codec/TEXT_LongCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TEXT_LongCodecTest.java
index c2fb88e1..37120d6d 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TEXT_LongCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TEXT_LongCodecTest.java
@@ -15,15 +15,16 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.nio.ByteBuffer;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.nio.ByteBuffer;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class TEXT_LongCodecTest {

@@ -32,7 +33,7 @@ class TEXT_LongCodecTest {

     @BeforeEach
     void setUp() {
-        codec = new TEXT_LongCodec( null);
+        codec = new TEXT_LongCodec(null);
     }

     @Test
diff --git a/src/test/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringFormatCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringFormatCodecTest.java
index b85a6d59..05aaea14 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringFormatCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringFormatCodecTest.java
@@ -15,18 +15,6 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.cdm.properties.KnownProperties;
-import com.datastax.cdm.properties.PropertyHelper;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
-import org.apache.spark.SparkConf;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
 import java.nio.ByteBuffer;
 import java.time.Instant;
 import java.time.LocalDateTime;
@@ -34,6 +22,19 @@
 import java.time.ZoneOffset;
 import java.time.format.DateTimeFormatter;

+import org.apache.spark.SparkConf;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.cdm.properties.KnownProperties;
+import com.datastax.cdm.properties.PropertyHelper;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+
 class TIMESTAMP_StringFormatCodecTest {

     private TIMESTAMP_StringFormatCodec codec;
diff --git a/src/test/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringMillisCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringMillisCodecTest.java
index a34f15f7..8dc807c2 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringMillisCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TIMESTAMP_StringMillisCodecTest.java
@@ -15,18 +15,19 @@
  */
 package com.datastax.cdm.cql.codec;

-import com.datastax.cdm.data.CqlConversion;
-import com.datastax.cdm.properties.PropertyHelper;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
-import com.datastax.oss.driver.api.core.type.reflect.GenericType;
+import java.nio.ByteBuffer;
+import java.time.Instant;
+
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.nio.ByteBuffer;
-import java.time.Instant;
+import com.datastax.cdm.data.CqlConversion;
+import com.datastax.cdm.properties.PropertyHelper;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
+import com.datastax.oss.driver.api.core.type.reflect.GenericType;

 class TIMESTAMP_StringMillisCodecTest {

diff --git a/src/test/java/com/datastax/cdm/cql/statement/BaseCdmStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/BaseCdmStatementTest.java
index 4f16c1eb..98f16898 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/BaseCdmStatementTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/BaseCdmStatementTest.java
@@ -15,18 +15,19 @@
  */
 package com.datastax.cdm.cql.statement;

-import com.datastax.cdm.cql.EnhancedSession;
-import com.datastax.cdm.properties.IPropertyHelper;
-import com.datastax.cdm.schema.CqlTable;
-import com.datastax.oss.driver.api.core.CqlSession;
-import com.datastax.oss.driver.api.core.cql.PreparedStatement;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.when;
+
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;

-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.when;
+import com.datastax.cdm.cql.EnhancedSession;
+import com.datastax.cdm.properties.IPropertyHelper;
+import com.datastax.cdm.schema.CqlTable;
+import com.datastax.oss.driver.api.core.CqlSession;
+import com.datastax.oss.driver.api.core.cql.PreparedStatement;

 public class BaseCdmStatementTest {

diff --git a/src/test/java/com/datastax/cdm/cql/statement/Feature_CounterTest.java b/src/test/java/com/datastax/cdm/cql/statement/Feature_CounterTest.java
index d3e02a0a..2f2d3cc6 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/Feature_CounterTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/Feature_CounterTest.java
@@ -15,20 +15,21 @@
  */
 package com.datastax.cdm.cql.statement;

+import static org.junit.jupiter.api.Assertions.assertAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.mock;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.mockito.Mock;
+
 import com.datastax.cdm.data.CqlData;
 import com.datastax.cdm.properties.IPropertyHelper;
 import com.datastax.cdm.schema.CqlTable;
 import com.datastax.oss.driver.api.core.type.DataType;
 import com.datastax.oss.driver.api.core.type.DataTypes;
-import org.junit.jupiter.api.BeforeEach;
-import org.mockito.Mock;
-
-import java.util.Arrays;
-import java.util.List;
-
-import static org.junit.jupiter.api.Assertions.assertAll;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.mockito.Mockito.mock;

 public class Feature_CounterTest {

@@ -41,10 +42,10 @@ public class Feature_CounterTest {
     @Mock
     CqlTable targetTable;

-
-    List<String> standardNames = Arrays.asList("key","col1","col2");
+    List<String> standardNames = Arrays.asList("key", "col1", "col2");
     List<DataType> standardDataTypes = Arrays.asList(DataTypes.TIMESTAMP, DataTypes.COUNTER, DataTypes.COUNTER);
-    List<Class> standardBindClasses = Arrays.asList(CqlData.getBindClass(standardDataTypes.get(0)), CqlData.getBindClass(standardDataTypes.get(1)), CqlData.getBindClass(standardDataTypes.get(2)));
+    List<Class> standardBindClasses = Arrays.asList(CqlData.getBindClass(standardDataTypes.get(0)),
+            CqlData.getBindClass(standardDataTypes.get(1)), CqlData.getBindClass(standardDataTypes.get(2)));

     @BeforeEach
     public void setup() {
@@ -53,45 +54,49 @@ public void setup() {
         targetTable = mock(CqlTable.class);
     }

+    private void setValidSparkConf() {
+    }

-    private void setValidSparkConf() {}
-
-// @Test
-// public void smokeTest_initialize() {
-// setValidSparkConf();
-// helper.initializeSparkConf(validSparkConf);
-// assertAll(
-// () -> assertEquals(Arrays.asList(1,2), helper.getIntegerList(KnownProperties.ORIGIN_COUNTER_INDEXES), "ORIGIN_COUNTER_INDEXES")
-// );
-// }
-//
-// @Test
-// public void smokeCQL() {
-// SparkConf sparkConf = new SparkConf();
-// sparkConf.set(KnownProperties.ORIGIN_CONNECT_HOST, "localhost");
-// sparkConf.set(KnownProperties.ORIGIN_KEYSPACE_TABLE, "origin.tab1");
-// sparkConf.set(KnownProperties.ORIGIN_COLUMN_NAMES, "key,col1,col2");
-// sparkConf.set(KnownProperties.ORIGIN_COLUMN_TYPES, "4,2,2");
-// sparkConf.set(KnownProperties.ORIGIN_PARTITION_KEY, "key");
-// sparkConf.set(KnownProperties.ORIGIN_COUNTER_INDEXES, "1,2");
-//
-// sparkConf.set(KnownProperties.TARGET_PRIMARY_KEY, "key");
-// sparkConf.set(KnownProperties.TARGET_KEYSPACE_TABLE, "target.tab1");
-//
-// helper.initializeSparkConf(sparkConf);
-// CqlHelper cqlHelper = new CqlHelper();
-// cqlHelper.initialize();
-//
-// String originSelect = "SELECT key,col1,col2 FROM origin.tab1 WHERE TOKEN(key) >= ? AND TOKEN(key) <= ? ALLOW FILTERING";
-// String originSelectByPK = "SELECT key,col1,col2 FROM origin.tab1 WHERE key=?";
-// String targetUpdate = "UPDATE target.tab1 SET col1=col1+?,col2=col2+? WHERE key=?";
-// String targetSelect = "SELECT key,col1,col2 FROM target.tab1 WHERE key=?";
-//
-// assertAll(
-// () -> assertEquals(originSelect, cqlHelper.getOriginSelectByPartitionRangeStatement(null).getCQL().replaceAll("\\s+"," ")),
-// () -> assertEquals(originSelectByPK, cqlHelper.getOriginSelectByPKStatement(null).getCQL().replaceAll("\\s+"," ")),
-// () -> assertEquals(targetUpdate, cqlHelper.getTargetUpdateStatement(null).getCQL().replaceAll("\\s+"," ")),
-// () -> assertEquals(targetSelect, cqlHelper.getTargetSelectByPKStatement(null).getCQL().replaceAll("\\s+"," "))
-// );
-// }
+    // @Test
+    // public void smokeTest_initialize() {
+    // setValidSparkConf();
+    // helper.initializeSparkConf(validSparkConf);
+    // assertAll(
+    // () -> assertEquals(Arrays.asList(1,2), helper.getIntegerList(KnownProperties.ORIGIN_COUNTER_INDEXES),
+    // "ORIGIN_COUNTER_INDEXES")
+    // );
+    // }
+    //
+    // @Test
+    // public void smokeCQL() {
+    // SparkConf sparkConf = new SparkConf();
+    // sparkConf.set(KnownProperties.ORIGIN_CONNECT_HOST, "localhost");
+    // sparkConf.set(KnownProperties.ORIGIN_KEYSPACE_TABLE, "origin.tab1");
+    // sparkConf.set(KnownProperties.ORIGIN_COLUMN_NAMES, "key,col1,col2");
+    // sparkConf.set(KnownProperties.ORIGIN_COLUMN_TYPES, "4,2,2");
+    // sparkConf.set(KnownProperties.ORIGIN_PARTITION_KEY, "key");
+    // sparkConf.set(KnownProperties.ORIGIN_COUNTER_INDEXES, "1,2");
+    //
+    // sparkConf.set(KnownProperties.TARGET_PRIMARY_KEY, "key");
+    // sparkConf.set(KnownProperties.TARGET_KEYSPACE_TABLE, "target.tab1");
+    //
+    // helper.initializeSparkConf(sparkConf);
+    // CqlHelper cqlHelper = new CqlHelper();
+    // cqlHelper.initialize();
+    //
+    // String originSelect = "SELECT key,col1,col2 FROM origin.tab1 WHERE TOKEN(key) >= ? AND TOKEN(key) <= ? ALLOW
+    // FILTERING";
+    // String originSelectByPK = "SELECT key,col1,col2 FROM origin.tab1 WHERE key=?";
+    // String targetUpdate = "UPDATE target.tab1 SET col1=col1+?,col2=col2+? WHERE key=?";
+    // String targetSelect = "SELECT key,col1,col2 FROM target.tab1 WHERE key=?";
+    //
+    // assertAll(
+    // () -> assertEquals(originSelect,
+    // cqlHelper.getOriginSelectByPartitionRangeStatement(null).getCQL().replaceAll("\\s+"," ")),
+    // () -> assertEquals(originSelectByPK, cqlHelper.getOriginSelectByPKStatement(null).getCQL().replaceAll("\\s+","
+    // ")),
+    // () -> assertEquals(targetUpdate, cqlHelper.getTargetUpdateStatement(null).getCQL().replaceAll("\\s+"," ")),
+    // () -> assertEquals(targetSelect, cqlHelper.getTargetSelectByPKStatement(null).getCQL().replaceAll("\\s+"," "))
+    // );
+    // }
 }
diff --git a/src/test/java/com/datastax/cdm/cql/statement/OriginSelectByPKStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/OriginSelectByPKStatementTest.java
index 2db91ea3..3d647f15 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/OriginSelectByPKStatementTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/OriginSelectByPKStatementTest.java
@@ -15,17 +15,18 @@
  */
 package com.datastax.cdm.cql.statement;

+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.ArgumentMatchers.*;
+import static org.mockito.Mockito.*;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.cdm.cql.CommonMocks;
 import com.datastax.cdm.data.EnhancedPK;
 import com.datastax.cdm.data.PKFactory;
 import com.datastax.cdm.data.Record;
 import com.datastax.oss.driver.api.core.cql.BoundStatement;
-import com.datastax.cdm.cql.CommonMocks;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.ArgumentMatchers.*;
-import static org.mockito.Mockito.*;

 public class OriginSelectByPKStatementTest extends CommonMocks {

@@ -40,15 +41,11 @@ public void setup() {
     @Test
     public void smoke_basicCQL() {
         StringBuilder sb = new StringBuilder();
-        sb.append("SELECT ")
-                .append(String.join(",", originColumnNames))
-                .append(" FROM ")
-                .append(originKeyspaceTableName)
-                .append(" WHERE ")
-                .append(keyEqualsBindJoinedWithAND(originPrimaryKey));
+        sb.append("SELECT ").append(String.join(",", originColumnNames)).append(" FROM ")
+                .append(originKeyspaceTableName).append(" WHERE ").append(keyEqualsBindJoinedWithAND(originPrimaryKey));

         String cql = originSelectByPKStatement.getCQL();
-        assertEquals(sb.toString(),cql);
+        assertEquals(sb.toString(), cql);
     }

     @Test
@@ -80,12 +77,9 @@ public void testGetRecord_nullRow() {
     public void testBind_success() {
         originSelectByPKStatement.bind(pk);

-        assertAll(
-                () -> verify(preparedStatement).bind(),
-                () -> verify(boundStatement).setConsistencyLevel(readCL),
+        assertAll(() -> verify(preparedStatement).bind(), () -> verify(boundStatement).setConsistencyLevel(readCL),
                 () -> verify(boundStatement).setPageSize(fetchSizeInRows),
-                () -> verify(pkFactory).bindWhereClause(PKFactory.Side.ORIGIN, pk, boundStatement, 0)
-        );
+                () -> verify(pkFactory).bindWhereClause(PKFactory.Side.ORIGIN, pk, boundStatement, 0));
     }

     @Test
diff --git a/src/test/java/com/datastax/cdm/cql/statement/OriginSelectByPartitionRangeStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/OriginSelectByPartitionRangeStatementTest.java
index aa6b0298..92098a43 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/OriginSelectByPartitionRangeStatementTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/OriginSelectByPartitionRangeStatementTest.java
@@ -15,14 +15,15 @@
  */
 package com.datastax.cdm.cql.statement;

-import com.datastax.cdm.cql.CommonMocks;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.*;

 import java.math.BigInteger;

-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.cdm.cql.CommonMocks;

 public class OriginSelectByPartitionRangeStatementTest extends CommonMocks {

@@ -31,54 +32,50 @@ public class OriginSelectByPartitionRangeStatementTest extends CommonMocks {
     @BeforeEach
     public void setup() {
         commonSetup();
-        originSelectByPartitionRangeStatement = new OriginSelectByPartitionRangeStatement(propertyHelper, originSession);
+        originSelectByPartitionRangeStatement = new OriginSelectByPartitionRangeStatement(propertyHelper,
+                originSession);
     }

     @Test
     public void smoke_basicCQL() {
         String keys = String.join(",", originPartitionKey);
         StringBuilder sb = new StringBuilder();
-        sb.append("SELECT ")
-                .append(String.join(",", originColumnNames))
-                .append(" FROM ")
-                .append(originKeyspaceTableName)
-                .append(" WHERE ")
-                .append("TOKEN(").append(keys).append(") >= ? AND TOKEN(").append(keys).append(") <= ?")
-                .append(" ALLOW FILTERING");
+        sb.append("SELECT ").append(String.join(",", originColumnNames)).append(" FROM ")
+                .append(originKeyspaceTableName).append(" WHERE ").append("TOKEN(").append(keys)
+                .append(") >= ? AND TOKEN(").append(keys).append(") <= ?").append(" ALLOW FILTERING");

         String cql = originSelectByPartitionRangeStatement.getCQL();
-        assertEquals(sb.toString(),cql);
+        assertEquals(sb.toString(), cql);
     }

     @Test
     public void originFilterCondition() {
-        String filter=" AND cluster_key = 'abc'";
+        String filter = " AND cluster_key = 'abc'";
         when(originFilterConditionFeature.getFilterCondition()).thenReturn(filter);
-        originSelectByPartitionRangeStatement = new OriginSelectByPartitionRangeStatement(propertyHelper, originSession);
+        originSelectByPartitionRangeStatement = new OriginSelectByPartitionRangeStatement(propertyHelper,
+                originSession);

         String keys = String.join(",", originPartitionKey);
         StringBuilder sb = new StringBuilder();
-        sb.append("SELECT ")
-                .append(String.join(",", originColumnNames))
-                .append(" FROM ")
-                .append(originKeyspaceTableName)
-                .append(" WHERE ")
-                .append("TOKEN(").append(keys).append(") >= ? AND TOKEN(").append(keys).append(") <= ?")
-                .append(filter)
-                .append(" ALLOW FILTERING");
+        sb.append("SELECT ").append(String.join(",", originColumnNames)).append(" FROM ")
+                .append(originKeyspaceTableName).append(" WHERE ").append("TOKEN(").append(keys)
+                .append(") >= ? AND TOKEN(").append(keys).append(") <= ?").append(filter).append(" ALLOW FILTERING");

         String cql = originSelectByPartitionRangeStatement.getCQL();
-        assertEquals(sb.toString(),cql);
+        assertEquals(sb.toString(), cql);
     }

     @Test
     public void bind_withNullBinds() {
         assertAll(
-                () -> assertThrows(RuntimeException.class, () -> originSelectByPartitionRangeStatement.bind(null,null), "two null"),
-                () -> assertThrows(RuntimeException.class, () -> originSelectByPartitionRangeStatement.bind(BigInteger.valueOf(20)), "missing second"),
-                () -> assertThrows(RuntimeException.class, () -> originSelectByPartitionRangeStatement.bind(BigInteger.valueOf(20),null), "null second"),
-                () -> assertThrows(RuntimeException.class, () -> originSelectByPartitionRangeStatement.bind(null,BigInteger.valueOf(20)), "null first")
-        );
+                () -> assertThrows(RuntimeException.class, () -> originSelectByPartitionRangeStatement.bind(null, null),
+                        "two null"),
+                () -> assertThrows(RuntimeException.class,
+                        () -> originSelectByPartitionRangeStatement.bind(BigInteger.valueOf(20)), "missing second"),
+                () -> assertThrows(RuntimeException.class,
+                        () -> originSelectByPartitionRangeStatement.bind(BigInteger.valueOf(20), null), "null second"),
+                () -> assertThrows(RuntimeException.class,
+                        () -> originSelectByPartitionRangeStatement.bind(null, BigInteger.valueOf(20)), "null first"));
     }

     @Test
@@ -87,14 +84,11 @@ public void bind_withNonNullBinds_usesProvidedPartitions() {
         BigInteger providedMax = BigInteger.valueOf(67890L);

         originSelectByPartitionRangeStatement.bind(providedMin, providedMax);
-        assertAll(
-                () -> verify(preparedStatement).bind(providedMin.longValueExact(), providedMax.longValueExact()),
+        assertAll(() -> verify(preparedStatement).bind(providedMin.longValueExact(), providedMax.longValueExact()),
                 () -> verify(boundStatement).setConsistencyLevel(readCL),
-                () -> verify(boundStatement).setPageSize(fetchSizeInRows)
-        );
+                () -> verify(boundStatement).setPageSize(fetchSizeInRows));
     }

-
     @Test
     public void bind_withNonNullBinds_usesProvidedPartitions_whenRandomPartitioner() {
         when(originTable.hasRandomPartitioner()).thenReturn(true);
@@ -115,9 +109,12 @@ public void bind_withNonNullBinds_usesProvidedPartitions_whenRandomPartitioner()
     @Test
     public void bind_withInvalidBindType_throwsException() {
         assertAll(
-                () -> assertThrows(RuntimeException.class, () -> originSelectByPartitionRangeStatement.bind("invalidType", BigInteger.valueOf(20)), "invalid first"),
-                () -> assertThrows(RuntimeException.class, () -> originSelectByPartitionRangeStatement.bind(BigInteger.valueOf(20),"invalidType"), "invalid second")
-        );
+                () -> assertThrows(RuntimeException.class,
+                        () -> originSelectByPartitionRangeStatement.bind("invalidType", BigInteger.valueOf(20)),
+                        "invalid first"),
+                () -> assertThrows(RuntimeException.class,
+                        () -> originSelectByPartitionRangeStatement.bind(BigInteger.valueOf(20), "invalidType"),
+                        "invalid second"));
     }
 }
diff --git a/src/test/java/com/datastax/cdm/cql/statement/OriginSelectStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/OriginSelectStatementTest.java
index 8498d4a1..eb3d88af 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/OriginSelectStatementTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/OriginSelectStatementTest.java
@@ -15,19 +15,20 @@
  */
 package com.datastax.cdm.cql.statement;

+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.when;
+
+import java.util.Collections;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.cdm.cql.CommonMocks;
 import com.datastax.cdm.cql.EnhancedSession;
 import com.datastax.cdm.properties.IPropertyHelper;
 import com.datastax.cdm.properties.KnownProperties;
 import com.datastax.oss.driver.api.core.cql.BoundStatement;
 import com.datastax.oss.driver.api.core.cql.ResultSet;
-import com.datastax.cdm.cql.CommonMocks;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.util.Collections;
-
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.when;

 public class OriginSelectStatementTest extends CommonMocks {

@@ -44,15 +45,11 @@ public void setup() {
     @Test
     public void smoke_basicCQL() {
         StringBuilder sb = new StringBuilder();
-        sb.append("SELECT ")
-                .append(String.join(",", originColumnNames))
-                .append(" FROM ")
-                .append(originKeyspaceTableName)
-                .append(" WHERE ")
-                .append(bindClause);
+        sb.append("SELECT ").append(String.join(",", originColumnNames)).append(" FROM ")
+                .append(originKeyspaceTableName).append(" WHERE ").append(bindClause);

         String cql = originSelectStatement.getCQL();
-        assertEquals(sb.toString(),cql);
+        assertEquals(sb.toString(), cql);
     }

     @Test
@@ -122,24 +119,19 @@ public void column_filter_values() {

     @Test
     public void isRecordValid() {
-        assertAll(
-                () -> {
-                    assertFalse(originSelectStatement.isRecordValid(null), "null record");
-                },
-                () -> {
-                    assertTrue(originSelectStatement.isRecordValid(record), "valid row");
-                },
-                () -> {
-                    when(record.getPk().isError()).thenReturn(true);
-                    when(record.getPk().isWarning()).thenReturn(false);
-                    assertFalse(originSelectStatement.isRecordValid(record), "error PK");
-                },
-                () -> {
-                    when(record.getPk().isError()).thenReturn(false);
-                    when(record.getPk().isWarning()).thenReturn(true);
-                    assertTrue(originSelectStatement.isRecordValid(record), "warning PK");
-                }
-        );
+        assertAll(() -> {
+            assertFalse(originSelectStatement.isRecordValid(null), "null record");
+        }, () -> {
+            assertTrue(originSelectStatement.isRecordValid(record), "valid row");
+        }, () -> {
+            when(record.getPk().isError()).thenReturn(true);
+            when(record.getPk().isWarning()).thenReturn(false);
+            assertFalse(originSelectStatement.isRecordValid(record), "error PK");
+        }, () -> {
+            when(record.getPk().isError()).thenReturn(false);
+            when(record.getPk().isWarning()).thenReturn(true);
+            assertTrue(originSelectStatement.isRecordValid(record), "warning PK");
+        });
     }

     @Test
diff --git a/src/test/java/com/datastax/cdm/cql/statement/TargetInsertStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/TargetInsertStatementTest.java
index 9c4330f1..b8624450 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/TargetInsertStatementTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/TargetInsertStatementTest.java
@@ -15,16 +15,17 @@
  */
 package com.datastax.cdm.cql.statement;

-import com.datastax.cdm.cql.CommonMocks;
-import com.datastax.oss.driver.api.core.cql.*;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.*;

 import java.util.*;

-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.cdm.cql.CommonMocks;
+import com.datastax.oss.driver.api.core.cql.*;
+import com.datastax.oss.driver.api.core.type.DataTypes;

 public class TargetInsertStatementTest extends CommonMocks {

@@ -39,10 +40,9 @@ public void setup() {
     @Test
     public void smoke_basicCQL() {
         StringBuilder sb = new StringBuilder();
-        sb.append("INSERT INTO ")
-                .append(targetKeyspaceTableName)
-                .append(" (").append(String.join(",",targetColumnNames)).append(")")
-                .append(" VALUES (").append(String.join(",",Collections.nCopies(targetColumnNames.size(),"?"))).append(")");
+        sb.append("INSERT INTO ").append(targetKeyspaceTableName).append(" (")
+                .append(String.join(",", targetColumnNames)).append(")").append(" VALUES (")
+                .append(String.join(",", Collections.nCopies(targetColumnNames.size(), "?"))).append(")");
         String insertStatement = sb.toString();

         assertEquals(insertStatement, targetInsertStatement.getCQL());
@@ -93,18 +93,14 @@ public void cql_withTTLAndWritetime() {

     @Test
     public void cql_ConstantColumns() {
-        commonSetup(false,true,false);
+        commonSetup(false, true, false);
         targetInsertStatement = new TargetInsertStatement(propertyHelper, targetSession);

         StringBuilder sb = new StringBuilder();
-        sb.append("INSERT INTO ")
-                .append(targetKeyspaceTableName)
-                .append(" (")
-                .append(String.join(",",targetColumnNames))
-                .append(")")
-                .append(" VALUES (").append(String.join(",",Collections.nCopies(targetColumnNames.size()-constantColumns.size(),"?")))
-                .append(",").append(String.join(",",constantColumnValues))
-                .append(")");
+        sb.append("INSERT INTO ").append(targetKeyspaceTableName).append(" (")
+                .append(String.join(",", targetColumnNames)).append(")").append(" VALUES (")
+                .append(String.join(",", Collections.nCopies(targetColumnNames.size() - constantColumns.size(), "?")))
+                .append(",").append(String.join(",", constantColumnValues)).append(")");
         String insertStatement = sb.toString();

         assertEquals(insertStatement, targetInsertStatement.getCQL());
@@ -112,7 +108,7 @@ public void cql_ConstantColumns() {

     @Test
     public void bind_withStandardInput() {
-        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null,null,null,null);
+        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null, null, null, null);
         assertNotNull(result);
         verify(boundStatement, times(targetColumnNames.size())).set(anyInt(), any(), any(Class.class));
     }
@@ -153,22 +149,25 @@ public void bind_withTTLAndWritetime() {

     @Test
     public void bind_withExplodeMap() {
-        commonSetup(true,false,false);
+        commonSetup(true, false, false);
         targetInsertStatement = new TargetInsertStatement(propertyHelper, targetSession);

-        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null,null,getSampleData(explodeMapKeyType),getSampleData(explodeMapValueType));
+        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null, null,
+                getSampleData(explodeMapKeyType), getSampleData(explodeMapValueType));
         assertNotNull(result);
         verify(boundStatement, times(targetColumnNames.size())).set(anyInt(), any(), any(Class.class));
     }

     @Test
     public void bind_withConstantColumns() {
-        commonSetup(false,true, false);
+        commonSetup(false, true, false);
         targetInsertStatement = new TargetInsertStatement(propertyHelper, targetSession);

-        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null,null,getSampleData(explodeMapKeyType),getSampleData(explodeMapValueType));
+        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null, null,
+                getSampleData(explodeMapKeyType), getSampleData(explodeMapValueType));
         assertNotNull(result);
-        verify(boundStatement, times(targetColumnNames.size()-constantColumns.size())).set(anyInt(), any(), any(Class.class));
+        verify(boundStatement, times(targetColumnNames.size() - constantColumns.size())).set(anyInt(), any(),
+                any(Class.class));
     }

     @Test
@@ -177,15 +176,16 @@ public void bind_extraTargetColumn() {
         targetColumnTypes.add(DataTypes.TEXT);
         targetInsertStatement = new TargetInsertStatement(propertyHelper, targetSession);

-        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null,null,getSampleData(explodeMapKeyType),getSampleData(explodeMapValueType));
+        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null, null,
+                getSampleData(explodeMapKeyType), getSampleData(explodeMapValueType));
         assertNotNull(result);
-        verify(boundStatement, times(targetColumnNames.size()-1)).set(anyInt(), any(), any(Class.class));
+        verify(boundStatement, times(targetColumnNames.size() - 1)).set(anyInt(), any(), any(Class.class));
     }

-
     @Test
     public void bind_withNullOriginRow() {
-        RuntimeException exception = assertThrows(RuntimeException.class, () -> targetInsertStatement.bind(null, targetRow, 3600, 123456789L, explodeMapKey, explodeMapValue));
+        RuntimeException exception = assertThrows(RuntimeException.class,
+                () -> targetInsertStatement.bind(null, targetRow, 3600, 123456789L, explodeMapKey, explodeMapValue));
         assertEquals("Origin row is null", exception.getMessage());
     }

@@ -206,14 +206,13 @@ public void bind_withExceptionWhenBindingValue() {
         assertThrows(RuntimeException.class,
                 () -> targetInsertStatement.bind(originRow, targetRow, 3600, 123456789L, explodeMapKey, explodeMapValue));
     }

-
     @Test
     public void bind_withVectorColumns() {
         targetInsertStatement = new TargetInsertStatement(propertyHelper, targetSession);
         assertTrue(targetInsertStatement.targetColumnNames.contains(vectorCol));
         assertTrue(6 == targetInsertStatement.targetColumnNames.size());
         assertEquals(vectorColType, targetInsertStatement.targetColumnTypes.get(5));
-        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null, null,null, null);
+        BoundStatement result = targetInsertStatement.bind(originRow, targetRow, null, null, null, null);
         assertNotNull(result);
         verify(boundStatement, times(targetColumnNames.size())).set(anyInt(), any(), any(Class.class));
     }
diff --git a/src/test/java/com/datastax/cdm/cql/statement/TargetSelectByPKStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/TargetSelectByPKStatementTest.java
index 0bea8325..4c4a4161 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/TargetSelectByPKStatementTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/TargetSelectByPKStatementTest.java
@@ -15,17 +15,18 @@
  */
 package com.datastax.cdm.cql.statement;

-import com.datastax.cdm.data.EnhancedPK;
-import com.datastax.cdm.data.PKFactory;
-import com.datastax.oss.driver.api.core.cql.BoundStatement;
-import com.datastax.cdm.cql.CommonMocks;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.*;
+
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.*;
+import com.datastax.cdm.cql.CommonMocks;
+import com.datastax.cdm.data.EnhancedPK;
+import com.datastax.cdm.data.PKFactory;
+import com.datastax.oss.driver.api.core.cql.BoundStatement;

 public class TargetSelectByPKStatementTest extends CommonMocks {
     public Logger logger = LoggerFactory.getLogger(this.getClass().getName());
@@ -41,12 +42,8 @@ public void setup() {
     @Test
     public void smoke_basicCQL() {
         StringBuilder sb = new StringBuilder();
-        sb.append("SELECT ")
-                .append(String.join(",",targetColumnNames))
-                .append(" FROM ")
-                .append(targetKeyspaceTableName)
-                .append(" WHERE ")
-                .append(keyEqualsBindJoinedWithAND(targetPrimaryKey));
+        sb.append("SELECT ").append(String.join(",", targetColumnNames)).append(" FROM ")
+                .append(targetKeyspaceTableName).append(" WHERE ").append(keyEqualsBindJoinedWithAND(targetPrimaryKey));

         assertEquals(sb.toString(), targetSelectByPKStatement.getCQL());
     }
@@ -57,18 +54,15 @@ public void cql_withConstantColumnInKey() {
         String constKeyVal = constantColumnValues.get(0);
         targetClusteringKey.add(constKeyCol);
         targetClusteringKeyTypes.add(constantColumnTypes.get(0));
-        commonSetup(false,true,false);
+        commonSetup(false, true, false);
         targetSelectByPKStatement = new TargetSelectByPKStatement(propertyHelper, targetSession);

         StringBuilder sb = new StringBuilder();
-        sb.append("SELECT ")
-                .append(String.join(",",targetColumnNames))
-                .append(" FROM ")
-                .append(targetKeyspaceTableName)
-                .append(" WHERE ");
-
-        for (int i=0; i<targetPrimaryKey.size(); i++) {
-            if (i>0) {
+        sb.append("SELECT ").append(String.join(",", targetColumnNames)).append(" FROM ")
+                .append(targetKeyspaceTableName).append(" WHERE ");
+
+        for (int i = 0; i < targetPrimaryKey.size(); i++) {
+            if (i > 0) {
                 sb.append(" AND ");
             }
             String key = targetPrimaryKey.get(i);
@@ -86,11 +80,8 @@ public void cql_withConstantColumnInKey() {
     @Test
     public void getRecord() {
         targetSelectByPKStatement.getRecord(pk);
-        assertAll(
-                () -> verify(preparedStatement).bind(),
-                () -> verify(boundStatement).setConsistencyLevel(readCL),
-                () -> verify(pkFactory).bindWhereClause(PKFactory.Side.TARGET, pk, boundStatement, 0)
-        );
+        assertAll(() -> verify(preparedStatement).bind(), () -> verify(boundStatement).setConsistencyLevel(readCL),
+                () -> verify(pkFactory).bindWhereClause(PKFactory.Side.TARGET, pk, boundStatement, 0));
     }

     @Test
diff --git a/src/test/java/com/datastax/cdm/cql/statement/TargetUpdateStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/TargetUpdateStatementTest.java
index a1aecaa9..a2679b3b 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/TargetUpdateStatementTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/TargetUpdateStatementTest.java
@@ -15,17 +15,18 @@
  */
 package com.datastax.cdm.cql.statement;

-import com.datastax.oss.driver.api.core.cql.BoundStatement;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.cdm.cql.CommonMocks;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.*;

 import java.util.stream.Collectors;
 import java.util.stream.IntStream;

-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.cdm.cql.CommonMocks;
+import com.datastax.oss.driver.api.core.cql.BoundStatement;
+import com.datastax.oss.driver.api.core.type.DataTypes;

 public class TargetUpdateStatementTest extends CommonMocks {

@@ -36,18 +37,16 @@ public class TargetUpdateStatementTest extends CommonMocks {
     @BeforeEach
     public void setup() {
         // UPDATE is needed by counters, though the class should handle non-counter updates
-        commonSetup(false,false,true);
+        commonSetup(false, false, true);
         targetUpdateStatement = new TargetUpdateStatement(propertyHelper, targetSession);

         updateCQLBeginning = "UPDATE " + targetKeyspaceTableName;

         StringBuilder sb = new StringBuilder();
         sb.append(" SET ")
-                .append(targetCounterColumns.stream()
-                        .map(column -> column + "=" + column + "+?")
-                        .collect(Collectors.joining(",")))
-                .append(" WHERE ")
-                .append(keyEqualsBindJoinedWithAND(targetPrimaryKey));
+                .append(targetCounterColumns.stream().map(column -> column + "=" + column + "+?")
+                        .collect(Collectors.joining(",")))
+                .append(" WHERE ").append(keyEqualsBindJoinedWithAND(targetPrimaryKey));
         counterUpdateCQLEnding = sb.toString();
     }

@@ -59,15 +58,11 @@ public void smoke_basicCQL_Counter() {

     @Test
     public void smoke_basicCQL_Other() {
-        commonSetup(false,false,false);
+        commonSetup(false, false, false);
         StringBuilder sb = new StringBuilder();
-        sb.append(updateCQLBeginning)
-                .append(" SET ")
-                .append(targetValueColumns.stream()
-                        .map(column -> column + "=?")
-                        .collect(Collectors.joining(",")))
-                .append(" WHERE ")
-                .append(keyEqualsBindJoinedWithAND(targetPrimaryKey));
+        sb.append(updateCQLBeginning).append(" SET ")
+                .append(targetValueColumns.stream().map(column -> column + "=?").collect(Collectors.joining(",")))
+                .append(" WHERE ").append(keyEqualsBindJoinedWithAND(targetPrimaryKey));

         targetUpdateStatement = new TargetUpdateStatement(propertyHelper, targetSession);
         assertEquals(sb.toString(), targetUpdateStatement.getCQL());
@@ -75,19 +70,15 @@ public void smoke_basicCQL_Other() {

     @Test
     public void smoke_basicCQL_Constant() {
-        commonSetup(false,true,false);
+        commonSetup(false, true, false);
         StringBuilder sb = new StringBuilder();
-        sb.append(updateCQLBeginning)
-                .append(" SET ")
-                .append(targetValueColumns.stream()
-                        .map(column -> column + "=?")
-                        .collect(Collectors.joining(",")))
+        sb.append(updateCQLBeginning).append(" SET ")
+                .append(targetValueColumns.stream().map(column -> column + "=?").collect(Collectors.joining(",")))
                 .append(",")
                 .append(IntStream.range(0, constantColumns.size())
                         .mapToObj(i -> constantColumns.get(i) + "=" + constantColumnValues.get(i))
                         .collect(Collectors.joining(",")))
-                .append(" WHERE ")
-                .append(keyEqualsBindJoinedWithAND(targetPrimaryKey));
+                .append(" WHERE ").append(keyEqualsBindJoinedWithAND(targetPrimaryKey));

         targetUpdateStatement = new TargetUpdateStatement(propertyHelper, targetSession);
         assertEquals(sb.toString(), targetUpdateStatement.getCQL());
@@ -120,7 +111,7 @@ public void cql_withTTLAndWritetime() {

     @Test
     public void bind_withStandardInput() {
-        BoundStatement result = targetUpdateStatement.bind(originRow, targetRow, null,null,null,null);
+        BoundStatement result = targetUpdateStatement.bind(originRow, targetRow, null, null, null, null);
         assertNotNull(result);
         verify(boundStatement, times(targetColumnNames.size())).set(anyInt(), any(), any(Class.class));
     }
@@ -161,37 +152,39 @@ public void bind_withTTLAndWritetime() {

     @Test
     public void testBindOriginRowNull() {
-        assertThrows(RuntimeException.class, () -> targetUpdateStatement.bind(null, targetRow, 30, 123456789L, getSampleData(explodeMapKeyType),getSampleData(explodeMapValueType)));
+        assertThrows(RuntimeException.class, () -> targetUpdateStatement.bind(null, targetRow, 30, 123456789L,
+                getSampleData(explodeMapKeyType), getSampleData(explodeMapValueType)));
     }

     @Test
     public void bind_nonCounter_withStandardInput() {
-        commonSetup(false,false,false);
+        commonSetup(false, false, false);
         targetUpdateStatement = new TargetUpdateStatement(propertyHelper, targetSession);
-        BoundStatement result = targetUpdateStatement.bind(originRow, targetRow, null,null,null,null);
+
BoundStatement result = targetUpdateStatement.bind(originRow, targetRow, null, null, null, null); assertNotNull(result); verify(boundStatement, times(targetColumnNames.size())).set(anyInt(), any(), any(Class.class)); } @Test public void bind_explodeMap_withStandardInput() { - commonSetup(true,false,false); + commonSetup(true, false, false); targetUpdateStatement = new TargetUpdateStatement(propertyHelper, targetSession); - BoundStatement result = targetUpdateStatement.bind(originRow, targetRow, null,null,getSampleData(explodeMapKeyType),getSampleData(explodeMapValueType)); + BoundStatement result = targetUpdateStatement.bind(originRow, targetRow, null, null, + getSampleData(explodeMapKeyType), getSampleData(explodeMapValueType)); assertNotNull(result); verify(boundStatement, times(targetColumnNames.size())).set(anyInt(), any(), any(Class.class)); } @Test public void bind_nonCounter_withExtraColumn() { - commonSetup(false,false,false); + commonSetup(false, false, false); targetColumnNames.add("extraColumn"); targetColumnTypes.add(DataTypes.TEXT); targetUpdateStatement = new TargetUpdateStatement(propertyHelper, targetSession); - BoundStatement result = targetUpdateStatement.bind(originRow, targetRow, null,null,null,null); + BoundStatement result = targetUpdateStatement.bind(originRow, targetRow, null, null, null, null); assertNotNull(result); - verify(boundStatement, times(targetColumnNames.size()-1)).set(anyInt(), any(), any(Class.class)); + verify(boundStatement, times(targetColumnNames.size() - 1)).set(anyInt(), any(), any(Class.class)); } @Test diff --git a/src/test/java/com/datastax/cdm/cql/statement/TargetUpsertStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/TargetUpsertStatementTest.java index cc768c64..92ae3178 100644 --- a/src/test/java/com/datastax/cdm/cql/statement/TargetUpsertStatementTest.java +++ b/src/test/java/com/datastax/cdm/cql/statement/TargetUpsertStatementTest.java @@ -15,16 +15,18 @@ */ package com.datastax.cdm.cql.statement; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.Collections; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import com.datastax.cdm.cql.CommonMocks; import com.datastax.cdm.cql.EnhancedSession; import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.oss.driver.api.core.cql.*; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import java.util.Collections; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; public class TargetUpsertStatementTest extends CommonMocks { @@ -39,10 +41,9 @@ public void setup() { @Test public void smoke_basicCQL() { StringBuilder sb = new StringBuilder(); - sb.append("INSERT INTO ") - .append(targetKeyspaceTableName) - .append(" (").append(String.join(",",targetColumnNames)).append(")") - .append(" VALUES (").append(String.join(",",Collections.nCopies(targetColumnNames.size(),"?"))).append(")"); + sb.append("INSERT INTO ").append(targetKeyspaceTableName).append(" (") + .append(String.join(",", targetColumnNames)).append(")").append(" VALUES (") + .append(String.join(",", Collections.nCopies(targetColumnNames.size(), "?"))).append(")"); String insertStatement = sb.toString(); targetUpsertStatement = new TestTargetUpsertStatement(propertyHelper, targetSession, insertStatement); @@ -77,42 +78,46 @@ public void checkBindInputs_Writetime_throwsRuntimeException() { @Test public void 
checkBindInputs_ExplodeMap_nullKey_throwsRuntimeException() { - commonSetup(true,false,false); + commonSetup(true, false, false); Object mockValue = mock(Object.class); targetUpsertStatement = new TestTargetUpsertStatement(propertyHelper, targetSession); - RuntimeException exception = assertThrows(RuntimeException.class, () -> targetUpsertStatement.checkBindInputs(null, null, null, mockValue)); + RuntimeException exception = assertThrows(RuntimeException.class, + () -> targetUpsertStatement.checkBindInputs(null, null, null, mockValue)); assertTrue(exception.getMessage().startsWith("ExplodeMap is enabled, but no map key")); } @Test public void checkBindInputs_ExplodeMap_nullValue_throwsRuntimeException() { - commonSetup(true,false,false); + commonSetup(true, false, false); String goodKey = "abc"; targetUpsertStatement = new TestTargetUpsertStatement(propertyHelper, targetSession); - RuntimeException exception = assertThrows(RuntimeException.class, () -> targetUpsertStatement.checkBindInputs(null, null, goodKey, null)); + RuntimeException exception = assertThrows(RuntimeException.class, + () -> targetUpsertStatement.checkBindInputs(null, null, goodKey, null)); assertTrue(exception.getMessage().startsWith("ExplodeMap is enabled, but no map value")); } @Test public void checkBindInputs_ExplodeMap_invalidKeyType_throwsRuntimeException() { - commonSetup(true,false,false); + commonSetup(true, false, false); Integer badKey = 1; targetUpsertStatement = new TestTargetUpsertStatement(propertyHelper, targetSession); - RuntimeException exception = assertThrows(RuntimeException.class, () -> targetUpsertStatement.checkBindInputs(null, null, badKey, null)); + RuntimeException exception = assertThrows(RuntimeException.class, + () -> targetUpsertStatement.checkBindInputs(null, null, badKey, null)); assertTrue(exception.getMessage().startsWith("ExplodeMap is enabled, but the map key type provided")); } @Test public void checkBindInputs_ExplodeMap_invalidValueType_throwsRuntimeException() { - commonSetup(true,false,false); + commonSetup(true, false, false); String goodKey = "abc"; Integer badValue = 1; targetUpsertStatement = new TestTargetUpsertStatement(propertyHelper, targetSession); - RuntimeException exception = assertThrows(RuntimeException.class, () -> targetUpsertStatement.checkBindInputs(null, null, goodKey, badValue)); + RuntimeException exception = assertThrows(RuntimeException.class, + () -> targetUpsertStatement.checkBindInputs(null, null, goodKey, badValue)); assertTrue(exception.getMessage().startsWith("ExplodeMap is enabled, but the map value type provided")); } @@ -131,14 +136,12 @@ public void executeAsync_executesAsyncStatement() { @Test public void constantColumns_goodConfig() { - commonSetup(false,true,false); + commonSetup(false, true, false); targetUpsertStatement = new TestTargetUpsertStatement(propertyHelper, targetSession); - assertAll( - () -> assertEquals(constantColumns,targetUpsertStatement.constantColumnNames), - () -> assertEquals(constantColumnValues,targetUpsertStatement.constantColumnValues) - ); + assertAll(() -> assertEquals(constantColumns, targetUpsertStatement.constantColumnNames), + () -> assertEquals(constantColumnValues, targetUpsertStatement.constantColumnValues)); } @Test @@ -181,15 +184,19 @@ public TestTargetUpsertStatement(IPropertyHelper h, EnhancedSession s, String st super(h, s); this.statement = statement; } + public TestTargetUpsertStatement(IPropertyHelper h, EnhancedSession s) { - this(h,s,"some arbitrary text"); + this(h, s, "some arbitrary text"); } 
@Override - protected String buildStatement() { return statement; }; + protected String buildStatement() { + return statement; + }; @Override - protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long writeTime, Object explodeMapKey, Object explodeMapValue) { + protected BoundStatement bind(Row originRow, Row targetRow, Integer ttl, Long writeTime, Object explodeMapKey, + Object explodeMapValue) { checkBindInputs(ttl, writeTime, explodeMapKey, explodeMapValue); return boundStatement; } diff --git a/src/test/java/com/datastax/cdm/data/CqlConversionTest.java b/src/test/java/com/datastax/cdm/data/CqlConversionTest.java index 4d732a3c..9ea7876b 100644 --- a/src/test/java/com/datastax/cdm/data/CqlConversionTest.java +++ b/src/test/java/com/datastax/cdm/data/CqlConversionTest.java @@ -15,17 +15,18 @@ */ package com.datastax.cdm.data; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; import java.util.Collections; import java.util.List; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; @ExtendWith(MockitoExtension.class) class CqlConversionTest { @@ -44,62 +45,65 @@ void setUp() { @Test void testConstructorThrowsIllegalArgumentExceptionWhenArgumentsAreNull() { assertAll( - () -> assertThrows(IllegalArgumentException.class, () -> new CqlConversion(null, toDataType, codecRegistry), "null fromDataType"), - () -> assertThrows(IllegalArgumentException.class, () -> new CqlConversion(fromDataType, null, codecRegistry), "null toDataType"), - () -> assertThrows(IllegalArgumentException.class, () -> new CqlConversion(fromDataType, toDataType, null), "null codecRegistry") - ); + () -> assertThrows(IllegalArgumentException.class, + () -> new CqlConversion(null, toDataType, codecRegistry), "null fromDataType"), + () -> assertThrows(IllegalArgumentException.class, + () -> new CqlConversion(fromDataType, null, codecRegistry), "null toDataType"), + () -> assertThrows(IllegalArgumentException.class, + () -> new CqlConversion(fromDataType, toDataType, null), "null codecRegistry")); } -// @Test -// void testConvertWhenConversionTypeIsNone() { -// CqlConversion.Type conversionType = CqlConversion.Type.NONE; -// List conversionTypeList = Collections.singletonList(conversionType); -// -// CqlConversion cqlConversion = spy(new CqlConversion(fromDataType, toDataType, codecRegistry)); -// doReturn(conversionTypeList).when(cqlConversion).getConversionTypeList(); -// -// Object inputData = new Object(); -// Object result = cqlConversion.convert(inputData); -// -// assertSame(inputData, result); -// } -// -// @Test -// void testConvertWhenConversionTypeIsUnsupported() { -// CqlConversion.Type conversionType = CqlConversion.Type.UNSUPPORTED; -// List conversionTypeList = Collections.singletonList(conversionType); -// -// CqlConversion cqlConversion = spy(new CqlConversion(fromDataType, toDataType, codecRegistry)); -// doReturn(conversionTypeList).when(cqlConversion).getConversionTypeList(); -// -// Object 
inputData = new Object(); -// Object result = cqlConversion.convert(inputData); -// -// assertSame(inputData, result); -// } -// -// @Test -// void testConvertWhenConversionTypeIsCodec() { -// CqlConversion.Type conversionType = CqlConversion.Type.CODEC; -// List conversionTypeList = Collections.singletonList(conversionType); -// -// CqlConversion cqlConversion = spy(new CqlConversion(fromDataType, toDataType, codecRegistry)); -// doReturn(conversionTypeList).when(cqlConversion).getConversionTypeList(); -// doReturn(Collections.singletonList(fromDataType)).when(cqlConversion).getFromDataTypeList(); -// doReturn(Collections.singletonList(toDataType)).when(cqlConversion).getToDataTypeList(); -// -// Object inputData = new Object(); -// Object expectedResult = new Object(); -// -// // Stub the convert_ONE() method to return expectedResult when called with specific arguments -// doReturn(expectedResult).when(cqlConversion).convert_ONE(conversionType, inputData, fromDataType, toDataType, codecRegistry); -// -// Object result = cqlConversion.convert(inputData); -// -// // Verify that convert_ONE() was called with the expected arguments -// verify(cqlConversion).convert_ONE(conversionType, inputData, fromDataType, toDataType, codecRegistry); -// -// assertEquals(expectedResult, result); -// } + // @Test + // void testConvertWhenConversionTypeIsNone() { + // CqlConversion.Type conversionType = CqlConversion.Type.NONE; + // List conversionTypeList = Collections.singletonList(conversionType); + // + // CqlConversion cqlConversion = spy(new CqlConversion(fromDataType, toDataType, codecRegistry)); + // doReturn(conversionTypeList).when(cqlConversion).getConversionTypeList(); + // + // Object inputData = new Object(); + // Object result = cqlConversion.convert(inputData); + // + // assertSame(inputData, result); + // } + // + // @Test + // void testConvertWhenConversionTypeIsUnsupported() { + // CqlConversion.Type conversionType = CqlConversion.Type.UNSUPPORTED; + // List conversionTypeList = Collections.singletonList(conversionType); + // + // CqlConversion cqlConversion = spy(new CqlConversion(fromDataType, toDataType, codecRegistry)); + // doReturn(conversionTypeList).when(cqlConversion).getConversionTypeList(); + // + // Object inputData = new Object(); + // Object result = cqlConversion.convert(inputData); + // + // assertSame(inputData, result); + // } + // + // @Test + // void testConvertWhenConversionTypeIsCodec() { + // CqlConversion.Type conversionType = CqlConversion.Type.CODEC; + // List conversionTypeList = Collections.singletonList(conversionType); + // + // CqlConversion cqlConversion = spy(new CqlConversion(fromDataType, toDataType, codecRegistry)); + // doReturn(conversionTypeList).when(cqlConversion).getConversionTypeList(); + // doReturn(Collections.singletonList(fromDataType)).when(cqlConversion).getFromDataTypeList(); + // doReturn(Collections.singletonList(toDataType)).when(cqlConversion).getToDataTypeList(); + // + // Object inputData = new Object(); + // Object expectedResult = new Object(); + // + // // Stub the convert_ONE() method to return expectedResult when called with specific arguments + // doReturn(expectedResult).when(cqlConversion).convert_ONE(conversionType, inputData, fromDataType, toDataType, + // codecRegistry); + // + // Object result = cqlConversion.convert(inputData); + // + // // Verify that convert_ONE() was called with the expected arguments + // verify(cqlConversion).convert_ONE(conversionType, inputData, fromDataType, toDataType, codecRegistry); + // + // 
assertEquals(expectedResult, result); + // } } diff --git a/src/test/java/com/datastax/cdm/data/DataUtilityTest.java b/src/test/java/com/datastax/cdm/data/DataUtilityTest.java index 5aeb6847..6c116c72 100644 --- a/src/test/java/com/datastax/cdm/data/DataUtilityTest.java +++ b/src/test/java/com/datastax/cdm/data/DataUtilityTest.java @@ -54,35 +54,34 @@ private void setTestVariables() { targetValueColumns = new ArrayList<>(originValueColumns); targetValueColumnTypes = new ArrayList<>(originValueColumnTypes); - originValueColumns.addAll(Arrays.asList("parameter-value","PaRaMeTeR-Value-MiXedCaSE")); + originValueColumns.addAll(Arrays.asList("parameter-value", "PaRaMeTeR-Value-MiXedCaSE")); originValueColumnTypes.addAll(Arrays.asList(DataTypes.INT, DataTypes.TEXT)); - originToTargetNameList = Arrays.asList("parameter-value:parameter_value","PaRaMeTeR-Value-MiXedCaSE:parameter_value_standard_case"); + originToTargetNameList = Arrays.asList("parameter-value:parameter_value", + "PaRaMeTeR-Value-MiXedCaSE:parameter_value_standard_case"); - targetValueColumns.addAll(Arrays.asList("parameter_value","parameter_value_standard_case")); + targetValueColumns.addAll(Arrays.asList("parameter_value", "parameter_value_standard_case")); targetValueColumnTypes.addAll(Arrays.asList(DataTypes.INT, DataTypes.TEXT)); } @Test public void originToTarget() { - Map map = DataUtility.getThisToThatColumnNameMap(propertyHelper, originTable, targetTable); + Map map = DataUtility.getThisToThatColumnNameMap(propertyHelper, originTable, targetTable); - assertAll( - () -> assertEquals("parameter_value", map.get("parameter-value"), "encapsulated name"), - () -> assertEquals("parameter_value_standard_case", map.get("PaRaMeTeR-Value-MiXedCaSE"), "Mixed and complete rename"), - () -> assertEquals(targetColumnNames.size(), map.size(), "Map size should match origin column count") - ); + assertAll(() -> assertEquals("parameter_value", map.get("parameter-value"), "encapsulated name"), + () -> assertEquals("parameter_value_standard_case", map.get("PaRaMeTeR-Value-MiXedCaSE"), + "Mixed and complete rename"), + () -> assertEquals(targetColumnNames.size(), map.size(), "Map size should match origin column count")); } @Test public void targetToOrigin() { Map map = DataUtility.getThisToThatColumnNameMap(propertyHelper, targetTable, originTable); - assertAll( - () -> assertEquals("parameter-value", map.get("parameter_value"), "encapsulated name"), - () -> assertEquals("PaRaMeTeR-Value-MiXedCaSE", map.get("parameter_value_standard_case"), "Mixed and complete rename"), - () -> assertEquals(originColumnNames.size(), map.size(), "Map size should match target column count") - ); + assertAll(() -> assertEquals("parameter-value", map.get("parameter_value"), "encapsulated name"), + () -> assertEquals("PaRaMeTeR-Value-MiXedCaSE", map.get("parameter_value_standard_case"), + "Mixed and complete rename"), + () -> assertEquals(originColumnNames.size(), map.size(), "Map size should match target column count")); } @Test @@ -106,24 +105,24 @@ public void columnOnThatNotThis() { assertNotEquals(extraColumn, entry.getValue()); } } - + @Test - public void diffTest() { - assertFalse(DataUtility.diff(null, null)); - assertFalse(DataUtility.diff("Hello", "Hello")); - assertTrue(DataUtility.diff(null, "Hello")); - assertTrue(DataUtility.diff("Hello", null)); - assertTrue(DataUtility.diff("", "Hello")); - assertTrue(DataUtility.diff("hello", "Hello")); - } - + public void diffTest() { + assertFalse(DataUtility.diff(null, null)); + 
assertFalse(DataUtility.diff("Hello", "Hello")); + assertTrue(DataUtility.diff(null, "Hello")); + assertTrue(DataUtility.diff("Hello", null)); + assertTrue(DataUtility.diff("", "Hello")); + assertTrue(DataUtility.diff("hello", "Hello")); + } + @Test - public void extractObjectsFromCollectionTest() { - List expected = Arrays.asList(1, 2, 3); + public void extractObjectsFromCollectionTest() { + List expected = Arrays.asList(1, 2, 3); List actualList = new ArrayList<>(); - actualList.add(1); - actualList.add(2); - actualList.add(3); + actualList.add(1); + actualList.add(2); + actualList.add(3); assertEquals(expected, DataUtility.extractObjectsFromCollection(actualList)); Set actualSet = new HashSet<>(); @@ -131,16 +130,18 @@ public void extractObjectsFromCollectionTest() { actualSet.add(2); actualSet.add(3); assertEquals(expected, DataUtility.extractObjectsFromCollection(actualSet)); - + Map actualMap = Map.of("1", "one", "2", "two", "3", "three"); List expectedMap = new ArrayList<>(actualMap.entrySet()); assertEquals(expectedMap, DataUtility.extractObjectsFromCollection(actualMap)); - } - + } + @Test - public void getMyClassMethodLineTest() { - Exception ex = new Exception(); - ex.setStackTrace(new StackTraceElement[] {new StackTraceElement("com.datastax.cdm.data.DataUtilityTest", "getMyClassMethodLineTest", "DataUtilityTest.java", 0)}); - assertEquals("com.datastax.cdm.data.DataUtilityTest.getMyClassMethodLineTest:0", DataUtility.getMyClassMethodLine(ex)); - } + public void getMyClassMethodLineTest() { + Exception ex = new Exception(); + ex.setStackTrace(new StackTraceElement[] { new StackTraceElement("com.datastax.cdm.data.DataUtilityTest", + "getMyClassMethodLineTest", "DataUtilityTest.java", 0) }); + assertEquals("com.datastax.cdm.data.DataUtilityTest.getMyClassMethodLineTest:0", + DataUtility.getMyClassMethodLine(ex)); + } } diff --git a/src/test/java/com/datastax/cdm/feature/AbstractFeatureTest.java b/src/test/java/com/datastax/cdm/feature/AbstractFeatureTest.java index 9c08e265..b91b7758 100644 --- a/src/test/java/com/datastax/cdm/feature/AbstractFeatureTest.java +++ b/src/test/java/com/datastax/cdm/feature/AbstractFeatureTest.java @@ -15,13 +15,14 @@ */ package com.datastax.cdm.feature; -import com.datastax.cdm.properties.IPropertyHelper; -import com.datastax.cdm.schema.CqlTable; +import static org.junit.jupiter.api.Assertions.*; + import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; -import static org.junit.jupiter.api.Assertions.*; +import com.datastax.cdm.properties.IPropertyHelper; +import com.datastax.cdm.schema.CqlTable; class AbstractFeatureTest { @@ -54,10 +55,8 @@ void setUp() { @Test void initialize_setsInitializedToTrueAndReturnsTrue() { boolean result = testFeature.loadProperties(propertyHelper); - assertAll( - () -> assertTrue(result, "Expected initialize() to return true"), - () -> assertTrue(testFeature.isLoaded, "Expected isInitialized to be set to true") - ); + assertAll(() -> assertTrue(result, "Expected initialize() to return true"), + () -> assertTrue(testFeature.isLoaded, "Expected isInitialized to be set to true")); } @Test diff --git a/src/test/java/com/datastax/cdm/feature/ConstantColumnsTest.java b/src/test/java/com/datastax/cdm/feature/ConstantColumnsTest.java index 54cfd289..f987d947 100644 --- a/src/test/java/com/datastax/cdm/feature/ConstantColumnsTest.java +++ b/src/test/java/com/datastax/cdm/feature/ConstantColumnsTest.java @@ -15,67 +15,68 @@ */ package com.datastax.cdm.feature; -import 
com.datastax.cdm.cql.CommonMocks; -import com.datastax.cdm.data.CqlData; -import com.datastax.cdm.properties.KnownProperties; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import com.datastax.cdm.cql.CommonMocks; +import com.datastax.cdm.data.CqlData; +import com.datastax.cdm.properties.KnownProperties; public class ConstantColumnsTest extends CommonMocks { ConstantColumns feature; List expectedBindClasses; -// String standardValues = "'abcd',1234,543"; -// String standardRegex = ","; -// List standardBindClasses = standardDataTypes.stream().map(CqlData::getBindClass).collect(Collectors.toList()); -// List standardValuesAsList = Arrays.asList(standardValues.split(standardRegex)); + // String standardValues = "'abcd',1234,543"; + // String standardRegex = ","; + // List standardBindClasses = + // standardDataTypes.stream().map(CqlData::getBindClass).collect(Collectors.toList()); + // List standardValuesAsList = Arrays.asList(standardValues.split(standardRegex)); @BeforeEach public void setup() { defaultClassVariables(); -// setTestVariables(); - commonSetupWithoutDefaultClassVariables(false,true,false); -// setTestWhens(); + // setTestVariables(); + commonSetupWithoutDefaultClassVariables(false, true, false); + // setTestWhens(); feature = new ConstantColumns(); expectedBindClasses = constantColumnTypes.stream().map(CqlData::getBindClass).collect(Collectors.toList()); when(propertyHelper.getStringList(KnownProperties.CONSTANT_COLUMN_NAMES)).thenReturn(constantColumns); - when(propertyHelper.getString(KnownProperties.CONSTANT_COLUMN_VALUES)).thenReturn(String.join(",", constantColumnValues)); + when(propertyHelper.getString(KnownProperties.CONSTANT_COLUMN_VALUES)) + .thenReturn(String.join(",", constantColumnValues)); when(propertyHelper.getString(KnownProperties.CONSTANT_COLUMN_SPLIT_REGEX)).thenReturn(","); } -// private void setTestVariables() { -// targetValueColumns = new ArrayList<>(originValueColumns); -// targetValueColumns.addAll(standardNames); -// targetValueColumnTypes = new ArrayList<>(originValueColumnTypes); -// targetValueColumnTypes.addAll(standardDataTypes); -// } + // private void setTestVariables() { + // targetValueColumns = new ArrayList<>(originValueColumns); + // targetValueColumns.addAll(standardNames); + // targetValueColumnTypes = new ArrayList<>(originValueColumnTypes); + // targetValueColumnTypes.addAll(standardDataTypes); + // } -// private void setTestWhens(){ -// when(targetCodec.parse(anyString())).thenReturn(any()); -// } + // private void setTestWhens(){ + // when(targetCodec.parse(anyString())).thenReturn(any()); + // } @Test public void smokeTest_loadProperties() { feature.loadProperties(propertyHelper); - assertAll( - () -> assertTrue(feature.isEnabled()), + assertAll(() -> assertTrue(feature.isEnabled()), () -> assertEquals(constantColumns, feature.getNames(), 
"names"), - () -> assertEquals(constantColumnValues, feature.getValues(), "values") - ); + () -> assertEquals(constantColumnValues, feature.getValues(), "values")); } @Test @@ -83,10 +84,8 @@ public void smokeTest_initializeAndValidate() { feature.loadProperties(propertyHelper); boolean valid = feature.initializeAndValidate(originTable, targetTable); - assertAll( - () -> assertTrue(valid, "correct validation"), - () -> assertEquals(expectedBindClasses, feature.getBindClasses(), "bind classes") - ); + assertAll(() -> assertTrue(valid, "correct validation"), + () -> assertEquals(expectedBindClasses, feature.getBindClasses(), "bind classes")); } @Test @@ -112,10 +111,8 @@ public void testEmptyConstantColumnValue() { feature.loadProperties(propertyHelper); boolean valid = feature.initializeAndValidate(originTable, targetTable); - assertAll( - () -> assertFalse(valid, "null string is invalid"), - () -> assertFalse(feature.isEnabled(), "feature should be disabled") - ); + assertAll(() -> assertFalse(valid, "null string is invalid"), + () -> assertFalse(feature.isEnabled(), "feature should be disabled")); } @Test @@ -125,10 +122,8 @@ public void testMismatchedConstantColumnNamesAndValues() { feature.loadProperties(propertyHelper); boolean valid = feature.initializeAndValidate(originTable, targetTable); - assertAll( - () -> assertFalse(valid, "Validation should fail with mismatched names and values"), - () -> assertFalse(feature.isEnabled(), "feature should be disabled") - ); + assertAll(() -> assertFalse(valid, "Validation should fail with mismatched names and values"), + () -> assertFalse(feature.isEnabled(), "feature should be disabled")); } @Test @@ -138,7 +133,8 @@ public void testMissingConstantColumnInTargetTable() { when(targetTable.extendColumns(constantColumns)).thenReturn(bindClasses); feature.loadProperties(propertyHelper); - assertFalse(feature.initializeAndValidate(originTable, targetTable), "Validation should fail with a missing constant column in the target table"); + assertFalse(feature.initializeAndValidate(originTable, targetTable), + "Validation should fail with a missing constant column in the target table"); } @Test diff --git a/src/test/java/com/datastax/cdm/feature/ExplodeMapTest.java b/src/test/java/com/datastax/cdm/feature/ExplodeMapTest.java index 7e8040be..15d19385 100644 --- a/src/test/java/com/datastax/cdm/feature/ExplodeMapTest.java +++ b/src/test/java/com/datastax/cdm/feature/ExplodeMapTest.java @@ -15,25 +15,26 @@ */ package com.datastax.cdm.feature; +import static org.apache.hadoop.shaded.com.google.common.base.CharMatcher.any; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.*; + +import java.util.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + import com.datastax.cdm.data.CqlConversion; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.cdm.data.CqlData; import com.datastax.cdm.properties.IPropertyHelper; import com.datastax.cdm.properties.KnownProperties; import com.datastax.cdm.schema.CqlTable; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mock; 
-import org.mockito.MockitoAnnotations; - -import java.util.*; - -import static org.apache.hadoop.shaded.com.google.common.base.CharMatcher.any; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.Mockito.*; public class ExplodeMapTest { @@ -58,39 +59,45 @@ public class ExplodeMapTest { MutableCodecRegistry codecRegistry; String standardMapColumnName = "map_col"; - List<String> standardOriginNames = Arrays.asList("key","val",standardMapColumnName); - List<DataType> standardOriginTypes = Arrays.asList(DataTypes.TIMESTAMP, DataTypes.INT, DataTypes.mapOf(DataTypes.TEXT, DataTypes.DOUBLE)); + List<String> standardOriginNames = Arrays.asList("key", "val", standardMapColumnName); + List<DataType> standardOriginTypes = Arrays.asList(DataTypes.TIMESTAMP, DataTypes.INT, + DataTypes.mapOf(DataTypes.TEXT, DataTypes.DOUBLE)); String standardKeyColumnName = "map_key"; String standardValueColumnName = "map_val"; - List<String> standardTargetNames = Arrays.asList("key","val",standardKeyColumnName,standardValueColumnName); - List<DataType> standardTargetTypes = Arrays.asList(DataTypes.TIMESTAMP, DataTypes.INT, DataTypes.TEXT, DataTypes.DOUBLE); + List<String> standardTargetNames = Arrays.asList("key", "val", standardKeyColumnName, standardValueColumnName); + List<DataType> standardTargetTypes = Arrays.asList(DataTypes.TIMESTAMP, DataTypes.INT, DataTypes.TEXT, + DataTypes.DOUBLE); @BeforeEach public void setup() { feature = new ExplodeMap(); MockitoAnnotations.openMocks(this); - when(propertyHelper.getString(KnownProperties.EXPLODE_MAP_ORIGIN_COLUMN_NAME)).thenReturn(standardMapColumnName); - when(propertyHelper.getString(KnownProperties.EXPLODE_MAP_TARGET_KEY_COLUMN_NAME)).thenReturn(standardKeyColumnName); - when(propertyHelper.getString(KnownProperties.EXPLODE_MAP_TARGET_VALUE_COLUMN_NAME)).thenReturn(standardValueColumnName); + when(propertyHelper.getString(KnownProperties.EXPLODE_MAP_ORIGIN_COLUMN_NAME)) + .thenReturn(standardMapColumnName); + when(propertyHelper.getString(KnownProperties.EXPLODE_MAP_TARGET_KEY_COLUMN_NAME)) + .thenReturn(standardKeyColumnName); + when(propertyHelper.getString(KnownProperties.EXPLODE_MAP_TARGET_VALUE_COLUMN_NAME)) + .thenReturn(standardValueColumnName); when(originTable.isOrigin()).thenReturn(true); - when(originTable.extendColumns(Collections.singletonList(standardMapColumnName))). 
- thenReturn(Collections.singletonList(CqlData.getBindClass(standardOriginTypes.get(2)))); + when(originTable.extendColumns(Collections.singletonList(standardMapColumnName))) + .thenReturn(Collections.singletonList(CqlData.getBindClass(standardOriginTypes.get(2)))); when(targetTable.isOrigin()).thenReturn(false); - when(targetTable.extendColumns(Arrays.asList(standardKeyColumnName,standardValueColumnName))) - .thenReturn(Arrays.asList(CqlData.getBindClass(standardTargetTypes.get(2)), CqlData.getBindClass(standardTargetTypes.get(3)))); + when(targetTable.extendColumns(Arrays.asList(standardKeyColumnName, standardValueColumnName))) + .thenReturn(Arrays.asList(CqlData.getBindClass(standardTargetTypes.get(2)), + CqlData.getBindClass(standardTargetTypes.get(3)))); - for (int i = 0; i< standardOriginNames.size(); i++) { + for (int i = 0; i < standardOriginNames.size(); i++) { when(originTable.getColumnNames(false)).thenReturn(standardOriginNames); when(originTable.indexOf(standardOriginNames.get(i))).thenReturn(i); when(originTable.getDataType(standardOriginNames.get(i))).thenReturn(standardOriginTypes.get(i)); when(originTable.getBindClass(i)).thenReturn(CqlData.getBindClass(standardOriginTypes.get(i))); } - for (int i = 0; i< standardTargetNames.size(); i++) { + for (int i = 0; i < standardTargetNames.size(); i++) { when(targetTable.getColumnNames(false)).thenReturn(standardTargetNames); when(targetTable.indexOf(standardTargetNames.get(i))).thenReturn(i); when(targetTable.getDataType(standardTargetNames.get(i))).thenReturn(standardTargetTypes.get(i)); @@ -107,28 +114,25 @@ public void setup() { public void smokeTest_loadProperties() { boolean loaded = feature.loadProperties(propertyHelper); - assertAll( - () -> assertTrue(loaded, "properties are loaded and valid"), - () -> assertTrue(feature.isEnabled()), + assertAll(() -> assertTrue(loaded, "properties are loaded and valid"), () -> assertTrue(feature.isEnabled()), () -> assertEquals(standardMapColumnName, feature.getOriginColumnName(), "origin name"), () -> assertEquals(standardKeyColumnName, feature.getKeyColumnName(), "key name"), - () -> assertEquals(standardValueColumnName, feature.getValueColumnName(), "value name") - ); + () -> assertEquals(standardValueColumnName, feature.getValueColumnName(), "value name")); } - @Test public void smokeTest_initializeAndValidate() { feature.loadProperties(propertyHelper); boolean valid = feature.initializeAndValidate(originTable, targetTable); - assertAll( - () -> assertTrue(valid, "configuration is valid"), - () -> assertEquals(standardOriginNames.indexOf(standardMapColumnName), feature.getOriginColumnIndex(), "origin index"), - () -> assertEquals(standardTargetNames.indexOf(standardKeyColumnName), feature.getKeyColumnIndex(), "key index"), - () -> assertEquals(standardTargetNames.indexOf(standardValueColumnName), feature.getValueColumnIndex(), "value index") - ); + assertAll(() -> assertTrue(valid, "configuration is valid"), + () -> assertEquals(standardOriginNames.indexOf(standardMapColumnName), feature.getOriginColumnIndex(), + "origin index"), + () -> assertEquals(standardTargetNames.indexOf(standardKeyColumnName), feature.getKeyColumnIndex(), + "key index"), + () -> assertEquals(standardTargetNames.indexOf(standardValueColumnName), feature.getValueColumnIndex(), + "value index")); } @Test @@ -241,7 +245,6 @@ public void testMissingValueColumn() { ); } - @Test public void testOriginIsNull() { feature.loadProperties(propertyHelper); @@ -273,7 +276,7 @@ public void testExplode_noConversion() { 
feature.loadProperties(propertyHelper); feature.initializeAndValidate(originTable, targetTable); - Map<String,Integer> testMap = new HashMap<>(); + Map<String, Integer> testMap = new HashMap<>(); testMap.put("key1", 10); testMap.put("key2", 20); Set<Map.Entry<String, Integer>> testEntries = testMap.entrySet(); @@ -333,4 +336,3 @@ public Object convert(Object value) { assertEquals(convertedMap.entrySet(), feature.explode(testMap)); } } - diff --git a/src/test/java/com/datastax/cdm/feature/ExtractJsonTest.java b/src/test/java/com/datastax/cdm/feature/ExtractJsonTest.java index e7cfe218..0cec5f6a 100644 --- a/src/test/java/com/datastax/cdm/feature/ExtractJsonTest.java +++ b/src/test/java/com/datastax/cdm/feature/ExtractJsonTest.java @@ -15,21 +15,6 @@ */ package com.datastax.cdm.feature; -import com.datastax.cdm.data.CqlConversion; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.cdm.data.CqlData; -import com.datastax.cdm.properties.IPropertyHelper; -import com.datastax.cdm.properties.KnownProperties; -import com.datastax.cdm.schema.CqlTable; -import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonMappingException; - import static org.apache.hadoop.shaded.com.google.common.base.CharMatcher.any; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.anyInt; @@ -39,6 +24,22 @@ import java.util.Collections; import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import com.datastax.cdm.data.CqlConversion; +import com.datastax.cdm.data.CqlData; +import com.datastax.cdm.properties.IPropertyHelper; +import com.datastax.cdm.properties.KnownProperties; +import com.datastax.cdm.schema.CqlTable; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonMappingException; + public class ExtractJsonTest { ExtractJson feature; @@ -52,8 +53,8 @@ public class ExtractJsonTest { @Mock CqlTable targetTable; - List<String> standardOriginNames = Arrays.asList("id","content"); - List<String> standardTargetNames = Arrays.asList("id","age"); + List<String> standardOriginNames = Arrays.asList("id", "content"); + List<String> standardTargetNames = Arrays.asList("id", "age"); List<DataType> standardOriginTypes = Arrays.asList(DataTypes.TEXT, DataTypes.TEXT); List<DataType> standardTargetTypes = Arrays.asList(DataTypes.TEXT, DataTypes.TEXT); @@ -67,25 +68,26 @@ public void setup() { MockitoAnnotations.openMocks(this); when(propertyHelper.getString(KnownProperties.EXTRACT_JSON_ORIGIN_COLUMN_NAME)).thenReturn(standardOriginName); - when(propertyHelper.getString(KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING)).thenReturn(standardTargetName); + when(propertyHelper.getString(KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING)) + .thenReturn(standardTargetName); when(originTable.getKeyspaceTable()).thenReturn("ORIGIN_TABLE"); when(originTable.isOrigin()).thenReturn(true); - when(originTable.extendColumns(Collections.singletonList(standardOriginName))). 
- thenReturn(Collections.singletonList(CqlData.getBindClass(standardOriginTypes.get(1)))); + when(originTable.extendColumns(Collections.singletonList(standardOriginName))) + .thenReturn(Collections.singletonList(CqlData.getBindClass(standardOriginTypes.get(1)))); when(targetTable.getKeyspaceTable()).thenReturn("TARGET_TABLE"); when(targetTable.isOrigin()).thenReturn(false); - when(targetTable.extendColumns(Collections.singletonList(standardTargetName))). - thenReturn(Collections.singletonList(CqlData.getBindClass(standardTargetTypes.get(1)))); + when(targetTable.extendColumns(Collections.singletonList(standardTargetName))) + .thenReturn(Collections.singletonList(CqlData.getBindClass(standardTargetTypes.get(1)))); - for (int i = 0; i< standardOriginNames.size(); i++) { + for (int i = 0; i < standardOriginNames.size(); i++) { when(originTable.getColumnNames(false)).thenReturn(standardOriginNames); when(originTable.indexOf(standardOriginNames.get(i))).thenReturn(i); when(originTable.getBindClass(i)).thenReturn(CqlData.getBindClass(standardOriginTypes.get(i))); } - for (int i = 0; i< standardTargetNames.size(); i++) { + for (int i = 0; i < standardTargetNames.size(); i++) { when(targetTable.getColumnNames(false)).thenReturn(standardTargetNames); when(targetTable.indexOf(standardTargetNames.get(i))).thenReturn(i); when(targetTable.getBindClass(i)).thenReturn(CqlData.getBindClass(standardTargetTypes.get(i))); @@ -96,12 +98,9 @@ public void setup() { public void loadProperties() { boolean loaded = feature.loadProperties(propertyHelper); - assertAll( - () -> assertTrue(loaded, "properties are loaded and valid"), - () -> assertTrue(feature.isEnabled()), + assertAll(() -> assertTrue(loaded, "properties are loaded and valid"), () -> assertTrue(feature.isEnabled()), () -> assertFalse(feature.overwriteTarget()), - () -> assertEquals(standardTargetName, feature.getTargetColumnName()) - ); + () -> assertEquals(standardTargetName, feature.getTargetColumnName())); } @Test @@ -115,10 +114,11 @@ public void loadPropertiesWithMapping() { () -> assertEquals("person_age", feature.getTargetColumnName()) ); } - + @Test public void loadPropertiesException() { - IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> feature.loadProperties(null)); + IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, + () -> feature.loadProperties(null)); assertTrue(thrown.getMessage().contains("helper is null")); } @@ -127,27 +127,27 @@ public void loadPropertiesOriginError() { when(propertyHelper.getString(KnownProperties.EXTRACT_JSON_ORIGIN_COLUMN_NAME)).thenReturn(null); assertFalse(feature.loadProperties(propertyHelper), "Origin column name is not set"); } - + @Test public void loadPropertiesTargetError() { when(propertyHelper.getString(KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING)).thenReturn(null); assertFalse(feature.loadProperties(propertyHelper), "Target column name is not set"); } - + @Test public void initializeAndValidate() { feature.loadProperties(propertyHelper); boolean valid = feature.initializeAndValidate(originTable, targetTable); - assertAll( - () -> assertTrue(valid, "configuration is valid"), - () -> assertEquals(standardOriginNames.indexOf(standardOriginName), feature.getOriginColumnIndex(), "origin index"), - () -> assertEquals(standardTargetNames.indexOf(standardTargetName), feature.getTargetColumnIndex(), "target index") - ); + assertAll(() -> assertTrue(valid, "configuration is valid"), + () -> 
assertEquals(standardOriginNames.indexOf(standardOriginName), feature.getOriginColumnIndex(), + "origin index"), + () -> assertEquals(standardTargetNames.indexOf(standardTargetName), feature.getTargetColumnIndex(), + "target index")); } @Test - public void extractNull() throws JsonMappingException, JsonProcessingException{ + public void extractNull() throws JsonMappingException, JsonProcessingException { feature.loadProperties(propertyHelper); boolean valid = feature.initializeAndValidate(originTable, targetTable); @@ -170,23 +170,25 @@ public void disabledFeature() { () -> assertEquals(-1, feature.getTargetColumnIndex(), "target index"), () -> assertEquals(-1, feature.getOriginColumnIndex(), "origin index") ); - + when(propertyHelper.getString(KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING)).thenReturn(null); assertEquals("", feature.getTargetColumnName(), "target name"); } @Test public void initializeAndValidateExceptionOriginNull() { - IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> feature.initializeAndValidate(null, targetTable)); + IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, + () -> feature.initializeAndValidate(null, targetTable)); assertTrue(thrown.getMessage().contains("Origin table and/or Target table is null")); } - + @Test public void initializeAndValidateExceptionTargetNull() { - IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> feature.initializeAndValidate(originTable, null)); + IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, + () -> feature.initializeAndValidate(originTable, null)); assertTrue(thrown.getMessage().contains("Origin table and/or Target table is null")); } - + @Test public void initializeAndValidateExceptionOriginColumn() { when(propertyHelper.getString(KnownProperties.EXTRACT_JSON_ORIGIN_COLUMN_NAME)).thenReturn("incorrect_column"); @@ -195,16 +197,16 @@ public void initializeAndValidateExceptionOriginColumn() { IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> feature.initializeAndValidate(originTable, targetTable)); } - + @Test public void initializeAndValidateExceptionTargetColumn() { when(propertyHelper.getString(KnownProperties.EXTRACT_JSON_TARGET_COLUMN_MAPPING)).thenReturn("incorrect_column"); - + feature.loadProperties(propertyHelper); IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> feature.initializeAndValidate(originTable, targetTable)); } - + @Test public void initializeAndValidateExceptionOriginIncorrect() { when(originTable.isOrigin()).thenReturn(false); @@ -233,4 +235,3 @@ public void invalidFeature() { } } - diff --git a/src/test/java/com/datastax/cdm/feature/FeatureFactoryTest.java b/src/test/java/com/datastax/cdm/feature/FeatureFactoryTest.java index 5b0ed112..af37b549 100644 --- a/src/test/java/com/datastax/cdm/feature/FeatureFactoryTest.java +++ b/src/test/java/com/datastax/cdm/feature/FeatureFactoryTest.java @@ -15,28 +15,30 @@ */ package com.datastax.cdm.feature; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import java.util.HashMap; import java.util.Map; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import org.junit.jupiter.api.Test; public class FeatureFactoryTest { @Test public void knownButUnimplementedFeature() { - 
assertThrows(IllegalArgumentException.class, () -> FeatureFactory.getFeature(Featureset.TEST_UNIMPLEMENTED_FEATURE)); + assertThrows(IllegalArgumentException.class, + () -> FeatureFactory.getFeature(Featureset.TEST_UNIMPLEMENTED_FEATURE)); } @Test public void testKnownFeatures() { int expectedFeatures = 0; - Map<Featureset,Feature> featureMap = new HashMap<>(); + Map<Featureset, Feature> featureMap = new HashMap<>(); for (Featureset feature : Featureset.values()) { - if (Featureset.TEST_UNIMPLEMENTED_FEATURE.equals(feature)) continue; + if (Featureset.TEST_UNIMPLEMENTED_FEATURE.equals(feature)) + continue; featureMap.put(feature, FeatureFactory.getFeature(feature)); expectedFeatures++; } @@ -45,10 +47,8 @@ public void testKnownFeatures() { assertEquals(expectedFeatures, featureMap.size(), "all features should be added"); // assert that none of the features in the list are null - assertAll( - featureMap.entrySet().stream() - .map(entry -> () -> assertNotNull(entry.getValue(), "Feature is null for key " + entry.getKey())) - ); + assertAll(featureMap.entrySet().stream() + .map(entry -> () -> assertNotNull(entry.getValue(), "Feature is null for key " + entry.getKey()))); } @Test @@ -58,12 +58,9 @@ public void testIsEnabled() { Feature mockDisabledFeature = mock(Feature.class); when(mockDisabledFeature.isEnabled()).thenReturn(false); - assertAll( - () -> assertFalse(FeatureFactory.isEnabled(null), "null feature should return false"), + assertAll(() -> assertFalse(FeatureFactory.isEnabled(null), "null feature should return false"), () -> assertFalse(FeatureFactory.isEnabled(mockDisabledFeature), "feature should return false"), - () -> assertTrue(FeatureFactory.isEnabled(mockEnabledFeature), "feature should return true") - ); + () -> assertTrue(FeatureFactory.isEnabled(mockEnabledFeature), "feature should return true")); } - } diff --git a/src/test/java/com/datastax/cdm/feature/GuardrailTest.java b/src/test/java/com/datastax/cdm/feature/GuardrailTest.java index a28ad5df..e692db4a 100644 --- a/src/test/java/com/datastax/cdm/feature/GuardrailTest.java +++ b/src/test/java/com/datastax/cdm/feature/GuardrailTest.java @@ -15,13 +15,14 @@ */ package com.datastax.cdm.feature; -import com.datastax.cdm.cql.CommonMocks; -import com.datastax.cdm.properties.KnownProperties; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import com.datastax.cdm.cql.CommonMocks; +import com.datastax.cdm.properties.KnownProperties; public class GuardrailTest extends CommonMocks { Guardrail guardrail; @@ -88,7 +89,8 @@ public void explodeMap_KeyExceeds() { guardrail.loadProperties(propertyHelper); guardrail.initializeAndValidate(originTable, targetTable); - when(targetTable.byteCount(eq(explodeMapFeature.getKeyColumnIndex()),any())).thenReturn(Guardrail.BASE_FACTOR+1); + when(targetTable.byteCount(eq(explodeMapFeature.getKeyColumnIndex()), any())) + .thenReturn(Guardrail.BASE_FACTOR + 1); String guardrailChecksResult = guardrail.guardrailChecks(record); assertTrue(guardrailChecksResult.startsWith("Large columns"), "guardrailChecks"); @@ -103,7 +105,8 @@ public void explodeMap_ValueExceeds() { guardrail.loadProperties(propertyHelper); guardrail.initializeAndValidate(originTable, targetTable); - when(targetTable.byteCount(eq(explodeMapFeature.getValueColumnIndex()),any())).thenReturn(Guardrail.BASE_FACTOR+1); + 
when(targetTable.byteCount(eq(explodeMapFeature.getValueColumnIndex()), any())) + .thenReturn(Guardrail.BASE_FACTOR + 1); String guardrailChecksResult = guardrail.guardrailChecks(record); assertTrue(guardrailChecksResult.startsWith("Large columns"), "guardrailChecks"); @@ -124,10 +127,8 @@ public void loadProperties_configured() { public void loadProperties_unconfigured() { boolean loadPropertiesResult = guardrail.loadProperties(propertyHelper); - assertAll( - () -> assertTrue(loadPropertiesResult, "loadProperties"), - () -> assertFalse(guardrail.isEnabled(), "enabled") - ); + assertAll(() -> assertTrue(loadPropertiesResult, "loadProperties"), + () -> assertFalse(guardrail.isEnabled(), "enabled")); } @Test @@ -215,5 +216,4 @@ public void checkWithNullOriginRow() { assertEquals(Guardrail.CLEAN_CHECK, guardrailChecksResult, "guardrailChecks"); } - } diff --git a/src/test/java/com/datastax/cdm/feature/OriginFilterConditionTest.java b/src/test/java/com/datastax/cdm/feature/OriginFilterConditionTest.java index 86547a31..47b60031 100644 --- a/src/test/java/com/datastax/cdm/feature/OriginFilterConditionTest.java +++ b/src/test/java/com/datastax/cdm/feature/OriginFilterConditionTest.java @@ -15,16 +15,17 @@ */ package com.datastax.cdm.feature; -import com.datastax.cdm.properties.IPropertyHelper; -import com.datastax.cdm.properties.KnownProperties; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import com.datastax.cdm.properties.IPropertyHelper; +import com.datastax.cdm.properties.KnownProperties; public class OriginFilterConditionTest { @@ -65,12 +66,10 @@ public void andIsPrepended() { String conditionIn = "a > 1"; when(propertyHelper.getString(KnownProperties.FILTER_CQL_WHERE_CONDITION)).thenReturn(conditionIn); - assertAll( - () -> assertTrue(feature.loadProperties(propertyHelper), "loadProperties"), - () -> assertTrue(feature.initializeAndValidate(null,null), "initializeAndValidate"), + assertAll(() -> assertTrue(feature.loadProperties(propertyHelper), "loadProperties"), + () -> assertTrue(feature.initializeAndValidate(null, null), "initializeAndValidate"), () -> assertTrue(feature.isEnabled(), "feature should be disabled"), - () -> assertEquals(" AND " + conditionIn, feature.getFilterCondition(), "and is prepended") - ); + () -> assertEquals(" AND " + conditionIn, feature.getFilterCondition(), "and is prepended")); } @Test @@ -78,11 +77,9 @@ public void whitespaceOnly() { String whitespaceString = " \t "; when(propertyHelper.getString(KnownProperties.FILTER_CQL_WHERE_CONDITION)).thenReturn(whitespaceString); - assertAll( - () -> assertFalse(feature.loadProperties(propertyHelper), "loadProperties"), - () -> assertFalse(feature.initializeAndValidate(null,null), "initializeAndValidate"), + assertAll(() -> assertFalse(feature.loadProperties(propertyHelper), "loadProperties"), + () -> assertFalse(feature.initializeAndValidate(null, null), "initializeAndValidate"), () -> assertFalse(feature.isEnabled(), "feature should be disabled"), - () -> assertEquals(whitespaceString, feature.getFilterCondition(), "whitespace hurts no one") - ); + () -> assertEquals(whitespaceString, feature.getFilterCondition(), "whitespace hurts no one")); } } diff --git 
diff --git a/src/test/java/com/datastax/cdm/feature/TTLAndWritetimeTest.java b/src/test/java/com/datastax/cdm/feature/TTLAndWritetimeTest.java
index c94ac38a..101a0ed1 100644
--- a/src/test/java/com/datastax/cdm/feature/TTLAndWritetimeTest.java
+++ b/src/test/java/com/datastax/cdm/feature/TTLAndWritetimeTest.java
@@ -15,20 +15,21 @@
  */
 package com.datastax.cdm.feature;
 
-import com.datastax.cdm.cql.CommonMocks;
-import com.datastax.cdm.properties.KnownProperties;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.*;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.cdm.cql.CommonMocks;
+import com.datastax.cdm.properties.KnownProperties;
+import com.datastax.oss.driver.api.core.type.DataTypes;
 
 public class TTLAndWritetimeTest extends CommonMocks {
@@ -45,16 +46,16 @@ public class TTLAndWritetimeTest extends CommonMocks {
     public void setup() {
         defaultClassVariables();
         setTestVariables();
-        commonSetupWithoutDefaultClassVariables(false,false,false);
+        commonSetupWithoutDefaultClassVariables(false, false, false);
         setTestWhens();
         feature = new WritetimeTTL();
     }
 
     private void setTestVariables() {
         originValueColumns = new ArrayList<>();
-        originValueColumns.addAll(Arrays.asList(writetimeColumnName,ttlColumnName,writetimeTTLColumnName));
+        originValueColumns.addAll(Arrays.asList(writetimeColumnName, ttlColumnName, writetimeTTLColumnName));
         originValueColumnTypes = new ArrayList<>(originValueColumnTypes);
-        originValueColumnTypes.addAll(Arrays.asList(DataTypes.TEXT,DataTypes.TEXT,DataTypes.TEXT));
+        originValueColumnTypes.addAll(Arrays.asList(DataTypes.TEXT, DataTypes.TEXT, DataTypes.TEXT));
     }
 
     private void setTestWhens(){
@@ -73,7 +74,6 @@ private void setTestWhens(){
         when(propertyHelper.getLong(KnownProperties.TRANSFORM_CUSTOM_TTL)).thenReturn(customTTL);
     }
 
-
     @Test
     public void smoke_loadProperties() {
         when(propertyHelper.getStringList(KnownProperties.ORIGIN_WRITETIME_NAMES)).thenReturn(null);
@@ -132,7 +132,6 @@ public void smokeTest_disabledFeature() {
         );
     }
 
-
     @Test
     public void smokeTest_enabledFeature_withOnlyWritetimeAuto() {
         when(propertyHelper.getLong(KnownProperties.FILTER_WRITETS_MIN)).thenReturn(null);
@@ -320,7 +319,6 @@ public void counter_configured() {
         );
     }
 
-
     @Test
     public void test_ttl_noValidColumns() {
         when(propertyHelper.getLong(KnownProperties.TRANSFORM_CUSTOM_WRITETIME)).thenReturn(0L);
diff --git a/src/test/java/com/datastax/cdm/feature/TrackRunTest.java b/src/test/java/com/datastax/cdm/feature/TrackRunTest.java
index 5bfa547e..248cf6d6 100644
--- a/src/test/java/com/datastax/cdm/feature/TrackRunTest.java
+++ b/src/test/java/com/datastax/cdm/feature/TrackRunTest.java
@@ -21,13 +21,13 @@
 class TrackRunTest {
 
-	@Test
-	void test() {
-		assertEquals("MIGRATE", TrackRun.RUN_TYPE.MIGRATE.name());
-		assertEquals("DIFF_DATA", TrackRun.RUN_TYPE.DIFF_DATA.name());
+    @Test
+    void test() {
+        assertEquals("MIGRATE", TrackRun.RUN_TYPE.MIGRATE.name());
+        assertEquals("DIFF_DATA", TrackRun.RUN_TYPE.DIFF_DATA.name());
 
-		assertEquals(2, TrackRun.RUN_TYPE.values().length);
-		assertEquals(5, TrackRun.RUN_STATUS.values().length);
-	}
+        assertEquals(2, TrackRun.RUN_TYPE.values().length);
+        assertEquals(5, TrackRun.RUN_STATUS.values().length);
+    }
 }
diff --git a/src/test/java/com/datastax/cdm/job/JobCounterTest.java b/src/test/java/com/datastax/cdm/job/JobCounterTest.java
index a82b10ea..49280d8b 100644
--- a/src/test/java/com/datastax/cdm/job/JobCounterTest.java
+++ b/src/test/java/com/datastax/cdm/job/JobCounterTest.java
@@ -16,9 +16,9 @@
 package com.datastax.cdm.job;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.junit.jupiter.api.Assertions.assertFalse;
 
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -32,140 +32,140 @@
 public class JobCounterTest {
 
-	private JobCounter jobCounter;
-	@Mock
-	private TrackRun trackRun;
-
-	@BeforeEach
-	public void setUp() {
-		MockitoAnnotations.openMocks(this);
-
-		jobCounter = new JobCounter(10, true); // Changed to true to test printPerThread
-		jobCounter.setRegisteredTypes(JobCounter.CounterType.values());
-	}
-
-	@Test
-	public void testThreadIncrement() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		assertEquals(5, jobCounter.getCount(JobCounter.CounterType.READ));
-	}
-
-	@Test
-	public void testGlobalIncrement() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		jobCounter.globalIncrement();
-		assertEquals(5, jobCounter.getCount(JobCounter.CounterType.READ, true));
-	}
-
-	@Test
-	public void testThreadResetForSpecificType() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		jobCounter.threadReset(JobCounter.CounterType.READ);
-		assertEquals(0, jobCounter.getCount(JobCounter.CounterType.READ));
-	}
-
-	@Test
-	public void testThreadResetForAllTypes() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		jobCounter.threadIncrement(JobCounter.CounterType.WRITE, 5);
-		jobCounter.threadReset();
-		assertEquals(0, jobCounter.getCount(JobCounter.CounterType.READ));
-		assertEquals(0, jobCounter.getCount(JobCounter.CounterType.WRITE));
-	}
-
-	@Test
-	public void testUnregisteredCounterType() {
-		JobCounter localJobCounter = new JobCounter(10, true);
-		localJobCounter.setRegisteredTypes(JobCounter.CounterType.READ);
-		assertThrows(IllegalArgumentException.class,
-				() -> localJobCounter.threadIncrement(JobCounter.CounterType.WRITE, 5));
-	}
-
-	@Test
-	public void testShouldPrintGlobalProgress() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 11);
-		jobCounter.globalIncrement();
-		assertTrue(jobCounter.shouldPrintGlobalProgress()); // assuming printStatsAfter is set to 10
-	}
-
-	@Test
-	public void testPrintProgressForGlobalAndThread() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 11);
-		jobCounter.globalIncrement();
-		// You may use mocking to capture logger outputs
-		jobCounter.printProgress();
-	}
-
-	@Test
-	public void testPrintFinal() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		jobCounter.globalIncrement();
-		// You may use mocking to capture logger outputs
-		jobCounter.printFinal(null);
-	}
-
-	@Captor
-	private ArgumentCaptor trackRunInfoCaptor;
-
-	@Test
-	public void testPrintFinalWithRunTracking() {
-		String expected = "Read: 5; Mismatch: 0; Corrected Mismatch: 0; Missing: 0; Corrected Missing: 7; Valid: 0; Skipped: 0; Write: 0; Error: 72; Large: 42";
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		jobCounter.threadIncrement(JobCounter.CounterType.CORRECTED_MISSING, 7);
-		jobCounter.threadIncrement(JobCounter.CounterType.ERROR, 72);
-		jobCounter.threadIncrement(JobCounter.CounterType.LARGE, 42);
-		jobCounter.globalIncrement();
-		// You may use mocking to capture logger outputs
-		jobCounter.printFinal(trackRun);
-		Mockito.verify(trackRun).endCdmRun(trackRunInfoCaptor.capture());
-		assertEquals(expected, trackRunInfoCaptor.getValue());
-	}
-
-	@Test
-	public void testGetCountGlobal() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		jobCounter.globalIncrement();
-		assertEquals(5, jobCounter.getCount(JobCounter.CounterType.READ, true));
-	}
-
-	@Test
-	public void threadIncrementByOne() {
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		jobCounter.threadIncrement(JobCounter.CounterType.READ);
-		assertEquals(6, jobCounter.getCount(JobCounter.CounterType.READ));
-	}
-
-	@Test
-	public void testShouldPrintGlobalProgressWithSufficientReads() {
-		// Increment global READ counter to go beyond the printStatsAfter threshold
-		// (assume it's 10)
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 11);
-		jobCounter.globalIncrement();
-
-		// shouldPrintGlobalProgress should return true because there are enough READs
-		assertTrue(jobCounter.shouldPrintGlobalProgress());
-	}
-
-	@Test
-	public void testShouldPrintGlobalProgressWithInsufficientReads() {
-		// Increment global READ counter to remain less than printStatsAfter threshold
-		// (assume it's 10)
-		jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
-		jobCounter.globalIncrement();
-
-		// shouldPrintGlobalProgress should return true because there are enough READs
-		assertFalse(jobCounter.shouldPrintGlobalProgress());
-	}
-
-	@Test
-	public void testShouldPrintGlobalProgressWithUnregisteredRead() {
-		jobCounter = new JobCounter(10, true); // Changed to true to test printPerThread
-
-		// Set only WRITE as the registered type
-		jobCounter.setRegisteredTypes(JobCounter.CounterType.WRITE);
-
-		// shouldPrintGlobalProgress should return false because READ is not registered
-		assertFalse(jobCounter.shouldPrintGlobalProgress());
-	}
+    private JobCounter jobCounter;
+    @Mock
+    private TrackRun trackRun;
+
+    @BeforeEach
+    public void setUp() {
+        MockitoAnnotations.openMocks(this);
+
+        jobCounter = new JobCounter(10, true); // Changed to true to test printPerThread
+        jobCounter.setRegisteredTypes(JobCounter.CounterType.values());
+    }
+
+    @Test
+    public void testThreadIncrement() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        assertEquals(5, jobCounter.getCount(JobCounter.CounterType.READ));
+    }
+
+    @Test
+    public void testGlobalIncrement() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        jobCounter.globalIncrement();
+        assertEquals(5, jobCounter.getCount(JobCounter.CounterType.READ, true));
+    }
+
+    @Test
+    public void testThreadResetForSpecificType() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        jobCounter.threadReset(JobCounter.CounterType.READ);
+        assertEquals(0, jobCounter.getCount(JobCounter.CounterType.READ));
+    }
+
+    @Test
+    public void testThreadResetForAllTypes() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        jobCounter.threadIncrement(JobCounter.CounterType.WRITE, 5);
+        jobCounter.threadReset();
+        assertEquals(0, jobCounter.getCount(JobCounter.CounterType.READ));
+        assertEquals(0, jobCounter.getCount(JobCounter.CounterType.WRITE));
+    }
+
+    @Test
+    public void testUnregisteredCounterType() {
+        JobCounter localJobCounter = new JobCounter(10, true);
+        localJobCounter.setRegisteredTypes(JobCounter.CounterType.READ);
+        assertThrows(IllegalArgumentException.class,
+                () -> localJobCounter.threadIncrement(JobCounter.CounterType.WRITE, 5));
+    }
+
+    @Test
+    public void testShouldPrintGlobalProgress() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 11);
+        jobCounter.globalIncrement();
+        assertTrue(jobCounter.shouldPrintGlobalProgress()); // assuming printStatsAfter is set to 10
+    }
+
+    @Test
+    public void testPrintProgressForGlobalAndThread() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 11);
+        jobCounter.globalIncrement();
+        // You may use mocking to capture logger outputs
+        jobCounter.printProgress();
+    }
+
+    @Test
+    public void testPrintFinal() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        jobCounter.globalIncrement();
+        // You may use mocking to capture logger outputs
+        jobCounter.printFinal(null);
+    }
+
+    @Captor
+    private ArgumentCaptor trackRunInfoCaptor;
+
+    @Test
+    public void testPrintFinalWithRunTracking() {
+        String expected = "Read: 5; Mismatch: 0; Corrected Mismatch: 0; Missing: 0; Corrected Missing: 7; Valid: 0; Skipped: 0; Write: 0; Error: 72; Large: 42";
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        jobCounter.threadIncrement(JobCounter.CounterType.CORRECTED_MISSING, 7);
+        jobCounter.threadIncrement(JobCounter.CounterType.ERROR, 72);
+        jobCounter.threadIncrement(JobCounter.CounterType.LARGE, 42);
+        jobCounter.globalIncrement();
+        // You may use mocking to capture logger outputs
+        jobCounter.printFinal(trackRun);
+        Mockito.verify(trackRun).endCdmRun(trackRunInfoCaptor.capture());
+        assertEquals(expected, trackRunInfoCaptor.getValue());
+    }
+
+    @Test
+    public void testGetCountGlobal() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        jobCounter.globalIncrement();
+        assertEquals(5, jobCounter.getCount(JobCounter.CounterType.READ, true));
+    }
+
+    @Test
+    public void threadIncrementByOne() {
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        jobCounter.threadIncrement(JobCounter.CounterType.READ);
+        assertEquals(6, jobCounter.getCount(JobCounter.CounterType.READ));
+    }
+
+    @Test
+    public void testShouldPrintGlobalProgressWithSufficientReads() {
+        // Increment global READ counter to go beyond the printStatsAfter threshold
+        // (assume it's 10)
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 11);
+        jobCounter.globalIncrement();
+
+        // shouldPrintGlobalProgress should return true because there are enough READs
+        assertTrue(jobCounter.shouldPrintGlobalProgress());
+    }
+
+    @Test
+    public void testShouldPrintGlobalProgressWithInsufficientReads() {
+        // Increment global READ counter to remain less than printStatsAfter threshold
+        // (assume it's 10)
+        jobCounter.threadIncrement(JobCounter.CounterType.READ, 5);
+        jobCounter.globalIncrement();
+
+        // shouldPrintGlobalProgress should return false because there are not enough READs
+        assertFalse(jobCounter.shouldPrintGlobalProgress());
+    }
+
+    @Test
+    public void testShouldPrintGlobalProgressWithUnregisteredRead() {
+        jobCounter = new JobCounter(10, true); // Changed to true to test printPerThread
+
+        // Set only WRITE as the registered type
+        jobCounter.setRegisteredTypes(JobCounter.CounterType.WRITE);
+
+        // shouldPrintGlobalProgress should return false because READ is not registered
+        assertFalse(jobCounter.shouldPrintGlobalProgress());
+    }
 }
diff --git a/src/test/java/com/datastax/cdm/job/SplitPartitionsTest.java b/src/test/java/com/datastax/cdm/job/SplitPartitionsTest.java
index 80c41a92..8b172f82 100644
--- a/src/test/java/com/datastax/cdm/job/SplitPartitionsTest.java
+++ b/src/test/java/com/datastax/cdm/job/SplitPartitionsTest.java
@@ -25,26 +25,26 @@
 import com.datastax.cdm.properties.PropertyHelper;
 
 public class SplitPartitionsTest {
-	@AfterEach
-	void tearDown() {
-		PropertyHelper.destroyInstance();
-	}
-
-	@Test
-	void getRandomSubPartitionsTest() {
-		List partitions = SplitPartitions.getRandomSubPartitions(10, BigInteger.ONE,
-				BigInteger.valueOf(100), 100);
-		assertEquals(10, partitions.size());
-		partitions.forEach(p -> {
-			assertEquals(9, p.getMax().longValue() - p.getMin().longValue());
-		});
-	}
-
-	@Test
-	void getRandomSubPartitionsTestOver100() {
-		List partitions = SplitPartitions.getRandomSubPartitions(8, BigInteger.ONE,
-				BigInteger.valueOf(44), 200);
-		assertEquals(8, partitions.size());
-	}
+    @AfterEach
+    void tearDown() {
+        PropertyHelper.destroyInstance();
+    }
+
+    @Test
+    void getRandomSubPartitionsTest() {
+        List partitions = SplitPartitions.getRandomSubPartitions(10, BigInteger.ONE,
+                BigInteger.valueOf(100), 100);
+        assertEquals(10, partitions.size());
+        partitions.forEach(p -> {
+            assertEquals(9, p.getMax().longValue() - p.getMin().longValue());
+        });
+    }
+
+    @Test
+    void getRandomSubPartitionsTestOver100() {
+        List partitions = SplitPartitions.getRandomSubPartitions(8, BigInteger.ONE,
+                BigInteger.valueOf(44), 200);
+        assertEquals(8, partitions.size());
+    }
 }
diff --git a/src/test/java/com/datastax/cdm/properties/KnownPropertiesTest.java b/src/test/java/com/datastax/cdm/properties/KnownPropertiesTest.java
index 8171de77..b63f4c44 100644
--- a/src/test/java/com/datastax/cdm/properties/KnownPropertiesTest.java
+++ b/src/test/java/com/datastax/cdm/properties/KnownPropertiesTest.java
@@ -15,19 +15,19 @@
  */
 package com.datastax.cdm.properties;
 
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
 
 public class KnownPropertiesTest {
 
     @Test
     public void getDefault_knownDefault() {
-        assertEquals("text",KnownProperties.getDefaultAsString(KnownProperties.TEST_STRING));
+        assertEquals("text", KnownProperties.getDefaultAsString(KnownProperties.TEST_STRING));
     }
 
     @Test
@@ -79,7 +79,8 @@ public void asType_String() {
     @Test
     public void asType_StringList() {
         String value = "a,b,c";
-        assertEquals(Arrays.asList(value.split(",")), KnownProperties.asType(KnownProperties.PropertyType.STRING_LIST, value));
+        assertEquals(Arrays.asList(value.split(",")),
+                KnownProperties.asType(KnownProperties.PropertyType.STRING_LIST, value));
     }
 
     @Test
@@ -95,7 +96,8 @@ public void asType_Number_Invalid() {
     @Test
     public void asType_NumberList() {
-        assertEquals(Arrays.asList(1L,2L,3L), KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, "1,2,3"));
+        assertEquals(Arrays.asList(1L, 2L, 3L),
+                KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, "1,2,3"));
     }
 
     @Test
@@ -118,12 +120,13 @@ public void asType_unhandledType() {
     @Test
     public void getDefaultAsString_NumberList() {
-        Assertions.assertEquals(KnownProperties.TEST_NUMBER_LIST_DEFAULT, KnownProperties.getDefaultAsString(KnownProperties.TEST_NUMBER_LIST));
+        Assertions.assertEquals(KnownProperties.TEST_NUMBER_LIST_DEFAULT,
+                KnownProperties.getDefaultAsString(KnownProperties.TEST_NUMBER_LIST));
     }
 
     @Test
     public void getDefault_NumberList() {
-        assertEquals(Arrays.asList(1L,2L), KnownProperties.getDefault(KnownProperties.TEST_NUMBER_LIST));
+        assertEquals(Arrays.asList(1L, 2L), KnownProperties.getDefault(KnownProperties.TEST_NUMBER_LIST));
     }
 
     @Test
@@ -139,7 +142,8 @@ public void getDefault_noDefault() {
     @Test
     public void getTypeMap() {
         assertNotNull(KnownProperties.getTypeMap());
-        assertEquals(KnownProperties.PropertyType.STRING, KnownProperties.getTypeMap().get(KnownProperties.TEST_STRING));
+        assertEquals(KnownProperties.PropertyType.STRING,
+                KnownProperties.getTypeMap().get(KnownProperties.TEST_STRING));
     }
 
     @Test
@@ -160,7 +164,8 @@ public void validateType_String_notString() {
     @Test
     public void validateType_StringList() {
-        assertTrue(KnownProperties.validateType(KnownProperties.PropertyType.STRING_LIST, KnownProperties.asType(KnownProperties.PropertyType.STRING_LIST,"a,b,c")));
+        assertTrue(KnownProperties.validateType(KnownProperties.PropertyType.STRING_LIST,
+                KnownProperties.asType(KnownProperties.PropertyType.STRING_LIST, "a,b,c")));
     }
 
     @Test
@@ -178,12 +183,14 @@ public void validateType_StringList_MixedList() {
     @Test
     public void validateType_Number() {
-        assertTrue(KnownProperties.validateType(KnownProperties.PropertyType.NUMBER, KnownProperties.asType(KnownProperties.PropertyType.NUMBER,"1")));
+        assertTrue(KnownProperties.validateType(KnownProperties.PropertyType.NUMBER,
+                KnownProperties.asType(KnownProperties.PropertyType.NUMBER, "1")));
     }
 
     @Test
     public void validateType_NumberList() {
-        assertTrue(KnownProperties.validateType(KnownProperties.PropertyType.NUMBER_LIST, KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST,"1,2,3")));
+        assertTrue(KnownProperties.validateType(KnownProperties.PropertyType.NUMBER_LIST,
+                KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, "1,2,3")));
     }
 
     @Test
@@ -201,7 +208,8 @@ public void validateType_NumberList_MixedList() {
     @Test
     public void validateType_Boolean() {
-        assertTrue(KnownProperties.validateType(KnownProperties.PropertyType.BOOLEAN, KnownProperties.asType(KnownProperties.PropertyType.BOOLEAN,"false")));
+        assertTrue(KnownProperties.validateType(KnownProperties.PropertyType.BOOLEAN,
+                KnownProperties.asType(KnownProperties.PropertyType.BOOLEAN, "false")));
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/com/datastax/cdm/properties/PropertyHelperTest.java b/src/test/java/com/datastax/cdm/properties/PropertyHelperTest.java
index cf2d8fc3..8cd13838 100644
--- a/src/test/java/com/datastax/cdm/properties/PropertyHelperTest.java
+++ b/src/test/java/com/datastax/cdm/properties/PropertyHelperTest.java
@@ -15,14 +15,15 @@
  */
 package com.datastax.cdm.properties;
 
-import java.util.List;
+import static org.junit.jupiter.api.Assertions.*;
+
 import java.util.Arrays;
+import java.util.List;
 
 import org.apache.spark.SparkConf;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import static org.junit.jupiter.api.Assertions.*;
 
 public class PropertyHelperTest {
     PropertyHelper helper;
@@ -48,7 +49,7 @@ public void setProperty_String() {
 
     @Test
     public void setProperty_StringList() {
-        List value = Arrays.asList("a","b", "c");
+        List value = Arrays.asList("a", "b", "c");
         List setValue = (List) helper.setProperty(KnownProperties.TEST_STRING_LIST, value);
         assertEquals(value, setValue);
     }
@@ -63,14 +64,16 @@ public void setProperty_StringList_oneValue() {
 
     @Test
     public void setProperty_StringList_splitString() {
         String list = "a,b,c";
-        List setValue = (List) helper.setProperty(KnownProperties.TEST_STRING_LIST, KnownProperties.asType(KnownProperties.PropertyType.STRING_LIST, list));
+        List setValue = (List) helper.setProperty(KnownProperties.TEST_STRING_LIST,
+                KnownProperties.asType(KnownProperties.PropertyType.STRING_LIST, list));
         assertEquals(Arrays.asList(list.split(",")), setValue);
     }
 
     @Test
     public void setProperty_StringList_splitString_oneValue() {
         String list = "a";
-        List setValue = (List) helper.setProperty(KnownProperties.TEST_STRING_LIST, KnownProperties.asType(KnownProperties.PropertyType.STRING_LIST, list));
+        List setValue = (List) helper.setProperty(KnownProperties.TEST_STRING_LIST,
+                KnownProperties.asType(KnownProperties.PropertyType.STRING_LIST, list));
         assertEquals(Arrays.asList(list), setValue);
     }
@@ -90,7 +93,7 @@ public void setProperty_Number() {
 
     @Test
     public void setProperty_NumberList() {
-        List value = Arrays.asList(1,2,3,4);
+        List value = Arrays.asList(1, 2, 3, 4);
         List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST, value);
         assertEquals(value, setValue);
     }
@@ -98,28 +101,32 @@ public void setProperty_NumberList() {
     @Test
     public void setProperty_NumberList_splitString() {
         String list = "1,2,3,4";
-        List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST, KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, list));
-        assertEquals(Arrays.asList(1L,2L,3L,4L), setValue);
+        List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST,
+                KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, list));
+        assertEquals(Arrays.asList(1L, 2L, 3L, 4L), setValue);
     }
 
     @Test
     public void setProperty_NumberList_splitString_oneValue() {
         String list = "1";
-        List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST, KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, list));
+        List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST,
+                KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, list));
         assertEquals(Arrays.asList(1L), setValue);
     }
 
     @Test
     public void setProperty_NumberList_splitString_LongValue() {
         String list = String.valueOf(Long.MAX_VALUE);
-        List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST, KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, list));
+        List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST,
+                KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, list));
         assertEquals(Arrays.asList(Long.MAX_VALUE), setValue);
     }
 
     @Test
     public void setProperty_NumberList_splitString_badNumber() {
         String list = "1,2,x,4";
-        List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST, KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, list));
+        List setValue = (List) helper.setProperty(KnownProperties.TEST_NUMBER_LIST,
+                KnownProperties.asType(KnownProperties.PropertyType.NUMBER_LIST, list));
         assertNull(setValue);
     }
@@ -137,7 +144,6 @@ public void setProperty_Boolean() {
         assertEquals(value, setValue);
     }
 
-
     @Test
     public void setProperty_nullArguments() {
         assertNull(helper.setProperty(null, "test"));
@@ -172,8 +178,8 @@ public void getString() {
 
     @Test
     public void getStringList() {
-        helper.setProperty(KnownProperties.TEST_STRING_LIST, Arrays.asList("a","b","c"));
-        assertEquals(Arrays.asList("a","b","c"), helper.getStringList(KnownProperties.TEST_STRING_LIST));
+        helper.setProperty(KnownProperties.TEST_STRING_LIST, Arrays.asList("a", "b", "c"));
+        assertEquals(Arrays.asList("a", "b", "c"), helper.getStringList(KnownProperties.TEST_STRING_LIST));
     }
 
     @Test
@@ -228,7 +234,7 @@ public void getInteger_nullArgument() {
 
     @Test
     public void getInteger_wrongType() {
-        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1,2,3));
+        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1, 2, 3));
         assertNull(helper.getInteger(KnownProperties.TEST_NUMBER_LIST));
     }
@@ -251,14 +257,14 @@ public void getLong_wrongType() {
 
     @Test
     public void getNumberList() {
-        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1,2,3));
-        assertEquals(Arrays.asList(1,2,3), helper.getNumberList(KnownProperties.TEST_NUMBER_LIST));
+        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1, 2, 3));
+        assertEquals(Arrays.asList(1, 2, 3), helper.getNumberList(KnownProperties.TEST_NUMBER_LIST));
     }
 
     @Test
     public void getIntegerList() {
-        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1,2,3));
-        assertEquals(Arrays.asList(1,2,3), helper.getIntegerList(KnownProperties.TEST_NUMBER_LIST));
+        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1, 2, 3));
+        assertEquals(Arrays.asList(1, 2, 3), helper.getIntegerList(KnownProperties.TEST_NUMBER_LIST));
     }
 
     @Test
@@ -268,13 +274,13 @@ public void getIntegerList_nullParameter() {
 
     @Test
     public void getIntegerList_nullValue() {
-        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1,null,3));
+        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1, null, 3));
         assertNull(helper.getIntegerList(null));
     }
 
     @Test
     public void getIntegerList_wrongType() {
-        helper.setProperty(KnownProperties.TEST_STRING_LIST, Arrays.asList("1","2","3"));
+        helper.setProperty(KnownProperties.TEST_STRING_LIST, Arrays.asList("1", "2", "3"));
         assertNull(helper.getIntegerList(KnownProperties.TEST_STRING_LIST));
     }
@@ -334,7 +340,7 @@ public void get_invalidType_String() {
     public void get_invalidType_StringList() {
         // Any code that actually does this is broken, but we should handle it gracefully
         helper.setProperty(KnownProperties.TEST_STRING_LIST, "a,b,c,d");
-        helper.getPropertyMap().put(KnownProperties.TEST_STRING_LIST, Arrays.asList(1,2,3,4));
+        helper.getPropertyMap().put(KnownProperties.TEST_STRING_LIST, Arrays.asList(1, 2, 3, 4));
         assertNull(helper.get(KnownProperties.TEST_STRING_LIST, KnownProperties.PropertyType.STRING_LIST));
     }
@@ -350,7 +356,7 @@ public void get_invalidType_Number() {
     public void get_invalidType_NumberList() {
         // Any code that actually does this is broken, but we should handle it gracefully
         helper.setProperty(KnownProperties.TEST_NUMBER_LIST, "1,2,3,4");
-        helper.getPropertyMap().put(KnownProperties.TEST_NUMBER_LIST, Arrays.asList("a","b","c","d"));
+        helper.getPropertyMap().put(KnownProperties.TEST_NUMBER_LIST, Arrays.asList("a", "b", "c", "d"));
         assertNull(helper.get(KnownProperties.TEST_NUMBER_LIST, KnownProperties.PropertyType.NUMBER_LIST));
     }
@@ -370,7 +376,7 @@ public void getAsString_String() {
 
     @Test
     public void getAsString_StringList() {
-        helper.setProperty(KnownProperties.TEST_STRING_LIST, Arrays.asList("a","b","c","d"));
+        helper.setProperty(KnownProperties.TEST_STRING_LIST, Arrays.asList("a", "b", "c", "d"));
         assertEquals("a,b,c,d", helper.getAsString(KnownProperties.TEST_STRING_LIST));
     }
@@ -382,7 +388,7 @@ public void getAsString_Number() {
 
     @Test
     public void getAsString_NumberList() {
-        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1,2,3,4));
+        helper.setProperty(KnownProperties.TEST_NUMBER_LIST, Arrays.asList(1, 2, 3, 4));
         assertEquals("1,2,3,4", helper.getAsString(KnownProperties.TEST_NUMBER_LIST));
     }
@@ -394,7 +400,7 @@ public void getAsString_Boolean() {
 
     @Test
     public void getAsString_valueNotSet_string() {
-        assertEquals("",helper.getAsString(KnownProperties.TEST_STRING_NO_DEFAULT));
+        assertEquals("", helper.getAsString(KnownProperties.TEST_STRING_NO_DEFAULT));
     }
 
     @Test
@@ -405,7 +411,7 @@ public void getAsString_nullArgument() {
     @Test
     public void getAsString_nullUnhanldedType() {
         helper.setProperty(KnownProperties.TEST_UNHANDLED_TYPE, "abcd");
-        assertEquals("",helper.getAsString(KnownProperties.TEST_UNHANDLED_TYPE));
+        assertEquals("", helper.getAsString(KnownProperties.TEST_UNHANDLED_TYPE));
     }
 
     @Test
@@ -417,7 +423,7 @@ public void getInstance() {
 
     @Test
     public void initializeSparkConf_null() {
-        Exception e = assertThrows(IllegalArgumentException.class, () -> {
+        Exception e = assertThrows(IllegalArgumentException.class, () -> {
             helper.initializeSparkConf(null);
         });
         assertTrue(e.getMessage().contains("SparkConf cannot be null"));
@@ -510,7 +516,6 @@ public void test_valid_connection_noUserPassword() {
         assertTrue(helper.isValidConfig());
     }
 
-
     @Test
     public void meetsMinimum_true() {
         assertTrue(helper.meetsMinimum("a", 100L, 0L));
@@ -529,4 +534,4 @@ private void setValidSparkConf() {
         validSparkConf.set(KnownProperties.TARGET_KEYSPACE_TABLE, "ks.tab1");
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/com/datastax/cdm/regression/CQL_ColumnRenameWithConstantsAndExplode.java b/src/test/java/com/datastax/cdm/regression/CQL_ColumnRenameWithConstantsAndExplode.java
index a5573e31..da285af7 100644
--- a/src/test/java/com/datastax/cdm/regression/CQL_ColumnRenameWithConstantsAndExplode.java
+++ b/src/test/java/com/datastax/cdm/regression/CQL_ColumnRenameWithConstantsAndExplode.java
@@ -15,17 +15,18 @@
  */
 package com.datastax.cdm.regression;
 
-import com.datastax.cdm.cql.statement.*;
-import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.cdm.cql.CommonMocks;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.util.Arrays;
 import java.util.Collections;
 
-import static org.junit.jupiter.api.Assertions.assertAll;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.datastax.cdm.cql.CommonMocks;
+import com.datastax.cdm.cql.statement.*;
+import com.datastax.oss.driver.api.core.type.DataTypes;
 
 /*
  This test addresses CDM-34 which needs to migrate from:
@@ -53,7 +54,7 @@ public class CQL_ColumnRenameWithConstantsAndExplode extends CommonMocks {
     public void setup() {
         defaultClassVariables();
         setTestVariables();
-        commonSetupWithoutDefaultClassVariables(true,true,false);
+        commonSetupWithoutDefaultClassVariables(true, true, false);
     }
 
     // Set up table as it would be in Cassandra
@@ -95,19 +96,19 @@ public void smokeCQL() {
         String targetUpdateString = "UPDATE astra.indextable SET value=? WHERE customer='CUSTOMER' AND parameter_name=? AND id=?";
         String targetSelectString = "SELECT customer,parameter_name,id,value FROM astra.indextable WHERE customer='CUSTOMER' AND parameter_name=? AND id=?";
AND id=?"; - OriginSelectByPartitionRangeStatement originSelect = new OriginSelectByPartitionRangeStatement(propertyHelper, originSession); + OriginSelectByPartitionRangeStatement originSelect = new OriginSelectByPartitionRangeStatement(propertyHelper, + originSession); OriginSelectByPKStatement originSelectByPK = new OriginSelectByPKStatement(propertyHelper, originSession); TargetInsertStatement targetInsert = new TargetInsertStatement(propertyHelper, targetSession); TargetUpdateStatement targetUpdate = new TargetUpdateStatement(propertyHelper, targetSession); TargetSelectByPKStatement targetSelect = new TargetSelectByPKStatement(propertyHelper, targetSession); - assertAll( - () -> assertEquals(originSelectString, originSelect.getCQL().replaceAll("\\s+"," "), "originSelect"), - () -> assertEquals(originSelectByPKString, originSelectByPK.getCQL().replaceAll("\\s+"," "), "originSelectByPK"), - () -> assertEquals(targetInsertString, targetInsert.getCQL().replaceAll("\\s+"," "), "targetInsert"), - () -> assertEquals(targetUpdateString, targetUpdate.getCQL().replaceAll("\\s+"," "), "targetUpdate"), - () -> assertEquals(targetSelectString, targetSelect.getCQL().replaceAll("\\s+"," "), "targetSelect") - ); + assertAll(() -> assertEquals(originSelectString, originSelect.getCQL().replaceAll("\\s+", " "), "originSelect"), + () -> assertEquals(originSelectByPKString, originSelectByPK.getCQL().replaceAll("\\s+", " "), + "originSelectByPK"), + () -> assertEquals(targetInsertString, targetInsert.getCQL().replaceAll("\\s+", " "), "targetInsert"), + () -> assertEquals(targetUpdateString, targetUpdate.getCQL().replaceAll("\\s+", " "), "targetUpdate"), + () -> assertEquals(targetSelectString, targetSelect.getCQL().replaceAll("\\s+", " "), "targetSelect")); } } diff --git a/src/test/java/com/datastax/cdm/schema/BaseTableTest.java b/src/test/java/com/datastax/cdm/schema/BaseTableTest.java index e87df414..29d130b7 100644 --- a/src/test/java/com/datastax/cdm/schema/BaseTableTest.java +++ b/src/test/java/com/datastax/cdm/schema/BaseTableTest.java @@ -15,15 +15,16 @@ */ package com.datastax.cdm.schema; -import com.datastax.cdm.cql.CommonMocks; -import com.datastax.cdm.properties.KnownProperties; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.when; + import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.when; +import com.datastax.cdm.cql.CommonMocks; +import com.datastax.cdm.properties.KnownProperties; public class BaseTableTest extends CommonMocks { public Logger logger = LoggerFactory.getLogger(this.getClass().getName()); @@ -73,6 +74,7 @@ public void useTargetWhenTargetPresent() { @Test public void failWhenKsTableAbsent() { RuntimeException thrown = assertThrows(RuntimeException.class, () -> new BaseTable(propertyHelper, false)); - assertTrue(thrown.getMessage().contentEquals("Value for required property " + KnownProperties.ORIGIN_KEYSPACE_TABLE + " not provided!!")); + assertTrue(thrown.getMessage().contentEquals( + "Value for required property " + KnownProperties.ORIGIN_KEYSPACE_TABLE + " not provided!!")); } -} \ No newline at end of file +} diff --git a/src/test/java/com/datastax/cdm/schema/CqlTableTest.java b/src/test/java/com/datastax/cdm/schema/CqlTableTest.java index 318ed87c..e634b427 100644 --- a/src/test/java/com/datastax/cdm/schema/CqlTableTest.java +++ 
b/src/test/java/com/datastax/cdm/schema/CqlTableTest.java @@ -24,19 +24,19 @@ class CqlTableTest extends CommonMocks { - @Test - void testCL() { - assertEquals(CqlTable.mapToConsistencyLevel("LOCAL_QUORUM"), ConsistencyLevel.LOCAL_QUORUM); - assertEquals(CqlTable.mapToConsistencyLevel("any"), ConsistencyLevel.ANY); - assertEquals(CqlTable.mapToConsistencyLevel("one"), ConsistencyLevel.ONE); - assertEquals(CqlTable.mapToConsistencyLevel("two"), ConsistencyLevel.TWO); - assertEquals(CqlTable.mapToConsistencyLevel("three"), ConsistencyLevel.THREE); - assertEquals(CqlTable.mapToConsistencyLevel("QUORUM"), ConsistencyLevel.QUORUM); - assertEquals(CqlTable.mapToConsistencyLevel("Local_one"), ConsistencyLevel.LOCAL_ONE); - assertEquals(CqlTable.mapToConsistencyLevel("EACH_quorum"), ConsistencyLevel.EACH_QUORUM); - assertEquals(CqlTable.mapToConsistencyLevel("serial"), ConsistencyLevel.SERIAL); - assertEquals(CqlTable.mapToConsistencyLevel("local_serial"), ConsistencyLevel.LOCAL_SERIAL); - assertEquals(CqlTable.mapToConsistencyLevel("all"), ConsistencyLevel.ALL); - } + @Test + void testCL() { + assertEquals(CqlTable.mapToConsistencyLevel("LOCAL_QUORUM"), ConsistencyLevel.LOCAL_QUORUM); + assertEquals(CqlTable.mapToConsistencyLevel("any"), ConsistencyLevel.ANY); + assertEquals(CqlTable.mapToConsistencyLevel("one"), ConsistencyLevel.ONE); + assertEquals(CqlTable.mapToConsistencyLevel("two"), ConsistencyLevel.TWO); + assertEquals(CqlTable.mapToConsistencyLevel("three"), ConsistencyLevel.THREE); + assertEquals(CqlTable.mapToConsistencyLevel("QUORUM"), ConsistencyLevel.QUORUM); + assertEquals(CqlTable.mapToConsistencyLevel("Local_one"), ConsistencyLevel.LOCAL_ONE); + assertEquals(CqlTable.mapToConsistencyLevel("EACH_quorum"), ConsistencyLevel.EACH_QUORUM); + assertEquals(CqlTable.mapToConsistencyLevel("serial"), ConsistencyLevel.SERIAL); + assertEquals(CqlTable.mapToConsistencyLevel("local_serial"), ConsistencyLevel.LOCAL_SERIAL); + assertEquals(CqlTable.mapToConsistencyLevel("all"), ConsistencyLevel.ALL); + } }
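
Reading note (not part of the patch): the reformatted JobCounterTest above exercises a two-level counting flow -- each worker thread accumulates into thread-local counters, then folds them into the global counters with globalIncrement(). The sketch below is a reading aid only, assembled strictly from calls that appear in the tests; the meaning of the constructor arguments (a printStatsAfter threshold of 10 and a printPerThread flag) is inferred from the test comments and is an assumption, not confirmed against the CDM source.

    // Hypothetical usage sketch inferred from JobCounterTest; not taken from CDM source.
    JobCounter counter = new JobCounter(10, true); // assumed: printStatsAfter=10, printPerThread=true
    counter.setRegisteredTypes(JobCounter.CounterType.values()); // incrementing an unregistered type throws

    counter.threadIncrement(JobCounter.CounterType.READ, 5); // thread-local READ += 5
    counter.threadIncrement(JobCounter.CounterType.READ);    // thread-local READ += 1 (now 6)
    counter.globalIncrement();                               // publish thread-local counts to the global counters

    long globalReads = counter.getCount(JobCounter.CounterType.READ, true); // second arg true => global count
    if (counter.shouldPrintGlobalProgress()) { // true once the global READ count passes the threshold
        counter.printProgress();
    }
    counter.printFinal(null); // passing a TrackRun instead records the summary via endCdmRun(...)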