[runners-flink] apache#30621 use groupBy for Reshuffle in batch
je-ik committed Mar 13, 2024
1 parent 289bbc4 commit 5697c67
Showing 2 changed files with 9 additions and 10 deletions.
2 changes: 0 additions & 2 deletions runners/flink/flink_runner.gradle
@@ -192,13 +192,11 @@ dependencies {

   if (flink_version.compareTo("1.14") >= 0) {
     implementation "org.apache.flink:flink-runtime:$flink_version"
-    implementation "org.apache.flink:flink-optimizer:$flink_version"
     implementation "org.apache.flink:flink-metrics-core:$flink_version"
     testImplementation "org.apache.flink:flink-runtime:$flink_version:tests"
     testImplementation "org.apache.flink:flink-rpc-akka:$flink_version"
   } else {
     implementation "org.apache.flink:flink-runtime_2.12:$flink_version"
-    implementation "org.apache.flink:flink-optimizer_2.12:$flink_version"
     testImplementation "org.apache.flink:flink-runtime_2.12:$flink_version:tests"
   }
   testImplementation project(path: ":sdks:java:core", configuration: "shadowTest")
17 changes: 9 additions & 8 deletions
@@ -94,8 +94,6 @@
 import org.apache.flink.api.java.operators.MapOperator;
 import org.apache.flink.api.java.operators.SingleInputUdfOperator;
 import org.apache.flink.api.java.operators.UnsortedGrouping;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.optimizer.Optimizer;
 import org.checkerframework.checker.nullness.qual.Nullable;
 import org.joda.time.Instant;

@@ -413,12 +411,15 @@ public void translateNode(
               outputType,
               FlinkIdentityFunction.of(),
               getCurrentTransformName(context));
-      final Configuration partitionOptions = new Configuration();
-      partitionOptions.setString(
-          Optimizer.HINT_SHIP_STRATEGY, Optimizer.HINT_SHIP_STRATEGY_REPARTITION);
-      context.setOutputDataSet(
-          context.getOutput(transform),
-          retypedDataSet.map(FlinkIdentityFunction.of()).withParameters(partitionOptions));
+      WindowedValue.WindowedValueCoder<KV<K, InputT>> kvWvCoder =
+          (WindowedValue.WindowedValueCoder<KV<K, InputT>>) outputType.getCoder();
+      KvCoder<K, InputT> kvCoder = (KvCoder<K, InputT>) kvWvCoder.getValueCoder();
+      DataSet<WindowedValue<KV<K, InputT>>> reshuffle =
+          retypedDataSet
+              .groupBy(new KvKeySelector<>(kvCoder.getKeyCoder()))
+              .<WindowedValue<KV<K, InputT>>>reduceGroup((i, c) -> i.forEach(c::collect))
+              .returns(outputType);
+      context.setOutputDataSet(context.getOutput(transform), reshuffle);
     }
   }
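
For context, here is a minimal standalone sketch of the same groupBy-plus-identity-reduceGroup pattern on Flink's batch DataSet API, using plain Tuple2 pairs in place of Beam's WindowedValue/KvCoder machinery. The class name, sample data, and variable names are illustrative assumptions, not code from this repository.

// Illustrative sketch (not from the Beam repository): reshuffle a DataSet by key
// by grouping on the key field and re-emitting every element unchanged.
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class ReshuffleSketch {
  public static void main(String[] args) throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple2<String, Integer>> input =
        env.fromElements(Tuple2.of("a", 1), Tuple2.of("b", 2), Tuple2.of("a", 3));

    // groupBy forces a hash partitioning by the key field; the identity group reduce
    // then collects each element as-is, so the net effect is a reshuffle.
    DataSet<Tuple2<String, Integer>> reshuffled =
        input
            .groupBy(0)
            .<Tuple2<String, Integer>>reduceGroup((values, out) -> values.forEach(out::collect))
            .returns(input.getType());

    reshuffled.print();
  }
}

Grouping by the key forces a hash partitioning of the data set, the identity reduceGroup preserves every element, and .returns(...) supplies the output type information that lambda type erasure drops, mirroring the .returns(outputType) call in the diff above.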

