diff --git a/.github/workflows/beam_PreCommit_Spotless.yml b/.github/workflows/beam_PreCommit_Spotless.yml index 145e31c4448a..80931168856d 100644 --- a/.github/workflows/beam_PreCommit_Spotless.yml +++ b/.github/workflows/beam_PreCommit_Spotless.yml @@ -85,6 +85,8 @@ jobs: github_job: ${{ matrix.job_name }} (${{ matrix.job_phrase }}) - name: Setup environment uses: ./.github/actions/setup-environment-action + with: + java-version: '17' - name: run Spotless PreCommit script uses: ./.github/actions/gradle-command-self-hosted-action with: diff --git a/.test-infra/metrics/build.gradle b/.test-infra/metrics/build.gradle index f1ecba05f84d..d8549d921b95 100644 --- a/.test-infra/metrics/build.gradle +++ b/.test-infra/metrics/build.gradle @@ -42,7 +42,8 @@ dependencies { task testMetricsStack { doLast { // TODO(BEAM-5837): Add some actual validation of the metrics stack - println "Hello world!" } + println "Hello world!" + } } // Create an empty file and set DOCKER_CONFIG to ignore bad config file on Jenkins nodes. BEAM-7405 diff --git a/.test-infra/mock-apis/src/main/java/org/apache/beam/testinfra/mockapis/echo/v1/Echo.java b/.test-infra/mock-apis/src/main/java/org/apache/beam/testinfra/mockapis/echo/v1/Echo.java index 4652ff716b87..f376fd01e42b 100644 --- a/.test-infra/mock-apis/src/main/java/org/apache/beam/testinfra/mockapis/echo/v1/Echo.java +++ b/.test-infra/mock-apis/src/main/java/org/apache/beam/testinfra/mockapis/echo/v1/Echo.java @@ -54,6 +54,7 @@ public interface EchoRequestOrBuilder * @return The id. */ java.lang.String getId(); + /** * string id = 1 [json_name = "id"]; * @@ -68,6 +69,7 @@ public interface EchoRequestOrBuilder */ com.google.protobuf.ByteString getPayload(); } + /** * * @@ -82,6 +84,7 @@ public static final class EchoRequest extends com.google.protobuf.GeneratedMessa // @@protoc_insertion_point(message_implements:proto.echo.v1.EchoRequest) EchoRequestOrBuilder { private static final long serialVersionUID = 0L; + // Use EchoRequest.newBuilder() to construct. 
private EchoRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); @@ -122,6 +125,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { @SuppressWarnings("serial") private volatile java.lang.Object id_ = ""; + /** * string id = 1 [json_name = "id"]; * @@ -139,6 +143,7 @@ public java.lang.String getId() { return s; } } + /** * string id = 1 [json_name = "id"]; * @@ -159,6 +164,7 @@ public com.google.protobuf.ByteString getIdBytes() { public static final int PAYLOAD_FIELD_NUMBER = 2; private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** * bytes payload = 2 [json_name = "payload"]; * @@ -339,6 +345,7 @@ protected Builder newBuilderForType( Builder builder = new Builder(parent); return builder; } + /** * * @@ -542,6 +549,7 @@ public Builder mergeFrom( private int bitField0_; private java.lang.Object id_ = ""; + /** * string id = 1 [json_name = "id"]; * @@ -558,6 +566,7 @@ public java.lang.String getId() { return (java.lang.String) ref; } } + /** * string id = 1 [json_name = "id"]; * @@ -574,6 +583,7 @@ public com.google.protobuf.ByteString getIdBytes() { return (com.google.protobuf.ByteString) ref; } } + /** * string id = 1 [json_name = "id"]; * @@ -589,6 +599,7 @@ public Builder setId(java.lang.String value) { onChanged(); return this; } + /** * string id = 1 [json_name = "id"]; * @@ -600,6 +611,7 @@ public Builder clearId() { onChanged(); return this; } + /** * string id = 1 [json_name = "id"]; * @@ -618,6 +630,7 @@ public Builder setIdBytes(com.google.protobuf.ByteString value) { } private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** * bytes payload = 2 [json_name = "payload"]; * @@ -627,6 +640,7 @@ public Builder setIdBytes(com.google.protobuf.ByteString value) { public com.google.protobuf.ByteString getPayload() { return payload_; } + /** * bytes payload = 2 [json_name = "payload"]; * @@ -642,6 +656,7 @@ public Builder setPayload(com.google.protobuf.ByteString value) { onChanged(); return this; } + /** * bytes payload = 2 [json_name = "payload"]; * @@ -730,6 +745,7 @@ public interface EchoResponseOrBuilder * @return The id. */ java.lang.String getId(); + /** * string id = 1 [json_name = "id"]; * @@ -744,6 +760,7 @@ public interface EchoResponseOrBuilder */ com.google.protobuf.ByteString getPayload(); } + /** * * @@ -758,6 +775,7 @@ public static final class EchoResponse extends com.google.protobuf.GeneratedMess // @@protoc_insertion_point(message_implements:proto.echo.v1.EchoResponse) EchoResponseOrBuilder { private static final long serialVersionUID = 0L; + // Use EchoResponse.newBuilder() to construct. 
private EchoResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); @@ -798,6 +816,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { @SuppressWarnings("serial") private volatile java.lang.Object id_ = ""; + /** * string id = 1 [json_name = "id"]; * @@ -815,6 +834,7 @@ public java.lang.String getId() { return s; } } + /** * string id = 1 [json_name = "id"]; * @@ -835,6 +855,7 @@ public com.google.protobuf.ByteString getIdBytes() { public static final int PAYLOAD_FIELD_NUMBER = 2; private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** * bytes payload = 2 [json_name = "payload"]; * @@ -1015,6 +1036,7 @@ protected Builder newBuilderForType( Builder builder = new Builder(parent); return builder; } + /** * * @@ -1218,6 +1240,7 @@ public Builder mergeFrom( private int bitField0_; private java.lang.Object id_ = ""; + /** * string id = 1 [json_name = "id"]; * @@ -1234,6 +1257,7 @@ public java.lang.String getId() { return (java.lang.String) ref; } } + /** * string id = 1 [json_name = "id"]; * @@ -1250,6 +1274,7 @@ public com.google.protobuf.ByteString getIdBytes() { return (com.google.protobuf.ByteString) ref; } } + /** * string id = 1 [json_name = "id"]; * @@ -1265,6 +1290,7 @@ public Builder setId(java.lang.String value) { onChanged(); return this; } + /** * string id = 1 [json_name = "id"]; * @@ -1276,6 +1302,7 @@ public Builder clearId() { onChanged(); return this; } + /** * string id = 1 [json_name = "id"]; * @@ -1294,6 +1321,7 @@ public Builder setIdBytes(com.google.protobuf.ByteString value) { } private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** * bytes payload = 2 [json_name = "payload"]; * @@ -1303,6 +1331,7 @@ public Builder setIdBytes(com.google.protobuf.ByteString value) { public com.google.protobuf.ByteString getPayload() { return payload_; } + /** * bytes payload = 2 [json_name = "payload"]; * @@ -1318,6 +1347,7 @@ public Builder setPayload(com.google.protobuf.ByteString value) { onChanged(); return this; } + /** * bytes payload = 2 [json_name = "payload"]; * diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts index 9ad1a6a5bf3b..679cad82d100 100644 --- a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ -20,7 +20,7 @@ plugins { `java-gradle-plugin` groovy - id("com.diffplug.spotless") version "5.6.1" + id("com.diffplug.spotless") version "7.2.1" } // Define the set of repositories required to fetch and enable plugins. @@ -49,7 +49,7 @@ dependencies { runtimeOnly("com.google.protobuf:protobuf-gradle-plugin:0.8.13") // Enable proto code generation runtimeOnly("com.github.davidmc24.gradle.plugin:gradle-avro-plugin:1.9.1") // Enable Avro code generation. 
Version 1.1.0 is the last supporting avro 1.10.2 - runtimeOnly("com.diffplug.spotless:spotless-plugin-gradle:5.6.1") // Enable a code formatting plugin + runtimeOnly("com.diffplug.spotless:spotless-plugin-gradle:7.2.1") // Enable a code formatting plugin runtimeOnly("gradle.plugin.com.dorongold.plugins:task-tree:1.5") // Adds a 'taskTree' task to print task dependency tree runtimeOnly("net.linguica.gradle:maven-settings-plugin:0.5") runtimeOnly("gradle.plugin.io.pry.gradle.offline_dependencies:gradle-offline-dependencies-plugin:0.5.0") // Enable creating an offline repository diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamDockerPlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamDockerPlugin.groovy index 6963f96d7313..9aaeaea96950 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamDockerPlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamDockerPlugin.groovy @@ -185,7 +185,8 @@ class BeamDockerPlugin implements Plugin { [ tagName: tagName, tagTask: { - -> tagName } + -> tagName + } ] ] } @@ -201,7 +202,8 @@ class BeamDockerPlugin implements Plugin { tags[taskName] = [ tagName: unresolvedTagName, tagTask: { - -> computeName(ext.name, unresolvedTagName) } + -> computeName(ext.name, unresolvedTagName) + } ] } } diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index e32d1c9afb77..e7348624a882 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -1157,16 +1157,16 @@ class BeamModulePlugin implements Plugin { // If compiled on older SDK, compile with JDK configured with compatible javaXXHome // The order is intended here if (requireJavaVersion.compareTo(JavaVersion.VERSION_11) <= 0 && - project.hasProperty('java11Home')) { + project.hasProperty('java11Home')) { forkJavaVersion = '11' } else if (requireJavaVersion.compareTo(JavaVersion.VERSION_17) <= 0 && - project.hasProperty('java17Home')) { + project.hasProperty('java17Home')) { forkJavaVersion = '17' } else if (requireJavaVersion.compareTo(JavaVersion.VERSION_21) <= 0 && - project.hasProperty('java21Home')) { + project.hasProperty('java21Home')) { forkJavaVersion = '21' } else if (requireJavaVersion.compareTo(JavaVersion.VERSION_25) <= 0 && - project.hasProperty('java25Home')) { + project.hasProperty('java25Home')) { forkJavaVersion = '25' } else { logger.config("Module ${project.name} disabled. To enable, either " + @@ -1459,7 +1459,7 @@ class BeamModulePlugin implements Plugin { enforceCheck !disableSpotlessCheck java { licenseHeader javaLicenseHeader - googleJavaFormat('1.7') + googleJavaFormat('1.17.0') target project.fileTree(project.projectDir) { include 'src/*/java/**/*.java' exclude '**/DefaultPackageTest.java' @@ -1589,6 +1589,8 @@ class BeamModulePlugin implements Plugin { project.tasks.withType(JavaCompile).configureEach { // we configure the Java compiler to use UTF-8. options.encoding = "UTF-8" + options.fork = true + options.forkOptions.memoryMaximumSize = '4g' // If compiled on newer JDK, set byte code compatibility if (requireJavaVersion.compareTo(JavaVersion.current()) < 0) { def compatVersion = project.javaVersion == '11' ? 
'11' : project.javaVersion @@ -2076,7 +2078,8 @@ class BeamModulePlugin implements Plugin { } else { dependencyNode.appendNode('groupId', it.group) dependencyNode.appendNode('artifactId', it.name) - if (it.version != null) { // bom-managed artifacts do not have their versions + if (it.version != null) { + // bom-managed artifacts do not have their versions dependencyNode.appendNode('version', it.version) } dependencyNode.appendNode('scope', param.scope) @@ -2443,7 +2446,8 @@ class BeamModulePlugin implements Plugin { project.protobuf { protoc { // The artifact spec for the Protobuf Compiler - artifact = "com.google.protobuf:protoc:$protobuf_version" } + artifact = "com.google.protobuf:protoc:$protobuf_version" + } // Configure the codegen plugins plugins { @@ -2525,7 +2529,8 @@ class BeamModulePlugin implements Plugin { project.protobuf { protoc { // The artifact spec for the Protobuf Compiler - artifact = "com.google.protobuf:protoc:${GrpcVendoring_1_69_0.protobuf_version}" } + artifact = "com.google.protobuf:protoc:${GrpcVendoring_1_69_0.protobuf_version}" + } // Configure the codegen plugins plugins { @@ -2722,7 +2727,8 @@ class BeamModulePlugin implements Plugin { doLast { def beamPythonTestPipelineOptions = [ "pipeline_opts": config.pythonPipelineOptions + (usesDataflowRunner ? [ - "--sdk_location=${project.ext.sdkLocation}"] + "--sdk_location=${project.ext.sdkLocation}" + ] : []), "test_opts": config.pytestOptions, "suite": config.name, @@ -3012,7 +3018,8 @@ class BeamModulePlugin implements Plugin { doLast { def beamPythonTestPipelineOptions = [ "pipeline_opts": config.pythonPipelineOptions + (usesDataflowRunner ? [ - "--sdk_location=${project.ext.sdkLocation}"] + "--sdk_location=${project.ext.sdkLocation}" + ] : []), "test_opts": config.pytestOptions, "suite": config.name, diff --git a/examples/java/src/main/java/org/apache/beam/examples/RateLimiterSimple.java b/examples/java/src/main/java/org/apache/beam/examples/RateLimiterSimple.java index 3ec8fcec0bd8..952c66389bfe 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/RateLimiterSimple.java +++ b/examples/java/src/main/java/org/apache/beam/examples/RateLimiterSimple.java @@ -112,6 +112,7 @@ public void processElement(ProcessContext c) throws Exception { c.output("Processed: " + element); } } + // [END RateLimiterSimpleJava] public static void main(String[] args) { diff --git a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java index 3d67f28e9f10..f7789ca0c28e 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java +++ b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java @@ -106,6 +106,7 @@ */ public class WindowedWordCount { static final int WINDOW_SIZE = 10; // Default window duration in minutes + /** * Concept #2: A DoFn that sets the data element timestamp. This is a silly method, just for this * example, for the bounded data case. 
diff --git a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java index 4eb7eeadfa1c..4c878a8a74e4 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java +++ b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java @@ -130,6 +130,7 @@ public void processElement(@Element String element, OutputReceiver recei }); } } + // [END extract_words_fn] /** A SimpleFunction that converts a Word and Count into a printable string. */ @@ -163,6 +164,7 @@ public PCollection> expand(PCollection lines) { return wordCounts; } } + // [END count_words] /** @@ -194,6 +196,7 @@ public interface WordCountOptions extends PipelineOptions { void setOutput(String value); } + // [END wordcount_options] static void runWordCount(WordCountOptions options) { diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/game/GameStats.java b/examples/java/src/main/java/org/apache/beam/examples/complete/game/GameStats.java index a3ed04bb1c48..99bb6ed7ee5c 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/game/GameStats.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/game/GameStats.java @@ -144,6 +144,7 @@ public void processElement(ProcessContext c) { return filtered; } } + // [END DocInclude_AbuseDetect] /** Calculate and output an element's session duration. */ diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/game/LeaderBoard.java b/examples/java/src/main/java/org/apache/beam/examples/complete/game/LeaderBoard.java index 832c0ad79e76..9fd60a4b9e75 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/game/LeaderBoard.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/game/LeaderBoard.java @@ -275,6 +275,7 @@ public PCollection> expand(PCollection infos .apply("ExtractTeamScore", new ExtractAndSumScore("team")); } } + // [END DocInclude_WindowAndTrigger] // [START DocInclude_ProcTimeTrigger] diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/game/UserScore.java b/examples/java/src/main/java/org/apache/beam/examples/complete/game/UserScore.java index b30b4665d265..6b0fce7500cf 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/game/UserScore.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/game/UserScore.java @@ -202,6 +202,7 @@ public PCollection> expand(PCollection gameI .apply(Sum.integersPerKey()); } } + // [END DocInclude_USExtractXform] /** Options supported by {@link UserScore}. */ diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/options/KafkaToPubsubOptions.java b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/options/KafkaToPubsubOptions.java index edb280ff9715..f35ca6fbc4b5 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/options/KafkaToPubsubOptions.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/options/KafkaToPubsubOptions.java @@ -91,8 +91,7 @@ public interface KafkaToPubsubOptions extends PipelineOptions { @Description( "Additional kafka consumer configs to be applied to Kafka Consumer (e.g. 
key1=value1;key2=value2).") - @Nullable - String getKafkaConsumerConfig(); + @Nullable String getKafkaConsumerConfig(); void setKafkaConsumerConfig(String kafkaConfig); } diff --git a/examples/java/src/main/java/org/apache/beam/examples/snippets/Snippets.java b/examples/java/src/main/java/org/apache/beam/examples/snippets/Snippets.java index 4f24c69f74b7..28ccb8dda239 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/snippets/Snippets.java +++ b/examples/java/src/main/java/org/apache/beam/examples/snippets/Snippets.java @@ -730,6 +730,7 @@ public Collection assignWindows(WindowFn.AssignContext c) { // each other) will be merged. return Arrays.asList(new IntervalWindow(c.timestamp(), gapDuration)); } + // [END CustomSessionWindow1] // [START CustomSessionWindow2] @@ -760,6 +761,7 @@ public Collection assignWindows(AssignContext c) { } return Arrays.asList(new IntervalWindow(c.timestamp(), dataDrivenGap)); } + // [END CustomSessionWindow3] // [START CustomSessionWindow4] @@ -1239,6 +1241,7 @@ public Coder getRestrictionCoder() { return OffsetRange.Coder.of(); } } + // [END SDF_BasicExample] @SuppressWarnings("unused") @@ -1348,6 +1351,7 @@ public WatermarkEstimator newWatermarkEstimator( return new MyCustomWatermarkEstimator(oldState); } } + // [END SDF_CustomWatermarkEstimator] @SuppressWarnings("unused") diff --git a/examples/java/src/main/java/org/apache/beam/examples/subprocess/kernel/SubProcessIOFiles.java b/examples/java/src/main/java/org/apache/beam/examples/subprocess/kernel/SubProcessIOFiles.java index f6131010a571..f97053480e2c 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/subprocess/kernel/SubProcessIOFiles.java +++ b/examples/java/src/main/java/org/apache/beam/examples/subprocess/kernel/SubProcessIOFiles.java @@ -55,7 +55,9 @@ public String getOutFileLocation() { return outFileLocation; } - /** @param workerWorkingDirectory */ + /** + * @param workerWorkingDirectory + */ public SubProcessIOFiles(String workerWorkingDirectory) { this.uuid = UUID.randomUUID().toString(); diff --git a/examples/java/src/test/java/org/apache/beam/examples/complete/game/LeaderBoardTest.java b/examples/java/src/test/java/org/apache/beam/examples/complete/game/LeaderBoardTest.java index 13f80012cc1b..f271ca5d59eb 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/complete/game/LeaderBoardTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/complete/game/LeaderBoardTest.java @@ -52,6 +52,7 @@ public class LeaderBoardTest implements Serializable { private Instant baseTime = new Instant(0); @Rule public TestPipeline p = TestPipeline.create(); + /** Some example users, on two separate teams. 
*/ private enum TestUser { RED_ONE("scarlet", "red"), diff --git a/examples/java/twitter/src/main/java/org/apache/beam/examples/twitterstreamgenerator/ReadFromTwitterDoFn.java b/examples/java/twitter/src/main/java/org/apache/beam/examples/twitterstreamgenerator/ReadFromTwitterDoFn.java index 3a7d4dee9f60..1376f99b62c3 100644 --- a/examples/java/twitter/src/main/java/org/apache/beam/examples/twitterstreamgenerator/ReadFromTwitterDoFn.java +++ b/examples/java/twitter/src/main/java/org/apache/beam/examples/twitterstreamgenerator/ReadFromTwitterDoFn.java @@ -47,6 +47,7 @@ final class ReadFromTwitterDoFn extends DoFn { ReadFromTwitterDoFn() { this.startTime = new DateTime(); } + /* Logger for class.*/ private static final Logger LOG = LoggerFactory.getLogger(ReadFromTwitterDoFn.class); diff --git a/it/common/src/main/java/org/apache/beam/it/common/PipelineLauncher.java b/it/common/src/main/java/org/apache/beam/it/common/PipelineLauncher.java index 439cc5623a18..5b8d8ff74a8b 100644 --- a/it/common/src/main/java/org/apache/beam/it/common/PipelineLauncher.java +++ b/it/common/src/main/java/org/apache/beam/it/common/PipelineLauncher.java @@ -448,8 +448,7 @@ List listMessages( * @return value of the metric or null * @throws IOException if there is an issue sending the request */ - @Nullable - Double getMetric(String project, String region, String jobId, String metricName) + @Nullable Double getMetric(String project, String region, String jobId, String metricName) throws IOException; /** diff --git a/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/dataflow/DefaultPipelineLauncher.java b/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/dataflow/DefaultPipelineLauncher.java index 11a09c4ba749..f6e39f3307e7 100644 --- a/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/dataflow/DefaultPipelineLauncher.java +++ b/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/dataflow/DefaultPipelineLauncher.java @@ -436,8 +436,8 @@ private List extractOptions(String project, String region, LaunchConfig // add pipeline options from beamTestPipelineOptions system property to preserve the // pipeline options already set in TestPipeline. 
- @Nullable - String beamTestPipelineOptions = System.getProperty(PROPERTY_BEAM_TEST_PIPELINE_OPTIONS); + @Nullable String beamTestPipelineOptions = + System.getProperty(PROPERTY_BEAM_TEST_PIPELINE_OPTIONS); if (!Strings.isNullOrEmpty(beamTestPipelineOptions)) { try { additionalOptions.addAll(MAPPER.readValue(beamTestPipelineOptions, List.class)); diff --git a/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryStreamingLT.java b/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryStreamingLT.java index 6e511bd8e5c6..2d761e664e69 100644 --- a/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryStreamingLT.java +++ b/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryStreamingLT.java @@ -128,8 +128,7 @@ public void setUpTest() { } // Set expected table if the property is provided, - @Nullable - String expectedTable = + @Nullable String expectedTable = TestProperties.getProperty("expectedTable", "", TestProperties.Type.PROPERTY); if (!Strings.isNullOrEmpty(expectedTable)) { config = config.toBuilder().setExpectedTable(expectedTable).build(); diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/InMemoryTimerInternals.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/InMemoryTimerInternals.java index 7e84ccbbccfe..42da298f65ee 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/InMemoryTimerInternals.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/InMemoryTimerInternals.java @@ -167,7 +167,9 @@ public void deleteTimer( } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { @@ -177,7 +179,9 @@ public void deleteTimer(StateNamespace namespace, String timerId, String timerFa } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. 
+ */ @Deprecated @Override public void deleteTimer(TimerData timer) { diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java index ebd88442b211..b3ee50dbf0f2 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java @@ -239,8 +239,7 @@ public FinishBundleContext finishBundleContext(DoFn doFn) { } }); processContext.cancelScheduledCheckpoint(); - @Nullable - KV> residual = + @Nullable KV> residual = processContext.getTakenCheckpoint(); if (cont.shouldResume()) { checkState( diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/ReduceFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/ReduceFn.java index 5bfb6e369281..08e41cb781f9 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/ReduceFn.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/ReduceFn.java @@ -61,6 +61,7 @@ public abstract class ProcessValueContext extends Context { public abstract InputT value(); public abstract CausedByDrain causedByDrain(); + /** Return the timestamp associated with the value. */ public abstract Instant timestamp(); } @@ -78,6 +79,7 @@ public abstract class OnTriggerContext extends Context { public abstract PaneInfo paneInfo(); public abstract CausedByDrain causedByDrain(); + /** Output the given value in the current window. */ public abstract void output(OutputT value); } diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/ReduceFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/ReduceFnRunner.java index 1ae0c52f853a..d3cba6947ea1 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/ReduceFnRunner.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/ReduceFnRunner.java @@ -799,8 +799,7 @@ public void onTimers(Iterable timers) throws Exception { // We need to call onTrigger to emit the final pane if required. // The final pane *may* be ON_TIME if no prior ON_TIME pane has been emitted, // and the watermark has passed the end of the window. - @Nullable - Instant newHold = + @Nullable Instant newHold = onTrigger( directContext, renamedContext, diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java index d553a7be2d44..4bb14a790a5a 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java @@ -89,6 +89,7 @@ public class SimpleDoFnRunner implements DoFnRunner { private final PipelineOptions options; + /** The {@link DoFn} being run. */ private final DoFn fn; @@ -99,6 +100,7 @@ public class SimpleDoFnRunner implements DoFnRunner mainOutputTag; + /** The set of known output tags. */ private final Set> outputTags; @@ -375,6 +377,7 @@ public BundleFinalizer bundleFinalizer() { private class DoFnProcessContext extends DoFn.ProcessContext implements DoFnInvoker.ArgumentProvider { final WindowedValue elem; + /** Lazily initialized; should only be accessed via {@link #getNamespace()}. 
*/ private @Nullable StateNamespace namespace; @@ -963,8 +966,7 @@ public State state(String stateId, boolean alwaysFetched) { stateDeclaration.field(), stateId); - @NonNull - State state = + @NonNull State state = stepContext .stateInternals() .state(getNamespace(), StateTags.tagForSpec(stateId, (StateSpec) spec)); diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDoViaKeyedWorkItems.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDoViaKeyedWorkItems.java index 424ea567115f..ab9c012f99ab 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDoViaKeyedWorkItems.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDoViaKeyedWorkItems.java @@ -276,7 +276,8 @@ public static class ProcessFn stateInternalsFactory; private transient @Nullable TimerInternalsFactory timerInternalsFactory; private transient @Nullable SideInputReader sideInputReader; - private transient @Nullable SplittableProcessElementInvoker< + private transient @Nullable + SplittableProcessElementInvoker< InputT, OutputT, RestrictionT, PositionT, WatermarkEstimatorStateT> processElementInvoker; diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/StateTags.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/StateTags.java index 5d69abe8ffce..c14be7cb1dfc 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/StateTags.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/StateTags.java @@ -59,7 +59,9 @@ protected int doHash(StateTag stateTag) { } }; - /** @deprecated for migration purposes only */ + /** + * @deprecated for migration purposes only + */ @Deprecated private static StateBinder adaptTagBinder(final StateTag.StateBinder binder) { return new StateBinder() { @@ -338,7 +340,9 @@ public SimpleStateTag(StructuredId id, StateSpec spec) { this.spec = spec; } - /** @deprecated use {@link StateSpec#bind} method via {@link #getSpec} for now. */ + /** + * @deprecated use {@link StateSpec#bind} method via {@link #getSpec} for now. + */ @Override @Deprecated public StateT bind(StateTag.StateBinder binder) { diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/TimerInternals.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/TimerInternals.java index f19a5b1cf2e3..4dff57039412 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/TimerInternals.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/TimerInternals.java @@ -80,11 +80,15 @@ void setTimer( void deleteTimer( StateNamespace namespace, String timerId, String timerFamilyId, TimeDomain timeDomain); - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Deprecated void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId); - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Deprecated void deleteTimer(TimerData timerKey); @@ -95,8 +99,7 @@ void deleteTimer( * Returns the current timestamp in the {@link TimeDomain#SYNCHRONIZED_PROCESSING_TIME} time * domain or {@code null} if unknown. 
*/ - @Nullable - Instant currentSynchronizedProcessingTime(); + @Nullable Instant currentSynchronizedProcessingTime(); /** * Return the current, local input watermark timestamp for this computation in the {@link @@ -163,8 +166,7 @@ void deleteTimer( * viewed on the output of a computation remains locally late on the input of a following * computation. */ - @Nullable - Instant currentOutputWatermarkTime(); + @Nullable Instant currentOutputWatermarkTime(); /** Data about a timer as represented within {@link TimerInternals}. */ @AutoValue diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/BoundedTrieData.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/BoundedTrieData.java index cf63713a52e8..2d5aec7b61dd 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/BoundedTrieData.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/BoundedTrieData.java @@ -315,6 +315,7 @@ public static class BoundedTrieNode implements Serializable { public static final String TRUNCATED_TRUE = String.valueOf(true); public static final String TRUNCATED_FALSE = String.valueOf(false); + /** * A map from strings to child nodes. Each key represents a segment of a path/FQN, and the * corresponding value represents the subtree rooted at that segment. diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricUpdates.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricUpdates.java index 34939bfefe9d..c72d671a98b1 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricUpdates.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricUpdates.java @@ -49,6 +49,7 @@ public abstract static class MetricUpdate implements Serializable { /** The key being updated. */ public abstract MetricKey getKey(); + /** The value of the update. */ public abstract T getUpdate(); diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerImpl.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerImpl.java index 3532cd3be111..7d5bbb01512d 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerImpl.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerImpl.java @@ -290,7 +290,9 @@ public MetricUpdates getUpdates() { extractHistogramUpdates(histograms)); } - /** @return The MonitoringInfo metadata from the metric. */ + /** + * @return The MonitoringInfo metadata from the metric. + */ private @Nullable SimpleMonitoringInfoBuilder metricToMonitoringMetadata( MetricKey metricKey, String typeUrn, String userUrn) { SimpleMonitoringInfoBuilder builder = new SimpleMonitoringInfoBuilder(true); @@ -339,7 +341,9 @@ public MetricUpdates getUpdates() { return builder.build(); } - /** @return The MonitoringInfo metadata from the counter metric. */ + /** + * @return The MonitoringInfo metadata from the counter metric. + */ private @Nullable SimpleMonitoringInfoBuilder counterToMonitoringMetadata(MetricKey metricKey) { return metricToMonitoringMetadata( metricKey, @@ -347,7 +351,9 @@ public MetricUpdates getUpdates() { MonitoringInfoConstants.Urns.USER_SUM_INT64); } - /** @return The MonitoringInfo generated from the counter metricUpdate. */ + /** + * @return The MonitoringInfo generated from the counter metricUpdate. 
+ */ private @Nullable MonitoringInfo counterUpdateToMonitoringInfo(MetricUpdate metricUpdate) { SimpleMonitoringInfoBuilder builder = counterToMonitoringMetadata(metricUpdate.getKey()); if (builder == null) { @@ -357,7 +363,9 @@ public MetricUpdates getUpdates() { return builder.build(); } - /** @return The MonitoringInfo metadata from the distribution metric. */ + /** + * @return The MonitoringInfo metadata from the distribution metric. + */ private @Nullable SimpleMonitoringInfoBuilder distributionToMonitoringMetadata( MetricKey metricKey) { return metricToMonitoringMetadata( @@ -380,7 +388,9 @@ public MetricUpdates getUpdates() { return builder.build(); } - /** @return The MonitoringInfo metadata from the gauge metric. */ + /** + * @return The MonitoringInfo metadata from the gauge metric. + */ private @Nullable SimpleMonitoringInfoBuilder gaugeToMonitoringMetadata(MetricKey metricKey) { return metricToMonitoringMetadata( metricKey, @@ -402,7 +412,9 @@ public MetricUpdates getUpdates() { return builder.build(); } - /** @return The MonitoringInfo metadata from the string set metric. */ + /** + * @return The MonitoringInfo metadata from the string set metric. + */ private @Nullable SimpleMonitoringInfoBuilder stringSetToMonitoringMetadata(MetricKey metricKey) { return metricToMonitoringMetadata( metricKey, @@ -410,7 +422,9 @@ public MetricUpdates getUpdates() { MonitoringInfoConstants.Urns.USER_SET_STRING); } - /** @return The MonitoringInfo metadata from the string set metric. */ + /** + * @return The MonitoringInfo metadata from the string set metric. + */ private @Nullable SimpleMonitoringInfoBuilder boundedTrieToMonitoringMetadata( MetricKey metricKey) { return metricToMonitoringMetadata( @@ -419,7 +433,9 @@ public MetricUpdates getUpdates() { MonitoringInfoConstants.Urns.USER_BOUNDED_TRIE); } - /** @return The MonitoringInfo metadata from the histogram metric. */ + /** + * @return The MonitoringInfo metadata from the histogram metric. 
+ */ private @Nullable SimpleMonitoringInfoBuilder histogramToMonitoringMetadata(MetricKey metricKey) { return metricToMonitoringMetadata( metricKey, diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MonitoringInfoConstants.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MonitoringInfoConstants.java index c161e97e1a7f..122d7022688a 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MonitoringInfoConstants.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MonitoringInfoConstants.java @@ -216,8 +216,8 @@ private static String extractLabel(MonitoringInfo.MonitoringInfoLabels value) { } public static boolean isPerWorkerMetric(MetricName metricName) { - @Nullable - String value = metricName.getLabels().get(MonitoringInfoConstants.Labels.PER_WORKER_METRIC); + @Nullable String value = + metricName.getLabels().get(MonitoringInfoConstants.Labels.PER_WORKER_METRIC); if (value != null && value.equals("true")) { return true; } diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MonitoringInfoMetricName.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MonitoringInfoMetricName.java index 7694b187e12c..66fd3c06c426 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MonitoringInfoMetricName.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MonitoringInfoMetricName.java @@ -79,7 +79,9 @@ public String getUrn() { return this.urn; } - /** @return The labels associated with this MonitoringInfo. */ + /** + * @return The labels associated with this MonitoringInfo. + */ @Override public Map getLabels() { return this.labels; diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/SimpleMonitoringInfoBuilder.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/SimpleMonitoringInfoBuilder.java index 32ba1508c5a6..85a98988226b 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/SimpleMonitoringInfoBuilder.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/SimpleMonitoringInfoBuilder.java @@ -197,6 +197,7 @@ public SimpleMonitoringInfoBuilder setLabels(Map labels) { public void clear() { this.builder = MonitoringInfo.newBuilder(); } + /** Clear the builder and merge from the provided monitoringInfo. */ public void merge(MonitoringInfo monitoringInfo) { this.builder.mergeFrom(monitoringInfo); diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/metrics/MonitoringInfoTestUtil.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/metrics/MonitoringInfoTestUtil.java index 69ebcfbf9daf..f3d25131de86 100644 --- a/runners/core-java/src/test/java/org/apache/beam/runners/core/metrics/MonitoringInfoTestUtil.java +++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/metrics/MonitoringInfoTestUtil.java @@ -27,7 +27,9 @@ "rawtypes", // TODO(https://github.com/apache/beam/issues/20447) }) public class MonitoringInfoTestUtil { - /** @return A basic MonitoringInfoMetricName to test. */ + /** + * @return A basic MonitoringInfoMetricName to test. 
+ */ public static MonitoringInfoMetricName testElementCountName() { HashMap labels = new HashMap(); labels.put(MonitoringInfoConstants.Labels.PCOLLECTION, "testPCollection"); @@ -36,7 +38,9 @@ public static MonitoringInfoMetricName testElementCountName() { return name; } - /** @return A basic MonitoringInfo which matches the testElementCountName. */ + /** + * @return A basic MonitoringInfo which matches the testElementCountName. + */ public static MonitoringInfo testElementCountMonitoringInfo(long value) { SimpleMonitoringInfoBuilder builder = new SimpleMonitoringInfoBuilder(); builder.setUrn(MonitoringInfoConstants.Urns.ELEMENT_COUNT); diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/triggers/AfterPaneStateMachineTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/triggers/AfterPaneStateMachineTest.java index 8c1f947ef8eb..7ebdba4a8b0a 100644 --- a/runners/core-java/src/test/java/org/apache/beam/runners/core/triggers/AfterPaneStateMachineTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/triggers/AfterPaneStateMachineTest.java @@ -36,6 +36,7 @@ public class AfterPaneStateMachineTest { SimpleTriggerStateMachineTester tester; + /** * Tests that the trigger does fire when enough elements are in a window, and that it only fires * that window (no leakage). diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/CommittedBundle.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/CommittedBundle.java index 936429442638..9f08a2d2e979 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/CommittedBundle.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/CommittedBundle.java @@ -35,8 +35,7 @@ interface CommittedBundle extends Bundle> { /** Returns the PCollection that the elements of this bundle belong to. */ @Override - @Nullable - PCollection getPCollection(); + @Nullable PCollection getPCollection(); /** * Returns the key that was output in the most recent {@code GroupByKey} in the execution of this @@ -71,6 +70,7 @@ interface CommittedBundle extends Bundle> { */ @Override Instant getSynchronizedProcessingOutputWatermark(); + /** * Return a new {@link CommittedBundle} that is like this one, except calls to {@link * #getElements()} will return the provided elements. This bundle is unchanged. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java index 762a2338c3e6..f3f1cc98e7d6 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java @@ -95,14 +95,18 @@ public void deleteTimer( timeDomain)); } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { throw new UnsupportedOperationException("Canceling of timer by ID is not yet supported."); } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. 
+ */ @Deprecated @Override public void deleteTimer(TimerData timerData) { diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTransformExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTransformExecutor.java index fb07a3fa919f..1db987c776e1 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTransformExecutor.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTransformExecutor.java @@ -81,6 +81,7 @@ public TransformExecutor create( /** The transform that will be evaluated. */ private final AppliedPTransform transform; + /** The inputs this {@link DirectTransformExecutor} will deliver to the transform. */ private final CommittedBundle inputBundle; @@ -118,8 +119,8 @@ public void run() { ModelEnforcement enforcement = enforcementFactory.forBundle(inputBundle, transform); enforcements.add(enforcement); } - @Nullable - TransformEvaluator evaluator = evaluatorRegistry.forApplication(transform, inputBundle); + @Nullable TransformEvaluator evaluator = + evaluatorRegistry.forApplication(transform, inputBundle); if (evaluator == null) { onComplete.handleEmpty(transform); // Nothing to do diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/MultiStepCombine.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/MultiStepCombine.java index e1573abf7864..bd8613da6118 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/MultiStepCombine.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/MultiStepCombine.java @@ -367,6 +367,7 @@ public int hashCode() { static final String DIRECT_MERGE_ACCUMULATORS_EXTRACT_OUTPUT_URN = "beam:directrunner:transforms:merge_accumulators_extract_output:v1"; + /** * A primitive {@link PTransform} that merges iterables of accumulators and extracts the output. * diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java index bf1a4eed4f1f..db7cbe82124b 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java @@ -45,8 +45,7 @@ interface TransformEvaluatorFactory { * can be constructed. * @throws Exception whenever constructing the underlying evaluator throws an exception */ - @Nullable - TransformEvaluator forApplication( + @Nullable TransformEvaluator forApplication( AppliedPTransform application, CommittedBundle inputBundle) throws Exception; /** diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformResult.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformResult.java index 2817ddb97510..f2e37cd9bc88 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformResult.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformResult.java @@ -80,8 +80,7 @@ interface TransformResult { * *
<p>
If this evaluation did not access state, this may return null. */ - @Nullable - CopyOnAccessInMemoryStateInternals getState(); + @Nullable CopyOnAccessInMemoryStateInternals getState(); /** * Returns a TimerUpdateBuilder that was produced as a result of this evaluation. If the diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UncommittedBundle.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UncommittedBundle.java index 83c64dc3bbd2..7a701115fabd 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UncommittedBundle.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UncommittedBundle.java @@ -32,8 +32,7 @@ */ interface UncommittedBundle { /** Returns the PCollection that the elements of this {@link UncommittedBundle} belong to. */ - @Nullable - PCollection getPCollection(); + @Nullable PCollection getPCollection(); /** * Outputs an element to this bundle. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java index 0163b46a4d22..b048b4e82c18 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java @@ -1727,6 +1727,7 @@ public String toString() { public static class FiredTimers { /** The executable the timers were set at and will be delivered to. */ private final ExecutableT executable; + /** The key the timers were set for and will be delivered to. */ private final StructuralKey key; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java index 7131247c3d70..91b5f9879f4c 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java @@ -972,8 +972,7 @@ StaticQueue add(T elem) { return this; } - @Nullable - Optional take() throws InterruptedException { + @Nullable Optional take() throws InterruptedException { return queue.take(); } diff --git a/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java b/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java index fcafbd600481..d0f273e8440f 100644 --- a/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java +++ b/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java @@ -198,6 +198,7 @@ public class DoFnOperator /** Max number of elements to include in a bundle. */ private final long maxBundleSize; + /** Max duration of a bundle. */ private final long maxBundleTimeMills; @@ -222,9 +223,11 @@ public class DoFnOperator /** Stores new finalizations being gathered. */ private transient InMemoryBundleFinalizer bundleFinalizer; + /** Pending bundle finalizations which have not been acknowledged yet. */ private transient LinkedHashMap> pendingFinalizations; + /** * Keep a maximum of 32 bundle finalizations for {@link * BundleFinalizer.Callback#onBundleSuccess()}. @@ -257,12 +260,16 @@ public class DoFnOperator * been addressed. 
*/ private transient volatile boolean bundleStarted; + /** Number of processed elements in the current bundle. */ private transient volatile long elementCount; + /** Time that the last bundle was finished (to set the timer). */ private transient volatile long lastFinishBundleTime; + /** Callback to be executed before the current bundle is started. */ private transient volatile Runnable preBundleCallback; + /** Callback to be executed after the current bundle was finished. */ private transient volatile Runnable bundleFinishedCallback; @@ -1236,6 +1243,7 @@ public static class BufferedOutputManager implements WindowedValueMulti private final TupleTag mainTag; private final Map, OutputTag>> tagsToOutputTags; private final Map, Integer> tagsToIds; + /** * A lock to be acquired before writing to the buffer. This lock will only be acquired during * buffering. It will not be acquired during flushing the buffer. @@ -1245,6 +1253,7 @@ public static class BufferedOutputManager implements WindowedValueMulti private final boolean isStreaming; private Map> idsToTags; + /** Elements buffered during a snapshot, by output id. */ @VisibleForTesting final PushedBackElementsHandler>> pushedBackElementsHandler; @@ -1253,6 +1262,7 @@ public static class BufferedOutputManager implements WindowedValueMulti /** Indicates whether we are buffering data as part of snapshotState(). */ private boolean openBuffer = false; + /** For performance, to avoid having to access the state backend when the buffer is empty. */ private boolean bufferIsEmpty = false; @@ -1655,7 +1665,9 @@ void onFiredOrDeletedTimer(TimerData timer) { } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { @@ -1672,7 +1684,9 @@ public void deleteTimer( } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Override @Deprecated public void deleteTimer(TimerData timer) { diff --git a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java index 647297394f52..7a2ec94524ea 100644 --- a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java +++ b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java @@ -221,7 +221,9 @@ public interface PTransformTranslator { void translate(String id, RunnerApi.Pipeline pipeline, T t); } - /** @deprecated Legacy non-portable source which can be replaced by a DoFn with timers. */ + /** + * @deprecated Legacy non-portable source which can be replaced by a DoFn with timers. 
+ */ @Deprecated private static final String STREAMING_IMPULSE_TRANSFORM_URN = "flink:transform:streaming_impulse:v1"; diff --git a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java index e582635e0988..4b619f1975e4 100644 --- a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java +++ b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java @@ -197,6 +197,7 @@ public class DoFnOperator /** Max number of elements to include in a bundle. */ private final long maxBundleSize; + /** Max duration of a bundle. */ private final long maxBundleTimeMills; @@ -221,9 +222,11 @@ public class DoFnOperator /** Stores new finalizations being gathered. */ private transient InMemoryBundleFinalizer bundleFinalizer; + /** Pending bundle finalizations which have not been acknowledged yet. */ private transient LinkedHashMap> pendingFinalizations; + /** * Keep a maximum of 32 bundle finalizations for {@link * BundleFinalizer.Callback#onBundleSuccess()}. @@ -256,12 +259,16 @@ public class DoFnOperator * been addressed. */ private transient volatile boolean bundleStarted; + /** Number of processed elements in the current bundle. */ private transient volatile long elementCount; + /** Time that the last bundle was finished (to set the timer). */ private transient volatile long lastFinishBundleTime; + /** Callback to be executed before the current bundle is started. */ private transient volatile Runnable preBundleCallback; + /** Callback to be executed after the current bundle was finished. */ private transient volatile Runnable bundleFinishedCallback; @@ -1236,6 +1243,7 @@ public static class BufferedOutputManager implements WindowedValueMulti private final TupleTag mainTag; private final Map, OutputTag>> tagsToOutputTags; private final Map, Integer> tagsToIds; + /** * A lock to be acquired before writing to the buffer. This lock will only be acquired during * buffering. It will not be acquired during flushing the buffer. @@ -1245,6 +1253,7 @@ public static class BufferedOutputManager implements WindowedValueMulti private final boolean isStreaming; private Map> idsToTags; + /** Elements buffered during a snapshot, by output id. */ @VisibleForTesting final PushedBackElementsHandler>> pushedBackElementsHandler; @@ -1253,6 +1262,7 @@ public static class BufferedOutputManager implements WindowedValueMulti /** Indicates whether we are buffering data as part of snapshotState(). */ private boolean openBuffer = false; + /** For performance, to avoid having to access the state backend when the buffer is empty. */ private boolean bufferIsEmpty = false; @@ -1655,7 +1665,9 @@ void onFiredOrDeletedTimer(TimerData timer) { } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { @@ -1672,7 +1684,9 @@ public void deleteTimer( } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. 
+ */ @Override @Deprecated public void deleteTimer(TimerData timer) { diff --git a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java index 25cf9879766f..6dd6ecfd15a8 100644 --- a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java +++ b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java @@ -77,6 +77,7 @@ public class UnboundedSourceWrapper> pendingCheckpoints; + /** Keep a maximum of 32 checkpoints for {@code CheckpointMark.finalizeCheckpoint()}. */ private static final int MAX_NUMBER_PENDING_CHECKPOINTS = 32; diff --git a/runners/flink/build.gradle b/runners/flink/build.gradle index 4d058a481820..f2818b41f20c 100644 --- a/runners/flink/build.gradle +++ b/runners/flink/build.gradle @@ -18,6 +18,10 @@ apply plugin: 'com.diffplug.spotless' +repositories { + mavenCentral() +} + /* * This build.gradle file is empty except to configure the spotless task on the java sources. * These java sources are included in the subproject's sources in order to compile against the different versions. @@ -26,7 +30,7 @@ apply plugin: 'com.diffplug.spotless' spotless { java { licenseHeader org.apache.beam.gradle.BeamModulePlugin.javaLicenseHeader - googleJavaFormat('1.7') + googleJavaFormat('1.17.0') target project.fileTree(project.projectDir) { include 'src/*/java/**/*.java' } } } \ No newline at end of file diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkJobServerDriver.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkJobServerDriver.java index 671cc2597cb2..99a988d5d688 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkJobServerDriver.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkJobServerDriver.java @@ -61,8 +61,7 @@ String getFlinkMaster() { + "over configurations in FLINK_CONF_DIR.") private String flinkConfDir = null; - @Nullable - String getFlinkConfDir() { + @Nullable String getFlinkConfDir() { return flinkConfDir; } } diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkPipelineOptions.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkPipelineOptions.java index f0724b4d031f..6e4d998ed098 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkPipelineOptions.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkPipelineOptions.java @@ -209,7 +209,9 @@ public interface FlinkPipelineOptions + "Defaults to the flink cluster's state.backend configuration.") Class getStateBackendFactory(); - /** @deprecated Please use setStateBackend below. */ + /** + * @deprecated Please use setStateBackend below. 
+ */ @Deprecated void setStateBackendFactory(Class stateBackendFactory); diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java index 77dc4d795167..ae6a290dcadf 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java @@ -221,7 +221,9 @@ public interface PTransformTranslator { void translate(String id, RunnerApi.Pipeline pipeline, T t); } - /** @deprecated Legacy non-portable source which can be replaced by a DoFn with timers. */ + /** + * @deprecated Legacy non-portable source which can be replaced by a DoFn with timers. + */ @Deprecated private static final String STREAMING_IMPULSE_TRANSFORM_URN = "flink:transform:streaming_impulse:v1"; diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/utils/CheckpointStats.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/utils/CheckpointStats.java index 9922656bc22f..42dc09935d22 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/utils/CheckpointStats.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/utils/CheckpointStats.java @@ -27,6 +27,7 @@ public class CheckpointStats { /** Checkpoint id => Checkpoint start (System.currentTimeMillis()). */ private final Map checkpointDurations = new HashMap<>(); + /** Distribution cell for reporting checkpoint durations. */ private final Supplier distributionCellSupplier; diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java index 7cf364d14698..b1f15548597b 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java @@ -199,6 +199,7 @@ public class DoFnOperator /** Max number of elements to include in a bundle. */ private final long maxBundleSize; + /** Max duration of a bundle. */ private final long maxBundleTimeMills; @@ -223,9 +224,11 @@ public class DoFnOperator /** Stores new finalizations being gathered. */ private transient InMemoryBundleFinalizer bundleFinalizer; + /** Pending bundle finalizations which have not been acknowledged yet. */ private transient LinkedHashMap> pendingFinalizations; + /** * Keep a maximum of 32 bundle finalizations for {@link * BundleFinalizer.Callback#onBundleSuccess()}. @@ -257,12 +260,16 @@ public class DoFnOperator * been addressed. */ private transient volatile boolean bundleStarted; + /** Number of processed elements in the current bundle. */ private transient volatile long elementCount; + /** Time that the last bundle was finished (to set the timer). */ private transient volatile long lastFinishBundleTime; + /** Callback to be executed before the current bundle is started. */ private transient volatile Runnable preBundleCallback; + /** Callback to be executed after the current bundle was finished. 
*/ private transient volatile Runnable bundleFinishedCallback; @@ -1239,6 +1246,7 @@ public static class BufferedOutputManager implements WindowedValueMulti private final TupleTag mainTag; private final Map, OutputTag>> tagsToOutputTags; private final Map, Integer> tagsToIds; + /** * A lock to be acquired before writing to the buffer. This lock will only be acquired during * buffering. It will not be acquired during flushing the buffer. @@ -1248,6 +1256,7 @@ public static class BufferedOutputManager implements WindowedValueMulti private final boolean isStreaming; private Map> idsToTags; + /** Elements buffered during a snapshot, by output id. */ @VisibleForTesting final PushedBackElementsHandler>> pushedBackElementsHandler; @@ -1256,6 +1265,7 @@ public static class BufferedOutputManager implements WindowedValueMulti /** Indicates whether we are buffering data as part of snapshotState(). */ private boolean openBuffer = false; + /** For performance, to avoid having to access the state backend when the buffer is empty. */ private boolean bufferIsEmpty = false; @@ -1658,7 +1668,9 @@ void onFiredOrDeletedTimer(TimerData timer) { } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { @@ -1675,7 +1687,9 @@ public void deleteTimer( } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ + /** + * @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. + */ @Override @Deprecated public void deleteTimer(TimerData timer) { diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java index 4ebb359fceae..03dc6b02e64e 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java @@ -149,6 +149,7 @@ public class ExecutableStageDoFnOperator private final FlinkExecutableStageContextFactory contextFactory; private final Map> outputMap; private final Map> sideInputIds; + /** A lock which has to be acquired when concurrently accessing state and timers. */ private final ReentrantLock stateBackendLock; @@ -372,10 +373,13 @@ static class BagUserStateFactory private final StateInternals stateInternals; private final KeyedStateBackend keyedStateBackend; + /** Lock to hold whenever accessing the state backend. */ private final Lock stateBackendLock; + /** For debugging: The key coder used by the Runner. */ private final @Nullable Coder runnerKeyCoder; + /** For debugging: Same as keyedStateBackend but upcasted, to access key group meta info. */ private final @Nullable AbstractKeyedStateBackend keyStateBackendWithKeyGroupInfo; @@ -916,6 +920,7 @@ private static class SdkHarnessDoFnRunner * this consistent. Please see the description in DoFnOperator. */ private volatile RemoteBundle remoteBundle; + /** * Current main input receiver. Volatile to ensure mutually exclusive bundle processing threads * see this consistent. Please see the description in DoFnOperator. 
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java index 1d50fd72d465..3fa758eb676d 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java @@ -76,6 +76,7 @@ public class UnboundedSourceWrapper> pendingCheckpoints; + /** Keep a maximum of 32 checkpoints for {@code CheckpointMark.finalizeCheckpoint()}. */ private static final int MAX_NUMBER_PENDING_CHECKPOINTS = 32; diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/stableinput/BufferingDoFnRunner.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/stableinput/BufferingDoFnRunner.java index 73b20238ef05..2a21bd8bb52d 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/stableinput/BufferingDoFnRunner.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/stableinput/BufferingDoFnRunner.java @@ -112,29 +112,39 @@ public static BufferingDoFnRunner create( /** The underlying DoFnRunner that any buffered data will be handed over to eventually. */ private final DoFnRunner underlying; + /** A union list state which contains all to-be-acknowledged snapshot ids. */ private final ListState notYetAcknowledgedSnapshots; + /** A factory for constructing new BufferingElementsHandler scoped by an internal id. */ private final BufferingElementsHandlerFactory bufferingElementsHandlerFactory; + /** The maximum number of buffers for data of not yet acknowledged checkpoints. */ final int numCheckpointBuffers; + /** The current active state id which, on checkpoint, is linked to a checkpoint id. */ int currentStateIndex; + /** The current handler used for buffering. */ private BufferingElementsHandler currentBufferingElementsHandler; + /** Minimum timestamp of all buffered elements. */ private volatile long minBufferedElementTimestamp; + /** The associated keyed state backend. */ private final @Nullable KeyedStateBackend keyedStateBackend; + /** * Locker that must be held (if present) before buffering an element. If non-null, we must * manually set a key to the state backend. */ private final @Nullable Supplier locker; + /** * A selector of key. When non-null, this must be set to the keyed state backend before buffering. */ private final @Nullable Function keySelector; + /** Callable to notify about possibility to flush bundle. 
*/ private final @Nullable Runnable finishBundleCallback; diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperatorTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperatorTest.java index 01c9d25f1bf1..788206a17d7a 100644 --- a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperatorTest.java +++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperatorTest.java @@ -162,12 +162,9 @@ public class ExecutableStageDoFnOperatorTest { private final String stateId = "userState"; private final ExecutableStagePayload stagePayloadWithUserState = - stagePayload - .toBuilder() + stagePayload.toBuilder() .setComponents( - stagePayload - .getComponents() - .toBuilder() + stagePayload.getComponents().toBuilder() .putTransforms( "transform", RunnerApi.PTransform.newBuilder() @@ -186,12 +183,9 @@ public class ExecutableStageDoFnOperatorTest { .build(); private final ExecutableStagePayload stagePayloadWithStableInput = - stagePayload - .toBuilder() + stagePayload.toBuilder() .setComponents( - stagePayload - .getComponents() - .toBuilder() + stagePayload.getComponents().toBuilder() .putTransforms( "transform", RunnerApi.PTransform.newBuilder() diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java index b15b3f3834d2..886a69d43e2c 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java @@ -205,6 +205,7 @@ public void processElement(ProcessContext c) throws Exception { private final transient DataflowRunner runner; private final PCollectionView> view; + /** Builds an instance of this class from the overridden transform. */ @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply() public BatchViewAsMap( @@ -693,6 +694,7 @@ public void processElement(ProcessContext c) throws Exception { private final transient DataflowRunner runner; private final PCollectionView>> view; + /** Builds an instance of this class from the overridden transform. */ @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply() public BatchViewAsMultimap( @@ -1073,6 +1075,7 @@ public void processElement(ProcessContext c) throws Exception { private final transient DataflowRunner runner; private final PCollectionView> view; + /** Builds an instance of this class from the overridden transform. */ @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply() public BatchViewAsList(DataflowRunner runner, CreatePCollectionView> transform) { @@ -1154,6 +1157,7 @@ static class BatchViewAsIterable private final transient DataflowRunner runner; private final PCollectionView> view; + /** Builds an instance of this class from the overridden transform. 
*/ @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply() public BatchViewAsIterable( diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowMetrics.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowMetrics.java index 06435793b56f..ca3cf6f7786e 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowMetrics.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowMetrics.java @@ -62,6 +62,7 @@ class DataflowMetrics extends MetricResults { private static final Logger LOG = LoggerFactory.getLogger(DataflowMetrics.class); + /** * Client for the Dataflow service. This can be used to query the service for information about * the job. @@ -391,8 +392,7 @@ private boolean isMetricTentative(MetricUpdate metricUpdate) { return null; } - @Nullable - AppliedPTransform appliedPTransform = + @Nullable AppliedPTransform appliedPTransform = transformStepNames.inverse().get(internalStepName); if (appliedPTransform == null) { return null; diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineJob.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineJob.java index 400f161dee2f..7cab2862de04 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineJob.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineJob.java @@ -263,8 +263,7 @@ public DataflowPipelineJob getReplacedByJob() { } @VisibleForTesting - @Nullable - State waitUntilFinish( + @Nullable State waitUntilFinish( Duration duration, MonitoringUtil.@Nullable JobMessagesHandler messageHandler, Sleeper sleeper, @@ -298,8 +297,7 @@ private static BackOff getMessagesBackoff(Duration duration) { * @throws InterruptedException if the thread is interrupted. */ @VisibleForTesting - @Nullable - State waitUntilFinish( + @Nullable State waitUntilFinish( Duration duration, MonitoringUtil.@Nullable JobMessagesHandler messageHandler, Sleeper sleeper, @@ -510,8 +508,7 @@ public State getState() { BackOffAdapter.toGcpBackOff(STATUS_BACKOFF_FACTORY.backoff()), Sleeper.DEFAULT); } - @Nullable - String getLatestStateString() { + @Nullable String getLatestStateString() { return latestStateString; } diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java index c19673a3117e..ee58c6908084 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java @@ -220,6 +220,7 @@ public class DataflowRunner extends PipelineRunner { "unsafely_attempt_to_process_unbounded_data_in_batch_mode"; private static final Logger LOG = LoggerFactory.getLogger(DataflowRunner.class); + /** Provided configuration options. 
*/ private final DataflowPipelineOptions options; @@ -1342,8 +1343,7 @@ public DataflowPipelineJob run(Pipeline pipeline) { SdkComponents portableComponents = SdkComponents.create( options, - defaultEnvironmentForDataflow - .toBuilder() + defaultEnvironmentForDataflow.toBuilder() .addAllDependencies(getDefaultArtifacts()) .addAllCapabilities(Environments.getJavaCapabilities()) .build()); @@ -1386,8 +1386,7 @@ public DataflowPipelineJob run(Pipeline pipeline) { SdkComponents dataflowNonPortableComponents = SdkComponents.create( options, - defaultEnvironmentForDataflow - .toBuilder() + defaultEnvironmentForDataflow.toBuilder() .addAllDependencies(getDefaultArtifacts()) .addAllCapabilities(Environments.getJavaCapabilities()) .build()); @@ -1839,7 +1838,8 @@ public void visitValue(PValue value, Node producer) { return true; } } - }; + } + ; /** Returns the DataflowPipelineTranslator associated with this object. */ public DataflowPipelineTranslator getTranslator() { diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/TestDataflowRunner.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/TestDataflowRunner.java index b00194dacb08..db8364bcbe8e 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/TestDataflowRunner.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/TestDataflowRunner.java @@ -310,8 +310,7 @@ Optional checkForPAssertSuccess(DataflowPipelineJob job) { } @VisibleForTesting - @Nullable - JobMetrics getJobMetrics(DataflowPipelineJob job) { + @Nullable JobMetrics getJobMetrics(DataflowPipelineJob job) { JobMetrics metrics = null; try { metrics = dataflowClient.getJobMetrics(job.getJobId()); diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineWorkerPoolOptions.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineWorkerPoolOptions.java index 3fcf69f4cacf..bd2b968ff893 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineWorkerPoolOptions.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineWorkerPoolOptions.java @@ -45,7 +45,9 @@ enum AutoscalingAlgorithmType { /** Use numWorkers machines. Do not autoscale the worker pool. */ NONE("AUTOSCALING_ALGORITHM_NONE"), - /** @deprecated use {@link #THROUGHPUT_BASED}. */ + /** + * @deprecated use {@link #THROUGHPUT_BASED}. + */ @Deprecated BASIC("AUTOSCALING_ALGORITHM_BASIC"), @@ -104,7 +106,9 @@ public String getAlgorithm() { void setDiskSizeGb(int value); /** Container image used as Dataflow worker harness image. */ - /** @deprecated Use {@link #getSdkContainerImage} instead. */ + /** + * @deprecated Use {@link #getSdkContainerImage} instead. + */ @Description( "Container image used to configure a Dataflow worker. " + "Can only be used for official Dataflow container images. " @@ -113,7 +117,9 @@ public String getAlgorithm() { @Hidden String getWorkerHarnessContainerImage(); - /** @deprecated Use {@link #setSdkContainerImage} instead. */ + /** + * @deprecated Use {@link #setSdkContainerImage} instead. + */ @Deprecated @Hidden void setWorkerHarnessContainerImage(String value); @@ -195,15 +201,13 @@ public String getAlgorithm() { /** Provisioned IOPS for the worker disk. 
*/ @Description("Provisioned IOPS for the worker disk.") - @Nullable - Long getDiskProvisionedIops(); + @Nullable Long getDiskProvisionedIops(); void setDiskProvisionedIops(Long value); /** Provisioned throughput in MiB/s for the worker disk. */ @Description("Provisioned throughput in MiB/s for the worker disk.") - @Nullable - Long getDiskProvisionedThroughputMibps(); + @Nullable Long getDiskProvisionedThroughputMibps(); void setDiskProvisionedThroughputMibps(Long value); @@ -215,8 +219,7 @@ public String getAlgorithm() { @Description( "Specifies whether worker pools should be started with public IP addresses. WARNING:" + "This feature is available only through allowlist.") - @Nullable - Boolean getUsePublicIps(); + @Nullable Boolean getUsePublicIps(); void setUsePublicIps(@Nullable Boolean value); @@ -228,8 +231,7 @@ public String getAlgorithm() { * Pipeline Execution Parameters. */ @Description("GCE minimum CPU platform. Default is determined by GCP.") - @Nullable - String getMinCpuPlatform(); + @Nullable String getMinCpuPlatform(); void setMinCpuPlatform(String minCpuPlatform); } diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowStreamingPipelineOptions.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowStreamingPipelineOptions.java index 9727048e47aa..ab84064f5082 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowStreamingPipelineOptions.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowStreamingPipelineOptions.java @@ -133,14 +133,18 @@ public interface DataflowStreamingPipelineOptions extends PipelineOptions { void setWindmillMessagesBetweenIsReadyChecks(int value); - /** @deprecated since 2.73.0 */ + /** + * @deprecated since 2.73.0 + */ @Deprecated @Description("Unused flag.") Boolean getUseWindmillIsolatedChannels(); void setUseWindmillIsolatedChannels(Boolean value); - /** @deprecated since beam 2.73.0 */ + /** + * @deprecated since beam 2.73.0 + */ @Deprecated @Description("Unused Flag") Boolean getUseSeparateWindmillHeartbeatStreams(); diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DataflowTemplateJob.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DataflowTemplateJob.java index d8ecdb9d42a7..79152423688d 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DataflowTemplateJob.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DataflowTemplateJob.java @@ -57,8 +57,7 @@ public MetricResults metrics() { } @VisibleForTesting - @Nullable - State waitUntilFinish( + @Nullable State waitUntilFinish( Duration duration, MonitoringUtil.JobMessagesHandler messageHandler, Sleeper sleeper, diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DefaultCoderCloudObjectTranslatorRegistrar.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DefaultCoderCloudObjectTranslatorRegistrar.java index 2d83bd77526e..7137eabf1f04 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DefaultCoderCloudObjectTranslatorRegistrar.java +++ 
b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DefaultCoderCloudObjectTranslatorRegistrar.java @@ -85,6 +85,7 @@ public class DefaultCoderCloudObjectTranslatorRegistrar CloudObjectTranslators.union(), CloudObjectTranslators.coGroupByKeyResult(), CloudObjectTranslators.javaSerialized()); + // TODO: ElementAndRestrictionCoder. This is in runners-core, but probably needs to be // in core-construction @VisibleForTesting @@ -112,6 +113,7 @@ public class DefaultCoderCloudObjectTranslatorRegistrar TextualIntegerCoder.class, VarIntCoder.class, VoidCoder.class); + // TODO: WriteBundlesToFiles.ResultCoder.class); // TODO: Atomic, GCPIO Coders: // TableRowInfoCoder.class diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PackageUtil.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PackageUtil.java index 31c9a5f3ce0d..1c86d2d63b9a 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PackageUtil.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PackageUtil.java @@ -382,8 +382,10 @@ public static PackageUtil.StagedFile of(String source, String sha256, String des /** The file to stage. */ public abstract String getSource(); + /** The SHA-256 hash of the source file. */ public abstract String getSha256(); + /** Staged target for this file. */ public abstract String getDestination(); } diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PropertyNames.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PropertyNames.java index 4e1c1c30e8d6..03682a4dcb26 100644 --- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PropertyNames.java +++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PropertyNames.java @@ -71,6 +71,7 @@ public class PropertyNames { public static final String RESOURCE_HINTS = "resource_hints"; public static final String PRESERVES_KEYS = "preserves_keys"; public static final String ALLOW_DUPLICATES = "allow_duplicates"; + /** * @deprecated Uses the incorrect terminology. {@link #RESTRICTION_ENCODING}. Should be removed * once non FnAPI SplittableDoFn expansion for Dataflow is removed. 
diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java index ab3b62a0aa1b..8039c5e772fc 100644 --- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java +++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java @@ -2592,9 +2592,7 @@ public ExpansionApi.ExpansionResponse expand(ExpansionApi.ExpansionRequest reque ExpansionApi.ExpansionResponse.newBuilder() .setComponents(components) .setTransform( - components - .getTransformsOrThrow(transformId) - .toBuilder() + components.getTransformsOrThrow(transformId).toBuilder() .setUniqueName(transformId)) .addAllRequirements(requirementsBuilder.build()) .build(); diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/util/CloudObjectsTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/util/CloudObjectsTest.java index 32a9b154f33f..fabacd4b877e 100644 --- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/util/CloudObjectsTest.java +++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/util/CloudObjectsTest.java @@ -202,7 +202,7 @@ public static Iterable> data() { @Test public void toAndFromCloudObject() throws Exception { - CloudObject cloudObject = CloudObjects.asCloudObject(coder, /*sdkComponents=*/ null); + CloudObject cloudObject = CloudObjects.asCloudObject(coder, /* sdkComponents= */ null); Coder fromCloudObject = CloudObjects.coderFromCloudObject(cloudObject); assertEquals(coder.getClass(), fromCloudObject.getClass()); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchDataflowWorker.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchDataflowWorker.java index ebb0b5ff9977..fa6adaa75212 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchDataflowWorker.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchDataflowWorker.java @@ -60,8 +60,10 @@ }) public class BatchDataflowWorker implements Closeable { private static final Logger LOG = LoggerFactory.getLogger(BatchDataflowWorker.class); + /** The idGenerator to generate unique id globally. */ private static final IdGenerator idGenerator = IdGenerators.decrementingLongs(); + /** * Function which converts map tasks to their network representation for execution. * @@ -77,6 +79,7 @@ public class BatchDataflowWorker implements Closeable { .andThen(new MapTaskToNetworkFunction(idGenerator)); private static final int DEFAULT_STATUS_PORT = 8081; + /** * A weight in "bytes" for the overhead of a {@link Weighted} wrapper in the cache. It is just an * approximation so it is OK for it to be fairly arbitrary as long as it is nonzero. @@ -84,28 +87,37 @@ public class BatchDataflowWorker implements Closeable { private static final int OVERHEAD_WEIGHT = 8; private static final long MEGABYTES = 1024 * 1024; + /** * Limit the number of logical references. Weak references may never be cleared if the object is * long lived irrespective if the user actually is interested in the key lookup anymore. 
*/ private static final int MAX_LOGICAL_REFERENCES = 1_000_000; + /** How many concurrent write operations to a cache should we allow. */ private static final int CACHE_CONCURRENCY_LEVEL = 4 * Runtime.getRuntime().availableProcessors(); + /** A client to get and update work items. */ private final WorkUnitClient workUnitClient; + /** * Pipeline options, initially provided via the constructor and partially provided via each work * work unit. */ private final DataflowWorkerHarnessOptions options; + /** The factory to create {@link DataflowMapTaskExecutor DataflowMapTaskExecutors}. */ private final DataflowMapTaskExecutorFactory mapTaskExecutorFactory; + /** Registry of known {@link ReaderFactory ReaderFactories}. */ private final ReaderRegistry readerRegistry = ReaderRegistry.defaultRegistry(); + /** Registry of known {@link SinkFactory SinkFactories}. */ private final SinkRegistry sinkRegistry = SinkRegistry.defaultRegistry(); + /** A side input cache shared between all execution contexts. */ private final Cache> sideInputDataCache; + /** * A side input cache shared between all execution contexts. This cache is meant to store values * as weak references. This allows for insertion of logical keys with zero weight since they will @@ -116,8 +128,10 @@ public class BatchDataflowWorker implements Closeable { private final Function> mapTaskToNetwork; private final MemoryMonitor memoryMonitor; private final Thread memoryMonitorThread; + /** Status pages returning health of worker. */ private final WorkerStatusPages statusPages; + /** Periodic sender of debug information to the debug capture service. */ private DebugCapture.Manager debugCaptureManager = null; diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowMapTaskExecutor.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowMapTaskExecutor.java index 96db026fbdb5..0a3c8b6d1b82 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowMapTaskExecutor.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowMapTaskExecutor.java @@ -27,7 +27,9 @@ public abstract class DataflowMapTaskExecutor extends MapTaskExecutor implements DataflowWorkExecutor { - /** @deprecated subclasses should move to composition instead of inheritance, make this private */ + /** + * @deprecated subclasses should move to composition instead of inheritance, make this private + */ @Deprecated protected DataflowMapTaskExecutor( List operations, diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowPortabilityPCollectionView.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowPortabilityPCollectionView.java index ab3464c4ce53..88d35c3a1aef 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowPortabilityPCollectionView.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowPortabilityPCollectionView.java @@ -113,7 +113,8 @@ public MultimapView apply(MultimapView o) { public TypeDescriptor> getTypeDescriptor() { throw new UnsupportedOperationException(); } - }; + } + ; @Override public WindowMappingFn getWindowMappingFn() { diff --git 
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DoFnInstanceManagers.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DoFnInstanceManagers.java index a956f66aec52..32a8445d2211 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DoFnInstanceManagers.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DoFnInstanceManagers.java @@ -32,6 +32,7 @@ }) public class DoFnInstanceManagers { private static final Logger LOG = LoggerFactory.getLogger(DoFnInstanceManagers.class); + /** * Returns a {@link DoFnInstanceManager} that returns {@link DoFnInfo} instances obtained by * deserializing the provided bytes. {@link DoFnInstanceManager} will call {@link DoFn.Setup} as diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/GroupAlsoByWindowFnRunner.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/GroupAlsoByWindowFnRunner.java index 4845bb0c98e4..a33d9fac6657 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/GroupAlsoByWindowFnRunner.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/GroupAlsoByWindowFnRunner.java @@ -38,6 +38,7 @@ public class GroupAlsoByWindowFnRunner implements DoFnRunner { private final PipelineOptions options; + /** The {@link GroupAlsoByWindowFn} being run. */ private final GroupAlsoByWindowFn fn; diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReader.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReader.java index 687cbe1d7830..cabdbe1b822d 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReader.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReader.java @@ -129,8 +129,10 @@ public class IsmSideInputReader implements SideInputReader { private final ExecutorService executorService; private final Set> singletonMaterializedTags; + /** A map from tuple tag to non-empty IsmReaders. */ @VisibleForTesting final Map, List>> tagToIsmReaderMap; + /** * A map from tuple tag to empty IsmReaders. Even though this is unused, we want to maintain a * strong reference so that it is retained in memory so the logical reference cache keeps a diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/OperationalLimits.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/OperationalLimits.java index 2a42e3cfa3f9..3bcbea3512bd 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/OperationalLimits.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/OperationalLimits.java @@ -27,8 +27,10 @@ public abstract class OperationalLimits { // Maximum size of a commit from a single work item. public abstract long getMaxWorkItemCommitBytes(); + // Maximum size of a single output element's serialized key. 
public abstract long getMaxOutputKeyBytes(); + // Maximum size of a single output element's serialized value. public abstract long getMaxOutputValueBytes(); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/PubsubReader.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/PubsubReader.java index b60cb84415ff..d85813312cf0 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/PubsubReader.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/PubsubReader.java @@ -98,8 +98,7 @@ public NativeReader create( (SimpleFunction) SerializableUtils.deserializeFromByteArray(attributesFnBytes, "serialized fn info"); } - @Nullable - ValueProvider skipUndecodableElements = + @Nullable ValueProvider skipUndecodableElements = (options != null) ? options .as(DataflowStreamingPipelineOptions.class) diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/RemoveSafeDeltaCounterCell.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/RemoveSafeDeltaCounterCell.java index 657aad420669..b58721127d39 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/RemoveSafeDeltaCounterCell.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/RemoveSafeDeltaCounterCell.java @@ -33,6 +33,7 @@ public class RemoveSafeDeltaCounterCell implements Counter, MetricCell { private final MetricName metricName; + /** * This class does not own {@code countersMap} and only operates on a single key in the map * specified by {@code metricName}. These opeations include the {@link Counter} interface along diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/SimpleParDoFn.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/SimpleParDoFn.java index 434d46c20a5b..b17d00c19f5c 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/SimpleParDoFn.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/SimpleParDoFn.java @@ -538,8 +538,7 @@ private Instant earliestAllowableCleanupTime( * finished. 
*/ @VisibleForTesting - @Nullable - DoFnInfo getDoFnInfo() { + @Nullable DoFnInfo getDoFnInfo() { return fnInfo; } } diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java index f5e5adab1556..2582ed8acaf6 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java @@ -173,6 +173,7 @@ public final class StreamingDataflowWorker { @SuppressWarnings("unused") private static final String STREAMING_ENGINE_USE_JOB_SETTINGS_FOR_HEARTBEAT_POOL_EXPERIMENT = "streaming_engine_use_job_settings_for_heartbeat_pool"; + // Experiment make the monitor within BoundedQueueExecutor fair public static final String BOUNDED_QUEUE_EXECUTOR_USE_FAIR_MONITOR_EXPERIMENT = "windmill_bounded_queue_executor_use_fair_monitor"; diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContext.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContext.java index f75d452b211b..6bb2cb6d6d86 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContext.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContext.java @@ -129,6 +129,7 @@ public class StreamingModeExecutionContext extends DataflowExecutionContext, Map>> sideInputCache; private WindmillTagEncoding windmillTagEncoding; + /** * The current user-facing key for this execution context. 
* @@ -907,8 +908,7 @@ private void onUserTimerModified(TimerData timerData) { } private boolean isTimerUnmodified(TimerData timerData) { - @Nullable - TimerData updatedTimer = + @Nullable TimerData updatedTimer = modifiedUserTimerKeys.get( WindmillTimerInternals.getTimerDataKey(timerData), timerData.getNamespace()); return updatedTimer == null || updatedTimer.equals(timerData); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingStepMetricsContainer.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingStepMetricsContainer.java index 7aa3d0756714..bee998ecc48a 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingStepMetricsContainer.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingStepMetricsContainer.java @@ -335,6 +335,7 @@ public static void setEnablePerWorkerMetrics(Boolean enablePerWorkerMetrics) { public static boolean getEnablePerWorkerMetrics() { return StreamingStepMetricsContainer.enablePerWorkerMetrics; } + /** * Updates {@code perWorkerCountersByFirstStaleTime} with the current zero-valued metrics and * removes metrics that have been stale for longer than {@code maximumPerWorkerCounterStaleness} diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/UngroupedWindmillReader.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/UngroupedWindmillReader.java index 0b883c048462..c82c125407ad 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/UngroupedWindmillReader.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/UngroupedWindmillReader.java @@ -93,8 +93,7 @@ public NativeReader create( throws Exception { Coder> typedCoder = (Coder>) checkArgumentNotNull(coder); - @Nullable - ValueProvider skipUndecodableElements = + @Nullable ValueProvider skipUndecodableElements = options != null ? options .as(DataflowStreamingPipelineOptions.class) diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindowingWindmillReader.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindowingWindmillReader.java index 173b254f6395..88dcd661c07b 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindowingWindmillReader.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindowingWindmillReader.java @@ -102,8 +102,7 @@ public NativeReader create( @SuppressWarnings("unchecked") Coder>> typedCoder = (Coder>>) checkArgumentNotNull(coder); - @Nullable - ValueProvider skipUndecodableElements = + @Nullable ValueProvider skipUndecodableElements = (options != null) ? 
options .as(DataflowStreamingPipelineOptions.class) diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/counters/Counter.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/counters/Counter.java index 62ffd855812b..0d7f02e551e2 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/counters/Counter.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/counters/Counter.java @@ -64,50 +64,35 @@ UpdateT extractUpdate( *

This class is essentially a visitor for counter values. */ public interface CounterUpdateExtractor { - @Nullable - UpdateT longSum(CounterName name, boolean delta, Long value); + @Nullable UpdateT longSum(CounterName name, boolean delta, Long value); - @Nullable - UpdateT longMin(CounterName name, boolean delta, Long value); + @Nullable UpdateT longMin(CounterName name, boolean delta, Long value); - @Nullable - UpdateT longMax(CounterName name, boolean delta, Long value); + @Nullable UpdateT longMax(CounterName name, boolean delta, Long value); - @Nullable - UpdateT longMean(CounterName name, boolean delta, CounterMean value); + @Nullable UpdateT longMean(CounterName name, boolean delta, CounterMean value); - @Nullable - UpdateT intSum(CounterName name, boolean delta, Integer value); + @Nullable UpdateT intSum(CounterName name, boolean delta, Integer value); - @Nullable - UpdateT intMin(CounterName name, boolean delta, Integer value); + @Nullable UpdateT intMin(CounterName name, boolean delta, Integer value); - @Nullable - UpdateT intMax(CounterName name, boolean delta, Integer value); + @Nullable UpdateT intMax(CounterName name, boolean delta, Integer value); - @Nullable - UpdateT intMean(CounterName name, boolean delta, CounterMean value); + @Nullable UpdateT intMean(CounterName name, boolean delta, CounterMean value); - @Nullable - UpdateT doubleSum(CounterName name, boolean delta, Double value); + @Nullable UpdateT doubleSum(CounterName name, boolean delta, Double value); - @Nullable - UpdateT doubleMin(CounterName name, boolean delta, Double value); + @Nullable UpdateT doubleMin(CounterName name, boolean delta, Double value); - @Nullable - UpdateT doubleMax(CounterName name, boolean delta, Double value); + @Nullable UpdateT doubleMax(CounterName name, boolean delta, Double value); - @Nullable - UpdateT doubleMean(CounterName name, boolean delta, CounterMean value); + @Nullable UpdateT doubleMean(CounterName name, boolean delta, CounterMean value); - @Nullable - UpdateT boolOr(CounterName name, boolean delta, Boolean value); + @Nullable UpdateT boolOr(CounterName name, boolean delta, Boolean value); - @Nullable - UpdateT boolAnd(CounterName name, boolean delta, Boolean value); + @Nullable UpdateT boolAnd(CounterName name, boolean delta, Boolean value); - @Nullable - UpdateT distribution(CounterName name, boolean delta, CounterDistribution value); + @Nullable UpdateT distribution(CounterName name, boolean delta, CounterDistribution value); } ////////////////////////////////////////////////////////////////////////////// diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/LengthPrefixUnknownCoders.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/LengthPrefixUnknownCoders.java index ff90c6460185..c3c9c100a134 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/LengthPrefixUnknownCoders.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/LengthPrefixUnknownCoders.java @@ -168,7 +168,7 @@ static Map forCodec( if (LENGTH_PREFIX_CODER_TYPE.equals(coderType)) { if (replaceWithByteArrayCoder) { return CloudObjects.asCloudObject( - LENGTH_PREFIXED_BYTE_ARRAY_CODER, /*sdkComponents=*/ null); + LENGTH_PREFIXED_BYTE_ARRAY_CODER, /* sdkComponents= */ null); } return codec; } else if (WELL_KNOWN_CODER_TYPES.contains(coderType)) { @@ -190,7 +190,8 @@ static 
Map forCodec( // Wrap unknown coders with length prefix coder. if (replaceWithByteArrayCoder) { - return CloudObjects.asCloudObject(LENGTH_PREFIXED_BYTE_ARRAY_CODER, /*sdkComponents=*/ null); + return CloudObjects.asCloudObject( + LENGTH_PREFIXED_BYTE_ARRAY_CODER, /* sdkComponents= */ null); } else { Map prefixedCodec = new HashMap<>(); prefixedCodec.put(PropertyNames.OBJECT_TYPE_NAME, LENGTH_PREFIX_CODER_TYPE); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandler.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandler.java index 62057c22b8d4..ee10ec7f5d7f 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandler.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandler.java @@ -135,6 +135,7 @@ protected Object handleGetObject(@Nonnull String s) { return Iterators.asEnumeration(Iterators.singletonIterator(LEVEL_KEY)); } } + // Since there are just a couple possible levels, we cache them. private static final ConcurrentHashMap resourceBundles = new ConcurrentHashMap<>(); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingInitializer.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingInitializer.java index 0f1b5c2750bc..92647340c1bf 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingInitializer.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingInitializer.java @@ -140,7 +140,8 @@ public synchronized void error(String msg, Exception ex, int code) { ex.printStackTrace(stream); } } - }; + } + ; private static DataflowWorkerLoggingHandler makeLoggingHandler( String filepathProperty, String defaultFilePath) throws IOException { diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/streaming/harness/StreamingCounters.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/streaming/harness/StreamingCounters.java index c2c4e719a660..73d70bdbb49a 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/streaming/harness/StreamingCounters.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/streaming/harness/StreamingCounters.java @@ -66,6 +66,7 @@ public static StreamingCounters create() { public abstract CounterSet pendingDeltaCounters(); public abstract CounterSet pendingCumulativeCounters(); + // Built-in delta counters. public abstract Counter windmillShuffleBytesRead(); @@ -74,6 +75,7 @@ public static StreamingCounters create() { public abstract Counter windmillStateBytesWritten(); public abstract Counter timeAtMaxActiveThreads(); + // Built-in cumulative counters. 
public abstract Counter javaHarnessUsedMemory(); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/MemoryMonitor.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/MemoryMonitor.java index e0288e3433eb..292383746967 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/MemoryMonitor.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/MemoryMonitor.java @@ -670,8 +670,11 @@ private static File getLoggingDir() { * repeated dumps. These files can be of comparable size to the local disk. */ public File dumpHeap() - throws MalformedObjectNameException, InstanceNotFoundException, ReflectionException, - MBeanException, IOException { + throws MalformedObjectNameException, + InstanceNotFoundException, + ReflectionException, + MBeanException, + IOException { return dumpHeap(localDumpFolder); } @@ -737,8 +740,11 @@ private void uploadJfrProfile(InputStream data) { */ @VisibleForTesting static synchronized File dumpHeap(File directory) - throws MalformedObjectNameException, InstanceNotFoundException, ReflectionException, - MBeanException, IOException { + throws MalformedObjectNameException, + InstanceNotFoundException, + ReflectionException, + MBeanException, + IOException { boolean liveObjectsOnly = false; File fileName = new File(directory, "heap_dump.hprof"); if (fileName.exists() && !fileName.delete()) { diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/BatchingShuffleEntryReader.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/BatchingShuffleEntryReader.java index f6918267f0b4..80edab9df3e9 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/BatchingShuffleEntryReader.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/BatchingShuffleEntryReader.java @@ -60,6 +60,7 @@ private final class ShuffleReadIterator implements Reiterator { /** The most recently read batch. */ ShuffleBatchReader.@Nullable Batch currentBatch; + /** An iterator over the most recently read batch. */ private @Nullable ListIterator entries; diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/GroupingTables.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/GroupingTables.java index 40fd498a1da2..987d59d4d31e 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/GroupingTables.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/GroupingTables.java @@ -150,6 +150,7 @@ public interface Combiner { // How many bytes a word in the JVM has. 
static final int BYTES_PER_JVM_WORD = getBytesPerJvmWord(); + /** * The number of bytes of overhead to store an entry in the grouping table (a {@code * HashMap}), ignoring the actual number of bytes in the keys diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/ShuffleReadCounter.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/ShuffleReadCounter.java index 58cfae8eeb66..8e319096017a 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/ShuffleReadCounter.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/ShuffleReadCounter.java @@ -36,6 +36,7 @@ public class ShuffleReadCounter { public CounterSet counterSet; private Counter currentCounter; + /** * Counter to increment with the bytes read from the underlying shuffle iterator, or null if no * counting is needed. diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/WindmillStreamPool.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/WindmillStreamPool.java index f14fc40fdfdf..450ceab9b08c 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/WindmillStreamPool.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/WindmillStreamPool.java @@ -44,7 +44,9 @@ public class WindmillStreamPool { private final Duration streamTimeout; private final Supplier streamSupplier; - /** @implNote Size of streams never changes once initialized. */ + /** + * @implNote Size of streams never changes once initialized. 
+ */ private final List<@Nullable StreamData> streams; @GuardedBy("this") diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/getdata/ApplianceGetDataClient.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/getdata/ApplianceGetDataClient.java index 1c747edc1dea..7c5ddf794ae1 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/getdata/ApplianceGetDataClient.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/getdata/ApplianceGetDataClient.java @@ -137,8 +137,7 @@ private void issueReadBatch(ReadBatch batch) { boolean hadInvalidResponse = false; for (Windmill.ComputationGetDataResponse computationResponse : response.getDataList()) { for (Windmill.KeyedGetDataResponse keyResponse : computationResponse.getDataList()) { - @Nullable - SettableFuture responseFuture = + @Nullable SettableFuture responseFuture = pendingResponses.get( WindmillComputationKey.create( computationResponse.getComputationId(), diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcDirectGetWorkStream.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcDirectGetWorkStream.java index de8ebf14b709..8fdcfc62efdc 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcDirectGetWorkStream.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcDirectGetWorkStream.java @@ -249,8 +249,7 @@ protected synchronized void onFlushPending(boolean isNewStream) StreamingGetWorkRequest request = StreamingGetWorkRequest.newBuilder() .setRequest( - requestHeader - .toBuilder() + requestHeader.toBuilder() .setMaxItems(initialGetWorkBudget.items()) .setMaxBytes(initialGetWorkBudget.bytes()) .build()) diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcWindmillStreamFactory.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcWindmillStreamFactory.java index 244d2ad3fa14..2f7f1e7a9fbe 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcWindmillStreamFactory.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcWindmillStreamFactory.java @@ -134,7 +134,9 @@ private GrpcWindmillStreamFactory( this.executorServiceSupplier = executorServiceSupplier; } - /** @implNote Used for {@link AutoBuilder} {@link Builder} class, do not call directly. */ + /** + * @implNote Used for {@link AutoBuilder} {@link Builder} class, do not call directly. 
+ */ static GrpcWindmillStreamFactory create( JobHeader jobHeader, int logEveryNStreamFailures, diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/observers/DirectStreamObserver.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/observers/DirectStreamObserver.java index 08f2778e5aca..14be4005aae5 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/observers/DirectStreamObserver.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/observers/DirectStreamObserver.java @@ -174,7 +174,9 @@ public void onNext(T value) { } } - /** @throws IllegalStateException if called multiple times or after {@link #onCompleted()}. */ + /** + * @throws IllegalStateException if called multiple times or after {@link #onCompleted()}. + */ @Override public void onError(Throwable t) { isReadyNotifier.forceTermination(); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/stubs/FailoverChannel.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/stubs/FailoverChannel.java index faa08c497c8f..f9f16a4d473a 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/stubs/FailoverChannel.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/stubs/FailoverChannel.java @@ -80,16 +80,20 @@ private static final class FailoverState { // Set when primary's connection state has been unavailable for too long. @GuardedBy("this") boolean useFallbackDueToState; + // Set when an RPC on primary fails with an error. @GuardedBy("this") boolean useFallbackDueToRPC; + // Timestamp when RPC-based fallback was triggered. Only meaningful when useFallbackDueToRPC // is true. @GuardedBy("this") long lastRPCFallbackTimeNanos; + // Time when primary first became not-ready. -1 when primary is currently READY. @GuardedBy("this") long primaryNotReadySinceNanos = -1; + // Time when the first consecutive RPC failure was observed. -1 when no failure streak. 
@GuardedBy("this") long firstRPCFailureSinceNanos = -1; diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/PagingIterable.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/PagingIterable.java index 73f076d92013..3c68704af985 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/PagingIterable.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/PagingIterable.java @@ -105,10 +105,10 @@ protected ResultT computeNext() { currentPage = valuesAndContPosition.getValues().iterator(); StateTag.Builder nextPageBuilder = StateTag.of( - nextPagePos.getKind(), - nextPagePos.getTag(), - nextPagePos.getStateFamily(), - valuesAndContPosition.getContinuationPosition()) + nextPagePos.getKind(), + nextPagePos.getTag(), + nextPagePos.getStateFamily(), + valuesAndContPosition.getContinuationPosition()) .toBuilder(); if (secondPagePos.getSortedListRange() != null) { nextPageBuilder.setSortedListRange(secondPagePos.getSortedListRange()); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/ToIterableFunction.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/ToIterableFunction.java index 7e164df2245b..295d8cbe295f 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/ToIterableFunction.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/ToIterableFunction.java @@ -31,6 +31,7 @@ public class ToIterableFunction implements Function, Iterable> { private final StateTag stateTag; private final Coder coder; + /** * Reader to request continuation pages from, or {@literal null} if no continuation pages * required. @@ -58,10 +59,10 @@ public Iterable apply( // Return an iterable which knows how to come back for more. StateTag.Builder continuationTBuilder = StateTag.of( - stateTag.getKind(), - stateTag.getTag(), - stateTag.getStateFamily(), - valuesAndContPosition.getContinuationPosition()) + stateTag.getKind(), + stateTag.getTag(), + stateTag.getStateFamily(), + valuesAndContPosition.getContinuationPosition()) .toBuilder(); if (stateTag.getSortedListRange() != null) { continuationTBuilder.setSortedListRange(stateTag.getSortedListRange()); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillBag.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillBag.java index db1f3e7a6dec..1f4bb10d0042 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillBag.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillBag.java @@ -48,6 +48,7 @@ public class WindmillBag extends SimpleWindmillState implements BagState { private final Coder elemCoder; private boolean cleared = false; + /** * If non-{@literal null}, this contains the complete contents of the bag, except for any local * additions. 
If {@literal null} then we don't know if Windmill contains additional values which diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillStateInternals.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillStateInternals.java index db036bee43c3..cff03e914a19 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillStateInternals.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillStateInternals.java @@ -44,6 +44,7 @@ public class WindmillStateInternals implements StateInternals { @VisibleForTesting static final ThreadLocal> COMPACT_NOW = ThreadLocal.withInitial(ShouldCompactNowFn::new); + /** * The key will be null when not in a keyed context, from the users perspective. There is still a * "key" for the Windmill computation, but it cannot be meaningfully deserialized. diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillStateReader.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillStateReader.java index c609bed4eae0..23d54597df18 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillStateReader.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillStateReader.java @@ -219,8 +219,7 @@ public Future>> orderedListFuture( Range range, ByteString encodedTag, String stateFamily, Coder elemCoder) { // First request has no continuation position. StateTag stateTag = - StateTag.of(StateTag.Kind.ORDERED_LIST, encodedTag, stateFamily) - .toBuilder() + StateTag.of(StateTag.Kind.ORDERED_LIST, encodedTag, stateFamily).toBuilder() .setSortedListRange(Preconditions.checkNotNull(range)) .build(); return valuesToPagingIterableFuture(stateTag, elemCoder, this.stateFuture(stateTag, elemCoder)); @@ -229,8 +228,7 @@ public Future>> orderedListFuture( public Future>>> multimapFetchAllFuture( boolean omitValues, ByteString encodedTag, String stateFamily, Coder elemCoder) { StateTag stateTag = - StateTag.of(Kind.MULTIMAP_ALL, encodedTag, stateFamily) - .toBuilder() + StateTag.of(Kind.MULTIMAP_ALL, encodedTag, stateFamily).toBuilder() .setOmitValues(omitValues) .build(); return valuesToPagingIterableFuture(stateTag, elemCoder, this.stateFuture(stateTag, elemCoder)); @@ -239,8 +237,7 @@ public Future>>> multimapFetchAll public Future> multimapFetchSingleEntryFuture( ByteString encodedKey, ByteString encodedTag, String stateFamily, Coder elemCoder) { StateTag stateTag = - StateTag.of(Kind.MULTIMAP_SINGLE_ENTRY, encodedTag, stateFamily) - .toBuilder() + StateTag.of(Kind.MULTIMAP_SINGLE_ENTRY, encodedTag, stateFamily).toBuilder() .setMultimapKey(encodedKey) .build(); return valuesToPagingIterableFuture(stateTag, elemCoder, this.stateFuture(stateTag, elemCoder)); @@ -665,10 +662,10 @@ private void consumeResponse(KeyedGetDataResponse response, Set> toF // First check if it's keys()/entries() StateTag.Builder builder = StateTag.of( - Kind.MULTIMAP_ALL, - tagMultimap.getTag(), - tagMultimap.getStateFamily(), - tagMultimap.hasRequestPosition() ? 
tagMultimap.getRequestPosition() : null) + Kind.MULTIMAP_ALL, + tagMultimap.getTag(), + tagMultimap.getStateFamily(), + tagMultimap.hasRequestPosition() ? tagMultimap.getRequestPosition() : null) .toBuilder(); StateTag tag = builder.setOmitValues(true).build(); if (toFetch.contains(tag)) { @@ -687,7 +684,7 @@ private void consumeResponse(KeyedGetDataResponse response, Set> toF // this is get() StateTag.Builder entryTagBuilder = StateTag.of( - Kind.MULTIMAP_SINGLE_ENTRY, tagMultimap.getTag(), tagMultimap.getStateFamily()) + Kind.MULTIMAP_SINGLE_ENTRY, tagMultimap.getTag(), tagMultimap.getStateFamily()) .toBuilder(); StateTag entryTag = null; for (Windmill.TagMultimapEntry entry : tagMultimap.getEntriesList()) { diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillValue.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillValue.java index 772eece0b598..e545b8b86cba 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillValue.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillValue.java @@ -41,8 +41,10 @@ public class WindmillValue extends SimpleWindmillState implements ValueState< /** Whether we've modified the value since creation of this state. */ private boolean modified = false; + /** Whether the in memory value is the true value. */ private boolean valueIsKnown = false; + /** The size of the encoded value */ private long cachedSize = -1; diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillWatermarkHold.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillWatermarkHold.java index b426b96cb5b9..c360b1fa2a48 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillWatermarkHold.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/state/WindmillWatermarkHold.java @@ -46,6 +46,7 @@ public class WindmillWatermarkHold extends WindmillState implements WatermarkHol private final String stateFamily; private boolean cleared = false; + /** * If non-{@literal null}, the known current hold value, or absent if we know there are no output * watermark holds. 
If {@literal null}, the current hold value could depend on holds in Windmill diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/AvroByteReaderFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/AvroByteReaderFactoryTest.java index 0c8a3fe36ae4..6ae0611744cb 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/AvroByteReaderFactoryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/AvroByteReaderFactoryTest.java @@ -77,7 +77,10 @@ public void testCreatePlainAvroByteReader() throws Exception { WindowedValues.getFullCoder(BigEndianIntegerCoder.of(), GlobalWindow.Coder.INSTANCE); NativeReader reader = runTestCreateAvroReader( - pathToAvroFile, null, null, CloudObjects.asCloudObject(coder, /*sdkComponents=*/ null)); + pathToAvroFile, + null, + null, + CloudObjects.asCloudObject(coder, /* sdkComponents= */ null)); assertThat(reader, new IsInstanceOf(AvroByteReader.class)); AvroByteReader avroReader = (AvroByteReader) reader; @@ -93,7 +96,10 @@ public void testCreateRichAvroByteReader() throws Exception { WindowedValues.getFullCoder(BigEndianIntegerCoder.of(), GlobalWindow.Coder.INSTANCE); NativeReader reader = runTestCreateAvroReader( - pathToAvroFile, 200L, 500L, CloudObjects.asCloudObject(coder, /*sdkComponents=*/ null)); + pathToAvroFile, + 200L, + 500L, + CloudObjects.asCloudObject(coder, /* sdkComponents= */ null)); assertThat(reader, new IsInstanceOf(AvroByteReader.class)); AvroByteReader avroReader = (AvroByteReader) reader; diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ConcatReaderFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ConcatReaderFactoryTest.java index 867ff5140a8b..bc80f9f26ac9 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ConcatReaderFactoryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ConcatReaderFactoryTest.java @@ -59,7 +59,7 @@ Source createSourcesWithInMemorySources(List> allData) { inMemorySourceDictionary.put(PropertyNames.SOURCE_SPEC, inMemorySourceSpec); CloudObject textSourceEncoding = - CloudObjects.asCloudObject(StringUtf8Coder.of(), /*sdkComponents=*/ null); + CloudObjects.asCloudObject(StringUtf8Coder.of(), /* sdkComponents= */ null); inMemorySourceDictionary.put(PropertyNames.ENCODING, textSourceEncoding); sourcesList.add(inMemorySourceDictionary); diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/InMemoryReaderFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/InMemoryReaderFactoryTest.java index e50caa1e0913..4a33ac405df8 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/InMemoryReaderFactoryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/InMemoryReaderFactoryTest.java @@ -57,7 +57,7 @@ static Source createInMemoryCloudSource( Source cloudSource = new Source(); cloudSource.setSpec(spec); - cloudSource.setCodec(CloudObjects.asCloudObject(coder, /*sdkComponents=*/ null)); + cloudSource.setCodec(CloudObjects.asCloudObject(coder, /* 
sdkComponents= */ null)); return cloudSource; } diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IntrinsicMapTaskExecutorFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IntrinsicMapTaskExecutorFactoryTest.java index 3443ae0022bc..72c612c4d7f9 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IntrinsicMapTaskExecutorFactoryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IntrinsicMapTaskExecutorFactoryTest.java @@ -135,7 +135,7 @@ public class IntrinsicMapTaskExecutorFactoryTest { private static final CloudObject windowedStringCoder = CloudObjects.asCloudObject( - WindowedValues.getValueOnlyCoder(StringUtf8Coder.of()), /*sdkComponents=*/ null); + WindowedValues.getValueOnlyCoder(StringUtf8Coder.of()), /* sdkComponents= */ null); private IntrinsicMapTaskExecutorFactory mapTaskExecutorFactory; private PipelineOptions options; @@ -581,14 +581,14 @@ static ParallelInstruction createPartialGroupByKeyInstruction( FullWindowedValueCoder.of( KvCoder.of(StringUtf8Coder.of(), BigEndianIntegerCoder.of()), IntervalWindowCoder.of()), - /*sdkComponents=*/ null)); + /* sdkComponents= */ null)); InstructionOutput output = new InstructionOutput(); output.setName("pgbk_output_name"); output.setCodec( CloudObjects.asCloudObject( KvCoder.of(StringUtf8Coder.of(), IterableCoder.of(BigEndianIntegerCoder.of())), - /*sdkComponents=*/ null)); + /* sdkComponents= */ null)); output.setOriginalName("originalName"); output.setSystemName("systemName"); @@ -721,7 +721,7 @@ static ParallelInstruction createFlattenInstruction( InstructionOutput output = new InstructionOutput(); output.setName("flatten_output_name"); - output.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /*sdkComponents=*/ null)); + output.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /* sdkComponents= */ null)); output.setOriginalName("originalName"); output.setSystemName("systemName"); diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java index 06e089807299..0cfb578878a4 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java @@ -1773,7 +1773,7 @@ private Source newIsmSource(IsmRecordCoder> coder, String t Source source = new Source(); source.setCodec( CloudObjects.asCloudObject( - WindowedValues.getFullCoder(coder, GLOBAL_WINDOW_CODER), /*sdkComponents=*/ null)); + WindowedValues.getFullCoder(coder, GLOBAL_WINDOW_CODER), /* sdkComponents= */ null)); source.setSpec(new HashMap()); source.getSpec().put(PropertyNames.OBJECT_TYPE_NAME, "IsmSource"); source.getSpec().put(WorkerPropertyNames.FILENAME, tmpFilePath); diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ReaderFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ReaderFactoryTest.java index 3af59d67e3dd..5797491fd7d5 100644 --- 
a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ReaderFactoryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ReaderFactoryTest.java @@ -128,7 +128,7 @@ public void testCreateReader() throws Exception { Source cloudSource = new Source(); cloudSource.setSpec(spec); cloudSource.setCodec( - CloudObjects.asCloudObject(BigEndianIntegerCoder.of(), /*sdkComponents=*/ null)); + CloudObjects.asCloudObject(BigEndianIntegerCoder.of(), /* sdkComponents= */ null)); PipelineOptions options = PipelineOptionsFactory.create(); ReaderRegistry registry = @@ -148,7 +148,8 @@ public void testCreateUnknownReader() throws Exception { CloudObject spec = CloudObject.forClassName("UnknownSource"); Source cloudSource = new Source(); cloudSource.setSpec(spec); - cloudSource.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /*sdkComponents=*/ null)); + cloudSource.setCodec( + CloudObjects.asCloudObject(StringUtf8Coder.of(), /* sdkComponents= */ null)); try { PipelineOptions options = PipelineOptionsFactory.create(); ReaderRegistry.defaultRegistry() diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ShuffleReaderFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ShuffleReaderFactoryTest.java index c1d87fb6f78c..6234d37a92ae 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ShuffleReaderFactoryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ShuffleReaderFactoryTest.java @@ -90,7 +90,7 @@ void runTestCreateUngroupedShuffleReader( shuffleReaderConfig, start, end, - CloudObjects.asCloudObject(coder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(coder, /* sdkComponents= */ null), BatchModeExecutionContext.forTesting(PipelineOptionsFactory.create(), "testStage"), UngroupedShuffleReader.class, "UngroupedShuffleSource"); @@ -118,7 +118,7 @@ void runTestCreateGroupingShuffleReader( CloudObjects.asCloudObject( FullWindowedValueCoder.of( KvCoder.of(keyCoder, IterableCoder.of(valueCoder)), IntervalWindowCoder.of()), - /*sdkComponents=*/ null), + /* sdkComponents= */ null), context, GroupingShuffleReader.class, "GroupingShuffleSource"); @@ -147,7 +147,7 @@ void runTestCreatePartitioningShuffleReader( FullWindowedValueCoder.of( KvCoder.of(keyCoder, windowedValueCoder.getValueCoder()), IntervalWindowCoder.of()), - /*sdkComponents=*/ null), + /* sdkComponents= */ null), BatchModeExecutionContext.forTesting(PipelineOptionsFactory.create(), "testStage"), PartitioningShuffleReader.class, "PartitioningShuffleSource"); diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/SinkRegistryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/SinkRegistryTest.java index 5916e18a8c0e..124ac6f7868c 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/SinkRegistryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/SinkRegistryTest.java @@ -60,7 +60,7 @@ public void testCreateUnknownSink() throws Exception { com.google.api.services.dataflow.model.Sink cloudSink = new com.google.api.services.dataflow.model.Sink(); cloudSink.setSpec(spec); - 
cloudSink.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /*sdkComponents=*/ null)); + cloudSink.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /* sdkComponents= */ null)); try { SinkRegistry.defaultRegistry() .create( diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java index d8a1d1b90d47..2f14b40c987a 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java @@ -3034,7 +3034,7 @@ public void testMaxThreadMetric() throws Exception { .setNameFormat("DataflowWorkUnits-%d") .setDaemon(true) .build(), - /*useFairMonitor=*/ false); + /* useFairMonitor= */ false); ComputationState computationState = new ComputationState( @@ -3095,7 +3095,7 @@ public void testActiveThreadMetric() throws Exception { .setNameFormat("DataflowWorkUnits-%d") .setDaemon(true) .build(), - /*useFairMonitor=*/ false); + /* useFairMonitor= */ false); ComputationState computationState = new ComputationState( @@ -3165,7 +3165,7 @@ public void testOutstandingBytesMetric() throws Exception { .setNameFormat("DataflowWorkUnits-%d") .setDaemon(true) .build(), - /*useFairMonitor=*/ false); + /* useFairMonitor= */ false); ComputationState computationState = new ComputationState( @@ -3239,7 +3239,7 @@ public void testOutstandingBundlesMetric() throws Exception { .setNameFormat("DataflowWorkUnits-%d") .setDaemon(true) .build(), - /*useFairMonitor=*/ false); + /* useFairMonitor= */ false); ComputationState computationState = new ComputationState( @@ -3489,8 +3489,7 @@ public void testExceptionInvalidatesCache() throws Exception { // The commit will include a timer to clean up state - this timer is irrelevant // for the current test. Also remove source_bytes_processed because it's dynamic. 
setValuesTimestamps( - removeDynamicFields(commit) - .toBuilder() + removeDynamicFields(commit).toBuilder() .clearOutputTimers() .clearSourceBytesProcessed()) .build(), diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java index 4bfa6efc8880..56ddc34e6b89 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java @@ -137,7 +137,7 @@ public void setUp() { executionStateRegistry, globalConfigHandle, Long.MAX_VALUE, - /*throwExceptionOnLargeOutput=*/ false); + /* throwExceptionOnLargeOutput= */ false); } private static Work createMockWork(Windmill.WorkItem workItem, Watermarks watermarks) { diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingPCollectionViewWriterDoFnFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingPCollectionViewWriterDoFnFactoryTest.java index fcf7e4d7deb9..f6ccfe97980a 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingPCollectionViewWriterDoFnFactoryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingPCollectionViewWriterDoFnFactoryTest.java @@ -53,7 +53,7 @@ public void testConstruction() throws Exception { CloudObject coder = CloudObjects.asCloudObject( WindowedValues.getFullCoder(BigEndianIntegerCoder.of(), GlobalWindow.Coder.INSTANCE), - /*sdkComponents=*/ null); + /* sdkComponents= */ null); ParDoFn parDoFn = new StreamingPCollectionViewWriterDoFnFactory() .create( diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSourcesTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSourcesTest.java index ce8ad32f71aa..5fb2c269780c 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSourcesTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSourcesTest.java @@ -606,8 +606,8 @@ public void testReadUnboundedReader() throws Exception { counterSet, COMPUTATION_ID, readerCache, - /*stateNameMap=*/ ImmutableMap.of(), - /*stateCache=*/ null, + /* stateNameMap= */ ImmutableMap.of(), + /* stateCache= */ null, StreamingStepMetricsContainer.createRegistry(), new DataflowExecutionStateTracker( ExecutionStateSampler.newForTest(), @@ -619,7 +619,7 @@ public void testReadUnboundedReader() throws Exception { executionStateRegistry, globalConfigHandle, Long.MAX_VALUE, - /*throwExceptionOnLargeOutput=*/ false); + /* throwExceptionOnLargeOutput= */ false); options.setNumWorkers(5); int maxElements = 10; @@ -974,7 +974,7 @@ public void testFailedWorkItemsAbort() throws Exception { counterSet, COMPUTATION_ID, new ReaderCache(Duration.standardMinutes(1), Runnable::run), - /*stateNameMap=*/ ImmutableMap.of(), + /* stateNameMap= */ ImmutableMap.of(), WindmillStateCache.builder() 
.setSizeMb(options.getWorkerCacheMb()) .build() @@ -990,7 +990,7 @@ public void testFailedWorkItemsAbort() throws Exception { executionStateRegistry, globalConfigHandle, Long.MAX_VALUE, - /*throwExceptionOnLargeOutput=*/ false); + /* throwExceptionOnLargeOutput= */ false); options.setNumWorkers(5); int maxElements = 100; diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/graph/LengthPrefixUnknownCodersTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/graph/LengthPrefixUnknownCodersTest.java index c306ccaa7b9c..770d46a984e5 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/graph/LengthPrefixUnknownCodersTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/graph/LengthPrefixUnknownCodersTest.java @@ -97,9 +97,9 @@ public void setup() { @Test public void testLengthPrefixUnknownCoders() throws Exception { Map lengthPrefixedCoderCloudObject = - forCodec(CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null), false); + forCodec(CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null), false); assertEqualsAsJson( - CloudObjects.asCloudObject(prefixedWindowedValueCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(prefixedWindowedValueCoder, /* sdkComponents= */ null), lengthPrefixedCoderCloudObject); } @@ -112,7 +112,7 @@ public void testLengthPrefixForLengthPrefixCoder() throws Exception { GlobalWindow.Coder.INSTANCE); Map lengthPrefixedCoderCloudObject = - forCodec(CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null), false); + forCodec(CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null), false); Coder>> expectedCoder = WindowedValues.getFullCoder( @@ -120,7 +120,7 @@ public void testLengthPrefixForLengthPrefixCoder() throws Exception { GlobalWindow.Coder.INSTANCE); assertEqualsAsJson( - CloudObjects.asCloudObject(expectedCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(expectedCoder, /* sdkComponents= */ null), lengthPrefixedCoderCloudObject); } @@ -132,26 +132,28 @@ public void testLengthPrefixAndReplaceUnknownCoder() throws Exception { KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()), GlobalWindow.Coder.INSTANCE); Map lengthPrefixedCoderCloudObject = - forCodec(CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null), true); + forCodec(CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null), true); assertEqualsAsJson( - CloudObjects.asCloudObject(prefixedAndReplacedWindowedValueCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject( + prefixedAndReplacedWindowedValueCoder, /* sdkComponents= */ null), lengthPrefixedCoderCloudObject); } @Test public void testLengthPrefixInstructionOutputCoder() throws Exception { InstructionOutput output = new InstructionOutput(); - output.setCodec(CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null)); + output.setCodec(CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null)); output.setFactory(new GsonFactory()); InstructionOutput prefixedOutput = forInstructionOutput(output, false); assertEqualsAsJson( - CloudObjects.asCloudObject(prefixedWindowedValueCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(prefixedWindowedValueCoder, /* sdkComponents= */ null), prefixedOutput.getCodec()); // Should not mutate the instruction. 
assertEqualsAsJson( - output.getCodec(), CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null)); + output.getCodec(), + CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null)); } @Test @@ -159,17 +161,17 @@ public void testLengthPrefixReadInstructionCoder() throws Exception { ReadInstruction readInstruction = new ReadInstruction(); readInstruction.setSource( new Source() - .setCodec(CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null))); + .setCodec(CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null))); instruction.setRead(readInstruction); ParallelInstruction prefixedInstruction = forParallelInstruction(instruction, false); assertEqualsAsJson( - CloudObjects.asCloudObject(prefixedWindowedValueCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(prefixedWindowedValueCoder, /* sdkComponents= */ null), prefixedInstruction.getRead().getSource().getCodec()); // Should not mutate the instruction. assertEqualsAsJson( readInstruction.getSource().getCodec(), - CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null)); + CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null)); } @Test @@ -177,16 +179,16 @@ public void testLengthPrefixWriteInstructionCoder() throws Exception { WriteInstruction writeInstruction = new WriteInstruction(); writeInstruction.setSink( new Sink() - .setCodec(CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null))); + .setCodec(CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null))); instruction.setWrite(writeInstruction); ParallelInstruction prefixedInstruction = forParallelInstruction(instruction, false); assertEqualsAsJson( - CloudObjects.asCloudObject(prefixedWindowedValueCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(prefixedWindowedValueCoder, /* sdkComponents= */ null), prefixedInstruction.getWrite().getSink().getCodec()); // Should not mutate the instruction. assertEqualsAsJson( - CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null), writeInstruction.getSink().getCodec()); } @@ -196,17 +198,17 @@ public void testLengthPrefixParDoInstructionCoder() throws Exception { CloudObject spec = CloudObject.forClassName(MERGE_BUCKETS_DO_FN); spec.put( WorkerPropertyNames.INPUT_CODER, - CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null)); + CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null)); parDo.setUserFn(spec); instruction.setParDo(parDo); ParallelInstruction prefixedInstruction = forParallelInstruction(instruction, false); assertEqualsAsJson( - CloudObjects.asCloudObject(prefixedWindowedValueCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(prefixedWindowedValueCoder, /* sdkComponents= */ null), prefixedInstruction.getParDo().getUserFn().get(WorkerPropertyNames.INPUT_CODER)); // Should not mutate the instruction. 
assertEqualsAsJson( - CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null), + CloudObjects.asCloudObject(windowedValueCoder, /* sdkComponents= */ null), parDo.getUserFn().get(WorkerPropertyNames.INPUT_CODER)); } @@ -265,7 +267,7 @@ private static ParallelInstructionNode createReadNode( new ReadInstruction() .setSource( new Source() - .setCodec(CloudObjects.asCloudObject(coder, /*sdkComponents=*/ null)) + .setCodec(CloudObjects.asCloudObject(coder, /* sdkComponents= */ null)) .setSpec(CloudObject.forClassName(readClassName)))); parallelInstruction.setFactory(new GsonFactory()); @@ -276,7 +278,7 @@ private static InstructionOutputNode createInstructionOutputNode(String name, Co InstructionOutput instructionOutput = new InstructionOutput() .setName(name) - .setCodec(CloudObjects.asCloudObject(coder, /*sdkComponents=*/ null)); + .setCodec(CloudObjects.asCloudObject(coder, /* sdkComponents= */ null)); instructionOutput.setFactory(new GsonFactory()); return InstructionOutputNode.create(instructionOutput, "fakeId"); } diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandlerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandlerTest.java index c6a8581cf507..60409b49430f 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandlerTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandlerTest.java @@ -764,7 +764,9 @@ public void testDirectLoggingThrottler() { assertTrue(throttler.shouldAttemptDirectLog()); } - /** @return A throwable with a fixed stack trace. */ + /** + * @return A throwable with a fixed stack trace. 
+ */ private Throwable createThrowable() { Throwable throwable = new Throwable("exception.test.message"); throwable.setStackTrace( diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/streaming/harness/WindmillStreamSenderTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/streaming/harness/WindmillStreamSenderTest.java index 457f75593e23..c34120872f5f 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/streaming/harness/WindmillStreamSenderTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/streaming/harness/WindmillStreamSenderTest.java @@ -107,8 +107,7 @@ public void testStartStream_startsAllStreams() { .createDirectGetWorkStream( eq(connection), eq( - GET_WORK_REQUEST - .toBuilder() + GET_WORK_REQUEST.toBuilder() .setMaxItems(itemBudget) .setMaxBytes(byteBudget) .build()), @@ -138,8 +137,7 @@ public void testStartStream_onlyStartsStreamsOnce() { .createDirectGetWorkStream( eq(connection), eq( - GET_WORK_REQUEST - .toBuilder() + GET_WORK_REQUEST.toBuilder() .setMaxItems(itemBudget) .setMaxBytes(byteBudget) .build()), @@ -172,8 +170,7 @@ public void testStartStream_onlyStartsStreamsOnceConcurrent() throws Interrupted .createDirectGetWorkStream( eq(connection), eq( - GET_WORK_REQUEST - .toBuilder() + GET_WORK_REQUEST.toBuilder() .setMaxItems(itemBudget) .setMaxBytes(byteBudget) .build()), diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/CloudSourceUtilsTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/CloudSourceUtilsTest.java index 43baf71b954f..8ea92f2d50bf 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/CloudSourceUtilsTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/CloudSourceUtilsTest.java @@ -62,7 +62,7 @@ public void testFlattenBaseSpecs() throws Exception { source.getBaseSpecs().add(grandparent); source.getBaseSpecs().add(parent); source.setSpec(child); - source.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /*sdkComponents=*/ null)); + source.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /* sdkComponents= */ null)); Source flat = CloudSourceUtils.flattenBaseSpecs(source); assertNull(flat.getBaseSpecs()); diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/TimerOrElementTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/TimerOrElementTest.java index 45433dc4c5f7..f404bda93de7 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/TimerOrElementTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/TimerOrElementTest.java @@ -56,7 +56,7 @@ public void testCoderCanBeDecodedFromCloudObject() { List component = Collections.singletonList( CloudObjects.asCloudObject( - KvCoder.of(VarLongCoder.of(), ByteArrayCoder.of()), /*sdkComponents=*/ null)); + KvCoder.of(VarLongCoder.of(), ByteArrayCoder.of()), /* sdkComponents= */ null)); Structs.addList(cloudObject, PropertyNames.COMPONENT_ENCODINGS, component); Coder decoded = 
CloudObjects.coderFromCloudObject(cloudObject); diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/common/worker/WorkProgressUpdaterTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/common/worker/WorkProgressUpdaterTest.java index 09b39cb9602d..d957d3c80a36 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/common/worker/WorkProgressUpdaterTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/util/common/worker/WorkProgressUpdaterTest.java @@ -56,8 +56,7 @@ private interface ProgressHelper { boolean shouldCheckpoint(); /** Return the exception that (if not null) will be thrown in {@code reportProgressHelper}. */ - @Nullable - Exception shouldThrow(); + @Nullable Exception shouldThrow(); } private long startTimeMs; diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/FakeWindmillGrpcService.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/FakeWindmillGrpcService.java index 19f8c1578b46..2af91aa1875f 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/FakeWindmillGrpcService.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/FakeWindmillGrpcService.java @@ -66,7 +66,8 @@ public StreamInfo(StreamObserver responseObserver) { public final StreamObserver responseObserver; public final BlockingQueue requests = new LinkedBlockingQueue<>(1000); public final CompletableFuture onDone = new CompletableFuture<>(); - }; + } + ; private final BlockingQueue commitStreams = new LinkedBlockingQueue<>(1000); private final BlockingQueue getDataStreams = new LinkedBlockingQueue<>(1000); diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcCommitWorkStreamTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcCommitWorkStreamTest.java index e9fd55fa5668..ab58734df7ce 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcCommitWorkStreamTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/client/grpc/GrpcCommitWorkStreamTest.java @@ -474,8 +474,7 @@ public void testSend_notCalledAfterShutdown_Multichunk() assertTrue( batcher.commitWorkItem( COMPUTATION_ID, - workItemCommitRequest(0) - .toBuilder() + workItemCommitRequest(0).toBuilder() .addBagUpdates(Windmill.TagBag.newBuilder().setTag(LARGE_BYTE_STRING).build()) .build(), commitStatus -> { diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/processing/StreamingCommitFinalizerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/processing/StreamingCommitFinalizerTest.java index 07b4b14fd115..34eb259635e0 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/processing/StreamingCommitFinalizerTest.java +++ 
b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/processing/StreamingCommitFinalizerTest.java @@ -62,7 +62,7 @@ public void setUp() { .setNameFormat("FinalizationCallback-%d") .setDaemon(true) .build(), - /*useFairMonitor=*/ false); + /* useFairMonitor= */ false); cleanupExecutor = Executors.newScheduledThreadPool( diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/processing/failures/WorkFailureProcessorTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/processing/failures/WorkFailureProcessorTest.java index 68a11895fa12..1613c2841278 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/processing/failures/WorkFailureProcessorTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/processing/failures/WorkFailureProcessorTest.java @@ -64,7 +64,7 @@ private static WorkFailureProcessor createWorkFailureProcessor( .setNameFormat("DataflowWorkUnits-%d") .setDaemon(true) .build(), - /*useFairMonitor=*/ false); + /* useFairMonitor= */ false); return WorkFailureProcessor.forTesting(workExecutor, failureTracker, Optional::empty, clock, 0); } diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/refresh/ActiveWorkRefresherTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/refresh/ActiveWorkRefresherTest.java index 054db878c869..6f4d9816a474 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/refresh/ActiveWorkRefresherTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/work/refresh/ActiveWorkRefresherTest.java @@ -80,7 +80,7 @@ private static BoundedQueueExecutor workExecutor() { 1, 10000000, new ThreadFactoryBuilder().setNameFormat("DataflowWorkUnits-%d").setDaemon(true).build(), - /*useFairMonitor=*/ false); + /* useFairMonitor= */ false); } private static ComputationState createComputationState(int computationIdSuffix) { diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java index 8b403f2f25f0..4b23a0bb2842 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java @@ -277,8 +277,7 @@ public RunnerApi.ArtifactInformation call() throws IOException { chunk = bytesQueue.take(); } dest.getOutputStream().close(); - return originalArtifact - .toBuilder() + return originalArtifact.toBuilder() .setTypeUrn(dest.getTypeUrn()) .setTypePayload(dest.getTypePayload()) .build(); diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/BundleSplitHandler.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/BundleSplitHandler.java index cb03238720c6..beeed0f298a2 100644 --- 
a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/BundleSplitHandler.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/BundleSplitHandler.java @@ -42,5 +42,6 @@ public void split(ProcessBundleSplitResponse splitResponse) { "%s does not support splitting.", BundleSplitHandler.class.getSimpleName())); } }; - }; + } + ; } diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactory.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactory.java index e8ef905f3585..b8f577f96e58 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactory.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactory.java @@ -111,6 +111,7 @@ public class DefaultJobBundleFactory implements JobBundleFactory { private final Semaphore availableCachesSemaphore; private final LinkedBlockingDeque availableCaches; private final boolean loadBalanceBundles; + /** Clients which were evicted due to environment expiration but still had pending references. */ private final Set evictedActiveClients; diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptors.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptors.java index ca0094854a10..1eea683372e5 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptors.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptors.java @@ -382,17 +382,13 @@ private static Map> forTimerSpecs( timerCoder instanceof Timer.Coder, "Expected a timer coder but received %s.", timerCoder); RunnerApi.FunctionSpec.Builder updatedSpec = - components - .getTransformsOrThrow(timerReference.transform().getId()) - .toBuilder() + components.getTransformsOrThrow(timerReference.transform().getId()).toBuilder() .getSpecBuilder(); RunnerApi.ParDoPayload.Builder updatedPayload = RunnerApi.ParDoPayload.parseFrom(updatedSpec.getPayload()).toBuilder(); updatedPayload.putTimerFamilySpecs( timerReference.localName(), - updatedPayload - .getTimerFamilySpecsOrThrow(timerReference.localName()) - .toBuilder() + updatedPayload.getTimerFamilySpecsOrThrow(timerReference.localName()).toBuilder() .setTimerFamilyCoderId(sdkCoderId) .build()); updatedSpec.setPayload(updatedPayload.build().toByteString()); @@ -400,9 +396,7 @@ private static Map> forTimerSpecs( timerReference.transform().getId(), // Since a transform can have more then one timer, update the transform inside components // and not the original - components - .getTransformsOrThrow(timerReference.transform().getId()) - .toBuilder() + components.getTransformsOrThrow(timerReference.transform().getId()).toBuilder() .setSpec(updatedSpec) .build()); diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/data/GrpcDataService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/data/GrpcDataService.java index d4e45c8ccf82..9e64270137b0 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/data/GrpcDataService.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/data/GrpcDataService.java @@ -61,6 
+61,7 @@ public static GrpcDataService create( } private final SettableFuture connectedClient; + /** * A collection of multiplexers which are not used to send data. A handle to these multiplexers is * maintained in order to perform an orderly shutdown. @@ -85,7 +86,9 @@ private GrpcDataService( this.outboundObserverFactory = outboundObserverFactory; } - /** @deprecated This constructor is for migrating Dataflow purpose only. */ + /** + * @deprecated This constructor is for migrating Dataflow purpose only. + */ @Deprecated public GrpcDataService() { this.connectedClient = null; diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcService.java index 53c81d59c7c2..cb28404a475f 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcService.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcService.java @@ -160,8 +160,7 @@ public Map getAllWorkerStatuses(long timeout, TimeUnit timeUnit) synchronized (connectedClient) { connectedClientIdsCopy = ImmutableSet.copyOf(connectedClient.keySet()); } - connectedClientIdsCopy - .parallelStream() + connectedClientIdsCopy.parallelStream() .forEach( workerId -> allStatuses.put(workerId, getSingleWorkerStatus(workerId, timeout, timeUnit))); diff --git a/runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/InMemoryJobService.java b/runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/InMemoryJobService.java index 8413f184115e..9fb6151c88ed 100644 --- a/runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/InMemoryJobService.java +++ b/runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/InMemoryJobService.java @@ -128,10 +128,13 @@ public static InMemoryJobService create( /** Map of preparationId to preparation. */ private final ConcurrentHashMap preparations; + /** Map of preparationId to staging token. */ private final ConcurrentHashMap stagingSessionTokens; + /** Map of invocationId to invocation. */ private final ConcurrentHashMap invocations; + /** InvocationIds of completed invocations in least-recently-completed order. */ private final ConcurrentLinkedDeque completedInvocationsIds; @@ -311,8 +314,7 @@ private RunnerApi.Pipeline resolveDependencies(RunnerApi.Pipeline pipeline, Stri newSubEnvs.add( subEnv.getDependenciesCount() == 0 ? subEnv - : subEnv - .toBuilder() + : subEnv.toBuilder() .clearDependencies() .addAllDependencies(resolvedDependencies.get(i + ":" + entry.getKey())) .build()); diff --git a/runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/PortablePipelineJarCreator.java b/runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/PortablePipelineJarCreator.java index f39ca80b991e..cfbc6e9875c4 100644 --- a/runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/PortablePipelineJarCreator.java +++ b/runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/PortablePipelineJarCreator.java @@ -73,6 +73,7 @@ public class PortablePipelineJarCreator implements PortablePipelineRunner { private final Class mainClass; @VisibleForTesting JarOutputStream outputStream; + /** Wrapper over {@link #outputStream}. 
*/ @VisibleForTesting WritableByteChannel outputChannel; @@ -202,8 +203,7 @@ private RunnerApi.ArtifactInformation writeArtifact( try (InputStream artifactStream = ArtifactRetrievalService.getArtifact(artifact)) { ByteStreams.copy(artifactStream, outputStream); } - return artifact - .toBuilder() + return artifact.toBuilder() .setTypeUrn(ArtifactRetrievalService.FILE_ARTIFACT_URN) .setTypePayload( RunnerApi.ArtifactFilePayload.newBuilder() diff --git a/runners/local-java/src/main/java/org/apache/beam/runners/local/Bundle.java b/runners/local-java/src/main/java/org/apache/beam/runners/local/Bundle.java index 7e0b42e561c3..988714280b50 100644 --- a/runners/local-java/src/main/java/org/apache/beam/runners/local/Bundle.java +++ b/runners/local-java/src/main/java/org/apache/beam/runners/local/Bundle.java @@ -24,8 +24,7 @@ /** An immutable collection of elements which are part of a {@code PCollection}. */ public interface Bundle extends Iterable> { /** Returns the PCollection that the elements of this bundle belong to. */ - @Nullable - CollectionT getPCollection(); + @Nullable CollectionT getPCollection(); /** * Returns the key that was output in the most recent {@code GroupByKey} in the execution of this diff --git a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/PortableRunner.java b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/PortableRunner.java index 5b2db7b18ad7..c29a16e3e5fe 100644 --- a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/PortableRunner.java +++ b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/PortableRunner.java @@ -67,8 +67,10 @@ public class PortableRunner extends PipelineRunner { /** Provided pipeline options. */ private final PipelineOptions options; + /** Job API endpoint. */ private final String endpoint; + /** Channel factory used to create communication channel with job and staging services. */ private final ManagedChannelFactory channelFactory; diff --git a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismRegistrar.java b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismRegistrar.java index 13b7d74fb6ac..e140820b4aab 100644 --- a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismRegistrar.java +++ b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismRegistrar.java @@ -30,6 +30,7 @@ */ public class PrismRegistrar { private PrismRegistrar() {} + /** * Registers {@link PrismRunner} and {@link TestPrismRunner} with {@link PipelineRunnerRegistrar}. */ diff --git a/runners/spark/build.gradle b/runners/spark/build.gradle index 4d058a481820..f2818b41f20c 100644 --- a/runners/spark/build.gradle +++ b/runners/spark/build.gradle @@ -18,6 +18,10 @@ apply plugin: 'com.diffplug.spotless' +repositories { + mavenCentral() +} + /* * This build.gradle file is empty except to configure the spotless task on the java sources. * These java sources are included in the subproject's sources in order to compile against the different versions. 
@@ -26,7 +30,7 @@ apply plugin: 'com.diffplug.spotless' spotless { java { licenseHeader org.apache.beam.gradle.BeamModulePlugin.javaLicenseHeader - googleJavaFormat('1.7') + googleJavaFormat('1.17.0') target project.fileTree(project.projectDir) { include 'src/*/java/**/*.java' } } } \ No newline at end of file diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkNativePipelineVisitor.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkNativePipelineVisitor.java index 638823c8834a..9e3269baac4e 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkNativePipelineVisitor.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkNativePipelineVisitor.java @@ -175,7 +175,9 @@ public String toString() { private String replaceFnString( Class transformClass, String transformString, String fnFieldName) - throws IllegalAccessException, InvocationTargetException, NoSuchMethodException, + throws IllegalAccessException, + InvocationTargetException, + NoSuchMethodException, NoSuchFieldException { Object fn = transformClass.getMethod("get" + StringUtils.capitalize(fnFieldName)).invoke(transform); diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkPipelineOptions.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkPipelineOptions.java index e9c358282132..121ff3dfdb9e 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkPipelineOptions.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkPipelineOptions.java @@ -35,8 +35,7 @@ public interface TestSparkPipelineOptions extends SparkPipelineOptions, TestPipe void setForceStreaming(boolean forceStreaming); @Description("A hard-coded expected number of assertions for this test pipeline.") - @Nullable - Integer getExpectedAssertions(); + @Nullable Integer getExpectedAssertions(); void setExpectedAssertions(Integer expectedAssertions); diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/batch/Aggregators.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/batch/Aggregators.java index 183445642a0b..27727ce16d11 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/batch/Aggregators.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/batch/Aggregators.java @@ -377,8 +377,7 @@ public Collection windows() { @Override public void merge(Collection merges, BoundedWindow target) { - @Nullable - MutablePair merged = + @Nullable MutablePair merged = merges.stream().reduce(null, reduceFn.apply(target), combiner(target)); if (merged != null) { buff.put(target, merged); diff --git a/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java b/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java index e2f236fa0eb6..a6eb52a93db0 100644 --- a/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java +++ b/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java @@ -75,6 +75,7 @@ public class Twister2Runner extends PipelineRunner { private static final String SIDEINPUTS = "sideInputs"; private static final String LEAVES = "leaves"; private static final String GRAPH = "graph"; + /** Provided options. 
*/ private final Twister2PipelineOptions options; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java index d3b58dd26bd2..184052520321 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java @@ -129,6 +129,7 @@ }) public class Pipeline { private static final Logger LOG = LoggerFactory.getLogger(Pipeline.class); + /** * Thrown during execution of a {@link Pipeline}, whenever user code within that {@link Pipeline} * throws an exception. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/Coder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/Coder.java index 75b2b206edd1..15f12733d271 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/Coder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/Coder.java @@ -207,6 +207,7 @@ public static long getEncodedElementByteSizeUsingCoder(Coder target, T va throws Exception { return target.getEncodedElementByteSize(value); } + /** * Verifies all of the provided coders are deterministic. If any are not, throws a {@link * NonDeterministicException} for the {@code target} {@link Coder}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ZstdCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ZstdCoder.java index dc1fc55699c4..f59cd907b23f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ZstdCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ZstdCoder.java @@ -109,6 +109,7 @@ public T decode(InputStream is) throws IOException { public List> getCoderArguments() { return ImmutableList.of(innerCoder); } + /** * {@inheritDoc} * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexer.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexer.java index 8fec8b455cce..8d1a97afc10d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexer.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexer.java @@ -58,15 +58,17 @@ public class BeamFnDataGrpcMultiplexer implements AutoCloseable { private final StreamObserver inboundObserver; private final StreamObserver outboundObserver; private final ConcurrentHashMap< - /*instructionId=*/ String, CompletableFuture>> + /* instructionId= */ String, + CompletableFuture>> receivers; - private final Cache poisonedInstructionIds; + private final Cache poisonedInstructionIds; private static class PoisonedException extends RuntimeException { public PoisonedException() { super("Instruction poisoned"); } - }; + } + ; public BeamFnDataGrpcMultiplexer( Endpoints.@Nullable ApiServiceDescriptor apiServiceDescriptor, @@ -130,8 +132,7 @@ public void registerConsumer( /** Unregisters a previously registered consumer. 
*/ public void unregisterConsumer(String instructionId) { - @Nullable - CompletableFuture> receiverFuture = + @Nullable CompletableFuture> receiverFuture = receivers.remove(instructionId); if (receiverFuture != null && !receiverFuture.isDone()) { // The future must have been inserted by the inbound observer since registerConsumer completes @@ -148,8 +149,7 @@ public void unregisterConsumer(String instructionId) { */ public void poisonInstructionId(String instructionId) { poisonedInstructionIds.put(instructionId, Boolean.TRUE); - @Nullable - CompletableFuture> receiverFuture = + @Nullable CompletableFuture> receiverFuture = receivers.remove(instructionId); if (receiverFuture != null) { // Completing exceptionally has no effect if the future was already notified. In that case diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataInboundObserver.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataInboundObserver.java index 54fe42adefee..1b23c35c7faf 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataInboundObserver.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataInboundObserver.java @@ -67,6 +67,7 @@ private static class EndpointStatus { private final Map>>> transformIdToTimerFamilyIdToTimerEndpoint; private final CancellableQueue queue; + // We use a custom exception for closing to avoid the expense of stack trace generation. @SuppressWarnings("StaticAssignmentOfThrowable") protected static class CloseException extends Exception { @@ -74,8 +75,8 @@ private CloseException() { super( "Inbound observer closed.", null, - /*enableSuppression=*/ false, - /*writableStackTrace=*/ false); + /* enableSuppression= */ false, + /* writableStackTrace= */ false); } public static final CloseException INSTANCE = new CloseException(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/GrpcFnServer.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/GrpcFnServer.java index a75bbee63b5a..a1a23dcd4660 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/GrpcFnServer.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/GrpcFnServer.java @@ -110,7 +110,9 @@ public static GrpcFnServer create( factory.create(ImmutableList.of(service), endpoint), service, endpoint); } - /** @deprecated This create function is used for Dataflow migration purpose only. */ + /** + * @deprecated This create function is used for Dataflow migration purpose only. + */ @Deprecated public static GrpcFnServer create( ServiceT service, ApiServiceDescriptor endpoint) { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/ServerFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/ServerFactory.java index 9689fabf041f..f4a4b6069d58 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/ServerFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/ServerFactory.java @@ -102,6 +102,7 @@ public abstract Server allocateAddressAndCreate( public abstract Server create( List services, Endpoints.ApiServiceDescriptor serviceDescriptor) throws IOException; + /** * Creates a {@link Server gRPC Server} using the default server factory. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSource.java index 704843f16efc..1512f1e1cc21 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSource.java @@ -171,8 +171,7 @@ public void process( long[] numRecords = splitNumRecords(shard.getMaxNumRecords(), numSplits); for (int i = 0; i < numSplits; i++) { out.output( - shard - .toBuilder() + shard.toBuilder() .setSource(splits.get(i)) .setMaxNumRecords(numRecords[i]) .setMaxReadTime(shard.getMaxReadTime()) diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CompressedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CompressedSource.java index bbe08d31d54e..36fbe5fe4b40 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CompressedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CompressedSource.java @@ -74,7 +74,9 @@ public interface DecompressingChannelFactory extends Serializable { ReadableByteChannel createDecompressingChannel(ReadableByteChannel channel) throws IOException; } - /** @deprecated Use {@link Compression} instead */ + /** + * @deprecated Use {@link Compression} instead + */ @Deprecated public enum CompressionMode implements DecompressingChannelFactory { /** See {@link Compression#UNCOMPRESSED}. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingSource.java index 9d30efb2f113..9a23b2215e4c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingSource.java @@ -270,12 +270,16 @@ public void close() throws IOException {} static class UnboundedCountingSource extends UnboundedSource { /** The first number (>= 0) generated by this {@link UnboundedCountingSource}. */ private final long start; + /** The interval between numbers generated by this {@link UnboundedCountingSource}. */ private final long stride; + /** The number of elements to produce each period. */ private final long elementsPerPeriod; + /** The time between producing numbers from this {@link UnboundedCountingSource}. */ private final Duration period; + /** The function used to produce timestamps for the generated elements. */ private final SerializableFunction timestampFn; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/DefaultFilenamePolicy.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/DefaultFilenamePolicy.java index 1573a34c8840..aa56a0fc519e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/DefaultFilenamePolicy.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/DefaultFilenamePolicy.java @@ -204,6 +204,7 @@ public Params decode(InputStream inStream) throws IOException { } private final Params params; + /** * Constructs a new {@link DefaultFilenamePolicy}. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java index bba9b1f82f5b..101855335fef 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java @@ -127,7 +127,9 @@ public abstract class FileBasedSink private static final Logger LOG = LoggerFactory.getLogger(FileBasedSink.class); static final String TEMP_DIRECTORY_PREFIX = ".temp-beam"; - /** @deprecated use {@link Compression}. */ + /** + * @deprecated use {@link Compression}. + */ @Deprecated public enum CompressionType implements WritableByteChannelFactory { /** See {@link Compression#UNCOMPRESSED}. */ @@ -339,8 +341,7 @@ final Coder getDestinationCoderWithDefault(CoderRegistry registry) return destinationCoder; } // If dynamicDestinations doesn't provide a coder, try to find it in the coder registry. - @Nullable - TypeDescriptor descriptor = + @Nullable TypeDescriptor descriptor = extractFromTypeParameters( this, DynamicDestinations.class, @@ -1248,12 +1249,10 @@ public interface OutputFileHints extends Serializable { * @see http://www.iana.org/assignments/media-types/media-types.xhtml */ - @Nullable - String getMimeType(); + @Nullable String getMimeType(); /** Returns an optional filename suffix, eg, ".gz" is returned for {@link Compression#GZIP}. */ - @Nullable - String getSuggestedFilenameSuffix(); + @Nullable String getSuggestedFilenameSuffix(); } /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileIO.java index b7590a4c2d1a..e0ff0d51572d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileIO.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileIO.java @@ -686,6 +686,7 @@ abstract static class Builder { public MatchAll withConfiguration(MatchConfiguration configuration) { return toBuilder().setConfiguration(configuration).build(); } + /** Like {@link Match#withOutputParallelization}. */ public MatchAll withOutputParallelization(boolean outputParallelization) { return toBuilder().setOutputParallelization(outputParallelization).build(); @@ -737,6 +738,7 @@ public PCollection expand(PCollection input) { return res; } } + /** Returns whether to avoid the reshuffle operation. 
*/ public abstract boolean getOutputParallelization(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java index 6133ca9fdb39..10eab53842b7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java @@ -436,7 +436,8 @@ private static class FilterResult { public List resultSources = new ArrayList(); public List resultDestinations = new ArrayList(); public List filteredExistingSrcs = new ArrayList(); - }; + } + ; private static FilterResult filterFiles( FileSystem fileSystem, diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/OffsetBasedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/OffsetBasedSource.java index a05487731a57..da60a10e38de 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/OffsetBasedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/OffsetBasedSource.java @@ -219,7 +219,9 @@ public final boolean isStarted() { /** The {@link OffsetRangeTracker} managing the range and current position of the source. */ private final OffsetRangeTracker rangeTracker; - /** @param source the {@link OffsetBasedSource} to be read by the current reader. */ + /** + * @param source the {@link OffsetBasedSource} to be read by the current reader. + */ public OffsetBasedReader(OffsetBasedSource source) { this.source = source; this.rangeTracker = new OffsetRangeTracker(source.getStartOffset(), source.getEndOffset()); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java index 51a65e2db830..3fe0030546c6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java @@ -57,7 +57,9 @@ public abstract class Source implements Serializable, HasDisplayData { */ public void validate() {} - /** @deprecated Override {@link #getOutputCoder()} instead. */ + /** + * @deprecated Override {@link #getOutputCoder()} instead. + */ @Deprecated public Coder getDefaultOutputCoder() { // If the subclass doesn't override getDefaultOutputCoder(), hopefully it overrides the proper diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TFRecordIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TFRecordIO.java index a7be9dcea538..63d568367da4 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TFRecordIO.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TFRecordIO.java @@ -157,7 +157,9 @@ public Read withoutValidation() { return toBuilder().setValidate(false).build(); } - /** @deprecated Use {@link #withCompression}. */ + /** + * @deprecated Use {@link #withCompression}. + */ @Deprecated public Read withCompressionType(TFRecordIO.CompressionType compressionType) { return withCompression(compressionType.canonical); @@ -372,7 +374,9 @@ public Write withoutSharding() { return withNumShards(1).withShardNameTemplate(""); } - /** @deprecated use {@link #withCompression}. */ + /** + * @deprecated use {@link #withCompression}. + */ @Deprecated public Write withCompressionType(CompressionType compressionType) { return withCompression(compressionType.canonical); @@ -460,7 +464,9 @@ public void flush() throws IOException { } } - /** @deprecated Use {@link Compression}. */ + /** + * @deprecated Use {@link Compression}. 
+ */ @Deprecated public enum CompressionType { /** See {@link Compression#AUTO}. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java index 2323b86150c2..b96465e5cab7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java @@ -346,7 +346,9 @@ public Read withMatchConfiguration(MatchConfiguration matchConfiguration) { return toBuilder().setMatchConfiguration(matchConfiguration).build(); } - /** @deprecated Use {@link #withCompression}. */ + /** + * @deprecated Use {@link #withCompression}. + */ @Deprecated public Read withCompressionType(TextIO.CompressionType compressionType) { return withCompression(compressionType.canonical); @@ -511,7 +513,9 @@ public ReadAll withMatchConfiguration(MatchConfiguration configuration) { return toBuilder().setMatchConfiguration(configuration).build(); } - /** @deprecated Use {@link #withCompression}. */ + /** + * @deprecated Use {@link #withCompression}. + */ @Deprecated public ReadAll withCompressionType(TextIO.CompressionType compressionType) { return withCompression(compressionType.canonical); @@ -1412,7 +1416,9 @@ public PDone expand(PCollection input) { } } - /** @deprecated Use {@link Compression}. */ + /** + * @deprecated Use {@link Compression}. + */ @Deprecated public enum CompressionType { /** See {@link Compression#AUTO}. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/WriteFiles.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/WriteFiles.java index b0b5051f3210..85829d7e4f00 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/WriteFiles.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/WriteFiles.java @@ -180,8 +180,8 @@ public static WriteFiles getSink(); - public abstract @Nullable PTransform, PCollectionView> - getComputeNumShards(); + public abstract @Nullable + PTransform, PCollectionView> getComputeNumShards(); // We don't use a side input for static sharding, as we want this value to be updatable // when a pipeline is updated. @@ -844,6 +844,7 @@ private static class MaybeDestination { this.isValid = isValid; } } + // Utility method to get the dynamic destination based on a record. Returns a MaybeDestination // because some implementations of dynamic destinations return null, despite this being prohibited // by the interface diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/fs/ResourceId.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/fs/ResourceId.java index 95b5aee78376..8e4cb269a830 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/fs/ResourceId.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/fs/ResourceId.java @@ -107,8 +107,7 @@ public interface ResourceId extends Serializable { * @return a string representing the name of file or directory, or null if there are zero * components. */ - @Nullable - String getFilename(); + @Nullable String getFilename(); /** Returns {@code true} if this {@link ResourceId} represents a directory, false otherwise. 
*/ boolean isDirectory(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/lineage/LineageOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/lineage/LineageOptions.java index 274874ac0c5f..30dc7f8e3879 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/lineage/LineageOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/lineage/LineageOptions.java @@ -34,8 +34,7 @@ public interface LineageOptions extends PipelineOptions { + "lineage. The class must implement LineageBase and have a public constructor accepting " + "(PipelineOptions, Lineage.LineageDirection). " + "If not specified, the default Metrics-based lineage is used.") - @Nullable - Class getLineageType(); + @Nullable Class getLineageType(); void setLineageType(@Nullable Class lineageClass); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/Lineage.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/Lineage.java index dc30d82adcf4..f845d8fde940 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/Lineage.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/Lineage.java @@ -318,7 +318,9 @@ private static Set queryLineageV1(MetricResults results, Type type) { return result; } - /** @return {@link MetricQueryResults} containing lineage metrics. */ + /** + * @return {@link MetricQueryResults} containing lineage metrics. + */ private static MetricQueryResults getLineageQueryResults(MetricResults results, Type type) { MetricsFilter filter = MetricsFilter.builder() diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java index 2a88dd0025df..9a8fb3f03d63 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java @@ -171,7 +171,6 @@ public interface MetricsEnvironmentState { * * @return The previous container for the associated {@link MetricsEnvironment}. */ - @Nullable - MetricsContainer activate(@Nullable MetricsContainer metricsContainer); + @Nullable MetricsContainer activate(@Nullable MetricsContainer metricsContainer); } } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/Default.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/Default.java index 761b763f643f..5799e1380f60 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/Default.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/Default.java @@ -77,6 +77,7 @@ @interface Byte { byte value(); } + /** This represents that the default of the option is the specified short primitive value. */ @Target(ElementType.METHOD) @Retention(RetentionPolicy.RUNTIME) @@ -85,6 +86,7 @@ @interface Short { short value(); } + /** This represents that the default of the option is the specified int primitive value. 
*/ @Target(ElementType.METHOD) @Retention(RetentionPolicy.RUNTIME) diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/ExperimentalOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/ExperimentalOptions.java index c0e2e1dcb48f..366768a96368 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/ExperimentalOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/ExperimentalOptions.java @@ -39,8 +39,7 @@ public interface ExperimentalOptions extends PipelineOptions { "Apache Beam provides a number of experimental features that can " + "be enabled with this flag. If executing against a managed service, please contact the " + "service owners before enabling any experiments.") - @Nullable - List getExperiments(); + @Nullable List getExperiments(); void setExperiments(@Nullable List value); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java index 989e3a1e3193..306511f0d5f5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java @@ -448,8 +448,7 @@ public Long create(PipelineOptions options) { + " --gbek=type:;:, for example " + " --gbek=type:GcpSecret;version_name:my_secret/versions/latest. All variables " + " should use snake case to allow consistency across languages.") - @Nullable - String getGbek(); + @Nullable String getGbek(); void setGbek(String gbek); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PortablePipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PortablePipelineOptions.java index bfeddf5c697f..32755246b9c3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PortablePipelineOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PortablePipelineOptions.java @@ -45,8 +45,7 @@ public interface PortablePipelineOptions extends PipelineOptions, FileStagingOpt @Description( "Set the default environment type for running user code. " + "Possible options are DOCKER and PROCESS.") - @Nullable - String getDefaultEnvironmentType(); + @Nullable String getDefaultEnvironmentType(); void setDefaultEnvironmentType(String environmentType); @@ -57,8 +56,7 @@ public interface PortablePipelineOptions extends PipelineOptions, FileStagingOpt + "{\"os\": \"\", \"arch\": \"\", \"command\": \"\", " + "\"env\":{\"\": \"\"} }. 
" + "All fields in the json are optional except command.") - @Nullable - String getDefaultEnvironmentConfig(); + @Nullable String getDefaultEnvironmentConfig(); void setDefaultEnvironmentConfig(@Nullable String config); @@ -91,8 +89,7 @@ public interface PortablePipelineOptions extends PipelineOptions, FileStagingOpt void setLoadBalanceBundles(boolean loadBalanceBundles); @Description("The output path for the executable file to be created.") - @Nullable - String getOutputExecutablePath(); + @Nullable String getOutputExecutablePath(); void setOutputExecutablePath(String outputExecutablePath); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/StreamingOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/StreamingOptions.java index 8065e0a40cb9..53f5a1870e46 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/StreamingOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/StreamingOptions.java @@ -41,8 +41,7 @@ public interface StreamingOptions extends ApplicationNameOptions, PipelineOption "If set, attempts to produce a pipeline compatible with this prior version of the Beam SDK." + " This string should be interpreted and compared per https://semver.org/." + " See, for example, https://cloud.google.com/dataflow/docs/guides/updating-a-pipeline.") - @Nullable - String getUpdateCompatibilityVersion(); + @Nullable String getUpdateCompatibilityVersion(); void setUpdateCompatibilityVersion(@Nullable String updateCompatibilityVersion); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldAccessDescriptor.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldAccessDescriptor.java index b465212c61de..4dec9581e027 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldAccessDescriptor.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldAccessDescriptor.java @@ -539,14 +539,12 @@ private Map resolveNestedFieldsAccessed( // Resolve the field id of the field that has nested access. if (entry.getKey().getFieldId() == null) { fieldDescriptor = - fieldDescriptor - .toBuilder() + fieldDescriptor.toBuilder() .setFieldId(schema.indexOf(fieldDescriptor.getFieldName())) .build(); } else if (entry.getKey().getFieldName() == null) { fieldDescriptor = - fieldDescriptor - .toBuilder() + fieldDescriptor.toBuilder() .setFieldName(schema.nameOf(fieldDescriptor.getFieldId())) .build(); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldTypeDescriptors.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldTypeDescriptors.java index 61a235ea7aa6..2b5e7dbb81f8 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldTypeDescriptors.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldTypeDescriptors.java @@ -54,6 +54,7 @@ public class FieldTypeDescriptors { .put(TypeName.BOOLEAN, TypeDescriptors.booleans()) .put(TypeName.BYTES, TypeDescriptor.of(byte[].class)) .build(); + /** Get a {@link TypeDescriptor} from a {@link FieldType}. */ public static TypeDescriptor javaTypeForFieldType(FieldType fieldType) { switch (fieldType.getTypeName()) { @@ -75,6 +76,7 @@ public static TypeDescriptor javaTypeForFieldType(FieldType fieldType) { return PRIMITIVE_MAPPING.get(fieldType.getTypeName()); } } + /** Get a {@link FieldType} from a {@link TypeDescriptor}. */ public static FieldType fieldTypeForJavaType(TypeDescriptor typeDescriptor) { // TODO: Convert for registered logical types. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldValueGetter.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldValueGetter.java index 63ab56dc7609..bb5e98b4f3d1 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldValueGetter.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldValueGetter.java @@ -31,8 +31,7 @@ */ @Internal public interface FieldValueGetter extends Serializable { - @Nullable - ValueT get(ObjectT object); + @Nullable ValueT get(ObjectT object); /** Returns the raw value of the getter before any further transformations. */ default @Nullable Object getRaw(ObjectT object) { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldValueTypeInformation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldValueTypeInformation.java index 43aac6a5e20c..ab49a8f5cdd2 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldValueTypeInformation.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/FieldValueTypeInformation.java @@ -158,8 +158,7 @@ public static String getNameOverride( String original, T member) { @Nullable SchemaFieldName fieldName = member.getAnnotation(SchemaFieldName.class); @Nullable SchemaCaseFormat caseFormatAnnotation = member.getAnnotation(SchemaCaseFormat.class); - @Nullable - SchemaCaseFormat classCaseFormatAnnotation = + @Nullable SchemaCaseFormat classCaseFormatAnnotation = member.getDeclaringClass().getAnnotation(SchemaCaseFormat.class); if (fieldName != null) { if (caseFormatAnnotation != null) { @@ -180,8 +179,8 @@ public static String getNameOverride( public static @Nullable String getFieldDescription( T member) { - @Nullable - SchemaFieldDescription fieldDescription = member.getAnnotation(SchemaFieldDescription.class); + @Nullable SchemaFieldDescription fieldDescription = + member.getAnnotation(SchemaFieldDescription.class); if (fieldDescription == null) { return null; } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/GetterBasedSchemaProvider.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/GetterBasedSchemaProvider.java index e08f193d4072..69c06675d001 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/GetterBasedSchemaProvider.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/GetterBasedSchemaProvider.java @@ -167,8 +167,7 @@ public SerializableFunction toRowFunction(TypeDescriptor typeDesc // important to capture the schema once here, so all invocations of the toRowFunction see the // same version of the schema. If schemaFor were to be called inside the lambda below, different // workers would see different versions of the schema. 
- @NonNull - Schema schema = + @NonNull Schema schema = Verify.verifyNotNull( schemaFor(typeDescriptor), "can't create a ToRowFunction with null schema"); @@ -406,8 +405,7 @@ static class GetOneOf extends Converter converter = + @NonNull FieldValueGetter<@NonNull Object, Object> converter = checkStateNotNull( converters.get(caseType.getValue()), "Missing OneOf converter for case %s.", diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/Schema.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/Schema.java index f6e56476ae15..5d7819d9f070 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/Schema.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/Schema.java @@ -650,8 +650,7 @@ public interface LogicalType extends Serializable { * A schema type representing how to interpret the argument. {@code null} indicates this logical * type is not parameterized by an argument. */ - @Nullable - FieldType getArgumentType(); + @Nullable FieldType getArgumentType(); /** An optional argument to configure the type. */ @SuppressWarnings("TypeParameterUnusedInFormals") @@ -663,12 +662,10 @@ public interface LogicalType extends Serializable { FieldType getBaseType(); /** Convert the input type to the type Java type used by the base {@link FieldType}. */ - @NonNull - BaseT toBaseType(@NonNull InputT input); + @NonNull BaseT toBaseType(@NonNull InputT input); /** Convert the Java type used by the base {@link FieldType} to the input type. */ - @NonNull - InputT toInputType(@NonNull BaseT base); + @NonNull InputT toInputType(@NonNull BaseT base); } /** @@ -752,7 +749,9 @@ abstract static class Builder { abstract Builder setRowSchema(@Nullable Schema rowSchema); - /** @deprecated use schema options instead. */ + /** + * @deprecated use schema options instead. + */ @Deprecated abstract Builder setMetadata(Map metadata); @@ -802,7 +801,9 @@ public static FieldType array(FieldType elementType) { return FieldType.forTypeName(TypeName.ARRAY).setCollectionElementType(elementType).build(); } - /** @deprecated Set the nullability on the elementType instead */ + /** + * @deprecated Set the nullability on the elementType instead + */ @Deprecated public static FieldType array(FieldType elementType, boolean nullable) { return FieldType.forTypeName(TypeName.ARRAY) @@ -830,7 +831,9 @@ public static FieldType map(FieldType keyType, FieldType valueType) { .build(); } - /** @deprecated Set the nullability on the valueType instead */ + /** + * @deprecated Set the nullability on the valueType instead + */ @Deprecated public static FieldType map(FieldType keyType, FieldType valueType, boolean valueTypeNullable) { return FieldType.forTypeName(TypeName.MAP) @@ -888,21 +891,27 @@ public FieldType withMetadata(String key, String metadata) { return withMetadata(key, metadata.getBytes(StandardCharsets.UTF_8)); } - /** @deprecated use schema options instead. */ + /** + * @deprecated use schema options instead. + */ @Deprecated public Map getAllMetadata() { return getMetadata().entrySet().stream() .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().array)); } - /** @deprecated use schema options instead. */ + /** + * @deprecated use schema options instead. + */ @Deprecated public byte @Nullable [] getMetadata(String key) { ByteArrayWrapper metadata = getMetadata().get(key); return (metadata != null) ? metadata.array : null; } - /** @deprecated use schema options instead. */ + /** + * @deprecated use schema options instead. 
+ */ @Deprecated public String getMetadataString(String key) { ByteArrayWrapper metadata = getMetadata().get(key); @@ -1486,8 +1495,7 @@ public Schema toSnakeCase() { innerType = innerType.toBuilder().setRowSchema(innerSnakeCaseSchema).build(); field = field.toBuilder().setType(innerType).build(); } - return field - .toBuilder() + return field.toBuilder() .setName(CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, field.getName())) .build(); }) @@ -1505,8 +1513,7 @@ public Schema toCamelCase() { innerType = innerType.toBuilder().setRowSchema(innerCamelCaseSchema).build(); field = field.toBuilder().setType(innerType).build(); } - return field - .toBuilder() + return field.toBuilder() .setName(CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, field.getName())) .build(); }) diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaProvider.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaProvider.java index 37b4952e529c..0a723725159d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaProvider.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaProvider.java @@ -31,20 +31,17 @@ public interface SchemaProvider extends Serializable { /** Lookup a schema for the given type. If no schema exists, returns null. */ - @Nullable - Schema schemaFor(TypeDescriptor typeDescriptor); + @Nullable Schema schemaFor(TypeDescriptor typeDescriptor); /** * Given a type, return a function that converts that type to a {@link Row} object If no schema * exists, returns null. */ - @Nullable - SerializableFunction toRowFunction(TypeDescriptor typeDescriptor); + @Nullable SerializableFunction toRowFunction(TypeDescriptor typeDescriptor); /** * Given a type, returns a function that converts from a {@link Row} object to that type. If no * schema exists, returns null. */ - @Nullable - SerializableFunction fromRowFunction(TypeDescriptor typeDescriptor); + @Nullable SerializableFunction fromRowFunction(TypeDescriptor typeDescriptor); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/io/Failure.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/io/Failure.java index c0ce57704a35..b4ac9a4773fb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/io/Failure.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/io/Failure.java @@ -30,6 +30,7 @@ public abstract class Failure { /** Bytes containing the payload which has failed. */ @SuppressWarnings("mutable") public abstract byte[] getPayload(); + /** Information about the cause of the failure. */ public abstract String getError(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/EnumerationType.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/EnumerationType.java index b04c36ab3e95..d34ce7053b2e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/EnumerationType.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/EnumerationType.java @@ -73,6 +73,7 @@ public static EnumerationType create(List enumValues) { public static EnumerationType create(String... enumValues) { return create(Arrays.asList(enumValues)); } + /** Return an {@link Value} corresponding to one of the enumeration strings. 
*/ public Value valueOf(String stringValue) { return new Value( diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Cast.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Cast.java index cba5ba0d82c3..fa7397d978a9 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Cast.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Cast.java @@ -405,7 +405,8 @@ public static Object castValue(Object inputValue, FieldType input, FieldType out return castRow((Row) inputValue, input.getRowSchema(), output.getRowSchema()); case ARRAY: - case ITERABLE:; + case ITERABLE: + ; Iterable inputValues = (Iterable) inputValue; List outputValues = new ArrayList<>(Iterables.size(inputValues)); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Join.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Join.java index b752507eb80a..aaa920e634df 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Join.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Join.java @@ -146,7 +146,8 @@ public static Impl leftOuterJoin(PCollection rhs) /** Perform a right outer join. */ public static Impl rightOuterJoin(PCollection rhs) { return new Impl<>(JoinType.RIGHT_OUTER, rhs); - }; + } + ; /** Perform an inner join, broadcasting the right side. */ public static Impl innerBroadcastJoin(PCollection rhs) { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/RenameFields.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/RenameFields.java index a568879dd232..f67695317733 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/RenameFields.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/RenameFields.java @@ -73,6 +73,7 @@ abstract static class RenamePair implements Serializable { // The FieldAccessDescriptor describing the field to renameSchema. Must reference a singleton // field. abstract FieldAccessDescriptor getFieldAccessDescriptor(); + // The new name for the field. abstract String getNewName(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Select.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Select.java index 10a3ea64956b..53a9bb14ce65 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Select.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Select.java @@ -237,6 +237,7 @@ private static FieldType uniquifyNames(FieldType fieldType) { return fieldType; } } + /** A {@link PTransform} representing a flattened schema. 
*/ @AutoValue public abstract static class Flattened extends PTransform, PCollection> { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/ByteBuddyUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/ByteBuddyUtils.java index 832090926919..a512582a94cc 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/ByteBuddyUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/ByteBuddyUtils.java @@ -163,7 +163,8 @@ protected String name(TypeDescription superClass) { private static boolean overridePackage(@Nullable String targetPackage) { return targetPackage != null && !targetPackage.startsWith("java."); } - }; + } + ; static class IfNullElse implements StackManipulation { private final StackManipulation readValue; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/SelectHelpers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/SelectHelpers.java index c195718966b4..144bded228cb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/SelectHelpers.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/SelectHelpers.java @@ -420,6 +420,7 @@ private static void selectIntoRowWithQualifiers( l -> { return String.join("_", l); }; + /** * This policy keeps the raw nested field name. If two differently-nested fields have the same * name, flattening will fail with this policy. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/state/Timers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/state/Timers.java index 37c62a569b8c..afc2b076722f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/state/Timers.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/state/Timers.java @@ -49,8 +49,7 @@ public interface Timers { Instant currentProcessingTime(); /** Returns the current synchronized processing time or {@code null} if unknown. */ - @Nullable - Instant currentSynchronizedProcessingTime(); + @Nullable Instant currentSynchronizedProcessingTime(); /** Returns the current event time. */ Instant currentEventTime(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/state/ValueState.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/state/ValueState.java index 92cfd0bd51df..f7b17a75b038 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/state/ValueState.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/state/ValueState.java @@ -34,8 +34,7 @@ public interface ValueState extends ReadableState<@Nullable T>, State { *

Note that {@code null} will be returned if the value has never been written. */ @Override - @Nullable - T read(); + @Nullable T read(); @Override ValueState readLater(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java index 6ff5ded5318d..d051a6c0ad92 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java @@ -49,8 +49,7 @@ public interface TestPipelineOptions extends PipelineOptions { void setOnSuccessMatcher(SerializableMatcher value); @Default.Long(15 * 60) - @Nullable - Long getTestTimeoutSeconds(); + @Nullable Long getTestTimeoutSeconds(); void setTestTimeoutSeconds(Long value); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateUnique.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateUnique.java index d521b6cb121c..fc920e3fa762 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateUnique.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateUnique.java @@ -171,7 +171,9 @@ public static final class Globally extends PTransform, PCollec /** The desired maximum estimation error or null if not specified. */ private final @Nullable Double maximumEstimationError; - /** @see ApproximateUnique#globally(int) */ + /** + * @see ApproximateUnique#globally(int) + */ public Globally(int sampleSize) { if (sampleSize < 16) { throw new IllegalArgumentException( @@ -185,7 +187,9 @@ public Globally(int sampleSize) { this.maximumEstimationError = null; } - /** @see ApproximateUnique#globally(double) */ + /** + * @see ApproximateUnique#globally(double) + */ public Globally(double maximumEstimationError) { if (maximumEstimationError < 0.01 || maximumEstimationError > 0.5) { throw new IllegalArgumentException( @@ -228,7 +232,9 @@ public static final class PerKey /** The desired maximum estimation error or null if not specified. */ private final @Nullable Double maximumEstimationError; - /** @see ApproximateUnique#perKey(int) */ + /** + * @see ApproximateUnique#perKey(int) + */ public PerKey(int sampleSize) { if (sampleSize < 16) { throw new IllegalArgumentException( @@ -241,7 +247,9 @@ public PerKey(int sampleSize) { this.maximumEstimationError = null; } - /** @see ApproximateUnique#perKey(double) */ + /** + * @see ApproximateUnique#perKey(double) + */ public PerKey(double estimationError) { if (estimationError < 0.01 || estimationError > 0.5) { throw new IllegalArgumentException( diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/BatchElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/BatchElements.java index 35796d1b1385..f3d95cc2a052 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/BatchElements.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/BatchElements.java @@ -103,6 +103,7 @@ public static BatchElements withDefaults() { public static BatchElements withConfig(BatchConfig config) { return new BatchElements<>(config); } + /** * Configuration for {@link BatchElements}. * @@ -202,6 +203,7 @@ public Builder withTargetBatchDurationSecs(double targetBatchDurationSecs) { this.targetBatchDurationSecs = targetBatchDurationSecs; return this; } + /** * Sets the target batch duration including fixed cost. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java index a2f32b8b3dd3..50e08d438f46 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java @@ -544,6 +544,7 @@ public long getBytesPerOffset() { private static class BytesReader extends OffsetBasedReader { private int index; + /** * Use an optional to distinguish between null next element (as Optional.absent()) and no next * element (next is null). diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Deduplicate.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Deduplicate.java index b3ea4ddd6afd..31bb5ab2295d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Deduplicate.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Deduplicate.java @@ -70,6 +70,7 @@ public final class Deduplicate { /** The default is the {@link TimeDomain#PROCESSING_TIME processing time domain}. */ public static final TimeDomain DEFAULT_TIME_DOMAIN = TimeDomain.PROCESSING_TIME; + /** The default duration is 10 mins. */ public static final Duration DEFAULT_DURATION = Duration.standardMinutes(10); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java index 864b903b25f9..f092f403262c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java @@ -903,6 +903,7 @@ public interface MultiOutputReceiver { /** The SideInput tag ID. */ String value(); } + /** * Annotation that may be added to a {@link ProcessElement}, {@link OnTimer}, or {@link * OnWindowExpiration} method to indicate that the runner must ensure that the observable contents diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java index 05d7b7b0d920..1c9d14f3832e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java @@ -65,7 +65,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ +/** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated @SuppressWarnings({ "rawtypes" // TODO(https://github.com/apache/beam/issues/20447) @@ -74,7 +76,9 @@ public class DoFnTester implements AutoCloseable { private static final Logger LOG = LoggerFactory.getLogger(DoFnTester.class); - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @SuppressWarnings("unchecked") @Deprecated public static DoFnTester of(DoFn fn) { @@ -85,7 +89,9 @@ public static DoFnTester of(DoFn(fn); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void setSideInputs(Map, Map> sideInputs) { checkState( @@ -95,7 +101,9 @@ public void setSideInputs(Map, Map> sideInp this.sideInputs = sideInputs; } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. 
*/ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void setSideInput(PCollectionView sideInput, BoundedWindow window, T value) { checkState( @@ -110,13 +118,17 @@ public void setSideInput(PCollectionView sideInput, BoundedWindow window, windowValues.put(window, value); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public PipelineOptions getPipelineOptions() { return options; } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public enum CloningBehavior { /** @@ -136,20 +148,26 @@ public enum CloningBehavior { DO_NOT_CLONE } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void setCloningBehavior(CloningBehavior newValue) { checkState(state == State.UNINITIALIZED, "Wrong state: %s", state); this.cloningBehavior = newValue; } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public CloningBehavior getCloningBehavior() { return cloningBehavior; } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public List processBundle(Iterable inputElements) throws Exception { startBundle(); @@ -160,14 +178,18 @@ public List processBundle(Iterable inputElements) thr return takeOutputElements(); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated @SafeVarargs public final List processBundle(InputT... inputElements) throws Exception { return processBundle(Arrays.asList(inputElements)); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void startBundle() throws Exception { checkState( @@ -195,20 +217,26 @@ private static void unwrapUserCodeException(UserCodeException e) throws Exceptio } } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void processElement(InputT element) throws Exception { processTimestampedElement(TimestampedValue.atMinimumTimestamp(element)); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void processTimestampedElement(TimestampedValue element) throws Exception { checkNotNull(element, "Timestamped element cannot be null"); processWindowedElement(element.getValue(), element.getTimestamp(), GlobalWindow.INSTANCE); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. 
+ */ @Deprecated public void processWindowedElement(InputT element, Instant timestamp, final BoundedWindow window) throws Exception { @@ -327,7 +355,9 @@ public Object restriction() { } } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void finishBundle() throws Exception { checkState( @@ -349,7 +379,9 @@ public void finishBundle() throws Exception { } } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public List peekOutputElements() { return peekOutputElementsWithTimestamp().stream() @@ -357,7 +389,9 @@ public List peekOutputElements() { .collect(Collectors.toList()); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public List> peekOutputElementsWithTimestamp() { // TODO: Should we return an unmodifiable list? @@ -366,13 +400,17 @@ public List> peekOutputElementsWithTimestamp() { .collect(Collectors.toList()); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public List> peekOutputElementsInWindow(BoundedWindow window) { return peekOutputElementsInWindow(mainOutputTag, window); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public List> peekOutputElementsInWindow( TupleTag tag, BoundedWindow window) { @@ -385,13 +423,17 @@ public List> peekOutputElementsInWindow( return valuesBuilder.build(); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void clearOutputElements() { getMutableOutput(mainOutputTag).clear(); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public List takeOutputElements() { List resultElems = new ArrayList<>(peekOutputElements()); @@ -399,7 +441,9 @@ public List takeOutputElements() { return resultElems; } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public List> takeOutputElementsWithTimestamp() { List> resultElems = @@ -408,7 +452,9 @@ public List> takeOutputElementsWithTimestamp() { return resultElems; } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public List peekOutputElements(TupleTag tag) { // TODO: Should we return an unmodifiable list? @@ -417,13 +463,17 @@ public List peekOutputElements(TupleTag tag) { .collect(Collectors.toList()); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public void clearOutputElements(TupleTag tag) { getMutableOutput(tag).clear(); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. 
+ */ @Deprecated public List takeOutputElements(TupleTag tag) { List resultElems = new ArrayList<>(peekOutputElements(tag)); @@ -437,7 +487,9 @@ private List> getImmutableOutput(TupleTag tag) { return ImmutableList.copyOf(MoreObjects.firstNonNull(elems, Collections.emptyList())); } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated @SuppressWarnings({"unchecked", "rawtypes"}) public List> getMutableOutput(TupleTag tag) { @@ -449,7 +501,9 @@ public List> getMutableOutput(TupleTag tag) { return outputList; } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public TupleTag getMainOutputTag() { return mainOutputTag; @@ -519,7 +573,9 @@ public String getErrorContext() { } } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated public DoFn.ProcessContext createProcessContext( ValueInSingleWindow element) { @@ -689,7 +745,9 @@ public void outputWindowedValue( } } - /** @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. */ + /** + * @deprecated Use {@link TestPipeline} with the {@code DirectRunner}. + */ @Deprecated @Override public void close() throws Exception { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupIntoBatches.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupIntoBatches.java index 773cdea2abc4..74930bebad37 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupIntoBatches.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupIntoBatches.java @@ -321,7 +321,8 @@ protected void reportElementSize(long elementByteSize) { public long getElementByteSize() { return this.elementByteSize; } - }; + } + ; @Override public PCollection>> expand(PCollection> input) { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java index 7779d2626da5..6c5efb2559bd 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java @@ -726,6 +726,7 @@ public static class SingleOutput this.fnDisplayData = fnDisplayData; this.sideInputs = sideInputs; } + /** * Returns a new {@link ParDo} {@link PTransform} that's like this {@link PTransform} but with * the specified additional side inputs. Does not modify this {@link PTransform}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Top.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Top.java index e3008a665921..59bb91e0cb1d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Top.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Top.java @@ -178,30 +178,37 @@ public static > Combine.Globally> largest(int public static > TopCombineFn> largestFn(int count) { return new TopCombineFn>(count, new Natural()) {}; } + /** Returns a {@link TopCombineFn} that aggregates the largest count long values. */ public static TopCombineFn> largestLongsFn(int count) { return new TopCombineFn>(count, new Natural()) {}; } + /** Returns a {@link TopCombineFn} that aggregates the largest count int values. 
*/ public static TopCombineFn> largestIntsFn(int count) { return new TopCombineFn>(count, new Natural<>()) {}; } + /** Returns a {@link TopCombineFn} that aggregates the largest count double values. */ public static TopCombineFn> largestDoublesFn(int count) { return new TopCombineFn>(count, new Natural<>()) {}; } + /** Returns a {@link TopCombineFn} that aggregates the smallest count values. */ public static > TopCombineFn> smallestFn(int count) { return new TopCombineFn>(count, new Reversed<>()) {}; } + /** Returns a {@link TopCombineFn} that aggregates the smallest count long values. */ public static TopCombineFn> smallestLongsFn(int count) { return new TopCombineFn>(count, new Reversed<>()) {}; } + /** Returns a {@link TopCombineFn} that aggregates the smallest count int values. */ public static TopCombineFn> smallestIntsFn(int count) { return new TopCombineFn>(count, new Reversed<>()) {}; } + /** Returns a {@link TopCombineFn} that aggregates the smallest count double values. */ public static TopCombineFn> smallestDoublesFn(int count) { return new TopCombineFn>(count, new Reversed<>()) {}; @@ -326,7 +333,9 @@ public static > PerKey> largestPerKey(i return Combine.perKey(largestFn(count)); } - /** @deprecated use {@link Natural} instead */ + /** + * @deprecated use {@link Natural} instead + */ @Deprecated public static class Largest> implements Comparator, Serializable { @@ -348,7 +357,9 @@ public int compare(T a, T b) { } } - /** @deprecated use {@link Reversed} instead */ + /** + * @deprecated use {@link Reversed} instead + */ @Deprecated public static class Smallest> implements Comparator, Serializable { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java index c62a341201f7..fb5d951829b9 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java @@ -697,7 +697,9 @@ public AsMap inMemory(boolean inMemory) { return new AsMap<>(inMemory); } - /** @deprecated this method simply returns this AsMap unmodified */ + /** + * @deprecated this method simply returns this AsMap unmodified + */ @Deprecated() public AsMap withSingletonValues() { return this; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Watch.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Watch.java index 793fac048dff..8f17dc2db8b6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Watch.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Watch.java @@ -186,8 +186,7 @@ List> getOutputs() { return outputs; } - @Nullable - Instant getWatermark() { + @Nullable Instant getWatermark() { return watermark; } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/errorhandling/ErrorHandler.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/errorhandling/ErrorHandler.java index cf040470d608..a3b65f5eaa4b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/errorhandling/ErrorHandler.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/errorhandling/ErrorHandler.java @@ -75,8 +75,7 @@ public interface ErrorHandler extends AutoClose boolean isClosed(); - @Nullable - OutputT getOutput(); + @Nullable OutputT getOutput(); class PTransformErrorHandler implements ErrorHandler { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java 
b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java index f1a002d6277d..d0ceff65e552 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java @@ -667,6 +667,7 @@ public Iterator iterator() { /** Keeps track of the index, in head, that this iterator points to. */ int index = -1; + /** If the index is beyond what was cached in head, this is this iterators view of tail. */ Iterator tailIter; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/ByteBuddyDoFnInvokerFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/ByteBuddyDoFnInvokerFactory.java index 9adbe3a12cf4..c8a95b87a588 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/ByteBuddyDoFnInvokerFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/ByteBuddyDoFnInvokerFactory.java @@ -293,8 +293,7 @@ public void invokeOnTimer( String timerId, String timerFamilyId, DoFnInvoker.ArgumentProvider arguments) { - @Nullable - OnTimerInvoker onTimerInvoker = + @Nullable OnTimerInvoker onTimerInvoker = timerFamilyId.isEmpty() ? onTimerInvokers.get(timerId) : onTimerFamilyInvokers.get(timerFamilyId); @@ -659,8 +658,7 @@ private static ClassLoadingStrategy getClassLoadingStrategy(Class methodHandles = Class.forName("java.lang.invoke.MethodHandles"); Object lookup = @@ -814,6 +812,7 @@ private static Implementation delegateMethodWithExtraParametersOrThrow( static class DoFnMethodDelegation implements Implementation { /** The {@link MethodDescription} of the wrapped {@link DoFn}'s method. */ protected final MethodDescription targetMethod; + /** Whether the target method returns non-void. */ private final boolean targetHasReturn; @@ -1504,8 +1503,7 @@ public StackManipulation.Size apply(MethodVisitor mv, Context context) { if (returnVarIndex != null) { // Drop the return type from the locals @SuppressWarnings("nullness") // bytebuddy MethodVisitor not annotated, so we lie - @NonNull - String nullSignature = null; + @NonNull String nullSignature = null; mv.visitLocalVariable( "res", returnType.getDescriptor(), nullSignature, wrapStart, wrapEnd, returnVarIndex); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvoker.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvoker.java index 1f122f1bf661..892774489f52 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvoker.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvoker.java @@ -193,8 +193,7 @@ interface ArgumentProvider { *
<p>
{@code null} is allowed because user keys may be null. This method may not return * null for any other reason. */ - @Nullable - Object key(); + @Nullable Object key(); /** * Provide a reference to the input sideInput with the specified tag. @@ -202,8 +201,7 @@ interface ArgumentProvider { *
<p>
{@code null} is allowed because side input values may be null. This method may not * return null for any other reason. */ - @Nullable - Object sideInput(String tagId); + @Nullable Object sideInput(String tagId); /** * Provide a reference to the selected schema field corresponding to the input argument @@ -212,19 +210,16 @@ interface ArgumentProvider { *
<p>
{@code null} is allowed because element fields may be null. This method may not * return null for any other reason. */ - @Nullable - Object schemaElement(int index); + @Nullable Object schemaElement(int index); /** Provide a reference to the input element timestamp. */ Instant timestamp(DoFn doFn); /** Provide a reference to the record id of the current element. */ - @Nullable - String currentRecordId(DoFn doFn); + @Nullable String currentRecordId(DoFn doFn); /** Provide a reference to the record offset of the current element. */ - @Nullable - Long currentRecordOffset(DoFn doFn); + @Nullable Long currentRecordOffset(DoFn doFn); /** Provide a reference to the firing timestamp of the current timer. */ Instant fireTimestamp(DoFn doFn); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java index 33fccc2e1cde..d96d35dc8e68 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java @@ -137,8 +137,8 @@ public abstract class DoFnSignature { /** Details about this {@link DoFn}'s {@link DoFn.GetInitialWatermarkEstimatorState} method. */ @Pure - public abstract @Nullable GetInitialWatermarkEstimatorStateMethod - getInitialWatermarkEstimatorState(); + public abstract @Nullable + GetInitialWatermarkEstimatorStateMethod getInitialWatermarkEstimatorState(); /** Details about this {@link DoFn}'s {@link DoFn.NewWatermarkEstimator} method. */ @Pure @@ -160,7 +160,9 @@ public abstract class DoFnSignature { @Pure public abstract @Nullable Map onTimerFamilyMethods(); - /** @deprecated use {@link #usesState()}, it's cleaner */ + /** + * @deprecated use {@link #usesState()}, it's cleaner + */ @Deprecated @Pure public boolean isStateful() { @@ -279,8 +281,7 @@ default boolean observesWindow() { } /** The type of window expected by this method, if any. */ - @Nullable - TypeDescriptor windowT(); + @Nullable TypeDescriptor windowT(); } /** A descriptor for an optional parameter of the {@link DoFn.ProcessElement} method. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java index ec624696fc7c..b63c0ae1b26e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java @@ -450,6 +450,7 @@ public Map getStateParameters() { public Map getTimerParameters() { return Collections.unmodifiableMap(timerParameters); } + /** * TimerMap parameters declared in this context, keyed by {@link * org.apache.beam.sdk.transforms.DoFn.TimerFamily}. @@ -457,6 +458,7 @@ public Map getTimerParameters() { public Map getTimerFamilyParameters() { return Collections.unmodifiableMap(timerFamilyParameters); } + /** Extra parameters in their entirety. Unmodifiable. 
*/ public List getExtraParameters() { return Collections.unmodifiableList(extraParameters); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/DefaultTrigger.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/DefaultTrigger.java index ba572f1f6925..94025470a658 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/DefaultTrigger.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/DefaultTrigger.java @@ -42,7 +42,9 @@ public Instant getWatermarkThatGuaranteesFiring(BoundedWindow window) { return window.maxTimestamp(); } - /** @return false; the default trigger never finishes */ + /** + * @return false; the default trigger never finishes + */ @Override public boolean mayFinish() { return false; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/HistogramData.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/HistogramData.java index 6e1d05491a7b..be13b1bac311 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/HistogramData.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/HistogramData.java @@ -469,14 +469,19 @@ private synchronized double getLinearInterpolation(double percentile) { public interface BucketType extends Serializable { // Lower bound of a starting bucket. double getRangeFrom(); + // Upper bound of an ending bucket. double getRangeTo(); + // The number of buckets. int getNumBuckets(); + // Get the bucket array index for the given value. int getBucketIndex(double value); + // Get the bucket size for the given bucket array index. double getBucketSize(int index); + // Get the accumulated bucket size from bucket index 0 until endIndex. // Generally, this can be calculated as `sigma(0 <= i < endIndex) getBucketSize(i)`. double getAccumulatedBucketSize(int endIndex); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Holder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Holder.java index 8a2853c68742..aa274590554b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Holder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Holder.java @@ -41,5 +41,6 @@ public static Holder of(ValueT value) { public T get() { return value; - }; + } + ; } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/CoderTranslatorRegistrar.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/CoderTranslatorRegistrar.java index 44e8c2956aee..786b757a2225 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/CoderTranslatorRegistrar.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/CoderTranslatorRegistrar.java @@ -44,10 +44,8 @@ public interface CoderTranslatorRegistrar { boolean isKnownCoder(Coder coder, PipelineOptions options); /** Returns the CoderTranslator to use for this Coder, or null if the Coder is not known. */ - @Nullable - CoderTranslator getCoderTranslator(Class coderClass); + @Nullable CoderTranslator getCoderTranslator(Class coderClass); /** Returns the Coder to use for the given Urn, or null if the Urn is for an unknown Coder. 
*/ - @Nullable - Class getCoderForUrn(String coderUrn); + @Nullable Class getCoderForUrn(String coderUrn); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/DefaultArtifactResolver.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/DefaultArtifactResolver.java index 4edd54a0c66c..21f2f8aaecdb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/DefaultArtifactResolver.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/DefaultArtifactResolver.java @@ -96,23 +96,17 @@ public RunnerApi.Pipeline resolveArtifacts(RunnerApi.Pipeline pipeline) { for (Map.Entry entry : pipeline.getComponents().getEnvironmentsMap().entrySet()) { List resolvedDependencies = - entry - .getValue() - .getDependenciesList() - .parallelStream() + entry.getValue().getDependenciesList().parallelStream() .flatMap(resolver) .collect(Collectors.toList()); environmentMapBuilder.put( entry.getKey(), - entry - .getValue() - .toBuilder() + entry.getValue().toBuilder() .clearDependencies() .addAllDependencies(resolvedDependencies) .build()); } - return pipeline - .toBuilder() + return pipeline.toBuilder() .setComponents( pipeline.getComponents().toBuilder().putAllEnvironments(environmentMapBuilder.build())) .build(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/Environments.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/Environments.java index 969bda88d07f..20db96e16751 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/Environments.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/Environments.java @@ -214,8 +214,7 @@ public static Environment createOrGetDefaultEnvironment(PortablePipelineOptions defaultEnvironment = createDockerEnvironment(getDockerContainerImage(options)); } } - return defaultEnvironment - .toBuilder() + return defaultEnvironment.toBuilder() .addAllDependencies(getDeferredArtifacts(options)) .addAllCapabilities(getJavaCapabilities()) .build(); @@ -324,7 +323,8 @@ public static List expandAnyOfEnvironments(Environment environment) .equals(environment.getUrn())) { try { return AnyOfEnvironmentPayload.parseFrom(environment.getPayload()) - .getEnvironmentsList().stream() + .getEnvironmentsList() + .stream() .flatMap(subenv -> expandAnyOfEnvironments(subenv).stream()); } catch (InvalidProtocolBufferException exn) { throw new RuntimeException(exn); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/External.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/External.java index 6204ae445f8c..69a008927cf0 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/External.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/External.java @@ -294,9 +294,7 @@ public OutputT expand(InputT input) { .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); expandedComponents = - response - .getComponents() - .toBuilder() + response.getComponents().toBuilder() .putAllEnvironments(resolveArtifacts(newEnvironmentsWithDependencies, endpoint)) .build(); expandedTransform = response.getTransform(); @@ -373,8 +371,7 @@ private static RunnerApi.Environment resolveArtifacts( ArtifactRetrievalServiceGrpc.ArtifactRetrievalServiceBlockingStub retrievalStub, RunnerApi.Environment environment) throws IOException { - return environment - .toBuilder() + return environment.toBuilder() .clearDependencies() 
.addAllDependencies(resolveArtifacts(retrievalStub, environment.getDependenciesList())) .build(); @@ -403,8 +400,7 @@ private static List resolveArtifacts( } } resolved.add( - artifact - .toBuilder() + artifact.toBuilder() .setTypeUrn("beam:artifact:type:file:v1") .setTypePayload( RunnerApi.ArtifactFilePayload.newBuilder() diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/ExternalTranslation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/ExternalTranslation.java index a96370622025..c8986eb60e86 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/ExternalTranslation.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/ExternalTranslation.java @@ -118,9 +118,7 @@ public RunnerApi.PTransform translate( String coderId = entry.getValue().getCoderId(); mergingComponentsBuilder.putPcollections( entry.getKey(), - entry - .getValue() - .toBuilder() + entry.getValue().toBuilder() .setCoderId( Preconditions.checkNotNull(coderRenameMap.getOrDefault(coderId, coderId))) .build()); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/PTransformTranslation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/PTransformTranslation.java index c7b42f59036d..5f8782265f17 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/PTransformTranslation.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/PTransformTranslation.java @@ -150,6 +150,7 @@ public class PTransformTranslation { "beam:transform:sdf_pair_with_restriction:v1"; public static final String SPLITTABLE_TRUNCATE_SIZED_RESTRICTION_URN = "beam:transform:sdf_truncate_sized_restrictions:v1"; + /** * @deprecated runners should move away from using `SplittableProcessKeyedElements` and prefer to * internalize any necessary SplittableDoFn expansion. @@ -157,6 +158,7 @@ public class PTransformTranslation { @Deprecated public static final String SPLITTABLE_PROCESS_KEYED_URN = "beam:transform:sdf_process_keyed_elements:v1"; + /** * @deprecated runners should move away from using `SplittableProcessElements` and prefer to * internalize any necessary SplittableDoFn expansion. @@ -329,8 +331,7 @@ public static String urnForTransform(PTransform transform) { * the Java representation while registering components that transform references. */ public interface TransformTranslator> { - @Nullable - String getUrn(T transform); + @Nullable String getUrn(T transform); boolean canTranslate(PTransform pTransform); @@ -656,9 +657,8 @@ default String getUrn(T transform) { * value is null, transform should include an empty spec. * @throws IOException */ - @Nullable - FunctionSpec translate(AppliedPTransform application, SdkComponents components) - throws IOException; + @Nullable FunctionSpec translate( + AppliedPTransform application, SdkComponents components) throws IOException; /** * Generates a Row-based construction configuration for the provided transform. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/ParDoTranslation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/ParDoTranslation.java index e8788efb62e7..76e85500ed71 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/ParDoTranslation.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/ParDoTranslation.java @@ -88,37 +88,44 @@ public class ParDoTranslation { */ public static final String REQUIRES_STATEFUL_PROCESSING_URN = "beam:requirement:pardo:stateful:v1"; + /** * This requirement indicates the requests_finalization field of ParDo transform payloads must be * inspected. */ public static final String REQUIRES_BUNDLE_FINALIZATION_URN = "beam:requirement:pardo:finalization:v1"; + /** * This requirement indicates the requires_stable_input field of ParDo transform payloads must be * inspected. */ public static final String REQUIRES_STABLE_INPUT_URN = "beam:requirement:pardo:stable_input:v1"; + /** * This requirement indicates the requires_time_sorted_input field of ParDo transform payloads * must be inspected. */ public static final String REQUIRES_TIME_SORTED_INPUT_URN = "beam:requirement:pardo:time_sorted_input:v1"; + /** * This requirement indicates the restriction_coder_id field of ParDo transform payloads must be * inspected. */ public static final String REQUIRES_SPLITTABLE_DOFN_URN = "beam:requirement:pardo:splittable_dofn:v1"; + /** This requirement indicates that the ParDo requires a callback on each window expiration. */ public static final String REQUIRES_ON_WINDOW_EXPIRATION_URN = "beam:requirement:pardo:on_window_expiration:v1"; /** Represents a user state specification that supports a bag. */ public static final String BAG_USER_STATE = "beam:user_state:bag:v1"; + /** Represents a user state specification that supports a multimap. */ public static final String MULTIMAP_USER_STATE = "beam:user_state:multimap:v1"; + /** Represents a user state specification that supports an ordered list. */ public static final String ORDERED_LIST_USER_STATE = "beam:user_state:ordered_list:v1"; @@ -149,8 +156,10 @@ public class ParDoTranslation { /** The URN for an unknown Java {@link DoFn}. */ public static final String CUSTOM_JAVA_DO_FN_URN = "beam:dofn:javasdk:0.1"; + /** The URN for an unknown Java {@link ViewFn}. */ public static final String CUSTOM_JAVA_VIEW_FN_URN = "beam:viewfn:javasdk:0.1"; + /** The URN for an unknown Java {@link WindowMappingFn}. 
*/ public static final String CUSTOM_JAVA_WINDOW_MAPPING_FN_URN = "beam:windowmappingfn:javasdk:0.1"; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/SdkComponents.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/SdkComponents.java index 6288649aba3d..7de3c714f4be 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/SdkComponents.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/SdkComponents.java @@ -323,10 +323,7 @@ public String getEnvironmentIdFor(ResourceHints resourceHints) { environmentIdsByResourceHints.put(resourceHints, baseEnvironmentId); } else { Environment env = - componentsBuilder - .getEnvironmentsMap() - .get(baseEnvironmentId) - .toBuilder() + componentsBuilder.getEnvironmentsMap().get(baseEnvironmentId).toBuilder() .putAllResourceHints( Maps.transformValues( resourceHints.hints(), hint -> ByteString.copyFrom(hint.toBytes()))) diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/SplittableParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/SplittableParDo.java index 74af80d6feee..e99f1c908941 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/SplittableParDo.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/SplittableParDo.java @@ -743,6 +743,7 @@ public static void convertReadBasedSplittableDoFnsToPrimitiveReads(Pipeline pipe public static final PTransformOverride PRIMITIVE_BOUNDED_READ_OVERRIDE = PTransformOverride.of( PTransformMatchers.classEqualTo(Read.Bounded.class), new BoundedReadOverrideFactory<>()); + /** * A transform override for {@link Read.Unbounded} that converts it to a {@link * PrimitiveUnboundedRead}. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java index bba880288d44..d3f9d9f91948 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java @@ -275,9 +275,7 @@ private RunnerApi.Pipeline updateTransformViaTransformService( .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); RunnerApi.Components expandedComponents = - response - .getComponents() - .toBuilder() + response.getComponents().toBuilder() .putAllEnvironments( External.ExpandableTransform.resolveArtifacts( newEnvironmentsWithDependencies, transformServiceEndpoint)) diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnboundedReadFromBoundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnboundedReadFromBoundedSource.java index 7256be139e72..3ab85ee03699 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnboundedReadFromBoundedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnboundedReadFromBoundedSource.java @@ -215,14 +215,12 @@ public Checkpoint( public void finalizeCheckpoint() {} @VisibleForTesting - @Nullable - List> getResidualElements() { + @Nullable List> getResidualElements() { return residualElements; } @VisibleForTesting - @Nullable - BoundedSource getResidualSource() { + @Nullable BoundedSource getResidualSource() { return residualSource; } } @@ -234,6 +232,7 @@ static class CheckpointCoder extends StructuredCoder> { private final Coder>> elemsCoder; // The coder from the BoundedReader for coding each element private final Coder elemCoder; + // The nullable and serializable coder for the BoundedSource. 
@SuppressWarnings("rawtypes") private final Coder sourceCoder; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ExecutableStage.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ExecutableStage.java index b8399e3bcf9d..6c457721f3f8 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ExecutableStage.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ExecutableStage.java @@ -189,8 +189,7 @@ default PTransform toPTransform(String uniqueName) { payload.addTransforms(transform.getId()); } payload.setComponents( - getComponents() - .toBuilder() + getComponents().toBuilder() .clearTransforms() .putAllTransforms( getTransforms().stream() diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ImmutableExecutableStage.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ImmutableExecutableStage.java index 4dc649e02ad5..101906d35e3e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ImmutableExecutableStage.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ImmutableExecutableStage.java @@ -39,8 +39,7 @@ public static ImmutableExecutableStage ofFullComponents( Collection outputs, Collection wireCoderSettings) { Components prunedComponents = - components - .toBuilder() + components.toBuilder() .clearTransforms() .putAllTransforms( transforms.stream() diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/OutputDeduplicator.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/OutputDeduplicator.java index 14df79941bec..4ec2853fffb3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/OutputDeduplicator.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/OutputDeduplicator.java @@ -298,9 +298,7 @@ private static ExecutableStage deduplicateStageOutput( updatedOutputs.add(originalToPartial.getOrDefault(output.getId(), output)); } RunnerApi.Components updatedStageComponents = - stage - .getComponents() - .toBuilder() + stage.getComponents().toBuilder() .clearTransforms() .putAllTransforms( updatedTransforms.stream() diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ProtoOverrides.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ProtoOverrides.java index 3052a1f6d60c..8c05408002ae 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ProtoOverrides.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/ProtoOverrides.java @@ -98,8 +98,7 @@ public interface TransformReplacement { * *
<p>
Introduced components must not collide with any components in the existing components. */ - @Nullable - MessageWithComponents getReplacement( + @Nullable MessageWithComponents getReplacement( String transformId, ComponentsOrBuilder existingComponents); } } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/SideInputReference.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/SideInputReference.java index 8f16e0ac6d6c..379b83967574 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/SideInputReference.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/SideInputReference.java @@ -55,8 +55,10 @@ public static SideInputReference fromSideInputId( /** The PTransform that uses this side input. */ public abstract PTransformNode transform(); + /** The local name the referencing PTransform uses to refer to this side input. */ public abstract String localName(); + /** The PCollection that backs this side input. */ public abstract PCollectionNode collection(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/SplittableParDoExpander.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/SplittableParDoExpander.java index 66ab705e28e9..2fc496b0f22d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/SplittableParDoExpander.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/SplittableParDoExpander.java @@ -225,8 +225,7 @@ public MessageWithComponents getReplacement( rval.getComponentsBuilder().putTransforms(splitAndSizeId, splitAndSize.build()); } PTransform.Builder newCompositeRoot = - splittableParDo - .toBuilder() + splittableParDo.toBuilder() // Clear the original splittable ParDo spec and add all the new transforms as // children. .clearSpec() diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/TimerReference.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/TimerReference.java index 6436d30be309..c13eb7abc286 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/TimerReference.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/TimerReference.java @@ -44,6 +44,7 @@ public static TimerReference fromTimerId( /** The PTransform that uses this timer. */ public abstract PipelineNode.PTransformNode transform(); + /** The local name the referencing PTransform uses to refer to this timer. */ public abstract String localName(); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/UserStateReference.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/UserStateReference.java index cf0e22d4fef9..2c5e468e65c2 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/UserStateReference.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/graph/UserStateReference.java @@ -59,8 +59,10 @@ public static UserStateReference fromUserStateId( /** The id of the PTransform that uses this user state. */ public abstract PTransformNode transform(); + /** The local name the referencing PTransform uses to refer to this user state. */ public abstract String localName(); + /** The PCollection that represents the input to the PTransform. 
*/ public abstract PCollectionNode collection(); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java index e5b342510e6a..7bcc0f6b2d0d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java @@ -196,6 +196,7 @@ public OutputT apply( // Internal details below here. final Pipeline pipeline; + /** * The {@link PCollection PCollections} contained by this {@link PCollectionList}, and an * arbitrary tags associated with each. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionView.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionView.java index fab7f660a244..b52a2335eb3b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionView.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionView.java @@ -54,8 +54,8 @@ public interface PCollectionView extends PValue, Serializable { *
<p>
The {@link PCollection} may not be available in all contexts. */ @Internal - @Nullable - PCollection getPCollection(); + @Nullable PCollection getPCollection(); + /** * For internal use only. * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionViews.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionViews.java index e4d2fe649ff3..9fa219eddbca 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionViews.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionViews.java @@ -768,6 +768,7 @@ public TypeDescriptor> getTypeDescriptor() { */ private static class ListOverMultimapView extends AbstractList implements RandomAccess { private final MultimapView> primitiveView; + /** * A mapping from non over-lapping ranges to the number of elements at each position within * that range. Ranges not specified in the mapping implicitly have 0 elements at those diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PValues.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PValues.java index 94add5702c81..de47a97cd755 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PValues.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PValues.java @@ -97,8 +97,7 @@ public static Map, PCollection> fullyExpand( PCollection.class.getSimpleName(), valueComponent.getValue())); } - @Nullable - PCollection previous = + @Nullable PCollection previous = result.put(valueComponent.getKey(), (PCollection) valueComponent.getValue()); if (previous != null) { throw new IllegalArgumentException( diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/RowUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/RowUtils.java index 92f60113620a..978575e6b9db 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/RowUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/RowUtils.java @@ -261,8 +261,7 @@ void setOverrides(List values) { topNode.setOverrides(overrides); } - @Nullable - FieldOverride getOverride(FieldAccessDescriptor fieldAccessDescriptor) { + @Nullable FieldOverride getOverride(FieldAccessDescriptor fieldAccessDescriptor) { return topNode.getOverride(fieldAccessDescriptor); } @@ -326,8 +325,7 @@ void setOverrides(List overrides) { this.fieldOverrides = overrides; } - @Nullable - FieldOverride getOverride(FieldAccessDescriptor fieldAccessDescriptor) { + @Nullable FieldOverride getOverride(FieldAccessDescriptor fieldAccessDescriptor) { FieldOverride override = null; if (!fieldAccessDescriptor.getFieldsAccessed().isEmpty()) { FieldDescriptor fieldDescriptor = diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/ValueInSingleWindow.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/ValueInSingleWindow.java index ef7b40aabe44..9ce5c32c5d90 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/ValueInSingleWindow.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/ValueInSingleWindow.java @@ -46,7 +46,8 @@ public abstract class ValueInSingleWindow { @SuppressWarnings("nullness") public T getValue() { return getNullableValue(); - }; + } + ; /** * Workaround for autovalue code generation, which does not allow type variables to be diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/WindowedValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/WindowedValue.java index 13796018d7aa..adc37b18ebf9 100644 --- 
a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/WindowedValue.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/WindowedValue.java @@ -47,14 +47,11 @@ public interface WindowedValue { @Pure PaneInfo getPaneInfo(); - @Nullable - String getRecordId(); + @Nullable String getRecordId(); - @Nullable - Context getOpenTelemetryContext(); + @Nullable Context getOpenTelemetryContext(); - @Nullable - Long getRecordOffset(); + @Nullable Long getRecordOffset(); CausedByDrain causedByDrain(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/WindowedValues.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/WindowedValues.java index 33700a9dc0d2..8d5361b90900 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/WindowedValues.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/WindowedValues.java @@ -345,7 +345,9 @@ public static WindowedValue of( } } - /** @deprecated for use only in compatibility with old broken code */ + /** + * @deprecated for use only in compatibility with old broken code + */ @Deprecated static WindowedValue createWithoutValidation( T value, diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/FilterTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/FilterTest.java index 8cf95eae4905..9514d4ae1fe2 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/FilterTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/FilterTest.java @@ -49,7 +49,8 @@ abstract static class Simple { abstract int getField2(); abstract int getField3(); - }; + } + ; @DefaultSchema(AutoValueSchema.class) @AutoValue diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/SelectTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/SelectTest.java index eea4ab03dfcb..75d69fd10a9a 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/SelectTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/SelectTest.java @@ -65,7 +65,8 @@ abstract static class Schema1 { static Schema1 create() { return new AutoValue_SelectTest_Schema1("field1", 42, 3.14); } - }; + } + ; /** A class matching the schema resulting from selection field1, field3. 
*/ @DefaultSchema(AutoValueSchema.class) diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/TypedSchemaTransformProviderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/TypedSchemaTransformProviderTest.java index 275172d971c1..072c8c744627 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/TypedSchemaTransformProviderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/transforms/TypedSchemaTransformProviderTest.java @@ -51,7 +51,8 @@ static Configuration create(String stringField, int integerField) { return new AutoValue_TypedSchemaTransformProviderTest_Configuration( stringField, integerField); } - }; + } + ; private static class FakeTypedSchemaIOProvider extends TypedSchemaTransformProvider { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java index e79913c7cf55..795c18f8ca12 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java @@ -185,6 +185,7 @@ public void testCreateParameterizedType() throws Exception { p.run(); } + /** An unserializable class to demonstrate encoding of elements. */ private static class UnserializableRecord { private final String myString; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/ExternalTranslationTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/ExternalTranslationTest.java index 8ee101235c65..23fa1385c459 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/ExternalTranslationTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/ExternalTranslationTest.java @@ -83,9 +83,7 @@ public ExpansionApi.ExpansionResponse expand(ExpansionApi.ExpansionRequest reque ExpansionApi.ExpansionResponse.newBuilder() .setComponents(components) .setTransform( - components - .getTransformsOrThrow(transformId) - .toBuilder() + components.getTransformsOrThrow(transformId).toBuilder() .setUniqueName(transformId)) .addAllRequirements(requirementsBuilder.build()) .build(); diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/ValidateRunnerXlangTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/ValidateRunnerXlangTest.java index 06288c07dbff..6db2105caf79 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/ValidateRunnerXlangTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/ValidateRunnerXlangTest.java @@ -239,6 +239,7 @@ protected void pythonDependenciesTest(Pipeline pipeline) { PAssert.that(col).containsInAnyOrder("The Dormouse's story"); } } + /** * Motivation behind singleInputOutputTest. 
* diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/graph/GreedyPipelineFuserTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/graph/GreedyPipelineFuserTest.java index 9b403b869437..2550f3399398 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/graph/GreedyPipelineFuserTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/graph/GreedyPipelineFuserTest.java @@ -104,8 +104,7 @@ private static PCollection pc(String name) { public void singleEnvironmentBecomesASingleStage() { String name = "read.out"; Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms( "read", PTransform.newBuilder() @@ -181,8 +180,7 @@ public void singleEnvironmentBecomesASingleStage() { @Test public void transformsWithNoEnvironmentBecomeRunnerExecuted() { Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms( "mystery", PTransform.newBuilder() @@ -227,8 +225,7 @@ public void transformsWithNoEnvironmentBecomeRunnerExecuted() { @Test public void singleEnvironmentAcrossGroupByKeyMultipleStages() { Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms( "read", PTransform.newBuilder() @@ -305,8 +302,7 @@ public void singleEnvironmentAcrossGroupByKeyMultipleStages() { @Test public void multipleEnvironmentsBecomesMultipleStages() { Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms( "read", PTransform.newBuilder() @@ -696,8 +692,7 @@ public void flattenWithHeterogeneousInputsSingleEnvOutputPartiallyMaterialized() @Test public void flattenAfterNoEnvDoesNotFuse() { Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms( "flatten", PTransform.newBuilder() @@ -972,8 +967,7 @@ public void statefulParDoRootsStage() { .build(); Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("parDo", parDoTransform) .putPcollections("parDo.out", pc("parDo.out")) .putTransforms("stateful", statefulTransform) @@ -1048,8 +1042,7 @@ public void parDoWithTimerRootsStage() { .build(); Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("parDo", parDoTransform) .putPcollections("parDo.out", pc("parDo.out")) .putTransforms("timer", timerTransform) @@ -1107,8 +1100,7 @@ public void parDoWithStateAndTimerRootsStage() { .build(); Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("timer", timerTransform) .putPcollections("timer.out", pc("timer.out")) .putPcollections("output.out", pc("output.out")) @@ -1145,8 +1137,7 @@ public void parDoWithStateAndTimerRootsStage() { @Test public void compositesIgnored() { Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms( "read", PTransform.newBuilder() @@ -1358,8 +1349,12 @@ public void sanitizedTransforms() throws Exception { fused.getFusedStages().stream() .flatMap( s -> - s.getComponents().getTransformsOrThrow(flattenTransform.getUniqueName()) - .getInputsMap().values().stream()) + s + .getComponents() + .getTransformsOrThrow(flattenTransform.getUniqueName()) + .getInputsMap() + .values() + .stream()) .collect(Collectors.toList()), containsInAnyOrder(read1Output.getUniqueName(), read2Output.getUniqueName())); } diff --git 
a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/graph/GreedyStageFuserTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/graph/GreedyStageFuserTest.java index 242e84104ea6..b4c08aa42e94 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/graph/GreedyStageFuserTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/construction/graph/GreedyStageFuserTest.java @@ -98,8 +98,7 @@ public void differentEnvironmentsThrows() { // read.out can't be fused with both 'go' and 'py', so we should refuse to create this stage QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms( "read", PTransform.newBuilder() @@ -176,8 +175,7 @@ public void noEnvironmentThrows() { .build(); QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("runnerTransform", gbkTransform) .putPcollections( "gbk.out", PCollection.newBuilder().setUniqueName("gbk.out").build()) @@ -226,8 +224,7 @@ public void fusesCompatibleEnvironments() { QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("parDo", parDoTransform) .putPcollections( "parDo.out", PCollection.newBuilder().setUniqueName("parDo.out").build()) @@ -286,8 +283,7 @@ public void materializesWithStatefulConsumer() { QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("parDo", parDoTransform) .putPcollections( "parDo.out", PCollection.newBuilder().setUniqueName("parDo.out").build()) @@ -347,8 +343,7 @@ public void materializesWithConsumerWithTimer() { QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("parDo", parDoTransform) .putPcollections( "parDo.out", PCollection.newBuilder().setUniqueName("parDo.out").build()) @@ -430,8 +425,7 @@ public void fusesFlatten() { QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("read", readTransform) .putPcollections( "read.out", PCollection.newBuilder().setUniqueName("read.out").build()) @@ -514,8 +508,7 @@ public void fusesFlattenWithDifferentEnvironmentInputs() { .build(); Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("read", readTransform) .putPcollections("read.out", PCollection.newBuilder().setUniqueName("read.out").build()) .putTransforms("envRead", otherEnvRead) @@ -627,8 +620,7 @@ public void flattenWithHeterogeneousInputsAndOutputs() { PCollection flattenPc = PCollection.newBuilder().setUniqueName("flatten.out").build(); Components components = - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("pyRead", pyRead) .putPcollections( "pyRead.out", PCollection.newBuilder().setUniqueName("pyRead.out").build()) @@ -706,8 +698,7 @@ public void materializesWithDifferentEnvConsumer() { PCollection parDoOutput = PCollection.newBuilder().setUniqueName("parDo.out").build(); QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("parDo", parDoTransform) .putPcollections("parDo.out", parDoOutput) .putTransforms( @@ -772,8 +763,7 @@ public void 
materializesWithDifferentEnvSibling() { .build(); QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("read", readTransform) .putPcollections( "read.out", PCollection.newBuilder().setUniqueName("read.out").build()) @@ -853,8 +843,7 @@ public void materializesWithSideInputConsumer() { QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("read", readTransform) .putPcollections( "read.out", PCollection.newBuilder().setUniqueName("read.out").build()) @@ -957,8 +946,7 @@ public void sideInputIncludedInStage() { PCollection.newBuilder().setUniqueName("side_read.out").build(); QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("read", readTransform) .putPcollections( "read.out", PCollection.newBuilder().setUniqueName("read.out").build()) @@ -1042,8 +1030,7 @@ public void executableStageProducingSideInputMaterializesIt() { PCollection impulsePC = PCollection.newBuilder().setUniqueName("impulsePC").build(); QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("impulse", impulse) .putTransforms("createSide", createSide) .putTransforms("processMain", processMain) @@ -1098,8 +1085,7 @@ public void userStateIncludedInStage() { QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("read", readTransform) .putPcollections("read.out", userStateMainInputPCollection) .putTransforms( @@ -1156,8 +1142,7 @@ public void materializesWithGroupByKeyConsumer() { QueryablePipeline p = QueryablePipeline.forPrimitivesIn( - partialComponents - .toBuilder() + partialComponents.toBuilder() .putTransforms("read", readTransform) .putPcollections( "read.out", PCollection.newBuilder().setUniqueName("read.out").build()) diff --git a/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java b/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java index c93de2014798..44e8dbd2dc03 100644 --- a/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java +++ b/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java @@ -715,9 +715,7 @@ private Map loadRegisteredTransforms() { .collect(Collectors.toList())); RunnerApi.Components components = pipelineProto.getComponents(); RunnerApi.PTransform expandedTransform = - components - .getTransformsOrThrow(expandedTransformId) - .toBuilder() + components.getTransformsOrThrow(expandedTransformId).toBuilder() .setUniqueName(expandedTransformId) .clearOutputs() .putAllOutputs(outputMap) diff --git a/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/io/AvroIO.java b/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/io/AvroIO.java index 2ddde14bcc26..6b23695c21ae 100644 --- a/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/io/AvroIO.java +++ b/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/io/AvroIO.java @@ -1430,8 +1430,8 @@ public abstract static class TypedWrite abstract @Nullable FilenamePolicy getFilenamePolicy(); - abstract @Nullable DynamicAvroDestinations - getDynamicDestinations(); 
+ abstract @Nullable + DynamicAvroDestinations getDynamicDestinations(); abstract AvroSink.@Nullable DatumWriterFactory getDatumWriterFactory(); @@ -1442,6 +1442,7 @@ public abstract static class TypedWrite * https://avro.apache.org/docs/1.7.7/api/java/org/apache/avro/file/CodecFactory.html */ abstract SerializableAvroCodecFactory getCodec(); + /** Avro file metadata. */ abstract ImmutableMap getMetadata(); @@ -1466,7 +1467,9 @@ abstract Builder setTempDirectory( abstract Builder setShardTemplate( @Nullable String shardTemplate); - /** @deprecated Use {@link AvroIO.TypedWrite.Builder#setRecordClass(Class)} instead */ + /** + * @deprecated Use {@link AvroIO.TypedWrite.Builder#setRecordClass(Class)} instead + */ @Deprecated @SuppressWarnings("unchecked") public Builder setGenericRecords(boolean genericRecords) { @@ -2001,6 +2004,7 @@ public static DynamicAvroDestinations con return new ConstantAvroDestination<>( filenamePolicy, schema, metadata, codec, formatFunction, datumWriterFactory); } + ///////////////////////////////////////////////////////////////////////////// /** diff --git a/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/io/AvroSource.java b/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/io/AvroSource.java index c6c7fa426dbf..6d65c800c0a2 100644 --- a/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/io/AvroSource.java +++ b/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/io/AvroSource.java @@ -418,8 +418,7 @@ public Coder getOutputCoder() { } @VisibleForTesting - @Nullable - String getReaderSchemaString() { + @Nullable String getReaderSchemaString() { return mode.readerSchemaString; } diff --git a/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/schemas/utils/AvroUtils.java b/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/schemas/utils/AvroUtils.java index 99eb7f961901..1dfd04a9f1f8 100644 --- a/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/schemas/utils/AvroUtils.java +++ b/sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/schemas/utils/AvroUtils.java @@ -185,8 +185,7 @@ private static ReflectData newReflectData(Class clazz) { // getClassLoader returns @Nullable Classloader, but it's ok, as ReflectData constructor // actually tolerates null classloader argument despite lacking the @Nullable annotation @SuppressWarnings("nullness") - @NonNull - ClassLoader classLoader = clazz.getClassLoader(); + @NonNull ClassLoader classLoader = clazz.getClassLoader(); return new ReflectData(classLoader); } diff --git a/sdks/java/extensions/avro/src/test/java/org/apache/beam/sdk/extensions/avro/io/AvroIOTest.java b/sdks/java/extensions/avro/src/test/java/org/apache/beam/sdk/extensions/avro/io/AvroIOTest.java index 2a0bc36f6e9e..7e359e78dce7 100644 --- a/sdks/java/extensions/avro/src/test/java/org/apache/beam/sdk/extensions/avro/io/AvroIOTest.java +++ b/sdks/java/extensions/avro/src/test/java/org/apache/beam/sdk/extensions/avro/io/AvroIOTest.java @@ -281,7 +281,9 @@ private enum WriteMethod { AVROIO_WRITE, AVROIO_SINK_WITH_CLASS, AVROIO_SINK_WITH_SCHEMA, - /** @deprecated Test code for the deprecated {AvroIO.RecordFormatter}. */ + /** + * @deprecated Test code for the deprecated {AvroIO.RecordFormatter}. 
+ */ @Deprecated AVROIO_SINK_WITH_FORMATTER } diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/accumulators/AccumulatorProvider.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/accumulators/AccumulatorProvider.java index d3e396ff491e..9a5cd0bb1fdd 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/accumulators/AccumulatorProvider.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/accumulators/AccumulatorProvider.java @@ -52,6 +52,7 @@ static AccumulatorProvider.Factory of(Pipeline pipeline) { * @return Instance of a counter. */ Counter getCounter(String namespace, String name); + /** * Get an existing instance of a histogram or create a new one. * diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/type/TypeAware.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/type/TypeAware.java index bafa8fc43a33..40e02dfabdbb 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/type/TypeAware.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/type/TypeAware.java @@ -28,7 +28,9 @@ * are chained together and type of input(s) equals to types of previous transform output(s). Source * transforms needs to know which type of elements are producing explicitly. */ -/** @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. */ +/** + * @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. + */ @Deprecated public class TypeAware { diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/type/TypeAwareness.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/type/TypeAwareness.java index 5f2b2e8ecac9..d917e2ea776c 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/type/TypeAwareness.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/type/TypeAwareness.java @@ -21,7 +21,9 @@ import org.apache.beam.sdk.values.TypeDescriptor; /** Utility methods related to type awareness. */ -/** @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. */ +/** + * @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. + */ @Deprecated public class TypeAwareness { diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/util/Fold.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/util/Fold.java index 5ee82ca0b385..0074ddf5258a 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/util/Fold.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/util/Fold.java @@ -27,7 +27,9 @@ import org.apache.beam.sdk.extensions.euphoria.core.translate.SingleValueContext; /** Apply a folding function. */ -/** @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. 
*/ +/** + * @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. + */ @Deprecated public class Fold implements Serializable { diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/util/PCollectionLists.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/util/PCollectionLists.java index 13bf07da444f..952da93f9104 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/util/PCollectionLists.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/client/util/PCollectionLists.java @@ -22,7 +22,9 @@ import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Iterables; /** Utilities related to {@link PCollection}s. */ -/** @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. */ +/** + * @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. + */ @Deprecated public class PCollectionLists { diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/BeamAccumulatorProvider.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/BeamAccumulatorProvider.java index 33a5f87ae97e..9cd2d00cf9d2 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/BeamAccumulatorProvider.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/BeamAccumulatorProvider.java @@ -33,7 +33,9 @@ * Translation of accumulators to {@link Metrics}. Metric's namespace is taken from operator name. * So for better orientation in metrics it's recommended specify operator name with method .named(). */ -/** @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. */ +/** + * @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. + */ @Deprecated public class BeamAccumulatorProvider implements AccumulatorProvider { diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/FlatMapTranslator.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/FlatMapTranslator.java index 62d047a8db43..11aebfcf9b42 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/FlatMapTranslator.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/FlatMapTranslator.java @@ -39,7 +39,9 @@ * @param type of input * @param type of output */ -/** @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. */ +/** + * @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. 
+ */ @Deprecated public class FlatMapTranslator implements OperatorTranslator> { diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/provider/CompositeProvider.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/provider/CompositeProvider.java index ef82a2d08d42..a1543e9e2224 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/provider/CompositeProvider.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/translate/provider/CompositeProvider.java @@ -31,7 +31,9 @@ * An implementation of {@link TranslatorProvider} which allows to stack other {@link * TranslatorProvider TranslatorProviders} in order given on construction time. */ -/** @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. */ +/** + * @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. + */ @Deprecated public class CompositeProvider implements TranslatorProvider { diff --git a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/util/IOUtils.java b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/util/IOUtils.java index 438a8f2605f9..129a5a759d9d 100644 --- a/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/util/IOUtils.java +++ b/sdks/java/extensions/euphoria/src/main/java/org/apache/beam/sdk/extensions/euphoria/core/util/IOUtils.java @@ -21,7 +21,9 @@ import java.util.stream.Stream; /** Util class that helps iterate over methods throwing {@link IOException}. */ -/** @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. */ +/** + * @deprecated Use Java SDK directly, Euphoria is scheduled for removal in a future release. + */ @Deprecated public class IOUtils { diff --git a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/auth/CredentialFactory.java b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/auth/CredentialFactory.java index 6e1e71dbb4ed..8fe9521ad9cb 100644 --- a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/auth/CredentialFactory.java +++ b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/auth/CredentialFactory.java @@ -24,6 +24,5 @@ /** Construct an oauth credential to be used by the SDK and the SDK workers. 
*/ public interface CredentialFactory { - @Nullable - Credentials getCredential() throws IOException, GeneralSecurityException; + @Nullable Credentials getCredential() throws IOException, GeneralSecurityException; } diff --git a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/auth/GcpCredentialFactory.java b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/auth/GcpCredentialFactory.java index ea7b511f239a..d522d9028ae5 100644 --- a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/auth/GcpCredentialFactory.java +++ b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/auth/GcpCredentialFactory.java @@ -57,8 +57,7 @@ public static GcpCredentialFactory fromOptions(PipelineOptions options) { GcpOptions gcpOptions = options.as(GcpOptions.class); @Nullable String impersonateServiceAccountArg = gcpOptions.getImpersonateServiceAccount(); - @Nullable - List impersonateServiceAccountChain = + @Nullable List impersonateServiceAccountChain = impersonateServiceAccountArg == null ? null : Arrays.asList(impersonateServiceAccountArg.split(",")); diff --git a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/options/GcpOptions.java b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/options/GcpOptions.java index 4f36195fabac..0b3be3096159 100644 --- a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/options/GcpOptions.java +++ b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/options/GcpOptions.java @@ -109,7 +109,9 @@ public interface GcpOptions extends GoogleApiDebugOptions, PipelineOptions { + "This option is deprecated, and will be replaced by workerZone.") String getZone(); - /** @deprecated Use {@link #setWorkerZone} instead. */ + /** + * @deprecated Use {@link #setWorkerZone} instead. + */ @Deprecated void setZone(String value); @@ -223,15 +225,16 @@ public List create(PipelineOptions options) { + " either a single service account as the impersonator, or a" + " comma-separated list of service accounts to create an" + " impersonation delegation chain.") - @Nullable - String getImpersonateServiceAccount(); + @Nullable String getImpersonateServiceAccount(); void setImpersonateServiceAccount(String impersonateServiceAccount); /** Experiment to turn on the Streaming Engine experiment. */ String STREAMING_ENGINE_EXPERIMENT = "enable_streaming_engine"; - /** @deprecated Use STREAMING_ENGINE_EXPERIMENT instead. */ + /** + * @deprecated Use STREAMING_ENGINE_EXPERIMENT instead. + */ @Deprecated String WINDMILL_SERVICE_EXPERIMENT = "enable_windmill_service"; @Description( @@ -357,8 +360,7 @@ public Boolean create(PipelineOptions options) { */ @Description("A GCS path for storing temporary files in GCP.") @Default.InstanceFactory(GcpTempLocationFactory.class) - @Nullable - String getGcpTempLocation(); + @Nullable String getGcpTempLocation(); void setGcpTempLocation(String value); @@ -592,8 +594,7 @@ private static HttpRequestInitializer chainHttpRequestInitializer( "GCP Cloud KMS key for Dataflow pipelines. Also used by gcpTempLocation as the default key " + "for new buckets. 
Key format is: " + "projects//locations//keyRings//cryptoKeys/") - @Nullable - String getDataflowKmsKey(); + @Nullable String getDataflowKmsKey(); void setDataflowKmsKey(String dataflowKmsKey); } diff --git a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/options/GcsOptions.java b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/options/GcsOptions.java index 2da382a5b674..55cdd004b1e1 100644 --- a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/options/GcsOptions.java +++ b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/options/GcsOptions.java @@ -53,7 +53,9 @@ public GoogleCloudStorageReadOptions create(PipelineOptions options) { } } - /** @deprecated This option will be removed in a future release. */ + /** + * @deprecated This option will be removed in a future release. + */ @JsonIgnore @Description( "The GoogleCloudStorageReadOptions instance that should be used to read from Google Cloud Storage.") @@ -62,7 +64,9 @@ public GoogleCloudStorageReadOptions create(PipelineOptions options) { @Deprecated GoogleCloudStorageReadOptions getGoogleCloudStorageReadOptions(); - /** @deprecated This option will be removed in a future release. */ + /** + * @deprecated This option will be removed in a future release. + */ @Deprecated void setGoogleCloudStorageReadOptions(GoogleCloudStorageReadOptions value); @@ -104,8 +108,7 @@ public GoogleCloudStorageReadOptions create(PipelineOptions options) { + "information on the restrictions and performance implications of this value.\n\n" + "https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/util/src/main/java/" + "com/google/cloud/hadoop/util/AsyncWriteChannelOptions.java") - @Nullable - Integer getGcsUploadBufferSizeBytes(); + @Nullable Integer getGcsUploadBufferSizeBytes(); void setGcsUploadBufferSizeBytes(@Nullable Integer bytes); @@ -146,20 +149,17 @@ public GoogleCloudStorageReadOptions create(PipelineOptions options) { void setGcsPerformanceMetrics(Boolean reportPerformanceMetrics); @Description("Read timeout for gcs http requests") - @Nullable - Integer getGcsHttpRequestReadTimeout(); + @Nullable Integer getGcsHttpRequestReadTimeout(); void setGcsHttpRequestReadTimeout(@Nullable Integer timeoutMs); @Description("Write timeout for gcs http requests.") - @Nullable - Integer getGcsHttpRequestWriteTimeout(); + @Nullable Integer getGcsHttpRequestWriteTimeout(); void setGcsHttpRequestWriteTimeout(@Nullable Integer timeoutMs); @Description("Batching limit for rewrite ops which will copy data.") - @Nullable - Integer getGcsRewriteDataOpBatchLimit(); + @Nullable Integer getGcsRewriteDataOpBatchLimit(); void setGcsRewriteDataOpBatchLimit(@Nullable Integer timeoutMs); diff --git a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java index ed727d495cf8..5ed97d935c66 100644 --- a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java +++ b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java @@ -85,13 +85,17 @@ public GcsUtil create(PipelineOptions options) { } } - /** @deprecated use {@link GcsPath#getNonWildcardPrefix(String)} instead. 
*/ + /** + * @deprecated use {@link GcsPath#getNonWildcardPrefix(String)} instead. + */ @Deprecated public static String getNonWildcardPrefix(String globExp) { return GcsPath.getNonWildcardPrefix(globExp); } - /** @deprecated use {@link GcsPath#isWildcard(GcsPath)} instead. */ + /** + * @deprecated use {@link GcsPath#isWildcard(GcsPath)} instead. + */ @Deprecated public static boolean isWildcard(GcsPath spec) { return GcsPath.isWildcard(spec); @@ -120,7 +124,9 @@ public long fileSize(GcsPath path) throws IOException { return delegate.fileSize(path); } - /** @deprecated use {@link #getBlob(GcsPath, BlobGetOption...)}. */ + /** + * @deprecated use {@link #getBlob(GcsPath, BlobGetOption...)}. + */ @Deprecated public StorageObject getObject(GcsPath gcsPath) throws IOException { return delegate.getObject(gcsPath); @@ -133,7 +139,9 @@ public Blob getBlob(GcsPath gcsPath, BlobGetOption... options) throws IOExceptio throw new IOException("GcsUtil V2 not initialized."); } - /** @deprecated use {@link #getBlobs(Iterable, BlobGetOption...)}. */ + /** + * @deprecated use {@link #getBlobs(Iterable, BlobGetOption...)}. + */ @Deprecated public List getObjects(List gcsPaths) throws IOException { List legacy = delegate.getObjects(gcsPaths); @@ -150,14 +158,18 @@ public List getBlobs(Iterable gcsPaths, BlobGetOption... op throw new IOException("GcsUtil V2 not initialized."); } - /** @deprecated use {@link #listBlobs(String, String, String, BlobListOption...)}. */ + /** + * @deprecated use {@link #listBlobs(String, String, String, BlobListOption...)}. + */ @Deprecated public Objects listObjects(String bucket, String prefix, @Nullable String pageToken) throws IOException { return delegate.listObjects(bucket, prefix, pageToken); } - /** @deprecated use {@link #listBlobs(String, String, String, String, BlobListOption...)}. */ + /** + * @deprecated use {@link #listBlobs(String, String, String, String, BlobListOption...)}. + */ @Deprecated public Objects listObjects( String bucket, String prefix, @Nullable String pageToken, @Nullable String delimiter) @@ -201,13 +213,17 @@ public SeekableByteChannel openV2(GcsPath path, BlobSourceOption... options) thr throw new IOException("GcsUtil V2 not initialized."); } - /** @deprecated Use {@link #create(GcsPath, CreateOptions)} instead. */ + /** + * @deprecated Use {@link #create(GcsPath, CreateOptions)} instead. + */ @Deprecated public WritableByteChannel create(GcsPath path, String type) throws IOException { return delegate.create(path, type); } - /** @deprecated Use {@link #create(GcsPath, CreateOptions)} instead. */ + /** + * @deprecated Use {@link #create(GcsPath, CreateOptions)} instead. + */ @Deprecated public WritableByteChannel create(GcsPath path, String type, Integer uploadBufferSizeBytes) throws IOException { @@ -302,7 +318,9 @@ public long bucketOwner(GcsPath path) throws IOException { return delegate.bucketOwner(path); } - /** @deprecated use {@link #createBucket(BucketInfo)}. */ + /** + * @deprecated use {@link #createBucket(BucketInfo)}. + */ @Deprecated public void createBucket(String projectId, Bucket bucket) throws IOException { delegate.createBucket(projectId, bucket); @@ -316,7 +334,9 @@ public void createBucket(BucketInfo bucketInfo) throws IOException { } } - /** @deprecated use {@link #getBucketWithOptions(GcsPath, BucketGetOption...)} . */ + /** + * @deprecated use {@link #getBucketWithOptions(GcsPath, BucketGetOption...)} . 
+ */ @Deprecated public @Nullable Bucket getBucket(GcsPath path) throws IOException { return delegate.getBucket(path); @@ -330,7 +350,9 @@ public void createBucket(BucketInfo bucketInfo) throws IOException { throw new IOException("GcsUtil V2 not initialized."); } - /** @deprecated use {@link #removeBucket(BucketInfo)}. */ + /** + * @deprecated use {@link #removeBucket(BucketInfo)}. + */ @Deprecated public void removeBucket(Bucket bucket) throws IOException { delegate.removeBucket(bucket); diff --git a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtilV1.java b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtilV1.java index 1ade4be6fdb5..2eb13ac41432 100644 --- a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtilV1.java +++ b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtilV1.java @@ -166,8 +166,10 @@ public GcsUtilV1 create(PipelineOptions options) { /** Maximum number of requests permitted in a GCS batch request. */ private static final int MAX_REQUESTS_PER_BATCH = 100; + /** Default maximum number of requests permitted in a GCS batch request where data is copied. */ private static final int MAX_REQUESTS_PER_COPY_BATCH = 10; + /** Maximum number of concurrent batches of requests executing on GCS. */ private static final int MAX_CONCURRENT_BATCHES = 256; @@ -195,6 +197,7 @@ public boolean shouldRetry(IOException e) { private Supplier batchRequestSupplier; private final HttpRequestInitializer httpRequestInitializer; + /** Buffer size for GCS uploads (in bytes). */ private final @Nullable Integer uploadBufferSizeBytes; @@ -364,8 +367,7 @@ public List expand(GcsPath gcsPattern) throws IOException { } @VisibleForTesting - @Nullable - Integer getUploadBufferSizeBytes() { + @Nullable Integer getUploadBufferSizeBytes() { return uploadBufferSizeBytes; } @@ -611,14 +613,18 @@ SeekableByteChannel open(GcsPath path, GoogleCloudStorageReadOptions readOptions } } - /** @deprecated Use {@link #create(GcsPath, CreateOptions)} instead. */ + /** + * @deprecated Use {@link #create(GcsPath, CreateOptions)} instead. + */ @Deprecated public WritableByteChannel create(GcsPath path, String type) throws IOException { CreateOptions.Builder builder = CreateOptions.builder().setContentType(type); return create(path, builder.build()); } - /** @deprecated Use {@link #create(GcsPath, CreateOptions)} instead. */ + /** + * @deprecated Use {@link #create(GcsPath, CreateOptions)} instead. + */ @Deprecated public WritableByteChannel create(GcsPath path, String type, Integer uploadBufferSizeBytes) throws IOException { @@ -671,8 +677,7 @@ public abstract static class Builder { */ public WritableByteChannel create(GcsPath path, CreateOptions options) throws IOException { AsyncWriteChannelOptions wcOptions = googleCloudStorageOptions.getWriteChannelOptions(); - @Nullable - Integer uploadBufferSizeBytes = + @Nullable Integer uploadBufferSizeBytes = options.getUploadBufferSizeBytes() != null ? 
options.getUploadBufferSizeBytes() : getUploadBufferSizeBytes(); @@ -834,8 +839,7 @@ void verifyBucketAccessible(GcsPath path, BackOff backoff, Sleeper sleeper) thro } @VisibleForTesting - @Nullable - Bucket getBucket(GcsPath path, BackOff backoff, Sleeper sleeper) throws IOException { + @Nullable Bucket getBucket(GcsPath path, BackOff backoff, Sleeper sleeper) throws IOException { Storage.Buckets.Get getBucket = storageClient.buckets().get(path.getBucket()); try { @@ -1181,9 +1185,9 @@ public void copy(Iterable srcFilenames, Iterable destFilenames) rewriteHelper( srcFilenames, destFilenames, - /*deleteSource=*/ false, - /*ignoreMissingSource=*/ false, - /*ignoreExistingDest=*/ false); + /* deleteSource= */ false, + /* ignoreMissingSource= */ false, + /* ignoreExistingDest= */ false); } public void rename( @@ -1197,7 +1201,11 @@ public void rename( final boolean ignoreExistingDest = moveOptionSet.contains(StandardMoveOptions.SKIP_IF_DESTINATION_EXISTS); rewriteHelper( - srcFilenames, destFilenames, /*deleteSource=*/ true, ignoreMissingSrc, ignoreExistingDest); + srcFilenames, + destFilenames, + /* deleteSource= */ true, + ignoreMissingSrc, + ignoreExistingDest); } private void rewriteHelper( diff --git a/sdks/java/extensions/ml/src/main/java/org/apache/beam/sdk/extensions/ml/DLPReidentifyText.java b/sdks/java/extensions/ml/src/main/java/org/apache/beam/sdk/extensions/ml/DLPReidentifyText.java index b5678d6c77b5..206ab5e02c45 100644 --- a/sdks/java/extensions/ml/src/main/java/org/apache/beam/sdk/extensions/ml/DLPReidentifyText.java +++ b/sdks/java/extensions/ml/src/main/java/org/apache/beam/sdk/extensions/ml/DLPReidentifyText.java @@ -137,6 +137,7 @@ public abstract static class Builder { * @param batchSize Size of input elements batch to be sent to Cloud DLP service in one request. */ public abstract Builder setBatchSizeBytes(int batchSize); + /** * Sets list of column names if the input KV value is a delimited row. 
* diff --git a/sdks/java/extensions/ml/src/test/java/org/apache/beam/sdk/extensions/ml/CloudVisionTest.java b/sdks/java/extensions/ml/src/test/java/org/apache/beam/sdk/extensions/ml/CloudVisionTest.java index 27f68b46e583..e7cf632ad927 100644 --- a/sdks/java/extensions/ml/src/test/java/org/apache/beam/sdk/extensions/ml/CloudVisionTest.java +++ b/sdks/java/extensions/ml/src/test/java/org/apache/beam/sdk/extensions/ml/CloudVisionTest.java @@ -33,7 +33,8 @@ public class CloudVisionTest { private static final String TEST_URI = "test_uri"; - private static final ByteString TEST_BYTES = ByteString.copyFromUtf8("12345");; + private static final ByteString TEST_BYTES = ByteString.copyFromUtf8("12345"); + ; private List features = Collections.singletonList(Feature.newBuilder().setType(Feature.Type.LABEL_DETECTION).build()); diff --git a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/ContiguousSequenceRange.java b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/ContiguousSequenceRange.java index 409379916d0a..45f9836e5bbf 100644 --- a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/ContiguousSequenceRange.java +++ b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/ContiguousSequenceRange.java @@ -91,8 +91,8 @@ private ContiguousSequenceRangeCoder() {} @Override public void encode( ContiguousSequenceRange value, @UnknownKeyFor @NonNull @Initialized OutputStream outStream) - throws @UnknownKeyFor @NonNull @Initialized CoderException, @UnknownKeyFor @NonNull - @Initialized IOException { + throws @UnknownKeyFor @NonNull @Initialized CoderException, + @UnknownKeyFor @NonNull @Initialized IOException { VarLongCoder.of().encode(value.getStart(), outStream); VarLongCoder.of().encode(value.getEnd(), outStream); InstantCoder.of().encode(value.getTimestamp(), outStream); @@ -100,8 +100,8 @@ public void encode( @Override public ContiguousSequenceRange decode(@UnknownKeyFor @NonNull @Initialized InputStream inStream) - throws @UnknownKeyFor @NonNull @Initialized CoderException, @UnknownKeyFor @NonNull - @Initialized IOException { + throws @UnknownKeyFor @NonNull @Initialized CoderException, + @UnknownKeyFor @NonNull @Initialized IOException { long start = VarLongCoder.of().decode(inStream); long end = VarLongCoder.of().decode(inStream); Instant timestamp = InstantCoder.of().decode(inStream); diff --git a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/EventExaminer.java b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/EventExaminer.java index b5de67f16ced..e0f5941387d6 100644 --- a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/EventExaminer.java +++ b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/EventExaminer.java @@ -48,8 +48,7 @@ public interface EventExaminer> * @param event the first event in the sequence. * @return the state to persist. */ - @NonNull - StateT createStateOnInitialEvent(EventT event); + @NonNull StateT createStateOnInitialEvent(EventT event); /** * Is this event the last expected event for a given key and window? 
diff --git a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/combiner/SequenceRangeAccumulator.java b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/combiner/SequenceRangeAccumulator.java index 70c22f54056c..b22840400208 100644 --- a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/combiner/SequenceRangeAccumulator.java +++ b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/combiner/SequenceRangeAccumulator.java @@ -264,8 +264,8 @@ private SequenceRangeAccumulatorCoder() {} @Override public void encode( SequenceRangeAccumulator value, @UnknownKeyFor @NonNull @Initialized OutputStream outStream) - throws @UnknownKeyFor @NonNull @Initialized CoderException, @UnknownKeyFor @NonNull - @Initialized IOException { + throws @UnknownKeyFor @NonNull @Initialized CoderException, + @UnknownKeyFor @NonNull @Initialized IOException { numberOfRangesCoder.encode(value.numberOfRanges(), outStream); initialSequenceCoder.encode(value.initialSequence, outStream); for (Entry> entry : value.data.entrySet()) { @@ -278,8 +278,8 @@ public void encode( @Override public SequenceRangeAccumulator decode( @UnknownKeyFor @NonNull @Initialized InputStream inStream) - throws @UnknownKeyFor @NonNull @Initialized CoderException, @UnknownKeyFor @NonNull - @Initialized IOException { + throws @UnknownKeyFor @NonNull @Initialized CoderException, + @UnknownKeyFor @NonNull @Initialized IOException { SequenceRangeAccumulator result = new SequenceRangeAccumulator(); int numberOfRanges = numberOfRangesCoder.decode(inStream); result.initialSequence = initialSequenceCoder.decode(inStream); diff --git a/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/Event.java b/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/Event.java index 3cf879d8239b..1c47ec8de7e9 100644 --- a/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/Event.java +++ b/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/Event.java @@ -36,12 +36,18 @@ public static Event create(long sequence, String groupId, String value) { return new AutoValue_Event(sequence, groupId, value); } - /** @return event sequence number */ + /** + * @return event sequence number + */ public abstract long getSequence(); - /** @return the group id event is associated with */ + /** + * @return the group id event is associated with + */ public abstract String getKey(); - /** @return value of the event */ + /** + * @return value of the event + */ public abstract String getValue(); } diff --git a/sdks/java/extensions/protobuf/src/main/java/org/apache/beam/sdk/extensions/protobuf/ProtoBeamConverter.java b/sdks/java/extensions/protobuf/src/main/java/org/apache/beam/sdk/extensions/protobuf/ProtoBeamConverter.java index 559b8dd1b518..b050727309c1 100644 --- a/sdks/java/extensions/protobuf/src/main/java/org/apache/beam/sdk/extensions/protobuf/ProtoBeamConverter.java +++ b/sdks/java/extensions/protobuf/src/main/java/org/apache/beam/sdk/extensions/protobuf/ProtoBeamConverter.java @@ -316,13 +316,11 @@ static BeamToProtoConverter identity() { return (BeamToProtoConverter) IDENTITY; } - @NonNull - ProtoT convert(@NonNull BeamT value); + @NonNull ProtoT convert(@NonNull BeamT value); } interface FromProtoGetter { - @Nullable - BeamT getFromProto(Message message); + @Nullable BeamT getFromProto(Message message); } @FunctionalInterface @@ -333,8 +331,7 
@@ static ProtoToBeamConverter identity() { return (ProtoToBeamConverter) IDENTITY; } - @NonNull - BeamT convert(@NonNull ProtoT protoValue); + @NonNull BeamT convert(@NonNull ProtoT protoValue); } interface ToProtoSetter { diff --git a/sdks/java/extensions/protobuf/src/main/java/org/apache/beam/sdk/extensions/protobuf/ProtoDynamicMessageSchema.java b/sdks/java/extensions/protobuf/src/main/java/org/apache/beam/sdk/extensions/protobuf/ProtoDynamicMessageSchema.java index 1caeca339d39..524bbf6bf535 100644 --- a/sdks/java/extensions/protobuf/src/main/java/org/apache/beam/sdk/extensions/protobuf/ProtoDynamicMessageSchema.java +++ b/sdks/java/extensions/protobuf/src/main/java/org/apache/beam/sdk/extensions/protobuf/ProtoDynamicMessageSchema.java @@ -26,7 +26,9 @@ import org.apache.beam.sdk.values.Row; import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Preconditions; -/** @deprecated Use {@link ProtoBeamConverter} */ +/** + * @deprecated Use {@link ProtoBeamConverter} + */ @SuppressWarnings({ "rawtypes", // TODO(https://github.com/apache/beam/issues/20447) }) diff --git a/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java b/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java index fb44f8272154..3d368cb0087f 100644 --- a/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java +++ b/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java @@ -180,8 +180,8 @@ PythonExternalTransform from(String transformName, String expan * @return updated wrapper for the cross-language transform. */ public PythonExternalTransform withArgs(@NonNull Object... 
args) { - @Nullable - Object @NonNull [] result = Arrays.copyOf(this.argsArray, this.argsArray.length + args.length); + @Nullable Object @NonNull [] result = + Arrays.copyOf(this.argsArray, this.argsArray.length + args.length); System.arraycopy(args, 0, result, this.argsArray.length, args.length); this.argsArray = result; return this; diff --git a/sdks/java/extensions/sql/datacatalog/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/datacatalog/DataCatalogTableProvider.java b/sdks/java/extensions/sql/datacatalog/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/datacatalog/DataCatalogTableProvider.java index 7ac67a9fc9dc..19cc37b3f0dd 100644 --- a/sdks/java/extensions/sql/datacatalog/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/datacatalog/DataCatalogTableProvider.java +++ b/sdks/java/extensions/sql/datacatalog/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/datacatalog/DataCatalogTableProvider.java @@ -168,10 +168,7 @@ public static DataCatalogClient createDataCatalogClient(DataCatalogPipelineOptio .setRetryableCodes( ImmutableSet.of(Code.PERMISSION_DENIED, Code.DEADLINE_EXCEEDED, Code.UNAVAILABLE)) .setRetrySettings( - builder - .lookupEntrySettings() - .getRetrySettings() - .toBuilder() + builder.lookupEntrySettings().getRetrySettings().toBuilder() .setMaxRetryDelay(Duration.ofMinutes(1L)) .build()); builder @@ -179,10 +176,7 @@ public static DataCatalogClient createDataCatalogClient(DataCatalogPipelineOptio .setRetryableCodes( ImmutableSet.of(Code.PERMISSION_DENIED, Code.DEADLINE_EXCEEDED, Code.UNAVAILABLE)) .setRetrySettings( - builder - .updateEntrySettings() - .getRetrySettings() - .toBuilder() + builder.updateEntrySettings().getRetrySettings().toBuilder() .setMaxRetryDelay(Duration.ofMinutes(1L)) .build()); diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlCli.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlCli.java index eee1ca442972..22988f6c4d72 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlCli.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlCli.java @@ -35,6 +35,7 @@ @Internal public class BeamSqlCli { private BeamSqlEnv env; + /** The store which persists all the table meta data. */ private MetaStore metaStore; diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/SqlTransform.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/SqlTransform.java index 8365f56e27de..bf6d78bf7f6e 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/SqlTransform.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/SqlTransform.java @@ -262,6 +262,7 @@ public SqlTransform withDdlString(String ddlString) { public SqlTransform withAutoLoading(boolean autoLoading) { return toBuilder().setAutoLoading(autoLoading).build(); } + /** * register a UDF function used in this query. 
* diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java index d84783118bbd..14486f093378 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java @@ -199,6 +199,7 @@ public BeamSqlEnvBuilder setRuleSets(Collection ruleSets) { this.ruleSets = ruleSets; return this; } + /** Register a UDF function which can be used in SQL expression. */ public BeamSqlEnvBuilder addUdf(String functionName, Class clazz, String method) { functionSet.add(new SimpleEntry<>(functionName, UdfImpl.create(clazz, method))); diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/CatalogManagerSchema.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/CatalogManagerSchema.java index 098b72b28695..8b8f0bb71a93 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/CatalogManagerSchema.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/CatalogManagerSchema.java @@ -170,8 +170,8 @@ public void maybeRegisterProvider(TableName path, String type) { @Override public @Nullable Table getTable(String table) { - @Nullable - CatalogSchema catalogSchema = catalogSubSchemas.get(catalogManager.currentCatalog().name()); + @Nullable CatalogSchema catalogSchema = + catalogSubSchemas.get(catalogManager.currentCatalog().name()); return catalogSchema != null ? catalogSchema.getTable(table) : null; } diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/CatalogSchema.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/CatalogSchema.java index 57d462670ece..b8a2b0031135 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/CatalogSchema.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/CatalogSchema.java @@ -53,6 +53,7 @@ public class CatalogSchema implements Schema { private final JdbcConnection connection; private final Catalog catalog; private final Map subSchemas = new HashMap<>(); + /** * Creates a Calcite {@link Schema} representing a {@link CatalogManager}. This will typically be * the root node of a pipeline. diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/UdfImplReflectiveFunctionBase.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/UdfImplReflectiveFunctionBase.java index fa6293bbb175..93a3ca6661ff 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/UdfImplReflectiveFunctionBase.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/UdfImplReflectiveFunctionBase.java @@ -39,6 +39,7 @@ public abstract class UdfImplReflectiveFunctionBase implements Function { /** Method that implements the function. */ public final Method method; + /** Types of parameter for the function call. 
*/ public final List parameters; diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/cep/CEPLiteral.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/cep/CEPLiteral.java index 7f33afdcf511..fc67b63e2817 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/cep/CEPLiteral.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/cep/CEPLiteral.java @@ -314,7 +314,8 @@ public int compareTo(Object other) { public int compareTo(Object other) { throw new IllegalStateException("the class must be subclassed properly to use this method"); - }; + } + ; @Override public boolean equals(Object other) { diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/cep/PatternCondition.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/cep/PatternCondition.java index cd9dc75d5f8d..222e8fe9c0b5 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/cep/PatternCondition.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/cep/PatternCondition.java @@ -27,7 +27,8 @@ public abstract class PatternCondition implements Serializable { @SuppressWarnings("unused") // Affects a public api - PatternCondition(CEPPattern pattern) {}; + PatternCondition(CEPPattern pattern) {} + ; public abstract boolean eval(Row eleRow); } diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/parser/SqlCreateDatabase.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/parser/SqlCreateDatabase.java index 877b6721152c..3c8cd0124495 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/parser/SqlCreateDatabase.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/parser/SqlCreateDatabase.java @@ -79,8 +79,8 @@ public void execute(CalcitePrepare.Context context) { Schema schema = pair.left.schema; List components = Lists.newArrayList(Splitter.on('.').split(databaseName.toString())); - @Nullable - String catalogName = components.size() > 1 ? components.get(components.size() - 2) : null; + @Nullable String catalogName = + components.size() > 1 ? 
components.get(components.size() - 2) : null; if (!(schema instanceof CatalogManagerSchema)) { throw SqlUtil.newContextException( diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java index a1a9ae861b60..35f9fd49415c 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java @@ -57,7 +57,8 @@ default void withErrorsTransformer(@Nullable PTransform, POutpu default List getPCollectionInputs() { return getInputs(); - }; + } + ; PTransform, PCollection> buildPTransform(); diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnnestRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnnestRel.java index a37ade47f925..dc97ee7cf58c 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnnestRel.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnnestRel.java @@ -134,6 +134,7 @@ private UnnestFn(Schema outputSchema, List unnestIndices) { this.outputSchema = outputSchema; this.unnestIndices = unnestIndices; } + /** * This is recursive call to get all the values of the nested rows. The recursion is bounded by * the amount of nesting with in the data. This mirrors the unnest behavior of calcite towards diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamBuiltinAggregations.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamBuiltinAggregations.java index 3fc299bd5a33..9f9c3aa58b4f 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamBuiltinAggregations.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamBuiltinAggregations.java @@ -561,6 +561,7 @@ static class BitOr extends CombineFn { static class Accum implements Serializable { /** True if no inputs have been seen yet. */ boolean isEmpty = true; + /** The bitwise-or of the inputs seen so far. */ long bitOr = 0L; } @@ -609,6 +610,7 @@ static class BitAnd extends CombineFn { static class Accum implements Serializable { /** True if no inputs have been seen yet. */ boolean isEmpty = true; + /** The bitwise-and of the inputs seen so far. */ long bitAnd = -1L; } @@ -652,6 +654,7 @@ public static class BitXOr extends CombineFn databases(); diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java index 858dbcd5bf76..89e7a00f14f0 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java @@ -46,8 +46,7 @@ public interface CatalogManager { Catalog currentCatalog(); /** Attempts to fetch the catalog with this name. May produce null if it does not exist. */ - @Nullable - Catalog getCatalog(String name); + @Nullable Catalog getCatalog(String name); /** Drops the catalog with this name. 
No-op if the catalog already does not exist. */ void dropCatalog(String name); diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java index 35ea74996f31..61cd4b167a10 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java @@ -79,6 +79,7 @@ }) public class MongoDbTable extends SchemaBaseBeamTable implements Serializable { private static final Logger LOG = LoggerFactory.getLogger(MongoDbTable.class); + // Should match: mongodb://username:password@localhost:27017/database/collection @VisibleForTesting final Pattern locationPattern = diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestBoundedTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestBoundedTable.java index 0fcce72578d9..5f0c060a3d3a 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestBoundedTable.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestBoundedTable.java @@ -37,6 +37,7 @@ public class TestBoundedTable extends TestTable { /** rows written to this table. */ private static final ConcurrentLinkedQueue CONTENT = new ConcurrentLinkedQueue<>(); + /** rows flow out from this table. */ private final List rows = new ArrayList<>(); diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestUnboundedTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestUnboundedTable.java index c18df6b0d3f8..569e9ccfd07f 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestUnboundedTable.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestUnboundedTable.java @@ -39,6 +39,7 @@ public class TestUnboundedTable extends TestTable { /** rows flow out from this table with the specified watermark instant. */ private final List>> timestampedRows = new ArrayList<>(); + /** specify the index of column in the row which stands for the event time field. 
*/ private int timestampField; diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/store/InMemoryMetaStore.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/store/InMemoryMetaStore.java index 8892cd889fd1..7fd74b6b85af 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/store/InMemoryMetaStore.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/store/InMemoryMetaStore.java @@ -89,8 +89,8 @@ protected void validateTableType(Table table) { return; } // check if there is a nested metastore that supports this table - @Nullable - InMemoryMetaStore nestedMemoryMetastore = (InMemoryMetaStore) providers.get(getTableType()); + @Nullable InMemoryMetaStore nestedMemoryMetastore = + (InMemoryMetaStore) providers.get(getTableType()); if (nestedMemoryMetastore != null) { nestedMemoryMetastore.validateTableType(table); } else { diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtilsTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtilsTest.java index 481a700c0c99..dde5a1f89ae3 100644 --- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtilsTest.java +++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtilsTest.java @@ -57,7 +57,7 @@ Map calciteRowTypeFields(Schema schema) { x -> x, x -> dataType - .getField(x, /*caseSensitive=*/ true, /*elideRecord=*/ false) + .getField(x, /* caseSensitive= */ true, /* elideRecord= */ false) .getType())); } diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/TestSchemaIOTableProviderWrapper.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/TestSchemaIOTableProviderWrapper.java index f005f98d7bdc..a8ca6cde5da6 100644 --- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/TestSchemaIOTableProviderWrapper.java +++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/TestSchemaIOTableProviderWrapper.java @@ -113,6 +113,7 @@ private static class TestProjectionProducer extends PTransform>> { /** The schema of the input data. */ private final Schema schema; + /** The fields to be projected. */ private final FieldAccessDescriptor fieldAccessDescriptor; diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/kafka/KafkaTestTable.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/kafka/KafkaTestTable.java index d0f6427a262e..c8ec946de1c6 100644 --- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/kafka/KafkaTestTable.java +++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/kafka/KafkaTestTable.java @@ -138,6 +138,7 @@ public synchronized void assign(final Collection assigned) { super.assign(realPartitions); assignedPartitions.set(ImmutableList.copyOf(realPartitions)); } + // Override offsetsForTimes() in order to look up the offsets by timestamp. 
@Override public synchronized Map offsetsForTimes( diff --git a/sdks/java/extensions/yaml/src/main/java/org/apache/beam/sdk/extensions/yaml/YamlTransform.java b/sdks/java/extensions/yaml/src/main/java/org/apache/beam/sdk/extensions/yaml/YamlTransform.java index 11245c4b4ca3..297990d57728 100644 --- a/sdks/java/extensions/yaml/src/main/java/org/apache/beam/sdk/extensions/yaml/YamlTransform.java +++ b/sdks/java/extensions/yaml/src/main/java/org/apache/beam/sdk/extensions/yaml/YamlTransform.java @@ -51,6 +51,7 @@ public class YamlTransform /** The YAML definition of this transform. */ private final String yamlDefinition; + /** * If non-null, the set of input tags that are expected to be passed to this transform. * diff --git a/sdks/java/extensions/zetasketch/src/test/java/org/apache/beam/sdk/extensions/zetasketch/ApproximateCountDistinctTest.java b/sdks/java/extensions/zetasketch/src/test/java/org/apache/beam/sdk/extensions/zetasketch/ApproximateCountDistinctTest.java index 559ad5df99a7..49cbbb9789c9 100644 --- a/sdks/java/extensions/zetasketch/src/test/java/org/apache/beam/sdk/extensions/zetasketch/ApproximateCountDistinctTest.java +++ b/sdks/java/extensions/zetasketch/src/test/java/org/apache/beam/sdk/extensions/zetasketch/ApproximateCountDistinctTest.java @@ -63,6 +63,7 @@ public void testIntegerBuilder() { PAssert.that(result).containsInAnyOrder(1L); p.run(); } + /** Test correct Builder is returned from Generic type. * */ @Test public void testStringBuilder() { @@ -75,6 +76,7 @@ public void testStringBuilder() { PAssert.that(result).containsInAnyOrder(1L); p.run(); } + /** Test correct Builder is returned from Generic type. * */ @Test public void testLongBuilder() { @@ -86,6 +88,7 @@ public void testLongBuilder() { PAssert.that(result).containsInAnyOrder(1L); p.run(); } + /** Test correct Builder is returned from Generic type. * */ @Test public void testBytesBuilder() { diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java index 0fbc92c1f7d8..c55cead363bd 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java @@ -301,6 +301,7 @@ public final void addRunnerForPTransform(Context context) throws IOException { * otherwise. */ private RestrictionTracker currentTracker; + /** * If non-null, set to true after currentTracker has had a tryClaim issued on it. Used to ignore * checkpoint split requests if no progress was made. @@ -1277,8 +1278,8 @@ private void processTimer( private boolean timerModified( Table> modifiedTimerIds, String timerFamilyOrId, Timer timer) { - @Nullable - Timer modifiedTimer = modifiedTimerIds.get(timerFamilyOrId, timer.getDynamicTimerTag()); + @Nullable Timer modifiedTimer = + modifiedTimerIds.get(timerFamilyOrId, timer.getDynamicTimerTag()); return modifiedTimer != null && !modifiedTimer.equals(timer); } diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/HandlesSplits.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/HandlesSplits.java index af7638d61cab..a95bdf83b5bd 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/HandlesSplits.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/HandlesSplits.java @@ -31,8 +31,7 @@ public interface HandlesSplits { /** Returns null if the split was unsuccessful. 
*/ - @Nullable - SplitResult trySplit(double fractionOfRemainder); + @Nullable SplitResult trySplit(double fractionOfRemainder); /** Returns the current progress of the active element as a fraction between 0.0 and 1.0. */ double getProgress(); diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ExecutionStateSampler.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ExecutionStateSampler.java index 8261839c58cc..e7a8b5c7aa30 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ExecutionStateSampler.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ExecutionStateSampler.java @@ -531,6 +531,7 @@ private class ExecutionStateImpl implements ExecutionState { @GuardedBy("this") private long lastReportedValue; + // Read and written by the bundle processing thread frequently. private @Nullable ExecutionStateImpl previousState; diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/Metrics.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/Metrics.java index 5c8e2a0758e1..a763a66300ae 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/Metrics.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/Metrics.java @@ -81,8 +81,10 @@ public static BundleDistribution bundleProcessingThreadDistribution( private static class BundleProcessingThreadCounter implements BundleCounter { private final MetricName name; private final String shortId; + /** Guarded by {@link BundleProcessor#getProgressRequestLock}. */ private boolean hasReportedValue; + /** Guarded by {@link BundleProcessor#getProgressRequestLock}. */ private long lastReportedValue; diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ProcessBundleHandler.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ProcessBundleHandler.java index 5a57b137bf6b..2617783682fe 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ProcessBundleHandler.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ProcessBundleHandler.java @@ -1062,13 +1062,13 @@ public static BundleProcessor create( metricsEnvironmentStateForBundle, stateTracker, beamFnStateClient, - /*inboundEndpointApiServiceDescriptors=*/ new ArrayList<>(), - /*inboundDataEndpoints=*/ new ArrayList<>(), - /*timerEndpoints=*/ new ArrayList<>(), + /* inboundEndpointApiServiceDescriptors= */ new ArrayList<>(), + /* inboundDataEndpoints= */ new ArrayList<>(), + /* timerEndpoints= */ new ArrayList<>(), bundleFinalizationCallbackRegistrations, - /*channelRoots=*/ new ArrayList<>(), + /* channelRoots= */ new ArrayList<>(), // We rely on the stable iteration order of outboundAggregators, thus using LinkedHashMap. - /*outboundAggregators=*/ new LinkedHashMap<>(), + /* outboundAggregators= */ new LinkedHashMap<>(), runnerCapabilities, new ReentrantLock()); } diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BagUserState.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BagUserState.java index de5031085b8c..7f61008bc66b 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BagUserState.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BagUserState.java @@ -139,8 +139,7 @@ public void asyncClose() throws Exception { // Respect the batching limit by outputting the previous batch of // elements. 
beamFnStateClient.handle( - request - .toBuilder() + request.toBuilder() .setAppend( StateAppendRequest.newBuilder() .setData(out.consumePrefixToByteString(previousSize)))); @@ -150,15 +149,13 @@ public void asyncClose() throws Exception { // exceeding runner state limits due to large elements, we output // without additional batching. beamFnStateClient.handle( - request - .toBuilder() + request.toBuilder() .setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset()))); } } if (!out.isEmpty()) { beamFnStateClient.handle( - request - .toBuilder() + request.toBuilder() .setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset()))); } } diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/MultimapUserState.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/MultimapUserState.java index 83d78ff836c7..c62b7ab9c8ad 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/MultimapUserState.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/MultimapUserState.java @@ -442,8 +442,7 @@ private void startStateApiWrites() { for (KV> entry : pendingAdds.values()) { StateRequest request = createUserStateRequest(entry.getKey()); beamFnStateClient.handle( - request - .toBuilder() + request.toBuilder() .setAppend( StateAppendRequest.newBuilder().setData(encodeValues(entry.getValue())))); } diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/StateFetchingIterators.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/StateFetchingIterators.java index e01a25d3dec3..1b4d7ec371ba 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/StateFetchingIterators.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/StateFetchingIterators.java @@ -789,8 +789,7 @@ public void prefetch() { public CompletableFuture loadPrefetchedResponse(ByteString continuationToken) { return beamFnStateClient.handle( - stateRequestForFirstChunk - .toBuilder() + stateRequestForFirstChunk.toBuilder() .setGet(StateGetRequest.newBuilder().setContinuationToken(continuationToken))); } diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/status/MemoryMonitor.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/status/MemoryMonitor.java index 9286c72facae..9dbd1c963596 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/status/MemoryMonitor.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/status/MemoryMonitor.java @@ -620,8 +620,11 @@ private static File getHeapDumpDir() { * repeated dumps. These files can be of comparable size to the local disk. */ public File dumpHeap() - throws MalformedObjectNameException, InstanceNotFoundException, ReflectionException, - MBeanException, IOException { + throws MalformedObjectNameException, + InstanceNotFoundException, + ReflectionException, + MBeanException, + IOException { Preconditions.checkState( canDumpHeap, "Bug! Attempt to dump heap even though it should be disabled."); return dumpHeap(localDumpFolder); @@ -634,8 +637,11 @@ public File dumpHeap() * repeated dumps. These files can be of comparable size to the local disk. 
*/ private static synchronized File dumpHeap(File directory) - throws MalformedObjectNameException, InstanceNotFoundException, ReflectionException, - MBeanException, IOException { + throws MalformedObjectNameException, + InstanceNotFoundException, + ReflectionException, + MBeanException, + IOException { boolean liveObjectsOnly = false; File fileName = new File(directory, "heap_dump.hprof"); diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataReadRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataReadRunnerTest.java index 40ebebacd285..f4312e84f57a 100644 --- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataReadRunnerTest.java +++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataReadRunnerTest.java @@ -105,9 +105,7 @@ public class BeamFnDataReadRunnerTest { MessageWithComponents coderAndComponents = CoderTranslation.toProto(CODER); CODER_SPEC = coderAndComponents.getCoder(); COMPONENTS = - coderAndComponents - .getComponents() - .toBuilder() + coderAndComponents.getComponents().toBuilder() .putCoders(CODER_SPEC_ID, CODER_SPEC) .putCoders(ELEMENT_CODER_SPEC_ID, CoderTranslation.toProto(ELEMENT_CODER).getCoder()) .build(); diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataWriteRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataWriteRunnerTest.java index 70a894e7b375..f4e46fd8ae74 100644 --- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataWriteRunnerTest.java +++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataWriteRunnerTest.java @@ -87,9 +87,7 @@ public class BeamFnDataWriteRunnerTest { MessageWithComponents coderAndComponents = CoderTranslation.toProto(WIRE_CODER); WIRE_CODER_SPEC = coderAndComponents.getCoder(); COMPONENTS = - coderAndComponents - .getComponents() - .toBuilder() + coderAndComponents.getComponents().toBuilder() .putCoders(WIRE_CODER_ID, WIRE_CODER_SPEC) .putCoders(ELEM_CODER_ID, CoderTranslation.toProto(ELEM_CODER).getCoder()) .build(); diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/CombineRunnersTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/CombineRunnersTest.java index fc3cad970f73..be86ad074d9b 100644 --- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/CombineRunnersTest.java +++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/CombineRunnersTest.java @@ -353,6 +353,7 @@ public void testConvertToAccumulators() throws Exception { valueInGlobalWindow(KV.of("B", 5)), valueInGlobalWindow(KV.of("C", 7)))); } + /** * Create a Combine Grouped Values function that is given lists of values that are grouped by key * and validates that the lists are properly combined. 
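For context, the hunks in this section all apply the same automated formatting conventions, apparently produced by the project's Spotless formatting check. The short standalone sketch below is illustrative only: the class, method, and parameter names are hypothetical and not taken from the Beam codebase, and it assumes the Checker Framework's @Nullable annotation is on the classpath. It shows the three recurring changes visible in the surrounding hunks: a type-use annotation such as @Nullable stays on the same line as the type it qualifies, a throws clause that no longer fits on one line breaks with one exception type per line, and inline parameter-name comments are spaced as /* name= */ value.

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.checkerframework.checker.nullness.qual.Nullable;

/** Hypothetical example of the post-reformat style; not part of this change. */
class FormattingSketch {

  /** Returns null if the key has no cached value; the annotation shares the method line. */
  @Nullable String lookup(String key) {
    return key.isEmpty() ? null : key;
  }

  /** A throws clause that exceeds the line limit breaks with one exception type per line. */
  List<String> snapshot(File directory, List<String> entries)
      throws IOException,
          ReflectiveOperationException,
          InterruptedException {
    if (!directory.exists()) {
      throw new IOException("missing directory: " + directory);
    }
    // Parameter-name comments gain a space on each side of the name.
    List<String> copy = new ArrayList<>(/* initialCapacity= */ entries.size());
    copy.addAll(entries);
    return copy;
  }
}

The same idea covers the builder chains in these hunks: the receiver and toBuilder() are kept on one line (for example request.toBuilder()) with the remaining chained calls indented beneath, rather than breaking after the bare receiver.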
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java index 50a2fec0b5a2..5ef44e79feb3 100644 --- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java +++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java @@ -717,7 +717,9 @@ public void testSideInputIsAccessibleForDownstreamCallers() throws Exception { fakeClient.getData()); } - /** @return a test MetricUpdate for expected metrics to compare against */ + /** + * @return a test MetricUpdate for expected metrics to compare against + */ public MetricUpdate create(String stepName, MetricName name, long value) { return MetricUpdate.create(MetricKey.create(stepName, name), value); } diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/SplittableTruncateSizedRestrictionsDoFnRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/SplittableTruncateSizedRestrictionsDoFnRunnerTest.java index 615a681095e1..fa1ae7aab625 100644 --- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/SplittableTruncateSizedRestrictionsDoFnRunnerTest.java +++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/SplittableTruncateSizedRestrictionsDoFnRunnerTest.java @@ -110,7 +110,9 @@ public static class ExecutionTest implements Serializable { public static final String TEST_TRANSFORM_ID = "pTransformId"; - /** @return a test MetricUpdate for expected metrics to compare against */ + /** + * @return a test MetricUpdate for expected metrics to compare against + */ public MetricUpdate create(String stepName, MetricName name, long value) { return MetricUpdate.create(MetricKey.create(stepName, name), value); } diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/StateBackedIterableTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/StateBackedIterableTest.java index 9b1d51748e68..f5a36addf87d 100644 --- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/StateBackedIterableTest.java +++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/StateBackedIterableTest.java @@ -154,7 +154,8 @@ private static class TestByteObserver extends ElementByteSizeObserver { protected void reportElementSize(long elementByteSize) { total += elementByteSize; } - }; + } + ; @Test public void testByteObservingStateBackedIterable() throws Exception { diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/dynamodb/DynamoDBIO.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/dynamodb/DynamoDBIO.java index b7cf43f93eb2..b99d639afcba 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/dynamodb/DynamoDBIO.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/dynamodb/DynamoDBIO.java @@ -280,8 +280,7 @@ public void processElement( do { ScanRequest scanRequest = spec.getScanRequestFn().apply(null); ScanRequest scanRequestWithSegment = - scanRequest - .toBuilder() + scanRequest.toBuilder() .segment(spec.getSegmentId()) .exclusiveStartKey(lastEvaluatedKey) .build(); diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/EFOShardSubscriber.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/EFOShardSubscriber.java index 96ee35e5d45f..0637bf2e3a4f 100644 --- 
a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/EFOShardSubscriber.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/EFOShardSubscriber.java @@ -272,6 +272,7 @@ private class ShardEventsSubscriber implements Subscriber, SubscribeToShardResponseHandler.Visitor { /** Tracks continuation sequence number. */ @Nullable String sequenceNumber; + /** Current active subscription to request more events or cancel it. */ @Nullable Subscription subscription; diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/EFOShardSubscribersPool.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/EFOShardSubscribersPool.java index 6281088253c8..d0cad2f382d7 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/EFOShardSubscribersPool.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/EFOShardSubscribersPool.java @@ -185,8 +185,7 @@ void start(Iterable checkpoints) { * it is better to poll again instead of having {@link EFOKinesisReader#advance()} signalling * false to Beam. Otherwise, Beam would poll again later, which would introduce unnecessary delay. */ - @Nullable - KinesisRecord getNextRecord() throws IOException { + @Nullable KinesisRecord getNextRecord() throws IOException { while (true) { if (!isStopped && subscriptionError != null) { // Stop the pool to cancel all subscribers and prevent new subscriptions. diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/WatermarkPolicyFactory.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/WatermarkPolicyFactory.java index 2d8759640cc6..e229c72bb8dc 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/WatermarkPolicyFactory.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/WatermarkPolicyFactory.java @@ -125,8 +125,7 @@ public Instant getWatermark() { @Override public void update(KinesisRecord record) { watermarkParameters = - watermarkParameters - .toBuilder() + watermarkParameters.toBuilder() .setEventTime( Ordering.natural() .max( diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/options/AwsModule.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/options/AwsModule.java index 69331cc05dc1..2edee7d6fccd 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/options/AwsModule.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/options/AwsModule.java @@ -341,7 +341,6 @@ public boolean canCreateUsingDefault() { } private interface Instantiator { - @NonNull - T create(); + @NonNull T create(); } } diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/options/S3Options.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/options/S3Options.java index e59a8ebca2e2..26668d9ccb05 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/options/S3Options.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/options/S3Options.java @@ -49,8 +49,7 @@ public interface S3Options extends AwsOptions { void setS3ThreadPoolSize(int value); @Description("Algorithm for SSE-S3 encryption, e.g. 
AES256.") - @Nullable - String getSSEAlgorithm(); + @Nullable String getSSEAlgorithm(); void setSSEAlgorithm(String value); @@ -64,8 +63,7 @@ public interface S3Options extends AwsOptions { void setSSECustomerKey(SSECustomerKey sseCustomerKey); @Description("KMS key id for SSE-KMS encryption, e.g. arn:aws:kms:....") - @Nullable - String getSSEKMSKeyId(); + @Nullable String getSSEKMSKeyId(); void setSSEKMSKeyId(String value); diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/sqs/SqsIO.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/sqs/SqsIO.java index 536f2740e81e..71387c5e5084 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/sqs/SqsIO.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/sqs/SqsIO.java @@ -151,7 +151,9 @@ public static Read read() { .build(); } - /** @deprecated Use {@link #writeBatches()} for more configuration options. */ + /** + * @deprecated Use {@link #writeBatches()} for more configuration options. + */ @Deprecated public static Write write() { return new AutoValue_SqsIO_Write.Builder() diff --git a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/common/ObjectPoolTest.java b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/common/ObjectPoolTest.java index 154957ee3c09..130024235a99 100644 --- a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/common/ObjectPoolTest.java +++ b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/common/ObjectPoolTest.java @@ -159,5 +159,6 @@ static class Provider implements Function { public AutoCloseable apply(String configName) { return mock(AutoCloseable.class, configName); } - }; + } + ; } diff --git a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/kinesis/testing/KinesisIOIT.java b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/kinesis/testing/KinesisIOIT.java index 23486fccf284..d13a6223513d 100644 --- a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/kinesis/testing/KinesisIOIT.java +++ b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/kinesis/testing/KinesisIOIT.java @@ -79,8 +79,7 @@ public interface ITOptions extends ITEnvironment.ITOptions { void setKinesisStream(String value); @Description("Kinesis consumer ARN - set if want to test EFO") - @Nullable - String getConsumerArn(); + @Nullable String getConsumerArn(); void setConsumerArn(@Nullable String value); @@ -104,8 +103,7 @@ public interface ITOptions extends ITEnvironment.ITOptions { @Description( "Create EFO consumer with the given name. 
If set, consumer ARN conf will be ignored.") - @Nullable - String getCreateConsumerWithName(); + @Nullable String getCreateConsumerWithName(); void setCreateConsumerWithName(@Nullable String createConsumerWithName); } diff --git a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystemTest.java b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystemTest.java index 5812ab410842..f50a1e54a6e0 100644 --- a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystemTest.java +++ b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystemTest.java @@ -376,8 +376,7 @@ private void testMultipartCopy(S3FileSystem s3FileSystem, SSECustomerKey sseCust .build(); UploadPartCopyRequest uploadPartCopyRequest2 = - uploadPartCopyRequest1 - .toBuilder() + uploadPartCopyRequest1.toBuilder() .partNumber(2) .copySourceRange( String.format( diff --git a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3TestUtils.java b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3TestUtils.java index 8b6d8e824bb9..2717bbefd7a6 100644 --- a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3TestUtils.java +++ b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3TestUtils.java @@ -116,8 +116,7 @@ static S3Options s3OptionsWithSSEKMSKeyId() { } static S3FileSystemConfiguration s3ConfigWithMultipleSSEOptions(String scheme) { - return s3ConfigWithSSEKMSKeyId(scheme) - .toBuilder() + return s3ConfigWithSSEKMSKeyId(scheme).toBuilder() .setSSECustomerKey( SSECustomerKey.builder() .key("86glyTlCNZgccSxW8JxMa6ZdjdK3N141glAysPUZ3AA=") diff --git a/sdks/java/io/azure-cosmos/src/main/java/org/apache/beam/sdk/io/azure/cosmos/CosmosIO.java b/sdks/java/io/azure-cosmos/src/main/java/org/apache/beam/sdk/io/azure/cosmos/CosmosIO.java index 851d18d0eab7..d1c8234607eb 100644 --- a/sdks/java/io/azure-cosmos/src/main/java/org/apache/beam/sdk/io/azure/cosmos/CosmosIO.java +++ b/sdks/java/io/azure-cosmos/src/main/java/org/apache/beam/sdk/io/azure/cosmos/CosmosIO.java @@ -182,7 +182,8 @@ public List> split( FeedRangeInternal.convert(range.toFeedRange()) .trySplit( document.getPartitionKeyRangeCache(), null, getCollectionObservable, splits) - .block().stream() + .block() + .stream() .map(NormalizedRange::fromFeedRange) .collect(Collectors.toList()); diff --git a/sdks/java/io/azure-cosmos/src/main/java/org/apache/beam/sdk/io/azure/cosmos/CosmosOptions.java b/sdks/java/io/azure-cosmos/src/main/java/org/apache/beam/sdk/io/azure/cosmos/CosmosOptions.java index 0c4b65b9bd8f..b7551bcf46f9 100644 --- a/sdks/java/io/azure-cosmos/src/main/java/org/apache/beam/sdk/io/azure/cosmos/CosmosOptions.java +++ b/sdks/java/io/azure-cosmos/src/main/java/org/apache/beam/sdk/io/azure/cosmos/CosmosOptions.java @@ -40,15 +40,13 @@ public interface CosmosOptions extends AzureOptions { /** The Azure Cosmos service endpoint used by the Cosmos client. */ @Description("Sets the cosmos service endpoint") - @Nullable - String getCosmosServiceEndpoint(); + @Nullable String getCosmosServiceEndpoint(); void setCosmosServiceEndpoint(String endpoint); /** The Azure Cosmos key used to perform authentication for accessing resource. 
*/ @Description("Sets the cosmos service endpoint") - @Nullable - String getCosmosKey(); + @Nullable String getCosmosKey(); void setCosmosKey(String key); diff --git a/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/blobstore/AzfsResourceId.java b/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/blobstore/AzfsResourceId.java index 592b09c39718..cf380e8a4e39 100644 --- a/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/blobstore/AzfsResourceId.java +++ b/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/blobstore/AzfsResourceId.java @@ -38,6 +38,7 @@ class AzfsResourceId implements ResourceId { static final String SCHEME = "azfs"; private static final Pattern AZFS_URI = Pattern.compile("(?[^:]+)://(?[^/]+)/(?[^/]+)(?:/(?.*))?"); + /** Matches a glob containing a wildcard, capturing the portion before the first wildcard. */ private static final Pattern GLOB_PREFIX = Pattern.compile("(?[^\\[*?]*)[\\[*?].*"); diff --git a/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/options/BlobstoreOptions.java b/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/options/BlobstoreOptions.java index 7471452dfce0..05fe6f42a7b5 100644 --- a/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/options/BlobstoreOptions.java +++ b/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/options/BlobstoreOptions.java @@ -41,8 +41,7 @@ void setBlobstoreClientFactoryClass( Class blobstoreClientFactoryClass); @Description("Adds a pipeline policy to apply on each request sent to the blob service client.") - @Nullable - HttpPipelinePolicy getPipelinePolicy(); + @Nullable HttpPipelinePolicy getPipelinePolicy(); void setPipelinePolicy(HttpPipelinePolicy pipelinePolicy); @@ -74,21 +73,18 @@ void setBlobstoreClientFactoryClass( /** The Azure Blobstore service endpoint used by the Blob service client. 
*/ @Description("Sets the blob service endpoint, additionally parses it for information (SAS token)") - @Nullable - String getBlobServiceEndpoint(); + @Nullable String getBlobServiceEndpoint(); void setBlobServiceEndpoint(String endpoint); @Description( "Sets the HttpClient to use for sending a receiving requests to and from the service.") - @Nullable - HttpClient getHttpClient(); + @Nullable HttpClient getHttpClient(); void setHttpClient(HttpClient httpClient); @Description("Sets the HttpPipeline to use for the service client.") - @Nullable - HttpPipeline getHttpPipeline(); + @Nullable HttpPipeline getHttpPipeline(); void setHttpPipeline(HttpPipeline httpPipeline); } diff --git a/sdks/java/io/cassandra/build.gradle b/sdks/java/io/cassandra/build.gradle index 5256ac1f2148..e41b8a9144d0 100644 --- a/sdks/java/io/cassandra/build.gradle +++ b/sdks/java/io/cassandra/build.gradle @@ -62,7 +62,7 @@ dependencies { configurations.all (Configuration it) -> { // error-prone requires newer guava, don't override for annotation processing // https://github.com/google/error-prone/issues/2745 - if (it.name == "annotationProcessor" || it.name =="testAnnotationProcessor") { + if (it.name == "annotationProcessor" || it.name =="testAnnotationProcessor" || it.name.startsWith("spotless") || it.name.startsWith("checkstyle")) { return } resolutionStrategy { diff --git a/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/ReadFn.java b/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/ReadFn.java index 8f16e729bc86..1ddf52c87dde 100644 --- a/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/ReadFn.java +++ b/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/ReadFn.java @@ -47,8 +47,13 @@ public void processElement(@Element Read read, OutputReceiver receiver) th Session session = ConnectionManager.getSession(read); Mapper mapper = read.mapperFactoryFn().apply(session); String partitionKey = - session.getCluster().getMetadata().getKeyspace(read.keyspace().get()) - .getTable(read.table().get()).getPartitionKey().stream() + session + .getCluster() + .getMetadata() + .getKeyspace(read.keyspace().get()) + .getTable(read.table().get()) + .getPartitionKey() + .stream() .map(ColumnMetadata::getName) .map(ReadFn::quoteIdentifier) .collect(Collectors.joining(",")); diff --git a/sdks/java/io/cassandra/src/test/java/org/apache/beam/sdk/io/cassandra/CassandraIOTest.java b/sdks/java/io/cassandra/src/test/java/org/apache/beam/sdk/io/cassandra/CassandraIOTest.java index f63c819d4202..0b8b926ec9c8 100644 --- a/sdks/java/io/cassandra/src/test/java/org/apache/beam/sdk/io/cassandra/CassandraIOTest.java +++ b/sdks/java/io/cassandra/src/test/java/org/apache/beam/sdk/io/cassandra/CassandraIOTest.java @@ -841,6 +841,7 @@ private static RingRange fromEncodedKey(Metadata metadata, ByteBuffer... bb) { } private static final String CASSANDRA_TABLE_WRITE = "scientist_write"; + /** Simple Cassandra entity used in write tests. 
*/ @Table(name = CASSANDRA_TABLE_WRITE, keyspace = CASSANDRA_KEYSPACE) static class ScientistWrite extends Scientist {} diff --git a/sdks/java/io/clickhouse/src/main/java/org/apache/beam/sdk/io/clickhouse/ClickHouseIO.java b/sdks/java/io/clickhouse/src/main/java/org/apache/beam/sdk/io/clickhouse/ClickHouseIO.java index a8875407b43c..8607cd6fa381 100644 --- a/sdks/java/io/clickhouse/src/main/java/org/apache/beam/sdk/io/clickhouse/ClickHouseIO.java +++ b/sdks/java/io/clickhouse/src/main/java/org/apache/beam/sdk/io/clickhouse/ClickHouseIO.java @@ -431,6 +431,7 @@ public Write withProperties(Properties properties) { merged.putAll(properties); return toBuilder().properties(merged).build(); } + /** Builder for {@link Write}. */ @AutoValue.Builder abstract static class Builder { diff --git a/sdks/java/io/common/src/main/java/org/apache/beam/sdk/io/common/IOTestPipelineOptions.java b/sdks/java/io/common/src/main/java/org/apache/beam/sdk/io/common/IOTestPipelineOptions.java index cc66fb97718d..f2fccd493637 100644 --- a/sdks/java/io/common/src/main/java/org/apache/beam/sdk/io/common/IOTestPipelineOptions.java +++ b/sdks/java/io/common/src/main/java/org/apache/beam/sdk/io/common/IOTestPipelineOptions.java @@ -33,14 +33,12 @@ public interface IOTestPipelineOptions extends TestPipelineOptions { void setNumberOfRecords(Integer count); @Description("BigQuery dataset to publish results to.") - @Nullable - String getBigQueryDataset(); + @Nullable String getBigQueryDataset(); void setBigQueryDataset(@Nullable String dataset); @Description("BigQuery table to publish results to.") - @Nullable - String getBigQueryTable(); + @Nullable String getBigQueryTable(); void setBigQueryTable(@Nullable String tableName); diff --git a/sdks/java/io/contextualtextio/src/test/java/org/apache/beam/sdk/io/contextualtextio/ContextualTextIOTest.java b/sdks/java/io/contextualtextio/src/test/java/org/apache/beam/sdk/io/contextualtextio/ContextualTextIOTest.java index 68a1dd3395cd..bce7db2f264c 100644 --- a/sdks/java/io/contextualtextio/src/test/java/org/apache/beam/sdk/io/contextualtextio/ContextualTextIOTest.java +++ b/sdks/java/io/contextualtextio/src/test/java/org/apache/beam/sdk/io/contextualtextio/ContextualTextIOTest.java @@ -335,6 +335,7 @@ private static String getFileSuffix(Compression compression) { return ""; } } + /** Tests for reading from different size of files with various Compression. 
*/ @RunWith(Parameterized.class) public static class CompressedReadTest { diff --git a/sdks/java/io/csv/src/main/java/org/apache/beam/sdk/io/csv/CsvRowConversions.java b/sdks/java/io/csv/src/main/java/org/apache/beam/sdk/io/csv/CsvRowConversions.java index 01d944c2bad1..4cd8da06f5d0 100644 --- a/sdks/java/io/csv/src/main/java/org/apache/beam/sdk/io/csv/CsvRowConversions.java +++ b/sdks/java/io/csv/src/main/java/org/apache/beam/sdk/io/csv/CsvRowConversions.java @@ -65,8 +65,7 @@ public String apply(Row input) { return getCSVFormat().format(values); } - @NonNull - String[] getHeader() { + @NonNull String[] getHeader() { return checkNotNull(getCSVFormat().getHeader()); } diff --git a/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/OffsetRetainer.java b/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/OffsetRetainer.java index b1fe5a58bbe1..be76301da0fb 100644 --- a/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/OffsetRetainer.java +++ b/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/OffsetRetainer.java @@ -52,8 +52,7 @@ public interface OffsetRetainer extends Serializable { * Implementations should handle transient I/O errors gracefully and return {@code null} on * failure rather than propagating an exception. */ - @Nullable - Map loadOffset(); + @Nullable Map loadOffset(); /** * Persists the given offset so it can be recovered after a pipeline restart. diff --git a/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/DebeziumIOMySqlConnectorIT.java b/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/DebeziumIOMySqlConnectorIT.java index 3fe86a29cce5..d77bf14e21b7 100644 --- a/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/DebeziumIOMySqlConnectorIT.java +++ b/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/DebeziumIOMySqlConnectorIT.java @@ -61,6 +61,7 @@ public class DebeziumIOMySqlConnectorIT { private static final Logger LOG = LoggerFactory.getLogger(DebeziumIOMySqlConnectorIT.class); + /** * Debezium - MySqlContainer * diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIOTestUtils.java b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIOTestUtils.java index 41c8082989b7..9d8944676086 100644 --- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIOTestUtils.java +++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIOTestUtils.java @@ -289,6 +289,7 @@ static long refreshIndexAndGetCurrentNumDocs( RestClient restClient, String index, String type, int backendVersion) throws IOException { return refreshIndexAndGetCurrentNumDocs(restClient, index, type, backendVersion, null); } + /** * Forces a refresh of the given index to make recently inserted documents available for search. 
* diff --git a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java index 2e3f9e9729b5..5c83bad14a68 100644 --- a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java +++ b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java @@ -2140,6 +2140,7 @@ public Document withTimestamp(Instant timestamp) { return toBuilder().setTimestamp(timestamp).build(); } } + /** * A {@link PTransform} writing data to Elasticsearch. * @@ -2244,6 +2245,7 @@ public Write withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) { docToBulk = docToBulk.withIsDeleteFn(isDeleteFn); return this; } + // End building Doc2Bulk /** Refer to {@link BulkIO#withConnectionConfiguration}. */ @@ -2382,7 +2384,9 @@ abstract static class Builder { abstract Builder setUseStatefulBatches(boolean useStatefulBatches); - /** @deprecated Use {@link #setMaxParallelRequests} instead. */ + /** + * @deprecated Use {@link #setMaxParallelRequests} instead. + */ @Deprecated abstract Builder setMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow); diff --git a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/common/FileBasedIOTestPipelineOptions.java b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/common/FileBasedIOTestPipelineOptions.java index aed7986dd0e0..be8f748d1cd8 100644 --- a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/common/FileBasedIOTestPipelineOptions.java +++ b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/common/FileBasedIOTestPipelineOptions.java @@ -38,8 +38,7 @@ public interface FileBasedIOTestPipelineOptions extends IOTestPipelineOptions { void setCompressionType(String compressionType); @Description("Number of files this test will create during the write phase.") - @Nullable - Integer getNumberOfShards(); + @Nullable Integer getNumberOfShards(); void setNumberOfShards(@Nullable Integer value); diff --git a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/CsvWriteSchemaTransformFormatProviderTest.java b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/CsvWriteSchemaTransformFormatProviderTest.java index 777f468771a9..b6b661659930 100644 --- a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/CsvWriteSchemaTransformFormatProviderTest.java +++ b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/CsvWriteSchemaTransformFormatProviderTest.java @@ -101,8 +101,7 @@ private static List toCsv(List rows, Schema beamSchema, CSVFormat c @Override protected FileWriteSchemaTransformConfiguration buildConfiguration(String folder) { - return defaultConfiguration(folder) - .toBuilder() + return defaultConfiguration(folder).toBuilder() .setNumShards(1) .setCsvConfiguration( csvConfigurationBuilder() diff --git a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java index 54c885f5415f..47fb43fadbc6 100644 --- 
a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java +++ b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java @@ -225,8 +225,7 @@ public void testWriteAndReadWithSchemaTransforms() { .build(); if (getFormat().equals("parquet")) { writeConfig = - writeConfig - .toBuilder() + writeConfig.toBuilder() .setParquetConfiguration( parquetConfigurationBuilder() .setCompressionCodecName(CompressionCodecName.GZIP.name()) diff --git a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileWriteSchemaTransformFormatProviderTest.java b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileWriteSchemaTransformFormatProviderTest.java index e7bae565d22a..64ad0be3c3a6 100644 --- a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileWriteSchemaTransformFormatProviderTest.java +++ b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileWriteSchemaTransformFormatProviderTest.java @@ -151,8 +151,7 @@ public void withCompression() { public void invalidConfigurationWithParquet() { String to = folder(getFormat(), "configuration_with_parquet"); FileWriteSchemaTransformConfiguration configuration = - buildConfiguration(to) - .toBuilder() + buildConfiguration(to).toBuilder() .setParquetConfiguration( parquetConfigurationBuilder() .setCompressionCodecName(CompressionCodecName.GZIP.name()) @@ -184,8 +183,7 @@ public void invalidConfigurationWithParquet() { public void invalidConfigurationWithXml() { String to = folder(getFormat(), "configuration_with_xml"); FileWriteSchemaTransformConfiguration configuration = - buildConfiguration(to) - .toBuilder() + buildConfiguration(to).toBuilder() .setXmlConfiguration( xmlConfigurationBuilder() .setRootElement("rootElement") @@ -289,8 +287,7 @@ public void csvConfigurationSet() { String to = folder(getFormat(), "csv_configuration"); FileWriteSchemaTransformProvider provider = new FileWriteSchemaTransformProvider(); FileWriteSchemaTransformConfiguration configuration = - buildConfiguration(to) - .toBuilder() + buildConfiguration(to).toBuilder() .setCsvConfiguration( csvConfigurationBuilder() .setPredefinedCsvFormat(CSVFormat.Predefined.Default.name()) diff --git a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/XmlRowAdapterTest.java b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/XmlRowAdapterTest.java index a23476e0adb0..9621ab928f5b 100644 --- a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/XmlRowAdapterTest.java +++ b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/XmlRowAdapterTest.java @@ -67,7 +67,10 @@ public class XmlRowAdapterTest { @Test public void allPrimitiveDataTypes() - throws XPathExpressionException, JAXBException, IOException, SAXException, + throws XPathExpressionException, + JAXBException, + IOException, + SAXException, ParserConfigurationException { for (Row row : DATA.allPrimitiveDataTypesRows) { @@ -90,7 +93,10 @@ public void allPrimitiveDataTypes() @Test public void nullableAllPrimitiveDataTypes() - throws XPathExpressionException, JAXBException, IOException, SAXException, + throws XPathExpressionException, + JAXBException, + IOException, + 
SAXException, ParserConfigurationException { for (Row row : DATA.nullableAllPrimitiveDataTypesRows) { NodeList entries = xmlDocumentEntries(row); @@ -116,7 +122,10 @@ public void nullableAllPrimitiveDataTypes() @Test public void timeContaining() - throws XPathExpressionException, JAXBException, IOException, ParserConfigurationException, + throws XPathExpressionException, + JAXBException, + IOException, + ParserConfigurationException, SAXException { String instant = "instant"; DateTimeFormatter formatter = ISODateTimeFormat.dateTime(); @@ -171,7 +180,10 @@ private static Map keyValues(String testName, NodeList entries) { } private NodeList xmlDocumentEntries(Row row) - throws JAXBException, IOException, SAXException, XPathExpressionException, + throws JAXBException, + IOException, + SAXException, + XPathExpressionException, ParserConfigurationException { JAXBContext context = JAXBContext.newInstance(XmlRowAdapter.class); Marshaller marshaller = context.createMarshaller(); diff --git a/sdks/java/io/google-ads/src/main/java/org/apache/beam/sdk/io/googleads/GoogleAdsOptions.java b/sdks/java/io/google-ads/src/main/java/org/apache/beam/sdk/io/googleads/GoogleAdsOptions.java index 738760c22eb2..606ceef5a423 100644 --- a/sdks/java/io/google-ads/src/main/java/org/apache/beam/sdk/io/googleads/GoogleAdsOptions.java +++ b/sdks/java/io/google-ads/src/main/java/org/apache/beam/sdk/io/googleads/GoogleAdsOptions.java @@ -73,8 +73,7 @@ public interface GoogleAdsOptions extends PipelineOptions { /** Google Ads developer token for the user connecting to the Google Ads API. */ @Description("Google Ads developer token for the user connecting to the Google Ads API.") - @Nullable - String getGoogleAdsDeveloperToken(); + @Nullable String getGoogleAdsDeveloperToken(); void setGoogleAdsDeveloperToken(String developerToken); @@ -103,8 +102,7 @@ void setGoogleAdsCredentialFactoryClass( "The credential instance that should be used to authenticate against the Google Ads API. 
" + "Defaults to a credential instance constructed by the credential factory.") @Default.InstanceFactory(GoogleAdsCredentialsFactory.class) - @Nullable - Credentials getGoogleAdsCredential(); + @Nullable Credentials getGoogleAdsCredential(); void setGoogleAdsCredential(Credentials credential); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AppendClientInfo.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AppendClientInfo.java index 55c6007e1986..4adae9ccea0a 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AppendClientInfo.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AppendClientInfo.java @@ -83,7 +83,8 @@ abstract Builder setCloseAppendClient( abstract Builder setStreamName(@Nullable String name); abstract AppendClientInfo build(); - }; + } + ; abstract Builder toBuilder(); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtils.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtils.java index 2dbc4316b883..91eafb39ef36 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtils.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtils.java @@ -58,6 +58,7 @@ class BigQueryAvroUtils { .map(Package::getImplementationVersion) .orElse(""); private static final String TIMESTAMP_NANOS_LOGICAL_TYPE = "timestamp-nanos"; + // org.apache.avro.LogicalType static class DateTimeLogicalType extends LogicalType { public DateTimeLogicalType() { diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryHelpers.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryHelpers.java index 55c703438f02..37e8d27efb7c 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryHelpers.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryHelpers.java @@ -560,8 +560,7 @@ static Status parseStatus(@Nullable Job job) { @SuppressWarnings({ "nullness" // TODO(https://github.com/apache/beam/issues/20497) }) - @NonNull - T result = BigQueryIO.JSON_FACTORY.fromString(json, clazz); + @NonNull T result = BigQueryIO.JSON_FACTORY.fromString(json, clazz); return result; } catch (IOException e) { throw new RuntimeException( diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java index b222b358f547..5c04d9bca384 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java @@ -675,7 +675,10 @@ public static TypedRead readTableRowsWithSchema() { BigQueryUtils.tableRowToBeamRow(), BigQueryUtils.tableRowFromBeamRow()); } - /** @deprecated this method may have breaking changes introduced, use with caution */ + + /** + * @deprecated this method may have breaking changes introduced, use with caution + */ @Deprecated public static DynamicRead readDynamicallyTableRows() { return new AutoValue_BigQueryIO_DynamicRead.Builder() 
@@ -688,7 +691,10 @@ public static DynamicRead readDynamicallyTableRows() { .setBadRecordRouter(BadRecordRouter.THROWING_ROUTER) .build(); } - /** @deprecated this method may have breaking changes introduced, use with caution */ + + /** + * @deprecated this method may have breaking changes introduced, use with caution + */ @Deprecated public static DynamicRead readDynamically( SerializableFunction parseFn, Coder outputCoder) { @@ -836,7 +842,10 @@ public TableRow apply(SchemaAndRecord schemaAndRecord) { return BigQueryAvroUtils.convertGenericRecordToTableRow(schemaAndRecord.getRecord()); } } - /** @deprecated this class may have breaking changes introduced, use with caution */ + + /** + * @deprecated this class may have breaking changes introduced, use with caution + */ @Deprecated @AutoValue public abstract static class DynamicRead @@ -1275,8 +1284,8 @@ abstract Builder setBadRecordErrorHandler( abstract @Nullable SerializableFunction getParseFn(); - abstract @Nullable SerializableFunction> - getDatumReaderFactory(); + abstract @Nullable + SerializableFunction> getDatumReaderFactory(); abstract @Nullable QueryPriority getQueryPriority(); @@ -2563,8 +2572,7 @@ public static Write writeProtos(Class protoMessageClas throw new IllegalArgumentException("DynamicMessage is not supported."); } try { - return BigQueryIO.write() - .toBuilder() + return BigQueryIO.write().toBuilder() .setFormatFunction(FormatProto.fromClass(protoMessageClass)) .build() .withWriteProtosClass(protoMessageClass); @@ -2685,8 +2693,8 @@ public enum Method { abstract @Nullable ValueProvider getJsonTableRef(); - abstract @Nullable SerializableFunction, TableDestination> - getTableFunction(); + abstract @Nullable + SerializableFunction, TableDestination> getTableFunction(); abstract @Nullable TableRowFormatFunction getFormatFunction(); @@ -2694,8 +2702,8 @@ public enum Method { abstract RowWriterFactory.@Nullable AvroRowWriterFactory getAvroRowWriterFactory(); - abstract @Nullable SerializableFunction<@Nullable TableSchema, org.apache.avro.Schema> - getAvroSchemaFactory(); + abstract @Nullable + SerializableFunction<@Nullable TableSchema, org.apache.avro.Schema> getAvroSchemaFactory(); abstract boolean getUseAvroLogicalTypes(); @@ -2785,8 +2793,8 @@ public enum Method { abstract @Nullable String getWriteTempDataset(); - abstract @Nullable SerializableFunction - getRowMutationInformationFn(); + abstract @Nullable + SerializableFunction getRowMutationInformationFn(); abstract ErrorHandler getBadRecordErrorHandler(); @@ -4406,8 +4414,7 @@ public void populateDisplayData(DisplayData.Builder builder) { * *
If the table's project is not specified, use the executing project. */ - @Nullable - ValueProvider getTableWithDefaultProject(BigQueryOptions bqOptions) { + @Nullable ValueProvider getTableWithDefaultProject(BigQueryOptions bqOptions) { ValueProvider table = getTable(); if (table == null) { return table; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java index 66458a8339f9..0288caeb2c49 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java @@ -131,15 +131,13 @@ enum TableMetadataView { * *
Returns null if the table is not found. */ - @Nullable - Table getTable(TableReference tableRef) throws InterruptedException, IOException; + @Nullable Table getTable(TableReference tableRef) throws InterruptedException, IOException; - @Nullable - Table getTable(TableReference tableRef, List selectedFields) + @Nullable Table getTable(TableReference tableRef, List selectedFields) throws InterruptedException, IOException; - @Nullable - Table getTable(TableReference tableRef, List selectedFields, TableMetadataView view) + @Nullable Table getTable( + TableReference tableRef, List selectedFields, TableMetadataView view) throws InterruptedException, IOException; /** Creates the specified table if it does not exist. */ @@ -216,8 +214,7 @@ interface WriteStreamService extends AutoCloseable { WriteStream createWriteStream(String tableUrn, WriteStream.Type type) throws IOException, InterruptedException; - @Nullable - TableSchema getWriteStreamSchema(String writeStream); + @Nullable TableSchema getWriteStreamSchema(String writeStream); /** * Create an append client for a given Storage API write stream. The stream must be created @@ -250,8 +247,7 @@ interface StreamAppendClient extends AutoCloseable { ApiFuture appendRows(long offset, ProtoRows rows) throws Exception; /** If the table schema has been updated, returns the new schema. Otherwise returns null. */ - @Nullable - TableSchema getUpdatedSchema(); + @Nullable TableSchema getUpdatedSchema(); /** * If the previous call to appendRows blocked due to flow control, returns how long the call diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java index aa9a5fd310b0..0d408fa0adaf 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java @@ -459,8 +459,8 @@ public Job pollJob(JobReference jobRef, int maxAttempts) throws InterruptedExcep } @VisibleForTesting - @Nullable - Job pollJob(JobReference jobRef, Sleeper sleeper, BackOff backoff) throws InterruptedException { + @Nullable Job pollJob(JobReference jobRef, Sleeper sleeper, BackOff backoff) + throws InterruptedException { do { try { Job job = @@ -672,8 +672,7 @@ Bigquery getClient() { } @VisibleForTesting - @Nullable - Table getTable( + @Nullable Table getTable( TableReference ref, List selectedFields, TableMetadataView view, @@ -747,8 +746,8 @@ public void createTable(Table table) throws InterruptedException, IOException { } @VisibleForTesting - @Nullable - Table tryCreateTable(Table table, BackOff backoff, Sleeper sleeper) throws IOException { + @Nullable Table tryCreateTable(Table table, BackOff backoff, Sleeper sleeper) + throws IOException { boolean retry = false; while (true) { try { @@ -1534,8 +1533,7 @@ public WriteStream createWriteStream(String tableUrn, WriteStream.Type type) @Override public @Nullable TableSchema getWriteStreamSchema(String writeStream) { - @Nullable - WriteStream stream = + @Nullable WriteStream stream = newWriteClient.getWriteStream( GetWriteStreamRequest.newBuilder() .setView(WriteStreamView.FULL) @@ -1906,9 +1904,7 @@ public void onRetryAttempt(Status status, Metadata metadata) { settingsBuilder.getStubSettingsBuilder().createReadSessionSettings(); createReadSessionSettings.setRetrySettings( - 
createReadSessionSettings - .getRetrySettings() - .toBuilder() + createReadSessionSettings.getRetrySettings().toBuilder() .setInitialRpcTimeout(org.threeten.bp.Duration.ofHours(2)) .setMaxRpcTimeout(org.threeten.bp.Duration.ofHours(2)) .setTotalTimeout(org.threeten.bp.Duration.ofHours(2)) @@ -1919,9 +1915,7 @@ public void onRetryAttempt(Status status, Metadata metadata) { settingsBuilder.getStubSettingsBuilder().splitReadStreamSettings(); splitReadStreamSettings.setRetrySettings( - splitReadStreamSettings - .getRetrySettings() - .toBuilder() + splitReadStreamSettings.getRetrySettings().toBuilder() .setInitialRpcTimeout(org.threeten.bp.Duration.ofSeconds(30)) .setMaxRpcTimeout(org.threeten.bp.Duration.ofSeconds(30)) .setTotalTimeout(org.threeten.bp.Duration.ofSeconds(30)) diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageAvroReader.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageAvroReader.java index 50ce6a89f7a9..f1ca5ec52db0 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageAvroReader.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageAvroReader.java @@ -69,7 +69,7 @@ public GenericRecord readSingleRecord() throws IOException { "nullness" // reused record is null but avro not annotated }) // record should not be reused, mutating outputted values is unsafe - GenericRecord newRecord = datumReader.read(/*reuse=*/ null, decoder); + GenericRecord newRecord = datumReader.read(/* reuse= */ null, decoder); return newRecord; } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageQuerySource.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageQuerySource.java index 064b9bebaf16..15e43774fe4b 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageQuerySource.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageQuerySource.java @@ -100,7 +100,7 @@ public static BigQueryStorageQuerySource create( parseFn, outputCoder, bqServices, - /*picosTimestampPrecision=*/ null); + /* picosTimestampPrecision= */ null); } public static BigQueryStorageQuerySource create( @@ -128,7 +128,7 @@ public static BigQueryStorageQuerySource create( parseFn, outputCoder, bqServices, - /*picosTimestampPrecision=*/ null); + /* picosTimestampPrecision= */ null); } private final String stepUuid; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageTableSource.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageTableSource.java index 8b7240158dc1..31a093a4cd6c 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageTableSource.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageTableSource.java @@ -66,7 +66,7 @@ public static BigQueryStorageTableSource create( outputCoder, bqServices, projectionPushdownApplied, - /*picosTimestampPrecision=*/ null); + /* picosTimestampPrecision= */ null); } public static BigQueryStorageTableSource create( @@ -84,8 +84,8 @@ public static BigQueryStorageTableSource create( parseFn, outputCoder, bqServices, - 
/*projectionPushdownApplied=*/ false, - /*picosTimestampPrecision=*/ null); + /* projectionPushdownApplied= */ false, + /* picosTimestampPrecision= */ null); } public static BigQueryStorageTableSource create( diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/DynamicDestinations.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/DynamicDestinations.java index 105da60c75b1..74f998da81dd 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/DynamicDestinations.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/DynamicDestinations.java @@ -96,8 +96,7 @@ public SideInputT sideInput(PCollectionView view) { } /** Get the current PipelineOptions if set. */ - @Nullable - PipelineOptions getPipelineOptions() { + @Nullable PipelineOptions getPipelineOptions() { return options; } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsBeamRow.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsBeamRow.java index 401395030542..2fd0796cf3f6 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsBeamRow.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsBeamRow.java @@ -38,8 +38,8 @@ class StorageApiDynamicDestinationsBeamRow { private final TableSchema tableSchema; private final SerializableFunction toRow; - private final @Nullable SerializableBiFunction< - TableRowToStorageApiProto.@Nullable SchemaInformation, T, TableRow> + private final @Nullable + SerializableBiFunction formatRecordOnFailureFunction; private final boolean usesCdc; @@ -50,7 +50,7 @@ class StorageApiDynamicDestinationsBeamRow toRow, @Nullable SerializableBiFunction - formatRecordOnFailureFunction, + formatRecordOnFailureFunction, boolean usesCdc) { super(inner); this.tableSchema = BeamRowToStorageApiProto.protoTableSchemaFromBeamSchema(schema); @@ -130,5 +130,6 @@ public TableRow toFailsafeTableRow(T element) { return BigQueryUtils.toTableRow(toRow.apply(element)); } } - }; + } + ; } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsProto.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsProto.java index 2a1d8f3be6f2..0303a3bfd2fe 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsProto.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsProto.java @@ -137,7 +137,8 @@ public TableRow toFailsafeTableRow(T element) { } } } - }; + } + ; private static DescriptorProtos.DescriptorProto fixNestedTypes( Descriptors.Descriptor descriptor) { diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsTableRow.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsTableRow.java index 1710d32689c9..86307eb70175 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsTableRow.java +++ 
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDynamicDestinationsTableRow.java @@ -272,8 +272,7 @@ public StorageApiWritePayload toMessage( boolean ignoreUnknown = ignoreUnknownValues || autoSchemaUpdates; @Nullable TableRow unknownFields = autoSchemaUpdates ? new TableRow() : null; boolean allowMissingFields = autoSchemaUpdates; - @Nullable - Message msg = + @Nullable Message msg = TableRowToStorageApiProto.messageFromTableRow( schemaInformation, descriptorToUse, @@ -289,5 +288,6 @@ public StorageApiWritePayload toMessage( unknownFields, formatRecordOnFailureFunction != null ? toFailsafeTableRow(element) : null); } - }; + } + ; } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWriteUnshardedRecords.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWriteUnshardedRecords.java index 2dfc8b2f1c00..e96e8515b7a5 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWriteUnshardedRecords.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWriteUnshardedRecords.java @@ -410,8 +410,7 @@ SchemaAndDescriptor getCurrentTableSchema(String stream, @Nullable TableSchema u CreateTableHelpers.createTableWrapper( () -> { if (autoUpdateSchema) { - @Nullable - TableSchema streamSchema = + @Nullable TableSchema streamSchema = Preconditions.checkStateNotNull(maybeWriteStreamService) .getWriteStreamSchema(streamName); if (streamSchema != null) { @@ -931,10 +930,9 @@ String retrieveErrorDetails(Iterable failedContext) { void postFlush() { // If we got a response indicating an updated schema, recreate the client. if (this.appendClientInfo != null && autoUpdateSchema) { - @Nullable - StreamAppendClient streamAppendClient = appendClientInfo.getStreamAppendClient(); - @Nullable - TableSchema updatedTableSchemaReturned = + @Nullable StreamAppendClient streamAppendClient = + appendClientInfo.getStreamAppendClient(); + @Nullable TableSchema updatedTableSchemaReturned = (streamAppendClient != null) ? streamAppendClient.getUpdatedSchema() : null; if (updatedTableSchemaReturned != null) { Optional updatedTableSchema = @@ -1180,8 +1178,7 @@ public void process( pipelineOptions.as(BigQueryOptions.class))); OutputReceiver failedRowsReceiver = o.get(failedRowsTag); - @Nullable - OutputReceiver successfulRowsReceiver = + @Nullable OutputReceiver successfulRowsReceiver = (successfulRowsTag != null) ? 
o.get(successfulRowsTag) : null; int recordBytes = element.getValue().getPayload().length; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java index cbace6e7ff40..fceee514607e 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java @@ -274,12 +274,13 @@ public String toString() { + " tryIteration: " + tryIteration; } - }; + } + ; @AutoValue abstract static class CreateRetryManagerResult { - abstract @Nullable RetryManager> - getRetryManager(); + abstract @Nullable + RetryManager> getRetryManager(); abstract boolean getSchemaMismatchSeen(); @@ -859,8 +860,7 @@ public void process( // So before creating a StreamWriter below, we fetch the table schema to check if we // missed an update. If so, use the new schema instead of the base schema. // TODO: There's still a race here! - @Nullable - TableSchema streamSchema = + @Nullable TableSchema streamSchema = MoreObjects.firstNonNull( writeStreamService.getWriteStreamSchema(getOrCreateStream.get()), TableSchema.getDefaultInstance()); @@ -1073,12 +1073,11 @@ public void process( appendSplitDistribution.update(numAppends); if (autoUpdateSchema) { - @Nullable - StreamAppendClient streamAppendClient = appendClientHolder.getStreamAppendClient(); + @Nullable StreamAppendClient streamAppendClient = + appendClientHolder.getStreamAppendClient(); TableSchema originalSchema = appendClientHolder.get().getTableSchema(); - @Nullable - TableSchema updatedSchemaReturned = + @Nullable TableSchema updatedSchemaReturned = (streamAppendClient != null) ? streamAppendClient.getUpdatedSchema() : null; // Update the table schema and clear the append client. if (updatedSchemaReturned != null) { diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StreamingInsertsMetrics.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StreamingInsertsMetrics.java index c372fa801151..95950ed4e2a1 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StreamingInsertsMetrics.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StreamingInsertsMetrics.java @@ -86,6 +86,7 @@ abstract class StreamingInsertsMetricsImpl implements StreamingInsertsMetrics { abstract ConcurrentLinkedQueue rpcLatencies(); abstract ConcurrentLinkedQueue rpcErrorStatus(); + // Represents for rows that are retried because of a failed // InsertAll RPC. 
abstract ConcurrentLinkedQueue> retriedRowsByStatus(); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowJsonCoder.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowJsonCoder.java index f8e877fe98e6..502857d4e906 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowJsonCoder.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowJsonCoder.java @@ -70,7 +70,8 @@ public long getEncodedElementByteSize(TableRow value) throws Exception { // FAIL_ON_EMPTY_BEANS is disabled in order to handle null values in // TableRow. - private static final ObjectMapper MAPPER;; + private static final ObjectMapper MAPPER; + ; private static final TableRowJsonCoder INSTANCE; private static final TypeDescriptor TYPE_DESCRIPTOR; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java index ba72bb8682fd..258e9ad21037 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java @@ -880,8 +880,7 @@ public static Descriptor wrapDescriptorProto(DescriptorProto descriptorProto) BigQuerySchemaUtil.isProtoCompatible(key) ? key : BigQuerySchemaUtil.generatePlaceholderFieldName(key); - @Nullable - FieldDescriptor fieldDescriptor = + @Nullable FieldDescriptor fieldDescriptor = (descriptor == null) ? null : descriptor.findFieldByName(protoFieldName); if (fieldDescriptor == null) { @@ -976,8 +975,7 @@ public static Descriptor wrapDescriptorProto(DescriptorProto descriptorProto) return (TableRow) unknownFields.computeIfAbsent(key, k -> nestedUnknown); }; - @Nullable - Object value = + @Nullable Object value = messageValueFromFieldValue( fieldSchemaInformation, fieldDescriptor, @@ -1141,8 +1139,8 @@ public static Descriptor wrapDescriptorProto(DescriptorProto descriptorProto) return null; } TableRow localUnknownFields = Preconditions.checkStateNotNull(unknownFields); - @Nullable - TableRow nested = (TableRow) localUnknownFields.getF().get(finalIndex).getV(); + @Nullable TableRow nested = + (TableRow) localUnknownFields.getF().get(finalIndex).getV(); if (nested == null) { nested = new TableRow(); localUnknownFields.getF().set(finalIndex, new TableCell().setV(nested)); @@ -1150,8 +1148,7 @@ public static Descriptor wrapDescriptorProto(DescriptorProto descriptorProto) return nested; }; - @Nullable - Object value = + @Nullable Object value = messageValueFromFieldValue( fieldSchemaInformation, fieldDescriptor, @@ -1547,9 +1544,8 @@ public static ByteString mergeNewFields( private static @Nullable Object messageValueFromFieldValue( SchemaInformation schemaInformation, - @Nullable - FieldDescriptor - fieldDescriptor, // Null in the case of recursively finding missing fields. + @Nullable FieldDescriptor + fieldDescriptor, // Null in the case of recursively finding missing fields. 
@Nullable Object bqValue, boolean ignoreUnknownValues, boolean allowMissingRequiredFields, @@ -1674,8 +1670,7 @@ public static ByteString mergeNewFields( .build(); } else { - @Nullable - ThrowingBiFunction converter = + @Nullable ThrowingBiFunction converter = TYPE_MAP_PROTO_CONVERTERS.get(schemaInformation.getType()); if (converter == null) { throw new RuntimeException("Unknown type " + schemaInformation.getType()); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableSchemaCache.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableSchemaCache.java index 554a67e119dc..41d8d21581f2 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableSchemaCache.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableSchemaCache.java @@ -178,8 +178,7 @@ private static String tableKey(TableReference tableReference) { try { // requesting the BASIC view will prevent BQ backend to run calculations // related with storage stats that are not needed here. - @Nullable - Table table = + @Nullable Table table = datasetService.getTable( tableReference, Collections.emptyList(), DatasetService.TableMetadataView.BASIC); schemaHolder = @@ -224,8 +223,7 @@ public void refreshSchema( String key = tableKey(tableReference); @Nullable SchemaHolder schemaHolder = cachedSchemas.get(key); int nextVersion = schemaHolder != null ? schemaHolder.getVersion() + 1 : 0; - @Nullable - Refresh existing = + @Nullable Refresh existing = tablesToRefresh.putIfAbsent( key, Refresh.of(datasetService, writeStreamService, options, nextVersion)); // Wait at least until the next version. @@ -358,8 +356,7 @@ public void refreshThread() { Map schemas = Maps.newHashMapWithExpectedSize(tables.size()); for (Map.Entry entry : tables.entrySet()) { Refresh refresh = entry.getValue(); - @Nullable - TableSchema tableSchema = + @Nullable TableSchema tableSchema = optimizedGetSchema( entry.getKey(), refresh.getDatasetService(), diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableSchemaUpdateUtils.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableSchemaUpdateUtils.java index 33f47afeba0f..c867ac94f5eb 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableSchemaUpdateUtils.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableSchemaUpdateUtils.java @@ -104,8 +104,7 @@ private static Result getUpdatedSchema( isEquivalent = isEquivalent && updatedTableFields.isEquivalent(); isEquivalent = isEquivalent - && tableFieldSchema - .toBuilder() + && tableFieldSchema.toBuilder() .clearFields() .build() .equals(newTableFieldSchema.toBuilder().clearFields().build()); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/UpgradeTableSchema.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/UpgradeTableSchema.java index 6c3c028b6d0d..899c3169bbaf 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/UpgradeTableSchema.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/UpgradeTableSchema.java @@ -76,8 +76,7 @@ public static TableSchema getIncrementalSchema( // TODO(reuvenlax): Fix this so that arbitrary types can be selected. 
TableFieldSchema.Type type = e.isStruct() ? TableFieldSchema.Type.STRUCT : TableFieldSchema.Type.STRING; - @Nullable - TableFieldSchema oldValue = + @Nullable TableFieldSchema oldValue = newFields .computeIfAbsent(prefix, p -> Maps.newLinkedHashMap()) .put( diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java index fb9ad2e8d0f5..a95bfc5f7c87 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java @@ -178,6 +178,7 @@ public PendingJobData( this.isFirstPane = isFirstPane; } } + // All pending load jobs. private List pendingJobs = Lists.newArrayList(); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableConfig.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableConfig.java index 15230c8adef9..a69b4dbfc694 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableConfig.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableConfig.java @@ -62,8 +62,9 @@ public abstract class BigtableConfig implements Serializable { abstract @Nullable BigtableOptions getBigtableOptions(); /** Configurator of the effective Bigtable Options. */ - abstract @Nullable SerializableFunction - getBigtableOptionsConfigurator(); + abstract @Nullable + SerializableFunction + getBigtableOptionsConfigurator(); /** Weather validate that table exists before writing. */ abstract boolean getValidate(); @@ -98,13 +99,17 @@ abstract static class Builder { abstract Builder setAppProfileId(ValueProvider appProfileId); - /** @deprecated please set the options directly in BigtableIO. */ + /** + * @deprecated please set the options directly in BigtableIO. + */ @Deprecated abstract Builder setBigtableOptions(BigtableOptions options); abstract Builder setValidate(boolean validate); - /** @deprecated please set the options directly in BigtableIO. */ + /** + * @deprecated please set the options directly in BigtableIO. + */ @Deprecated abstract Builder setBigtableOptionsConfigurator( SerializableFunction optionsConfigurator); @@ -137,14 +142,18 @@ BigtableConfig withAppProfileId(ValueProvider appProfileId) { return toBuilder().setAppProfileId(appProfileId).build(); } - /** @deprecated please set the options directly in BigtableIO. */ + /** + * @deprecated please set the options directly in BigtableIO. + */ @Deprecated public BigtableConfig withBigtableOptions(BigtableOptions options) { checkArgument(options != null, "Bigtable options can not be null"); return toBuilder().setBigtableOptions(options).build(); } - /** @deprecated please set the options directly in BigtableIO. */ + /** + * @deprecated please set the options directly in BigtableIO. 
+ */ @Deprecated public BigtableConfig withBigtableOptionsConfigurator( SerializableFunction configurator) { diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableConfigTranslator.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableConfigTranslator.java index 78fef141515f..98459be65732 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableConfigTranslator.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableConfigTranslator.java @@ -211,8 +211,7 @@ private static void configureChannelPool( InstantiatingGrpcChannelProvider grpcChannelProvider = (InstantiatingGrpcChannelProvider) stubSettings.getTransportChannelProvider(); stubSettings.setTransportChannelProvider( - grpcChannelProvider - .toBuilder() + grpcChannelProvider.toBuilder() .setChannelPoolSettings(ChannelPoolSettings.staticallySized(config.getChannelCount())) .build()); } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java index 100078d32e70..02236469e325 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java @@ -642,8 +642,7 @@ public Read withMaxBufferElementCount(@Nullable Integer maxBufferElementCount) { BigtableReadOptions bigtableReadOptions = getBigtableReadOptions(); return toBuilder() .setBigtableReadOptions( - bigtableReadOptions - .toBuilder() + bigtableReadOptions.toBuilder() .setMaxBufferElementCount(maxBufferElementCount) .build()) .build(); @@ -794,7 +793,7 @@ public abstract static class Write static SerializableFunction enableBulkApiConfigurator( final @Nullable SerializableFunction - userConfigurator) { + userConfigurator) { return optionsBuilder -> { if (userConfigurator != null) { optionsBuilder = userConfigurator.apply(optionsBuilder); @@ -1142,14 +1141,18 @@ public Write withFlowControl(boolean enableFlowControl) { .build(); } - /** @deprecated This method has been deprecated in Beam 2.60.0. It does not have an effect. */ + /** + * @deprecated This method has been deprecated in Beam 2.60.0. It does not have an effect. + */ @Deprecated public Write withThrottlingTargetMs(int throttlingTargetMs) { LOG.warn("withThrottlingTargetMs has been removed and does not have effect."); return this; } - /** @deprecated This method has been deprecated in Beam 2.60.0. It does not have an effect. */ + /** + * @deprecated This method has been deprecated in Beam 2.60.0. It does not have an effect. + */ @Deprecated public Write withThrottlingReportTargetMs(int throttlingReportTargetMs) { LOG.warn("withThrottlingReportTargetMs has been removed and does not have an effect."); @@ -1264,8 +1267,7 @@ public PCollection expand( new BigtableWriterFn( factory, bigtableConfig, - bigtableWriteOptions - .toBuilder() + bigtableWriteOptions.toBuilder() .setCloseWaitTimeout(closeWaitTimeout) .build(), input.getCoder(), @@ -2125,6 +2127,7 @@ public KV> getRecord() { return record; } } + /** * Overwrite options to determine what to do if change stream name is being reused and there * exists metadata of the same change stream name. 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/changestreams/ChangeStreamMetrics.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/changestreams/ChangeStreamMetrics.java index f732a52fd37e..b481e6d91d2a 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/changestreams/ChangeStreamMetrics.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/changestreams/ChangeStreamMetrics.java @@ -27,6 +27,7 @@ @Internal public class ChangeStreamMetrics implements Serializable { private static final long serialVersionUID = 7298901109362981596L; + // ------------------------ // Partition record metrics diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java index c9507475648d..d6578d1c437d 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java @@ -1398,6 +1398,7 @@ public WriteWithSummary withHintNumWorkers(ValueProvider hintNumWorkers public static class Write extends PTransform, PDone> { WriteWithSummary inner; + /** * Note that {@code projectId} is only {@code @Nullable} as a matter of build order, but if it * is {@code null} at instantiation time, an error will be thrown. @@ -1982,6 +1983,7 @@ private abstract static class Mutate protected @Nullable String localhost; protected boolean throttleRampup; protected ValueProvider hintNumWorkers; + /** A function that transforms each {@code T} into a mutation. */ private final SimpleFunction mutationFn; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreOptions.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreOptions.java index 8b90594bb655..c816986d585a 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreOptions.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreOptions.java @@ -35,8 +35,7 @@ public interface FirestoreOptions extends PipelineOptions { * Firestore clients. * @see com.google.cloud.firestore.FirestoreOptions.Builder#setEmulatorHost(java.lang.String) */ - @Nullable - String getEmulatorHost(); + @Nullable String getEmulatorHost(); /** * Define a host port pair to allow connecting to a Cloud Firestore emulator instead of the live @@ -76,8 +75,7 @@ public interface FirestoreOptions extends PipelineOptions { /** The Firestore project ID to connect to. */ @Description("Firestore project ID") - @Nullable - String getFirestoreProject(); + @Nullable String getFirestoreProject(); /** * Set the Firestore project ID, it will override the value from {@link GcpOptions#getProject()}. 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1.java index 3f22e636e8ab..66e4eca0d873 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1.java @@ -2005,14 +2005,12 @@ public final BldrT setDatabaseId(@Nullable String databaseId) { } @VisibleForTesting - @Nullable - String getProjectId() { + @Nullable String getProjectId() { return this.projectId; } @VisibleForTesting - @Nullable - String getDatabaseId() { + @Nullable String getDatabaseId() { return this.databaseId; } } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosImpl.java index 978b47e83b0c..06b7dbe38013 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosImpl.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosImpl.java @@ -72,6 +72,7 @@ final class RpcQosImpl implements RpcQos { .stream() .map(Code::getNumber) .collect(ImmutableSet.toImmutableSet()); + /** * The target minimum number of requests per samplePeriodMs, even if no requests succeed. Must be * greater than 0, else we could throttle to zero. Because every decision is probabilistic, there diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosOptions.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosOptions.java index 8945712065f7..950dbe9374b9 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosOptions.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosOptions.java @@ -429,6 +429,7 @@ public static final class Builder { * them. */ private static final long FIRESTORE_RPC_BYTES_MAX = (long) (9.5 * 1024 * 1024); + /** The Cloud Firestore API has a limit of 500 document updates per request. */ private static final int FIRESTORE_SINGLE_REQUEST_UPDATE_DOCUMENTS_MAX = 500; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIO.java index e8c1d601912c..99a13481b8de 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIO.java @@ -73,6 +73,7 @@ private ReadStudyMetadata() {} /** TupleTag for the main output. */ public static final TupleTag METADATA = new TupleTag() {}; + /** TupleTag for any error response. 
*/ public static final TupleTag ERROR_MESSAGE = new TupleTag() {}; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIO.java index 70be676c75fe..d524b1e60df4 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIO.java @@ -462,6 +462,7 @@ public static class Result implements POutput, PInput { private PCollection resources; private PCollection> failedReads; + /** The Pct. */ PCollectionTuple pct; @@ -524,6 +525,7 @@ public void finishSpecifyingOutput( /** The tag for the main output of FHIR resources. */ public static final TupleTag OUT = new TupleTag() {}; + /** The tag for the deadletter output of FHIR resources. */ public static final TupleTag> DEAD_LETTER = new TupleTag>() {}; @@ -637,12 +639,15 @@ public abstract static class Write extends PTransform, Write /** The tag for successful writes to FHIR store. */ public static final TupleTag SUCCESSFUL_BODY = new TupleTag() {}; + /** The tag for the failed writes to FHIR store. */ public static final TupleTag> FAILED_BODY = new TupleTag>() {}; + /** The tag for the files that failed to FHIR store. */ public static final TupleTag> FAILED_FILES = new TupleTag>() {}; + /** The tag for temp files for import to FHIR store. */ public static final TupleTag TEMP_FILES = new TupleTag() {}; @@ -1440,6 +1445,7 @@ static class ExecuteBundlesFn extends DoFn fhirStore; @@ -1911,6 +1917,7 @@ public void finishSpecifyingOutput( /** The tag for the main output of FHIR Resources from a search. */ public static final TupleTag> OUT = new TupleTag>() {}; + /** The tag for the deadletter output of FHIR Resources. */ public static final TupleTag> DEAD_LETTER = new TupleTag>() {}; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOPatientEverything.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOPatientEverything.java index e0d3647d25e6..4d13b0a9b910 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOPatientEverything.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOPatientEverything.java @@ -57,6 +57,7 @@ public class FhirIOPatientEverything /** The tag for the main output of FHIR Resources from a GetPatientEverything request. */ public static final TupleTag OUT = new TupleTag() {}; + /** The tag for the deadletter output of FHIR Resources from a GetPatientEverything request. */ public static final TupleTag> DEAD_LETTER = new TupleTag>() {}; @@ -74,6 +75,7 @@ public abstract static class PatientEverythingParameter implements Serializable * projects/{p}/locations/{l}/datasets/{d}/fhirStores/{f}/fhir/{resourceType}/{id}. */ abstract String getResourceName(); + /** Optional filters for the request, eg. 
start, end, _type, _since, _count */ abstract @Nullable Map getFilters(); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirSearchParameter.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirSearchParameter.java index c87368964ea4..547f398a0d8f 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirSearchParameter.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirSearchParameter.java @@ -31,11 +31,13 @@ public class FhirSearchParameter { /** FHIR resource type. */ private final String resourceType; + /** * The key is used as a key for the search query, if there is source information to propagate * through the pipeline. */ private final String key; + /** * The search query. For an OR search, put both query values in a single string. For an AND * search, use a list. diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IO.java index 3647ef7671eb..7e7da6c76aa1 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IO.java @@ -294,6 +294,7 @@ private Result(PCollectionTuple pct) { /** The tag for the main output of HL7v2 Messages. */ public static final TupleTag OUT = new TupleTag() {}; + /** The tag for the deadletter output of HL7v2 Messages. */ public static final TupleTag> DEAD_LETTER = new TupleTag>() {}; @@ -416,6 +417,7 @@ private Result(PCollectionTuple pct) { /** The tag for the main output of HL7v2 read responses. */ public static final TupleTag OUT = new TupleTag() {}; + /** The tag for the deadletter output of HL7v2 read responses. */ public static final TupleTag> DEAD_LETTER = new TupleTag>() {}; @@ -753,6 +755,7 @@ public abstract static class Write extends PTransform, /** The tag for the successful writes to HL7v2 store`. */ public static final TupleTag> SUCCESS = new TupleTag>() {}; + /** The tag for the failed writes to HL7v2 store`. */ public static final TupleTag> FAILED = new TupleTag>() {}; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2Message.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2Message.java index 64f27bec600b..11ab75ecc7b2 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2Message.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2Message.java @@ -173,6 +173,7 @@ public String getSchematizedData() { public void setSchematizedData(String schematizedData) { this.schematizedData = schematizedData; } + /** * Gets labels. 
* diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HealthcareApiClient.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HealthcareApiClient.java index 39c30b949425..e5fd3882b596 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HealthcareApiClient.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HealthcareApiClient.java @@ -283,6 +283,7 @@ HttpBody getPatientEverything( */ FhirStore createFhirStore(String dataset, String name, String version, String pubsubTopic) throws IOException; + /** * Create FHIR Store. * diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PreparePubsubWriteDoFn.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PreparePubsubWriteDoFn.java index 9171bdf28494..cb2531f2584e 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PreparePubsubWriteDoFn.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PreparePubsubWriteDoFn.java @@ -50,6 +50,7 @@ public class PreparePubsubWriteDoFn extends DoFn private boolean logOrderingKeyUnconfigured = false; private SerializableFunction, PubsubMessage> formatFunction; @Nullable SerializableFunction, PubsubIO.PubsubTopic> topicFunction; + /** Last TopicPath that reported Lineage. */ private transient @Nullable String reportedLineage; @@ -148,8 +149,8 @@ static int validatePubsubMessage(PubsubMessage message, int maxPublishBatchSize) PreparePubsubWriteDoFn( SerializableFunction, PubsubMessage> formatFunction, - @Nullable - SerializableFunction, PubsubIO.PubsubTopic> topicFunction, + @Nullable SerializableFunction, PubsubIO.PubsubTopic> + topicFunction, boolean usesOrderingKey, int maxPublishBatchSize, BadRecordRouter badRecordRouter, diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIO.java index d62d294ed2a7..256c3c0d4d1a 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIO.java @@ -1134,13 +1134,11 @@ public PCollection expand(PBegin input) { "PubSubIO cannot be configured with both a dead letter topic and a bad record router"); } - @Nullable - ValueProvider topicPath = + @Nullable ValueProvider topicPath = getTopicProvider() == null ? null : NestedValueProvider.of(getTopicProvider(), new TopicPathTranslator()); - @Nullable - ValueProvider subscriptionPath = + @Nullable ValueProvider subscriptionPath = getSubscriptionProvider() == null ? 
null : NestedValueProvider.of(getSubscriptionProvider(), new SubscriptionPathTranslator()); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubSchemaIOProvider.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubSchemaIOProvider.java index d39cee3ddc06..b7806511f6c6 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubSchemaIOProvider.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubSchemaIOProvider.java @@ -235,8 +235,7 @@ public PCollection expand(PBegin begin) { @Override public PTransform, POutput> buildWriter() { - @Nullable - PayloadSerializer serializer = + @Nullable PayloadSerializer serializer = needsSerializer() ? config.serializer(stripFromTimestampField(dataSchema)) : null; return new PTransform, POutput>() { diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSink.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSink.java index 3fe7d51aec1e..45b559f97cad 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSink.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSink.java @@ -565,6 +565,7 @@ public PubsubUnboundedSink( RecordIdMethod.RANDOM, pubsubRootUrl); } + /** Get the topic being written to. */ public @Nullable TopicPath getTopic() { return topic != null ? topic.get() : null; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadOperation.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadOperation.java index 933394982e30..6549a679003c 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadOperation.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadOperation.java @@ -123,14 +123,14 @@ public ReadOperation withPartitionOptions(PartitionOptions partitionOptions) { private static final Pattern queryPattern = Pattern.compile( "SELECT\\s+.+FROM\\s+\\[?(?[^\\s\\[\\]]+)\\]?", Pattern.CASE_INSENSITIVE); + /** * Get table name associated with this operation. * *
Currently only supports explicitly set table, and limited cases of set query. Return null * for unsupported cases. */ - @Nullable - String tryGetTableName() { + @Nullable String tryGetTableName() { if (!Strings.isNullOrEmpty(getTable())) { return getTable(); } else if (getQuery() != null) { diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java index cccfced08218..44f8a247be51 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java @@ -1760,7 +1760,9 @@ public abstract static class ReadChangeStream abstract @Nullable RpcPriority getRpcPriority(); - /** @deprecated This configuration has no effect, as tracing is not available */ + /** + * @deprecated This configuration has no effect, as tracing is not available + */ @Deprecated abstract @Nullable Double getTraceSampleProbability(); @@ -2158,8 +2160,7 @@ SpannerConfig buildChangeStreamSpannerConfig() { // Set default retry timeouts for ReadChangeStream if (changeStreamSpannerConfig.getExecuteStreamingSqlRetrySettings() == null) { changeStreamSpannerConfig = - changeStreamSpannerConfig - .toBuilder() + changeStreamSpannerConfig.toBuilder() .setExecuteStreamingSqlRetrySettings( RetrySettings.newBuilder() .setTotalTimeout(org.threeten.bp.Duration.ofMinutes(5)) diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/ChangeStreamsConstants.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/ChangeStreamsConstants.java index ea325a118728..afc26e54c53e 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/ChangeStreamsConstants.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/ChangeStreamsConstants.java @@ -83,6 +83,7 @@ public class ChangeStreamsConstants { */ private static final String SAMPLE_PARTITION_TOKEN = String.join("", Collections.nCopies(140, "*")); + /** * We use a bogus partition here to estimate the average size of a partition metadata record. * diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/DataChangeRecordAction.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/DataChangeRecordAction.java index 555b1fefbebc..b122f8d73a3a 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/DataChangeRecordAction.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/DataChangeRecordAction.java @@ -43,7 +43,9 @@ public class DataChangeRecordAction { private static final Logger LOG = LoggerFactory.getLogger(DataChangeRecordAction.class); private final ThroughputEstimator throughputEstimator; - /** @param throughputEstimator an estimator to calculate local throughput of this action. */ + /** + * @param throughputEstimator an estimator to calculate local throughput of this action. 
+ */ public DataChangeRecordAction(ThroughputEstimator throughputEstimator) { this.throughputEstimator = throughputEstimator; } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/dao/PartitionMetadataAdminDao.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/dao/PartitionMetadataAdminDao.java index 80bf178f49a9..a3f36ef1dd84 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/dao/PartitionMetadataAdminDao.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/dao/PartitionMetadataAdminDao.java @@ -39,39 +39,49 @@ public class PartitionMetadataAdminDao { /** Metadata table column name for the partition token. */ public static final String COLUMN_PARTITION_TOKEN = "PartitionToken"; + /** Metadata table column name for parent partition tokens. */ public static final String COLUMN_PARENT_TOKENS = "ParentTokens"; + /** * Metadata table column name for the timestamp to start the change stream query of the partition. */ public static final String COLUMN_START_TIMESTAMP = "StartTimestamp"; + /** * Metadata table column name for the timestamp to end the change stream query of the partition. */ public static final String COLUMN_END_TIMESTAMP = "EndTimestamp"; + /** Metadata table column name for the change stream query heartbeat interval in millis. */ public static final String COLUMN_HEARTBEAT_MILLIS = "HeartbeatMillis"; + /** * Metadata table column name for the state that the partition is currently in. Possible states * can be seen in {@link * org.apache.beam.sdk.io.gcp.spanner.changestreams.model.PartitionMetadata.State}. */ public static final String COLUMN_STATE = "State"; + /** Metadata table column name for the current watermark of the partition. */ public static final String COLUMN_WATERMARK = "Watermark"; + /** Metadata table column name for the timestamp at which the partition row was first created. */ public static final String COLUMN_CREATED_AT = "CreatedAt"; + /** * Metadata table column name for the timestamp at which the partition was scheduled by the {@link * org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.DetectNewPartitionsDoFn} SDF. */ public static final String COLUMN_SCHEDULED_AT = "ScheduledAt"; + /** * Metadata table column name for the timestamp at which the partition was marked as running by * the {@link org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.ReadChangeStreamPartitionDoFn} * SDF. 
*/ public static final String COLUMN_RUNNING_AT = "RunningAt"; + /** * Metadata table column name for the timestamp at which the partition was marked as finished by * the {@link org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.ReadChangeStreamPartitionDoFn} diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/dofn/ReadChangeStreamPartitionDoFn.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/dofn/ReadChangeStreamPartitionDoFn.java index de509257ad8f..d3c4c3c397a8 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/dofn/ReadChangeStreamPartitionDoFn.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/dofn/ReadChangeStreamPartitionDoFn.java @@ -75,6 +75,7 @@ public class ReadChangeStreamPartitionDoFn extends DoFn childPartitionJsonFrom(partition.getPartitionToken(), value)) .collect(Collectors.toList()), changeStreamRecordMetadataFrom( diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/TableContainer.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/TableContainer.java index b44b9596cc12..61353f2f7282 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/TableContainer.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/TableContainer.java @@ -88,8 +88,7 @@ void setPrimaryKeyColumns(List primaryKeyColumns) { this.primaryKeyColumnIndices = primaryColumnFieldIndices(primaryKeyColumns, table); } - @Nullable - List getPrimaryKey(TableRow tableRow) { + @Nullable List getPrimaryKey(TableRow tableRow) { if (primaryKeyColumns == null) { return null; } diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageQueryTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageQueryTest.java index 1d4fe01c1800..af3098fd3c99 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageQueryTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageQueryTest.java @@ -300,15 +300,15 @@ public void testQuerySourceEstimatedSize() throws Exception { BigQueryStorageQuerySource querySource = BigQueryStorageQuerySource.create( - /* stepUuid = */ "stepUuid", + /* stepUuid= */ "stepUuid", ValueProvider.StaticValueProvider.of(fakeQuery), - /* flattenResults = */ true, - /* useLegacySql = */ true, - /* priority = */ QueryPriority.INTERACTIVE, - /* location = */ null, - /* queryTempDataset = */ null, - /* queryTempProject = */ null, - /* kmsKey = */ null, + /* flattenResults= */ true, + /* useLegacySql= */ true, + /* priority= */ QueryPriority.INTERACTIVE, + /* location= */ null, + /* queryTempDataset= */ null, + /* queryTempProject= */ null, + /* kmsKey= */ null, null, new TableRowParser(), TableRowJsonCoder.of(), @@ -416,13 +416,13 @@ private void doQuerySourceInitialSplit( BigQueryStorageQuerySource.create( stepUuid, ValueProvider.StaticValueProvider.of(encodedQuery), - /* flattenResults = */ true, - /* useLegacySql = */ true, - /* priority = */ QueryPriority.BATCH, - /* location = */ null, - /* queryTempDataset = */ null, - /* queryTempProject = */ null, - /* kmsKey = */ null, + /* 
flattenResults= */ true, + /* useLegacySql= */ true, + /* priority= */ QueryPriority.BATCH, + /* location= */ null, + /* queryTempDataset= */ null, + /* queryTempProject= */ null, + /* kmsKey= */ null, null, new TableRowParser(), TableRowJsonCoder.of(), @@ -518,13 +518,13 @@ public void testQuerySourceInitialSplit_NoReferencedTables() throws Exception { BigQueryStorageQuerySource.create( stepUuid, ValueProvider.StaticValueProvider.of(encodedQuery), - /* flattenResults = */ true, - /* useLegacySql = */ true, - /* priority = */ QueryPriority.BATCH, - /* location = */ null, - /* queryTempDataset = */ null, - /* queryTempProject = */ null, - /* kmsKey = */ null, + /* flattenResults= */ true, + /* useLegacySql= */ true, + /* priority= */ QueryPriority.BATCH, + /* location= */ null, + /* queryTempDataset= */ null, + /* queryTempProject= */ null, + /* kmsKey= */ null, null, new TableRowParser(), TableRowJsonCoder.of(), @@ -667,13 +667,13 @@ public void testQuerySourceInitialSplitWithBigQueryProject_EmptyResult() throws BigQueryStorageQuerySource.create( stepUuid, ValueProvider.StaticValueProvider.of(encodedQuery), - /* flattenResults = */ true, - /* useLegacySql = */ true, - /* priority = */ QueryPriority.BATCH, - /* location = */ null, - /* queryTempDataset = */ null, - /* queryTempProject = */ null, - /* kmsKey = */ null, + /* flattenResults= */ true, + /* useLegacySql= */ true, + /* priority= */ QueryPriority.BATCH, + /* location= */ null, + /* queryTempDataset= */ null, + /* queryTempProject= */ null, + /* kmsKey= */ null, DataFormat.AVRO, new TableRowParser(), TableRowJsonCoder.of(), @@ -740,13 +740,13 @@ public void testQuerySourceInitialSplit_EmptyResult() throws Exception { BigQueryStorageQuerySource.create( stepUuid, ValueProvider.StaticValueProvider.of(encodedQuery), - /* flattenResults = */ true, - /* useLegacySql = */ true, - /* priority = */ QueryPriority.BATCH, - /* location = */ null, - /* queryTempDataset = */ null, - /* queryTempProject = */ null, - /* kmsKey = */ null, + /* flattenResults= */ true, + /* useLegacySql= */ true, + /* priority= */ QueryPriority.BATCH, + /* location= */ null, + /* queryTempDataset= */ null, + /* queryTempProject= */ null, + /* kmsKey= */ null, null, new TableRowParser(), TableRowJsonCoder.of(), @@ -763,15 +763,15 @@ public void testQuerySourceInitialSplit_EmptyResult() throws Exception { public void testQuerySourceCreateReader() throws Exception { BigQueryStorageQuerySource querySource = BigQueryStorageQuerySource.create( - /* stepUuid = */ "testStepUuid", + /* stepUuid= */ "testStepUuid", ValueProvider.StaticValueProvider.of("SELECT * FROM `dataset.table`"), - /* flattenResults = */ false, - /* useLegacySql = */ false, - /* priority = */ QueryPriority.INTERACTIVE, - /* location = */ "asia-northeast1", - /* queryTempDataset = */ null, - /* queryTempProject = */ null, - /* kmsKey = */ null, + /* flattenResults= */ false, + /* useLegacySql= */ false, + /* priority= */ QueryPriority.INTERACTIVE, + /* location= */ "asia-northeast1", + /* queryTempDataset= */ null, + /* queryTempProject= */ null, + /* kmsKey= */ null, null, new TableRowParser(), TableRowJsonCoder.of(), diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java index 783114cbacca..e3085b7c6604 100644 --- 
a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java @@ -1453,7 +1453,7 @@ public void testStreamSourceSplitAtFractionFailsWhenParentIsPastSplitPoint() thr "Given row offset is invalid for stream.", new StatusRuntimeException(Status.FAILED_PRECONDITION), GrpcStatusCode.of(Code.FAILED_PRECONDITION), - /* retryable = */ false)); + /* retryable= */ false)); BigQueryStorageStreamSource streamSource = BigQueryStorageStreamSource.create( @@ -2412,7 +2412,7 @@ public void testStreamSourceSplitAtFractionFailsWhenParentIsPastSplitPointArrow( "Given row offset is invalid for stream.", new StatusRuntimeException(Status.FAILED_PRECONDITION), GrpcStatusCode.of(Code.FAILED_PRECONDITION), - /* retryable = */ false)); + /* retryable= */ false)); BigQueryStorageStreamSource streamSource = BigQueryStorageStreamSource.create( diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDataTriggeredSchemaUpdateIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDataTriggeredSchemaUpdateIT.java index 1d35fea44966..d90dedaf06f6 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDataTriggeredSchemaUpdateIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiDataTriggeredSchemaUpdateIT.java @@ -174,7 +174,8 @@ TableRow getRow(int i) { row.set("req", ImmutableList.of("43", "44")); } return row; - }; + } + ; } @Test diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProtoIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProtoIT.java index aedba31f62fa..40034f42ab8d 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProtoIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProtoIT.java @@ -424,7 +424,8 @@ public void testNestedRichTypesAndNull() throws IOException, InterruptedExceptio @Test public void testWriteProtosEncodedTypes() - throws IOException, InterruptedException, + throws IOException, + InterruptedException, TableRowToStorageApiProto.SchemaConversionException { String tableSpec = createTable(PROTO_ENCODED_TABLE_SCHEMA); final String timestamp = "1970-01-01T00:00:00.000043"; @@ -501,7 +502,8 @@ public void testWriteProtosEncodedTypes() @Test public void testWriteProtosStringTypes() - throws IOException, InterruptedException, + throws IOException, + InterruptedException, TableRowToStorageApiProto.SchemaConversionException { String tableSpec = createTable(PROTO_UNENCODED_TABLE_SCHEMA); final String timestamp = "1970-01-01T00:00:00.000043"; diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java index 4c164e6a38db..3106063b2d9a 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java +++ 
b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java @@ -184,7 +184,8 @@ public void testDynamicDestinations(boolean streaming) throws IOException, Inter long mod = i; String dest = destinations.get(i); List writtenRows = - BQ_CLIENT.queryUnflattened(String.format("SELECT * FROM `%s`", dest), PROJECT, true, true) + BQ_CLIENT + .queryUnflattened(String.format("SELECT * FROM `%s`", dest), PROJECT, true, true) .stream() .map(tableRow -> BigQueryUtils.toBeamRow(rowFilter.outputSchema(), tableRow)) .collect(Collectors.toList()); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableSharedClientTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableSharedClientTest.java index 3cdba0d9f25a..ce4709fd300d 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableSharedClientTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableSharedClientTest.java @@ -237,8 +237,7 @@ public BigtableDataSettings.Builder apply( builder .stubSettings() .setTransportChannelProvider( - oldTransport - .toBuilder() + oldTransport.toBuilder() .setChannelPoolSettings(ChannelPoolSettings.staticallySized(1)) .build()); // Make sure to disable builtin metrics diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/AdaptiveThrottlerTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/AdaptiveThrottlerTest.java index 6db8af3906c3..4dcb070dac01 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/AdaptiveThrottlerTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/AdaptiveThrottlerTest.java @@ -104,7 +104,7 @@ public void testThrottlingAfterErrors() throws Exception { assertThat( String.format("for i=%d", i), throttler.throttlingProbability(START_TIME_MS + i), - closeTo(0.33, /*error=*/ 0.1)); + closeTo(0.33, /* error= */ 0.1)); // Requests 10..13 should be throttled, 14..19 not throttled given the mocked random numbers // that we fed to throttler. 
assertThat(String.format("for i=%d", i), throttled, equalTo(i < 14)); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/BaseFirestoreV1WriteFnTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/BaseFirestoreV1WriteFnTest.java index f20181fbc320..1dfef02f1731 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/BaseFirestoreV1WriteFnTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/BaseFirestoreV1WriteFnTest.java @@ -359,8 +359,7 @@ public final void endToEnd_deadlineExceededOnAnIndividualWriteResultsInThrottlin final long docCount = totalDocCount / numWorkers; LOG.info("docCount = {}", docCount); RpcQosOptions options = - rpcQosOptions - .toBuilder() + rpcQosOptions.toBuilder() .withHintMaxNumWorkers(numWorkers) .withSamplePeriod(Duration.standardMinutes(10)) // .withBatchInitialCount(5) @@ -483,9 +482,7 @@ public final void endToEnd_maxBatchSizeRespected() throws Exception { BatchWriteRequest.newBuilder().setDatabase("projects/testing-project/databases/(default)"); BatchWriteRequest expectedGroup1Request = - builder - .build() - .toBuilder() + builder.build().toBuilder() .addWrites(write0) .addWrites(write1) .addWrites(write2) diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnBatchWriteWithDeadLetterQueueTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnBatchWriteWithDeadLetterQueueTest.java index e7f98ff73c6b..b8f3f6ea5ddd 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnBatchWriteWithDeadLetterQueueTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnBatchWriteWithDeadLetterQueueTest.java @@ -145,8 +145,7 @@ public void enqueueingWritesValidateBytesSize() throws Exception { public void nonRetryableWriteIsOutput() throws Exception { Write write0 = FirestoreProtoHelpers.newWrite(0); Write write1 = - FirestoreProtoHelpers.newWrite(1) - .toBuilder() + FirestoreProtoHelpers.newWrite(1).toBuilder() .setCurrentDocument(Precondition.newBuilder().setExists(false).build()) .build(); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnBatchWriteWithSummaryTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnBatchWriteWithSummaryTest.java index e7174537943e..973521b5b055 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnBatchWriteWithSummaryTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnBatchWriteWithSummaryTest.java @@ -158,8 +158,7 @@ public void enqueueingWritesValidateBytesSize() throws Exception { public void nonRetryableWriteResultStopsAttempts() throws Exception { Write write0 = FirestoreProtoHelpers.newWrite(0); Write write1 = - FirestoreProtoHelpers.newWrite(1) - .toBuilder() + FirestoreProtoHelpers.newWrite(1).toBuilder() .setCurrentDocument(Precondition.newBuilder().setExists(false).build()) .build(); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnPartitionQueryTest.java 
b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnPartitionQueryTest.java index 20f728bab73a..134015606eee 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnPartitionQueryTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnPartitionQueryTest.java @@ -225,8 +225,7 @@ protected PartitionQueryPage computeNext() { .build(); PartitionQueryResponse expectedResponse = - response1 - .toBuilder() + response1.toBuilder() .clearNextPageToken() .addAllPartitions(response2.getPartitionsList()) .build(); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnRunQueryTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnRunQueryTest.java index 78dad6faeaea..ff475457d442 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnRunQueryTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnRunQueryTest.java @@ -159,10 +159,7 @@ public void resumeFromLastReadValue_withNoOrderBy() throws Exception { RunQueryRequest.newBuilder() .setParent(String.format("projects/%s/databases/(default)/document", projectId)) .setStructuredQuery( - testData - .request - .getStructuredQuery() - .toBuilder() + testData.request.getStructuredQuery().toBuilder() .setStartAt( Cursor.newBuilder() .setBefore(false) @@ -201,10 +198,7 @@ private void buildAndRunQueryRetryTest(String fieldName, String fieldValue) thro RunQueryRequest.newBuilder() .setParent(String.format("projects/%s/databases/(default)/document", projectId)) .setStructuredQuery( - testData - .request - .getStructuredQuery() - .toBuilder() + testData.request.getStructuredQuery().toBuilder() .setStartAt( Cursor.newBuilder() .setBefore(false) diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/QueryUtilsTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/QueryUtilsTest.java index 3a60381c11dd..9db2f7ba9cad 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/QueryUtilsTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/QueryUtilsTest.java @@ -205,8 +205,7 @@ public void getImplicitOrderBy_nameInWhere() { @Test public void getImplicitOrderBy_malformedWhereThrows() { testQuery = - testQuery - .toBuilder() + testQuery.toBuilder() .setWhere( Filter.newBuilder() .setUnaryFilter( diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosTest.java index f60c7ea60ca4..8910d491c4fd 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosTest.java @@ -152,8 +152,7 @@ public void setUp() { // init here after mocks have been initialized options = - RpcQosOptions.defaultOptions() - .toBuilder() + RpcQosOptions.defaultOptions().toBuilder() .withInitialBackoff(Duration.millis(1)) .withSamplePeriod(Duration.millis(100)) .withSamplePeriodBucketSize(Duration.millis(10)) diff --git 
a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/it/BaseFirestoreIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/it/BaseFirestoreIT.java index e0776927db0f..76ce6b650a07 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/it/BaseFirestoreIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/it/BaseFirestoreIT.java @@ -86,8 +86,7 @@ abstract class BaseFirestoreIT { public final TestPipeline testPipeline2 = TestPipeline.create(); protected static final RpcQosOptions RPC_QOS_OPTIONS = - RpcQosOptions.defaultOptions() - .toBuilder() + RpcQosOptions.defaultOptions().toBuilder() .withMaxAttempts(1) .withHintMaxNumWorkers(1) .build(); @@ -250,7 +249,7 @@ public final void listDocuments() throws Exception { public final void runQuery() throws Exception { String collectionId = "a"; DocumentGenerator documentGenerator = - helper.documentGenerator(NUM_ITEMS_TO_GENERATE, collectionId, /* addBazDoc = */ true); + helper.documentGenerator(NUM_ITEMS_TO_GENERATE, collectionId, /* addBazDoc= */ true); Instant readTime = toMaxWriteTime(documentGenerator.generateDocuments().get(10, TimeUnit.SECONDS)); Thread.sleep(5); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IOTestUtil.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IOTestUtil.java index e7a36a247c9f..c29f5d8eba76 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IOTestUtil.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IOTestUtil.java @@ -37,6 +37,7 @@ class HL7v2IOTestUtil { public static final long HL7V2_INDEXING_TIMEOUT_MINUTES = 10L; + /** Google Cloud Healthcare Dataset in Apache Beam integration test project. */ public static final String HEALTHCARE_DATASET_TEMPLATE = "projects/%s/locations/us-central1/datasets/apache-beam-integration-testing"; @@ -84,6 +85,7 @@ class HL7v2IOTestUtil { .collect(Collectors.toList()); static final long NUM_ADT = 2; + /** Clear all messages from the HL7v2 store. 
*/ static void deleteAllHL7v2Messages(HealthcareApiClient client, String hl7v2Store) throws IOException { diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIOTest.java index 3d9c65aa1376..4aefac0379a8 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIOTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIOTest.java @@ -734,8 +734,7 @@ public void testWriteMalformedMessagesWithErrorHandler() throws Exception { // The most straightforward method to simulate a bad message is to have a format function that // deterministically fails based on some value messages.apply( - PubsubIO.writeMessages() - .toBuilder() + PubsubIO.writeMessages().toBuilder() .setFormatFn( (ValueInSingleWindow messageAndWindow) -> { if (messageAndWindow.getValue().getTopic().equals("badTopic")) { diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubWriteIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubWriteIT.java index 45c85183d536..cb3055caed70 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubWriteIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubWriteIT.java @@ -165,8 +165,7 @@ public void testBoundedWriteMessageWithAttributesAndOrderingKey() throws IOExcep for (IncomingMessage incomingMessage : incomingMessages) { com.google.pubsub.v1.PubsubMessage message = incomingMessage.message(); - @Nullable - PubsubMessage outgoingMessage = + @Nullable PubsubMessage outgoingMessage = outgoingMessages.remove(message.getAttributesMap().get("id")); if (outgoingMessage != null) { emptyOrDuplicatePull = false; diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerAccessorTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerAccessorTest.java index aad44879ce90..793db6ae22fd 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerAccessorTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerAccessorTest.java @@ -45,8 +45,7 @@ public void setUp() throws Exception { @Test public void testCreateOnlyOnce() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setProjectId(StaticValueProvider.of("project")) .setInstanceId(StaticValueProvider.of("test1")) @@ -70,16 +69,14 @@ public void testCreateOnlyOnce() { @Test public void testRefCountedSpannerAccessorDifferentDbsOnlyOnce() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setProjectId(StaticValueProvider.of("project")) .setInstanceId(StaticValueProvider.of("test1")) .setDatabaseId(StaticValueProvider.of("test1")) .build(); SpannerConfig config2 = - config1 - .toBuilder() + config1.toBuilder() .setInstanceId(StaticValueProvider.of("test2")) .setDatabaseId(StaticValueProvider.of("test2")) .build(); @@ -106,8 +103,7 @@ public void testRefCountedSpannerAccessorDifferentDbsOnlyOnce() { @Test public void testCreateWithValidDatabaseRole() { SpannerConfig config1 = - 
SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setProjectId(StaticValueProvider.of("project")) .setInstanceId(StaticValueProvider.of("test1")) @@ -127,8 +123,7 @@ public void testCreateWithValidDatabaseRole() { @Test public void testCreateWithEmptyDatabaseRole() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setProjectId(StaticValueProvider.of("project")) .setInstanceId(StaticValueProvider.of("test1")) @@ -149,8 +144,7 @@ public void testCreateWithEmptyDatabaseRole() { public void testBuildSpannerOptionsWithCredential() { TestCredential testCredential = new TestCredential(); SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setProjectId(StaticValueProvider.of("project")) .setInstanceId(StaticValueProvider.of("test-instance")) @@ -170,8 +164,7 @@ public void testBuildSpannerOptionsWithCredential() { @Test public void testBuildSpannerOptionsWithNoHost() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setProjectId(StaticValueProvider.of("project")) .setInstanceId(StaticValueProvider.of("test1")) @@ -186,8 +179,7 @@ public void testBuildSpannerOptionsWithNoHost() { @Test public void testBuildSpannerOptionsWithNullHost() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setHost((StaticValueProvider) null) .setProjectId(StaticValueProvider.of("project")) @@ -203,8 +195,7 @@ public void testBuildSpannerOptionsWithNullHost() { @Test public void testBuildSpannerOptionsWithNullHostValue() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setHost(StaticValueProvider.of((String) null)) .setProjectId(StaticValueProvider.of("project")) @@ -220,8 +211,7 @@ public void testBuildSpannerOptionsWithNullHostValue() { @Test public void testBuildSpannerOptionsWithEmptyHost() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setHost(StaticValueProvider.of("")) .setProjectId(StaticValueProvider.of("project")) @@ -238,8 +228,7 @@ public void testBuildSpannerOptionsWithEmptyHost() { public void testBuildSpannerOptionsWithCustomHost() { final String host = "https://alternative-host.example.org"; SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setHost(StaticValueProvider.of(host)) .setProjectId(StaticValueProvider.of("project")) diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOWriteTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOWriteTest.java index dbe017531b28..5720a20d989e 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOWriteTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOWriteTest.java @@ -1554,8 +1554,7 @@ private void testAndVerifyBatches(GatherSortCreateBatchesFn testFn) throws Excep @Test public void testRefCountedSpannerAccessorOnlyOnce() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() 
+ SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setProjectId(StaticValueProvider.of("project")) .setInstanceId(StaticValueProvider.of("test1")) @@ -1586,8 +1585,7 @@ public void testRefCountedSpannerAccessorOnlyOnce() { @Test public void testRefCountedSpannerAccessorDifferentDbsOnlyOnce() { SpannerConfig config1 = - SpannerConfig.create() - .toBuilder() + SpannerConfig.create().toBuilder() .setServiceFactory(serviceFactory) .setMaxCumulativeBackoff(StaticValueProvider.of(Duration.standardSeconds(10))) .setProjectId(StaticValueProvider.of("project")) @@ -1595,8 +1593,7 @@ public void testRefCountedSpannerAccessorDifferentDbsOnlyOnce() { .setDatabaseId(StaticValueProvider.of("test1")) .build(); SpannerConfig config2 = - config1 - .toBuilder() + config1.toBuilder() .setInstanceId(StaticValueProvider.of("test2")) .setDatabaseId(StaticValueProvider.of("test2")) .build(); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerReadIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerReadIT.java index 34c839d3e1e6..809e120e0ae7 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerReadIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerReadIT.java @@ -74,8 +74,7 @@ public class SpannerReadIT { public interface SpannerTestPipelineOptions extends TestPipelineOptions { @Description("Project that hosts Spanner instance") - @Nullable - String getInstanceProjectId(); + @Nullable String getInstanceProjectId(); void setInstanceProjectId(String value); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerWriteIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerWriteIT.java index df23435d82ab..f9d6cd8f5f34 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerWriteIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerWriteIT.java @@ -81,8 +81,7 @@ public class SpannerWriteIT { public interface SpannerTestPipelineOptions extends TestPipelineOptions { @Description("Project that hosts Spanner instance") - @Nullable - String getInstanceProjectId(); + @Nullable String getInstanceProjectId(); void setInstanceProjectId(String value); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/StructUtilsTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/StructUtilsTest.java index 9a378b015182..0ddc093dfbdd 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/StructUtilsTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/StructUtilsTest.java @@ -279,8 +279,7 @@ public void testStructTypeToBeamRowSchema() { @Test public void testStructTypeToBeamRowSchemaFailsTypeNotSupported() { StructType structTypeWithStruct = - createStructType() - .toBuilder() + createStructType().toBuilder() .addFields(getFieldForTypeCode("f_struct", TypeCode.STRUCT)) .build(); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/ChangeStreamTestPipelineOptions.java 
b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/ChangeStreamTestPipelineOptions.java index 83fc9ebc791e..b70e75dc3192 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/ChangeStreamTestPipelineOptions.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/ChangeStreamTestPipelineOptions.java @@ -25,8 +25,7 @@ public interface ChangeStreamTestPipelineOptions extends IOTestPipelineOptions, StreamingOptions { @Description("Project that hosts Spanner instance") - @Nullable - String getProjectId(); + @Nullable String getProjectId(); void setProjectId(String value); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPlacementTableIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPlacementTableIT.java index 9318dad7ec6d..6a1afff761ca 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPlacementTableIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPlacementTableIT.java @@ -71,10 +71,10 @@ public class SpannerChangeStreamPlacementTableIT { @ClassRule public static final IntegrationTestEnv ENV = new IntegrationTestEnv( - /*isPostgres=*/ false, - /*isMutableChangeStream=*/ true, - /*isPlacementTable=*/ true, - /*host=*/ Optional.empty()); + /* isPostgres= */ false, + /* isMutableChangeStream= */ true, + /* isPlacementTable= */ true, + /* host= */ Optional.empty()); @Rule public final transient TestPipeline pipeline = TestPipeline.create(); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPlacementTablePostgresIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPlacementTablePostgresIT.java index 129a4334d1bb..219486e01e28 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPlacementTablePostgresIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPlacementTablePostgresIT.java @@ -65,10 +65,10 @@ public class SpannerChangeStreamPlacementTablePostgresIT { @ClassRule public static final IntegrationTestEnv ENV = new IntegrationTestEnv( - /*isPostgres=*/ true, - /*isMutableChangeStream=*/ true, - /*isPlacementTable=*/ true, - /*host=*/ Optional.empty()); + /* isPostgres= */ true, + /* isMutableChangeStream= */ true, + /* isPlacementTable= */ true, + /* host= */ Optional.empty()); @Rule public final transient TestPipeline pipeline = TestPipeline.create(); diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPostgresIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPostgresIT.java index 5f5f55e46964..032f6f885a4e 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPostgresIT.java +++ 
b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/it/SpannerChangeStreamPostgresIT.java @@ -64,10 +64,10 @@ public class SpannerChangeStreamPostgresIT { @ClassRule public static final IntegrationTestEnv ENV = new IntegrationTestEnv( - /*isPostgres=*/ true, - /*isMutableChangeStream=*/ false, - /*isPlacementTable=*/ false, - /*host=*/ Optional.empty()); + /* isPostgres= */ true, + /* isMutableChangeStream= */ false, + /* isPlacementTable= */ false, + /* host= */ Optional.empty()); @Rule public final transient TestPipeline pipeline = TestPipeline.create(); diff --git a/sdks/java/io/hadoop-file-system/build.gradle b/sdks/java/io/hadoop-file-system/build.gradle index d78d6a300cac..854ccefa0174 100644 --- a/sdks/java/io/hadoop-file-system/build.gradle +++ b/sdks/java/io/hadoop-file-system/build.gradle @@ -85,7 +85,7 @@ hadoopVersions.each {kv -> configurations.all (Configuration it) -> { // error-prone requires newer guava, don't override for annotation processing // https://github.com/google/error-prone/issues/2745 - if (it.name == "annotationProcessor" || it.name =="testAnnotationProcessor") { + if (it.name == "annotationProcessor" || it.name =="testAnnotationProcessor" || it.name.startsWith("spotless")) { return } resolutionStrategy { diff --git a/sdks/java/io/hadoop-format/build.gradle b/sdks/java/io/hadoop-format/build.gradle index 73fc44a0f311..86a713c5bd30 100644 --- a/sdks/java/io/hadoop-format/build.gradle +++ b/sdks/java/io/hadoop-format/build.gradle @@ -125,7 +125,7 @@ hadoopVersions.each {kv -> configurations.all (Configuration it) -> { // error-prone requires newer guava, don't override for annotation processing // https://github.com/google/error-prone/issues/2745 - if (it.name == "annotationProcessor" || it.name =="testAnnotationProcessor") { + if (it.name == "annotationProcessor" || it.name =="testAnnotationProcessor" || it.name.startsWith("spotless") || it.name.startsWith("checkstyle")) { return } resolutionStrategy { diff --git a/sdks/java/io/hadoop-format/src/main/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIO.java b/sdks/java/io/hadoop-format/src/main/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIO.java index 0412e4286bb8..ccfd4ba57972 100644 --- a/sdks/java/io/hadoop-format/src/main/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIO.java +++ b/sdks/java/io/hadoop-format/src/main/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIO.java @@ -1146,8 +1146,8 @@ public static class Write extends PTransform>, PCollectionView> + private final @Nullable + PTransform>, PCollectionView> configTransform; private final ExternalSynchronization externalSynchronization; @@ -1158,7 +1158,7 @@ public static class Write extends PTransform>, PCollectionView> - configTransform, + configTransform, ExternalSynchronization externalSynchronization, boolean withPartitioning) { this.configuration = configuration; diff --git a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOCassandraIT.java b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOCassandraIT.java index cd85b97061d6..2c08632bb118 100644 --- a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOCassandraIT.java +++ b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOCassandraIT.java @@ -136,6 +136,7 @@ public String apply(Row input) { + input.getString("field9"); } }; + /** * This test reads data 
from the Cassandra instance based on query and verifies if data is read * successfully. diff --git a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOElasticIT.java b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOElasticIT.java index 5676a7940da3..0ee3b35da785 100644 --- a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOElasticIT.java +++ b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOElasticIT.java @@ -139,6 +139,7 @@ public String apply(LinkedMapWritable mapw) { return convertMapWRowToString(mapw); } }); + /* * Function to create a toString implementation of a MapWritable row by writing all field values * in a string row. diff --git a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOElasticTest.java b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOElasticTest.java index 484c73e13751..91cdbb2a76d6 100644 --- a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOElasticTest.java +++ b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOElasticTest.java @@ -117,6 +117,7 @@ public String apply(LinkedMapWritable mapw) { return mapw.get(new Text("id")) + "|" + mapw.get(new Text("scientist")); } }); + /** * Test to read data from embedded Elasticsearch instance based on query and verify whether data * is read successfully. diff --git a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOReadTest.java b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOReadTest.java index ad33c57bedb7..9f7947830171 100644 --- a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOReadTest.java +++ b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOReadTest.java @@ -793,6 +793,7 @@ public void testGetFractionConsumedForBadProgressValue() throws Exception { assertEquals(null, reader.getFractionConsumed()); reader.close(); } + /** This test validates that reader and its parent source reads the same records. */ @Test public void testReaderAndParentSourceReadsSameData() throws Exception { diff --git a/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java b/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java index ba2674653f6b..fecfe1126fd7 100644 --- a/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java +++ b/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java @@ -167,8 +167,7 @@ public abstract static class Read extends PTransform fieldNames) { } // SqlLiteral nodes do not contain field names, so we can ignore them. } + /** * parses a SQL filter expression string into an Iceberg {@link Expression} that can be used for * data pruning. 
diff --git a/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/RecordWriterManager.java b/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/RecordWriterManager.java index 2f532a08754c..f68382cc3f13 100644 --- a/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/RecordWriterManager.java +++ b/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/RecordWriterManager.java @@ -88,6 +88,7 @@ */ class RecordWriterManager implements AutoCloseable { private static final Logger LOG = LoggerFactory.getLogger(RecordWriterManager.class); + /** * Represents the state of one Iceberg table destination. Creates one {@link RecordWriter} per * partition and manages them in a {@link Cache}. @@ -319,8 +320,8 @@ void refreshIfStale() { @VisibleForTesting Table getOrCreateTable(IcebergDestination destination, Schema dataSchema) { TableIdentifier identifier = destination.getTableIdentifier(); - @Nullable - LastRefreshedTable lastRefreshedTable = LAST_REFRESHED_TABLE_CACHE.getIfPresent(identifier); + @Nullable LastRefreshedTable lastRefreshedTable = + LAST_REFRESHED_TABLE_CACHE.getIfPresent(identifier); if (lastRefreshedTable != null && lastRefreshedTable.table != null) { lastRefreshedTable.refreshIfStale(); return lastRefreshedTable.table; diff --git a/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/TableCache.java b/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/TableCache.java index cb00d90f7fb3..a3f676482574 100644 --- a/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/TableCache.java +++ b/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/TableCache.java @@ -53,7 +53,8 @@ public ListenableFuture
<Table>
reload(String unusedIdentifier, Table table) { table.refresh(); return Futures.immediateFuture(table); } - });; + }); + ; static Table get(String identifier) { try { diff --git a/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/WritePartitionedRowsToFiles.java b/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/WritePartitionedRowsToFiles.java index 54ad120f1aca..d83e5de7df19 100644 --- a/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/WritePartitionedRowsToFiles.java +++ b/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/WritePartitionedRowsToFiles.java @@ -193,8 +193,8 @@ void refreshIfStale() { LastRefreshedTable getOrCreateTable(IcebergDestination destination, Schema dataSchema) { TableIdentifier identifier = destination.getTableIdentifier(); - @Nullable - LastRefreshedTable lastRefreshedTable = LAST_REFRESHED_TABLE_CACHE.getIfPresent(identifier); + @Nullable LastRefreshedTable lastRefreshedTable = + LAST_REFRESHED_TABLE_CACHE.getIfPresent(identifier); if (lastRefreshedTable != null) { lastRefreshedTable.refreshIfStale(); return lastRefreshedTable; diff --git a/sdks/java/io/iceberg/src/test/java/org/apache/beam/sdk/io/iceberg/ReadUtilsTest.java b/sdks/java/io/iceberg/src/test/java/org/apache/beam/sdk/io/iceberg/ReadUtilsTest.java index 73a0fd19e893..92b38636ffbc 100644 --- a/sdks/java/io/iceberg/src/test/java/org/apache/beam/sdk/io/iceberg/ReadUtilsTest.java +++ b/sdks/java/io/iceberg/src/test/java/org/apache/beam/sdk/io/iceberg/ReadUtilsTest.java @@ -182,8 +182,7 @@ public void testResolveFromSnapshotExclusive() throws IOException { // streaming TestCase.of(streamingScanConfig, latest.parentId(), "default streaming read"), TestCase.of( - streamingScanConfig - .toBuilder() + streamingScanConfig.toBuilder() .setFromSnapshotInclusive(third.snapshotId()) .build(), third.parentId(), @@ -203,8 +202,8 @@ public void testResolveFromSnapshotExclusive() throws IOException { List errors = new ArrayList<>(); for (TestCase testCase : scanConfigCases) { - @Nullable - Long snapshotId = ReadUtils.getFromSnapshotExclusive(simpleTable, testCase.scanConfig); + @Nullable Long snapshotId = + ReadUtils.getFromSnapshotExclusive(simpleTable, testCase.scanConfig); if (!Objects.equals(testCase.expectedSnapshotId, snapshotId)) { errors.add( String.format( diff --git a/sdks/java/io/iceberg/src/test/java/org/apache/beam/sdk/io/iceberg/catalog/IcebergCatalogBaseIT.java b/sdks/java/io/iceberg/src/test/java/org/apache/beam/sdk/io/iceberg/catalog/IcebergCatalogBaseIT.java index 74408d67ed86..fe05d6dd2a09 100644 --- a/sdks/java/io/iceberg/src/test/java/org/apache/beam/sdk/io/iceberg/catalog/IcebergCatalogBaseIT.java +++ b/sdks/java/io/iceberg/src/test/java/org/apache/beam/sdk/io/iceberg/catalog/IcebergCatalogBaseIT.java @@ -209,8 +209,7 @@ public void cleanUp() throws Exception { GcsUtil gcsUtil = OPTIONS.as(GcsOptions.class).getGcsUtil(); GcsPath path = GcsPath.fromUri(warehouse); - @Nullable - List objects = + @Nullable List objects = gcsUtil .listObjects( path.getBucket(), diff --git a/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDbIO.java b/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDbIO.java index a2aef8766921..f1f0531b9e5b 100644 --- a/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDbIO.java +++ b/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDbIO.java @@ -233,6 +233,7 @@ public void populateDisplayData(DisplayData.Builder builder) 
{ builder.addIfNotNull(DisplayData.item("query", query())); } } + /** A InfluxDb {@link BoundedSource} reading {@link String} from a given instance. */ static class InfluxDBSource extends BoundedSource { private final Read spec; diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java index b53dbfd4fa5d..bd8f6ca2aa27 100644 --- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java +++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java @@ -692,15 +692,13 @@ public DataSource buildDatasource() { if (getUsername() != null) { @SuppressWarnings( "nullness") // this is actually nullable, but apache commons dbcp2 not annotated - @NonNull - String username = getUsername().get(); + @NonNull String username = getUsername().get(); basicDataSource.setUsername(username); } if (getPassword() != null) { @SuppressWarnings( "nullness") // this is actually nullable, but apache commons dbcp2 not annotated - @NonNull - String password = getPassword().get(); + @NonNull String password = getPassword().get(); basicDataSource.setPassword(password); } if (getConnectionProperties() != null) { diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcSchemaIOProvider.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcSchemaIOProvider.java index b9c8f2fad15d..fd20fcbc269d 100644 --- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcSchemaIOProvider.java +++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcSchemaIOProvider.java @@ -122,8 +122,7 @@ public PTransform> buildReader() { public PCollection expand(PBegin input) { // If we define a partition column we need to go a different route - @Nullable - String partitionColumn = + @Nullable String partitionColumn = config.getSchema().hasField("partitionColumn") ? config.getString("partitionColumn") : null; diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/SchemaUtil.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/SchemaUtil.java index 65f21308ea32..6cd90c21fc3d 100644 --- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/SchemaUtil.java +++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/SchemaUtil.java @@ -70,9 +70,9 @@ public class SchemaUtil { */ @FunctionalInterface interface ResultSetFieldExtractor extends Serializable { - @Nullable - Object extract(ResultSet rs, Integer index) throws SQLException; + @Nullable Object extract(ResultSet rs, Integer index) throws SQLException; } + // ResultSetExtractors for primitive schema types (excluding arrays, structs and logical types). 
private static final EnumMap RESULTSET_FIELD_EXTRACTORS = diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/providers/ReadFromPostgresSchemaTransformProvider.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/providers/ReadFromPostgresSchemaTransformProvider.java index 05011be73796..ea4262f742c4 100644 --- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/providers/ReadFromPostgresSchemaTransformProvider.java +++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/providers/ReadFromPostgresSchemaTransformProvider.java @@ -80,8 +80,7 @@ protected String jdbcType() { // Override "connectionInitSql" and "disableAutoCommit" for postgres configuration = - configuration - .toBuilder() + configuration.toBuilder() .setConnectionInitSql(Collections.emptyList()) .setDisableAutoCommit(true) .build(); diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java index 5deb7a2b62c7..645bc7c9c669 100644 --- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java +++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java @@ -217,8 +217,7 @@ public abstract static class Read extends PTransform> * *

So, a {@link ConnectionFactory} implementation should be serializable. */ - @Nullable - ConnectionFactory getConnectionFactory() { + @Nullable ConnectionFactory getConnectionFactory() { if (connectionFactory == null) { connectionFactory = Optional.ofNullable(getConnectionFactoryProviderFn()) @@ -864,8 +863,7 @@ public abstract static class Write private @Nullable transient ConnectionFactory connectionFactory; - @Nullable - ConnectionFactory getConnectionFactory() { + @Nullable ConnectionFactory getConnectionFactory() { if (connectionFactory == null) { connectionFactory = Optional.ofNullable(getConnectionFactoryProviderFn()) @@ -876,8 +874,8 @@ ConnectionFactory getConnectionFactory() { return connectionFactory; } - abstract @Nullable SerializableFunction - getConnectionFactoryProviderFn(); + abstract @Nullable + SerializableFunction getConnectionFactoryProviderFn(); abstract @Nullable String getQueue(); diff --git a/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/CommonJms.java b/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/CommonJms.java index 1d1245e6877d..5e89c4385641 100644 --- a/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/CommonJms.java +++ b/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/CommonJms.java @@ -120,13 +120,17 @@ void startBroker() throws Exception { } ConnectionFactory createConnectionFactory() - throws NoSuchMethodException, InvocationTargetException, InstantiationException, + throws NoSuchMethodException, + InvocationTargetException, + InstantiationException, IllegalAccessException { return connectionFactoryClass.getConstructor(String.class).newInstance(brokerUrl); } ConnectionFactory createConnectionFactoryWithSyncAcksAndWithoutPrefetch() - throws NoSuchMethodException, InvocationTargetException, InstantiationException, + throws NoSuchMethodException, + InvocationTargetException, + InstantiationException, IllegalAccessException { return connectionFactoryClass .getConstructor(String.class) diff --git a/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/JmsIOTest.java b/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/JmsIOTest.java index b3233f866172..48b9b787b2cc 100644 --- a/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/JmsIOTest.java +++ b/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/JmsIOTest.java @@ -172,7 +172,9 @@ public JmsIOTest( Integer brokerPort, String forceAsyncAcksParam, Class connectionFactoryClass) - throws InvocationTargetException, NoSuchMethodException, InstantiationException, + throws InvocationTargetException, + NoSuchMethodException, + InstantiationException, IllegalAccessException { this.commonJms = new CommonJms(brokerUrl, brokerPort, forceAsyncAcksParam, connectionFactoryClass); diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java index f34547bd2611..c6b7761a9a6c 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java @@ -278,6 +278,7 @@ private static class ExactlyOnceWriter @StateId(OUT_OF_ORDER_BUFFER) private final StateSpec>>>> outOfOrderBufferSpec; + // A random id assigned to each shard. 
Helps with detecting when multiple jobs are mistakenly // started with same groupId used for storing state on Kafka side, including the case where // a job is restarted with same groupId, but the metadata from previous run was not cleared. @@ -711,8 +712,7 @@ private static class ShardWriterCache { TimeUnit.MILLISECONDS); } - @Nullable - ShardWriter removeIfPresent(int shard) { + @Nullable ShardWriter removeIfPresent(int shard) { return cache.asMap().remove(shard); } diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java index 518319a38e32..c2c8b0b217d2 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java @@ -1903,8 +1903,7 @@ public PCollection> expand(PBegin input) { // Handles unbounded source to bounded conversion if maxNumRecords or maxReadTime is set. Unbounded> unbounded = org.apache.beam.sdk.io.Read.from( - kafkaRead - .toBuilder() + kafkaRead.toBuilder() .setKeyCoder(keyCoder) .setValueCoder(valueCoder) .build() @@ -2315,8 +2314,7 @@ static class ByteArrayKafkaRecord { byte @Nullable [] value; @SchemaFieldNumber("6") - @Nullable - List headers; + @Nullable List headers; @SchemaFieldNumber("7") int timestampTypeId; @@ -2488,12 +2486,12 @@ public abstract static class ReadSourceDescriptors abstract @Nullable CheckStopReadingFn getCheckStopReadingFn(); @Pure - abstract @Nullable SerializableFunction, Instant> - getExtractOutputTimestampFn(); + abstract @Nullable + SerializableFunction, Instant> getExtractOutputTimestampFn(); @Pure - abstract @Nullable SerializableFunction> - getCreateWatermarkEstimatorFn(); + abstract @Nullable + SerializableFunction> getCreateWatermarkEstimatorFn(); @Pure abstract boolean isCommitOffsetEnabled(); @@ -3169,8 +3167,8 @@ public abstract static class WriteRecords public abstract Map getProducerConfig(); @Pure - public abstract @Nullable SerializableFunction, Producer> - getProducerFactoryFn(); + public abstract @Nullable + SerializableFunction, Producer> getProducerFactoryFn(); @Pure public abstract @Nullable Class> getKeySerializer(); @@ -3179,8 +3177,8 @@ public abstract static class WriteRecords public abstract @Nullable Class> getValueSerializer(); @Pure - public abstract @Nullable KafkaPublishTimestampFunction> - getPublishTimestampFunction(); + public abstract @Nullable + KafkaPublishTimestampFunction> getPublishTimestampFunction(); // Configuration for EOS sink @Pure @@ -3197,8 +3195,8 @@ public abstract static class WriteRecords public abstract int getNumShards(); @Pure - public abstract @Nullable SerializableFunction, ? extends Consumer> - getConsumerFactoryFn(); + public abstract @Nullable + SerializableFunction, ? 
extends Consumer> getConsumerFactoryFn(); @Pure public abstract BadRecordRouter getBadRecordRouter(); diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIOReadImplementationCompatibility.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIOReadImplementationCompatibility.java index 95709135d809..a6d0abe7dc3e 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIOReadImplementationCompatibility.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIOReadImplementationCompatibility.java @@ -180,8 +180,7 @@ static Method findGetterMethod(KafkaIOReadProperties property) { * This method can be used to provide that value. */ @VisibleForTesting - @Nullable - Object getDefaultValue() { + @Nullable Object getDefaultValue() { return null; } } diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaMetrics.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaMetrics.java index 6a214fc17533..fa30ebf3f49a 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaMetrics.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaMetrics.java @@ -78,7 +78,8 @@ abstract class KafkaMetricsImpl implements KafkaMetrics { private static final Map LATENCY_HISTOGRAMS = new ConcurrentHashMap(); - abstract ConcurrentHashMap> perTopicRpcLatencies();; + abstract ConcurrentHashMap> perTopicRpcLatencies(); + ; abstract ConcurrentHashMap perTopicPartitionBacklogs(); diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java index 9f5d53600af3..c09926693713 100644 --- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java +++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java @@ -1254,8 +1254,7 @@ public interface Options extends IOTestPipelineOptions, StreamingOptions { void setWithTestcontainers(Boolean withTestcontainers); @Description("Kafka container version in format 'X.Y.Z'. Use when useTestcontainers is true") - @Nullable - String getKafkaContainerVersion(); + @Nullable String getKafkaContainerVersion(); void setKafkaContainerVersion(String kafkaContainerVersion); } diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java index 703d323090dd..5e7a5b326436 100644 --- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java +++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java @@ -273,6 +273,7 @@ public synchronized void assign(final Collection assigned) { super.assign(assigned); assignedPartitions.set(ImmutableList.copyOf(assigned)); } + // Override offsetsForTimes() in order to look up the offsets by timestamp. 
@Override public synchronized Map offsetsForTimes( diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFnTest.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFnTest.java index 5e3e08a60664..f887852738a9 100644 --- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFnTest.java +++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFnTest.java @@ -585,8 +585,7 @@ public void testProcessElementWhenTopicPartitionIsStopped() throws Exception { MockMultiOutputReceiver receiver = new MockMultiOutputReceiver(); ReadFromKafkaDoFn instance = ReadFromKafkaDoFn.create( - makeReadSourceDescriptor(consumer) - .toBuilder() + makeReadSourceDescriptor(consumer).toBuilder() .setCheckStopReadingFn( new SerializableFunction() { @Override diff --git a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/UpdateField.java b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/UpdateField.java index 9870376457ff..368adce39848 100644 --- a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/UpdateField.java +++ b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/UpdateField.java @@ -62,8 +62,7 @@ public static UpdateField fullUpdate(String updateOperator, String destField) { public static UpdateField fieldUpdate( String updateOperator, String sourceField, String destField) { - return create() - .toBuilder() + return create().toBuilder() .setUpdateOperator(updateOperator) .setSourceField(sourceField) .setDestField(destField) diff --git a/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBIOIT.java b/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBIOIT.java index 67e9ce053f9e..9257e3188542 100644 --- a/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBIOIT.java +++ b/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBIOIT.java @@ -226,8 +226,11 @@ public void testWriteAndRead() { } private double getCollectionSizeInBytes(final String collectionName) { - return mongoClient.getDatabase(options.getMongoDBDatabaseName()) - .runCommand(new Document("collStats", collectionName)).entrySet().stream() + return mongoClient + .getDatabase(options.getMongoDBDatabaseName()) + .runCommand(new Document("collStats", collectionName)) + .entrySet() + .stream() .filter(entry -> entry.getKey().equals("size")) .map(entry -> Double.parseDouble(String.valueOf(entry.getValue()))) .findFirst() diff --git a/sdks/java/io/neo4j/src/main/java/org/apache/beam/sdk/io/neo4j/Neo4jIO.java b/sdks/java/io/neo4j/src/main/java/org/apache/beam/sdk/io/neo4j/Neo4jIO.java index 2060b83b42dc..9a07302df656 100644 --- a/sdks/java/io/neo4j/src/main/java/org/apache/beam/sdk/io/neo4j/Neo4jIO.java +++ b/sdks/java/io/neo4j/src/main/java/org/apache/beam/sdk/io/neo4j/Neo4jIO.java @@ -478,8 +478,8 @@ public abstract static class ReadAll abstract @Nullable RowMapper getRowMapper(); - abstract @Nullable SerializableFunction> - getParametersFunction(); + abstract @Nullable + SerializableFunction> getParametersFunction(); abstract @Nullable Coder getCoder(); @@ -901,8 +901,8 @@ public abstract static class WriteUnwind abstract @Nullable ValueProvider getTransactionConfig(); - abstract @Nullable SerializableFunction> - getParametersFunction(); + abstract @Nullable + SerializableFunction> getParametersFunction(); abstract @Nullable ValueProvider getBatchSize(); diff --git 
a/sdks/java/io/parquet/src/main/java/org/apache/beam/sdk/io/parquet/ParquetIO.java b/sdks/java/io/parquet/src/main/java/org/apache/beam/sdk/io/parquet/ParquetIO.java index feaceeeb4432..c98f3dab61ff 100644 --- a/sdks/java/io/parquet/src/main/java/org/apache/beam/sdk/io/parquet/ParquetIO.java +++ b/sdks/java/io/parquet/src/main/java/org/apache/beam/sdk/io/parquet/ParquetIO.java @@ -333,6 +333,7 @@ public Read from(ValueProvider filepattern) { public Read from(String filepattern) { return from(ValueProvider.StaticValueProvider.of(filepattern)); } + /** Enable the reading with projection. */ public Read withProjection(Schema projectionSchema, Schema encoderSchema) { return toBuilder() @@ -468,8 +469,7 @@ public PCollection expand(PBegin input) { .apply(FileIO.matchAll()) .apply(FileIO.readMatches()) .apply( - parseFilesGenericRecords(getParseFn()) - .toBuilder() + parseFilesGenericRecords(getParseFn()).toBuilder() .setCoder(getCoder()) .setConfiguration(getConfiguration()) .build()); diff --git a/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java b/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java index 73008a015c41..c0b2a00ab6bb 100644 --- a/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java +++ b/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java @@ -93,7 +93,9 @@ public Supplier publishRoutingKeyGen() { return () -> DEFAULT_ROUTING_KEY; } - /** @return The expected parsed (String) messages read from the queue during the test. */ + /** + * @return The expected parsed (String) messages read from the queue during the test. + */ public List expectedResults() { return RabbitMqTestUtils.generateRecords(numRecordsToPublish).stream() .map(RabbitMqTestUtils::recordToString) diff --git a/sdks/java/io/rrio/src/main/java/org/apache/beam/io/requestresponse/RequestResponseIO.java b/sdks/java/io/rrio/src/main/java/org/apache/beam/io/requestresponse/RequestResponseIO.java index 167325fc6f9b..4f78e9c980f3 100644 --- a/sdks/java/io/rrio/src/main/java/org/apache/beam/io/requestresponse/RequestResponseIO.java +++ b/sdks/java/io/rrio/src/main/java/org/apache/beam/io/requestresponse/RequestResponseIO.java @@ -251,8 +251,7 @@ public RequestResponseIO withBackOffSupplier( */ public RequestResponseIO withCache(Cache.Pair pair) { return new RequestResponseIO<>( - rrioConfiguration - .toBuilder() + rrioConfiguration.toBuilder() .setCacheRead(pair.getRead()) .setCacheWrite(pair.getWrite()) .build(), @@ -294,13 +293,13 @@ static Builder builder() { * {@link PCollection} of the original {@link RequestT}s and associated {@link ResponseT}, null * if no association persists in the cache. */ - abstract @Nullable PTransform, Result>> - getCacheRead(); + abstract @Nullable + PTransform, Result>> getCacheRead(); /** Writes {@link RequestT} and {@link ResponseT} associations to a cache. 
*/ - abstract @Nullable PTransform< - PCollection>, Result>> - getCacheWrite(); + abstract @Nullable + PTransform>, Result>> + getCacheWrite(); abstract Builder toBuilder(); diff --git a/sdks/java/io/rrio/src/test/java/org/apache/beam/io/requestresponse/RedisExternalResourcesRule.java b/sdks/java/io/rrio/src/test/java/org/apache/beam/io/requestresponse/RedisExternalResourcesRule.java index 5c77dde387b0..d66575f3c180 100644 --- a/sdks/java/io/rrio/src/test/java/org/apache/beam/io/requestresponse/RedisExternalResourcesRule.java +++ b/sdks/java/io/rrio/src/test/java/org/apache/beam/io/requestresponse/RedisExternalResourcesRule.java @@ -60,8 +60,7 @@ protected void after() { } } - @NonNull - JedisPooled getValidatingClient() { + @NonNull JedisPooled getValidatingClient() { return checkStateNotNull(validatingClient); } diff --git a/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreDefaultRowMapper.java b/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreDefaultRowMapper.java index 6837f1893d06..2c467cd70a62 100644 --- a/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreDefaultRowMapper.java +++ b/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreDefaultRowMapper.java @@ -133,8 +133,7 @@ abstract Schema.FieldType getSchemaFieldType(ResultSetMetaData md, Integer index */ @FunctionalInterface interface ResultSetFieldExtractor extends Serializable { - @Nullable - Object extract(ResultSet rs, Integer index) throws SQLException; + @Nullable Object extract(ResultSet rs, Integer index) throws SQLException; } static ResultSetFieldConverter of(int columnType) { @@ -190,8 +189,7 @@ public DirectResultSetFieldConverter( } @Override - @Nullable - Object getValue(ResultSet rs, Integer index) throws SQLException { + @Nullable Object getValue(ResultSet rs, Integer index) throws SQLException { return extractor.extract(rs, index); } @@ -203,8 +201,7 @@ Schema.FieldType getSchemaFieldType(ResultSetMetaData md, Integer index) { static class CharResultSetFieldConverter extends ResultSetFieldConverter { @Override - @Nullable - Object getValue(ResultSet rs, Integer index) throws SQLException { + @Nullable Object getValue(ResultSet rs, Integer index) throws SQLException { return rs.getString(index); } @@ -217,8 +214,7 @@ Schema.FieldType getSchemaFieldType(ResultSetMetaData md, Integer index) throws static class BinaryResultSetFieldConverter extends ResultSetFieldConverter { @Override - @Nullable - Object getValue(ResultSet rs, Integer index) throws SQLException { + @Nullable Object getValue(ResultSet rs, Integer index) throws SQLException { return rs.getBytes(index); } @@ -231,8 +227,7 @@ Schema.FieldType getSchemaFieldType(ResultSetMetaData md, Integer index) throws static class TimestampResultSetFieldConverter extends ResultSetFieldConverter { @Override - @Nullable - Object getValue(ResultSet rs, Integer index) throws SQLException { + @Nullable Object getValue(ResultSet rs, Integer index) throws SQLException { Timestamp ts = rs.getTimestamp(index, Calendar.getInstance(TimeZone.getTimeZone(ZoneOffset.UTC))); if (ts == null) { @@ -249,8 +244,7 @@ Schema.FieldType getSchemaFieldType(ResultSetMetaData md, Integer index) { static class TimeResultSetFieldConverter extends ResultSetFieldConverter { @Override - @Nullable - Object getValue(ResultSet rs, Integer index) throws SQLException { + @Nullable Object getValue(ResultSet rs, Integer index) throws SQLException { Time time = 
rs.getTime(index, Calendar.getInstance(TimeZone.getTimeZone(ZoneOffset.UTC))); if (time == null) { return null; @@ -267,8 +261,7 @@ Schema.FieldType getSchemaFieldType(ResultSetMetaData md, Integer index) { static class DateResultSetFieldConverter extends ResultSetFieldConverter { @Override - @Nullable - Object getValue(ResultSet rs, Integer index) throws SQLException { + @Nullable Object getValue(ResultSet rs, Integer index) throws SQLException { // TODO(https://github.com/apache/beam/issues/19215) import when joda LocalDate is removed. java.time.LocalDate date = rs.getObject(index, java.time.LocalDate.class); if (date == null) { diff --git a/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/broker/BasicAuthSempClientFactory.java b/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/broker/BasicAuthSempClientFactory.java index 4c01257373b4..06a65577464a 100644 --- a/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/broker/BasicAuthSempClientFactory.java +++ b/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/broker/BasicAuthSempClientFactory.java @@ -54,6 +54,7 @@ public abstract static class Builder { /** Set Solace username. */ public abstract Builder username(String username); + /** Set Solace password. */ public abstract Builder password(String password); diff --git a/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/broker/SempBasicAuthClientExecutor.java b/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/broker/SempBasicAuthClientExecutor.java index 965fc8741374..6977dd14c32b 100644 --- a/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/broker/SempBasicAuthClientExecutor.java +++ b/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/broker/SempBasicAuthClientExecutor.java @@ -193,7 +193,9 @@ private HttpResponse execute(HttpRequest request) throws IOException { private void setCookiesFromCookieManager(HttpHeaders httpHeaders) { httpHeaders.setCookie( - checkStateNotNull(COOKIE_MANAGER_MAP.get(cookieManagerKey)).getCookieStore().getCookies() + checkStateNotNull(COOKIE_MANAGER_MAP.get(cookieManagerKey)) + .getCookieStore() + .getCookies() .stream() .map(s -> s.getName() + "=" + s.getValue()) .collect(Collectors.joining(";"))); diff --git a/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/data/Solace.java b/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/data/Solace.java index e6cd35b63b45..2e25e9491ece 100644 --- a/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/data/Solace.java +++ b/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/data/Solace.java @@ -387,6 +387,7 @@ public abstract static class Builder { */ public static class SolaceRecordMapper { private static final Logger LOG = LoggerFactory.getLogger(SolaceRecordMapper.class); + /** * Maps a {@link BytesXMLMessage} (if not null) to a {@link Solace.Record}. 
* diff --git a/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/read/WatermarkPolicy.java b/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/read/WatermarkPolicy.java index 9d2ed24f3c06..2a2bdf55fd60 100644 --- a/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/read/WatermarkPolicy.java +++ b/sdks/java/io/solace/src/main/java/org/apache/beam/sdk/io/solace/read/WatermarkPolicy.java @@ -52,6 +52,7 @@ static WatermarkPolicy create( private WatermarkPolicy(WatermarkParameters watermarkParameters) { this.watermarkParameters = watermarkParameters; } + /** * Returns the current watermark. * @@ -94,8 +95,7 @@ void update(@Nullable T record) { return; } watermarkParameters = - watermarkParameters - .toBuilder() + watermarkParameters.toBuilder() .setLastSavedWatermark( Ordering.natural() .max( diff --git a/sdks/java/io/solace/src/test/java/org/apache/beam/sdk/io/solace/it/BasicAuthMultipleSempClientFactory.java b/sdks/java/io/solace/src/test/java/org/apache/beam/sdk/io/solace/it/BasicAuthMultipleSempClientFactory.java index 0a548c10555c..ab4821e9885d 100644 --- a/sdks/java/io/solace/src/test/java/org/apache/beam/sdk/io/solace/it/BasicAuthMultipleSempClientFactory.java +++ b/sdks/java/io/solace/src/test/java/org/apache/beam/sdk/io/solace/it/BasicAuthMultipleSempClientFactory.java @@ -59,6 +59,7 @@ public abstract static class Builder { /** Set Solace username. */ public abstract Builder username(String username); + /** Set Solace password. */ public abstract Builder password(String password); diff --git a/sdks/java/io/sparkreceiver/3/src/main/java/org/apache/beam/sdk/io/sparkreceiver/HasOffset.java b/sdks/java/io/sparkreceiver/3/src/main/java/org/apache/beam/sdk/io/sparkreceiver/HasOffset.java index 2a52de69046e..b295d97b2ae1 100644 --- a/sdks/java/io/sparkreceiver/3/src/main/java/org/apache/beam/sdk/io/sparkreceiver/HasOffset.java +++ b/sdks/java/io/sparkreceiver/3/src/main/java/org/apache/beam/sdk/io/sparkreceiver/HasOffset.java @@ -37,5 +37,6 @@ public interface HasOffset { * Some {@link org.apache.spark.streaming.receiver.Receiver} support mechanism of checkpoint (e.g. * ack). This method should be called before stopping the receiver. 
*/ - default void setCheckpoint(Long recordsProcessed) {}; + default void setCheckpoint(Long recordsProcessed) {} + ; } diff --git a/sdks/java/io/sparkreceiver/3/src/main/java/org/apache/beam/sdk/io/sparkreceiver/SparkConsumer.java b/sdks/java/io/sparkreceiver/3/src/main/java/org/apache/beam/sdk/io/sparkreceiver/SparkConsumer.java index 6d54968e4900..4ccb99ef2843 100644 --- a/sdks/java/io/sparkreceiver/3/src/main/java/org/apache/beam/sdk/io/sparkreceiver/SparkConsumer.java +++ b/sdks/java/io/sparkreceiver/3/src/main/java/org/apache/beam/sdk/io/sparkreceiver/SparkConsumer.java @@ -29,8 +29,7 @@ interface SparkConsumer extends Serializable { boolean hasRecords(); - @Nullable - V poll(); + @Nullable V poll(); void start(Receiver sparkReceiver); diff --git a/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java b/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java index 9a6c76990ea9..6d6681b8f5fc 100644 --- a/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java +++ b/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java @@ -198,7 +198,10 @@ public interface Options extends IOTestPipelineOptions, StreamingOptions { } private void writeToRabbitMq(final List messages) - throws URISyntaxException, NoSuchAlgorithmException, KeyManagementException, IOException, + throws URISyntaxException, + NoSuchAlgorithmException, + KeyManagementException, + IOException, TimeoutException { final ConnectionFactory connectionFactory = new ConnectionFactory(); diff --git a/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/HttpEventPublisher.java b/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/HttpEventPublisher.java index cd7899ce8f40..141d0aa21f3e 100644 --- a/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/HttpEventPublisher.java +++ b/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/HttpEventPublisher.java @@ -323,7 +323,10 @@ Builder withMaxElapsedMillis(Integer maxElapsedMillis) { * @return {@link HttpEventPublisher} */ HttpEventPublisher build() - throws NoSuchAlgorithmException, KeyStoreException, KeyManagementException, IOException, + throws NoSuchAlgorithmException, + KeyStoreException, + KeyManagementException, + IOException, CertificateException { checkNotNull(token(), "Authentication token needs to be specified via withToken(token)."); @@ -372,7 +375,10 @@ GenericUrl getGenericUrl(String baseUrl) { */ private CloseableHttpClient getHttpClient( int maxConnections, boolean disableCertificateValidation, byte[] rootCaCertificate) - throws NoSuchAlgorithmException, KeyStoreException, KeyManagementException, IOException, + throws NoSuchAlgorithmException, + KeyStoreException, + KeyManagementException, + IOException, CertificateException { HttpClientBuilder builder = ApacheHttpTransport.newDefaultHttpClientBuilder(); diff --git a/sdks/java/io/splunk/src/test/java/org/apache/beam/sdk/io/splunk/CustomX509TrustManagerTest.java b/sdks/java/io/splunk/src/test/java/org/apache/beam/sdk/io/splunk/CustomX509TrustManagerTest.java index 80b467a614e3..3a8ca8b3b01d 100644 --- a/sdks/java/io/splunk/src/test/java/org/apache/beam/sdk/io/splunk/CustomX509TrustManagerTest.java +++ b/sdks/java/io/splunk/src/test/java/org/apache/beam/sdk/io/splunk/CustomX509TrustManagerTest.java @@ -41,8 +41,11 @@ public final class CustomX509TrustManagerTest { @Before public void setUp() - 
throws NoSuchAlgorithmException, CertificateException, FileNotFoundException, - KeyStoreException, IOException { + throws NoSuchAlgorithmException, + CertificateException, + FileNotFoundException, + KeyStoreException, + IOException { CertificateFactory cf = CertificateFactory.getInstance("X.509"); ClassLoader classLoader = this.getClass().getClassLoader(); FileInputStream rootCaInputStream = diff --git a/sdks/java/io/splunk/src/test/java/org/apache/beam/sdk/io/splunk/HttpEventPublisherTest.java b/sdks/java/io/splunk/src/test/java/org/apache/beam/sdk/io/splunk/HttpEventPublisherTest.java index 82374339ac95..0ef0100fa8f7 100644 --- a/sdks/java/io/splunk/src/test/java/org/apache/beam/sdk/io/splunk/HttpEventPublisherTest.java +++ b/sdks/java/io/splunk/src/test/java/org/apache/beam/sdk/io/splunk/HttpEventPublisherTest.java @@ -96,7 +96,10 @@ public void setUp() throws IOException { @Test public void stringPayloadTest() - throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, + throws IOException, + NoSuchAlgorithmException, + KeyStoreException, + KeyManagementException, CertificateException { HttpEventPublisher publisher = @@ -121,7 +124,10 @@ public void stringPayloadTest() @Test public void contentTest() - throws NoSuchAlgorithmException, KeyStoreException, KeyManagementException, IOException, + throws NoSuchAlgorithmException, + KeyStoreException, + KeyManagementException, + IOException, CertificateException { HttpEventPublisher publisher = @@ -166,7 +172,10 @@ public void genericURLTest() @Test public void configureBackOffDefaultTest() - throws NoSuchAlgorithmException, KeyStoreException, KeyManagementException, IOException, + throws NoSuchAlgorithmException, + KeyStoreException, + KeyManagementException, + IOException, CertificateException { HttpEventPublisher publisherDefaultBackOff = @@ -184,7 +193,10 @@ public void configureBackOffDefaultTest() @Test public void configureBackOffCustomTest() - throws NoSuchAlgorithmException, KeyStoreException, KeyManagementException, IOException, + throws NoSuchAlgorithmException, + KeyStoreException, + KeyManagementException, + IOException, CertificateException { int timeoutInMillis = 600000; // 10 minutes diff --git a/sdks/java/io/synthetic/src/main/java/org/apache/beam/sdk/io/synthetic/SyntheticBoundedSource.java b/sdks/java/io/synthetic/src/main/java/org/apache/beam/sdk/io/synthetic/SyntheticBoundedSource.java index 798bd9c4c2c2..b083becb26ae 100644 --- a/sdks/java/io/synthetic/src/main/java/org/apache/beam/sdk/io/synthetic/SyntheticBoundedSource.java +++ b/sdks/java/io/synthetic/src/main/java/org/apache/beam/sdk/io/synthetic/SyntheticBoundedSource.java @@ -144,7 +144,8 @@ public List split(long desiredBundleSizeBytes, PipelineO : sourceOptions.forceNumInitialBundles; List res = - bundleSplitter.getBundleSizes(desiredNumBundles, this.getStartOffset(), this.getEndOffset()) + bundleSplitter + .getBundleSizes(desiredNumBundles, this.getStartOffset(), this.getEndOffset()) .stream() .map(offsetRange -> createSourceForSubrange(offsetRange.getFrom(), offsetRange.getTo())) .collect(Collectors.toList()); diff --git a/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/TestThriftStruct.java b/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/TestThriftStruct.java index 6d6487f25d78..ad732229032b 100644 --- a/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/TestThriftStruct.java +++ b/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/TestThriftStruct.java @@ -106,7 +106,9 
@@ public enum _Fields implements org.apache.thrift.TFieldIdEnum { TEST_BOOL((short) 8, "testBool"), TEST_LIST((short) 9, "testList"), TEST_STRING_SET_TYPEDEF((short) 10, "testStringSetTypedef"), - /** @see TestThriftEnum */ + /** + * @see TestThriftEnum + */ TEST_ENUM((short) 11, "testEnum"), TEST_NESTED((short) 12, "testNested"), TEST_UNION((short) 13, "testUnion"); @@ -705,13 +707,17 @@ public void setTestStringSetTypedefIsSet(boolean value) { } } - /** @see TestThriftEnum */ + /** + * @see TestThriftEnum + */ @org.apache.thrift.annotation.Nullable public TestThriftEnum getTestEnum() { return this.testEnum; } - /** @see TestThriftEnum */ + /** + * @see TestThriftEnum + */ public void setTestEnum(@org.apache.thrift.annotation.Nullable TestThriftEnum testEnum) { this.testEnum = testEnum; } diff --git a/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/TestThriftUnion.java b/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/TestThriftUnion.java index 5cac062c9d67..066b4d531205 100644 --- a/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/TestThriftUnion.java +++ b/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/TestThriftUnion.java @@ -38,7 +38,9 @@ public class TestThriftUnion */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { SNAKE_CASE_NESTED_STRUCT((short) 1, "snake_case_nested_struct"), - /** @see TestThriftEnum */ + /** + * @see TestThriftEnum + */ CAMEL_CASE_ENUM((short) 2, "camelCaseEnum"); private static final java.util.Map byName = @@ -311,7 +313,9 @@ public void setSnake_case_nested_struct(TestThriftInnerStruct value) { value_ = value; } - /** @see TestThriftEnum */ + /** + * @see TestThriftEnum + */ public TestThriftEnum getCamelCaseEnum() { if (getSetField() == _Fields.CAMEL_CASE_ENUM) { return (TestThriftEnum) getFieldValue(); @@ -322,7 +326,9 @@ public TestThriftEnum getCamelCaseEnum() { } } - /** @see TestThriftEnum */ + /** + * @see TestThriftEnum + */ public void setCamelCaseEnum(TestThriftEnum value) { if (value == null) throw new java.lang.NullPointerException(); setField_ = _Fields.CAMEL_CASE_ENUM; diff --git a/sdks/java/io/xml/src/main/java/org/apache/beam/sdk/io/xml/XmlIO.java b/sdks/java/io/xml/src/main/java/org/apache/beam/sdk/io/xml/XmlIO.java index 5288d4eee5dd..5936ebc42461 100644 --- a/sdks/java/io/xml/src/main/java/org/apache/beam/sdk/io/xml/XmlIO.java +++ b/sdks/java/io/xml/src/main/java/org/apache/beam/sdk/io/xml/XmlIO.java @@ -252,7 +252,9 @@ abstract static class Builder { abstract Read build(); } - /** @deprecated Use {@link Compression} instead. */ + /** + * @deprecated Use {@link Compression} instead. + */ @Deprecated public enum CompressionType { /** See {@link Compression#AUTO}. */ @@ -340,7 +342,9 @@ public Read withMinBundleSize(long minBundleSize) { return toBuilder().setMinBundleSize(minBundleSize).build(); } - /** @deprecated use {@link #withCompression}. */ + /** + * @deprecated use {@link #withCompression}. 
+ */ @Deprecated public Read withCompressionType(CompressionType compressionType) { return withCompression(compressionType.canonical); diff --git a/sdks/java/managed/src/test/java/org/apache/beam/sdk/managed/ManagedTest.java b/sdks/java/managed/src/test/java/org/apache/beam/sdk/managed/ManagedTest.java index 249faffec567..2499fc676f0f 100644 --- a/sdks/java/managed/src/test/java/org/apache/beam/sdk/managed/ManagedTest.java +++ b/sdks/java/managed/src/test/java/org/apache/beam/sdk/managed/ManagedTest.java @@ -113,8 +113,7 @@ public void runTestProviderTest(Managed.ManagedTransform writeOp) { @Test public void testManagedTestProviderWithConfigMap() { Managed.ManagedTransform writeOp = - Managed.write(Managed.ICEBERG) - .toBuilder() + Managed.write(Managed.ICEBERG).toBuilder() .setIdentifier(TestSchemaTransformProvider.IDENTIFIER) .build() .withSupportedIdentifiers(Arrays.asList(TestSchemaTransformProvider.IDENTIFIER)) @@ -131,8 +130,7 @@ public void testManagedTestProviderWithConfigFile() throws Exception { .getAbsolutePath(); Managed.ManagedTransform writeOp = - Managed.write(Managed.ICEBERG) - .toBuilder() + Managed.write(Managed.ICEBERG).toBuilder() .setIdentifier(TestSchemaTransformProvider.IDENTIFIER) .build() .withSupportedIdentifiers(Arrays.asList(TestSchemaTransformProvider.IDENTIFIER)) diff --git a/sdks/java/ml/inference/openai/src/main/java/org/apache/beam/sdk/ml/inference/openai/OpenAIModelParameters.java b/sdks/java/ml/inference/openai/src/main/java/org/apache/beam/sdk/ml/inference/openai/OpenAIModelParameters.java index fdf532810459..55529a2962d7 100644 --- a/sdks/java/ml/inference/openai/src/main/java/org/apache/beam/sdk/ml/inference/openai/OpenAIModelParameters.java +++ b/sdks/java/ml/inference/openai/src/main/java/org/apache/beam/sdk/ml/inference/openai/OpenAIModelParameters.java @@ -92,6 +92,7 @@ public Builder modelName(String modelName) { this.modelName = modelName; return this; } + /** * Sets the instruction prompt for the model. This prompt provides context or instructions to * the model about how to process the input text. diff --git a/sdks/java/ml/inference/remote/src/main/java/org/apache/beam/sdk/ml/inference/remote/RemoteInference.java b/sdks/java/ml/inference/remote/src/main/java/org/apache/beam/sdk/ml/inference/remote/RemoteInference.java index 9092fc9910d4..918b8333daf2 100644 --- a/sdks/java/ml/inference/remote/src/main/java/org/apache/beam/sdk/ml/inference/remote/RemoteInference.java +++ b/sdks/java/ml/inference/remote/src/main/java/org/apache/beam/sdk/ml/inference/remote/RemoteInference.java @@ -161,6 +161,7 @@ public void setupHandler() { throw new RuntimeException("Failed to instantiate handler: " + handlerClass.getName(), e); } } + /** Perform Inference. */ @ProcessElement public void processElement(ProcessContext c) throws Exception { diff --git a/sdks/java/testing/jpms-tests/src/test/java/org/apache/beam/sdk/jpmstests/JpmsIT.java b/sdks/java/testing/jpms-tests/src/test/java/org/apache/beam/sdk/jpmstests/JpmsIT.java index f9a8c945b0ca..3aa62a72caef 100644 --- a/sdks/java/testing/jpms-tests/src/test/java/org/apache/beam/sdk/jpmstests/JpmsIT.java +++ b/sdks/java/testing/jpms-tests/src/test/java/org/apache/beam/sdk/jpmstests/JpmsIT.java @@ -40,6 +40,7 @@ public class JpmsIT { private static final String DEFAULT_INPUT = "gs://apache-beam-samples/shakespeare/winterstale-personae"; private static final String DEFAULT_OUTPUT_CHECKSUM = "ebf895e7324e8a3edc72e7bcc96fa2ba7f690def"; + /** Options for the Jpms Integration test. 
*/ public interface JpmsITOptions extends TestPipelineOptions, WordCountOptions {} diff --git a/sdks/java/testing/load-tests/src/main/java/org/apache/beam/sdk/loadtests/CoGroupByKeyLoadTest.java b/sdks/java/testing/load-tests/src/main/java/org/apache/beam/sdk/loadtests/CoGroupByKeyLoadTest.java index 49863e825173..513106e65c03 100644 --- a/sdks/java/testing/load-tests/src/main/java/org/apache/beam/sdk/loadtests/CoGroupByKeyLoadTest.java +++ b/sdks/java/testing/load-tests/src/main/java/org/apache/beam/sdk/loadtests/CoGroupByKeyLoadTest.java @@ -75,8 +75,7 @@ public interface Options extends LoadTestOptions { void setCoSourceOptions(String sourceOptions); @Description("Co-input window duration. If not set global windows will be used.") - @Nullable - Long getCoInputWindowDurationSec(); + @Nullable Long getCoInputWindowDurationSec(); void setCoInputWindowDurationSec(Long coInputWindowDurationSec); diff --git a/sdks/java/testing/load-tests/src/main/java/org/apache/beam/sdk/loadtests/LoadTestOptions.java b/sdks/java/testing/load-tests/src/main/java/org/apache/beam/sdk/loadtests/LoadTestOptions.java index e241d8b032fb..c745f7704b3e 100644 --- a/sdks/java/testing/load-tests/src/main/java/org/apache/beam/sdk/loadtests/LoadTestOptions.java +++ b/sdks/java/testing/load-tests/src/main/java/org/apache/beam/sdk/loadtests/LoadTestOptions.java @@ -48,26 +48,22 @@ public interface LoadTestOptions extends PipelineOptions, ApplicationNameOptions void setLoadTestTimeout(Integer timeout); @Description("Window duration. If not set global windows will be used.") - @Nullable - Long getInputWindowDurationSec(); + @Nullable Long getInputWindowDurationSec(); void setInputWindowDurationSec(Long windowSizeSec); @Description("InfluxDB measurement to publish results to.") - @Nullable - String getInfluxMeasurement(); + @Nullable String getInfluxMeasurement(); void setInfluxMeasurement(@Nullable String measurement); @Description("InfluxDB host.") - @Nullable - String getInfluxHost(); + @Nullable String getInfluxHost(); void setInfluxHost(@Nullable String host); @Description("InfluxDB database.") - @Nullable - String getInfluxDatabase(); + @Nullable String getInfluxDatabase(); void setInfluxDatabase(@Nullable String database); @@ -78,8 +74,7 @@ public interface LoadTestOptions extends PipelineOptions, ApplicationNameOptions void setPublishToInfluxDB(Boolean publishToInfluxDB); @Description("Additional tags for Influx data") - @Nullable - Map getInfluxTags(); + @Nullable Map getInfluxTags(); void setInfluxTags(Map influxTags); diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkLauncher.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkLauncher.java index 7e4e5da0d853..bfb2a1e9b19b 100644 --- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkLauncher.java +++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkLauncher.java @@ -122,17 +122,22 @@ public class NexmarkLauncher { /** Minimum number of samples needed for 'stead-state' rate calculation. */ private static final int MIN_SAMPLES = 9; + /** Minimum length of time over which to consider samples for 'steady-state' rate calculation. */ private static final Duration MIN_WINDOW = Duration.standardMinutes(2); + /** Delay between perf samples. */ private static final Duration PERF_DELAY = Duration.standardSeconds(15); + /** * How long to let streaming pipeline run after all events have been generated and we've seen no * activity. 
*/ private static final Duration DONE_DELAY = Duration.standardMinutes(1); + /** How long to allow no activity at sources and sinks without warning. */ private static final Duration STUCK_WARNING_DELAY = Duration.standardMinutes(10); + /** * How long to let streaming pipeline run after we've seen no activity at sources or sinks, even * if all events have not been generated. diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkOptions.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkOptions.java index 685e4e59dd5b..87f2d44a3ce2 100644 --- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkOptions.java +++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkOptions.java @@ -48,8 +48,7 @@ public interface NexmarkOptions void setSourceType(NexmarkUtils.SourceType sourceType); @Description("Prefix for input files if using avro input") - @Nullable - String getInputPath(); + @Nullable String getInputPath(); void setInputPath(String inputPath); @@ -76,41 +75,35 @@ void setPubsubMessageSerializationMethod( NexmarkUtils.PubsubMessageSerializationMethod pubsubMessageSerializationMethod); @Description("Which query to run.") - @Nullable - String getQuery(); + @Nullable String getQuery(); void setQuery(String query); @Description("Skip the execution of the given queries (comma separated)") - @Nullable - String getSkipQueries(); + @Nullable String getSkipQueries(); void setSkipQueries(String queries); @Description("Prefix for output files if using text output for results or running Query 10.") - @Nullable - String getOutputPath(); + @Nullable String getOutputPath(); void setOutputPath(String outputPath); @Description("Base name of pubsub topic to publish to in streaming mode.") @Default.String("nexmark") - @Nullable - String getPubsubTopic(); + @Nullable String getPubsubTopic(); void setPubsubTopic(String pubsubTopic); @Description("Base name of pubsub subscription to read from in streaming mode.") @Default.String("nexmark") - @Nullable - String getPubsubSubscription(); + @Nullable String getPubsubSubscription(); void setPubsubSubscription(String pubsubSubscription); @Description("Base name of BigQuery table name if using BigQuery output.") @Default.String("nexmark") - @Nullable - String getBigQueryTable(); + @Nullable String getBigQueryTable(); void setBigQueryTable(String bigQueryTable); @@ -123,23 +116,20 @@ void setPubsubMessageSerializationMethod( @Description( "Approximate number of events to generate. 
" + "Zero for effectively unlimited in streaming mode.") - @Nullable - Long getNumEvents(); + @Nullable Long getNumEvents(); void setNumEvents(Long numEvents); @Description( "Time in seconds to preload the subscription with data, at the initial input rate " + "of the pipeline.") - @Nullable - Integer getPreloadSeconds(); + @Nullable Integer getPreloadSeconds(); void setPreloadSeconds(Integer preloadSeconds); @Description( "Time in seconds to wait in pipelineResult.waitUntilFinish(), useful in streaming mode") - @Nullable - Integer getStreamTimeout(); + @Nullable Integer getStreamTimeout(); void setStreamTimeout(Integer streamTimeout); @@ -150,8 +140,7 @@ void setPubsubMessageSerializationMethod( void setCancelStreamingJobAfterFinish(boolean cancelStreamingJobAfterFinish); @Description("Number of unbounded sources to create events.") - @Nullable - Integer getNumEventGenerators(); + @Nullable Integer getNumEventGenerators(); void setNumEventGenerators(Integer numEventGenerators); @@ -161,14 +150,12 @@ void setPubsubMessageSerializationMethod( void setRateShape(NexmarkUtils.RateShape rateShape); @Description("Initial overall event rate (in --rateUnit).") - @Nullable - Integer getFirstEventRate(); + @Nullable Integer getFirstEventRate(); void setFirstEventRate(Integer firstEventRate); @Description("Next overall event rate (in --rateUnit).") - @Nullable - Integer getNextEventRate(); + @Nullable Integer getNextEventRate(); void setNextEventRate(Integer nextEventRate); @@ -178,14 +165,12 @@ void setPubsubMessageSerializationMethod( void setRateUnit(NexmarkUtils.RateUnit rateUnit); @Description("Overall period of rate shape, in seconds.") - @Nullable - Integer getRatePeriodSec(); + @Nullable Integer getRatePeriodSec(); void setRatePeriodSec(Integer ratePeriodSec); @Description("If true, relay events in real time in streaming mode.") - @Nullable - Boolean getIsRateLimited(); + @Nullable Boolean getIsRateLimited(); void setIsRateLimited(Boolean isRateLimited); @@ -193,8 +178,7 @@ void setPubsubMessageSerializationMethod( "If true, use wallclock time as event time. 
Otherwise, use a deterministic" + " time in the past so that multiple runs will see exactly the same event streams" + " and should thus have exactly the same results.") - @Nullable - Boolean getUseWallclockEventTime(); + @Nullable Boolean getUseWallclockEventTime(); void setUseWallclockEventTime(Boolean useWallclockEventTime); @@ -214,92 +198,77 @@ void setPubsubMessageSerializationMethod( void setLogResults(boolean logResults); @Description("Average size in bytes for a person record.") - @Nullable - Integer getAvgPersonByteSize(); + @Nullable Integer getAvgPersonByteSize(); void setAvgPersonByteSize(Integer avgPersonByteSize); @Description("Average size in bytes for an auction record.") - @Nullable - Integer getAvgAuctionByteSize(); + @Nullable Integer getAvgAuctionByteSize(); void setAvgAuctionByteSize(Integer avgAuctionByteSize); @Description("Average size in bytes for a bid record.") - @Nullable - Integer getAvgBidByteSize(); + @Nullable Integer getAvgBidByteSize(); void setAvgBidByteSize(Integer avgBidByteSize); @Description("Ratio of bids for 'hot' auctions above the background.") - @Nullable - Integer getHotAuctionRatio(); + @Nullable Integer getHotAuctionRatio(); void setHotAuctionRatio(Integer hotAuctionRatio); @Description("Ratio of auctions for 'hot' sellers above the background.") - @Nullable - Integer getHotSellersRatio(); + @Nullable Integer getHotSellersRatio(); void setHotSellersRatio(Integer hotSellersRatio); @Description("Ratio of auctions for 'hot' bidders above the background.") - @Nullable - Integer getHotBiddersRatio(); + @Nullable Integer getHotBiddersRatio(); void setHotBiddersRatio(Integer hotBiddersRatio); @Description("Window size in seconds.") - @Nullable - Long getWindowSizeSec(); + @Nullable Long getWindowSizeSec(); void setWindowSizeSec(Long windowSizeSec); @Description("Window period in seconds.") - @Nullable - Long getWindowPeriodSec(); + @Nullable Long getWindowPeriodSec(); void setWindowPeriodSec(Long windowPeriodSec); @Description("If in streaming mode, the holdback for watermark in seconds.") - @Nullable - Long getWatermarkHoldbackSec(); + @Nullable Long getWatermarkHoldbackSec(); void setWatermarkHoldbackSec(Long watermarkHoldbackSec); @Description("Roughly how many auctions should be in flight for each generator.") - @Nullable - Integer getNumInFlightAuctions(); + @Nullable Integer getNumInFlightAuctions(); void setNumInFlightAuctions(Integer numInFlightAuctions); @Description("Maximum number of people to consider as active for placing auctions or bids.") - @Nullable - Integer getNumActivePeople(); + @Nullable Integer getNumActivePeople(); void setNumActivePeople(Integer numActivePeople); @Description("Filename of perf data to append to.") - @Nullable - String getPerfFilename(); + @Nullable String getPerfFilename(); void setPerfFilename(String perfFilename); @Description("Filename of baseline perf data to read from.") - @Nullable - String getBaselineFilename(); + @Nullable String getBaselineFilename(); void setBaselineFilename(String baselineFilename); @Description("Filename of summary perf data to append to.") - @Nullable - String getSummaryFilename(); + @Nullable String getSummaryFilename(); void setSummaryFilename(String summaryFilename); @Description("Filename for javascript capturing all perf data and any baselines.") - @Nullable - String getJavascriptFilename(); + @Nullable String getJavascriptFilename(); void setJavascriptFilename(String javascriptFilename); @@ -318,8 +287,7 @@ void setPubsubMessageSerializationMethod( @Description( "Delay, in 
milliseconds, for each event. We will peg one core for this " + "number of milliseconds to simulate CPU-bound computation.") - @Nullable - Long getCpuDelayMs(); + @Nullable Long getCpuDelayMs(); void setCpuDelayMs(Long cpuDelayMs); @@ -327,46 +295,39 @@ void setPubsubMessageSerializationMethod( "Extra data, in bytes, to save to persistent state for each event. " + "This will force I/O all the way to durable storage to simulate an " + "I/O-bound computation.") - @Nullable - Long getDiskBusyBytes(); + @Nullable Long getDiskBusyBytes(); void setDiskBusyBytes(Long diskBusyBytes); @Description("Skip factor for query 2. We select bids for every {@code auctionSkip}'th auction") - @Nullable - Integer getAuctionSkip(); + @Nullable Integer getAuctionSkip(); void setAuctionSkip(Integer auctionSkip); @Description("Fanout for queries 4 (groups by category id) and 7 (finds a global maximum).") - @Nullable - Integer getFanout(); + @Nullable Integer getFanout(); void setFanout(Integer fanout); @Description( "Maximum waiting time to clean personState in query3 (ie maximum waiting of the auctions" + " related to person in state in seconds in event time).") - @Nullable - Integer getMaxAuctionsWaitingTime(); + @Nullable Integer getMaxAuctionsWaitingTime(); void setMaxAuctionsWaitingTime(Integer maxAuctionsWaitingTime); @Description("Length of occasional delay to impose on events (in seconds).") - @Nullable - Long getOccasionalDelaySec(); + @Nullable Long getOccasionalDelaySec(); void setOccasionalDelaySec(Long occasionalDelaySec); @Description("Probability that an event will be delayed by delayS.") - @Nullable - Double getProbDelayedEvent(); + @Nullable Double getProbDelayedEvent(); void setProbDelayedEvent(Double probDelayedEvent); @Description("Maximum size of each log file (in events). For Query10 only.") - @Nullable - Integer getMaxLogEvents(); + @Nullable Integer getMaxLogEvents(); void setMaxLogEvents(Integer maxLogEvents); @@ -383,47 +344,40 @@ void setPubsubMessageSerializationMethod( void setManageResources(boolean manageResources); @Description("If true, use pub/sub publish time instead of event time.") - @Nullable - Boolean getUsePubsubPublishTime(); + @Nullable Boolean getUsePubsubPublishTime(); void setUsePubsubPublishTime(Boolean usePubsubPublishTime); @Description( "Number of events in out-of-order groups. 1 implies no out-of-order events. " + "1000 implies every 1000 events per generator are emitted in pseudo-random order.") - @Nullable - Long getOutOfOrderGroupSize(); + @Nullable Long getOutOfOrderGroupSize(); void setOutOfOrderGroupSize(Long outOfOrderGroupSize); @Description("If false, do not add the Monitor and Snoop transforms.") - @Nullable - Boolean getDebug(); + @Nullable Boolean getDebug(); void setDebug(Boolean value); @Description("if provided, only generate events and write them to local file with this prefix.") - @Nullable - String getGenerateEventFilePathPrefix(); + @Nullable String getGenerateEventFilePathPrefix(); void setGenerateEventFilePathPrefix(String value); @Description("If set, cancel running pipelines after this long") - @Nullable - Long getRunningTimeMinutes(); + @Nullable Long getRunningTimeMinutes(); void setRunningTimeMinutes(Long value); @Description("Specify 'sql' to use Calcite SQL queries. 
Otherwise Java transforms will be used") - @Nullable - String getQueryLanguage(); + @Nullable String getQueryLanguage(); void setQueryLanguage(String value); @Description("Base name of Kafka events topic in streaming mode.") @Default.String("nexmark") - @Nullable - String getKafkaTopic(); + @Nullable String getKafkaTopic(); void setKafkaTopic(String value); @@ -443,14 +397,12 @@ void setPubsubMessageSerializationMethod( @Description("Base name of Kafka results topic in streaming mode.") @Default.String("nexmark-results") - @Nullable - String getKafkaResultsTopic(); + @Nullable String getKafkaResultsTopic(); void setKafkaResultsTopic(String value); @Description("Kafka Bootstrap Server domains.") - @Nullable - String getBootstrapServers(); + @Nullable String getBootstrapServers(); void setBootstrapServers(String value); @@ -471,14 +423,12 @@ void setPubsubMessageSerializationMethod( void setNexmarkParallel(int value); @Description("InfluxDB host.") - @Nullable - String getInfluxHost(); + @Nullable String getInfluxHost(); void setInfluxHost(@Nullable String host); @Description("InfluxDB database.") - @Nullable - String getInfluxDatabase(); + @Nullable String getInfluxDatabase(); void setInfluxDatabase(@Nullable String database); @@ -490,20 +440,17 @@ void setPubsubMessageSerializationMethod( @Description("Base name of measurement name if using InfluxDB output.") @Default.String("nexmark") - @Nullable - String getBaseInfluxMeasurement(); + @Nullable String getBaseInfluxMeasurement(); void setBaseInfluxMeasurement(String influxDBMeasurement); @Description("Name of retention policy for Influx data.") - @Nullable - String getInfluxRetentionPolicy(); + @Nullable String getInfluxRetentionPolicy(); void setInfluxRetentionPolicy(String influxRetentionPolicy); @Description("Additional tags for Influx data") - @Nullable - Map getInfluxTags(); + @Nullable Map getInfluxTags(); void setInfluxTags(Map influxTags); } diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/AbstractSimulator.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/AbstractSimulator.java index 487bdb13fedf..7037ddafc984 100644 --- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/AbstractSimulator.java +++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/AbstractSimulator.java @@ -69,8 +69,7 @@ public AbstractSimulator(Iterator> input) { } /** Called by implementors of {@link #run}: Fetch the next input element. */ - @Nullable - TimestampedValue nextInput() { + @Nullable TimestampedValue nextInput() { if (!input.hasNext()) { return null; } diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query10.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query10.java index b99e16d25198..714f8c89d4ed 100644 --- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query10.java +++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query10.java @@ -72,12 +72,16 @@ public class Query10 extends NexmarkQueryTransform { private static class OutputFile implements Serializable { /** Maximum possible timestamp of records in file. */ private final Instant maxTimestamp; + /** Shard within window. */ private final String shard; + /** Index of file in all files in shard. */ private final long index; + /** Timing of records in this file. 
*/ private final PaneInfo.Timing timing; + /** Path to file containing records, or {@literal null} if no output required. */ private final @Nullable String filename; @@ -147,8 +151,7 @@ private String timingToString(PaneInfo.Timing timing) { /** Construct an {@link OutputFile} for {@code pane} in {@code window} for {@code shard}. */ private OutputFile outputFileFor(BoundedWindow window, String shard, PaneInfo paneInfo) { - @Nullable - String filename = + @Nullable String filename = outputPath == null ? null : String.format( diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query8.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query8.java index 3ed44e8f16a3..c258fc9c6105 100644 --- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query8.java +++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query8.java @@ -84,8 +84,7 @@ public PCollection expand(PCollection events) { new DoFn, IdNameReserve>() { @ProcessElement public void processElement(ProcessContext c) { - @Nullable - Person person = + @Nullable Person person = c.element().getValue().getOnly(NexmarkQueryUtil.PERSON_TAG, null); if (person == null) { // Person was not created in last window period. diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/WinningBids.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/WinningBids.java index f8e4cee25c87..6ef901a879b8 100644 --- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/WinningBids.java +++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/WinningBids.java @@ -350,8 +350,7 @@ public PCollection expand(PCollection events) { @ProcessElement public void processElement(ProcessContext c) { - @Nullable - Auction auction = + @Nullable Auction auction = c.element().getValue().getOnly(NexmarkQueryUtil.AUCTION_TAG, null); if (auction == null) { // We have bids without a matching auction. Give up. diff --git a/sdks/java/testing/test-utils/src/main/java/org/apache/beam/sdk/testutils/publishing/InfluxDBPublisher.java b/sdks/java/testing/test-utils/src/main/java/org/apache/beam/sdk/testutils/publishing/InfluxDBPublisher.java index d7034620ed45..fb5342c022d2 100644 --- a/sdks/java/testing/test-utils/src/main/java/org/apache/beam/sdk/testutils/publishing/InfluxDBPublisher.java +++ b/sdks/java/testing/test-utils/src/main/java/org/apache/beam/sdk/testutils/publishing/InfluxDBPublisher.java @@ -121,7 +121,9 @@ public static DataPoint dataPoint( measurement, tags, fields, timestamp, timestampUnit); } - /** @deprecated Use {@link #publish} instead. */ + /** + * @deprecated Use {@link #publish} instead. 
+ */ @Deprecated public static void publishNexmarkResults( final Collection> results, @@ -165,7 +167,9 @@ private static void publishWithCheck(final InfluxDBSettings settings, final Stri } } - /** @deprecated To be removed, kept for legacy interface {@link #publishNexmarkResults} */ + /** + * @deprecated To be removed, kept for legacy interface {@link #publishNexmarkResults} + */ @VisibleForTesting @Deprecated static String nexmarkDataPoints( diff --git a/sdks/java/testing/tpcds/src/main/java/org/apache/beam/sdk/tpcds/TpcdsOptions.java b/sdks/java/testing/tpcds/src/main/java/org/apache/beam/sdk/tpcds/TpcdsOptions.java index c85d65e641ad..93cbc94f30a0 100644 --- a/sdks/java/testing/tpcds/src/main/java/org/apache/beam/sdk/tpcds/TpcdsOptions.java +++ b/sdks/java/testing/tpcds/src/main/java/org/apache/beam/sdk/tpcds/TpcdsOptions.java @@ -80,8 +80,7 @@ public interface TpcdsOptions extends ApplicationNameOptions, GcpOptions, BeamSq @Description("Base name of BigQuery table name if using BigQuery output.") @Default.String("nexmark") - @Nullable - String getBigQueryTable(); + @Nullable String getBigQueryTable(); void setBigQueryTable(String bigQueryTable); @@ -92,14 +91,12 @@ public interface TpcdsOptions extends ApplicationNameOptions, GcpOptions, BeamSq void setBigQueryDataset(String bigQueryDataset); @Description("InfluxDB host.") - @Nullable - String getInfluxHost(); + @Nullable String getInfluxHost(); void setInfluxHost(@Nullable String host); @Description("InfluxDB database.") - @Nullable - String getInfluxDatabase(); + @Nullable String getInfluxDatabase(); void setInfluxDatabase(@Nullable String database); @@ -111,20 +108,17 @@ public interface TpcdsOptions extends ApplicationNameOptions, GcpOptions, BeamSq @Description("Base name of measurement name if using InfluxDB output.") @Default.String("tpcds") - @Nullable - String getBaseInfluxMeasurement(); + @Nullable String getBaseInfluxMeasurement(); void setBaseInfluxMeasurement(String influxDBMeasurement); @Description("Name of retention policy for Influx data.") - @Nullable - String getInfluxRetentionPolicy(); + @Nullable String getInfluxRetentionPolicy(); void setInfluxRetentionPolicy(String influxRetentionPolicy); @Description("Additional tags for Influx data") - @Nullable - Map getInfluxTags(); + @Nullable Map getInfluxTags(); void setInfluxTags(Map influxTags); } diff --git a/sdks/java/transform-service/src/main/java/org/apache/beam/sdk/transformservice/ExpansionService.java b/sdks/java/transform-service/src/main/java/org/apache/beam/sdk/transformservice/ExpansionService.java index 1a8cc8ddfb02..32a131700fef 100644 --- a/sdks/java/transform-service/src/main/java/org/apache/beam/sdk/transformservice/ExpansionService.java +++ b/sdks/java/transform-service/src/main/java/org/apache/beam/sdk/transformservice/ExpansionService.java @@ -171,11 +171,7 @@ private ExpansionApi.ExpansionResponse getAggregatedErrorResponse( + "\n"); } - return errorResponses - .values() - .iterator() - .next() - .toBuilder() + return errorResponses.values().iterator().next().toBuilder() .setError(errorMessageBuilder.toString()) .build(); }