Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1532,25 +1532,19 @@ class BeamModulePlugin implements Plugin<Project> {
def disabledChecks = [
// TODO(https://github.com/apache/beam/issues/20955): Enable errorprone checks
"AutoValueImmutableFields",
"AutoValueSubclassLeaked",
"ComparableType",
"DoNotMockAutoValue",
"EmptyBlockTag",
"EqualsUnsafeCast",
"EscapedEntity",
"ExtendsAutoValue",
"InlineMeSuggester",
"InvalidBlockTag",
"JodaConstructors",
"MixedMutabilityReturnType",
"PreferJavaTimeOverload",
"NonCanonicalType",
"Slf4jSignOnlyFormat",
"UnescapedEntity",
"UnrecognisedJavadocTag",
// errorprone 3.2.0+ checks
"DirectInvocationOnMock",
"JUnitIncompatibleType",
"MockNotUsedInProduction",
"NullableWildcard",
"SuperCallToObjectMethod",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -167,8 +167,8 @@ public void setUp() throws Exception {
public void testSplitAndReadBundlesBack() throws Exception {
com.google.api.services.dataflow.model.Source source =
translateIOToCloudSource(CountingSource.upTo(10L), options);
List<WindowedValue<Integer>> elems = readElemsFromSource(options, source);
assertEquals(10L, elems.size());
List<WindowedValue<Long>> elems = readElemsFromSource(options, source);
assertEquals(10, elems.size());
for (long i = 0; i < 10L; i++) {
assertEquals(valueInGlobalWindow(i), elems.get((int) i));
}
Expand All @@ -188,7 +188,7 @@ public void testSplitAndReadBundlesBack() throws Exception {
com.google.api.services.dataflow.model.Source bundleSource = bundle.getSource();
assertTrue(bundleSource.getDoesNotNeedSplitting());
bundleSource.setCodec(source.getCodec());
List<WindowedValue<Integer>> xs = readElemsFromSource(options, bundleSource);
List<WindowedValue<Long>> xs = readElemsFromSource(options, bundleSource);
assertThat(
"Failed on bundle " + i,
xs,
Expand Down Expand Up @@ -305,15 +305,15 @@ public void testProgressAndSourceSplitTranslation() throws Exception {
// Same as previous test, but now using BasicSerializableSourceFormat wrappers.
// We know that the underlying reader behaves correctly (because of the previous test),
// now check that we are wrapping it correctly.
NativeReader<WindowedValue<Integer>> reader =
(NativeReader<WindowedValue<Integer>>)
NativeReader<WindowedValue<Long>> reader =
(NativeReader<WindowedValue<Long>>)
ReaderRegistry.defaultRegistry()
.create(
translateIOToCloudSource(CountingSource.upTo(10), options),
options,
null, // executionContext
TestOperationContext.create());
try (NativeReader.NativeReaderIterator<WindowedValue<Integer>> iterator = reader.iterator()) {
try (NativeReader.NativeReaderIterator<WindowedValue<Long>> iterator = reader.iterator()) {
assertTrue(iterator.start());
assertEquals(valueInGlobalWindow(0L), iterator.getCurrent());
assertEquals(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -734,6 +734,10 @@ public abstract static class ServerInfo {

abstract Builder toBuilder();

/**
 * Returns a new {@link Builder} for {@code ServerInfo}.
 *
 * <p>Delegates to the AutoValue-generated builder (the enclosing type is an
 * {@code @AutoValue.Builder} pair) so callers don't reference the generated
 * {@code AutoValue_*} class directly.
 */
public static Builder builder() {
  return new AutoValue_DefaultJobBundleFactory_ServerInfo.Builder();
}

@AutoValue.Builder
abstract static class Builder {
abstract Builder setControlServer(GrpcFnServer<FnApiControlClientPoolService> server);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ public void setUpMocks() throws Exception {
when(stateServer.getService()).thenReturn(stateService);
when(provisioningServer.getService()).thenReturn(provisionService);
serverInfo =
new AutoValue_DefaultJobBundleFactory_ServerInfo.Builder()
DefaultJobBundleFactory.ServerInfo.builder()
.setControlServer(controlServer)
.setLoggingServer(loggingServer)
.setRetrievalServer(retrievalServer)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -507,8 +507,8 @@ private long offset(long newWeight) {

/**
* Emulates taking the ordered union of all elements in buffers, repeated according to their
* weight, and picking out the (k * step + offset)-th elements of this list for {@code 0 &lt;= k
* &lt; count}.
* weight, and picking out the (k * step + offset)-th elements of this list for 0 &lt;= k &lt;
* count.
Comment on lines +510 to +511
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

While removing {@code} resolves the EscapedEntity check, it's generally better to keep the code formatting for mathematical expressions in Javadoc. You can use the literal characters inside the {@code} block instead of HTML entities.

Suggested change
* weight, and picking out the (k * step + offset)-th elements of this list for 0 &lt;= k &lt;
* count.
* weight, and picking out the (k * step + offset)-th elements of this list for {@code 0 <= k <
* count}.

*/
private List<T> interpolate(
Iterable<QuantileBuffer<T>> buffers, int count, double step, double offset) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,16 +174,15 @@ public void testUnboundedDisplayData() {

@Test
public void testBuildExternal() {
@SuppressWarnings("NonCanonicalType")
GenerateSequence.External.ExternalConfiguration externalConfig =
new AutoValue_GenerateSequence.External.ExternalConfiguration();
new GenerateSequence.External.ExternalConfiguration();
externalConfig.setStart(42L);
externalConfig.setStop(43L);
externalConfig.setElementsPerPeriod(1L);
externalConfig.setMaxReadTime(2L);
externalConfig.setPeriod(3L);

AutoValue_GenerateSequence.Builder builder = new AutoValue_GenerateSequence.Builder();
GenerateSequence.Builder builder = GenerateSequence.from(0).toBuilder();
GenerateSequence object = builder.buildExternal(externalConfig);
assertThat(object.getFrom(), is(42L));
assertThat(object.getTo(), is(43L));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ public void testNanosInstant() {

Schema schema = Schema.builder().addLogicalTypeField("now", new NanosInstant()).build();
Row row = Row.withSchema(schema).addValues(now).build();
assertEquals(now, row.getLogicalTypeValue(0, NanosInstant.class));
assertEquals(now, row.getLogicalTypeValue(0, Instant.class));
assertEquals(nowAsRow, row.getBaseValue(0, Row.class));
}

Expand All @@ -108,7 +108,7 @@ public void testNanosDuration() {

Schema schema = Schema.builder().addLogicalTypeField("duration", new NanosDuration()).build();
Row row = Row.withSchema(schema).addValues(duration).build();
assertEquals(duration, row.getLogicalTypeValue(0, NanosDuration.class));
assertEquals(duration, row.getLogicalTypeValue(0, Duration.class));
assertEquals(durationAsRow, row.getBaseValue(0, Row.class));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -891,7 +891,7 @@ public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
if (!(o instanceof PojoWithNestedArray)) {
if (!(o instanceof PojoWithIterable)) {
return false;
}
PojoWithIterable that = (PojoWithIterable) o;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,6 @@ public List<CoderProvider> getCoderProviders() {
return ImmutableList.of(
CoderProviders.forCoder(TypeDescriptor.of(TableRow.class), TableRowJsonCoder.of()),
CoderProviders.forCoder(
TypeDescriptor.of(AutoValue_RowMutation.class), RowMutation.RowMutationCoder.of()));
TypeDescriptor.of(RowMutation.class), RowMutation.RowMutationCoder.of()));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1194,6 +1194,10 @@ interface FromBeamRowFunction<T>

abstract Builder<T> toBuilder();

/**
 * Returns a new {@link Builder} for a {@code TypedRead<T>}.
 *
 * <p>Wraps the AutoValue-generated builder so external callers (e.g. payload
 * translators) don't depend on the generated {@code AutoValue_*} class name.
 */
public static <T> Builder<T> builder() {
  return new AutoValue_BigQueryIO_TypedRead.Builder<>();
}

@AutoValue.Builder
abstract static class Builder<T> {
abstract Builder<T> setJsonTableRef(ValueProvider<String> jsonTableRef);
Expand Down Expand Up @@ -2790,6 +2794,10 @@ public enum Method {

abstract Builder<T> toBuilder();

/**
 * Returns a new {@link Builder} for a {@code Write<T>}.
 *
 * <p>Wraps the AutoValue-generated builder so external callers (e.g. payload
 * translators) don't depend on the generated {@code AutoValue_*} class name.
 */
public static <T> Builder<T> builder() {
  return new AutoValue_BigQueryIO_Write.Builder<>();
}

@AutoValue.Builder
abstract static class Builder<T> {
abstract Builder<T> setJsonTableRef(ValueProvider<String> jsonTableRef);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@ public TypedRead<?> fromConfigRow(Row configRow, PipelineOptions options) {
(updateCompatibilityBeamVersion != null) ? updateCompatibilityBeamVersion : "2.53.0";

try {
BigQueryIO.TypedRead.Builder builder = new AutoValue_BigQueryIO_TypedRead.Builder<>();
BigQueryIO.TypedRead.Builder builder = BigQueryIO.TypedRead.builder();

String jsonTableRef = configRow.getString("json_table_ref");
if (jsonTableRef != null) {
Expand Down Expand Up @@ -378,7 +378,9 @@ public static class ReadRegistrar implements TransformPayloadTranslatorRegistrar
public Map<? extends Class<? extends PTransform>, ? extends TransformPayloadTranslator>
getTransformPayloadTranslators() {
return ImmutableMap.<Class<? extends PTransform>, TransformPayloadTranslator>builder()
.put(AutoValue_BigQueryIO_TypedRead.class, new BigQueryIOReadTranslator())
.put(
BigQueryIO.read(BigQueryIO.TableRowParser.INSTANCE).getClass(),
new BigQueryIOReadTranslator())
.build();
}
}
Expand Down Expand Up @@ -615,7 +617,7 @@ public Write<?> fromConfigRow(Row configRow, PipelineOptions options) {
(updateCompatibilityBeamVersion != null) ? updateCompatibilityBeamVersion : "2.53.0";

try {
BigQueryIO.Write.Builder builder = new AutoValue_BigQueryIO_Write.Builder<>();
BigQueryIO.Write.Builder builder = BigQueryIO.Write.builder();

String jsonTableRef = configRow.getString("json_table_ref");
if (jsonTableRef != null) {
Expand Down Expand Up @@ -928,7 +930,7 @@ public static class WriteRegistrar implements TransformPayloadTranslatorRegistra
public Map<? extends Class<? extends PTransform>, ? extends TransformPayloadTranslator>
getTransformPayloadTranslators() {
return ImmutableMap.<Class<? extends PTransform>, TransformPayloadTranslator>builder()
.put(AutoValue_BigQueryIO_Write.class, new BigQueryIOWriteTranslator())
.put(BigQueryIO.write().getClass(), new BigQueryIOWriteTranslator())
.build();
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,10 @@ abstract static class Result {
public abstract List<String> getFilenames();

abstract boolean isFirstPane();

/**
 * Creates a {@code Result} with the given file names and first-pane flag.
 *
 * <p>Static factory over the AutoValue-generated constructor; positional
 * argument order must match the abstract accessors ({@code getFilenames},
 * {@code isFirstPane}) of the enclosing value class.
 */
public static Result of(List<String> filenames, boolean isFirstPane) {
  return new AutoValue_WritePartition_Result(filenames, isFirstPane);
}
}

static class ResultCoder extends AtomicCoder<Result> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,10 @@ abstract static class Result {
abstract String getTableName();

abstract boolean isFirstPane();

/**
 * Creates a {@code Result} with the given table name and first-pane flag.
 *
 * <p>Static factory over the AutoValue-generated constructor; positional
 * argument order must match the abstract accessors ({@code getTableName},
 * {@code isFirstPane}) of the enclosing value class.
 */
public static Result of(String tableName, boolean isFirstPane) {
  return new AutoValue_WriteTables_Result(tableName, isFirstPane);
}
}

static class ResultCoder extends AtomicCoder<WriteTables.Result> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2962,7 +2962,7 @@ public void testWriteTables() throws Exception {
partitions.add(
KV.of(
ShardedKey.of(tableDestination.getTableSpec(), j),
new AutoValue_WritePartition_Result(filesPerPartition, true)));
WritePartition.Result.of(filesPerPartition, true)));

String json =
String.format(
Expand Down Expand Up @@ -3089,8 +3089,7 @@ public void testWriteRename() throws Exception {
expectedRowsPerTable.putAll(tableDestination, rows);
String tableJson = toJsonString(tempTable);
tempTables.put(tableDestination, tableJson);
tempTablesElement.add(
KV.of(tableDestination, new AutoValue_WriteTables_Result(tableJson, true)));
tempTablesElement.add(KV.of(tableDestination, WriteTables.Result.of(tableJson, true)));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -184,8 +184,8 @@
* available for value class hence value translation is required.).
*
* <pre>{@code
* SimpleFunction&lt;InputFormatValueClass, MyValueClass&gt; myOutputValueType =
* new SimpleFunction&lt;InputFormatValueClass, MyValueClass&gt;() {
* SimpleFunction<InputFormatValueClass, MyValueClass> myOutputValueType =
* new SimpleFunction<InputFormatValueClass, MyValueClass>() {
* public MyValueClass apply(InputFormatValueClass input) {
* // ...logic to transform InputFormatValueClass to MyValueClass
* }
Expand Down Expand Up @@ -257,15 +257,15 @@
* <pre>{@code
* Configuration myHadoopConfiguration = new Configuration(false);
* // Set Hadoop OutputFormat, key and value class in configuration
* myHadoopConfiguration.setClass(&quot;mapreduce.job.outputformat.class&quot;,
* myHadoopConfiguration.setClass("mapreduce.job.outputformat.class",
* MyDbOutputFormatClass, OutputFormat.class);
* myHadoopConfiguration.setClass(&quot;mapreduce.job.output.key.class&quot;,
* myHadoopConfiguration.setClass("mapreduce.job.output.key.class",
* MyDbOutputFormatKeyClass, Object.class);
* myHadoopConfiguration.setClass(&quot;mapreduce.job.output.value.class&quot;,
* myHadoopConfiguration.setClass("mapreduce.job.output.value.class",
* MyDbOutputFormatValueClass, Object.class);
* myHadoopConfiguration.setClass(&quot;mapreduce.job.partitioner.class&quot;,
* myHadoopConfiguration.setClass("mapreduce.job.partitioner.class",
* MyPartitionerClass, Object.class);
* myHadoopConfiguration.setInt(&quot;mapreduce.job.reduces&quot;, 2);
* myHadoopConfiguration.setInt("mapreduce.job.reduces", 2);
* }</pre>
*
* <p>You will need to set OutputFormat key and value class (i.e. "mapreduce.job.output.key.class"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
Expand Down Expand Up @@ -329,8 +328,7 @@ public Read withBucketAuto(boolean bucketAuto) {
public Read withQueryFn(
SerializableFunction<MongoCollection<Document>, MongoCursor<Document>> queryBuilderFn) {
checkArgument(
Arrays.asList(AutoValue_FindQuery.class, AutoValue_AggregationQuery.class)
.contains(queryBuilderFn.getClass()),
(queryBuilderFn instanceof FindQuery || queryBuilderFn instanceof AggregationQuery),
String.format("[%s]" + ERROR_MSG_QUERY_FN, queryBuilderFn.getClass().getName()));
return builder().setQueryFn(queryBuilderFn).build();
}
Expand Down Expand Up @@ -504,7 +502,7 @@ public List<BoundedSource<Document>> split(
List<Document> splitKeys;
List<BoundedSource<Document>> sources = new ArrayList<>();

if (spec.queryFn().getClass() == AutoValue_FindQuery.class) {
if (spec.queryFn() instanceof FindQuery) {
if (spec.bucketAuto()) {
splitKeys = buildAutoBuckets(mongoDatabase, spec);
} else {
Expand Down
Loading