props) {
- return new KafkaAvroDeserializerConfig(props);
- }
-
- protected KafkaAvroDeserializerConfig deserializerConfig(VerifiableProperties props) {
- return new KafkaAvroDeserializerConfig(props.props());
- }
-
- /**
- * Deserializes the payload without including schema information for primitive types, maps, and
- * arrays. Just the resulting deserialized object is returned.
- *
- * This behavior is the norm for Decoders/Deserializers.
- *
- * @param payload serialized data
- * @return the deserialized object
- */
- protected Object deserialize(byte[] payload) throws SerializationException {
- return deserialize(null, null, payload, null);
- }
-
- /**
- * Just like the single-parameter version, but accepts an Avro schema to use for reading
- *
- * @param payload serialized data
- * @param readerSchema schema to use for Avro read (optional, enables Avro projection)
- * @return the deserialized object
- */
- protected Object deserialize(byte[] payload, Schema readerSchema) throws SerializationException {
- return deserialize(null, null, payload, readerSchema);
- }
-
- protected Object deserialize(String topic, Boolean isKey, byte[] payload, Schema readerSchema)
- throws SerializationException {
- if (payload == null) {
- return null;
- }
-
- DeserializationContext context = new DeserializationContext(topic, isKey, payload);
- return context.read(context.schemaFromRegistry().rawSchema(), readerSchema);
- }
-
- private Integer schemaVersion(String topic,
- Boolean isKey,
- int id,
- String subject,
- AvroSchema schema,
- Object result) throws IOException, RestClientException {
- Integer version;
- if (isDeprecatedSubjectNameStrategy(isKey)) {
- subject = getSubjectName(topic, isKey, result, schema);
- AvroSchema subjectSchema = (AvroSchema) schemaRegistry.getSchemaBySubjectAndId(subject, id);
- version = schemaRegistry.getVersion(subject, subjectSchema);
- } else {
- //we already got the subject name
- version = schemaRegistry.getVersion(subject, schema);
- }
- return version;
- }
-
- private String subjectName(String topic, Boolean isKey, AvroSchema schemaFromRegistry) {
- return isDeprecatedSubjectNameStrategy(isKey)
- ? null
- : getSubjectName(topic, isKey, null, schemaFromRegistry);
- }
-
- /**
- * Deserializes the payload and includes schema information, with version information from the
- * schema registry embedded in the schema.
- *
- * @param payload the serialized data
- * @return a GenericContainer with the schema and data, either as a {@link NonRecordContainer},
- * {@link org.apache.avro.generic.GenericRecord}, or {@link SpecificRecord}
- */
- protected GenericContainerWithVersion deserializeWithSchemaAndVersion(
- String topic, boolean isKey, byte[] payload)
- throws SerializationException {
- // Even if the caller requests schema & version, if the payload is null we cannot include it.
- // The caller must handle this case.
- if (payload == null) {
- return null;
- }
-
- // Annotate the schema with the version. Note that we only do this if the schema +
- // version are requested, i.e. in Kafka Connect converters. This is critical because that
- // code *will not* rely on exact schema equality. Regular deserializers *must not* include
- // this information because it would return schemas which are not equivalent.
- //
- // Note, however, that we also do not fill in the connect.version field. This allows the
- // Converter to let a version provided by a Kafka Connect source take priority over the
- // schema registry's ordering (which is implicit by auto-registration time rather than
- // explicit from the Connector).
- DeserializationContext context = new DeserializationContext(topic, isKey, payload);
- AvroSchema schema = context.schemaForDeserialize();
- Object result = context.read(schema.rawSchema(), null);
-
- try {
- Integer version = schemaVersion(topic, isKey, context.getSchemaId(),
- context.getSubject(), schema, result);
- if (schema.rawSchema().getType().equals(Schema.Type.RECORD)) {
- return new GenericContainerWithVersion((GenericContainer) result, version);
- } else {
- return new GenericContainerWithVersion(new NonRecordContainer(schema.rawSchema(), result),
- version);
- }
- } catch (RestClientException | IOException e) {
- throw new SerializationException("Error retrieving Avro "
- + getSchemaType(isKey)
- + " schema version for id "
- + context.getSchemaId(), e);
- }
- }
-
- protected DatumReader<Object> getDatumReader(Schema writerSchema, Schema readerSchema) {
- // normalize reader schema
- readerSchema = getReaderSchema(writerSchema, readerSchema);
- boolean writerSchemaIsPrimitive =
- AvroSchemaUtils.getPrimitiveSchemas().containsValue(writerSchema);
- if (writerSchemaIsPrimitive) {
- return new GenericDatumReader<>(writerSchema, readerSchema);
- } else if (useSchemaReflection) {
- return new ReflectDatumReader<>(writerSchema, readerSchema);
- } else if (useSpecificAvroReader) {
- return new SpecificDatumReader<>(writerSchema, readerSchema);
- } else {
- return new GenericDatumReader<>(writerSchema, readerSchema);
- }
- }
-
- /**
- * Normalizes the reader schema, puts the resolved schema into the cache.
- *
- * if the reader schema is provided, use the provided one
- * if the reader schema is cached for the writer schema full name, use the cached value
- * if the writer schema is primitive, use the writer one
- * if schema reflection is used, generate one from the class referred by writer schema
- * if generated classes are used, query the class referred by writer schema
- * otherwise use the writer schema
- *
- */
- private Schema getReaderSchema(Schema writerSchema, Schema readerSchema) {
- if (readerSchema != null) {
- return readerSchema;
- }
- readerSchema = readerSchemaCache.get(writerSchema.getFullName());
- if (readerSchema != null) {
- return readerSchema;
- }
- boolean writerSchemaIsPrimitive =
- AvroSchemaUtils.getPrimitiveSchemas().values().contains(writerSchema);
- if (writerSchemaIsPrimitive) {
- readerSchema = writerSchema;
- } else if (useSchemaReflection) {
- readerSchema = getReflectionReaderSchema(writerSchema);
- readerSchemaCache.put(writerSchema.getFullName(), readerSchema);
- } else if (useSpecificAvroReader) {
- readerSchema = getSpecificReaderSchema(writerSchema);
- readerSchemaCache.put(writerSchema.getFullName(), readerSchema);
- } else {
- readerSchema = writerSchema;
- }
- return readerSchema;
- }
-
- @SuppressWarnings("unchecked")
- private Schema getSpecificReaderSchema(Schema writerSchema) {
- Class<SpecificRecord> readerClass = SpecificData.get().getClass(writerSchema);
- if (readerClass == null) {
- throw new SerializationException("Could not find class "
- + writerSchema.getFullName()
- + " specified in writer's schema whilst finding reader's "
- + "schema for a SpecificRecord.");
- }
- try {
- return readerClass.getConstructor().newInstance().getSchema();
- } catch (InstantiationException | NoSuchMethodException | InvocationTargetException e) {
- throw new SerializationException(writerSchema.getFullName()
- + " specified by the "
- + "writers schema could not be instantiated to "
- + "find the readers schema.");
- } catch (IllegalAccessException e) {
- throw new SerializationException(writerSchema.getFullName()
- + " specified by the "
- + "writers schema is not allowed to be instantiated "
- + "to find the readers schema.");
- }
- }
-
- private Schema getReflectionReaderSchema(Schema writerSchema) {
- // shall we use ReflectData.AllowNull.get() instead?
- Class<?> readerClass = ReflectData.get().getClass(writerSchema);
- if (readerClass == null) {
- throw new SerializationException("Could not find class "
- + writerSchema.getFullName()
- + " specified in writer's schema whilst finding reader's "
- + "schema for a reflected class.");
- }
- return ReflectData.get().getSchema(readerClass);
- }
-
- class DeserializationContext {
- private final String topic;
- private final Boolean isKey;
- private final ByteBuffer buffer;
- private final int schemaId;
-
- DeserializationContext(final String topic, final Boolean isKey, final byte[] payload) {
- this.topic = topic;
- this.isKey = isKey;
- this.buffer = getByteBuffer(payload);
- this.schemaId = buffer.getInt();
- }
-
- AvroSchema schemaFromRegistry() {
- String subject = getSubject();
- ConcurrentMap<Integer, Integer> subjectIdMap = oldToNewIdMap.computeIfAbsent(subject, sub -> new ConcurrentHashMap<>());
- int id = subjectIdMap.getOrDefault(schemaId, schemaId);
- try {
- return (AvroSchema) schemaRegistry.getSchemaBySubjectAndId(subject, id);
- } catch (RestClientException e) {
- if (e.getErrorCode() == 40403) {
- try {
- List<Integer> versions = schemaRegistry.getAllVersions(subject);
- int latestId = versions.get(versions.size() - 1);
- subjectIdMap.put(schemaId, latestId);
-
- return (AvroSchema) schemaRegistry.getSchemaBySubjectAndId(subject, latestId);
- } catch (RestClientException | IOException ex) {
- throw new SerializationException("Error retrieving Avro "
- + getSchemaType(isKey)
- + " schema for id "
- + schemaId, e);
- }
- }
- throw new SerializationException("Error retrieving Avro "
- + getSchemaType(isKey)
- + " schema for id "
- + schemaId, e);
- } catch (IOException e) {
- throw new SerializationException("Error retrieving Avro "
- + getSchemaType(isKey)
- + " schema for id "
- + schemaId, e);
- }
- }
-
- AvroSchema schemaForDeserialize() {
- return schemaFromRegistry();
- }
-
- String getSubject() {
- return subjectName(topic, isKey, schemaFromRegistry());
- }
-
- String getTopic() {
- return topic;
- }
-
- boolean isKey() {
- return isKey;
- }
-
-
- int getSchemaId() {
- return schemaId;
- }
-
- Object read(Schema writerSchema) {
- return read(writerSchema, null);
- }
-
- Object read(Schema writerSchema, Schema readerSchema) {
- DatumReader<Object> reader = getDatumReader(writerSchema, readerSchema);
- int length = buffer.limit() - 1 - idSize;
- if (writerSchema.getType().equals(Schema.Type.BYTES)) {
- byte[] bytes = new byte[length];
- buffer.get(bytes, 0, length);
- return bytes;
- } else {
- int start = buffer.position() + buffer.arrayOffset();
- try {
- Object result = reader.read(null, decoderFactory.binaryDecoder(buffer.array(),
- start, length, null));
- if (writerSchema.getType().equals(Schema.Type.STRING)) {
- return result.toString();
- } else {
- return result;
- }
- } catch (IOException | RuntimeException e) {
- // avro deserialization may throw AvroRuntimeException, NullPointerException, etc
- throw new SerializationException("Error deserializing Avro message for id "
- + schemaId, e);
- }
- }
- }
- }
-
- private static String getSchemaType(Boolean isKey) {
- if (isKey == null) {
- return "unknown";
- } else if (isKey) {
- return "key";
- } else {
- return "value";
- }
- }
-}
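
Note on the removed deserializer above: DeserializationContext reads a 4-byte schema id right after the magic byte and later computes the Avro payload length as buffer.limit() - 1 - idSize. Below is a minimal sketch of that framing, assuming the standard Confluent wire format (one zero magic byte, a big-endian 4-byte schema id, then the Avro-encoded datum); the class and constant names are illustrative and not part of the removed code.

    import java.nio.ByteBuffer;

    // Illustrative sketch of the wire format the removed DeserializationContext parses:
    // [0x00 magic byte][4-byte big-endian schema id][Avro binary payload]
    final class WireFormatSketch {
        private static final byte MAGIC_BYTE = 0x0;
        private static final int ID_SIZE = 4;

        static ByteBuffer getByteBuffer(byte[] payload) {
            ByteBuffer buffer = ByteBuffer.wrap(payload);
            if (buffer.get() != MAGIC_BYTE) {
                throw new IllegalArgumentException("Unknown magic byte!");
            }
            return buffer; // positioned at the schema id
        }

        public static void main(String[] args) {
            byte[] payload = {0x0, 0x0, 0x0, 0x0, 0x2A, 0x10, 0x06}; // schema id 42, then Avro data
            ByteBuffer buffer = getByteBuffer(payload);
            int schemaId = buffer.getInt();                // 42
            int avroLength = buffer.limit() - 1 - ID_SIZE; // 2, mirroring the length calculation above
            System.out.println("id=" + schemaId + " length=" + avroLength);
        }
    }
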
diff --git a/radar-commons/build.gradle b/radar-commons/build.gradle
index a208d566..ad6bcec4 100644
--- a/radar-commons/build.gradle
+++ b/radar-commons/build.gradle
@@ -1,8 +1,5 @@
description = 'RADAR Common utilities library.'
-targetCompatibility = '1.8'
-sourceCompatibility = '1.8'
-
//---------------------------------------------------------------------------//
// Sources and classpath configurations //
//---------------------------------------------------------------------------//
@@ -16,17 +13,11 @@ configurations {
// In this section you declare where to find the dependencies of your project
repositories {
maven { url 'https://jitpack.io' }
- maven { url 'https://oss.jfrog.org/artifactory/oss-snapshot-local' }
}
// In this section you declare the dependencies for your production and test code
dependencies {
- api (group: 'org.apache.avro', name: 'avro', version: avroVersion) {
- exclude group: 'org.xerial.snappy', module: 'snappy-java'
- exclude group: 'com.thoughtworks.paranamer', module: 'paranamer'
- exclude group: 'org.apache.commons', module: 'commons-compress'
- exclude group: 'org.tukaani', module: 'xz'
- }
+ api (group: 'org.apache.avro', name: 'avro', version: avroVersion)
// to implement producers and consumers
api group: 'com.squareup.okhttp3', name: 'okhttp', version: okhttpVersion
diff --git a/radar-commons/src/main/java/org/radarbase/config/ServerConfig.java b/radar-commons/src/main/java/org/radarbase/config/ServerConfig.java
index 5ad6b177..90a5d401 100644
--- a/radar-commons/src/main/java/org/radarbase/config/ServerConfig.java
+++ b/radar-commons/src/main/java/org/radarbase/config/ServerConfig.java
@@ -28,6 +28,7 @@
/**
* POJO representing a ServerConfig configuration.
*/
+@SuppressWarnings("PMD.GodClass")
public class ServerConfig {
private String host;
private int port = -1;
@@ -57,21 +58,7 @@ public ServerConfig(String urlString) throws MalformedURLException {
/** Get the path of the server as a string. This does not include proxyHost information. */
public String getUrlString() {
- return addUrlString(new StringBuilder(40)).toString();
- }
-
- private StringBuilder addUrlString(StringBuilder builder) {
- if (protocol != null) {
- builder.append(protocol).append("://");
- }
- builder.append(host);
- if (port != -1) {
- builder.append(':').append(port);
- }
- if (path != null) {
- builder.append(path);
- }
- return builder;
+ return getUrl().toString();
}
/** Get the paths of a list of servers, concatenated with commas. */
@@ -84,7 +71,7 @@ public static String getPaths(List<ServerConfig> configList) {
} else {
builder.append(',');
}
- server.addUrlString(builder);
+ builder.append(server.getUrl());
}
return builder.toString();
}
@@ -109,22 +96,11 @@ public URL getUrl() {
* @throws IllegalStateException if the URL is invalid
*/
public HttpUrl getHttpUrl() {
- HttpUrl.Builder urlBuilder = new HttpUrl.Builder()
- .scheme(protocol)
- .host(host);
-
- if (port != -1) {
- urlBuilder.port(port);
- }
- if (path != null) {
- urlBuilder.encodedPath(path);
- }
-
- return urlBuilder.build();
+ return HttpUrl.get(getUrl());
}
/**
- * Get the HTTP proxyHost associated to given server
+ * Get the HTTP proxyHost associated with the given server.
* @return http proxyHost if specified, or null if none is specified.
* @throws IllegalStateException if proxyHost is set but proxyPort is not or if the server
* protocol is not HTTP(s)
@@ -206,23 +182,29 @@ public String getPath() {
* @throws IllegalArgumentException if the path contains a question mark.
*/
public final void setPath(String path) {
- if (path == null) {
- this.path = "/";
- } else if (path.contains("?")) {
+ this.path = cleanPath(path);
+ }
+
+ @SuppressWarnings("PMD.UseStringBufferForStringAppends")
+ private static String cleanPath(String path) {
+ String newPath = path;
+ if (newPath == null) {
+ newPath = "/";
+ }
+ if (newPath.contains("?")) {
throw new IllegalArgumentException("Cannot set server path with query string");
- } else {
- this.path = path.trim();
- if (this.path.isEmpty()) {
- this.path = "/";
- } else {
- if (this.path.charAt(0) != '/') {
- this.path = '/' + this.path;
- }
- if (this.path.charAt(this.path.length() - 1) != '/') {
- this.path += '/';
- }
- }
}
+ newPath = newPath.trim();
+ if (newPath.isEmpty()) {
+ newPath = "/";
+ }
+ if (newPath.charAt(0) != '/') {
+ newPath = '/' + newPath;
+ }
+ if (newPath.charAt(newPath.length() - 1) != '/') {
+ newPath += '/';
+ }
+ return newPath;
}
@Override
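
For clarity, a small usage sketch of the refactored setPath/cleanPath behavior above; the host URL and example class name are illustrative, and the results follow directly from the logic in this hunk.

    import java.net.MalformedURLException;
    import org.radarbase.config.ServerConfig;

    // Hypothetical call site tracing cleanPath: null becomes "/", paths are trimmed and
    // wrapped in leading/trailing slashes, and query strings are rejected.
    public class ServerConfigPathExample {
        public static void main(String[] args) throws MalformedURLException {
            ServerConfig config = new ServerConfig("https://radar.example.org");
            config.setPath(null);      // stored as "/"
            config.setPath("  api ");  // trimmed, then wrapped: "/api/"
            config.setPath("api/v1");  // slashes added: "/api/v1/"
            System.out.println(config.getPath());
            // config.setPath("api?x=1"); // throws IllegalArgumentException: query strings are rejected
        }
    }
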
diff --git a/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.java b/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.java
index c8f40ba0..bbd65b48 100644
--- a/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.java
+++ b/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.java
@@ -41,11 +41,12 @@ private class SchemaEncoderWriter<T> implements AvroWriter<T> {
this.schema = schema;
GenericData genericData;
+ ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
if (SpecificRecord.class.isAssignableFrom(clazz)) {
- genericData = new SpecificData(RemoteSchemaEncoder.class.getClassLoader());
+ genericData = new SpecificData(classLoader);
isGeneric = false;
} else {
- genericData = new GenericData(RemoteSchemaEncoder.class.getClassLoader());
+ genericData = new GenericData(classLoader);
isGeneric = true;
}
recordEncoder = new AvroDatumEncoder(genericData, binary);
diff --git a/radar-commons/src/main/java/org/radarbase/producer/BatchedKafkaSender.java b/radar-commons/src/main/java/org/radarbase/producer/BatchedKafkaSender.java
index 6e2fedce..8bc7f22c 100644
--- a/radar-commons/src/main/java/org/radarbase/producer/BatchedKafkaSender.java
+++ b/radar-commons/src/main/java/org/radarbase/producer/BatchedKafkaSender.java
@@ -152,6 +152,7 @@ public void flush() throws IOException {
}
@Override
+ @SuppressWarnings("PMD.UseTryWithResources")
public void close() throws IOException {
try {
flush();
diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapperFactory.java b/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapperFactory.java
index 51b68772..7f70ce66 100644
--- a/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapperFactory.java
+++ b/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapperFactory.java
@@ -368,10 +368,10 @@ private AvroDataMapper mapBytes(Schema from, final Schema to, final Object defau
private AvroDataMapper mapRecord(Schema from, Schema to)
- throws SchemaValidationException {
+ throws SchemaValidationException {
if (to.getType() != Schema.Type.RECORD) {
throw new SchemaValidationException(to, from,
- new IllegalArgumentException("From and to schemas must be records."));
+ new IllegalArgumentException("From and to schemas must be records."));
}
List<Schema.Field> fromFields = from.getFields();
Schema.Field[] toFields = new Schema.Field[fromFields.size()];
@@ -398,8 +398,8 @@ private AvroDataMapper mapRecord(Schema from, Schema to)
for (int i = 0; i < filledPositions.length; i++) {
if (!filledPositions[i] && to.getFields().get(i).defaultVal() == null) {
throw new SchemaValidationException(to, from,
- new IllegalArgumentException("Cannot map to record without default value"
- + " for new field " + to.getFields().get(i).name()));
+ new IllegalArgumentException("Cannot map to record without default value"
+ + " for new field " + to.getFields().get(i).name()));
}
}
diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordRequest.java b/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordRequest.java
index 4fe2a5c1..41c56377 100644
--- a/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordRequest.java
+++ b/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordRequest.java
@@ -124,9 +124,10 @@ public void prepare(ParsedSchemaMetadata keySchema, ParsedSchemaMetadata valueSc
@Override
public String content(int maxLength) throws IOException {
- Buffer buffer = new Buffer();
- writeToSink(buffer, maxLength / 2 - 2);
- return "0x" + Strings.bytesToHex(
- buffer.readByteArray(Math.min(buffer.size(), maxLength - 2)));
+ try (Buffer buffer = new Buffer()) {
+ writeToSink(buffer, maxLength / 2 - 2);
+ return "0x" + Strings.bytesToHex(
+ buffer.readByteArray(Math.min(buffer.size(), maxLength - 2)));
+ }
}
}
diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/GzipRequestInterceptor.java b/radar-commons/src/main/java/org/radarbase/producer/rest/GzipRequestInterceptor.java
index a12c9011..6ce5f98b 100644
--- a/radar-commons/src/main/java/org/radarbase/producer/rest/GzipRequestInterceptor.java
+++ b/radar-commons/src/main/java/org/radarbase/producer/rest/GzipRequestInterceptor.java
@@ -56,9 +56,9 @@ public long contentLength() {
@Override
public void writeTo(BufferedSink sink) throws IOException {
- BufferedSink gzipSink = Okio.buffer(new GzipSink(sink));
- body.writeTo(gzipSink);
- gzipSink.close();
+ try (BufferedSink gzipSink = Okio.buffer(new GzipSink(sink))) {
+ body.writeTo(gzipSink);
+ }
}
};
}
@@ -72,4 +72,4 @@ public int hashCode() {
public boolean equals(Object obj) {
return this == obj || obj != null && getClass() == obj.getClass();
}
-}
\ No newline at end of file
+}
diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordRequest.java b/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordRequest.java
index 1d47ad99..2ae287ce 100644
--- a/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordRequest.java
+++ b/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordRequest.java
@@ -118,8 +118,9 @@ public void prepare(ParsedSchemaMetadata keySchema, ParsedSchemaMetadata valueSc
@Override
public String content(int maxLength) throws IOException {
- Buffer buffer = new Buffer();
- writeToSink(buffer, maxLength);
- return buffer.readString(Math.min(buffer.size(), maxLength), StandardCharsets.UTF_8);
+ try (Buffer buffer = new Buffer()) {
+ writeToSink(buffer, maxLength);
+ return buffer.readString(Math.min(buffer.size(), maxLength), StandardCharsets.UTF_8);
+ }
}
}
diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/RestClient.java b/radar-commons/src/main/java/org/radarbase/producer/rest/RestClient.java
index 9bc29727..df1cf8f6 100644
--- a/radar-commons/src/main/java/org/radarbase/producer/rest/RestClient.java
+++ b/radar-commons/src/main/java/org/radarbase/producer/rest/RestClient.java
@@ -187,11 +187,12 @@ public String toString() {
* @throws IOException if the body could not be read as a String.
*/
public static String responseBody(Response response) throws IOException {
- ResponseBody body = response.body();
- if (body == null) {
- return null;
+ try (ResponseBody body = response.body()) {
+ if (body == null) {
+ return null;
+ }
+ return body.string();
}
- return body.string();
}
/** Create a new builder with the settings of the current client. */
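
A hedged usage sketch of the responseBody() change above: closing the ResponseBody in try-with-resources releases the underlying connection even if string() throws. The client and request below are illustrative only; only RestClient.responseBody(Response) comes from this diff.

    import okhttp3.OkHttpClient;
    import okhttp3.Request;
    import okhttp3.Response;
    import org.radarbase.producer.rest.RestClient;

    // Illustrative call site for the static helper changed above.
    public class ResponseBodyExample {
        public static void main(String[] args) throws Exception {
            OkHttpClient client = new OkHttpClient();
            Request request = new Request.Builder().url("https://example.org/").build();
            try (Response response = client.newCall(request).execute()) {
                // The body is now closed inside responseBody(), so no extra cleanup is needed here.
                System.out.println(RestClient.responseBody(response));
            }
        }
    }
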
diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRetriever.java b/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRetriever.java
index 8b609397..074a1ac0 100644
--- a/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRetriever.java
+++ b/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRetriever.java
@@ -37,6 +37,7 @@
* Retriever of an Avro Schema. Internally, only {@link JSONObject} is used to manage JSON data,
* to keep the class as lean as possible.
*/
+@SuppressWarnings("PMD.GodClass")
public class SchemaRetriever {
private static final Logger logger = LoggerFactory.getLogger(SchemaRetriever.class);
private static final long MAX_VALIDITY = 86400L;
@@ -195,15 +196,7 @@ protected ParsedSchemaMetadata getCachedVersion(String subject, int id,
Integer version = reportedVersion;
if (version == null || version <= 0) {
- ConcurrentMap<Integer, TimedInt> versions = subjectVersionCache.get(subject);
- if (versions != null) {
- for (Map.Entry<Integer, TimedInt> entry : versions.entrySet()) {
- if (!entry.getValue().isExpired() && entry.getKey() != 0
- && entry.getValue().value == id) {
- version = entry.getKey();
- break;
- }
- }
- }
+ version = findCachedVersion(id, versions);
if (version == null || version <= 0) {
return null;
}
@@ -211,6 +204,20 @@ protected ParsedSchemaMetadata getCachedVersion(String subject, int id,
return new ParsedSchemaMetadata(id, version, schema);
}
+ private Integer findCachedVersion(int id, ConcurrentMap<Integer, TimedInt> cache) {
+ if (cache == null) {
+ return null;
+ }
+ for (Map.Entry<Integer, TimedInt> entry : cache.entrySet()) {
+ if (!entry.getValue().isExpired()
+ && entry.getKey() != 0
+ && entry.getValue().value == id) {
+ return entry.getKey();
+ }
+ }
+ return null;
+ }
+
protected void cache(ParsedSchemaMetadata metadata, String subject, boolean latest) {
TimedInt id = new TimedInt(metadata.getId(), cacheValidity);
schemaCache.put(metadata.getSchema(), id);
diff --git a/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.java b/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.java
index 2b63875e..ce8cda49 100644
--- a/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.java
+++ b/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.java
@@ -17,6 +17,7 @@
package org.radarbase.topic;
import java.lang.reflect.InvocationTargetException;
+import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.apache.avro.Schema;
@@ -96,7 +97,7 @@ public V newValueInstance() throws ClassCastException {
}
public Schema.Type[] getValueFieldTypes() {
- return valueFieldTypes;
+ return Arrays.copyOf(valueFieldTypes, valueFieldTypes.length);
}
/**
diff --git a/radar-commons/src/main/java/org/radarbase/util/Base64.java b/radar-commons/src/main/java/org/radarbase/util/Base64.java
index 62d630b1..3f23e4a3 100644
--- a/radar-commons/src/main/java/org/radarbase/util/Base64.java
+++ b/radar-commons/src/main/java/org/radarbase/util/Base64.java
@@ -52,7 +52,8 @@
@SuppressWarnings("PMD.ClassNamingConventions")
public class Base64 {
- private Base64() {}
+ private Base64() {
+ }
/**
* Returns a {@link Encoder} that encodes using the
diff --git a/settings.gradle b/settings.gradle
index d6543b68..2c61dca1 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -13,18 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-pluginManagement {
- repositories {
- gradlePluginPortal()
- jcenter()
- maven {
- name "JCenter Gradle Plugins"
- url "https://dl.bintray.com/gradle/gradle-plugins"
- }
- }
-}
include ':radar-commons'
include ':radar-commons-testing'
include ':radar-commons-server'
-include ':radar-commons-unsafe'