diff --git a/buildSrc/src/main/kotlin/aiven-apache-kafka-connectors-all.java-conventions.gradle.kts b/buildSrc/src/main/kotlin/aiven-apache-kafka-connectors-all.java-conventions.gradle.kts index d16ac9f84..fc8c97968 100644 --- a/buildSrc/src/main/kotlin/aiven-apache-kafka-connectors-all.java-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/aiven-apache-kafka-connectors-all.java-conventions.gradle.kts @@ -133,6 +133,11 @@ spotless { endWithNewline() } + kotlinGradle { + target("*.gradle.kts") + ktfmt() + } + java { licenseHeaderFile(file("${project.rootDir}/gradle-config/java.header")) importOrder("javax", "java", "org.apache.kafka", "io.aiven", "") diff --git a/commons/build.gradle.kts b/commons/build.gradle.kts index 863fbbeef..7b39934b4 100644 --- a/commons/build.gradle.kts +++ b/commons/build.gradle.kts @@ -14,178 +14,176 @@ * limitations under the License. */ - -plugins { - id("aiven-apache-kafka-connectors-all.java-conventions") -} +plugins { id("aiven-apache-kafka-connectors-all.java-conventions") } dependencies { - compileOnly(apache.kafka.connect.api) - compileOnly(apache.kafka.connect.runtime) - compileOnly(apache.kafka.connect.json) - - implementation(confluent.kafka.connect.avro.data) { - exclude(group = "org.apache.kafka", module = "kafka-clients") - } - - implementation(tools.spotbugs.annotations) - implementation(compressionlibs.snappy) - implementation(compressionlibs.zstd.jni) - - implementation(logginglibs.slf4j) - - implementation(apache.commons.text) - - implementation(apache.parquet.avro) { - exclude(group = "org.xerial.snappy", module = "snappy-java") - exclude(group = "org.slf4j", module = "slf4j-api") - exclude(group = "org.apache.avro", module = "avro") - } - implementation(apache.hadoop.common) { - exclude(group = "org.apache.hadoop.thirdparty", module = "hadoop-shaded-protobuf_3_7") - exclude(group = "com.google.guava", module = "guava") - exclude(group = "commons-cli", module = "commons-cli") - exclude(group = "org.apache.commons", module = "commons-math3") - exclude(group = "org.apache.httpcomponents", module = "httpclient") - exclude(group = "commons-codec", module = "commons-codec") - exclude(group = "commons-io", module = "commons-io") - exclude(group = "commons-net", module = "commons-net") - exclude(group = "org.eclipse.jetty") - exclude(group = "org.eclipse.jetty.websocket") - exclude(group = "javax.servlet") - exclude(group = "javax.servlet.jsp") - exclude(group = "javax.activation") - exclude(group = "com.sun.jersey") - exclude(group = "log4j") - exclude(group = "org.apache.commons", module = "commons-text") - exclude(group = "org.slf4j", module = "slf4j-api") - exclude(group = "org.apache.hadoop", module = "hadoop-auth") - exclude(group = "org.apache.hadoop", module = "hadoop-yarn-api") - exclude(group = "com.google.re2j") - exclude(group = "com.google.protobuf") - exclude(group = "com.google.code.gson") - exclude(group = "com.jcraft") - exclude(group = "org.apache.curator") - exclude(group = "org.apache.zookeeper") - exclude(group = "org.apache.htrace") - exclude(group = "com.google.code.findbugs") - exclude(group = "org.apache.kerby") - exclude(group = "com.fasterxml.jackson.core") - exclude(group = "com.fasterxml.woodstox", module = "woodstox-core:5.0.3") - exclude(group = "org.apache.avro", module = "avro") - exclude(group = "org.apache.hadoop", module = "hadoop-yarn-common") - exclude(group = "com.google.inject.extensions", module = "guice-servlet") - exclude(group = "io.netty", module = "netty") - } + compileOnly(apache.kafka.connect.api) + 
compileOnly(apache.kafka.connect.runtime) + compileOnly(apache.kafka.connect.json) + + implementation(confluent.kafka.connect.avro.data) { + exclude(group = "org.apache.kafka", module = "kafka-clients") + } + + implementation(tools.spotbugs.annotations) + implementation(compressionlibs.snappy) + implementation(compressionlibs.zstd.jni) + + implementation(logginglibs.slf4j) + + implementation(apache.commons.text) + + implementation(apache.parquet.avro) { + exclude(group = "org.xerial.snappy", module = "snappy-java") + exclude(group = "org.slf4j", module = "slf4j-api") + exclude(group = "org.apache.avro", module = "avro") + } + implementation(apache.hadoop.common) { + exclude(group = "org.apache.hadoop.thirdparty", module = "hadoop-shaded-protobuf_3_7") + exclude(group = "com.google.guava", module = "guava") + exclude(group = "commons-cli", module = "commons-cli") + exclude(group = "org.apache.commons", module = "commons-math3") + exclude(group = "org.apache.httpcomponents", module = "httpclient") + exclude(group = "commons-codec", module = "commons-codec") + exclude(group = "commons-io", module = "commons-io") + exclude(group = "commons-net", module = "commons-net") + exclude(group = "org.eclipse.jetty") + exclude(group = "org.eclipse.jetty.websocket") + exclude(group = "javax.servlet") + exclude(group = "javax.servlet.jsp") + exclude(group = "javax.activation") + exclude(group = "com.sun.jersey") + exclude(group = "log4j") + exclude(group = "org.apache.commons", module = "commons-text") + exclude(group = "org.slf4j", module = "slf4j-api") + exclude(group = "org.apache.hadoop", module = "hadoop-auth") + exclude(group = "org.apache.hadoop", module = "hadoop-yarn-api") + exclude(group = "com.google.re2j") + exclude(group = "com.google.protobuf") + exclude(group = "com.google.code.gson") + exclude(group = "com.jcraft") + exclude(group = "org.apache.curator") + exclude(group = "org.apache.zookeeper") + exclude(group = "org.apache.htrace") + exclude(group = "com.google.code.findbugs") + exclude(group = "org.apache.kerby") + exclude(group = "com.fasterxml.jackson.core") + exclude(group = "com.fasterxml.woodstox", module = "woodstox-core:5.0.3") + exclude(group = "org.apache.avro", module = "avro") + exclude(group = "org.apache.hadoop", module = "hadoop-yarn-common") + exclude(group = "com.google.inject.extensions", module = "guice-servlet") + exclude(group = "io.netty", module = "netty") + } + + testImplementation(apache.kafka.connect.api) + testImplementation(apache.kafka.connect.runtime) + testImplementation(apache.kafka.connect.json) + testImplementation(testinglibs.junit.jupiter) + testImplementation(apache.parquet.tools) { exclude(group = "org.slf4j", module = "slf4j-api") } + testImplementation(jackson.databind) + testImplementation(testinglibs.mockito.core) + testImplementation(testinglibs.assertj.core) + + testImplementation(testinglibs.woodstox.stax2.api) + testImplementation(apache.hadoop.mapreduce.client.core) + testImplementation(confluent.kafka.connect.avro.converter) + + testRuntimeOnly(testinglibs.junit.jupiter.engine) + testRuntimeOnly(logginglibs.logback.classic) +} - testImplementation(apache.kafka.connect.api) - testImplementation(apache.kafka.connect.runtime) - testImplementation(apache.kafka.connect.json) - testImplementation(testinglibs.junit.jupiter) - testImplementation(apache.parquet.tools) { - exclude(group = "org.slf4j", module = "slf4j-api") +distributions { + main { + contents { + from("jar") + from(configurations.runtimeClasspath.get().map { if (it.isDirectory) it 
else zipTree(it) }) + + into("/") { + from("$projectDir") + include("version.txt", "README*", "LICENSE*", "NOTICE*", "licenses/") + include("config/") + } } - testImplementation(jackson.databind) - testImplementation(testinglibs.mockito.core) - testImplementation(testinglibs.assertj.core) - - testImplementation(testinglibs.woodstox.stax2.api) - testImplementation(apache.hadoop.mapreduce.client.core) - testImplementation(confluent.kafka.connect.avro.converter) - - testRuntimeOnly(testinglibs.junit.jupiter.engine) - testRuntimeOnly(logginglibs.logback.classic) + } } -distributions { - main { - contents { - from("jar") - from(configurations.runtimeClasspath.get().map { if (it.isDirectory) it else zipTree(it) }) +publishing { + publications { + create("publishMavenJavaArtifact") { + groupId = group.toString() + artifactId = "commons-for-apache-kafka-connect" + version = version.toString() + + from(components["java"]) + + pom { + name = "Aiven's Common Module for Apache Kafka connectors" + description = "Aiven's Common Module for Apache Kafka connectors" + url = "https://github.com/aiven-open/commons-for-apache-kafka-connect" + organization { + name = "Aiven Oy" + url = "https://aiven.io" + } + licenses { + license { + name = "Apache 2.0" + url = "http://www.apache.org/licenses/LICENSE-2.0" + distribution = "repo" + } + } - into("/") { - from("$projectDir") - include("version.txt", "README*", "LICENSE*", "NOTICE*", "licenses/") - include("config/") - } + developers { + developer { + id = "aiven" + name = "Aiven Opensource" + email = "opensource@aiven.io" + } } - } -} -publishing { - publications { - create("publishMavenJavaArtifact") { - groupId = group.toString() - artifactId = "commons-for-apache-kafka-connect" - version = version.toString() - - from(components["java"]) - - pom { - name = "Aiven's Common Module for Apache Kafka connectors" - description = "Aiven's Common Module for Apache Kafka connectors" - url = "https://github.com/aiven-open/commons-for-apache-kafka-connect" - organization { - name = "Aiven Oy" - url = "https://aiven.io" - } - - licenses { - license { - name = "Apache 2.0" - url = "http://www.apache.org/licenses/LICENSE-2.0" - distribution = "repo" - } - } - - developers { - developer { - id = "aiven" - name = "Aiven Opensource" - email = "opensource@aiven.io" - } - } - - scm { - connection = "scm:git:git://github.com:aiven-open/commons-for-apache-kafka-connect.git" - developerConnection = "scm:git:ssh://github.com:aiven-open/commons-for-apache-kafka-connect.git" - url = "https://github.com/aiven-open/commons-for-apache-kafka-connect" - } - } + scm { + connection = "scm:git:git://github.com:aiven-open/commons-for-apache-kafka-connect.git" + developerConnection = + "scm:git:ssh://github.com:aiven-open/commons-for-apache-kafka-connect.git" + url = "https://github.com/aiven-open/commons-for-apache-kafka-connect" } + } } + } - repositories { - maven { - name = "sonatype" + repositories { + maven { + name = "sonatype" - val releasesRepoUrl = uri("https://oss.sonatype.org/service/local/staging/deploy/maven2") - val snapshotsRepoUrl = uri("https://oss.sonatype.org/content/repositories/snapshots") - url = if (version.toString().endsWith("SNAPSHOT")) snapshotsRepoUrl else releasesRepoUrl + val releasesRepoUrl = uri("https://oss.sonatype.org/service/local/staging/deploy/maven2") + val snapshotsRepoUrl = uri("https://oss.sonatype.org/content/repositories/snapshots") + url = if (version.toString().endsWith("SNAPSHOT")) snapshotsRepoUrl else releasesRepoUrl - 
credentials(PasswordCredentials::class) - } + credentials(PasswordCredentials::class) } + } } signing { - sign(publishing.publications["publishMavenJavaArtifact"]) - useGpgCmd() - // Some issue in the plugin: - // GPG outputs already armored signatures. The plugin also does armoring for `asc` files. - // This results in double armored signatures, i.e. garbage. - // Override the signature type provider to use unarmored output for `asc` files, which works well with GPG. - class ASCSignatureProvider() : AbstractSignatureTypeProvider() { - val binary = object: BinarySignatureType() { - override fun getExtension(): String { - return "asc"; - } - } - init { - register(binary) - setDefaultType(binary.extension) + sign(publishing.publications["publishMavenJavaArtifact"]) + useGpgCmd() + // Some issue in the plugin: + // GPG outputs already armored signatures. The plugin also does armoring for `asc` files. + // This results in double armored signatures, i.e. garbage. + // Override the signature type provider to use unarmored output for `asc` files, which works well + // with GPG. + class ASCSignatureProvider() : AbstractSignatureTypeProvider() { + val binary = + object : BinarySignatureType() { + override fun getExtension(): String { + return "asc" + } } + + init { + register(binary) + setDefaultType(binary.extension) } - signatureTypes = ASCSignatureProvider() + } + signatureTypes = ASCSignatureProvider() } diff --git a/gcs-connector/build.gradle.kts b/gcs-connector/build.gradle.kts index 59c9342f7..56e56261a 100644 --- a/gcs-connector/build.gradle.kts +++ b/gcs-connector/build.gradle.kts @@ -16,268 +16,258 @@ import com.github.spotbugs.snom.SpotBugsTask * limitations under the License. */ -plugins { - id("aiven-apache-kafka-connectors-all.java-conventions") -} +plugins { id("aiven-apache-kafka-connectors-all.java-conventions") } group = "io.aiven" -val kafkaVersion by extra ("1.1.0") +val kafkaVersion by extra("1.1.0") -val integrationTest: SourceSet = sourceSets.create("integrationTest") { - java { - srcDir("src/integration-test/java") - } - resources { - srcDir("src/integration-test/resources") +val integrationTest: SourceSet = + sourceSets.create("integrationTest") { + java { srcDir("src/integration-test/java") } + resources { srcDir("src/integration-test/resources") } + compileClasspath += sourceSets.main.get().output + configurations.testRuntimeClasspath.get() + runtimeClasspath += output + compileClasspath } - compileClasspath += sourceSets.main.get().output + configurations.testRuntimeClasspath.get() - runtimeClasspath += output + compileClasspath -} -val integrationTestImplementation: Configuration by configurations.getting { - extendsFrom(configurations.implementation.get()) -} +val integrationTestImplementation: Configuration by + configurations.getting { extendsFrom(configurations.implementation.get()) } tasks.register("integrationTest") { - description = "Runs the integration tests." - group = "verification" - testClassesDirs = integrationTest.output.classesDirs - classpath = integrationTest.runtimeClasspath - - // defines testing order - shouldRunAfter("test") - // requires archive for connect runner - dependsOn("distTar") - useJUnitPlatform() - - // Run always. - outputs.upToDateWhen { false } - - // Pass the GCS credentials path to the tests. - if (project.hasProperty("gcsCredentialsPath")) { - systemProperty("integration-test.gcs.credentials.path", project.findProperty("gcsCredentialsPath").toString()) - } - // Pass the GCS credentials JSON to the tests. 
- if (project.hasProperty("gcsCredentialsJson")) { - systemProperty("integration-test.gcs.credentials.json", project.findProperty("gcsCredentialsJson").toString()) - } - // Pass the GCS bucket name to the tests. - systemProperty("integration-test.gcs.bucket", project.findProperty("testGcsBucket").toString()) - // Pass the distribution file path to the tests. - val distTarTask = tasks.get("distTar") as Tar - val distributionFilePath = distTarTask.archiveFile.get().asFile.path - systemProperty("integration-test.distribution.file.path", distributionFilePath) - systemProperty("fake-gcs-server-version", "1.45.2") + description = "Runs the integration tests." + group = "verification" + testClassesDirs = integrationTest.output.classesDirs + classpath = integrationTest.runtimeClasspath + + // defines testing order + shouldRunAfter("test") + // requires archive for connect runner + dependsOn("distTar") + useJUnitPlatform() + + // Run always. + outputs.upToDateWhen { false } + + // Pass the GCS credentials path to the tests. + if (project.hasProperty("gcsCredentialsPath")) { + systemProperty( + "integration-test.gcs.credentials.path", + project.findProperty("gcsCredentialsPath").toString()) + } + // Pass the GCS credentials JSON to the tests. + if (project.hasProperty("gcsCredentialsJson")) { + systemProperty( + "integration-test.gcs.credentials.json", + project.findProperty("gcsCredentialsJson").toString()) + } + // Pass the GCS bucket name to the tests. + systemProperty("integration-test.gcs.bucket", project.findProperty("testGcsBucket").toString()) + // Pass the distribution file path to the tests. + val distTarTask = tasks.get("distTar") as Tar + val distributionFilePath = distTarTask.archiveFile.get().asFile.path + systemProperty("integration-test.distribution.file.path", distributionFilePath) + systemProperty("fake-gcs-server-version", "1.45.2") } idea { - module { - testSources.from(integrationTest.java.srcDirs) - testSources.from(integrationTest.resources.srcDirs) - } + module { + testSources.from(integrationTest.java.srcDirs) + testSources.from(integrationTest.resources.srcDirs) + } } dependencies { - compileOnly(apache.kafka.connect.api) - compileOnly(apache.kafka.connect.runtime) - - implementation(project(":commons")) - - implementation("com.google.cloud:google-cloud-storage:2.37.0") { - exclude(group = "com.google.guava", module = "guava") - } - // TODO: document why specific version of guava is required - implementation("com.google.guava:guava:33.0.0-jre") - - implementation(tools.spotbugs.annotations) - implementation(logginglibs.slf4j) - - testImplementation(testinglibs.junit.jupiter) - testImplementation(testinglibs.hamcrest) - testImplementation(testinglibs.assertj.core) - testImplementation(testinglibs.mockito.core) - testImplementation(testinglibs.jqwik) - // is provided by "jqwik", but need this in testImplementation scope - testImplementation(testinglibs.jqwik.engine) - - testImplementation(apache.kafka.connect.api) - testImplementation(apache.kafka.connect.runtime) - testImplementation(apache.kafka.connect.json) - testImplementation("com.google.cloud:google-cloud-nio:0.127.16") - - testImplementation(compressionlibs.snappy) - testImplementation(compressionlibs.zstd.jni) - testImplementation (apache.parquet.tools) { - exclude(group = "org.slf4j", module = "slf4j-api") - } - testImplementation(apache.hadoop.mapreduce.client.core) { - exclude(group = "org.apache.hadoop", module = "hadoop-yarn-client") - exclude(group = "org.apache.hadoop.thirdparty", module = 
"hadoop-shaded-protobuf_3_7") - exclude(group = "com.google.guava", module = "guava") - exclude(group = "commons-cli", module = "commons-cli") - exclude(group = "org.apache.commons", module = "commons-math3") - exclude(group = "org.apache.httpcomponents", module = "httpclient") - exclude(group = "commons-codec", module = "commons-codec") - exclude(group = "commons-io", module = "commons-io") - exclude(group = "commons-net", module = "commons-net") - exclude(group = "org.eclipse.jetty") - exclude(group = "org.eclipse.jetty.websocket") - exclude(group = "javax.servlet") - exclude(group = "javax.servlet.jsp") - exclude(group = "javax.activation") - exclude(group = "com.sun.jersey") - exclude(group = "log4j") - exclude(group = "org.apache.commons", module = "commons-text") - exclude(group = "org.slf4j", module = "slf4j-api") - exclude(group = "org.apache.hadoop", module = "hadoop-auth") - exclude(group = "org.apache.hadoop", module = "hadoop-yarn-api") - exclude(group = "com.google.re2j") - exclude(group = "com.google.protobuf") - exclude(group = "com.google.code.gson") - exclude(group = "com.jcraft") - exclude(group = "org.apache.curator") - exclude(group = "org.apache.zookeeper") - exclude(group = "org.apache.htrace") - exclude(group = "com.google.code.findbugs") - exclude(group = "org.apache.kerby") - exclude(group = "com.fasterxml.jackson.core") - exclude(group = "com.fasterxml.woodstox", module = "woodstox-core:5.0.3") - exclude(group = "org.apache.avro", module = "avro") - exclude(group = "org.apache.hadoop", module = "hadoop-yarn-common") - exclude(group = "com.google.inject.extensions", module = "guice-servlet") - exclude(group = "io.netty", module = "netty") - } - - testRuntimeOnly(logginglibs.slf4j.log4j12) - - integrationTestImplementation(testinglibs.wiremock) - integrationTestImplementation(testcontainers.junit.jupiter) - integrationTestImplementation(testcontainers.kafka) // this is not Kafka version - integrationTestImplementation(testinglibs.awaitility) - - integrationTestImplementation(apache.kafka.connect.transforms) - // TODO: add avro-converter to ConnectRunner via plugin.path instead of on worker classpath - integrationTestImplementation(confluent.kafka.connect.avro.converter) { - exclude(group = "org.apache.kafka", module = "kafka-clients") - } - - // Make test utils from "test" available in "integration-test" - integrationTestImplementation(sourceSets["test"].output) + compileOnly(apache.kafka.connect.api) + compileOnly(apache.kafka.connect.runtime) + + implementation(project(":commons")) + + implementation("com.google.cloud:google-cloud-storage:2.37.0") { + exclude(group = "com.google.guava", module = "guava") + } + // TODO: document why specific version of guava is required + implementation("com.google.guava:guava:33.0.0-jre") + + implementation(tools.spotbugs.annotations) + implementation(logginglibs.slf4j) + + testImplementation(testinglibs.junit.jupiter) + testImplementation(testinglibs.hamcrest) + testImplementation(testinglibs.assertj.core) + testImplementation(testinglibs.mockito.core) + testImplementation(testinglibs.jqwik) + // is provided by "jqwik", but need this in testImplementation scope + testImplementation(testinglibs.jqwik.engine) + + testImplementation(apache.kafka.connect.api) + testImplementation(apache.kafka.connect.runtime) + testImplementation(apache.kafka.connect.json) + testImplementation("com.google.cloud:google-cloud-nio:0.127.16") + + testImplementation(compressionlibs.snappy) + testImplementation(compressionlibs.zstd.jni) + 
testImplementation(apache.parquet.tools) { exclude(group = "org.slf4j", module = "slf4j-api") } + testImplementation(apache.hadoop.mapreduce.client.core) { + exclude(group = "org.apache.hadoop", module = "hadoop-yarn-client") + exclude(group = "org.apache.hadoop.thirdparty", module = "hadoop-shaded-protobuf_3_7") + exclude(group = "com.google.guava", module = "guava") + exclude(group = "commons-cli", module = "commons-cli") + exclude(group = "org.apache.commons", module = "commons-math3") + exclude(group = "org.apache.httpcomponents", module = "httpclient") + exclude(group = "commons-codec", module = "commons-codec") + exclude(group = "commons-io", module = "commons-io") + exclude(group = "commons-net", module = "commons-net") + exclude(group = "org.eclipse.jetty") + exclude(group = "org.eclipse.jetty.websocket") + exclude(group = "javax.servlet") + exclude(group = "javax.servlet.jsp") + exclude(group = "javax.activation") + exclude(group = "com.sun.jersey") + exclude(group = "log4j") + exclude(group = "org.apache.commons", module = "commons-text") + exclude(group = "org.slf4j", module = "slf4j-api") + exclude(group = "org.apache.hadoop", module = "hadoop-auth") + exclude(group = "org.apache.hadoop", module = "hadoop-yarn-api") + exclude(group = "com.google.re2j") + exclude(group = "com.google.protobuf") + exclude(group = "com.google.code.gson") + exclude(group = "com.jcraft") + exclude(group = "org.apache.curator") + exclude(group = "org.apache.zookeeper") + exclude(group = "org.apache.htrace") + exclude(group = "com.google.code.findbugs") + exclude(group = "org.apache.kerby") + exclude(group = "com.fasterxml.jackson.core") + exclude(group = "com.fasterxml.woodstox", module = "woodstox-core:5.0.3") + exclude(group = "org.apache.avro", module = "avro") + exclude(group = "org.apache.hadoop", module = "hadoop-yarn-common") + exclude(group = "com.google.inject.extensions", module = "guice-servlet") + exclude(group = "io.netty", module = "netty") + } + + testRuntimeOnly(logginglibs.slf4j.log4j12) + + integrationTestImplementation(testinglibs.wiremock) + integrationTestImplementation(testcontainers.junit.jupiter) + integrationTestImplementation(testcontainers.kafka) // this is not Kafka version + integrationTestImplementation(testinglibs.awaitility) + + integrationTestImplementation(apache.kafka.connect.transforms) + // TODO: add avro-converter to ConnectRunner via plugin.path instead of on worker classpath + integrationTestImplementation(confluent.kafka.connect.avro.converter) { + exclude(group = "org.apache.kafka", module = "kafka-clients") + } + + // Make test utils from "test" available in "integration-test" + integrationTestImplementation(sourceSets["test"].output) } tasks.named("pmdIntegrationTest") { - ruleSetFiles = files("${project.rootDir}/gradle-config/aiven-pmd-test-ruleset.xml") - ruleSets = emptyList() // Clear the default rulesets + ruleSetFiles = files("${project.rootDir}/gradle-config/aiven-pmd-test-ruleset.xml") + ruleSets = emptyList() // Clear the default rulesets } tasks.named("spotbugsIntegrationTest") { - reports.create("html") { - setStylesheet("fancy-hist.xsl") - } + reports.create("html") { setStylesheet("fancy-hist.xsl") } } tasks.processResources { - filesMatching("gcs-connector-for-apache-kafka-version.properties") { - expand(mapOf("version" to version)) - } + filesMatching("gcs-connector-for-apache-kafka-version.properties") { + expand(mapOf("version" to version)) + } } -tasks.jar { - manifest { - attributes(mapOf("Version" to project.version)) - } -} +tasks.jar { 
manifest { attributes(mapOf("Version" to project.version)) } } -tasks.distTar { - dependsOn(":commons:jar") -} -tasks.distZip { - dependsOn(":commons:jar") -} +tasks.distTar { dependsOn(":commons:jar") } + +tasks.distZip { dependsOn(":commons:jar") } distributions { - main { - contents { - from("jar") - from(configurations.runtimeClasspath.get().map { if (it.isDirectory) it else zipTree(it) }) - } + main { + contents { + from("jar") + from(configurations.runtimeClasspath.get().map { if (it.isDirectory) it else zipTree(it) }) } + } } publishing { - publications { - create("publishMavenJavaArtifact") { - groupId = group.toString() - artifactId = "gcs-connector-for-apache-kafka" - version = version.toString() - - from(components["java"]) - - pom { - name = "Aiven's GCS Sink Connector for Apache Kafka" - description = "Aiven's GCS Sink Connector for Apache Kafka" - url = "https://github.com/aiven/gcs-connector-for-apache-kafka" - organization { - name = "Aiven Oy" - url = "https://aiven.io" - } - - licenses { - license { - name = "Apache 2.0" - url = "http://www.apache.org/licenses/LICENSE-2.0" - distribution = "repo" - } - } - - developers { - developer { - id = "aiven" - name = "Aiven Opensource" - email = "opensource@aiven.io" - } - } - - scm { - connection = "scm:git:git://github.com:aiven/gcs-connector-for-apache-kafka.git" - developerConnection = "scm:git:ssh://github.com:aiven/gcs-connector-for-apache-kafka.git" - url = "https://github.com/aiven/gcs-connector-for-apache-kafka" - } - } + publications { + create("publishMavenJavaArtifact") { + groupId = group.toString() + artifactId = "gcs-connector-for-apache-kafka" + version = version.toString() + + from(components["java"]) + + pom { + name = "Aiven's GCS Sink Connector for Apache Kafka" + description = "Aiven's GCS Sink Connector for Apache Kafka" + url = "https://github.com/aiven/gcs-connector-for-apache-kafka" + organization { + name = "Aiven Oy" + url = "https://aiven.io" } - } - repositories { - maven { - name = "sonatype" + licenses { + license { + name = "Apache 2.0" + url = "http://www.apache.org/licenses/LICENSE-2.0" + distribution = "repo" + } + } - val releasesRepoUrl = uri("https://oss.sonatype.org/service/local/staging/deploy/maven2") - val snapshotsRepoUrl = uri("https://oss.sonatype.org/content/repositories/snapshots") - url = if (version.toString().endsWith("SNAPSHOT")) snapshotsRepoUrl else releasesRepoUrl + developers { + developer { + id = "aiven" + name = "Aiven Opensource" + email = "opensource@aiven.io" + } + } - credentials(PasswordCredentials::class) + scm { + connection = "scm:git:git://github.com:aiven/gcs-connector-for-apache-kafka.git" + developerConnection = "scm:git:ssh://github.com:aiven/gcs-connector-for-apache-kafka.git" + url = "https://github.com/aiven/gcs-connector-for-apache-kafka" } + } + } + } + + repositories { + maven { + name = "sonatype" + + val releasesRepoUrl = uri("https://oss.sonatype.org/service/local/staging/deploy/maven2") + val snapshotsRepoUrl = uri("https://oss.sonatype.org/content/repositories/snapshots") + url = if (version.toString().endsWith("SNAPSHOT")) snapshotsRepoUrl else releasesRepoUrl + + credentials(PasswordCredentials::class) } + } } signing { - sign(publishing.publications["publishMavenJavaArtifact"]) - useGpgCmd() - // Some issue in the plugin: - // GPG outputs already armored signatures. The plugin also does armoring for `asc` files. - // This results in double armored signatures, i.e. garbage. 
- // Override the signature type provider to use unarmored output for `asc` files, which works well with GPG. - class ASCSignatureProvider() : AbstractSignatureTypeProvider() { - val binary = object: BinarySignatureType() { - override fun getExtension(): String { - return "asc"; - } - } - init { - register(binary) - setDefaultType(binary.extension) + sign(publishing.publications["publishMavenJavaArtifact"]) + useGpgCmd() + // Some issue in the plugin: + // GPG outputs already armored signatures. The plugin also does armoring for `asc` files. + // This results in double armored signatures, i.e. garbage. + // Override the signature type provider to use unarmored output for `asc` files, which works well + // with GPG. + class ASCSignatureProvider() : AbstractSignatureTypeProvider() { + val binary = + object : BinarySignatureType() { + override fun getExtension(): String { + return "asc" + } } + + init { + register(binary) + setDefaultType(binary.extension) } - signatureTypes = ASCSignatureProvider() -} \ No newline at end of file + } + signatureTypes = ASCSignatureProvider() +} diff --git a/s3-connector/build.gradle.kts b/s3-connector/build.gradle.kts index eae29abfd..3288097cf 100644 --- a/s3-connector/build.gradle.kts +++ b/s3-connector/build.gradle.kts @@ -16,264 +16,249 @@ import com.github.spotbugs.snom.SpotBugsTask * limitations under the License. */ -plugins { - id("aiven-apache-kafka-connectors-all.java-conventions") -} - -val amazonS3Version by extra ("1.12.729") -val amazonSTSVersion by extra ("1.12.729") -val s3mockVersion by extra ("0.2.6") - -val integrationTest: SourceSet = sourceSets.create("integrationTest") { - java { - srcDir("src/integration-test/java") +plugins { id("aiven-apache-kafka-connectors-all.java-conventions") } + +val amazonS3Version by extra("1.12.729") +val amazonSTSVersion by extra("1.12.729") +val s3mockVersion by extra("0.2.6") + +val integrationTest: SourceSet = + sourceSets.create("integrationTest") { + java { srcDir("src/integration-test/java") } + resources { srcDir("src/integration-test/resources") } + compileClasspath += sourceSets.main.get().output + configurations.testRuntimeClasspath.get() + runtimeClasspath += output + compileClasspath } - resources { - srcDir("src/integration-test/resources") - } - compileClasspath += sourceSets.main.get().output + configurations.testRuntimeClasspath.get() - runtimeClasspath += output + compileClasspath -} -val integrationTestImplementation: Configuration by configurations.getting { - extendsFrom(configurations.implementation.get()) -} +val integrationTestImplementation: Configuration by + configurations.getting { extendsFrom(configurations.implementation.get()) } tasks.register("integrationTest") { - description = "Runs the integration tests." - group = "verification" - testClassesDirs = integrationTest.output.classesDirs - classpath = integrationTest.runtimeClasspath - - // defines testing order - shouldRunAfter("test") - // requires archive for connect runner - dependsOn("distTar") - useJUnitPlatform() - - // Run always. - outputs.upToDateWhen { false } - - val distTarTask = tasks.get("distTar") as Tar - val distributionFilePath = distTarTask.archiveFile.get().asFile.path - systemProperty("integration-test.distribution.file.path", distributionFilePath) + description = "Runs the integration tests." 
+ group = "verification" + testClassesDirs = integrationTest.output.classesDirs + classpath = integrationTest.runtimeClasspath + + // defines testing order + shouldRunAfter("test") + // requires archive for connect runner + dependsOn("distTar") + useJUnitPlatform() + + // Run always. + outputs.upToDateWhen { false } + + val distTarTask = tasks.get("distTar") as Tar + val distributionFilePath = distTarTask.archiveFile.get().asFile.path + systemProperty("integration-test.distribution.file.path", distributionFilePath) } idea { - module { - testSources.from(integrationTest.java.srcDirs) - testSources.from(integrationTest.resources.srcDirs) - } + module { + testSources.from(integrationTest.java.srcDirs) + testSources.from(integrationTest.resources.srcDirs) + } } dependencies { - compileOnly(apache.kafka.connect.api) - compileOnly(apache.kafka.connect.runtime) - - implementation(project(":commons")) - - implementation(tools.spotbugs.annotations) - implementation(logginglibs.slf4j) - implementation("com.amazonaws:aws-java-sdk-s3:$amazonS3Version") - implementation("com.amazonaws:aws-java-sdk-sts:$amazonSTSVersion") - - testImplementation(compressionlibs.snappy) - testImplementation(compressionlibs.zstd.jni) - - testImplementation(apache.kafka.connect.api) - testImplementation(apache.kafka.connect.runtime) - testImplementation(apache.kafka.connect.json) - - testImplementation(testinglibs.junit.jupiter) - testImplementation(testinglibs.assertj.core) - - testImplementation("io.findify:s3mock_2.11:$s3mockVersion") - - testImplementation(testinglibs.mockito.core) - - testRuntimeOnly(testinglibs.junit.jupiter.engine) - testImplementation(testinglibs.mockito.junit.jupiter) - - testRuntimeOnly(logginglibs.logback.classic) - - integrationTestImplementation(testinglibs.localstack) - integrationTestImplementation(testcontainers.junit.jupiter) - integrationTestImplementation(testcontainers.kafka) // this is not Kafka version - integrationTestImplementation(testcontainers.localstack) - integrationTestImplementation(testinglibs.wiremock) - - // TODO: add avro-converter to ConnectRunner via plugin.path instead of on worker classpath - integrationTestImplementation(confluent.kafka.connect.avro.converter) { - exclude(group = "org.apache.kafka", module = "kafka-clients") - } - - integrationTestImplementation(apache.avro) - - testImplementation (apache.parquet.tools) { - exclude(group = "org.slf4j", module = "slf4j-api") - } - testImplementation(apache.hadoop.mapreduce.client.core) { - exclude(group = "org.apache.hadoop", module = "hadoop-yarn-client") - exclude(group = "org.apache.hadoop.thirdparty", module = "hadoop-shaded-protobuf_3_7") - exclude(group = "com.google.guava", module = "guava") - exclude(group = "commons-cli", module = "commons-cli") - exclude(group = "org.apache.commons", module = "commons-math3") - exclude(group = "org.apache.httpcomponents", module = "httpclient") - exclude(group = "commons-codec", module = "commons-codec") - exclude(group = "commons-io", module = "commons-io") - exclude(group = "commons-net", module = "commons-net") - exclude(group = "org.eclipse.jetty") - exclude(group = "org.eclipse.jetty.websocket") - exclude(group = "javax.servlet") - exclude(group = "javax.servlet.jsp") - exclude(group = "javax.activation") - exclude(group = "com.sun.jersey") - exclude(group = "log4j") - exclude(group = "org.apache.commons", module = "commons-text") - exclude(group = "org.slf4j", module = "slf4j-api") - exclude(group = "org.apache.hadoop", module = "hadoop-auth") - exclude(group = 
"org.apache.hadoop", module = "hadoop-yarn-api") - exclude(group = "com.google.re2j") - exclude(group = "com.google.protobuf") - exclude(group = "com.google.code.gson") - exclude(group = "com.jcraft") - exclude(group = "org.apache.curator") - exclude(group = "org.apache.zookeeper") - exclude(group = "org.apache.htrace") - exclude(group = "com.google.code.findbugs") - exclude(group = "org.apache.kerby") - exclude(group = "com.fasterxml.jackson.core") - exclude(group = "com.fasterxml.woodstox", module = "woodstox-core:5.0.3") - exclude(group = "org.apache.avro", module = "avro") - exclude(group = "org.apache.hadoop", module = "hadoop-yarn-common") - exclude(group = "com.google.inject.extensions", module = "guice-servlet") - exclude(group = "io.netty", module = "netty") - } - - // Make test utils from 'test' available in 'integration-test' - integrationTestImplementation(sourceSets["test"].output) - integrationTestImplementation(testinglibs.awaitility) + compileOnly(apache.kafka.connect.api) + compileOnly(apache.kafka.connect.runtime) + + implementation(project(":commons")) + + implementation(tools.spotbugs.annotations) + implementation(logginglibs.slf4j) + implementation("com.amazonaws:aws-java-sdk-s3:$amazonS3Version") + implementation("com.amazonaws:aws-java-sdk-sts:$amazonSTSVersion") + + testImplementation(compressionlibs.snappy) + testImplementation(compressionlibs.zstd.jni) + + testImplementation(apache.kafka.connect.api) + testImplementation(apache.kafka.connect.runtime) + testImplementation(apache.kafka.connect.json) + + testImplementation(testinglibs.junit.jupiter) + testImplementation(testinglibs.assertj.core) + + testImplementation("io.findify:s3mock_2.11:$s3mockVersion") + + testImplementation(testinglibs.mockito.core) + + testRuntimeOnly(testinglibs.junit.jupiter.engine) + testImplementation(testinglibs.mockito.junit.jupiter) + + testRuntimeOnly(logginglibs.logback.classic) + + integrationTestImplementation(testinglibs.localstack) + integrationTestImplementation(testcontainers.junit.jupiter) + integrationTestImplementation(testcontainers.kafka) // this is not Kafka version + integrationTestImplementation(testcontainers.localstack) + integrationTestImplementation(testinglibs.wiremock) + + // TODO: add avro-converter to ConnectRunner via plugin.path instead of on worker classpath + integrationTestImplementation(confluent.kafka.connect.avro.converter) { + exclude(group = "org.apache.kafka", module = "kafka-clients") + } + + integrationTestImplementation(apache.avro) + + testImplementation(apache.parquet.tools) { exclude(group = "org.slf4j", module = "slf4j-api") } + testImplementation(apache.hadoop.mapreduce.client.core) { + exclude(group = "org.apache.hadoop", module = "hadoop-yarn-client") + exclude(group = "org.apache.hadoop.thirdparty", module = "hadoop-shaded-protobuf_3_7") + exclude(group = "com.google.guava", module = "guava") + exclude(group = "commons-cli", module = "commons-cli") + exclude(group = "org.apache.commons", module = "commons-math3") + exclude(group = "org.apache.httpcomponents", module = "httpclient") + exclude(group = "commons-codec", module = "commons-codec") + exclude(group = "commons-io", module = "commons-io") + exclude(group = "commons-net", module = "commons-net") + exclude(group = "org.eclipse.jetty") + exclude(group = "org.eclipse.jetty.websocket") + exclude(group = "javax.servlet") + exclude(group = "javax.servlet.jsp") + exclude(group = "javax.activation") + exclude(group = "com.sun.jersey") + exclude(group = "log4j") + exclude(group = 
"org.apache.commons", module = "commons-text") + exclude(group = "org.slf4j", module = "slf4j-api") + exclude(group = "org.apache.hadoop", module = "hadoop-auth") + exclude(group = "org.apache.hadoop", module = "hadoop-yarn-api") + exclude(group = "com.google.re2j") + exclude(group = "com.google.protobuf") + exclude(group = "com.google.code.gson") + exclude(group = "com.jcraft") + exclude(group = "org.apache.curator") + exclude(group = "org.apache.zookeeper") + exclude(group = "org.apache.htrace") + exclude(group = "com.google.code.findbugs") + exclude(group = "org.apache.kerby") + exclude(group = "com.fasterxml.jackson.core") + exclude(group = "com.fasterxml.woodstox", module = "woodstox-core:5.0.3") + exclude(group = "org.apache.avro", module = "avro") + exclude(group = "org.apache.hadoop", module = "hadoop-yarn-common") + exclude(group = "com.google.inject.extensions", module = "guice-servlet") + exclude(group = "io.netty", module = "netty") + } + + // Make test utils from 'test' available in 'integration-test' + integrationTestImplementation(sourceSets["test"].output) + integrationTestImplementation(testinglibs.awaitility) } tasks.named("pmdIntegrationTest") { - ruleSetFiles = files("${project.rootDir}/gradle-config/aiven-pmd-test-ruleset.xml") - ruleSets = emptyList() // Clear the default rulesets + ruleSetFiles = files("${project.rootDir}/gradle-config/aiven-pmd-test-ruleset.xml") + ruleSets = emptyList() // Clear the default rulesets } tasks.named("spotbugsIntegrationTest") { - reports.create("html") { - setStylesheet("fancy-hist.xsl") - } + reports.create("html") { setStylesheet("fancy-hist.xsl") } } tasks.processResources { - filesMatching("s3-connector-for-apache-kafka-version.properties") { - expand(mapOf("version" to version)) - } + filesMatching("s3-connector-for-apache-kafka-version.properties") { + expand(mapOf("version" to version)) + } } -tasks.jar { - manifest { - attributes(mapOf("Version" to project.version)) - } -} +tasks.jar { manifest { attributes(mapOf("Version" to project.version)) } } -tasks.distTar { - dependsOn(":commons:jar") -} -tasks.distZip { - dependsOn(":commons:jar") -} +tasks.distTar { dependsOn(":commons:jar") } -distributions { - main { - contents { - from("jar") - from(configurations.runtimeClasspath.get().map { if (it.isDirectory) it else zipTree(it) }) +tasks.distZip { dependsOn(":commons:jar") } - - into("/") { - from("$projectDir") - include("version.txt", "README*", "LICENSE*", "NOTICE*", "licenses/") - include("config/") - } - } +distributions { + main { + contents { + from("jar") + from(configurations.runtimeClasspath.get().map { if (it.isDirectory) it else zipTree(it) }) + + into("/") { + from("$projectDir") + include("version.txt", "README*", "LICENSE*", "NOTICE*", "licenses/") + include("config/") + } } + } } publishing { - publications { - create("publishMavenJavaArtifact") { - groupId = group.toString() - artifactId = "s3-connector-for-apache-kafka" - version = version.toString() - - from(components["java"]) - - pom { - name = "Aiven's S3 Sink Connector for Apache Kafka" - description = "Aiven's S3 Sink Connector for Apache Kafka" - url = "https://github.com/aiven-open/s3-connector-for-apache-kafka" - organization { - name = "Aiven Oy" - url = "https://aiven.io" - } - - licenses { - license { - name = "Apache 2.0" - url = "http://www.apache.org/licenses/LICENSE-2.0" - distribution = "repo" - } - } - - developers { - developer { - id = "aiven" - name = "Aiven Opensource" - email = "opensource@aiven.io" - } - } - - scm { - connection = 
"scm:git:git://github.com:aiven/s3-connector-for-apache-kafka.git" - developerConnection = "scm:git:ssh://github.com:aiven/s3-connector-for-apache-kafka.git" - url = "https://github.com/aiven-open/s3-connector-for-apache-kafka" - } - } + publications { + create("publishMavenJavaArtifact") { + groupId = group.toString() + artifactId = "s3-connector-for-apache-kafka" + version = version.toString() + + from(components["java"]) + + pom { + name = "Aiven's S3 Sink Connector for Apache Kafka" + description = "Aiven's S3 Sink Connector for Apache Kafka" + url = "https://github.com/aiven-open/s3-connector-for-apache-kafka" + organization { + name = "Aiven Oy" + url = "https://aiven.io" } - } - repositories { - maven { - name = "sonatype" + licenses { + license { + name = "Apache 2.0" + url = "http://www.apache.org/licenses/LICENSE-2.0" + distribution = "repo" + } + } - val releasesRepoUrl = uri("https://oss.sonatype.org/service/local/staging/deploy/maven2") - val snapshotsRepoUrl = uri("https://oss.sonatype.org/content/repositories/snapshots") - url = if (version.toString().endsWith("SNAPSHOT")) snapshotsRepoUrl else releasesRepoUrl + developers { + developer { + id = "aiven" + name = "Aiven Opensource" + email = "opensource@aiven.io" + } + } - credentials(PasswordCredentials::class) + scm { + connection = "scm:git:git://github.com:aiven/s3-connector-for-apache-kafka.git" + developerConnection = "scm:git:ssh://github.com:aiven/s3-connector-for-apache-kafka.git" + url = "https://github.com/aiven-open/s3-connector-for-apache-kafka" } + } + } + } + + repositories { + maven { + name = "sonatype" + + val releasesRepoUrl = uri("https://oss.sonatype.org/service/local/staging/deploy/maven2") + val snapshotsRepoUrl = uri("https://oss.sonatype.org/content/repositories/snapshots") + url = if (version.toString().endsWith("SNAPSHOT")) snapshotsRepoUrl else releasesRepoUrl + + credentials(PasswordCredentials::class) } + } } signing { - sign(publishing.publications["publishMavenJavaArtifact"]) - useGpgCmd() - // Some issue in the plugin: - // GPG outputs already armored signatures. The plugin also does armoring for `asc` files. - // This results in double armored signatures, i.e. garbage. - // Override the signature type provider to use unarmored output for `asc` files, which works well with GPG. - class ASCSignatureProvider() : AbstractSignatureTypeProvider() { - val binary = object: BinarySignatureType() { - override fun getExtension(): String { - return "asc"; - } - } - init { - register(binary) - setDefaultType(binary.extension) + sign(publishing.publications["publishMavenJavaArtifact"]) + useGpgCmd() + // Some issue in the plugin: + // GPG outputs already armored signatures. The plugin also does armoring for `asc` files. + // This results in double armored signatures, i.e. garbage. + // Override the signature type provider to use unarmored output for `asc` files, which works well + // with GPG. + class ASCSignatureProvider() : AbstractSignatureTypeProvider() { + val binary = + object : BinarySignatureType() { + override fun getExtension(): String { + return "asc" + } } + + init { + register(binary) + setDefaultType(binary.extension) } - signatureTypes = ASCSignatureProvider() + } + signatureTypes = ASCSignatureProvider() }