Merge pull request #161 from jillesvangurp/elastic-cloud-auth
elastic cloud auth
jillesvangurp authored Jan 2, 2025
2 parents dd019ea + 8b21974 commit aac097f
Showing 20 changed files with 205 additions and 304 deletions.
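This PR wires Elastic Cloud authentication into the Ktor-based REST client: basic auth credentials or an Elastic API key can be passed straight to KtorRestClient, and defaultKtorHttpClient gains a configuration block. A minimal connection sketch, assuming the constructor parameters visible in the KtorRestClient diff further down (host, https, user, password, elasticApiKey); endpoint and key values are placeholders:

import com.jillesvangurp.ktsearch.KtorRestClient

// Sketch only: parameter names are taken from the diff below; all values are placeholders.
val cloudClient = KtorRestClient(
    host = "my-deployment.es.example-region.cloud.es.io", // Elastic Cloud endpoint (placeholder)
    https = true,
    // sent as an "Authorization: ApiKey <key>" header by the new defaultInit helper
    elasticApiKey = "base64-encoded-api-key"
)

// Alternatively, basic auth: defaultInit installs Ktor's Auth plugin with these credentials.
val basicAuthClient = KtorRestClient(
    host = "localhost",
    user = "elastic",
    password = "secret"
)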
Binary file modified gradle/wrapper/gradle-wrapper.jar
2 changes: 1 addition & 1 deletion gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
6 changes: 4 additions & 2 deletions gradlew
@@ -15,6 +15,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+# SPDX-License-Identifier: Apache-2.0
+#

##############################################################################
#
@@ -55,7 +57,7 @@
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
-# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
@@ -84,7 +86,7 @@ done
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
-APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
+APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
2 changes: 2 additions & 0 deletions gradlew.bat
@@ -13,6 +13,8 @@
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
+@rem SPDX-License-Identifier: Apache-2.0
+@rem

@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@@ -2,6 +2,12 @@ package com.jillesvangurp.ktsearch

import io.ktor.client.*
import io.ktor.client.call.*
+import io.ktor.client.plugins.auth.Auth
+import io.ktor.client.plugins.auth.providers.BasicAuthCredentials
+import io.ktor.client.plugins.auth.providers.basic
+import io.ktor.client.plugins.defaultRequest
+import io.ktor.client.plugins.logging.LogLevel
+import io.ktor.client.plugins.logging.Logging
import io.ktor.client.request.*
import io.ktor.client.statement.*
import io.ktor.content.*
@@ -13,8 +19,47 @@ expect fun defaultKtorHttpClient(
user: String? = null,
password: String? = null,
elasticApiKey: String? = null,
block: HttpClientConfig<*>.()->Unit
): HttpClient

fun HttpClientConfig<*>.defaultInit(
logging: Boolean ,
user: String? ,
password: String? ,
elasticApiKey: String?,

) {
engine {
pipelining = true
}
if(!user.isNullOrBlank() && !password.isNullOrBlank()) {
install(Auth) {
basic {
credentials {
BasicAuthCredentials(user, password)
}
sendWithoutRequest {
true
}
}
}
}
if(!elasticApiKey.isNullOrBlank()) {
defaultRequest {
header("Authorization","ApiKey $elasticApiKey")
}
}
if (logging) {
install(Logging) {
level = LogLevel.ALL
}
} else {
install(Logging) {
level = LogLevel.NONE
}
}
}

/**
* Ktor-client implementation of the RestClient.
*/
@@ -35,7 +80,9 @@ class KtorRestClient(
user = user,
password = password,
elasticApiKey = elasticApiKey
-),
+) {
+    defaultInit(logging, user, password, elasticApiKey)
+},
) : RestClient, Closeable {
constructor(
host: String = "localhost",
@@ -50,7 +97,9 @@
user = user,
password = password,
elasticApiKey = elasticApiKey
-),
+) {
+    defaultInit(logging, user, password, elasticApiKey)
+},
) : this(
client = client,
https = https,
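The new defaultInit extension above centralizes the auth wiring: when user and password are both non-blank it installs Ktor's Auth plugin with BasicAuthCredentials and sendWithoutRequest, when an API key is present it adds an Authorization: ApiKey default request header, and logging toggles between LogLevel.ALL and LogLevel.NONE. The trailing block parameter on defaultKtorHttpClient lets callers layer their own Ktor configuration on top, the way the KtorRestClient constructors now do. A sketch of such a call, assuming the signature shown above; the HttpTimeout plugin and its values are illustrative assumptions, not something this PR configures:

import com.jillesvangurp.ktsearch.defaultInit
import com.jillesvangurp.ktsearch.defaultKtorHttpClient
import io.ktor.client.plugins.HttpTimeout

// The positional 'true' enables logging, mirroring the test setup later in this diff.
val httpClient = defaultKtorHttpClient(true, user = null, password = null, elasticApiKey = null) {
    // reuse the library's auth/logging defaults, then add extra client configuration
    defaultInit(logging = true, user = null, password = null, elasticApiKey = null)
    install(HttpTimeout) {
        requestTimeoutMillis = 30_000 // assumed value, not set anywhere in this PR
    }
}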
@@ -1,14 +1,13 @@
package com.jillesvangurp.ktsearch

import io.kotest.matchers.shouldBe
-import kotlinx.coroutines.test.runTest
import kotlinx.serialization.json.*
import kotlin.test.Test
import kotlin.time.Duration.Companion.seconds

class IndexCreateTest: SearchTestBase() {
@Test
-    fun createIndex() = runTest {
+    fun createIndex() = coRun {
val indexName = randomIndexName()
val response = client.createIndex(indexName) {
dynamicTemplate("test_fields") {
Expand Down
@@ -9,7 +9,7 @@ import kotlin.test.Test
class IndexTemplateTest : SearchTestBase() {

@Test
-    @IgnoreJs // keeps failing with some weird error
+    @IgnoreJs // keeps failing with some weird fail to fetch error in js/wasm, no idea why
fun shouldCreateDataStream() = coRun {

val suffix = Random.nextULong()
@@ -6,124 +6,120 @@ import com.jillesvangurp.searchdsls.querydsl.ReindexOperationType.INDEX
import com.jillesvangurp.searchdsls.querydsl.ReindexVersionType.EXTERNAL
import com.jillesvangurp.searchdsls.querydsl.term
import io.kotest.matchers.shouldBe
-import kotlinx.coroutines.test.runTest
import kotlinx.serialization.json.jsonObject
import kotlin.test.AfterTest
import kotlin.test.BeforeTest
import kotlin.test.Test
import kotlin.time.Duration.Companion.seconds
import kotlinx.serialization.json.jsonObject

class ReindexTest : SearchTestBase() {
private val sourceName = randomIndexName()
private val destinationName = randomIndexName()

@BeforeTest
fun before() = coRun {
createIndices()
}
private suspend fun withSourceAndDestination(block: suspend (String, String)->Unit) {
val sourceName = randomIndexName()
val destinationName = randomIndexName()

@AfterTest
fun after() = coRun {
deleteIndices()
client.createIndex(sourceName, mapping = TestDocument.mapping)
client.createIndex(destinationName, mapping = TestDocument.mapping)
block(sourceName,destinationName)
client.deleteIndex(sourceName)
client.deleteIndex(destinationName)
}

@Test
fun basicReindex() = runTest {
client.indexDocument(sourceName, TestDocument(name = "t1"), refresh = WaitFor)

val response = client.reindex {
conflicts = PROCEED
source {
index = sourceName
fun basicReindex() = coRun {
withSourceAndDestination { sourceName, destinationName ->
client.indexDocument(sourceName, TestDocument(name = "t1"), refresh = WaitFor)

val response = client.reindex {
conflicts = PROCEED
source {
index = sourceName
}
destination {
index = destinationName
}
}
destination {
index = destinationName
}
}

response.shouldHave(total = 1, created = 1, batches = 1)
response.shouldHave(total = 1, created = 1, batches = 1)
}
}

@Test
fun basicReindexWithSpecificValue() = runTest {
client.indexDocument(sourceName, TestDocument(name = "t1"), refresh = WaitFor)

val response = client.reindex(
refresh = false,
timeout = 10.seconds,
waitForActiveShards = "1",
requestsPerSecond = 10,
requireAlias = false,
scroll = 10.seconds,
slices = 3,
maxDocs = 9
) {
conflicts = PROCEED
maxDocs = 9
source {
index = sourceName
batchSize = 10
fields("name")
}
destination {
index = destinationName
versionType = EXTERNAL
operationType = INDEX
fun basicReindexWithSpecificValue() = coRun {
withSourceAndDestination { sourceName, destinationName ->

client.indexDocument(sourceName, TestDocument(name = "t1"), refresh = WaitFor)

val response = client.reindex(
refresh = false,
timeout = 10.seconds,
waitForActiveShards = "1",
requestsPerSecond = 10,
requireAlias = false,
scroll = 10.seconds,
slices = 3,
maxDocs = 9
) {
conflicts = PROCEED
maxDocs = 9
source {
index = sourceName
batchSize = 10
fields("name")
}
destination {
index = destinationName
versionType = EXTERNAL
operationType = INDEX
}
}
}

response.shouldHave(total = 1, created = 1, batches = 1)
response.shouldHave(total = 1, created = 1, batches = 1)
}
}

@Test
fun reindexWithQuery() = runTest {
client.indexDocument(sourceName, TestDocument(name = "t1"), refresh = WaitFor)
client.indexDocument(sourceName, TestDocument(name = "t2"), refresh = WaitFor)

val response = client.reindex {
source {
index = sourceName
query = term("name", "t1")
fun reindexWithQuery() = coRun {
withSourceAndDestination { sourceName, destinationName ->

client.indexDocument(sourceName, TestDocument(name = "t1"), refresh = WaitFor)
client.indexDocument(sourceName, TestDocument(name = "t2"), refresh = WaitFor)

val response = client.reindex {
source {
index = sourceName
query = term("name", "t1")
}
destination {
index = destinationName
}
}
destination {
index = destinationName
}
}

response.shouldHave(total = 1, created = 1, batches = 1)
response.shouldHave(total = 1, created = 1, batches = 1)
}
}

@Test
fun asyncReindex() = runTest {
client.indexDocument(sourceName, TestDocument(name = "t1"), refresh = WaitFor)

val taskId = client.reindexAsync {
source {
index = sourceName
fun asyncReindex() = coRun {
withSourceAndDestination { sourceName, destinationName ->

client.indexDocument(sourceName, TestDocument(name = "t1"), refresh = WaitFor)

val taskId = client.reindexAsync {
source {
index = sourceName
}
destination {
index = destinationName
}
}
destination {
index = destinationName
}
}

val taskResponse = client.getTask(taskId.value, waitForCompletion = true)
val jsonResponse = requireNotNull(taskResponse["response"]?.jsonObject) { "response element is missing on $taskResponse"}
val response = jsonResponse.parse(ReindexResponse.serializer())

response.shouldHave(total = 1, created = 1, batches = 1)
}


private suspend fun deleteIndices() {
client.deleteIndex(sourceName)
client.deleteIndex(destinationName)
}
val taskResponse = client.getTask(taskId.value, waitForCompletion = true)
val jsonResponse =
requireNotNull(taskResponse["response"]?.jsonObject) { "response element is missing on $taskResponse" }
val response = jsonResponse.parse(ReindexResponse.serializer())

private suspend fun createIndices() {
client.createIndex(sourceName, mapping = TestDocument.mapping)
client.createIndex(destinationName, mapping = TestDocument.mapping)
response.shouldHave(total = 1, created = 1, batches = 1)
}
}

}

private fun ReindexResponse.shouldHave(
@@ -152,5 +148,4 @@ private fun ReindexResponse.shouldHave(
it.retries.search shouldBe retriesSearch
it.failures shouldBe failures
}

}
@@ -16,6 +16,8 @@ expect fun coRun(timeout: Duration = 30.seconds, block: suspend () -> Unit): Tes

@Suppress("EXPECT_ACTUAL_CLASSIFIERS_ARE_IN_BETA_WARNING")
@Target(AnnotationTarget.CLASS, AnnotationTarget.FUNCTION)
+// in js and wasm we have this implementation which causes the test to be ignored
+// actual typealias IgnoreJs = kotlin.test.Ignore
expect annotation class IgnoreJs()

private val logger = KotlinLogging.logger { }
@@ -66,7 +68,7 @@ open class SearchTestBase {
)
KtorRestClient(
nodes = nodes,
-            client = defaultKtorHttpClient(true),
+            client = defaultKtorHttpClient(true) {},
// sniffing is a bit weird in docker, publish address is not always reachable
nodeSelector = SniffingNodeSelector(initialNodes = nodes, maxNodeAge = 5.hours)
)
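The test changes in this PR swap kotlinx.coroutines.test.runTest for the multiplatform coRun helper declared above, which takes an optional timeout and pairs with @IgnoreJs for targets where a test cannot run. A sketch of a test written against that helper, assuming it sits alongside the fixtures used by the other tests in this diff (client, randomIndexName, and TestDocument from SearchTestBase and the shared test sources):

import kotlin.test.Test
import kotlin.time.Duration.Companion.seconds

class ExampleSearchTest : SearchTestBase() {
    @Test
    fun indexesASingleDocument() = coRun(timeout = 60.seconds) {
        // client, randomIndexName() and TestDocument come from the shared test fixtures
        val indexName = randomIndexName()
        client.indexDocument(indexName, TestDocument(name = "example"))
        client.deleteIndex(indexName)
    }
}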