supplier;
@Setup
public void setup() {
@@ -130,10 +130,10 @@ public void setup() {
switch (type) {
case "binary":
- supplier = () -> new Rounding.BinarySearchArrayRounding(values, size, null);
+ supplier = () -> new BinarySearcher(values, size);
break;
case "linear":
- supplier = () -> new Rounding.BidirectionalLinearSearchArrayRounding(values, size, null);
+ supplier = () -> new BidirectionalLinearSearcher(values, size);
break;
default:
throw new IllegalArgumentException("invalid type: " + type);
diff --git a/build.gradle b/build.gradle
index 9d62e942a4431..b1cd1d532bfeb 100644
--- a/build.gradle
+++ b/build.gradle
@@ -375,7 +375,7 @@ allprojects {
} else {
// Link to non-shadowed dependant projects
project.javadoc.dependsOn "${upstreamProject.path}:javadoc"
- String externalLinkName = upstreamProject.base.archivesBaseName
+ String externalLinkName = upstreamProject.base.archivesName
String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + externalLinkName.replaceAll('\\.', '/') + '/' + dep.version
String projectRelativePath = project.relativePath(upstreamProject.buildDir)
project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${projectRelativePath}/docs/javadoc/"
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 3dc689795151b..0efa170250a7b 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -103,32 +103,32 @@ dependencies {
api localGroovy()
api 'commons-codec:commons-codec:1.16.0'
- api 'org.apache.commons:commons-compress:1.23.0'
+ api 'org.apache.commons:commons-compress:1.25.0'
api 'org.apache.ant:ant:1.10.14'
api 'com.netflix.nebula:gradle-extra-configurations-plugin:10.0.0'
api 'com.netflix.nebula:nebula-publishing-plugin:20.3.0'
api 'com.netflix.nebula:gradle-info-plugin:12.1.6'
api 'org.apache.rat:apache-rat:0.15'
- api 'commons-io:commons-io:2.13.0'
+ api 'commons-io:commons-io:2.15.1'
api "net.java.dev.jna:jna:5.13.0"
api 'com.github.johnrengelman:shadow:8.1.1'
api 'org.jdom:jdom2:2.0.6.1'
api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}"
- api 'de.thetaphi:forbiddenapis:3.5.1'
- api 'com.avast.gradle:gradle-docker-compose-plugin:0.16.12'
+ api 'de.thetaphi:forbiddenapis:3.6'
+ api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.5'
api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}"
- api 'org.apache.maven:maven-model:3.9.4'
+ api 'org.apache.maven:maven-model:3.9.6'
api 'com.networknt:json-schema-validator:1.0.86'
api 'org.jruby.jcodings:jcodings:1.0.58'
api 'org.jruby.joni:joni:2.2.1'
api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}"
- api "org.ajoberstar.grgit:grgit-core:5.2.0"
+ api "org.ajoberstar.grgit:grgit-core:5.2.1"
testFixturesApi "junit:junit:${props.getProperty('junit')}"
testFixturesApi "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
testFixturesApi gradleApi()
testFixturesApi gradleTestKit()
- testImplementation 'com.github.tomakehurst:wiremock-jre8-standalone:2.35.0'
+ testImplementation 'org.wiremock:wiremock-standalone:3.3.1'
testImplementation "org.mockito:mockito-core:${props.getProperty('mockito')}"
integTestImplementation('org.spockframework:spock-core:2.3-groovy-3.0') {
exclude module: "groovy"
diff --git a/buildSrc/reaper/src/main/java/org/opensearch/gradle/reaper/Reaper.java b/buildSrc/reaper/src/main/java/org/opensearch/gradle/reaper/Reaper.java
index c5b4de157c75c..662510fbbf61c 100644
--- a/buildSrc/reaper/src/main/java/org/opensearch/gradle/reaper/Reaper.java
+++ b/buildSrc/reaper/src/main/java/org/opensearch/gradle/reaper/Reaper.java
@@ -45,17 +45,16 @@
/**
* A standalone process that will reap external services after a build dies.
- *
* Input
* Since how to reap a given service is platform and service dependent, this tool
* operates on system commands to execute. It takes a single argument, a directory
* that will contain files with reaping commands. Each line in each file will be
* executed with {@link Runtime#exec(String)}.
- *
+ *
* The main method will wait indefinitely on the parent process (Gradle) by
* reading from stdin. When Gradle shuts down, whether normally or abruptly, the
* pipe will be broken and read will return.
- *
+ *
* The reaper will then iterate over the files in the configured directory,
* and execute the given commands. If any commands fail, a failure message is
* written to stderr. Otherwise, the input file will be deleted. If no inputs
diff --git a/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy
index 556763333d279..13f5f8724c6f2 100644
--- a/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy
@@ -89,7 +89,7 @@ class PluginBuildPlugin implements Plugin {
String name = extension1.name
BasePluginExtension base = project.getExtensions().findByType(BasePluginExtension.class)
- base.archivesBaseName = name
+ base.archivesName = name
project.description = extension1.description
if (extension1.name == null) {
@@ -155,7 +155,7 @@ class PluginBuildPlugin implements Plugin {
// Only configure publishing if applied externally
if (extension.hasClientJar) {
project.pluginManager.apply('com.netflix.nebula.maven-base-publish')
- // Only change Jar tasks, we don't want a -client zip so we can't change archivesBaseName
+ // Only change Jar tasks, we don't want a -client zip so we can't change archivesName
project.tasks.withType(Jar) {
archiveBaseName = archiveBaseName.get() + "-client"
}
@@ -163,7 +163,7 @@ class PluginBuildPlugin implements Plugin {
project.publishing.publications.nebula(MavenPublication).artifactId(extension.name + "-client")
final BasePluginExtension base = project.getExtensions().findByType(BasePluginExtension.class)
project.tasks.withType(GenerateMavenPom.class).configureEach { GenerateMavenPom generatePOMTask ->
- generatePOMTask.destination = "${project.buildDir}/distributions/${base.archivesBaseName}-client-${project.versions.opensearch}.pom"
+ generatePOMTask.destination = "${project.buildDir}/distributions/${base.archivesName}-client-${project.versions.opensearch}.pom"
}
} else {
if (project.plugins.hasPlugin(MavenPublishPlugin)) {
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java b/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java
index cddd03ccc2019..4d45640b75e3d 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java
@@ -52,15 +52,15 @@
/**
* A container for opensearch supported version information used in BWC testing.
- *
+ *
* Parse the Java source file containing the versions declarations and use the known rules to figure out which are all
* the version the current one is wire and index compatible with.
* On top of this, figure out which of these are unreleased and provide the branch they can be built from.
- *
+ *
* Note that in this context, currentVersion is the unreleased version this build operates on.
* At any point in time there will surely be four such unreleased versions being worked on,
* thus currentVersion will be one of these.
- *
+ *
* Considering:
*
* M, M > 0
@@ -84,7 +84,7 @@
* Each build is only concerned with versions before it, as those are the ones that need to be tested
* for backwards compatibility. We never look forward, and don't add forward facing version number to branches of previous
* version.
- *
+ *
* Each branch has a current version, and expected compatible versions are parsed from the server code's Version` class.
* We can reliably figure out which the unreleased versions are due to the convention of always adding the next unreleased
* version number to server in all branches when a version is released.
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/LoggingOutputStream.java b/buildSrc/src/main/java/org/opensearch/gradle/LoggingOutputStream.java
index 5ae7ad1595e2f..5259700b3a63d 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/LoggingOutputStream.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/LoggingOutputStream.java
@@ -38,7 +38,7 @@
/**
* Writes data passed to this stream as log messages.
- *
+ *
* The stream will be flushed whenever a newline is detected.
* Allows setting an optional prefix before each line of output.
*/
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/PublishPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/PublishPlugin.java
index 97e923c366598..7ec21bba18c64 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/PublishPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/PublishPlugin.java
@@ -77,7 +77,7 @@ public void apply(Project project) {
}
private static String getArchivesBaseName(Project project) {
- return project.getExtensions().getByType(BasePluginExtension.class).getArchivesBaseName();
+ return project.getExtensions().getByType(BasePluginExtension.class).getArchivesName().get();
}
/**Configuration generation of maven poms. */
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java
index 159270d28e3d6..c6e49dc44d6bd 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java
@@ -76,7 +76,7 @@ public InternalBwcGitPlugin(ProviderFactory providerFactory, ExecOperations exec
public void apply(Project project) {
this.project = project;
this.gitExtension = project.getExtensions().create("bwcGitConfig", BwcGitExtension.class);
- Provider remote = providerFactory.systemProperty("bwc.remote").forUseAtConfigurationTime().orElse("opensearch-project");
+ Provider remote = providerFactory.systemProperty("bwc.remote").orElse("opensearch-project");
TaskContainer tasks = project.getTasks();
TaskProvider createCloneTaskProvider = tasks.register("createClone", LoggedExec.class, createClone -> {
@@ -105,7 +105,6 @@ public void apply(Project project) {
String remoteRepo = remote.get();
// for testing only we can override the base remote url
String remoteRepoUrl = providerFactory.systemProperty("testRemoteRepo")
- .forUseAtConfigurationTime()
.getOrElse("https://github.com/" + remoteRepo + "/OpenSearch.git");
addRemote.setCommandLine(asList("git", "remote", "add", remoteRepo, remoteRepoUrl));
});
@@ -113,7 +112,6 @@ public void apply(Project project) {
TaskProvider fetchLatestTaskProvider = tasks.register("fetchLatest", LoggedExec.class, fetchLatest -> {
Provider gitFetchLatest = project.getProviders()
.systemProperty("tests.bwc.git_fetch_latest")
- .forUseAtConfigurationTime()
.orElse("true")
.map(fetchProp -> {
if ("true".equals(fetchProp)) {
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/TestingConventionRule.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/TestingConventionRule.java
index aa81ef75701fa..db46d2e3edc55 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/TestingConventionRule.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/TestingConventionRule.java
@@ -40,7 +40,7 @@
/**
* Represent rules for tests enforced by the @{link {@link TestingConventionsTasks}}
- *
+ *
* Rules are identified by name, tests must have this name as a suffix and implement one of the base classes
* and be part of all the specified tasks.
*/
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTar.java b/buildSrc/src/main/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTar.java
index 1423b52c443d9..e82d8ed73ced2 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTar.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTar.java
@@ -61,7 +61,7 @@
/**
* A custom archive task that assembles a tar archive that preserves symbolic links.
- *
+ *
* This task is necessary because the built-in task {@link org.gradle.api.tasks.bundling.Tar} does not preserve symbolic links.
*/
public class SymbolicLinkPreservingTar extends Tar {
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
index a420c8b63b02c..1ad7e056b6ae6 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
@@ -77,9 +77,9 @@
import java.util.stream.Stream;
public class DistroTestPlugin implements Plugin {
- private static final String SYSTEM_JDK_VERSION = "11.0.20+8";
+ private static final String SYSTEM_JDK_VERSION = "17.0.9+9";
private static final String SYSTEM_JDK_VENDOR = "adoptium";
- private static final String GRADLE_JDK_VERSION = "17.0.8+7";
+ private static final String GRADLE_JDK_VERSION = "17.0.9+9";
private static final String GRADLE_JDK_VENDOR = "adoptium";
// all distributions used by distro tests. this is temporary until tests are per distribution
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java
index 728e36ce98bff..fcadf35593ce6 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java
@@ -65,7 +65,7 @@
* Rest YAML tests :
* When the {@link RestResourcesPlugin} has been applied the {@link CopyRestTestsTask} will copy the Rest YAML tests if explicitly
* configured with `includeCore` through the `restResources.restTests` extension.
- *
+ *
* Additionally you can specify which sourceSetName resources should be copied to. The default is the yamlRestTest source set.
* @see CopyRestApiTask
* @see CopyRestTestsTask
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java
index 3aba941875115..c9e18426966f9 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java
@@ -170,6 +170,7 @@ public void execute(Task task) {
.findFirst();
composeExtension.getExecutable().set(dockerCompose.isPresent() ? dockerCompose.get() : "/usr/bin/docker");
+ composeExtension.getUseDockerComposeV2().set(false);
tasks.named("composeUp").configure(t -> {
// Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantMachine.java b/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantMachine.java
index 2d71b9361963b..7abf9bf5fbef6 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantMachine.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantMachine.java
@@ -53,7 +53,7 @@
/**
* An helper to manage a vagrant box.
- *
+ *
* This is created alongside a {@link VagrantExtension} for a project to manage starting and
* stopping a single vagrant box.
*/
@@ -185,7 +185,7 @@ public void setArgs(String... args) {
/**
* A function to translate output from the vagrant command execution to the progress line.
- *
+ *
* The function takes the current line of output from vagrant, and returns a new
* progress line, or {@code null} if there is no update.
*/
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantShellTask.java b/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantShellTask.java
index 85d3e340c50e7..ca1b95183505f 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantShellTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantShellTask.java
@@ -47,7 +47,7 @@
/**
* A shell script to run within a vagrant VM.
- *
+ *
* The script is run as root within the VM.
*/
public abstract class VagrantShellTask extends DefaultTask {
diff --git a/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java b/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java
index c0e7320ba7615..8e246ff9ecd11 100644
--- a/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java
+++ b/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java
@@ -58,7 +58,7 @@ public void tearDown() {
* This test is used to verify that adding the 'opensearch.pluginzip' to the project
* adds some other transitive plugins and tasks under the hood. This is basically
* a behavioral test of the {@link Publish#apply(Project)} method.
- *
+ *
* This is equivalent of having a build.gradle script with just the following section:
*
* plugins {
@@ -202,7 +202,7 @@ public void useDefaultValues() throws IOException, URISyntaxException, XmlPullPa
GradleRunner runner = prepareGradleRunnerFromTemplate("useDefaultValues.gradle", "build", ZIP_PUBLISH_TASK);
BuildResult result = runner.build();
- /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */
+ /* Check if build and ZIP_PUBLISH_TASK tasks have run well */
assertEquals(SUCCESS, result.task(":" + "build").getOutcome());
assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome());
@@ -277,7 +277,7 @@ public void allProjectsGroup() throws IOException, URISyntaxException, XmlPullPa
GradleRunner runner = prepareGradleRunnerFromTemplate("allProjectsGroup.gradle", "build", ZIP_PUBLISH_TASK);
BuildResult result = runner.build();
- /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */
+ /* Check if build and ZIP_PUBLISH_TASK tasks have run well */
assertEquals(SUCCESS, result.task(":" + "build").getOutcome());
assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome());
@@ -312,7 +312,7 @@ public void groupPriorityLevel() throws IOException, URISyntaxException, XmlPull
GradleRunner runner = prepareGradleRunnerFromTemplate("groupPriorityLevel.gradle", "build", ZIP_PUBLISH_TASK);
BuildResult result = runner.build();
- /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */
+ /* Check if build and ZIP_PUBLISH_TASK tasks have run well */
assertEquals(SUCCESS, result.task(":" + "build").getOutcome());
assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome());
@@ -348,7 +348,7 @@ public void missingPOMEntity() throws IOException, URISyntaxException, XmlPullPa
GradleRunner runner = prepareGradleRunnerFromTemplate("missingPOMEntity.gradle", "build", ZIP_PUBLISH_TASK);
BuildResult result = runner.build();
- /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */
+ /* Check if build and ZIP_PUBLISH_TASK tasks have run well */
assertEquals(SUCCESS, result.task(":" + "build").getOutcome());
assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome());
@@ -395,7 +395,7 @@ public void customizedGroupValue() throws IOException, URISyntaxException, XmlPu
GradleRunner runner = prepareGradleRunnerFromTemplate("customizedGroupValue.gradle", "build", ZIP_PUBLISH_TASK);
BuildResult result = runner.build();
- /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */
+ /* Check if build and ZIP_PUBLISH_TASK tasks have run well */
assertEquals(SUCCESS, result.task(":" + "build").getOutcome());
assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome());
diff --git a/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/GradleThreadsFilter.java b/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/GradleThreadsFilter.java
index b64c719440733..def5248c1f255 100644
--- a/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/GradleThreadsFilter.java
+++ b/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/GradleThreadsFilter.java
@@ -36,7 +36,7 @@
/**
* Filter out threads controlled by gradle that may be created during unit tests.
- *
+ *
* Currently this includes pooled threads for Exec as well as file system event watcher threads.
*/
public class GradleThreadsFilter implements ThreadFilter {
diff --git a/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/JUnit3MethodProvider.java b/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/JUnit3MethodProvider.java
index 163a903d31832..1a2e36aa78e9f 100644
--- a/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/JUnit3MethodProvider.java
+++ b/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/JUnit3MethodProvider.java
@@ -43,7 +43,7 @@
/**
* Backwards compatible test* method provider (public, non-static).
- *
+ *
* copy of org.apache.lucene.util.LuceneJUnit3MethodProvider to avoid a dependency between build and test fw.
*/
public final class JUnit3MethodProvider implements TestMethodProvider {
diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
index cb8050d1718c4..f24b61ef0d165 100644
--- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
+++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
@@ -15,8 +15,9 @@ plugins {
repositories {
mavenCentral()
}
+
dependencies {
- implementation "org.apache.logging.log4j:log4j-core:2.20.0"
+ implementation "org.apache.logging.log4j:log4j-core:2.22.0"
}
["0.0.1", "0.0.2"].forEach { v ->
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index ee666f90f83dd..74d655cfb1045 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,22 +1,20 @@
opensearch = 3.0.0
-lucene = 9.8.0-snapshot-4373c3b
+lucene = 9.8.0
bundled_jdk_vendor = adoptium
-bundled_jdk = 20.0.2+9
-# See please https://github.com/adoptium/temurin-build/issues/3371
-bundled_jdk_linux_ppc64le = 20+36
+bundled_jdk = 21.0.1+12
# optional dependencies
spatial4j = 0.7
jts = 1.15.0
-jackson = 2.15.2
-jackson_databind = 2.15.2
+jackson = 2.16.0
+jackson_databind = 2.16.0
snakeyaml = 2.1
icu4j = 70.1
supercsv = 2.4.0
-log4j = 2.20.0
+log4j = 2.21.0
slf4j = 1.7.36
-asm = 9.5
+asm = 9.6
jettison = 1.5.4
woodstox = 6.4.0
kotlin = 1.7.10
@@ -28,9 +26,13 @@ jakarta_annotation = 1.3.5
# when updating the JNA version, also update the version in buildSrc/build.gradle
jna = 5.13.0
-netty = 4.1.97.Final
+netty = 4.1.101.Final
joda = 2.12.2
+# project reactor
+reactor_netty = 1.1.13
+reactor = 3.5.11
+
# client dependencies
httpclient5 = 5.2.1
httpcore5 = 5.2.2
@@ -48,14 +50,14 @@ reactivestreams = 1.0.4
# when updating this version, you need to ensure compatibility with:
# - plugins/ingest-attachment (transitive dependency, check the upstream POM)
# - distribution/tools/plugin-cli
-bouncycastle=1.75
+bouncycastle=1.76
# test dependencies
randomizedrunner = 2.7.1
junit = 4.13.2
hamcrest = 2.1
-mockito = 5.4.0
+mockito = 5.5.0
objenesis = 3.2
-bytebuddy = 1.14.3
+bytebuddy = 1.14.7
# benchmark dependencies
jmh = 1.35
@@ -68,4 +70,5 @@ jzlib = 1.1.3
resteasy = 6.2.4.Final
# opentelemetry dependencies
-opentelemetry = 1.30.1
+opentelemetry = 1.32.0
+opentelemetrysemconv = 1.23.1-alpha
diff --git a/client/benchmark/build.gradle b/client/benchmark/build.gradle
index 6fd5262f0ab4f..c1af5fa92e35c 100644
--- a/client/benchmark/build.gradle
+++ b/client/benchmark/build.gradle
@@ -33,7 +33,7 @@ apply plugin: 'application'
base {
group = 'org.opensearch.client'
- archivesBaseName = 'client-benchmarks'
+ archivesName = 'client-benchmarks'
}
// Not published so no need to assemble
diff --git a/client/benchmark/src/main/java/org/opensearch/client/benchmark/metrics/SampleRecorder.java b/client/benchmark/src/main/java/org/opensearch/client/benchmark/metrics/SampleRecorder.java
index e53e4f1ad692d..9cd12f5e78bd0 100644
--- a/client/benchmark/src/main/java/org/opensearch/client/benchmark/metrics/SampleRecorder.java
+++ b/client/benchmark/src/main/java/org/opensearch/client/benchmark/metrics/SampleRecorder.java
@@ -37,7 +37,7 @@
/**
* Stores measurement samples.
- *
+ *
* This class is NOT threadsafe.
*/
public final class SampleRecorder {
diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle
index 770cb3f78ca47..fdc93d8037ce6 100644
--- a/client/rest-high-level/build.gradle
+++ b/client/rest-high-level/build.gradle
@@ -39,7 +39,7 @@ apply plugin: 'opensearch.rest-resources'
base {
group = 'org.opensearch.client'
- archivesBaseName = 'opensearch-rest-high-level-client'
+ archivesName = 'opensearch-rest-high-level-client'
}
restResources {
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/ClusterClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/ClusterClient.java
index 5bd5a5d0e308e..eb0a8b0e8f40a 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/ClusterClient.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/ClusterClient.java
@@ -170,8 +170,8 @@ public ClusterHealthResponse health(ClusterHealthRequest healthRequest, RequestO
/**
* Asynchronously get cluster health using the Cluster Health API.
- *
* If timeout occurred, {@link ClusterHealthResponse} will have isTimedOut() == true and status() == RestStatus.REQUEST_TIMEOUT
+ *
* @param healthRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
index 61a202d25167f..35d9929a649ff 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
@@ -451,9 +451,9 @@ static void addSearchRequestParams(Params params, SearchRequest searchRequest) {
params.withIndicesOptions(searchRequest.indicesOptions());
}
params.withSearchType(searchRequest.searchType().name().toLowerCase(Locale.ROOT));
- /**
- * Merging search responses as part of CCS flow to reduce roundtrips is not supported for point in time -
- * refer to org.opensearch.action.search.SearchResponseMerger
+ /*
+ Merging search responses as part of CCS flow to reduce roundtrips is not supported for point in time -
+ refer to org.opensearch.action.search.SearchResponseMerger
*/
if (searchRequest.pointInTimeBuilder() != null) {
params.putParam("ccs_minimize_roundtrips", "false");
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/TimedRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/TimedRequest.java
index dad5b6a3679ec..d40445b2daa81 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/TimedRequest.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/TimedRequest.java
@@ -37,7 +37,7 @@
/**
* A base request for any requests that supply timeouts.
- *
+ *
* Please note, any requests that use a ackTimeout should set timeout as they
* represent the same backing field on the server.
*/
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java
index 7805a7853b003..62c5b54c0e75e 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java
@@ -156,7 +156,7 @@ public MediaType mappingsMediaType() {
/**
* Adds mapping that will be added when the index gets created.
- *
+ *
* Note that the definition should *not* be nested under a type name.
*
* @param source The mapping source
@@ -168,7 +168,7 @@ public CreateIndexRequest mapping(String source, MediaType mediaType) {
/**
* Adds mapping that will be added when the index gets created.
- *
+ *
* Note that the definition should *not* be nested under a type name.
*
* @param source The mapping source
@@ -179,7 +179,7 @@ public CreateIndexRequest mapping(XContentBuilder source) {
/**
* Adds mapping that will be added when the index gets created.
- *
+ *
* Note that the definition should *not* be nested under a type name.
*
* @param source The mapping source
@@ -196,7 +196,7 @@ public CreateIndexRequest mapping(Map source) {
/**
* Adds mapping that will be added when the index gets created.
- *
+ *
* Note that the definition should *not* be nested under a type name.
*
* @param source The mapping source
@@ -282,7 +282,7 @@ public CreateIndexRequest aliases(Collection aliases) {
/**
* Sets the settings and mappings as a single source.
- *
+ *
* Note that the mapping definition should *not* be nested under a type name.
*/
public CreateIndexRequest source(String source, MediaType mediaType) {
@@ -291,7 +291,7 @@ public CreateIndexRequest source(String source, MediaType mediaType) {
/**
* Sets the settings and mappings as a single source.
- *
+ *
* Note that the mapping definition should *not* be nested under a type name.
*/
public CreateIndexRequest source(XContentBuilder source) {
@@ -300,7 +300,7 @@ public CreateIndexRequest source(XContentBuilder source) {
/**
* Sets the settings and mappings as a single source.
- *
+ *
* Note that the mapping definition should *not* be nested under a type name.
*/
public CreateIndexRequest source(BytesReference source, MediaType mediaType) {
@@ -311,7 +311,7 @@ public CreateIndexRequest source(BytesReference source, MediaType mediaType) {
/**
* Sets the settings and mappings as a single source.
- *
+ *
* Note that the mapping definition should *not* be nested under a type name.
*/
@SuppressWarnings("unchecked")
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java
index 6d7e95d191ba6..a63393bd2341b 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java
@@ -105,7 +105,7 @@ public MediaType mediaType() {
/**
* The mapping source definition.
- *
+ *
* Note that the definition should *not* be nested under a type name.
*/
public PutMappingRequest source(Map mappingSource) {
@@ -120,7 +120,7 @@ public PutMappingRequest source(Map mappingSource) {
/**
* The mapping source definition.
- *
+ *
* Note that the definition should *not* be nested under a type name.
*/
public PutMappingRequest source(String mappingSource, MediaType mediaType) {
@@ -131,7 +131,7 @@ public PutMappingRequest source(String mappingSource, MediaType mediaType) {
/**
* The mapping source definition.
- *
+ *
* Note that the definition should *not* be nested under a type name.
*/
public PutMappingRequest source(XContentBuilder builder) {
@@ -142,7 +142,7 @@ public PutMappingRequest source(XContentBuilder builder) {
/**
* The mapping source definition.
- *
+ *
* Note that the definition should *not* be nested under a type name.
*/
public PutMappingRequest source(BytesReference source, MediaType mediaType) {
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskGroup.java b/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskGroup.java
index c419884700587..9129de717459f 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskGroup.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskGroup.java
@@ -38,7 +38,6 @@
/**
* Client side counterpart of server side version.
- *
* {@link org.opensearch.action.admin.cluster.node.tasks.list.TaskGroup}
*/
public class TaskGroup {
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskInfo.java b/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskInfo.java
index 51ac62830446f..75badc4e3dbf2 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskInfo.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskInfo.java
@@ -54,6 +54,7 @@ public class TaskInfo {
private long runningTimeNanos;
private boolean cancellable;
private boolean cancelled;
+ private Long cancellationStartTime;
private TaskId parentTaskId;
private final Map status = new HashMap<>();
private final Map headers = new HashMap<>();
@@ -127,6 +128,14 @@ void setCancelled(boolean cancelled) {
this.cancelled = cancelled;
}
+ public Long getCancellationStartTime() {
+ return this.cancellationStartTime;
+ }
+
+ public void setCancellationStartTime(Long cancellationStartTime) {
+ this.cancellationStartTime = cancellationStartTime;
+ }
+
public TaskId getParentTaskId() {
return parentTaskId;
}
@@ -180,6 +189,7 @@ private void noOpParse(Object s) {}
parser.declareString(TaskInfo::setParentTaskId, new ParseField("parent_task_id"));
parser.declareObject(TaskInfo::setHeaders, (p, c) -> p.mapStrings(), new ParseField("headers"));
parser.declareObject(TaskInfo::setResourceStats, (p, c) -> p.map(), new ParseField("resource_stats"));
+ parser.declareLong(TaskInfo::setCancellationStartTime, new ParseField("cancellation_time_millis"));
PARSER = (XContentParser p, Void v, String name) -> parser.parse(p, new TaskInfo(new TaskId(name)), null);
}
@@ -199,7 +209,8 @@ && isCancelled() == taskInfo.isCancelled()
&& Objects.equals(getParentTaskId(), taskInfo.getParentTaskId())
&& Objects.equals(status, taskInfo.status)
&& Objects.equals(getHeaders(), taskInfo.getHeaders())
- && Objects.equals(getResourceStats(), taskInfo.getResourceStats());
+ && Objects.equals(getResourceStats(), taskInfo.getResourceStats())
+ && Objects.equals(getCancellationStartTime(), taskInfo.getCancellationStartTime());
}
@Override
@@ -216,7 +227,8 @@ public int hashCode() {
getParentTaskId(),
status,
getHeaders(),
- getResourceStats()
+ getResourceStats(),
+ getCancellationStartTime()
);
}
@@ -250,6 +262,8 @@ public String toString() {
+ headers
+ ", resource_stats="
+ resourceStats
+ + ", cancellationStartTime="
+ + cancellationStartTime
+ '}';
}
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractRequestTestCase.java b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractRequestTestCase.java
index 49bcb61b2dc3d..c464ee9ece74a 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractRequestTestCase.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractRequestTestCase.java
@@ -44,7 +44,7 @@
/**
* Base class for HLRC request parsing tests.
- *
+ *
* This case class facilitates generating client side request test instances and
* verifies that they are correctly parsed into server side request instances.
*
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java
index 27704b01560c4..7d2d6b87b85c6 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java
@@ -44,7 +44,7 @@
/**
* Base class for HLRC response parsing tests.
- *
+ *
* This case class facilitates generating server side response test instances and
* verifies that they are correctly parsed into HLRC response instances.
*
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorRetryIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorRetryIT.java
index b7f6328b3c88e..3678cc042ba47 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorRetryIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorRetryIT.java
@@ -180,7 +180,7 @@ private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) {
/**
* Internal helper class to correlate backoff states with bulk responses. This is needed to check whether we maxed out the number
* of retries but still got rejected (which is perfectly fine and can also happen from time to time under heavy load).
- *
+ *
* This implementation relies on an implementation detail in Retry, namely that the bulk listener is notified on the same thread
* as the last call to the backoff policy's iterator. The advantage is that this is non-invasive to the rest of the production code.
*/
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java
index d5c1888e78b5d..b0990560b08ba 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java
@@ -24,7 +24,6 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@@ -72,7 +71,7 @@ public void testCreateAndDeletePit() throws IOException {
assertTrue(deletePitResponse.getDeletePitResults().get(0).getPitId().equals(createPitResponse.getId()));
}
- public void testDeleteAllAndListAllPits() throws IOException, InterruptedException {
+ public void testDeleteAllAndListAllPits() throws Exception {
CreatePitRequest pitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "index");
CreatePitResponse pitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
CreatePitResponse pitResponse1 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
@@ -91,11 +90,9 @@ public void testDeleteAllAndListAllPits() throws IOException, InterruptedExcepti
List pits = getAllPitResponse.getPitInfos().stream().map(r -> r.getPitId()).collect(Collectors.toList());
assertTrue(pits.contains(pitResponse.getId()));
assertTrue(pits.contains(pitResponse1.getId()));
- CountDownLatch countDownLatch = new CountDownLatch(1);
ActionListener deletePitListener = new ActionListener<>() {
@Override
public void onResponse(DeletePitResponse response) {
- countDownLatch.countDown();
for (DeletePitInfo deletePitInfo : response.getDeletePitResults()) {
assertTrue(deletePitInfo.isSuccessful());
}
@@ -103,19 +100,20 @@ public void onResponse(DeletePitResponse response) {
@Override
public void onFailure(Exception e) {
- countDownLatch.countDown();
if (!(e instanceof OpenSearchStatusException)) {
throw new AssertionError("Delete all failed");
}
}
};
final CreatePitResponse pitResponse3 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
-
+ assertTrue(pitResponse3.getId() != null);
ActionListener getPitsListener = new ActionListener() {
@Override
public void onResponse(GetAllPitNodesResponse response) {
List pits = response.getPitInfos().stream().map(r -> r.getPitId()).collect(Collectors.toList());
assertTrue(pits.contains(pitResponse3.getId()));
+ // delete all pits
+ highLevelClient().deleteAllPitsAsync(RequestOptions.DEFAULT, deletePitListener);
}
@Override
@@ -126,11 +124,12 @@ public void onFailure(Exception e) {
}
};
highLevelClient().getAllPitsAsync(RequestOptions.DEFAULT, getPitsListener);
- highLevelClient().deleteAllPitsAsync(RequestOptions.DEFAULT, deletePitListener);
- assertTrue(countDownLatch.await(10, TimeUnit.SECONDS));
+
// validate no pits case
- getAllPitResponse = highLevelClient().getAllPits(RequestOptions.DEFAULT);
- assertTrue(getAllPitResponse.getPitInfos().size() == 0);
- highLevelClient().deleteAllPitsAsync(RequestOptions.DEFAULT, deletePitListener);
+ assertBusy(() -> {
+ GetAllPitNodesResponse getAllPitResponse1 = highLevelClient().getAllPits(RequestOptions.DEFAULT);
+ assertTrue(getAllPitResponse1.getPitInfos().size() == 0);
+ highLevelClient().deleteAllPitsAsync(RequestOptions.DEFAULT, deletePitListener);
+ });
}
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java
index abb2d75aea751..ce080b45273b4 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java
@@ -137,15 +137,15 @@
* You need to wrap your code between two tags like:
* // tag::example
* // end::example
- *
+ *
* Where example is your tag name.
- *
+ *
* Then in the documentation, you can extract what is between tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[example]
* --------------------------------------------------
- *
+ *
* The column width of the code block is 84. If the code contains a line longer
* than 84, the line will be cut and a horizontal scroll bar will be displayed.
* (the code indentation of the tag is not included in the width)
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IngestClientDocumentationIT.java
index d14759065b5eb..28909cf58541a 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IngestClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IngestClientDocumentationIT.java
@@ -65,15 +65,15 @@
* You need to wrap your code between two tags like:
* // tag::example
* // end::example
- *
+ *
* Where example is your tag name.
- *
+ *
* Then in the documentation, you can extract what is between tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/IngestClientDocumentationIT.java[example]
* --------------------------------------------------
- *
+ *
* The column width of the code block is 84. If the code contains a line longer
* than 84, the line will be cut and a horizontal scroll bar will be displayed.
* (the code indentation of the tag is not included in the width)
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SnapshotClientDocumentationIT.java
index 50bcf79642eac..d0015db044843 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SnapshotClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SnapshotClientDocumentationIT.java
@@ -90,15 +90,15 @@
* You need to wrap your code between two tags like:
* // tag::example
* // end::example
- *
+ *
* Where example is your tag name.
- *
+ *
* Then in the documentation, you can extract what is between tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[example]
* --------------------------------------------------
- *
+ *
* The column width of the code block is 84. If the code contains a line longer
* than 84, the line will be cut and a horizontal scroll bar will be displayed.
* (the code indentation of the tag is not included in the width)
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/StoredScriptsDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/StoredScriptsDocumentationIT.java
index 6916ae11556e2..2e2d15df5392a 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/StoredScriptsDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/StoredScriptsDocumentationIT.java
@@ -66,15 +66,15 @@
* You need to wrap your code between two tags like:
* // tag::example
* // end::example
- *
+ *
* Where example is your tag name.
- *
+ *
* Then in the documentation, you can extract what is between tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[example]
* --------------------------------------------------
- *
+ *
* The column width of the code block is 84. If the code contains a line longer
* than 84, the line will be cut and a horizontal scroll bar will be displayed.
* (the code indentation of the tag is not included in the width)
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/TasksClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/TasksClientDocumentationIT.java
index 03e267aafd1b7..cbac0b8c97d9c 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/TasksClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/TasksClientDocumentationIT.java
@@ -66,15 +66,15 @@
* You need to wrap your code between two tags like:
* // tag::example
* // end::example
- *
+ *
* Where example is your tag name.
- *
+ *
* Then in the documentation, you can extract what is between tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/{@link TasksClientDocumentationIT}.java[example]
* --------------------------------------------------
- *
+ *
* The column width of the code block is 84. If the code contains a line longer
* than 84, the line will be cut and a horizontal scroll bar will be displayed.
* (the code indentation of the tag is not included in the width)
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/RandomCreateIndexGenerator.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/RandomCreateIndexGenerator.java
index 1f747dc139d15..edb4d16c6d992 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/RandomCreateIndexGenerator.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/RandomCreateIndexGenerator.java
@@ -44,7 +44,7 @@ public class RandomCreateIndexGenerator {
/**
* Returns a random {@link CreateIndexRequest}.
- *
+ *
* Randomizes the index name, the aliases, mappings and settings associated with the
* index. When present, the mappings make no mention of types.
*/
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java
index 835a93b5b09ce..faf5024d0c173 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java
@@ -84,6 +84,10 @@ protected CancelTasksResponseTests.ByNodeCancelTasksResponse createServerTestIns
for (int i = 0; i < 4; i++) {
boolean cancellable = randomBoolean();
boolean cancelled = cancellable == true ? randomBoolean() : false;
+ Long cancellationStartTime = null;
+ if (cancelled) {
+ cancellationStartTime = randomNonNegativeLong();
+ }
tasks.add(
new org.opensearch.tasks.TaskInfo(
new TaskId(NODE_ID, (long) i),
@@ -97,7 +101,8 @@ protected CancelTasksResponseTests.ByNodeCancelTasksResponse createServerTestIns
cancelled,
new TaskId("node1", randomLong()),
Collections.singletonMap("x-header-of", "some-value"),
- null
+ null,
+ cancellationStartTime
)
);
}
@@ -135,6 +140,7 @@ protected void assertInstances(
assertEquals(ti.isCancelled(), taskInfo.isCancelled());
assertEquals(ti.getParentTaskId().getNodeId(), taskInfo.getParentTaskId().getNodeId());
assertEquals(ti.getParentTaskId().getId(), taskInfo.getParentTaskId().getId());
+ assertEquals(ti.getCancellationStartTime(), taskInfo.getCancellationStartTime());
FakeTaskStatus status = (FakeTaskStatus) ti.getStatus();
assertEquals(status.code, taskInfo.getStatus().get("code"));
assertEquals(status.status, taskInfo.getStatus().get("status"));
diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index 2c437c909fb03..f18df65dfddfa 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -34,13 +34,13 @@ apply plugin: 'opensearch.build'
apply plugin: 'opensearch.publish'
java {
- targetCompatibility = JavaVersion.VERSION_11
- sourceCompatibility = JavaVersion.VERSION_11
+ targetCompatibility = JavaVersion.VERSION_1_8
+ sourceCompatibility = JavaVersion.VERSION_1_8
}
base {
group = 'org.opensearch.client'
- archivesBaseName = 'opensearch-rest-client'
+ archivesName = 'opensearch-rest-client'
}
dependencies {
@@ -109,3 +109,10 @@ thirdPartyAudit.ignoreMissingClasses(
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener'
)
+
+tasks.withType(JavaCompile) {
+ // Suppressing '[options] target value 8 is obsolete and will be removed in a future release'
+ configure(options) {
+ options.compilerArgs << '-Xlint:-options'
+ }
+}
diff --git a/client/rest/src/main/java/org/opensearch/client/RestClient.java b/client/rest/src/main/java/org/opensearch/client/RestClient.java
index e819fa27a8939..15905add76c4f 100644
--- a/client/rest/src/main/java/org/opensearch/client/RestClient.java
+++ b/client/rest/src/main/java/org/opensearch/client/RestClient.java
@@ -310,7 +310,7 @@ public boolean isRunning() {
* they will be retried). In case of failures all of the alive nodes (or
* dead nodes that deserve a retry) are retried until one responds or none
* of them does, in which case an {@link IOException} will be thrown.
- *
+ *
* This method works by performing an asynchronous call and waiting
* for the result. If the asynchronous call throws an exception we wrap
* it and rethrow it so that the stack trace attached to the exception
@@ -1116,9 +1116,15 @@ public long getContentLength() {
if (chunkedEnabled.get()) {
return -1L;
} else {
- long size;
+ long size = 0;
+ final byte[] buf = new byte[8192];
+ int nread = 0;
+
try (InputStream is = getContent()) {
- size = is.readAllBytes().length;
+ // read to EOF which may read more or less than buffer size
+ while ((nread = is.read(buf)) > 0) {
+ size += nread;
+ }
} catch (IOException ex) {
size = -1L;
}
diff --git a/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumer.java b/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumer.java
index 9bd17d1c24c7e..ae38c1a0308d1 100644
--- a/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumer.java
+++ b/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumer.java
@@ -86,25 +86,29 @@ protected void data(final ByteBuffer src, final boolean endOfStream) throws IOEx
return;
}
+ int len = src.limit();
+ if (len < 0) {
+ len = 4096;
+ } else if (len > bufferLimitBytes) {
+ throw new ContentTooLongException(
+ "entity content is too long [" + len + "] for the configured buffer limit [" + bufferLimitBytes + "]"
+ );
+ }
+
ByteArrayBuffer buffer = bufferRef.get();
if (buffer == null) {
- buffer = new ByteArrayBuffer(bufferLimitBytes);
+ buffer = new ByteArrayBuffer(len);
if (bufferRef.compareAndSet(null, buffer) == false) {
buffer = bufferRef.get();
}
}
- int len = src.limit();
if (buffer.length() + len > bufferLimitBytes) {
throw new ContentTooLongException(
"entity content is too long [" + len + "] for the configured buffer limit [" + bufferLimitBytes + "]"
);
}
- if (len < 0) {
- len = 4096;
- }
-
if (src.hasArray()) {
buffer.append(src.array(), src.arrayOffset() + src.position(), src.remaining());
} else {
@@ -136,4 +140,12 @@ public void releaseResources() {
buffer = null;
}
}
+
+ /**
+ * Gets current byte buffer instance
+ * @return byte buffer instance
+ */
+ ByteArrayBuffer getBuffer() {
+ return bufferRef.get();
+ }
}
diff --git a/client/rest/src/test/java/org/opensearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/opensearch/client/documentation/RestClientDocumentation.java
index b2807d35d230e..42c31864e0578 100644
--- a/client/rest/src/test/java/org/opensearch/client/documentation/RestClientDocumentation.java
+++ b/client/rest/src/test/java/org/opensearch/client/documentation/RestClientDocumentation.java
@@ -89,15 +89,15 @@
* You need to wrap your code between two tags like:
* // tag::example[]
* // end::example[]
- *
+ *
* Where example is your tag name.
- *
+ *
* Then in the documentation, you can extract what is between tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/RestClientDocumentation.java[example]
* --------------------------------------------------
- *
+ *
* Note that this is not a test class as we are only interested in testing that docs snippets compile. We don't want
* to send requests to a node and we don't even have the tools to do it.
*/
diff --git a/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java b/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java
new file mode 100644
index 0000000000000..fdfe49ca901c9
--- /dev/null
+++ b/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java
@@ -0,0 +1,71 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client.nio;
+
+import org.apache.hc.core5.http.ContentTooLongException;
+import org.opensearch.client.RestClientTestCase;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.assertThrows;
+
+public class HeapBufferedAsyncEntityConsumerTests extends RestClientTestCase {
+ private static final int BUFFER_LIMIT = 100 * 1024 * 1024 /* 100Mb */;
+ private HeapBufferedAsyncEntityConsumer consumer;
+
+ @Before
+ public void setUp() {
+ consumer = new HeapBufferedAsyncEntityConsumer(BUFFER_LIMIT);
+ }
+
+ @After
+ public void tearDown() {
+ consumer.releaseResources();
+ }
+
+ public void testConsumerAllocatesBufferOfConsumedLength() throws IOException {
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(1000).flip());
+ assertThat(consumer.getBuffer().capacity(), equalTo(1000));
+ }
+
+ public void testConsumerAllocatesEmptyBuffer() throws IOException {
+ consumer.consume((ByteBuffer) ByteBuffer.allocate(0).flip());
+ assertThat(consumer.getBuffer().capacity(), equalTo(0));
+ }
+
+ public void testConsumerExpandsBufferLimits() throws IOException {
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(1000).flip());
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(2000).flip());
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(3000).flip());
+ assertThat(consumer.getBuffer().capacity(), equalTo(6000));
+ }
+
+ public void testConsumerAllocatesLimit() throws IOException {
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT).flip());
+ assertThat(consumer.getBuffer().capacity(), equalTo(BUFFER_LIMIT));
+ }
+
+ public void testConsumerFailsToAllocateOverLimit() throws IOException {
+ assertThrows(ContentTooLongException.class, () -> consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT + 1).flip()));
+ }
+
+ public void testConsumerFailsToExpandOverLimit() throws IOException {
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT).flip());
+ assertThrows(ContentTooLongException.class, () -> consumer.consume((ByteBuffer) randomByteBufferOfLength(1).flip()));
+ }
+
+ private static ByteBuffer randomByteBufferOfLength(int length) {
+ return ByteBuffer.allocate(length).put(randomBytesOfLength(length));
+ }
+}
diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle
index f645b2dbbc933..4b50a996d1f9f 100644
--- a/client/sniffer/build.gradle
+++ b/client/sniffer/build.gradle
@@ -37,7 +37,7 @@ java {
base {
group = 'org.opensearch.client'
- archivesBaseName = 'opensearch-rest-client-sniffer'
+ archivesName = 'opensearch-rest-client-sniffer'
}
dependencies {
diff --git a/client/sniffer/licenses/jackson-core-2.15.2.jar.sha1 b/client/sniffer/licenses/jackson-core-2.15.2.jar.sha1
deleted file mode 100644
index ec6781b968eed..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a6fe1836469a69b3ff66037c324d75fc66ef137c
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..c2b70fb4ae202
--- /dev/null
+++ b/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
@@ -0,0 +1 @@
+899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/documentation/SnifferDocumentation.java b/client/sniffer/src/test/java/org/opensearch/client/sniff/documentation/SnifferDocumentation.java
index 440e9a2ea5cd1..8a4ca1fb0a136 100644
--- a/client/sniffer/src/test/java/org/opensearch/client/sniff/documentation/SnifferDocumentation.java
+++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/documentation/SnifferDocumentation.java
@@ -49,15 +49,15 @@
* You need to wrap your code between two tags like:
* // tag::example[]
* // end::example[]
- *
+ *
* Where example is your tag name.
- *
+ *
* Then in the documentation, you can extract what is between tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/SnifferDocumentation.java[example]
* --------------------------------------------------
- *
+ *
* Note that this is not a test class as we are only interested in testing that docs snippets compile. We don't want
* to send requests to a node and we don't even have the tools to do it.
*/
diff --git a/client/test/build.gradle b/client/test/build.gradle
index f81a009389681..b77865df6decf 100644
--- a/client/test/build.gradle
+++ b/client/test/build.gradle
@@ -30,8 +30,8 @@
apply plugin: 'opensearch.build'
java {
- targetCompatibility = JavaVersion.VERSION_11
- sourceCompatibility = JavaVersion.VERSION_11
+ targetCompatibility = JavaVersion.VERSION_1_8
+ sourceCompatibility = JavaVersion.VERSION_1_8
}
base {
@@ -69,3 +69,10 @@ dependenciesInfo.enabled = false
//we aren't releasing this jar
thirdPartyAudit.enabled = false
test.enabled = false
+
+tasks.withType(JavaCompile) {
+ // Suppressing '[options] target value 8 is obsolete and will be removed in a future release'
+ configure(options) {
+ options.compilerArgs << '-Xlint:-options'
+ }
+}
diff --git a/distribution/archives/darwin-arm64-tar/build.gradle b/distribution/archives/darwin-arm64-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/darwin-arm64-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/darwin-tar/build.gradle b/distribution/archives/darwin-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/darwin-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/freebsd-tar/build.gradle b/distribution/archives/freebsd-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/freebsd-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/integ-test-zip/build.gradle b/distribution/archives/integ-test-zip/build.gradle
index 9418223b0a44d..ffaea5e8ca771 100644
--- a/distribution/archives/integ-test-zip/build.gradle
+++ b/distribution/archives/integ-test-zip/build.gradle
@@ -38,7 +38,7 @@ apply plugin: 'com.netflix.nebula.maven-publish'
base {
group = "org.opensearch.distribution.integ-test-zip"
- archivesBaseName = "opensearch"
+ archivesName = "opensearch"
}
integTest {
diff --git a/distribution/archives/jre-linux-tar/build.gradle b/distribution/archives/jre-linux-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/jre-linux-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/linux-arm64-tar/build.gradle b/distribution/archives/linux-arm64-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/linux-arm64-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/linux-ppc64le-tar/build.gradle b/distribution/archives/linux-ppc64le-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/linux-ppc64le-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/linux-s390x-tar/build.gradle b/distribution/archives/linux-s390x-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/linux-s390x-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/linux-tar/build.gradle b/distribution/archives/linux-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/linux-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/no-jdk-darwin-arm64-tar/build.gradle b/distribution/archives/no-jdk-darwin-arm64-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/no-jdk-darwin-arm64-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/no-jdk-darwin-tar/build.gradle b/distribution/archives/no-jdk-darwin-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/no-jdk-darwin-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/no-jdk-freebsd-tar/build.gradle b/distribution/archives/no-jdk-freebsd-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/no-jdk-freebsd-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/no-jdk-linux-arm64-tar/build.gradle b/distribution/archives/no-jdk-linux-arm64-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/no-jdk-linux-arm64-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/no-jdk-linux-ppc64le-tar/build.gradle b/distribution/archives/no-jdk-linux-ppc64le-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/no-jdk-linux-ppc64le-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/no-jdk-linux-tar/build.gradle b/distribution/archives/no-jdk-linux-tar/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/no-jdk-linux-tar/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/no-jdk-windows-zip/build.gradle b/distribution/archives/no-jdk-windows-zip/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/no-jdk-windows-zip/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/archives/windows-zip/build.gradle b/distribution/archives/windows-zip/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/archives/windows-zip/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/docker/docker-arm64-build-context/build.gradle b/distribution/docker/docker-arm64-build-context/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/docker/docker-arm64-build-context/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/docker/docker-arm64-export/build.gradle b/distribution/docker/docker-arm64-export/build.gradle
index 3506c4e39c234..62f3dc68b0c8e 100644
--- a/distribution/docker/docker-arm64-export/build.gradle
+++ b/distribution/docker/docker-arm64-export/build.gradle
@@ -11,3 +11,5 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/docker/docker-export/build.gradle b/distribution/docker/docker-export/build.gradle
index 3506c4e39c234..62f3dc68b0c8e 100644
--- a/distribution/docker/docker-export/build.gradle
+++ b/distribution/docker/docker-export/build.gradle
@@ -11,3 +11,5 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/docker/docker-ppc64le-export/build.gradle b/distribution/docker/docker-ppc64le-export/build.gradle
index 820a0cdf69dfc..ae7def32c4d6c 100644
--- a/distribution/docker/docker-ppc64le-export/build.gradle
+++ b/distribution/docker/docker-ppc64le-export/build.gradle
@@ -10,3 +10,5 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/docker/docker-s390x-export/build.gradle b/distribution/docker/docker-s390x-export/build.gradle
index 3506c4e39c234..62f3dc68b0c8e 100644
--- a/distribution/docker/docker-s390x-export/build.gradle
+++ b/distribution/docker/docker-s390x-export/build.gradle
@@ -11,3 +11,5 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/docker/src/docker/config/log4j2.properties b/distribution/docker/src/docker/config/log4j2.properties
index 761478a9fdc6e..8edd6a7354a16 100644
--- a/distribution/docker/src/docker/config/log4j2.properties
+++ b/distribution/docker/src/docker/config/log4j2.properties
@@ -34,6 +34,16 @@ logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling
logger.deprecation.appenderRef.header_warning.ref = header_warning
logger.deprecation.additivity = false
+appender.search_request_slowlog_json_appender.type = Console
+appender.search_request_slowlog_json_appender.name = search_request_slowlog_json_appender
+appender.search_request_slowlog_json_appender.layout.type = OpenSearchJsonLayout
+appender.search_request_slowlog_json_appender.layout.type_name = search_request_slowlog
+
+logger.search_request_slowlog_logger.name = cluster.search.request.slowlog
+logger.search_request_slowlog_logger.level = trace
+logger.search_request_slowlog_logger.appenderRef.search_request_slowlog_json_appender.ref = search_request_slowlog_json_appender
+logger.search_request_slowlog_logger.additivity = false
+
appender.index_search_slowlog_rolling.type = Console
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
appender.index_search_slowlog_rolling.layout.type = OpenSearchJsonLayout
diff --git a/distribution/packages/arm64-deb/build.gradle b/distribution/packages/arm64-deb/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/arm64-deb/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/arm64-no-jdk-deb/build.gradle b/distribution/packages/arm64-no-jdk-deb/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/arm64-no-jdk-deb/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/arm64-no-jdk-rpm/build.gradle b/distribution/packages/arm64-no-jdk-rpm/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/arm64-no-jdk-rpm/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/arm64-rpm/build.gradle b/distribution/packages/arm64-rpm/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/arm64-rpm/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index 7914fcc172ef4..ededa7bff34d8 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -63,7 +63,7 @@ import java.util.regex.Pattern
*/
plugins {
- id "com.netflix.nebula.ospackage-base" version "11.4.0"
+ id "com.netflix.nebula.ospackage-base" version "11.6.0"
}
void addProcessFilesTask(String type, boolean jdk) {
@@ -213,7 +213,7 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
configurationFile '/etc/opensearch/jvm.options'
configurationFile '/etc/opensearch/log4j2.properties'
from("${packagingFiles}") {
- dirMode 02750
+ dirMode 0750
into('/etc')
permissionGroup 'opensearch'
includeEmptyDirs true
@@ -223,7 +223,7 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
}
from("${packagingFiles}/etc/opensearch") {
into('/etc/opensearch')
- dirMode 02750
+ dirMode 0750
fileMode 0660
permissionGroup 'opensearch'
includeEmptyDirs true
@@ -281,8 +281,8 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
dirMode mode
}
}
- copyEmptyDir('/var/log/opensearch', 'opensearch', 'opensearch', 02750)
- copyEmptyDir('/var/lib/opensearch', 'opensearch', 'opensearch', 02750)
+ copyEmptyDir('/var/log/opensearch', 'opensearch', 'opensearch', 0750)
+ copyEmptyDir('/var/lib/opensearch', 'opensearch', 'opensearch', 0750)
copyEmptyDir('/usr/share/opensearch/plugins', 'root', 'root', 0755)
into '/usr/share/opensearch'
diff --git a/distribution/packages/deb/build.gradle b/distribution/packages/deb/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/deb/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/no-jdk-arm64-deb/build.gradle b/distribution/packages/no-jdk-arm64-deb/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/no-jdk-arm64-deb/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/no-jdk-arm64-rpm/build.gradle b/distribution/packages/no-jdk-arm64-rpm/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/no-jdk-arm64-rpm/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/no-jdk-deb/build.gradle b/distribution/packages/no-jdk-deb/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/no-jdk-deb/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/no-jdk-rpm/build.gradle b/distribution/packages/no-jdk-rpm/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/no-jdk-rpm/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/rpm/build.gradle b/distribution/packages/rpm/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/distribution/packages/rpm/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/distribution/packages/src/deb/lintian/opensearch b/distribution/packages/src/deb/lintian/opensearch
index 854b23131ecbc..e6db8e8c6b322 100644
--- a/distribution/packages/src/deb/lintian/opensearch
+++ b/distribution/packages/src/deb/lintian/opensearch
@@ -15,11 +15,11 @@ missing-dep-on-jarwrapper
# we prefer to not make our config and log files world readable
non-standard-file-perm etc/default/opensearch 0660 != 0644
-non-standard-dir-perm etc/opensearch/ 2750 != 0755
-non-standard-dir-perm etc/opensearch/jvm.options.d/ 2750 != 0755
+non-standard-dir-perm etc/opensearch/ 0750 != 0755
+non-standard-dir-perm etc/opensearch/jvm.options.d/ 0750 != 0755
non-standard-file-perm etc/opensearch/*
-non-standard-dir-perm var/lib/opensearch/ 2750 != 0755
-non-standard-dir-perm var/log/opensearch/ 2750 != 0755
+non-standard-dir-perm var/lib/opensearch/ 0750 != 0755
+non-standard-dir-perm var/log/opensearch/ 0750 != 0755
executable-is-not-world-readable etc/init.d/opensearch 0750
non-standard-file-permissions-for-etc-init.d-script etc/init.d/opensearch 0750 != 0755
diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options
index 952110c6c0289..1a0abcbaf9c88 100644
--- a/distribution/src/config/jvm.options
+++ b/distribution/src/config/jvm.options
@@ -81,7 +81,7 @@ ${error.file}
# JDK 20+ Incubating Vector Module for SIMD optimizations;
# disabling may reduce performance on vector optimized lucene
-20:--add-modules=jdk.incubator.vector
+20-:--add-modules=jdk.incubator.vector
# HDFS ForkJoinPool.common() support by SecurityManager
-Djava.util.concurrent.ForkJoinPool.common.threadFactory=org.opensearch.secure_sm.SecuredForkJoinWorkerThreadFactory
diff --git a/distribution/src/config/log4j2.properties b/distribution/src/config/log4j2.properties
index bb27aaf2e22e6..d040afae82e53 100644
--- a/distribution/src/config/log4j2.properties
+++ b/distribution/src/config/log4j2.properties
@@ -113,6 +113,47 @@ logger.deprecation.appenderRef.deprecation_rolling_old.ref = deprecation_rolling
logger.deprecation.appenderRef.header_warning.ref = header_warning
logger.deprecation.additivity = false
+######## Search Request Slowlog JSON ####################
+appender.search_request_slowlog_json_appender.type = RollingFile
+appender.search_request_slowlog_json_appender.name = search_request_slowlog_json_appender
+appender.search_request_slowlog_json_appender.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs\
+ .cluster_name}_index_search_slowlog.json
+appender.search_request_slowlog_json_appender.filePermissions = rw-r-----
+appender.search_request_slowlog_json_appender.layout.type = OpenSearchJsonLayout
+appender.search_request_slowlog_json_appender.layout.type_name = search_request_slowlog
+appender.search_request_slowlog_json_appender.layout.opensearchmessagefields=message,took,took_millis,phase_took,total_hits,search_type,shards,source,id
+
+appender.search_request_slowlog_json_appender.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs\
+ .cluster_name}_index_search_slowlog-%i.json.gz
+appender.search_request_slowlog_json_appender.policies.type = Policies
+appender.search_request_slowlog_json_appender.policies.size.type = SizeBasedTriggeringPolicy
+appender.search_request_slowlog_json_appender.policies.size.size = 1GB
+appender.search_request_slowlog_json_appender.strategy.type = DefaultRolloverStrategy
+appender.search_request_slowlog_json_appender.strategy.max = 4
+#################################################
+######## Search Request Slowlog Log File - old style pattern ####
+appender.search_request_slowlog_log_appender.type = RollingFile
+appender.search_request_slowlog_log_appender.name = search_request_slowlog_log_appender
+appender.search_request_slowlog_log_appender.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\
+ _index_search_slowlog.log
+appender.search_request_slowlog_log_appender.filePermissions = rw-r-----
+appender.search_request_slowlog_log_appender.layout.type = PatternLayout
+appender.search_request_slowlog_log_appender.layout.pattern = [%d{ISO8601}][%-5p][%c{1.}] [%node_name]%marker %m%n
+
+appender.search_request_slowlog_log_appender.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\
+ _index_search_slowlog-%i.log.gz
+appender.search_request_slowlog_log_appender.policies.type = Policies
+appender.search_request_slowlog_log_appender.policies.size.type = SizeBasedTriggeringPolicy
+appender.search_request_slowlog_log_appender.policies.size.size = 1GB
+appender.search_request_slowlog_log_appender.strategy.type = DefaultRolloverStrategy
+appender.search_request_slowlog_log_appender.strategy.max = 4
+#################################################
+logger.search_request_slowlog_logger.name = cluster.search.request.slowlog
+logger.search_request_slowlog_logger.level = trace
+logger.search_request_slowlog_logger.appenderRef.search_request_slowlog_json_appender.ref = search_request_slowlog_json_appender
+logger.search_request_slowlog_logger.appenderRef.search_request_slowlog_log_appender.ref = search_request_slowlog_log_appender
+logger.search_request_slowlog_logger.additivity = false
+
######## Search slowlog JSON ####################
appender.index_search_slowlog_rolling.type = RollingFile
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
diff --git a/distribution/src/config/opensearch.yml b/distribution/src/config/opensearch.yml
index 1d2cfe7eccae6..b7ab2e1c2309b 100644
--- a/distribution/src/config/opensearch.yml
+++ b/distribution/src/config/opensearch.yml
@@ -121,3 +121,9 @@ ${path.logs}
# index searcher threadpool.
#
#opensearch.experimental.feature.concurrent_segment_search.enabled: false
+#
+#
+# Gates the optimization of datetime formatters caching along with change in default datetime formatter
+# Once there is no observed impact on performance, this feature flag can be removed.
+#
+#opensearch.experimental.optimization.datetime_formatter_caching.enabled: false
diff --git a/distribution/tools/launchers/build.gradle b/distribution/tools/launchers/build.gradle
index e75267f7c4a74..aee205a24dea3 100644
--- a/distribution/tools/launchers/build.gradle
+++ b/distribution/tools/launchers/build.gradle
@@ -39,7 +39,7 @@ dependencies {
}
base {
- archivesBaseName = 'opensearch-launchers'
+ archivesName = 'opensearch-launchers'
}
tasks.withType(CheckForbiddenApis).configureEach {
diff --git a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java
index aa3dfbe39ee96..726c381db09f6 100644
--- a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java
+++ b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java
@@ -101,15 +101,15 @@ private static String maybeShowCodeDetailsInExceptionMessages() {
}
private static String javaLocaleProviders() {
- /**
- * SPI setting is used to allow loading custom CalendarDataProvider
- * in jdk8 it has to be loaded from jre/lib/ext,
- * in jdk9+ it is already within ES project and on a classpath
- *
- * Due to internationalization enhancements in JDK 9 OpenSearch need to set the provider to COMPAT otherwise time/date
- * parsing will break in an incompatible way for some date patterns and locales.
- * //TODO COMPAT will be deprecated in at some point, see please https://bugs.openjdk.java.net/browse/JDK-8232906
- * See also: documentation in server/org.opensearch.common.time.IsoCalendarDataProvider
+ /*
+ SPI setting is used to allow loading custom CalendarDataProvider
+ in jdk8 it has to be loaded from jre/lib/ext,
+ in jdk9+ it is already within ES project and on a classpath
+
+ Due to internationalization enhancements in JDK 9 OpenSearch need to set the provider to COMPAT otherwise time/date
+ parsing will break in an incompatible way for some date patterns and locales.
+ //TODO COMPAT will be deprecated in at some point, see please https://bugs.openjdk.java.net/browse/JDK-8232906
+ See also: documentation in server/org.opensearch.common.time.IsoCalendarDataProvider
*/
return "-Djava.locale.providers=SPI,COMPAT";
}
diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle
index 2db3fef55d02e..f40fb1c4b0a9f 100644
--- a/distribution/tools/plugin-cli/build.gradle
+++ b/distribution/tools/plugin-cli/build.gradle
@@ -31,14 +31,14 @@
apply plugin: 'opensearch.build'
base {
- archivesBaseName = 'opensearch-plugin-cli'
+ archivesName = 'opensearch-plugin-cli'
}
dependencies {
compileOnly project(":server")
compileOnly project(":libs:opensearch-cli")
api "org.bouncycastle:bcpg-fips:1.0.7.1"
- api "org.bouncycastle:bc-fips:1.0.2.3"
+ api "org.bouncycastle:bc-fips:1.0.2.4"
testImplementation project(":test:framework")
testImplementation 'com.google.jimfs:jimfs:1.3.0'
testRuntimeOnly("com.google.guava:guava:${versions.guava}") {
diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1
deleted file mode 100644
index c71320050b7de..0000000000000
--- a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-da62b32cb72591f5b4d322e6ab0ce7de3247b534
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.4.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.4.jar.sha1
new file mode 100644
index 0000000000000..da37449f80d7e
--- /dev/null
+++ b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.4.jar.sha1
@@ -0,0 +1 @@
+9008d04fc13da6455e6a792935b93b629757335d
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
index 66f43b1e30d28..838d6e22a37bd 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
@@ -107,7 +107,7 @@
/**
* A command for the plugin cli to install a plugin into opensearch.
- *
+ *
* The install command takes a plugin id, which may be any of the following:
*
* An official opensearch plugin name
@@ -411,7 +411,7 @@ private String getMavenUrl(Terminal terminal, String[] coordinates, String platf
/**
* Returns {@code true} if the given url exists, and {@code false} otherwise.
- *
+ *
* The given url must be {@code https} and existing means a {@code HEAD} request returns 200.
*/
// pkg private for tests to manipulate
@@ -698,7 +698,6 @@ InputStream getPublicKey() {
/**
* Creates a URL and opens a connection.
- *
* If the URL returns a 404, {@code null} is returned, otherwise the open URL opject is returned.
*/
// pkg private for tests
diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ProgressInputStream.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ProgressInputStream.java
index 579f676631a5a..02be3dbc82a44 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ProgressInputStream.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ProgressInputStream.java
@@ -41,7 +41,7 @@
* The listener is triggered whenever a full percent is increased
* The listener is never triggered twice on the same percentage
* The listener will always return 99 percent, if the expectedTotalSize is exceeded, until it is finished
- *
+ *
* Only used by the InstallPluginCommand, thus package private here
*/
abstract class ProgressInputStream extends FilterInputStream {
diff --git a/distribution/tools/upgrade-cli/build.gradle b/distribution/tools/upgrade-cli/build.gradle
index 99824463f14f8..92c043132c021 100644
--- a/distribution/tools/upgrade-cli/build.gradle
+++ b/distribution/tools/upgrade-cli/build.gradle
@@ -10,7 +10,7 @@
apply plugin: 'opensearch.build'
base {
- archivesBaseName = 'opensearch-upgrade-cli'
+ archivesName = 'opensearch-upgrade-cli'
}
dependencies {
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.15.2.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.15.2.jar.sha1
deleted file mode 100644
index f63416ddb8ceb..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4724a65ac8e8d156a24898d50fd5dbd3642870b8
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..79ed9e0c63fc8
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
@@ -0,0 +1 @@
+dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.15.2.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.15.2.jar.sha1
deleted file mode 100644
index f16d80af8dce6..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9353b021f10c307c00328f52090de2bdb4b6ff9c
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..da00d281934b1
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
@@ -0,0 +1 @@
+3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/src/main/java/org/opensearch/upgrade/UpgradeTask.java b/distribution/tools/upgrade-cli/src/main/java/org/opensearch/upgrade/UpgradeTask.java
index b7dcbd50cf781..708f644bcdeb6 100644
--- a/distribution/tools/upgrade-cli/src/main/java/org/opensearch/upgrade/UpgradeTask.java
+++ b/distribution/tools/upgrade-cli/src/main/java/org/opensearch/upgrade/UpgradeTask.java
@@ -17,7 +17,7 @@
* An interface for an upgrade task, which in this instance is an unit of
* operation that is part of the overall upgrade process. This extends the
* {@link java.util.function.Consumer} interface.
- *
+ *
* The implementing tasks consume and instance of a tuple of {@link TaskInput}
* and {@link Terminal} and operate via side effects.
*
diff --git a/doc-tools/missing-doclet/src/main/java/org/opensearch/missingdoclet/MissingDoclet.java b/doc-tools/missing-doclet/src/main/java/org/opensearch/missingdoclet/MissingDoclet.java
index e6122e7baf91a..e1ad55fe4b60b 100644
--- a/doc-tools/missing-doclet/src/main/java/org/opensearch/missingdoclet/MissingDoclet.java
+++ b/doc-tools/missing-doclet/src/main/java/org/opensearch/missingdoclet/MissingDoclet.java
@@ -45,7 +45,7 @@
* It isn't recursive, just ignores exactly the elements you tell it.
* Has option --missing-method to apply "method" level to selected packages (fix one at a time).
* Matches package names exactly: so you'll need to list subpackages separately.
- *
+ *
* Note: This by default ignores javadoc validation on overridden methods.
*/
// Original version of this class is ported from MissingDoclet code in Lucene,
diff --git a/docs/build.gradle b/docs/build.gradle
new file mode 100644
index 0000000000000..385d5ff27433e
--- /dev/null
+++ b/docs/build.gradle
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
+
+// See please https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#deprecated_missing_project_directory
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index 7f93135c49b76..d64cd4917707c 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index f01f0a84a786a..f1d76d80bbfa3 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=bb09982fdf52718e4c7b25023d10df6d35a5fff969860bdf5a5bd27a3ab27a9e
+distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b
diff --git a/gradlew b/gradlew
index 0adc8e1a53214..1aa94a4269074 100755
--- a/gradlew
+++ b/gradlew
@@ -145,7 +145,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
- # shellcheck disable=SC3045
+ # shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
@@ -153,7 +153,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
- # shellcheck disable=SC3045
+ # shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
@@ -202,11 +202,11 @@ fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
-# Collect all arguments for the java command;
-# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
-# shell script including quotes and variable substitutions, so put them in
-# double quotes to make sure that they get re-expanded; and
-# * put everything else in single quotes, so that it's not re-expanded.
+# Collect all arguments for the java command:
+# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
+# and any embedded shellness will be escaped.
+# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
+# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
diff --git a/libs/cli/src/main/java/org/opensearch/cli/Command.java b/libs/cli/src/main/java/org/opensearch/cli/Command.java
index eed5c4ba4ee6f..cc9230bdb2282 100644
--- a/libs/cli/src/main/java/org/opensearch/cli/Command.java
+++ b/libs/cli/src/main/java/org/opensearch/cli/Command.java
@@ -162,7 +162,7 @@ protected static void exit(int status) {
/**
* Executes this command.
- *
+ *
* Any runtime user errors (like an input file that does not exist), should throw a {@link UserException}. */
protected abstract void execute(Terminal terminal, OptionSet options) throws Exception;
diff --git a/libs/cli/src/main/java/org/opensearch/cli/Terminal.java b/libs/cli/src/main/java/org/opensearch/cli/Terminal.java
index be030c18507ad..fb1097178e5a3 100644
--- a/libs/cli/src/main/java/org/opensearch/cli/Terminal.java
+++ b/libs/cli/src/main/java/org/opensearch/cli/Terminal.java
@@ -44,7 +44,7 @@
/**
* A Terminal wraps access to reading input and writing output for a cli.
- *
+ *
* The available methods are similar to those of {@link Console}, with the ability
* to read either normal text or a password, and the ability to print a line
* of text. Printing is also gated by the {@link Verbosity} of the terminal,
diff --git a/libs/common/build.gradle b/libs/common/build.gradle
index 973fe30d09842..4f89b81636420 100644
--- a/libs/common/build.gradle
+++ b/libs/common/build.gradle
@@ -14,7 +14,7 @@ import org.opensearch.gradle.info.BuildParams
apply plugin: 'opensearch.publish'
base {
- archivesBaseName = 'opensearch-common'
+ archivesName = 'opensearch-common'
}
dependencies {
diff --git a/libs/common/src/main/java/org/opensearch/bootstrap/JarHell.java b/libs/common/src/main/java/org/opensearch/bootstrap/JarHell.java
index c4ba778e7db86..fc5e364241d12 100644
--- a/libs/common/src/main/java/org/opensearch/bootstrap/JarHell.java
+++ b/libs/common/src/main/java/org/opensearch/bootstrap/JarHell.java
@@ -104,7 +104,7 @@ public static void checkJarHell(Consumer output) throws IOException, URI
/**
* Parses the classpath into an array of URLs
- * @return array of URLs
+ * @return collection of URLs
* @throws IllegalStateException if the classpath contains empty elements
*/
public static Set<URL> parseClassPath() {
@@ -114,7 +114,7 @@ public static Set parseClassPath() {
/**
* Parses the classpath into a set of URLs. For testing.
* @param classPath classpath to parse (typically the system property {@code java.class.path})
- * @return array of URLs
+ * @return collection of URLs
* @throws IllegalStateException if the classpath contains empty elements
*/
@SuppressForbidden(reason = "resolves against CWD because that is how classpaths work")
diff --git a/libs/common/src/main/java/org/opensearch/common/Booleans.java b/libs/common/src/main/java/org/opensearch/common/Booleans.java
index 2ca061820b2eb..ab7ad37e92612 100644
--- a/libs/common/src/main/java/org/opensearch/common/Booleans.java
+++ b/libs/common/src/main/java/org/opensearch/common/Booleans.java
@@ -45,30 +45,72 @@ private Booleans() {
/**
* Parses a char[] representation of a boolean value to <code>boolean</code>.
*
- * @return <code>true</code> iff the sequence of chars is "true", <code>false</code> iff the sequence of chars is "false" or the
- * provided default value iff either text is <code>null</code> or length == 0.
+ * @return <code>true</code> iff the sequence of chars is "true", <code>false</code> iff the sequence of
+ * chars is "false" or the provided default value iff either text is <code>null</code> or length == 0.
* @throws IllegalArgumentException if the string cannot be parsed to boolean.
*/
public static boolean parseBoolean(char[] text, int offset, int length, boolean defaultValue) {
- if (text == null || length == 0) {
+ if (text == null) {
return defaultValue;
- } else {
- return parseBoolean(new String(text, offset, length));
}
+
+ switch (length) {
+ case 0:
+ return defaultValue;
+ case 1:
+ case 2:
+ case 3:
+ default:
+ break;
+ case 4:
+ if (text[offset] == 't' && text[offset + 1] == 'r' && text[offset + 2] == 'u' && text[offset + 3] == 'e') {
+ return true;
+ }
+ break;
+ case 5:
+ if (text[offset] == 'f'
+ && text[offset + 1] == 'a'
+ && text[offset + 2] == 'l'
+ && text[offset + 3] == 's'
+ && text[offset + 4] == 'e') {
+ return false;
+ }
+ break;
+ }
+
+ throw new IllegalArgumentException(
+ "Failed to parse value [" + new String(text, offset, length) + "] as only [true] or [false] are allowed."
+ );
}
/**
- * returns true iff the sequence of chars is one of "true","false".
+ * Returns true iff the sequence of chars is one of "true", "false".
*
* @param text sequence to check
* @param offset offset to start
* @param length length to check
*/
public static boolean isBoolean(char[] text, int offset, int length) {
- if (text == null || length == 0) {
+ if (text == null) {
return false;
}
- return isBoolean(new String(text, offset, length));
+
+ switch (length) {
+ case 0:
+ case 1:
+ case 2:
+ case 3:
+ default:
+ return false;
+ case 4:
+ return text[offset] == 't' && text[offset + 1] == 'r' && text[offset + 2] == 'u' && text[offset + 3] == 'e';
+ case 5:
+ return text[offset] == 'f'
+ && text[offset + 1] == 'a'
+ && text[offset + 2] == 'l'
+ && text[offset + 3] == 's'
+ && text[offset + 4] == 'e';
+ }
}
public static boolean isBoolean(String value) {
@@ -91,63 +133,45 @@ public static boolean parseBoolean(String value) {
throw new IllegalArgumentException("Failed to parse value [" + value + "] as only [true] or [false] are allowed.");
}
- private static boolean hasText(CharSequence str) {
- if (str == null || str.length() == 0) {
- return false;
- }
- int strLen = str.length();
- for (int i = 0; i < strLen; i++) {
- if (!Character.isWhitespace(str.charAt(i))) {
- return true;
- }
- }
- return false;
- }
-
/**
+ * Parses a string representation of a boolean value to <code>boolean</code>.
+ * Note the subtle difference between this and {@link #parseBoolean(char[], int, int, boolean)}; this returns the
+ * default value even when the value is non-zero length containing all whitespaces (possibly overlooked, but
+ * preserving this behavior for compatibility reasons). Use {@link #parseBooleanStrict(String, boolean)} instead.
*
* @param value text to parse.
- * @param defaultValue The default value to return if the provided value is <code>null</code>.
+ * @param defaultValue The default value to return if the provided value is <code>null</code> or blank.
* @return see {@link #parseBoolean(String)}
*/
+ @Deprecated
public static boolean parseBoolean(String value, boolean defaultValue) {
- if (hasText(value)) {
- return parseBoolean(value);
- }
- return defaultValue;
- }
-
- public static Boolean parseBoolean(String value, Boolean defaultValue) {
- if (hasText(value)) {
- return parseBoolean(value);
+ if (value == null || value.isBlank()) {
+ return defaultValue;
}
- return defaultValue;
+ return parseBoolean(value);
}
- /**
- * Returns {@code false} if text is in "false", "0", "off", "no"; else, {@code true}.
- *
- * @deprecated Only kept to provide automatic upgrades for pre 6.0 indices. Use {@link #parseBoolean(String, Boolean)} instead.
- */
@Deprecated
- public static Boolean parseBooleanLenient(String value, Boolean defaultValue) {
- if (value == null) { // only for the null case we do that here!
+ public static Boolean parseBoolean(String value, Boolean defaultValue) {
+ if (value == null || value.isBlank()) {
return defaultValue;
}
- return parseBooleanLenient(value, false);
+ return parseBoolean(value);
}
/**
- * Returns {@code false} if text is in "false", "0", "off", "no"; else, {@code true}.
+ * Parses a string representation of a boolean value to <code>boolean</code>.
+ * Analogous to {@link #parseBoolean(char[], int, int, boolean)}.
*
- * @deprecated Only kept to provide automatic upgrades for pre 6.0 indices. Use {@link #parseBoolean(String, boolean)} instead.
+ * @return <code>true</code> iff the sequence of chars is "true", <code>false</code> iff the sequence of
+ * chars is "false", or the provided default value iff either text is <code>null</code> or length == 0.
+ * @throws IllegalArgumentException if the string cannot be parsed to boolean.
*/
- @Deprecated
- public static boolean parseBooleanLenient(String value, boolean defaultValue) {
- if (value == null) {
+ public static boolean parseBooleanStrict(String value, boolean defaultValue) {
+ if (value == null || value.length() == 0) {
return defaultValue;
}
- return !(value.equals("false") || value.equals("0") || value.equals("off") || value.equals("no"));
+ return parseBoolean(value);
}
/**
@@ -163,71 +187,4 @@ public static boolean isFalse(String value) {
public static boolean isTrue(String value) {
return "true".equals(value);
}
-
- /**
- * Returns {@code false} if text is in "false", "0", "off", "no"; else, {@code true}.
- *
- * @deprecated Only kept to provide automatic upgrades for pre 6.0 indices. Use {@link #parseBoolean(char[], int, int, boolean)} instead
- */
- @Deprecated
- public static boolean parseBooleanLenient(char[] text, int offset, int length, boolean defaultValue) {
- if (text == null || length == 0) {
- return defaultValue;
- }
- if (length == 1) {
- return text[offset] != '0';
- }
- if (length == 2) {
- return !(text[offset] == 'n' && text[offset + 1] == 'o');
- }
- if (length == 3) {
- return !(text[offset] == 'o' && text[offset + 1] == 'f' && text[offset + 2] == 'f');
- }
- if (length == 5) {
- return !(text[offset] == 'f'
- && text[offset + 1] == 'a'
- && text[offset + 2] == 'l'
- && text[offset + 3] == 's'
- && text[offset + 4] == 'e');
- }
- return true;
- }
-
- /**
- * returns true if the a sequence of chars is one of "true","false","on","off","yes","no","0","1"
- *
- * @param text sequence to check
- * @param offset offset to start
- * @param length length to check
- *
- * @deprecated Only kept to provide automatic upgrades for pre 6.0 indices. Use {@link #isBoolean(char[], int, int)} instead.
- */
- @Deprecated
- public static boolean isBooleanLenient(char[] text, int offset, int length) {
- if (text == null || length == 0) {
- return false;
- }
- if (length == 1) {
- return text[offset] == '0' || text[offset] == '1';
- }
- if (length == 2) {
- return (text[offset] == 'n' && text[offset + 1] == 'o') || (text[offset] == 'o' && text[offset + 1] == 'n');
- }
- if (length == 3) {
- return (text[offset] == 'o' && text[offset + 1] == 'f' && text[offset + 2] == 'f')
- || (text[offset] == 'y' && text[offset + 1] == 'e' && text[offset + 2] == 's');
- }
- if (length == 4) {
- return (text[offset] == 't' && text[offset + 1] == 'r' && text[offset + 2] == 'u' && text[offset + 3] == 'e');
- }
- if (length == 5) {
- return (text[offset] == 'f'
- && text[offset + 1] == 'a'
- && text[offset + 2] == 'l'
- && text[offset + 3] == 's'
- && text[offset + 4] == 'e');
- }
- return false;
- }
-
}
diff --git a/libs/common/src/main/java/org/opensearch/common/CheckedBiConsumer.java b/libs/common/src/main/java/org/opensearch/common/CheckedBiConsumer.java
index 50c15bb7a95a8..c2ef08e288346 100644
--- a/libs/common/src/main/java/org/opensearch/common/CheckedBiConsumer.java
+++ b/libs/common/src/main/java/org/opensearch/common/CheckedBiConsumer.java
@@ -32,13 +32,16 @@
package org.opensearch.common;
+import org.opensearch.common.annotation.PublicApi;
+
import java.util.function.BiConsumer;
/**
* A {@link BiConsumer}-like interface which allows throwing checked exceptions.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
@FunctionalInterface
public interface CheckedBiConsumer<T, U, E extends Exception> extends BiConsumer<T, U> {
void accept(T t, U u) throws E;
diff --git a/libs/common/src/main/java/org/opensearch/common/CheckedFunction.java b/libs/common/src/main/java/org/opensearch/common/CheckedFunction.java
index 9c17ad4b4ee3f..927edd1b9905a 100644
--- a/libs/common/src/main/java/org/opensearch/common/CheckedFunction.java
+++ b/libs/common/src/main/java/org/opensearch/common/CheckedFunction.java
@@ -32,6 +32,8 @@
package org.opensearch.common;
+import org.opensearch.common.annotation.PublicApi;
+
import java.util.function.Function;
/**
@@ -39,6 +41,7 @@
*
* @opensearch.api
*/
+@PublicApi(since = "1.0.0")
@FunctionalInterface
public interface CheckedFunction<T, R, E extends Exception> {
R apply(T t) throws E;
diff --git a/libs/common/src/main/java/org/opensearch/common/Explicit.java b/libs/common/src/main/java/org/opensearch/common/Explicit.java
index 66e079c461e75..da44c6fd4dcef 100644
--- a/libs/common/src/main/java/org/opensearch/common/Explicit.java
+++ b/libs/common/src/main/java/org/opensearch/common/Explicit.java
@@ -32,19 +32,22 @@
package org.opensearch.common;
+import org.opensearch.common.annotation.PublicApi;
+
import java.util.Objects;
/**
* Holds a value that is either:
* a) set implicitly e.g. through some default value
* b) set explicitly e.g. from a user selection
- *
+ *
* When merging conflicting configuration settings such as
* field mapping settings it is preferable to preserve an explicit
* choice rather than a choice made only made implicitly by defaults.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public class Explicit<T> {
private final T value;
diff --git a/libs/common/src/main/java/org/opensearch/common/LocalTimeOffset.java b/libs/common/src/main/java/org/opensearch/common/LocalTimeOffset.java
index 7e89641927ed5..eb7b331c9aa24 100644
--- a/libs/common/src/main/java/org/opensearch/common/LocalTimeOffset.java
+++ b/libs/common/src/main/java/org/opensearch/common/LocalTimeOffset.java
@@ -514,7 +514,7 @@ public boolean anyMoveBackToPreviousDay() {
* Builds an array that can be {@link Arrays#binarySearch(long[], long)}ed
* for the daylight savings time transitions.
*
- * @openearch.internal
+ * @opensearch.internal
*/
private static class TransitionArrayLookup extends AbstractManyTransitionsLookup {
private final LocalTimeOffset[] offsets;
diff --git a/libs/common/src/main/java/org/opensearch/common/Nullable.java b/libs/common/src/main/java/org/opensearch/common/Nullable.java
index c663ef863ed48..70db2a3755eba 100644
--- a/libs/common/src/main/java/org/opensearch/common/Nullable.java
+++ b/libs/common/src/main/java/org/opensearch/common/Nullable.java
@@ -32,6 +32,8 @@
package org.opensearch.common;
+import org.opensearch.common.annotation.PublicApi;
+
import javax.annotation.CheckForNull;
import javax.annotation.meta.TypeQualifierNickname;
@@ -53,5 +55,6 @@
@CheckForNull
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.PARAMETER, ElementType.FIELD, ElementType.METHOD })
+@PublicApi(since = "1.0.0")
public @interface Nullable {
}
diff --git a/libs/common/src/main/java/org/opensearch/common/SetOnce.java b/libs/common/src/main/java/org/opensearch/common/SetOnce.java
index a596b5fcdb61d..778926ce108b7 100644
--- a/libs/common/src/main/java/org/opensearch/common/SetOnce.java
+++ b/libs/common/src/main/java/org/opensearch/common/SetOnce.java
@@ -35,7 +35,7 @@
* A convenient class which offers a semi-immutable object wrapper implementation which allows one
* to set the value of an object exactly once, and retrieve it many times. If {@link #set(Object)}
* is called more than once, {@link AlreadySetException} is thrown and the operation will fail.
- *
+ *
* This is borrowed from lucene's experimental API. It is not reused to eliminate the dependency
* on lucene core for such a simple (standalone) utility class that may change beyond OpenSearch needs.
*
diff --git a/libs/common/src/main/java/org/opensearch/common/SuppressForbidden.java b/libs/common/src/main/java/org/opensearch/common/SuppressForbidden.java
index 1f1b28bcf6759..c479d7bd98e8a 100644
--- a/libs/common/src/main/java/org/opensearch/common/SuppressForbidden.java
+++ b/libs/common/src/main/java/org/opensearch/common/SuppressForbidden.java
@@ -31,6 +31,8 @@
package org.opensearch.common;
+import org.opensearch.common.annotation.PublicApi;
+
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -43,6 +45,7 @@
*/
@Retention(RetentionPolicy.CLASS)
@Target({ ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE })
+@PublicApi(since = "1.0.0")
public @interface SuppressForbidden {
String reason();
}
diff --git a/libs/common/src/main/java/org/opensearch/common/TriFunction.java b/libs/common/src/main/java/org/opensearch/common/TriFunction.java
index 7b1bbece68680..8594e8e2cd0c9 100644
--- a/libs/common/src/main/java/org/opensearch/common/TriFunction.java
+++ b/libs/common/src/main/java/org/opensearch/common/TriFunction.java
@@ -32,6 +32,8 @@
package org.opensearch.common;
+import org.opensearch.common.annotation.PublicApi;
+
/**
* Represents a function that accepts three arguments and produces a result.
*
@@ -40,8 +42,9 @@
* @param the type of the third argument
* @param the return type
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
@FunctionalInterface
public interface TriFunction<S, T, U, R> {
/**
diff --git a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
new file mode 100644
index 0000000000000..1864aec4aa951
--- /dev/null
+++ b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
@@ -0,0 +1,369 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.Nullable;
+import org.opensearch.common.annotation.DeprecatedApi;
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.common.annotation.PublicApi;
+
+import javax.annotation.processing.AbstractProcessor;
+import javax.annotation.processing.RoundEnvironment;
+import javax.annotation.processing.SupportedAnnotationTypes;
+import javax.lang.model.AnnotatedConstruct;
+import javax.lang.model.SourceVersion;
+import javax.lang.model.element.AnnotationMirror;
+import javax.lang.model.element.Element;
+import javax.lang.model.element.ElementKind;
+import javax.lang.model.element.ExecutableElement;
+import javax.lang.model.element.Modifier;
+import javax.lang.model.element.PackageElement;
+import javax.lang.model.element.TypeElement;
+import javax.lang.model.element.TypeParameterElement;
+import javax.lang.model.element.VariableElement;
+import javax.lang.model.type.ArrayType;
+import javax.lang.model.type.DeclaredType;
+import javax.lang.model.type.ReferenceType;
+import javax.lang.model.type.TypeMirror;
+import javax.lang.model.type.TypeVariable;
+import javax.lang.model.type.WildcardType;
+import javax.tools.Diagnostic.Kind;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * The annotation processor for API related annotations: {@link DeprecatedApi}, {@link ExperimentalApi},
+ * {@link InternalApi} and {@link PublicApi}.
+ *
+ * The checks are built on top of the following rules:
+ *
+ * introspect each type annotated with {@link PublicApi}, {@link DeprecatedApi} or {@link ExperimentalApi},
+ * filtering out package-private declarations
+ * make sure those leak only {@link PublicApi}, {@link DeprecatedApi} or {@link ExperimentalApi} types as well (exceptions,
+ * method return values, method arguments, method generic type arguments, class generic type arguments, annotations)
+ * recursively follow the type introspection chains to enforce the rules down the line
+ *
+ */
+@InternalApi
+@SupportedAnnotationTypes("org.opensearch.common.annotation.*")
+public class ApiAnnotationProcessor extends AbstractProcessor {
+ private static final String OPTION_CONTINUE_ON_FAILING_CHECKS = "continueOnFailingChecks";
+ private static final String OPENSEARCH_PACKAGE = "org.opensearch";
+
+ private final Set reported = new HashSet<>();
+ private final Set processed = new HashSet<>();
+ private Kind reportFailureAs = Kind.ERROR;
+
+ @Override
+ public SourceVersion getSupportedSourceVersion() {
+ return SourceVersion.latest();
+ }
+
+ @Override
+ public Set<String> getSupportedOptions() {
+ return Set.of(OPTION_CONTINUE_ON_FAILING_CHECKS);
+ }
+
+ @Override
+ public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment round) {
+ processingEnv.getMessager().printMessage(Kind.NOTE, "Processing OpenSearch Api annotations");
+
+ if (processingEnv.getOptions().containsKey(OPTION_CONTINUE_ON_FAILING_CHECKS) == true) {
+ reportFailureAs = Kind.NOTE;
+ }
+
+ final Set<? extends Element> elements = round.getElementsAnnotatedWithAny(
+ Set.of(PublicApi.class, ExperimentalApi.class, DeprecatedApi.class)
+ );
+
+ for (var element : elements) {
+ if (!checkPackage(element)) {
+ continue;
+ }
+
+ // Skip all not-public elements
+ checkPublicVisibility(null, element);
+
+ if (element instanceof TypeElement) {
+ process((TypeElement) element);
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Check top level executable element
+ * @param executable top level executable element
+ * @param enclosing enclosing element
+ */
+ private void process(ExecutableElement executable, Element enclosing) {
+ if (!inspectable(executable)) {
+ return;
+ }
+
+ // The executable element should not be internal (unless constructor for injectable core component)
+ checkNotInternal(enclosing, executable);
+
+ // Check this elements annotations
+ for (final AnnotationMirror annotation : executable.getAnnotationMirrors()) {
+ final Element element = annotation.getAnnotationType().asElement();
+ if (inspectable(element)) {
+ checkNotInternal(executable.getEnclosingElement(), element);
+ checkPublic(executable.getEnclosingElement(), element);
+ }
+ }
+
+ // Process method return types
+ final TypeMirror returnType = executable.getReturnType();
+ if (returnType instanceof ReferenceType) {
+ process(executable, (ReferenceType) returnType);
+ }
+
+ // Process method thrown types
+ for (final TypeMirror thrownType : executable.getThrownTypes()) {
+ if (thrownType instanceof ReferenceType) {
+ process(executable, (ReferenceType) thrownType);
+ }
+ }
+
+ // Process method type parameters
+ for (final TypeParameterElement typeParameter : executable.getTypeParameters()) {
+ for (final TypeMirror boundType : typeParameter.getBounds()) {
+ if (boundType instanceof ReferenceType) {
+ process(executable, (ReferenceType) boundType);
+ }
+ }
+ }
+
+ // Process method arguments
+ for (final VariableElement parameter : executable.getParameters()) {
+ final TypeMirror parameterType = parameter.asType();
+ if (parameterType instanceof ReferenceType) {
+ process(executable, (ReferenceType) parameterType);
+ }
+ }
+ }
+
+ /**
+ * Check wildcard type bounds referred by an element
+ * @param executable element
+ * @param type wildcard type
+ */
+ private void process(ExecutableElement executable, WildcardType type) {
+ if (type.getExtendsBound() instanceof ReferenceType) {
+ process(executable, (ReferenceType) type.getExtendsBound());
+ }
+
+ if (type.getSuperBound() instanceof ReferenceType) {
+ process(executable, (ReferenceType) type.getSuperBound());
+ }
+ }
+
+ /**
+ * Check reference type bounds referred by an executable element
+ * @param executable executable element
+ * @param ref reference type
+ */
+ private void process(ExecutableElement executable, ReferenceType ref) {
+ // The element has been processed already
+ if (processed.add(ref) == false) {
+ return;
+ }
+
+ if (ref instanceof DeclaredType) {
+ final DeclaredType declaredType = (DeclaredType) ref;
+
+ final Element element = declaredType.asElement();
+ if (inspectable(element)) {
+ checkNotInternal(executable.getEnclosingElement(), element);
+ checkPublic(executable.getEnclosingElement(), element);
+ }
+
+ for (final TypeMirror type : declaredType.getTypeArguments()) {
+ if (type instanceof ReferenceType) {
+ process(executable, (ReferenceType) type);
+ } else if (type instanceof WildcardType) {
+ process(executable, (WildcardType) type);
+ }
+ }
+ } else if (ref instanceof ArrayType) {
+ final TypeMirror componentType = ((ArrayType) ref).getComponentType();
+ if (componentType instanceof ReferenceType) {
+ process(executable, (ReferenceType) componentType);
+ }
+ } else if (ref instanceof TypeVariable) {
+ final TypeVariable typeVariable = (TypeVariable) ref;
+ if (typeVariable.getUpperBound() instanceof ReferenceType) {
+ process(executable, (ReferenceType) typeVariable.getUpperBound());
+ }
+ if (typeVariable.getLowerBound() instanceof ReferenceType) {
+ process(executable, (ReferenceType) typeVariable.getLowerBound());
+ }
+ }
+
+        // Check this element's annotations
+ for (final AnnotationMirror annotation : ref.getAnnotationMirrors()) {
+ final Element element = annotation.getAnnotationType().asElement();
+ if (inspectable(element)) {
+ checkNotInternal(executable.getEnclosingElement(), element);
+ checkPublic(executable.getEnclosingElement(), element);
+ }
+ }
+ }
+
+ /**
+ * Check if a particular executable element should be inspected or not
+ * @param executable executable element to inspect
+ * @return {@code true} if a particular executable element should be inspected, {@code false} otherwise
+ */
+ private boolean inspectable(ExecutableElement executable) {
+ // The constructors for public APIs could use non-public APIs when those are supposed to be only
+ // consumed (not instantiated) by external consumers.
+ return executable.getKind() != ElementKind.CONSTRUCTOR && executable.getModifiers().contains(Modifier.PUBLIC);
+ }
+
+ /**
+ * Check if a particular element should be inspected or not
+ * @param element element to inspect
+ * @return {@code true} if a particular element should be inspected, {@code false} otherwise
+ */
+ private boolean inspectable(Element element) {
+ final PackageElement pckg = processingEnv.getElementUtils().getPackageOf(element);
+ return pckg.getQualifiedName().toString().startsWith(OPENSEARCH_PACKAGE);
+ }
+
+ /**
+     * Check if a particular element belongs to OpenSearch managed packages
+ * @param element element to inspect
+     * @return {@code true} if a particular element belongs to OpenSearch managed packages, {@code false} otherwise
+ */
+ private boolean checkPackage(Element element) {
+ // The element was reported already
+ if (reported.contains(element)) {
+ return false;
+ }
+
+ final PackageElement pckg = processingEnv.getElementUtils().getPackageOf(element);
+ final boolean belongsToOpenSearch = pckg.getQualifiedName().toString().startsWith(OPENSEARCH_PACKAGE);
+
+ if (!belongsToOpenSearch) {
+ reported.add(element);
+
+ processingEnv.getMessager()
+ .printMessage(
+ reportFailureAs,
+ "The type "
+ + element
+ + " is not residing in "
+ + OPENSEARCH_PACKAGE
+ + ".* package "
+ + "and should not be annotated as OpenSearch APIs."
+ );
+ }
+
+ return belongsToOpenSearch;
+ }
+
+ /**
+ * Check the fields, methods, constructors, and member types that are directly
+ * declared in this class or interface.
+ * @param element class or interface
+ */
+ private void process(Element element) {
+ // Check the fields, methods, constructors, and member types that are directly
+ // declared in this class or interface.
+ for (final Element enclosed : element.getEnclosedElements()) {
+ // Skip all not-public elements
+ if (!enclosed.getModifiers().contains(Modifier.PUBLIC)) {
+ continue;
+ }
+
+ if (enclosed instanceof ExecutableElement) {
+ process((ExecutableElement) enclosed, element);
+ }
+ }
+ }
+
+ /**
+ * Check if element is public and annotated with {@link PublicApi}, {@link DeprecatedApi} or {@link ExperimentalApi}
+ * @param referencedBy the referrer for the element
+ * @param element element to check
+ */
+ private void checkPublic(@Nullable Element referencedBy, final Element element) {
+ // The element was reported already
+ if (reported.contains(element)) {
+ return;
+ }
+
+ checkPublicVisibility(referencedBy, element);
+
+ if (element.getAnnotation(PublicApi.class) == null
+ && element.getAnnotation(ExperimentalApi.class) == null
+ && element.getAnnotation(DeprecatedApi.class) == null) {
+ reported.add(element);
+
+ processingEnv.getMessager()
+ .printMessage(
+ reportFailureAs,
+ "The element "
+ + element
+ + " is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi"
+ + ((referencedBy != null) ? " (referenced by " + referencedBy + ") " : "")
+ );
+ }
+ }
+
+ /**
+ * Check if element has public visibility (following Java visibility rules)
+ * @param referencedBy the referrer for the element
+ * @param element element to check
+ */
+ private void checkPublicVisibility(Element referencedBy, final Element element) {
+ if (!element.getModifiers().contains(Modifier.PUBLIC) && !element.getModifiers().contains(Modifier.PROTECTED)) {
+ reported.add(element);
+
+ processingEnv.getMessager()
+ .printMessage(
+ reportFailureAs,
+ "The element "
+ + element
+ + " is part of the public APIs but does not have public or protected visibility"
+ + ((referencedBy != null) ? " (referenced by " + referencedBy + ") " : "")
+ );
+ }
+ }
+
+ /**
+ * Check if element is not annotated with {@link InternalApi}
+ * @param referencedBy the referrer for the element
+ * @param element element to check
+ */
+ private void checkNotInternal(@Nullable Element referencedBy, final Element element) {
+ // The element was reported already
+ if (reported.contains(element)) {
+ return;
+ }
+
+ if (element.getAnnotation(InternalApi.class) != null) {
+ reported.add(element);
+
+ processingEnv.getMessager()
+ .printMessage(
+ reportFailureAs,
+ "The element "
+ + element
+ + " is part of the public APIs but is marked as @InternalApi"
+ + ((referencedBy != null) ? " (referenced by " + referencedBy + ") " : "")
+ );
+ }
+ }
+}
diff --git a/libs/common/src/main/java/org/opensearch/common/annotation/processor/package-info.java b/libs/common/src/main/java/org/opensearch/common/annotation/processor/package-info.java
new file mode 100644
index 0000000000000..fa23e4a7addce
--- /dev/null
+++ b/libs/common/src/main/java/org/opensearch/common/annotation/processor/package-info.java
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Classes related to OpenSearch API annotation processing
+ *
+ * @opensearch.internal
+ */
+@org.opensearch.common.annotation.InternalApi
+package org.opensearch.common.annotation.processor;
diff --git a/libs/common/src/main/java/org/opensearch/common/collect/Iterators.java b/libs/common/src/main/java/org/opensearch/common/collect/Iterators.java
index c7e7ae6a44a21..9b64932356c10 100644
--- a/libs/common/src/main/java/org/opensearch/common/collect/Iterators.java
+++ b/libs/common/src/main/java/org/opensearch/common/collect/Iterators.java
@@ -41,6 +41,15 @@
* @opensearch.internal
*/
public class Iterators {
+
+ /**
+ * Concat iterators
+ *
+ * @param iterators the iterators to concat
+     * @param <T> the type of iterator
+ * @return a new {@link ConcatenatedIterator}
+ * @throws NullPointerException if iterators is null
+ */
    public static <T> Iterator<T> concat(Iterator<? extends T>... iterators) {
if (iterators == null) {
throw new NullPointerException("iterators");
@@ -71,6 +80,11 @@ static class ConcatenatedIterator implements Iterator {
this.iterators = iterators;
}
+ /**
+ * Returns {@code true} if the iteration has more elements. (In other words, returns {@code true} if {@link #next} would return an
+ * element rather than throwing an exception.)
+ * @return {@code true} if the iteration has more elements
+ */
@Override
public boolean hasNext() {
boolean hasNext = false;
@@ -81,6 +95,11 @@ public boolean hasNext() {
return hasNext;
}
+ /**
+ * Returns the next element in the iteration.
+ * @return the next element in the iteration
+ * @throws NoSuchElementException if the iteration has no more elements
+ */
@Override
public T next() {
if (!hasNext()) {
diff --git a/libs/common/src/main/java/org/opensearch/common/collect/Tuple.java b/libs/common/src/main/java/org/opensearch/common/collect/Tuple.java
index d0b94536b0729..a5d97dcd85ef7 100644
--- a/libs/common/src/main/java/org/opensearch/common/collect/Tuple.java
+++ b/libs/common/src/main/java/org/opensearch/common/collect/Tuple.java
@@ -32,13 +32,15 @@
package org.opensearch.common.collect;
+import org.opensearch.common.annotation.PublicApi;
+
/**
* Java 9 Tuple
- *
* todo: deprecate and remove w/ min jdk upgrade to 11?
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public class Tuple<V1, V2> {
     public static <V1, V2> Tuple<V1, V2> tuple(V1 v1, V2 v2) {
diff --git a/libs/common/src/main/java/org/opensearch/common/crypto/MasterKeyProvider.java b/libs/common/src/main/java/org/opensearch/common/crypto/MasterKeyProvider.java
index 8afa48eb92c0f..31d2dcd0dba3d 100644
--- a/libs/common/src/main/java/org/opensearch/common/crypto/MasterKeyProvider.java
+++ b/libs/common/src/main/java/org/opensearch/common/crypto/MasterKeyProvider.java
@@ -7,12 +7,17 @@
*/
package org.opensearch.common.crypto;
+import org.opensearch.common.annotation.ExperimentalApi;
+
import java.io.Closeable;
import java.util.Map;
/**
* Master key provider responsible for management of master keys.
+ *
+ * @opensearch.experimental
*/
+@ExperimentalApi
public interface MasterKeyProvider extends Closeable {
/**
diff --git a/libs/common/src/main/java/org/opensearch/common/io/InputStreamContainer.java b/libs/common/src/main/java/org/opensearch/common/io/InputStreamContainer.java
index eb8a4e1382497..3095336338f7f 100644
--- a/libs/common/src/main/java/org/opensearch/common/io/InputStreamContainer.java
+++ b/libs/common/src/main/java/org/opensearch/common/io/InputStreamContainer.java
@@ -8,13 +8,16 @@
package org.opensearch.common.io;
+import org.opensearch.common.annotation.ExperimentalApi;
+
import java.io.InputStream;
/**
* Model composed of an input stream and the total content length of the stream
*
- * @opensearch.internal
+ * @opensearch.experimental
*/
+@ExperimentalApi
public class InputStreamContainer {
private final InputStream inputStream;
diff --git a/libs/common/src/main/java/org/opensearch/common/io/PathUtils.java b/libs/common/src/main/java/org/opensearch/common/io/PathUtils.java
index b3526859933ec..ed8d50892b74a 100644
--- a/libs/common/src/main/java/org/opensearch/common/io/PathUtils.java
+++ b/libs/common/src/main/java/org/opensearch/common/io/PathUtils.java
@@ -93,7 +93,7 @@ public static Path get(URI uri) {
/**
* Tries to resolve the given path against the list of available roots.
- *
+ *
* If path starts with one of the listed roots, it returned back by this method, otherwise null is returned.
*/
public static Path get(Path[] roots, String path) {
@@ -109,7 +109,7 @@ public static Path get(Path[] roots, String path) {
/**
* Tries to resolve the given file uri against the list of available roots.
- *
+ *
* If uri starts with one of the listed roots, it returned back by this method, otherwise null is returned.
*/
public static Path get(Path[] roots, URI uri) {
diff --git a/libs/common/src/main/java/org/opensearch/common/lease/Releasable.java b/libs/common/src/main/java/org/opensearch/common/lease/Releasable.java
index 30bea6185febc..dfc4fefb9ee55 100644
--- a/libs/common/src/main/java/org/opensearch/common/lease/Releasable.java
+++ b/libs/common/src/main/java/org/opensearch/common/lease/Releasable.java
@@ -32,13 +32,16 @@
package org.opensearch.common.lease;
+import org.opensearch.common.annotation.PublicApi;
+
import java.io.Closeable;
/**
* Specialization of {@link AutoCloseable} for calls that might not throw a checked exception.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public interface Releasable extends Closeable {
@Override
diff --git a/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
index a4fbc6cb65b0d..0f289c09bbae2 100644
--- a/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
+++ b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
@@ -368,7 +368,7 @@ public static InetAddress forString(String ipString) {
/**
* Convert a byte array into an InetAddress.
- *
+ *
* {@link InetAddress#getByAddress} is documented as throwing a checked
* exception "if IP address is of illegal length." We replace it with
* an unchecked exception, for use by callers who already know that addr
@@ -423,7 +423,7 @@ public static Tuple<InetAddress, Integer> parseCidr(String maskedAddress) {
/**
* Given an address and prefix length, returns the string representation of the range in CIDR notation.
- *
+ *
* See {@link #toAddrString} for details on how the address is represented.
*/
public static String toCidrString(InetAddress address, int prefixLength) {
diff --git a/libs/common/src/main/java/org/opensearch/common/round/BidirectionalLinearSearcher.java b/libs/common/src/main/java/org/opensearch/common/round/BidirectionalLinearSearcher.java
new file mode 100644
index 0000000000000..5c3dcf2bd4708
--- /dev/null
+++ b/libs/common/src/main/java/org/opensearch/common/round/BidirectionalLinearSearcher.java
@@ -0,0 +1,59 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+/**
+ * It uses linear search on a sorted array of pre-computed round-down points.
+ * For small inputs (≤ 64 elements), this can be much faster than binary search as it avoids the penalty of
+ * branch mispredictions and pipeline stalls, and accesses memory sequentially.
+ *
+ *
+ * It uses "meet in the middle" linear search to avoid the worst case scenario when the desired element is present
+ * at either side of the array. This is helpful for time-series data where velocity increases over time, so more
+ * documents are likely to find a greater timestamp which is likely to be present on the right end of the array.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+class BidirectionalLinearSearcher implements Roundable {
+ private final long[] ascending;
+ private final long[] descending;
+
+ BidirectionalLinearSearcher(long[] values, int size) {
+ if (size <= 0) {
+ throw new IllegalArgumentException("at least one value must be present");
+ }
+
+ int len = (size + 1) >>> 1; // rounded-up to handle odd number of values
+ ascending = new long[len];
+ descending = new long[len];
+
+ for (int i = 0; i < len; i++) {
+ ascending[i] = values[i];
+ descending[i] = values[size - i - 1];
+ }
+ }
+
+ @Override
+ public long floor(long key) {
+ int i = 0;
+ for (; i < ascending.length; i++) {
+ if (descending[i] <= key) {
+ return descending[i];
+ }
+ if (ascending[i] > key) {
+ assert i > 0 : "key must be greater than or equal to " + ascending[0];
+ return ascending[i - 1];
+ }
+ }
+ return ascending[i - 1];
+ }
+}
diff --git a/libs/common/src/main/java/org/opensearch/common/round/BinarySearcher.java b/libs/common/src/main/java/org/opensearch/common/round/BinarySearcher.java
new file mode 100644
index 0000000000000..b9d76945115ed
--- /dev/null
+++ b/libs/common/src/main/java/org/opensearch/common/round/BinarySearcher.java
@@ -0,0 +1,43 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+import java.util.Arrays;
+
+/**
+ * It uses binary search on a sorted array of pre-computed round-down points.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+class BinarySearcher implements Roundable {
+ private final long[] values;
+ private final int size;
+
+ BinarySearcher(long[] values, int size) {
+ if (size <= 0) {
+ throw new IllegalArgumentException("at least one value must be present");
+ }
+
+ this.values = values;
+ this.size = size;
+ }
+
+ @Override
+ public long floor(long key) {
+ int idx = Arrays.binarySearch(values, 0, size, key);
+ assert idx != -1 : "key must be greater than or equal to " + values[0];
+ if (idx < 0) {
+ idx = -2 - idx;
+ }
+ return values[idx];
+ }
+}
diff --git a/libs/common/src/main/java/org/opensearch/common/round/Roundable.java b/libs/common/src/main/java/org/opensearch/common/round/Roundable.java
new file mode 100644
index 0000000000000..ae6f9b787c1e9
--- /dev/null
+++ b/libs/common/src/main/java/org/opensearch/common/round/Roundable.java
@@ -0,0 +1,28 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+/**
+ * Interface to round-off values.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+@FunctionalInterface
+public interface Roundable {
+ /**
+ * Returns the greatest lower bound of the given key.
+ * In other words, it returns the largest value such that {@code value <= key}.
+ * @param key to floor
+ * @return the floored value
+ */
+ long floor(long key);
+}
diff --git a/libs/common/src/main/java/org/opensearch/common/round/RoundableFactory.java b/libs/common/src/main/java/org/opensearch/common/round/RoundableFactory.java
new file mode 100644
index 0000000000000..b7422694c3013
--- /dev/null
+++ b/libs/common/src/main/java/org/opensearch/common/round/RoundableFactory.java
@@ -0,0 +1,39 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+/**
+ * Factory class to create and return the fastest implementation of {@link Roundable}.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+public final class RoundableFactory {
+ /**
+ * The maximum limit up to which linear search is used, otherwise binary search is used.
+ * This is because linear search is much faster on small arrays.
+ * Benchmark results: PR #9727
+ */
+ private static final int LINEAR_SEARCH_MAX_SIZE = 64;
+
+ private RoundableFactory() {}
+
+ /**
+ * Creates and returns the fastest implementation of {@link Roundable}.
+ */
+ public static Roundable create(long[] values, int size) {
+ if (size <= LINEAR_SEARCH_MAX_SIZE) {
+ return new BidirectionalLinearSearcher(values, size);
+ } else {
+ return new BinarySearcher(values, size);
+ }
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/http/package-info.java b/libs/common/src/main/java/org/opensearch/common/round/package-info.java
similarity index 72%
rename from libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/http/package-info.java
rename to libs/common/src/main/java/org/opensearch/common/round/package-info.java
index 9feb862a4e010..e79c4017de31b 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/http/package-info.java
+++ b/libs/common/src/main/java/org/opensearch/common/round/package-info.java
@@ -7,6 +7,6 @@
*/
/**
- * Contains No-op implementations
+ * Contains classes to round-off values.
*/
-package org.opensearch.telemetry.tracing.http;
+package org.opensearch.common.round;
diff --git a/libs/common/src/main/java/org/opensearch/common/unit/TimeValue.java b/libs/common/src/main/java/org/opensearch/common/unit/TimeValue.java
index a3fcffb1d6a4c..30ed5bf63a748 100644
--- a/libs/common/src/main/java/org/opensearch/common/unit/TimeValue.java
+++ b/libs/common/src/main/java/org/opensearch/common/unit/TimeValue.java
@@ -224,10 +224,10 @@ public double getDaysFrac() {
/**
* Returns a {@link String} representation of the current {@link TimeValue}.
- *
+ *
* Note that this method might produce fractional time values (ex 1.6m) which cannot be
* parsed by method like {@link TimeValue#parse(String, String, String, String)}.
- *
+ *
* Also note that the maximum string value that will be generated is
* {@code 106751.9d} due to the way that values are internally converted
* to nanoseconds (106751.9 days is Long.MAX_VALUE nanoseconds)
@@ -239,12 +239,12 @@ public String toString() {
/**
* Returns a {@link String} representation of the current {@link TimeValue}.
- *
+ *
* Note that this method might produce fractional time values (ex 1.6m) which cannot be
* parsed by method like {@link TimeValue#parse(String, String, String, String)}. The number of
* fractional decimals (up to 10 maximum) are truncated to the number of fraction pieces
* specified.
- *
+ *
* Also note that the maximum string value that will be generated is
* {@code 106751.9d} due to the way that values are internally converted
* to nanoseconds (106751.9 days is Long.MAX_VALUE nanoseconds)
diff --git a/libs/common/src/main/java/org/opensearch/common/util/BitMixer.java b/libs/common/src/main/java/org/opensearch/common/util/BitMixer.java
index 8762217916c7a..d6ea4fa359df3 100644
--- a/libs/common/src/main/java/org/opensearch/common/util/BitMixer.java
+++ b/libs/common/src/main/java/org/opensearch/common/util/BitMixer.java
@@ -25,9 +25,9 @@
/**
* Bit mixing utilities from carrotsearch.hppc.
- *
+ *
* Licensed under ALv2. This is pulled in directly to avoid a full hppc dependency.
- *
+ *
* The purpose of these methods is to evenly distribute key space over int32
* range.
*/
@@ -111,7 +111,7 @@ public static int mix32(int k) {
/**
* Computes David Stafford variant 9 of 64bit mix function (MH3 finalization step,
* with different shifts and constants).
- *
+ *
* Variant 9 is picked because it contains two 32-bit shifts which could be possibly
* optimized into better machine code.
*
diff --git a/libs/common/src/main/resources/META-INF/services/javax.annotation.processing.Processor b/libs/common/src/main/resources/META-INF/services/javax.annotation.processing.Processor
new file mode 100644
index 0000000000000..c4e4dfed864f2
--- /dev/null
+++ b/libs/common/src/main/resources/META-INF/services/javax.annotation.processing.Processor
@@ -0,0 +1,12 @@
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
+#
+
+org.opensearch.common.annotation.processor.ApiAnnotationProcessor
\ No newline at end of file
diff --git a/server/src/test/java/org/opensearch/common/BooleansTests.java b/libs/common/src/test/java/org/opensearch/common/BooleansTests.java
similarity index 58%
rename from server/src/test/java/org/opensearch/common/BooleansTests.java
rename to libs/common/src/test/java/org/opensearch/common/BooleansTests.java
index 7e4a0ad8e456b..578ec742d126d 100644
--- a/server/src/test/java/org/opensearch/common/BooleansTests.java
+++ b/libs/common/src/test/java/org/opensearch/common/BooleansTests.java
@@ -34,10 +34,7 @@
import org.opensearch.test.OpenSearchTestCase;
-import java.util.Locale;
-
import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
public class BooleansTests extends OpenSearchTestCase {
private static final String[] NON_BOOLEANS = new String[] {
@@ -81,8 +78,23 @@ public void testParseBooleanWithFallback() {
assertFalse(Booleans.parseBoolean(null, Boolean.FALSE));
assertTrue(Booleans.parseBoolean(null, Boolean.TRUE));
+ assertFalse(Booleans.parseBoolean("", false));
+ assertTrue(Booleans.parseBoolean("", true));
+ assertNull(Booleans.parseBoolean("", null));
+ assertFalse(Booleans.parseBoolean("", Boolean.FALSE));
+ assertTrue(Booleans.parseBoolean("", Boolean.TRUE));
+
+ assertFalse(Booleans.parseBoolean(" \t\n", false));
+ assertTrue(Booleans.parseBoolean(" \t\n", true));
+ assertNull(Booleans.parseBoolean(" \t\n", null));
+ assertFalse(Booleans.parseBoolean(" \t\n", Boolean.FALSE));
+ assertTrue(Booleans.parseBoolean(" \t\n", Boolean.TRUE));
+
assertTrue(Booleans.parseBoolean("true", randomFrom(Boolean.TRUE, Boolean.FALSE, null)));
assertFalse(Booleans.parseBoolean("false", randomFrom(Boolean.TRUE, Boolean.FALSE, null)));
+
+ assertTrue(Booleans.parseBoolean(new char[0], 0, 0, true));
+ assertFalse(Booleans.parseBoolean(new char[0], 0, 0, false));
}
public void testParseNonBooleanWithFallback() {
@@ -109,56 +121,12 @@ public void testParseNonBoolean() {
}
}
- public void testIsBooleanLenient() {
- String[] booleans = new String[] { "true", "false", "on", "off", "yes", "no", "0", "1" };
- String[] notBooleans = new String[] { "11", "00", "sdfsdfsf", "F", "T" };
- assertThat(Booleans.isBooleanLenient(null, 0, 1), is(false));
-
- for (String b : booleans) {
- String t = "prefix" + b + "suffix";
- assertTrue(
- "failed to recognize [" + b + "] as boolean",
- Booleans.isBooleanLenient(t.toCharArray(), "prefix".length(), b.length())
- );
- }
-
- for (String nb : notBooleans) {
- String t = "prefix" + nb + "suffix";
- assertFalse("recognized [" + nb + "] as boolean", Booleans.isBooleanLenient(t.toCharArray(), "prefix".length(), nb.length()));
- }
- }
-
- public void testParseBooleanLenient() {
- assertThat(Booleans.parseBooleanLenient(randomFrom("true", "on", "yes", "1"), randomBoolean()), is(true));
- assertThat(Booleans.parseBooleanLenient(randomFrom("false", "off", "no", "0"), randomBoolean()), is(false));
- assertThat(Booleans.parseBooleanLenient(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT), randomBoolean()), is(true));
- assertThat(Booleans.parseBooleanLenient(null, false), is(false));
- assertThat(Booleans.parseBooleanLenient(null, true), is(true));
-
- assertThat(
- Booleans.parseBooleanLenient(randomFrom("true", "on", "yes", "1"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)),
- is(true)
- );
- assertThat(
- Booleans.parseBooleanLenient(randomFrom("false", "off", "no", "0"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)),
- is(false)
- );
- assertThat(
- Booleans.parseBooleanLenient(
- randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT),
- randomFrom(Boolean.TRUE, Boolean.FALSE, null)
- ),
- is(true)
- );
- assertThat(Booleans.parseBooleanLenient(null, Boolean.FALSE), is(false));
- assertThat(Booleans.parseBooleanLenient(null, Boolean.TRUE), is(true));
- assertThat(Booleans.parseBooleanLenient(null, null), nullValue());
-
- char[] chars = randomFrom("true", "on", "yes", "1").toCharArray();
- assertThat(Booleans.parseBooleanLenient(chars, 0, chars.length, randomBoolean()), is(true));
- chars = randomFrom("false", "off", "no", "0").toCharArray();
- assertThat(Booleans.parseBooleanLenient(chars, 0, chars.length, randomBoolean()), is(false));
- chars = randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT).toCharArray();
- assertThat(Booleans.parseBooleanLenient(chars, 0, chars.length, randomBoolean()), is(true));
+ public void testParseBooleanStrict() {
+ assertTrue(Booleans.parseBooleanStrict("true", false));
+ assertFalse(Booleans.parseBooleanStrict("false", true));
+ assertTrue(Booleans.parseBooleanStrict(null, true));
+ assertFalse(Booleans.parseBooleanStrict("", false));
+ expectThrows(IllegalArgumentException.class, () -> Booleans.parseBooleanStrict("foobar", false));
+ expectThrows(IllegalArgumentException.class, () -> Booleans.parseBooleanStrict(" \t\n", false));
}
}
diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
new file mode 100644
index 0000000000000..df04709458b29
--- /dev/null
+++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
@@ -0,0 +1,476 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.test.OpenSearchTestCase;
+
+import javax.tools.Diagnostic;
+
+import static org.opensearch.common.annotation.processor.CompilerSupport.HasDiagnostic.matching;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.instanceOf;
+
+@SuppressWarnings("deprecation")
+public class ApiAnnotationProcessorTests extends OpenSearchTestCase implements CompilerSupport {
+ public void testPublicApiMethodArgumentNotAnnotated() {
+ final CompilerResult result = compile("PublicApiMethodArgumentNotAnnotated.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotated)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodArgumentNotAnnotatedGenerics() {
+ final CompilerResult result = compile("PublicApiMethodArgumentNotAnnotatedGenerics.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedGenerics)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodThrowsNotAnnotated() {
+ final CompilerResult result = compile("PublicApiMethodThrowsNotAnnotated.java", "PublicApiAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodThrowsNotAnnotated)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodArgumentNotAnnotatedPackagePrivate() {
+ final CompilerResult result = compile("PublicApiMethodArgumentNotAnnotatedPackagePrivate.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(4));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but does not have public or protected visibility "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedPackagePrivate)"
+ )
+ )
+ )
+ );
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedPackagePrivate)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodArgumentAnnotatedPackagePrivate() {
+ final CompilerResult result = compile("PublicApiMethodArgumentAnnotatedPackagePrivate.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.AnnotatedPackagePrivate is part of the public APIs but does not have public or protected visibility "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentAnnotatedPackagePrivate)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiWithInternalApiMethod() {
+ final CompilerResult result = compile("PublicApiWithInternalApiMethod.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element method() is part of the public APIs but is marked as @InternalApi (referenced by org.opensearch.common.annotation.processor.PublicApiWithInternalApiMethod)"
+ )
+ )
+ )
+ );
+ }
+
+ /**
+ * The constructor arguments have relaxed semantics at the moment: those could be not annotated or be annotated as {@link InternalApi}
+ */
+ public void testPublicApiConstructorArgumentNotAnnotated() {
+ final CompilerResult result = compile("PublicApiConstructorArgumentNotAnnotated.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ /**
+ * The constructor arguments have relaxed semantics at the moment: those could be not annotated or be annotated as {@link InternalApi}
+ */
+ public void testPublicApiConstructorArgumentAnnotatedInternalApi() {
+ final CompilerResult result = compile("PublicApiConstructorArgumentAnnotatedInternalApi.java", "InternalApiAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testPublicApiWithExperimentalApiMethod() {
+ final CompilerResult result = compile("PublicApiWithExperimentalApiMethod.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testPublicApiMethodReturnNotAnnotated() {
+ final CompilerResult result = compile("PublicApiMethodReturnNotAnnotated.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotated)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodReturnNotAnnotatedGenerics() {
+ final CompilerResult result = compile("PublicApiMethodReturnNotAnnotatedGenerics.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedGenerics)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodReturnNotAnnotatedArray() {
+ final CompilerResult result = compile("PublicApiMethodReturnNotAnnotatedArray.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedArray)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodReturnNotAnnotatedBoundedGenerics() {
+ final CompilerResult result = compile("PublicApiMethodReturnNotAnnotatedBoundedGenerics.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedBoundedGenerics)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodReturnNotAnnotatedAnnotation() {
+ final CompilerResult result = compile(
+ "PublicApiMethodReturnNotAnnotatedAnnotation.java",
+ "PublicApiAnnotated.java",
+ "NotAnnotatedAnnotation.java"
+ );
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedAnnotation)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodReturnNotAnnotatedWildcardGenerics() {
+ final CompilerResult result = compile("PublicApiMethodReturnNotAnnotatedWildcardGenerics.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testPublicApiWithPackagePrivateMethod() {
+ final CompilerResult result = compile("PublicApiWithPackagePrivateMethod.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testPublicApiMethodReturnSelf() {
+ final CompilerResult result = compile("PublicApiMethodReturnSelf.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testExperimentalApiMethodReturnSelf() {
+ final CompilerResult result = compile("ExperimentalApiMethodReturnSelf.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testDeprecatedApiMethodReturnSelf() {
+ final CompilerResult result = compile("DeprecatedApiMethodReturnSelf.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testPublicApiPackagePrivate() {
+ final CompilerResult result = compile("PublicApiPackagePrivate.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.PublicApiPackagePrivate is part of the public APIs but does not have public or protected visibility"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodGenericsArgumentNotAnnotated() {
+ final CompilerResult result = compile("PublicApiMethodGenericsArgumentNotAnnotated.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodGenericsArgumentNotAnnotated)"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodReturnAnnotatedArray() {
+ final CompilerResult result = compile("PublicApiMethodReturnAnnotatedArray.java", "PublicApiAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testPublicApiMethodGenericsArgumentAnnotated() {
+ final CompilerResult result = compile("PublicApiMethodGenericsArgumentAnnotated.java", "PublicApiAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testPublicApiAnnotatedNotOpensearch() {
+ final CompilerResult result = compileWithPackage("org.acme", "PublicApiAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The type org.acme.PublicApiAnnotated is not residing in org.opensearch.* package and should not be annotated as OpenSearch APIs."
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiMethodReturnAnnotatedGenerics() {
+ final CompilerResult result = compile(
+ "PublicApiMethodReturnAnnotatedGenerics.java",
+ "PublicApiAnnotated.java",
+ "NotAnnotatedAnnotation.java"
+ );
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnAnnotatedGenerics)"
+ )
+ )
+ )
+ );
+ }
+
+ /**
+ * The type could expose protected inner types which are still considered to be a public API when used
+ */
+ public void testPublicApiWithProtectedInterface() {
+ final CompilerResult result = compile("PublicApiWithProtectedInterface.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+}
diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java
new file mode 100644
index 0000000000000..dcf8dd7945012
--- /dev/null
+++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java
@@ -0,0 +1,139 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeMatcher;
+
+import javax.tools.Diagnostic;
+import javax.tools.DiagnosticCollector;
+import javax.tools.JavaCompiler;
+import javax.tools.JavaCompiler.CompilationTask;
+import javax.tools.JavaFileObject;
+import javax.tools.JavaFileObject.Kind;
+import javax.tools.SimpleJavaFileObject;
+import javax.tools.StandardJavaFileManager;
+import javax.tools.ToolProvider;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.io.UncheckedIOException;
+import java.net.URI;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+interface CompilerSupport {
+ default CompilerResult compile(String name, String... names) {
+ return compileWithPackage(ApiAnnotationProcessorTests.class.getPackageName(), name, names);
+ }
+
+ default CompilerResult compileWithPackage(String pck, String name, String... names) {
+ final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
+ final DiagnosticCollector<JavaFileObject> collector = new DiagnosticCollector<>();
+
+ try (StringWriter out = new StringWriter()) {
+ final StandardJavaFileManager fileManager = compiler.getStandardFileManager(collector, null, null);
+ final List<JavaFileObject> files = Stream.concat(Stream.of(name), Arrays.stream(names))
+ .map(f -> asSource(pck, f))
+ .collect(Collectors.toList());
+
+ final CompilationTask task = compiler.getTask(out, fileManager, collector, null, null, files);
+ task.setProcessors(Collections.singleton(new ApiAnnotationProcessor()));
+
+ if (AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> task.call())) {
+ return new Success();
+ } else {
+ return new Failure(collector.getDiagnostics());
+ }
+ } catch (final IOException ex) {
+ throw new UncheckedIOException(ex);
+ }
+ }
+
+ private static JavaFileObject asSource(String pkg, String name) {
+ final String resource = "/" + pkg.replaceAll("[.]", "/") + "/" + name;
+ final URL source = ApiAnnotationProcessorTests.class.getResource(resource);
+
+ return new SimpleJavaFileObject(URI.create(source.toExternalForm()), Kind.SOURCE) {
+ @Override
+ public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
+ try (final InputStream in = ApiAnnotationProcessorTests.class.getResourceAsStream(resource)) {
+ return new String(in.readAllBytes(), StandardCharsets.UTF_8);
+ }
+ }
+ };
+ }
+
+ class CompilerResult {}
+
+ class Success extends CompilerResult {
+
+ }
+
+ class Failure extends CompilerResult {
+ private final List<Diagnostic<? extends JavaFileObject>> diagnotics;
+
+ Failure(List<Diagnostic<? extends JavaFileObject>> diagnotics) {
+ this.diagnotics = diagnotics;
+ }
+
+ List<Diagnostic<? extends JavaFileObject>> diagnotics() {
+ return diagnotics;
+ }
+ }
+
+ class HasDiagnostic extends TypeSafeMatcher<Diagnostic<? extends JavaFileObject>> {
+ private final Diagnostic.Kind kind;
+ private final Matcher<String> matcher;
+
+ HasDiagnostic(final Diagnostic.Kind kind, final Matcher<String> matcher) {
+ this.kind = kind;
+ this.matcher = matcher;
+ }
+
+ @Override
+ public void describeTo(Description description) {
+ description.appendText("diagnostic with kind ").appendValue(kind).appendText(" ");
+
+ if (matcher != null) {
+ description.appendText(" and message ");
+ matcher.describeTo(description);
+ }
+ }
+
+ @Override
+ protected boolean matchesSafely(Diagnostic<? extends JavaFileObject> item) {
+ if (!kind.equals(item.getKind())) {
+ return false;
+ } else if (matcher != null) {
+ return matcher.matches(item.getMessage(Locale.ROOT));
+ } else {
+ return true;
+ }
+ }
+
+ public static HasDiagnostic matching(final Diagnostic.Kind kind, final Matcher<String> matcher) {
+ return new HasDiagnostic(kind, matcher);
+ }
+
+ public static HasDiagnostic matching(final Diagnostic.Kind kind) {
+ return new HasDiagnostic(kind, null);
+ }
+ }
+}
diff --git a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java
new file mode 100644
index 0000000000000..ae9f629c59024
--- /dev/null
+++ b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java
@@ -0,0 +1,57 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.test.OpenSearchTestCase;
+
+public class RoundableTests extends OpenSearchTestCase {
+
+ public void testFloor() {
+ int size = randomIntBetween(1, 256);
+ long[] values = new long[size];
+ for (int i = 1; i < values.length; i++) {
+ values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1;
+ }
+
+ Roundable[] impls = { new BinarySearcher(values, size), new BidirectionalLinearSearcher(values, size) };
+
+ for (int i = 0; i < 100000; i++) {
+ // Index of the expected round-down point.
+ int idx = randomIntBetween(0, size - 1);
+
+ // Value of the expected round-down point.
+ long expected = values[idx];
+
+ // Delta between the expected and the next round-down point.
+ long delta = (idx < size - 1) ? (values[idx + 1] - values[idx]) : 200;
+
+ // Adding a random delta between 0 (inclusive) and delta (exclusive) to the expected
+ // round-down point, which will still floor to the same value.
+ long key = expected + (randomNonNegativeLong() % delta);
+
+ for (Roundable roundable : impls) {
+ assertEquals(expected, roundable.floor(key));
+ }
+ }
+ }
+
+ public void testFailureCases() {
+ Throwable throwable;
+
+ throwable = assertThrows(IllegalArgumentException.class, () -> new BinarySearcher(new long[0], 0));
+ assertEquals("at least one value must be present", throwable.getMessage());
+ throwable = assertThrows(IllegalArgumentException.class, () -> new BidirectionalLinearSearcher(new long[0], 0));
+ assertEquals("at least one value must be present", throwable.getMessage());
+
+ throwable = assertThrows(AssertionError.class, () -> new BinarySearcher(new long[] { 100 }, 1).floor(50));
+ assertEquals("key must be greater than or equal to 100", throwable.getMessage());
+ throwable = assertThrows(AssertionError.class, () -> new BidirectionalLinearSearcher(new long[] { 100 }, 1).floor(50));
+ assertEquals("key must be greater than or equal to 100", throwable.getMessage());
+ }
+}
diff --git a/libs/common/src/test/resources/org/acme/PublicApiAnnotated.java b/libs/common/src/test/resources/org/acme/PublicApiAnnotated.java
new file mode 100644
index 0000000000000..bc16fd996e69d
--- /dev/null
+++ b/libs/common/src/test/resources/org/acme/PublicApiAnnotated.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.acme;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiAnnotated {
+
+}
diff --git a/libs/common/src/test/resources/org/opensearch/bootstrap/test.policy b/libs/common/src/test/resources/org/opensearch/bootstrap/test.policy
new file mode 100644
index 0000000000000..e0a183b7eac88
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/bootstrap/test.policy
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+grant {
+ // allow to use JVM tooling (Java Compiler) in tests for annotation processing
+ permission java.io.FilePermission "${java.home}/lib/*", "read";
+ permission java.io.FilePermission "${java.home}/lib/modules/*", "read";
+ permission java.lang.RuntimePermission "accessSystemModules";
+ permission java.lang.RuntimePermission "accessDeclaredMembers";
+ permission java.lang.RuntimePermission "accessClassInPackage.*";
+};
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/DeprecatedApiMethodReturnSelf.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/DeprecatedApiMethodReturnSelf.java
new file mode 100644
index 0000000000000..7c5b6f6ea2f51
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/DeprecatedApiMethodReturnSelf.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.DeprecatedApi;
+
+@DeprecatedApi(since = "1.0.0")
+public class DeprecatedApiMethodReturnSelf {
+ public DeprecatedApiMethodReturnSelf method() {
+ return new DeprecatedApiMethodReturnSelf();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/ExperimentalApiAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/ExperimentalApiAnnotated.java
new file mode 100644
index 0000000000000..5be07e22c811f
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/ExperimentalApiAnnotated.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+
+@ExperimentalApi
+public class ExperimentalApiAnnotated {
+
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/ExperimentalApiMethodReturnSelf.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/ExperimentalApiMethodReturnSelf.java
new file mode 100644
index 0000000000000..cde8f4f254faf
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/ExperimentalApiMethodReturnSelf.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+
+@ExperimentalApi
+public class ExperimentalApiMethodReturnSelf {
+ public ExperimentalApiMethodReturnSelf method() {
+ return new ExperimentalApiMethodReturnSelf();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/InternalApiAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/InternalApiAnnotated.java
new file mode 100644
index 0000000000000..9996ba8b736aa
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/InternalApiAnnotated.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class InternalApiAnnotated {
+
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotated.java
new file mode 100644
index 0000000000000..ec16ce926ea86
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotated.java
@@ -0,0 +1,13 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+public class NotAnnotated {
+
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotatedAnnotation.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotatedAnnotation.java
new file mode 100644
index 0000000000000..a3e9c4f576d92
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotatedAnnotation.java
@@ -0,0 +1,27 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+@Target({
+ ElementType.TYPE,
+ ElementType.TYPE_PARAMETER,
+ ElementType.TYPE_USE,
+ ElementType.PACKAGE,
+ ElementType.METHOD,
+ ElementType.CONSTRUCTOR,
+ ElementType.PARAMETER,
+ ElementType.FIELD,
+ ElementType.ANNOTATION_TYPE,
+ ElementType.MODULE })
+public @interface NotAnnotatedAnnotation {
+
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotatedException.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotatedException.java
new file mode 100644
index 0000000000000..0aadaf8f9bf31
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/NotAnnotatedException.java
@@ -0,0 +1,13 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+public class NotAnnotatedException extends Exception {
+ private static final long serialVersionUID = 1L;
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiAnnotated.java
new file mode 100644
index 0000000000000..b2a7f03cb2d31
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiAnnotated.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiAnnotated {
+
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorArgumentAnnotatedInternalApi.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorArgumentAnnotatedInternalApi.java
new file mode 100644
index 0000000000000..6bea2961a14e6
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorArgumentAnnotatedInternalApi.java
@@ -0,0 +1,20 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiConstructorArgumentAnnotatedInternalApi {
+ /**
+ * The constructor arguments have relaxed semantics at the moment: those could be not annotated or be annotated as {@link InternalApi}
+ */
+ public PublicApiConstructorArgumentAnnotatedInternalApi(InternalApiAnnotated arg) {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorArgumentNotAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorArgumentNotAnnotated.java
new file mode 100644
index 0000000000000..6c7481d9978cd
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorArgumentNotAnnotated.java
@@ -0,0 +1,20 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiConstructorArgumentNotAnnotated {
+ /**
+ * The constructor arguments have relaxed semantics at the moment: those could be not annotated or be annotated as {@link InternalApi}
+ */
+ public PublicApiConstructorArgumentNotAnnotated(NotAnnotated arg) {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentAnnotatedPackagePrivate.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentAnnotatedPackagePrivate.java
new file mode 100644
index 0000000000000..5dae56a7cd7d3
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentAnnotatedPackagePrivate.java
@@ -0,0 +1,20 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodArgumentAnnotatedPackagePrivate {
+ public void method(AnnotatedPackagePrivate arg) {}
+}
+
+// The public API exposes this class through public method argument, it should be public
+@PublicApi(since = "1.0.0")
+class AnnotatedPackagePrivate {}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotated.java
new file mode 100644
index 0000000000000..ddfec939f79e8
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotated.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodArgumentNotAnnotated {
+ public void method(NotAnnotated arg) {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotatedGenerics.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotatedGenerics.java
new file mode 100644
index 0000000000000..d32502831d299
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotatedGenerics.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+import java.util.Collection;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodArgumentNotAnnotatedGenerics {
+ public void method(Collection<? super NotAnnotated> arg) {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotatedPackagePrivate.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotatedPackagePrivate.java
new file mode 100644
index 0000000000000..d4fb31b172ef2
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodArgumentNotAnnotatedPackagePrivate.java
@@ -0,0 +1,19 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodArgumentNotAnnotatedPackagePrivate {
+ public void method(NotAnnotatedPackagePrivate arg) {}
+}
+
+// The public API exposes this class through public method argument, it should be annotated and be public
+class NotAnnotatedPackagePrivate {}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodGenericsArgumentAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodGenericsArgumentAnnotated.java
new file mode 100644
index 0000000000000..9715748cfa659
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodGenericsArgumentAnnotated.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodGenericsArgumentAnnotated {
+ public <T extends PublicApiAnnotated> void method(T arg) {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodGenericsArgumentNotAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodGenericsArgumentNotAnnotated.java
new file mode 100644
index 0000000000000..f149c1f34b067
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodGenericsArgumentNotAnnotated.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodGenericsArgumentNotAnnotated {
+ public <T extends NotAnnotated> void method(T arg) {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnAnnotatedArray.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnAnnotatedArray.java
new file mode 100644
index 0000000000000..39b7e146fe1e7
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnAnnotatedArray.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnAnnotatedArray {
+ public PublicApiAnnotated[] method() {
+ return new PublicApiAnnotated[0];
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnAnnotatedGenerics.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnAnnotatedGenerics.java
new file mode 100644
index 0000000000000..2171eccee2f31
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnAnnotatedGenerics.java
@@ -0,0 +1,23 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+import java.util.Collection;
+import java.util.Collections;
+
+import org.acme.PublicApiAnnotated;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnAnnotatedGenerics {
+ public Collection<@NotAnnotatedAnnotation PublicApiAnnotated> method() {
+ return Collections.emptyList();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotated.java
new file mode 100644
index 0000000000000..725d06072d0ea
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotated.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnNotAnnotated {
+ public NotAnnotated method() {
+ return new NotAnnotated();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedAnnotation.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedAnnotation.java
new file mode 100644
index 0000000000000..b684e36a53da1
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedAnnotation.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnNotAnnotatedAnnotation {
+ public @NotAnnotatedAnnotation PublicApiAnnotated method() {
+ return new PublicApiAnnotated();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedArray.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedArray.java
new file mode 100644
index 0000000000000..e4c541dcea57f
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedArray.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnNotAnnotatedArray {
+ public NotAnnotated[] method() {
+ return new NotAnnotated[0];
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedBoundedGenerics.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedBoundedGenerics.java
new file mode 100644
index 0000000000000..0646faf152610
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedBoundedGenerics.java
@@ -0,0 +1,21 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+import java.util.Collection;
+import java.util.Collections;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnNotAnnotatedBoundedGenerics {
+ public Collection<? extends NotAnnotated> method() {
+ return Collections.emptyList();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedGenerics.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedGenerics.java
new file mode 100644
index 0000000000000..2227883c707d0
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedGenerics.java
@@ -0,0 +1,21 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+import java.util.Collection;
+import java.util.Collections;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnNotAnnotatedGenerics {
+ public Collection<NotAnnotated> method() {
+ return Collections.emptyList();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedWildcardGenerics.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedWildcardGenerics.java
new file mode 100644
index 0000000000000..f2818ebb23c4a
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnNotAnnotatedWildcardGenerics.java
@@ -0,0 +1,21 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+import java.util.Collection;
+import java.util.Collections;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnNotAnnotatedWildcardGenerics {
+ public Collection<?> method() {
+ return Collections.emptyList();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnSelf.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnSelf.java
new file mode 100644
index 0000000000000..883471b23ae0f
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodReturnSelf.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodReturnSelf {
+ public PublicApiMethodReturnSelf method() {
+ return new PublicApiMethodReturnSelf();
+ }
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodThrowsNotAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodThrowsNotAnnotated.java
new file mode 100644
index 0000000000000..496b243276565
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiMethodThrowsNotAnnotated.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiMethodThrowsNotAnnotated {
+ public void method(PublicApiAnnotated arg) throws NotAnnotatedException {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiPackagePrivate.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiPackagePrivate.java
new file mode 100644
index 0000000000000..88c20e7f4c8f1
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiPackagePrivate.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+class PublicApiPackagePrivate {
+ void method() {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithExperimentalApiMethod.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithExperimentalApiMethod.java
new file mode 100644
index 0000000000000..faaaa1d9f4051
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithExperimentalApiMethod.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiWithExperimentalApiMethod {
+ @ExperimentalApi
+ public void method() {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithInternalApiMethod.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithInternalApiMethod.java
new file mode 100644
index 0000000000000..5bfa3c9f3e008
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithInternalApiMethod.java
@@ -0,0 +1,19 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiWithInternalApiMethod {
+ // The public API exposes internal API method, it should be public API
+ @InternalApi
+ public void method() {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithPackagePrivateMethod.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithPackagePrivateMethod.java
new file mode 100644
index 0000000000000..1345467423530
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithPackagePrivateMethod.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiWithPackagePrivateMethod {
+ void method(NotAnnotated arg) {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithProtectedInterface.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithProtectedInterface.java
new file mode 100644
index 0000000000000..222ae01fd15e6
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithProtectedInterface.java
@@ -0,0 +1,22 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiWithProtectedInterface {
+ public void method(ProtectedInterface iface) {}
+
+ /**
+ * The type could expose protected inner types which are still considered to be a public API when used
+ */
+ @PublicApi(since = "1.0.0")
+ protected interface ProtectedInterface {}
+}
diff --git a/libs/compress/src/main/java/org/opensearch/compress/ZstdCompressor.java b/libs/compress/src/main/java/org/opensearch/compress/ZstdCompressor.java
index 01afc368fb120..e2a740f72be93 100644
--- a/libs/compress/src/main/java/org/opensearch/compress/ZstdCompressor.java
+++ b/libs/compress/src/main/java/org/opensearch/compress/ZstdCompressor.java
@@ -30,10 +30,13 @@
* @opensearch.experimental - class methods might change
*/
public class ZstdCompressor implements Compressor {
- // An arbitrary header that we use to identify compressed streams
- // It needs to be different from other compressors and to not be specific
- // enough so that no stream starting with these bytes could be detected as
- // a XContent
+
+ /**
+ * An arbitrary header that we use to identify compressed streams
+ * It needs to be different from other compressors and to not be specific
+ * enough so that no stream starting with these bytes could be detected as
+ * a XContent
+ * */
private static final byte[] HEADER = new byte[] { 'Z', 'S', 'T', 'D', '\0' };
/**
@@ -44,10 +47,20 @@ public class ZstdCompressor implements Compressor {
@PublicApi(since = "2.10.0")
public static final String NAME = "ZSTD";
+ /**
+ * The compression level for {@link ZstdOutputStreamNoFinalizer}
+ */
private static final int LEVEL = 3;
+ /** The buffer size for {@link BufferedInputStream} and {@link BufferedOutputStream}
+ */
private static final int BUFFER_SIZE = 4096;
+ /**
+ * Compares the given bytes with the {@link ZstdCompressor#HEADER} of a compressed stream
+ * @param bytes the bytes to compare to ({@link ZstdCompressor#HEADER})
+ * @return true if the bytes are the {@link ZstdCompressor#HEADER}, false otherwise
+ */
@Override
public boolean isCompressed(BytesReference bytes) {
if (bytes.length() < HEADER.length) {
@@ -61,11 +74,22 @@ public boolean isCompressed(BytesReference bytes) {
return true;
}
+ /**
+ * Returns the length of the {@link ZstdCompressor#HEADER}
+ * @return the {@link ZstdCompressor#HEADER} length
+ */
@Override
public int headerLength() {
return HEADER.length;
}
+ /**
+ * Returns a new {@link ZstdInputStreamNoFinalizer} from the given compressed {@link InputStream}
+ * @param in the compressed {@link InputStream}
+ * @return a new {@link ZstdInputStreamNoFinalizer} from the given compressed {@link InputStream}
+ * @throws IOException if an I/O error occurs
+ * @throws IllegalArgumentException if the input stream is not compressed with ZSTD
+ */
@Override
public InputStream threadLocalInputStream(InputStream in) throws IOException {
final byte[] header = in.readNBytes(HEADER.length);
@@ -75,17 +99,36 @@ public InputStream threadLocalInputStream(InputStream in) throws IOException {
return new ZstdInputStreamNoFinalizer(new BufferedInputStream(in, BUFFER_SIZE), RecyclingBufferPool.INSTANCE);
}
+ /**
+ * Returns a new {@link ZstdOutputStreamNoFinalizer} from the given {@link OutputStream}
+ * @param out the {@link OutputStream}
+ * @return a new {@link ZstdOutputStreamNoFinalizer} from the given {@link OutputStream}
+ * @throws IOException if an I/O error occurs
+ */
@Override
public OutputStream threadLocalOutputStream(OutputStream out) throws IOException {
out.write(HEADER);
return new ZstdOutputStreamNoFinalizer(new BufferedOutputStream(out, BUFFER_SIZE), RecyclingBufferPool.INSTANCE, LEVEL);
}
+ /**
+ * Always throws an {@link UnsupportedOperationException} as ZSTD compression is supported only for snapshotting
+ * @param bytesReference a reference to the bytes to uncompress
+ * @return always throws an exception
+ * @throws UnsupportedOperationException if the method is called
+ * @throws IOException is never thrown
+ */
@Override
public BytesReference uncompress(BytesReference bytesReference) throws IOException {
throw new UnsupportedOperationException("ZSTD compression is supported only for snapshotting");
}
+ /**
+ * Always throws an {@link UnsupportedOperationException} as ZSTD compression is supported only for snapshotting
+ * @param bytesReference a reference to the bytes to compress
+ * @return always throws an exception
+ * @throws UnsupportedOperationException if the method is called
+ */
@Override
public BytesReference compress(BytesReference bytesReference) throws IOException {
throw new UnsupportedOperationException("ZSTD compression is supported only for snapshotting");
diff --git a/libs/core/build.gradle b/libs/core/build.gradle
index 46b6f4471731f..4850b5aea5c85 100644
--- a/libs/core/build.gradle
+++ b/libs/core/build.gradle
@@ -33,7 +33,7 @@ import org.opensearch.gradle.info.BuildParams
apply plugin: 'opensearch.publish'
base {
- archivesBaseName = 'opensearch-core'
+ archivesName = 'opensearch-core'
}
// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 11 so we do not include this source set in our IDEs
diff --git a/libs/core/licenses/jackson-core-2.15.2.jar.sha1 b/libs/core/licenses/jackson-core-2.15.2.jar.sha1
deleted file mode 100644
index ec6781b968eed..0000000000000
--- a/libs/core/licenses/jackson-core-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a6fe1836469a69b3ff66037c324d75fc66ef137c
\ No newline at end of file
diff --git a/libs/core/licenses/jackson-core-2.16.0.jar.sha1 b/libs/core/licenses/jackson-core-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..c2b70fb4ae202
--- /dev/null
+++ b/libs/core/licenses/jackson-core-2.16.0.jar.sha1
@@ -0,0 +1 @@
+899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/core/licenses/log4j-api-2.20.0.jar.sha1 b/libs/core/licenses/log4j-api-2.20.0.jar.sha1
deleted file mode 100644
index 37154d9861ac0..0000000000000
--- a/libs/core/licenses/log4j-api-2.20.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1fe6082e660daf07c689a89c94dc0f49c26b44bb
\ No newline at end of file
diff --git a/libs/core/licenses/log4j-api-2.21.0.jar.sha1 b/libs/core/licenses/log4j-api-2.21.0.jar.sha1
new file mode 100644
index 0000000000000..51446052594aa
--- /dev/null
+++ b/libs/core/licenses/log4j-api-2.21.0.jar.sha1
@@ -0,0 +1 @@
+760192f2b69eacf4a4afc78e5a1d7a8de054fcbd
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.8.0-snapshot-4373c3b.jar.sha1 b/libs/core/licenses/lucene-core-9.8.0-snapshot-4373c3b.jar.sha1
deleted file mode 100644
index dc363f2776429..0000000000000
--- a/libs/core/licenses/lucene-core-9.8.0-snapshot-4373c3b.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-30c3afcf058532d3d2b8820375043000e7f34a9b
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.8.0.jar.sha1 b/libs/core/licenses/lucene-core-9.8.0.jar.sha1
new file mode 100644
index 0000000000000..f9a3e2f3cbee6
--- /dev/null
+++ b/libs/core/licenses/lucene-core-9.8.0.jar.sha1
@@ -0,0 +1 @@
+5e8421c5f8573bcf22e9265fc7e19469545a775a
\ No newline at end of file
diff --git a/libs/core/src/main/java/org/opensearch/Build.java b/libs/core/src/main/java/org/opensearch/Build.java
index 67a50a8a31a0e..b5d67f5501725 100644
--- a/libs/core/src/main/java/org/opensearch/Build.java
+++ b/libs/core/src/main/java/org/opensearch/Build.java
@@ -216,7 +216,7 @@ public String getDistribution() {
/**
* Get the version as considered at build time
- *
+ *
* Offers a way to get the fully qualified version as configured by the build.
* This will be the same as {@link Version} for production releases, but may include on of the qualifier ( e.x alpha1 )
* or -SNAPSHOT for others.
diff --git a/libs/core/src/main/java/org/opensearch/LegacyESVersion.java b/libs/core/src/main/java/org/opensearch/LegacyESVersion.java
index 32eae654cf975..5d8e067a8fd8b 100644
--- a/libs/core/src/main/java/org/opensearch/LegacyESVersion.java
+++ b/libs/core/src/main/java/org/opensearch/LegacyESVersion.java
@@ -40,7 +40,7 @@
/**
* The Contents of this file were originally moved from {@link Version}.
- *
+ *
* This class keeps all the supported OpenSearch predecessor versions for
* backward compatibility purpose.
*
diff --git a/libs/core/src/main/java/org/opensearch/OpenSearchException.java b/libs/core/src/main/java/org/opensearch/OpenSearchException.java
index 5bad711a15032..cce86b452f698 100644
--- a/libs/core/src/main/java/org/opensearch/OpenSearchException.java
+++ b/libs/core/src/main/java/org/opensearch/OpenSearchException.java
@@ -168,7 +168,7 @@ public OpenSearchException(Throwable cause) {
/**
* Construct a <code>OpenSearchException</code> with the specified detail message.
- *
+ *
* The message can be parameterized using <code>{}</code> as placeholders for the given
* arguments
*
@@ -182,7 +182,7 @@ public OpenSearchException(String msg, Object... args) {
/**
* Construct a <code>OpenSearchException</code> with the specified detail message
* and nested exception.
- *
+ *
* The message can be parameterized using <code>{}</code> as placeholders for the given
* arguments
*
@@ -587,7 +587,7 @@ public static OpenSearchException innerFromXContent(XContentParser parser, boole
* Static toXContent helper method that renders {@link OpenSearchException} or {@link Throwable} instances
* as XContent, delegating the rendering to {@link OpenSearchException#toXContent(XContentBuilder, ToXContent.Params)}
* or {@link #innerToXContent(XContentBuilder, ToXContent.Params, Throwable, String, String, Map, Map, Throwable)}.
- *
+ *
* This method is usually used when the {@link Throwable} is rendered as a part of another XContent object, and its result can
* be parsed back using the {@code OpenSearchException.fromXContent(XContentParser)} method.
*/
@@ -606,7 +606,7 @@ public static void generateThrowableXContent(XContentBuilder builder, ToXContent
* depends on the value of the "detailed" parameter: when it's false only a simple message based on the type and message of the
* exception is rendered. When it's true all detail are provided including guesses root causes, cause and potentially stack
* trace.
- *
+ *
* This method is usually used when the {@link Exception} is rendered as a full XContent object, and its output can be parsed
* by the {@code #OpenSearchException.failureFromXContent(XContentParser)} method.
*/
diff --git a/libs/core/src/main/java/org/opensearch/OpenSearchParseException.java b/libs/core/src/main/java/org/opensearch/OpenSearchParseException.java
index c2516402b0d30..26aff04b30a56 100644
--- a/libs/core/src/main/java/org/opensearch/OpenSearchParseException.java
+++ b/libs/core/src/main/java/org/opensearch/OpenSearchParseException.java
@@ -32,6 +32,7 @@
package org.opensearch;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.rest.RestStatus;
@@ -40,8 +41,9 @@
/**
* Unchecked exception that is translated into a {@code 400 BAD REQUEST} error when it bubbles out over HTTP.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public class OpenSearchParseException extends OpenSearchException {
public OpenSearchParseException(String msg, Object... args) {
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index 32f4ca0317907..d94be3f25b53d 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -94,7 +94,11 @@ public class Version implements Comparable, ToXContentFragment {
public static final Version V_2_9_0 = new Version(2090099, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_9_1 = new Version(2090199, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_10_0 = new Version(2100099, org.apache.lucene.util.Version.LUCENE_9_7_0);
+ public static final Version V_2_10_1 = new Version(2100199, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_11_0 = new Version(2110099, org.apache.lucene.util.Version.LUCENE_9_7_0);
+ public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0);
+ public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0);
+ public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_8_0);
public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_8_0);
public static final Version CURRENT = V_3_0_0;
diff --git a/libs/core/src/main/java/org/opensearch/core/action/ActionListener.java b/libs/core/src/main/java/org/opensearch/core/action/ActionListener.java
index 119e56cfe0bf2..4fd55898a2cb5 100644
--- a/libs/core/src/main/java/org/opensearch/core/action/ActionListener.java
+++ b/libs/core/src/main/java/org/opensearch/core/action/ActionListener.java
@@ -154,9 +154,9 @@ static ActionListener wrap(Runnable runnable) {
/**
* Creates a listener that wraps another listener, mapping response values via the given mapping function and passing along
* exceptions to the delegate.
- *
+ *
* Notice that it is considered a bug if the listener's onResponse or onFailure fails. onResponse failures will not call onFailure.
- *
+ *
* If the function fails, the listener's onFailure handler will be called. The principle is that the mapped listener will handle
* exceptions from the mapping function {@code fn} but it is the responsibility of {@code delegate} to handle its own exceptions
* inside `onResponse` and `onFailure`.
@@ -334,7 +334,7 @@ protected void innerOnFailure(Exception e) {
/**
* Completes the given listener with the result from the provided supplier accordingly.
* This method is mainly used to complete a listener with a block of synchronous code.
- *
+ *
* If the supplier fails, the listener's onFailure handler will be called.
* It is the responsibility of {@code delegate} to handle its own exceptions inside `onResponse` and `onFailure`.
*/
diff --git a/libs/core/src/main/java/org/opensearch/core/action/ActionResponse.java b/libs/core/src/main/java/org/opensearch/core/action/ActionResponse.java
index 041d8b1bffb4a..7525bfb243aae 100644
--- a/libs/core/src/main/java/org/opensearch/core/action/ActionResponse.java
+++ b/libs/core/src/main/java/org/opensearch/core/action/ActionResponse.java
@@ -32,6 +32,7 @@
package org.opensearch.core.action;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.transport.TransportResponse;
@@ -42,6 +43,7 @@
*
* @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public abstract class ActionResponse extends TransportResponse {
public ActionResponse() {}
diff --git a/libs/core/src/main/java/org/opensearch/core/action/NotifyOnceListener.java b/libs/core/src/main/java/org/opensearch/core/action/NotifyOnceListener.java
index 6af9ca005d171..f087322e0024c 100644
--- a/libs/core/src/main/java/org/opensearch/core/action/NotifyOnceListener.java
+++ b/libs/core/src/main/java/org/opensearch/core/action/NotifyOnceListener.java
@@ -32,6 +32,8 @@
package org.opensearch.core.action;
+import org.opensearch.common.annotation.PublicApi;
+
import java.util.concurrent.atomic.AtomicBoolean;
/**
@@ -39,8 +41,9 @@
* the is called is only called once. Subclasses should implement notification logic with
* innerOnResponse and innerOnFailure.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public abstract class NotifyOnceListener implements ActionListener {
private final AtomicBoolean hasBeenCalled = new AtomicBoolean(false);
diff --git a/libs/core/src/main/java/org/opensearch/core/common/Strings.java b/libs/core/src/main/java/org/opensearch/core/common/Strings.java
index 6227716af9cc9..8fdec670bd9f2 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/Strings.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/Strings.java
@@ -38,7 +38,7 @@
/**
* String utility class.
- *
+ *
* TODO replace Strings in :server
*
* @opensearch.internal
diff --git a/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java
index 0f75f763d21c1..846950ff17c63 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java
@@ -71,17 +71,23 @@ public interface CircuitBreaker {
/**
* The type of breaker
- *
+ * can be {@link #MEMORY}, {@link #PARENT}, or {@link #NOOP}
* @opensearch.internal
*/
enum Type {
- // A regular or ChildMemoryCircuitBreaker
+ /** A regular or ChildMemoryCircuitBreaker */
MEMORY,
- // A special parent-type for the hierarchy breaker service
+ /** A special parent-type for the hierarchy breaker service */
PARENT,
- // A breaker where every action is a noop, it never breaks
+ /** A breaker where every action is a noop, it never breaks */
NOOP;
+ /**
+ * Converts string (case-insensitive) to breaker {@link Type}
+ * @param value "noop", "parent", or "memory" (case-insensitive)
+ * @return the breaker {@link Type}
+ * @throws IllegalArgumentException if value is not "noop", "parent", or "memory"
+ */
public static Type parseValue(String value) {
switch (value.toLowerCase(Locale.ROOT)) {
case "noop":
@@ -98,13 +104,13 @@ public static Type parseValue(String value) {
/**
* The breaker durability
- *
+ * can be {@link #TRANSIENT} or {@link #PERMANENT}
* @opensearch.internal
*/
enum Durability {
- // The condition that tripped the circuit breaker fixes itself eventually.
+ /** The condition that tripped the circuit breaker fixes itself eventually. */
TRANSIENT,
- // The condition that tripped the circuit breaker requires manual intervention.
+ /** The condition that tripped the circuit breaker requires manual intervention. */
PERMANENT
}
@@ -120,11 +126,14 @@ enum Durability {
* @param bytes number of bytes to add
* @param label string label describing the bytes being added
* @return the number of "used" bytes for the circuit breaker
+ * @throws CircuitBreakingException if the breaker tripped
*/
double addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException;
/**
* Adjust the circuit breaker without tripping
+ * @param bytes number of bytes to add
+ * @return the number of "used" bytes for the circuit breaker
*/
long addWithoutBreaking(long bytes);
@@ -154,7 +163,10 @@ enum Durability {
String getName();
/**
- * @return whether a tripped circuit breaker will reset itself (transient) or requires manual intervention (permanent).
+ * Returns the {@link Durability} of this breaker
+ * @return whether a tripped circuit breaker will
+ * reset itself ({@link Durability#TRANSIENT})
+ * or requires manual intervention ({@link Durability#PERMANENT}).
*/
Durability getDurability();
diff --git a/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java
index e6443a0d48ce0..2df116dcad076 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java
@@ -46,8 +46,11 @@
*/
public class CircuitBreakingException extends OpenSearchException {
+ /** The number of bytes wanted */
private final long bytesWanted;
+ /** The circuit breaker limit */
private final long byteLimit;
+ /** The {@link CircuitBreaker.Durability} of the circuit breaker */
private final CircuitBreaker.Durability durability;
public CircuitBreakingException(StreamInput in) throws IOException {
@@ -88,6 +91,7 @@ public CircuitBreaker.Durability getDurability() {
return durability;
}
+ /** Always returns {@link RestStatus#TOO_MANY_REQUESTS} */
@Override
public RestStatus status() {
return RestStatus.TOO_MANY_REQUESTS;
diff --git a/libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java
index 86a0a7ccb96fd..17b9fefd27c99 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java
@@ -33,65 +33,120 @@
package org.opensearch.core.common.breaker;
/**
- * A CircuitBreaker that doesn't increment or adjust, and all operations are
- * basically noops
- *
+ * A {@link CircuitBreaker} that doesn't increment or adjust, and all operations are
+ * basically noops.
+ * It never trips, limit is always -1, always returns 0 for all metrics.
* @opensearch.internal
*/
public class NoopCircuitBreaker implements CircuitBreaker {
- public static final int LIMIT = -1;
+ /** The limit of this breaker is always -1 */
+ public static final int LIMIT = -1;
+ /** Name of this breaker */
private final String name;
+ /**
+ * Creates a new NoopCircuitBreaker (that never trip) with the given name
+ * @param name the name of this breaker
+ */
public NoopCircuitBreaker(String name) {
this.name = name;
}
+ /**
+ * This is a noop, a noop breaker never trip
+ * @param fieldName name of this noop breaker
+ * @param bytesNeeded bytes needed
+ */
@Override
public void circuitBreak(String fieldName, long bytesNeeded) {
// noop
}
+ /**
+ * This is a noop, always return 0 and never throw/trip
+ * @param bytes number of bytes to add
+ * @param label string label describing the bytes being added
+ * @return always return 0
+ * @throws CircuitBreakingException never thrown
+ */
@Override
public double addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException {
return 0;
}
+ /**
+ * This is a noop, nothing is added, always return 0
+ * @param bytes number of bytes to add (ignored)
+ * @return always return 0
+ */
@Override
public long addWithoutBreaking(long bytes) {
return 0;
}
+ /**
+ * This is a noop, always return 0
+ * @return always return 0
+ */
@Override
public long getUsed() {
return 0;
}
+ /**
+ * A noop breaker have a constant limit of -1
+ * @return always return -1
+ */
@Override
public long getLimit() {
return LIMIT;
}
+ /**
+ * A noop breaker have no overhead, always return 0
+ * @return always return 0
+ */
@Override
public double getOverhead() {
return 0;
}
+ /**
+ * A noop breaker never trip, always return 0
+ * @return always return 0
+ */
@Override
public long getTrippedCount() {
return 0;
}
+ /**
+ * return the name of this breaker
+ * @return the name of this breaker
+ */
@Override
public String getName() {
return this.name;
}
+ /**
+ * A noop breaker {@link Durability} is always {@link Durability#PERMANENT}
+ * @return always return {@link Durability#PERMANENT }
+ */
@Override
public Durability getDurability() {
return Durability.PERMANENT;
}
+ /**
+ * Limit and overhead are constant for a noop breaker.
+ * this is a noop.
+ * @param limit the desired limit (ignored)
+ * @param overhead the desired overhead (ignored)
+ */
@Override
- public void setLimitAndOverhead(long limit, double overhead) {}
+ public void setLimitAndOverhead(long limit, double overhead) {
+ // noop
+ }
}
diff --git a/libs/core/src/main/java/org/opensearch/core/common/bytes/AbstractBytesReference.java b/libs/core/src/main/java/org/opensearch/core/common/bytes/AbstractBytesReference.java
index e054776d67fdc..a2bf7e499dee8 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/bytes/AbstractBytesReference.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/bytes/AbstractBytesReference.java
@@ -49,14 +49,10 @@
*/
public abstract class AbstractBytesReference implements BytesReference {
- private Integer hash = null; // we cache the hash of this reference since it can be quite costly to re-calculated it
+ /** we cache the hash of this reference since it can be quite costly to re-calculated it */
+ private Integer hash = null;
private static final int MAX_UTF16_LENGTH = Integer.MAX_VALUE >> 1;
- @Override
- public int getInt(int index) {
- return (get(index) & 0xFF) << 24 | (get(index + 1) & 0xFF) << 16 | (get(index + 2) & 0xFF) << 8 | get(index + 3) & 0xFF;
- }
-
@Override
public int indexOf(byte marker, int from) {
final int to = length();
diff --git a/libs/core/src/main/java/org/opensearch/core/common/bytes/BytesArray.java b/libs/core/src/main/java/org/opensearch/core/common/bytes/BytesArray.java
index ae04ddcc19eee..d7a8414935143 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/bytes/BytesArray.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/bytes/BytesArray.java
@@ -32,6 +32,7 @@
package org.opensearch.core.common.bytes;
+import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -83,6 +84,11 @@ public byte get(int index) {
return bytes[offset + index];
}
+ @Override
+ public int getInt(int index) {
+ return (int) BitUtil.VH_BE_INT.get(bytes, offset + index);
+ }
+
@Override
public int length() {
return length;
diff --git a/libs/core/src/main/java/org/opensearch/core/common/bytes/BytesReference.java b/libs/core/src/main/java/org/opensearch/core/common/bytes/BytesReference.java
index 9d24d3653397b..8cb65c9feb1ca 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/bytes/BytesReference.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/bytes/BytesReference.java
@@ -153,9 +153,11 @@ static BytesReference fromByteArray(ByteArray byteArray, int length) {
byte get(int index);
/**
- * Returns the integer read from the 4 bytes (BE) starting at the given index.
+ * Returns the integer read from the 4 bytes (big endian) starting at the given index.
*/
- int getInt(int index);
+ default int getInt(int index) {
+ return ((get(index) & 0xFF) << 24) | ((get(index + 1) & 0xFF) << 16) | ((get(index + 2) & 0xFF) << 8) | (get(index + 3) & 0xFF);
+ }
/**
* Finds the index of the first occurrence of the given marker between within the given bounds.
diff --git a/libs/core/src/main/java/org/opensearch/core/common/bytes/CompositeBytesReference.java b/libs/core/src/main/java/org/opensearch/core/common/bytes/CompositeBytesReference.java
index 53915a3da824c..1a48abee2dbf8 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/bytes/CompositeBytesReference.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/bytes/CompositeBytesReference.java
@@ -45,7 +45,7 @@
/**
* A composite {@link BytesReference} that allows joining multiple bytes references
* into one without copying.
- *
+ *
* Note, {@link #toBytesRef()} will materialize all pages in this BytesReference.
*
* @opensearch.internal
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/BytesStreamInput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/BytesStreamInput.java
index a50d1c165ed72..cad43f817faaf 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/BytesStreamInput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/BytesStreamInput.java
@@ -8,6 +8,7 @@
package org.opensearch.core.common.io.stream;
+import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
import java.io.EOFException;
@@ -17,7 +18,7 @@
* {@link StreamInput} version of Lucene's {@link org.apache.lucene.store.ByteArrayDataInput}
* This is used as a replacement of Lucene ByteArrayDataInput for abstracting byte order changes
* in Lucene's API
- *
+ *
* Attribution given to apache lucene project under ALv2:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -121,4 +122,33 @@ public int read() throws IOException {
return bytes[pos++] & 0xFF;
}
+ @Override
+ public short readShort() throws IOException {
+ if (available() < Short.BYTES) {
+ throw new EOFException();
+ }
+ short value = (short) BitUtil.VH_BE_SHORT.get(bytes, pos);
+ pos += Short.BYTES;
+ return value;
+ }
+
+ @Override
+ public int readInt() throws IOException {
+ if (available() < Integer.BYTES) {
+ throw new EOFException();
+ }
+ int value = (int) BitUtil.VH_BE_INT.get(bytes, pos);
+ pos += Integer.BYTES;
+ return value;
+ }
+
+ @Override
+ public long readLong() throws IOException {
+ if (available() < Long.BYTES) {
+ throw new EOFException();
+ }
+ long value = (long) BitUtil.VH_BE_LONG.get(bytes, pos);
+ pos += Long.BYTES;
+ return value;
+ }
}
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/NamedWriteableRegistry.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/NamedWriteableRegistry.java
index abac76c8b6c27..123b52eb92876 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/NamedWriteableRegistry.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/NamedWriteableRegistry.java
@@ -43,7 +43,7 @@
/**
* A registry for {@link Writeable.Reader} readers of {@link NamedWriteable}.
- *
+ *
* The registration is keyed by the combination of the category class of {@link NamedWriteable}, and a name unique
* to that category.
*
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
index ece2012302919..3e996bdee83a2 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
@@ -96,7 +96,7 @@
/**
* A stream from this node to another node. Technically, it can also be streamed to a byte array but that is mostly for testing.
- *
+ *
* This class's methods are optimized so you can put the methods that read and write a class next to each other and you can scan them
* visually for differences. That means that most variables should be read and written in a single line so even large objects fit both
* reading and writing on the screen. It also means that the methods on this class are named very similarly to {@link StreamOutput}. Finally
@@ -1128,7 +1128,7 @@ public C readNamedWriteable(@SuppressWarnings("unused
* the corresponding entry in the registry by name, so that the proper object can be read and returned.
* Default implementation throws {@link UnsupportedOperationException} as StreamInput doesn't hold a registry.
* Use {@link FilterInputStream} instead which wraps a stream and supports a {@link NamedWriteableRegistry} too.
- *
+ *
* Prefer {@link StreamInput#readNamedWriteable(Class)} and {@link StreamOutput#writeNamedWriteable(NamedWriteable)} unless you
* have a compelling reason to use this method instead.
*/
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
index 94b813246bc7e..2d69e1c686df3 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
@@ -88,7 +88,7 @@
/**
* A stream from another node to this node. Technically, it can also be streamed from a byte array but that is mostly for testing.
- *
+ *
* This class's methods are optimized so you can put the methods that read and write a class next to each other and you can scan them
* visually for differences. That means that most variables should be read and written in a single line so even large objects fit both
* reading and writing on the screen. It also means that the methods on this class are named very similarly to {@link StreamInput}. Finally
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/Writeable.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/Writeable.java
index af9df51655414..960f4bec5eeb5 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/Writeable.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/Writeable.java
@@ -32,6 +32,8 @@
package org.opensearch.core.common.io.stream;
+import org.opensearch.common.annotation.PublicApi;
+
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -41,8 +43,9 @@
* across the wire" using OpenSearch's internal protocol. If the implementer also implements equals and hashCode then a copy made by
* serializing and deserializing must be equal and have the same hashCode. It isn't required that such a copy be entirely unchanged.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.8.0")
public interface Writeable {
/**
* A WriteableRegistry registers {@link Writer} methods for writing data types over a
@@ -135,8 +138,11 @@ public static Class> getCustomClassFromInstance(final Object value) {
* out.writeMapOfLists(someMap, StreamOutput::writeString, StreamOutput::writeString);
* }
*
+ *
+ * @opensearch.api
*/
@FunctionalInterface
+ @PublicApi(since = "2.8.0")
interface Writer {
/**
@@ -161,8 +167,11 @@ interface Writer {
* this.someMap = in.readMapOfLists(StreamInput::readString, StreamInput::readString);
* }
*
+ *
+ * @opensearch.api
*/
@FunctionalInterface
+ @PublicApi(since = "2.8.0")
interface Reader {
/**
diff --git a/libs/core/src/main/java/org/opensearch/core/common/logging/LoggerMessageFormat.java b/libs/core/src/main/java/org/opensearch/core/common/logging/LoggerMessageFormat.java
index 59492193d16dc..c7b9bee3cbf4d 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/logging/LoggerMessageFormat.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/logging/LoggerMessageFormat.java
@@ -30,6 +30,13 @@
* GitHub history for details.
*/
+/*
+ * This code is based on code from SFL4J 1.5.11
+ * Copyright (c) 2004-2007 QOS.ch
+ * All rights reserved.
+ * SPDX-License-Identifier: MIT
+ */
+
package org.opensearch.core.common.logging;
import java.util.HashSet;
@@ -37,6 +44,10 @@
/**
* Format string for OpenSearch log messages.
+ *
+ * This class is almost a copy of {@code org.slf4j.helpers.MessageFormatter}
+ * The original code is licensed under the MIT License and is available at :
+ * MessageFormatter.java
*
* @opensearch.internal
*/
@@ -51,6 +62,17 @@ public static String format(final String messagePattern, final Object... argArra
return format(null, messagePattern, argArray);
}
+ /**
+ * (this is almost a copy of {@code org.slf4j.helpers.MessageFormatter.arrayFormat})
+ *
+ * @param prefix the prefix to prepend to the formatted message (can be null)
+ * @param messagePattern the message pattern which will be parsed and formatted
+ * @param argArray an array of arguments to be substituted in place of formatting anchors
+ * @return null if messagePattern is null
+ * messagePattern if argArray is (null or empty) and prefix is null
+ * prefix + messagePattern if argArray is (null or empty) and prefix is not null
+ * formatted message otherwise (even if prefix is null)
+ */
public static String format(final String prefix, final String messagePattern, final Object... argArray) {
if (messagePattern == null) {
return null;
@@ -110,6 +132,13 @@ public static String format(final String prefix, final String messagePattern, fi
return sbuf.toString();
}
+ /**
+ * Checks if (delimterStartIndex - 1) in messagePattern is an escape character.
+ * @param messagePattern the message pattern
+ * @param delimiterStartIndex the index of the character to check
+ * @return true if there is an escape char before the character at delimiterStartIndex.
+ * Always returns false if delimiterStartIndex == 0 (edge case)
+ */
static boolean isEscapedDelimiter(String messagePattern, int delimiterStartIndex) {
if (delimiterStartIndex == 0) {
@@ -119,6 +148,13 @@ static boolean isEscapedDelimiter(String messagePattern, int delimiterStartIndex
return potentialEscape == ESCAPE_CHAR;
}
+ /**
+ * Checks if (delimterStartIndex - 2) in messagePattern is an escape character.
+ * @param messagePattern the message pattern
+ * @param delimiterStartIndex the index of the character to check
+ * @return true if (delimterStartIndex - 2) in messagePattern is an escape character.
+ * Always returns false if delimiterStartIndex is less than 2 (edge case)
+ */
static boolean isDoubleEscaped(String messagePattern, int delimiterStartIndex) {
return delimiterStartIndex >= 2 && messagePattern.charAt(delimiterStartIndex - 2) == ESCAPE_CHAR;
}
diff --git a/libs/core/src/main/java/org/opensearch/core/common/settings/SecureString.java b/libs/core/src/main/java/org/opensearch/core/common/settings/SecureString.java
index 322300a554284..45ee72f558724 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/settings/SecureString.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/settings/SecureString.java
@@ -50,7 +50,7 @@ public final class SecureString implements CharSequence, Closeable {
/**
* Constructs a new SecureString which controls the passed in char array.
- *
+ *
* Note: When this instance is closed, the array will be zeroed out.
*/
public SecureString(char[] chars) {
@@ -59,7 +59,7 @@ public SecureString(char[] chars) {
/**
* Constructs a new SecureString from an existing String.
- *
+ *
* NOTE: This is not actually secure, since the provided String cannot be deallocated, but
* this constructor allows for easy compatibility between new and old apis.
*
diff --git a/libs/core/src/main/java/org/opensearch/core/common/text/Text.java b/libs/core/src/main/java/org/opensearch/core/common/text/Text.java
index ca5402edae59e..3a46bd4602297 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/text/Text.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/text/Text.java
@@ -32,6 +32,7 @@
package org.opensearch.core.common.text;
import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.xcontent.ToXContentFragment;
@@ -44,8 +45,9 @@
* Both {@link String} and {@link BytesReference} representation of the text. Starts with one of those, and if
* the other is requests, caches the other one in a local reference so no additional conversion will be needed.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public final class Text implements Comparable, ToXContentFragment {
public static final Text[] EMPTY_ARRAY = new Text[0];
diff --git a/libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java b/libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java
index 1a853877ed0b9..3b5fbb7d76307 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java
@@ -32,6 +32,7 @@
package org.opensearch.core.common.transport;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.common.network.NetworkAddress;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
@@ -47,8 +48,9 @@
/**
* A transport address used for IP socket address (wraps {@link java.net.InetSocketAddress}).
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public final class TransportAddress implements Writeable, ToXContentFragment {
/**
@@ -71,6 +73,12 @@ public TransportAddress(InetAddress address, int port) {
this(new InetSocketAddress(address, port));
}
+ /**
+ * Creates a new {@link TransportAddress} from a {@link InetSocketAddress}.
+ * @param address the address to wrap
+ * @throws IllegalArgumentException if the address is null or not resolved
+ * @see InetSocketAddress#getAddress()
+ */
public TransportAddress(InetSocketAddress address) {
if (address == null) {
throw new IllegalArgumentException("InetSocketAddress must not be null");
@@ -82,7 +90,9 @@ public TransportAddress(InetSocketAddress address) {
}
/**
- * Read from a stream.
+ * Creates a new {@link TransportAddress} from a {@link StreamInput}.
+ * @param in the stream to read from
+ * @throws IOException if an I/O error occurs
*/
public TransportAddress(StreamInput in) throws IOException {
final int len = in.readByte();
@@ -116,6 +126,8 @@ public String getAddress() {
/**
* Returns the addresses port
+ * @return the port number, or 0 if the socket is not bound yet.
+ * @see InetSocketAddress#getPort()
*/
public int getPort() {
return address.getPort();
diff --git a/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java
index 1f49a3531986c..49eadbbb2bc00 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java
@@ -45,6 +45,13 @@
* A {@code SizeUnit} does not maintain size information, but only
* helps organize and use size representations that may be maintained
* separately across various contexts.
+ *
+ * It use conventional data storage values (base-2) :
+ *
+ * 1KB = 1024 bytes
+ * 1MB = 1024KB
+ * ...
+ *
*
* @opensearch.api
*/
diff --git a/libs/core/src/main/java/org/opensearch/core/common/util/ByteArray.java b/libs/core/src/main/java/org/opensearch/core/common/util/ByteArray.java
index e50f24417f8bc..f4d81c4ca4363 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/util/ByteArray.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/util/ByteArray.java
@@ -33,14 +33,16 @@
package org.opensearch.core.common.util;
import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.annotation.PublicApi;
import java.nio.ByteBuffer;
/**
* Abstraction of an array of byte values.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public interface ByteArray extends BigArray {
/**
diff --git a/libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java b/libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java
index e8dd31fcf1869..5335c98182b64 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java
@@ -73,6 +73,16 @@ public static boolean isEmpty(Object[] array) {
/**
* Return a rotated view of the given list with the given distance.
+ *
+ * The distance can be negative, in which case the list is rotated to the left.
+ * The distance can be larger than the size of the list, in which case the list is rotated multiple times.
+ * The distance can be zero, in which case the list is not rotated.
+ * The list can be empty, in which case it remains empty.
+ *
+ * @param list the list to rotate
+ * @param distance the distance to rotate (positive rotates right, negative rotates left)
+ * @return a rotated view of the given list with the given distance
+ * @see RotatedList
*/
public static List rotate(final List list, int distance) {
if (list.isEmpty()) {
@@ -92,7 +102,13 @@ public static List rotate(final List list, int distance) {
}
/**
- * in place de-duplicates items in a list
+ * In place de-duplicates items in a list
+ * Noop if the list is empty or has one item.
+ *
+ * @throws NullPointerException if the list or the comparator is {@code null}
+ * @param array the list to de-duplicate
+ * @param comparator the comparator to use to compare items
+ * @param the type of the items in the list
*/
public static void sortAndDedup(final List array, Comparator comparator) {
// base case: one item
@@ -115,6 +131,12 @@ public static void sortAndDedup(final List array, Comparator comparato
array.subList(deduped.nextIndex(), array.size()).clear();
}
+ /**
+ * Converts a collection of Integers to an array of ints.
+ * @param ints The collection of Integers to convert
+ * @return The array of ints
+ * @throws NullPointerException if ints is null
+ */
public static int[] toArray(Collection ints) {
Objects.requireNonNull(ints);
return ints.stream().mapToInt(s -> s).toArray();
@@ -134,6 +156,11 @@ public static void ensureNoSelfReferences(Object value, String messageHint) {
}
}
+ /**
+ * Converts an object to an Iterable, if possible.
+ * @param value The object to convert
+ * @return The Iterable, or null if the object cannot be converted
+ */
@SuppressWarnings("unchecked")
private static Iterable> convert(Object value) {
if (value == null) {
@@ -192,6 +219,13 @@ private static class RotatedList extends AbstractList implements RandomAcc
private final List in;
private final int distance;
+ /**
+ * Creates a rotated list
+ * @param list The list to rotate
+ * @param distance The distance to rotate to the right
+ * @throws IllegalArgumentException if the distance is negative or greater than or equal to the size of the list;
+ * or if the list is not a {@link RandomAccess} list
+ */
RotatedList(List list, int distance) {
if (distance < 0 || distance >= list.size()) {
throw new IllegalArgumentException();
@@ -218,6 +252,13 @@ public int size() {
}
}
+ /**
+ * Converts an {@link Iterable} to an {@link ArrayList}.
+ * @param elements The iterable to convert
+ * @param the type the elements
+ * @return an {@link ArrayList}
+ * @throws NullPointerException if elements is null
+ */
@SuppressWarnings("unchecked")
public static ArrayList iterableAsArrayList(Iterable extends E> elements) {
if (elements == null) {
@@ -297,11 +338,11 @@ public static List> eagerPartition(List list, int size) {
}
/**
- * Check if a collection is empty or not. Empty collection mean either it is null or it has no elements in it. If
- * collection contains a null element it means it is not empty.
+ * Checks whether a collection is empty. An empty collection means either it is null or it has no elements.
+ * If the collection contains a null element it is not considered empty.
*
* @param collection {@link Collection}
- * @return boolean
+ * @return true if collection is null or {@code isEmpty()}, false otherwise
* @param Element
*/
public static boolean isEmpty(final Collection collection) {
diff --git a/libs/core/src/main/java/org/opensearch/core/compress/Compressor.java b/libs/core/src/main/java/org/opensearch/core/compress/Compressor.java
index 27d5b5dfdfa15..5324ea6151e51 100644
--- a/libs/core/src/main/java/org/opensearch/core/compress/Compressor.java
+++ b/libs/core/src/main/java/org/opensearch/core/compress/Compressor.java
@@ -43,7 +43,7 @@
/**
* Compressor interface used for compressing {@link org.opensearch.core.xcontent.MediaType} and
* {@code org.opensearch.repositories.blobstore.BlobStoreRepository} implementations.
- *
+ *
* This is not to be confused with {@link org.apache.lucene.codecs.compressing.Compressor} which is used
* for codec implementations such as {@code org.opensearch.index.codec.customcodecs.Lucene95CustomCodec}
* for compressing {@link org.apache.lucene.document.StoredField}s
diff --git a/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java b/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
index 9290254c30d8d..af09a7aebba79 100644
--- a/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
+++ b/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
@@ -23,7 +23,7 @@
/**
* A registry that wraps a static Map singleton which holds a mapping of unique String names (typically the
* compressor header as a string) to registerd {@link Compressor} implementations.
- *
+ *
* This enables plugins, modules, extensions to register their own compression implementations through SPI
*
* @opensearch.experimental
@@ -105,7 +105,7 @@ public static Compressor getCompressor(final String name) {
/**
* Returns the registered compressors as an Immutable collection
- *
+ *
* note: used for testing
*/
public static Map registeredCompressors() {
diff --git a/libs/core/src/main/java/org/opensearch/core/compress/spi/CompressorProvider.java b/libs/core/src/main/java/org/opensearch/core/compress/spi/CompressorProvider.java
index 019e282444d64..9b806618fe0a0 100644
--- a/libs/core/src/main/java/org/opensearch/core/compress/spi/CompressorProvider.java
+++ b/libs/core/src/main/java/org/opensearch/core/compress/spi/CompressorProvider.java
@@ -18,7 +18,7 @@
/**
* Service Provider Interface for plugins, modules, extensions providing custom
* compression algorithms
- *
+ *
* see {@link Compressor} for implementing methods
* and {@link org.opensearch.core.compress.CompressorRegistry} for the registration of custom
* Compressors
diff --git a/libs/core/src/main/java/org/opensearch/core/index/Index.java b/libs/core/src/main/java/org/opensearch/core/index/Index.java
index fdff43f3c9139..a927179114188 100644
--- a/libs/core/src/main/java/org/opensearch/core/index/Index.java
+++ b/libs/core/src/main/java/org/opensearch/core/index/Index.java
@@ -48,6 +48,8 @@
/**
* A value class representing the basic required properties of an OpenSearch index.
+ *
+ * (This class is immutable.)
*
* @opensearch.api
*/
@@ -57,6 +59,8 @@ public class Index implements Writeable, ToXContentObject {
public static final Index[] EMPTY_ARRAY = new Index[0];
private static final String INDEX_UUID_KEY = "index_uuid";
private static final String INDEX_NAME_KEY = "index_name";
+ public static final String UNKNOWN_INDEX_NAME = "_unknown_";
+
private static final ObjectParser INDEX_PARSER = new ObjectParser<>("index", Builder::new);
static {
INDEX_PARSER.declareString(Builder::name, new ParseField(INDEX_NAME_KEY));
@@ -66,39 +70,74 @@ public class Index implements Writeable, ToXContentObject {
private final String name;
private final String uuid;
+ /**
+ * Creates a new Index instance with name and unique identifier
+ *
+ * @param name the name of the index
+ * @param uuid the unique identifier of the index
+ * @throws NullPointerException if either name or uuid are null
+ */
public Index(String name, String uuid) {
this.name = Objects.requireNonNull(name);
this.uuid = Objects.requireNonNull(uuid);
}
/**
- * Read from a stream.
+ * Creates a new Index instance from a {@link StreamInput}.
+ * Reads the name and unique identifier from the stream.
+ *
+ * @param in the stream to read from
+ * @throws IOException if an error occurs while reading from the stream
+ * @see #writeTo(StreamOutput)
*/
public Index(StreamInput in) throws IOException {
this.name = in.readString();
this.uuid = in.readString();
}
+ /**
+ * Gets the name of the index.
+ *
+ * @return the name of the index.
+ */
public String getName() {
return this.name;
}
+ /**
+ * Gets the unique identifier of the index.
+ *
+ * @return the unique identifier of the index, or {@link Strings#UNKNOWN_UUID_VALUE} ("_na_") if unknown.
+ */
public String getUUID() {
return uuid;
}
+ /**
+ * Returns either the name and unique identifier of the index
+ * or only the name if the uuid is {@link Strings#UNKNOWN_UUID_VALUE}.
+ *
+ * If we have a uuid we put it in the toString so it'll show up in logs
+ * which is useful as more and more things use the uuid rather
+ * than the name as the lookup key for the index.
+ *
+ * @return {@code "[name/uuid]"} or {@code "[name]"}
+ */
@Override
public String toString() {
- /*
- * If we have a uuid we put it in the toString so it'll show up in logs which is useful as more and more things use the uuid rather
- * than the name as the lookup key for the index.
- */
if (Strings.UNKNOWN_UUID_VALUE.equals(uuid)) {
return "[" + name + "]";
}
return "[" + name + "/" + uuid + "]";
}
+ /**
+ * Checks if this index is the same as another index by comparing the name and unique identifier.
+ * If both uuids are {@link Strings#UNKNOWN_UUID_VALUE} then only the name is compared.
+ *
+ * @param o the index to compare to
+ * @return true if the name and unique identifier are the same, false otherwise.
+ */
@Override
public boolean equals(Object o) {
if (this == o) {
@@ -118,6 +157,10 @@ public int hashCode() {
return result;
}
+ /** Writes the name and unique identifier to the {@link StreamOutput}
+ *
+ * @param out The stream to write to
+ */
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeString(name);
diff --git a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
index adea6cd8f0687..c0abad7ed727f 100644
--- a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
+++ b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
@@ -55,44 +55,87 @@ public class ShardId implements Comparable, ToXContentFragment, Writeab
private final int shardId;
private final int hashCode;
+ /**
+ * Constructs a new shard id.
+ * @param index the index name
+ * @param shardId the shard id
+ */
public ShardId(Index index, int shardId) {
this.index = index;
this.shardId = shardId;
this.hashCode = computeHashCode();
}
+ /**
+ * Constructs a new shard id with the given index name, index unique identifier, and shard id.
+ * @param index the index name
+ * @param indexUUID the index unique identifier
+ * @param shardId the shard id
+ */
public ShardId(String index, String indexUUID, int shardId) {
this(new Index(index, indexUUID), shardId);
}
+ /**
+ * Constructs a new shardId from a stream.
+ * @param in the stream to read from
+ * @throws IOException if an error occurs while reading from the stream
+ * @see #writeTo(StreamOutput)
+ */
public ShardId(StreamInput in) throws IOException {
index = new Index(in);
shardId = in.readVInt();
hashCode = computeHashCode();
}
+ /**
+ * Writes this shard id to a stream.
+ * @param out the stream to write to
+ * @throws IOException if an error occurs while writing to the stream
+ */
@Override
public void writeTo(StreamOutput out) throws IOException {
index.writeTo(out);
out.writeVInt(shardId);
}
+ /**
+ * Returns the index of this shard id.
+ * @return the index of this shard id
+ */
public Index getIndex() {
return index;
}
+ /**
+ * Returns the name of the index of this shard id.
+ * @return the name of the index of this shard id
+ */
public String getIndexName() {
return index.getName();
}
+ /**
+ * Returns the numeric id of this shard.
+ * @return the numeric shard id
+ * @see #getId()
+ */
public int id() {
return this.shardId;
}
+ /**
+ * Returns the numeric id of this shard.
+ * @return the numeric shard id
+ */
public int getId() {
return id();
}
+ /**
+ * Returns a string representation of this shard id.
+ * @return "[indexName][shardId]"
+ */
@Override
public String toString() {
return "[" + index.getName() + "][" + shardId + "]";
@@ -100,9 +143,13 @@ public String toString() {
/**
* Parse the string representation of this shardId back to an object.
+ *
* We lose index uuid information here, but since we use toString in
* rest responses, this is the best we can do to reconstruct the object
* on the client side.
+ *
+ * @param shardIdString the string representation of the shard id
+ * (Expects a string in the format "[indexName][shardId]", square brackets included)
*/
public static ShardId fromString(String shardIdString) {
int splitPosition = shardIdString.indexOf("][");
@@ -122,17 +169,30 @@ public boolean equals(Object o) {
return shardId == shardId1.shardId && index.equals(shardId1.index);
}
+ /** Returns the hash code of this shard id.
+ *
+ * @return the hash code of this shard id
+ */
@Override
public int hashCode() {
return hashCode;
}
+ /** Computes the hash code of this shard id.
+ *
+ * @return the hash code of this shard id.
+ */
private int computeHashCode() {
int result = index != null ? index.hashCode() : 0;
result = 31 * result + shardId;
return result;
}
+ /**
+ * Compares this ShardId with the specified ShardId.
+ * @param o the ShardId to be compared.
+ * @return a negative integer, zero, or a positive integer if this ShardId is less than, equal to, or greater than the specified ShardId
+ */
@Override
public int compareTo(ShardId o) {
if (o.getId() == shardId) {
diff --git a/libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java
index ab887acb85a87..992655efec8f0 100644
--- a/libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java
@@ -32,6 +32,7 @@
package org.opensearch.core.indices.breaker;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
@@ -43,29 +44,56 @@
/**
* Stats class encapsulating all of the different circuit breaker stats
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public class AllCircuitBreakerStats implements Writeable, ToXContentFragment {
+ /** An array of all the circuit breaker stats */
private final CircuitBreakerStats[] allStats;
+ /**
+ * Constructs the instance
+ *
+ * @param allStats an array of all the circuit breaker stats
+ */
public AllCircuitBreakerStats(CircuitBreakerStats[] allStats) {
this.allStats = allStats;
}
+ /**
+ * Constructs the new instance from {@link StreamInput}
+ * @param in the {@link StreamInput} to read from
+ * @throws IOException If an error occurs while reading from the StreamInput
+ * @see #writeTo(StreamOutput)
+ */
public AllCircuitBreakerStats(StreamInput in) throws IOException {
allStats = in.readArray(CircuitBreakerStats::new, CircuitBreakerStats[]::new);
}
+ /**
+ * Writes this instance into a {@link StreamOutput}
+ * @param out the {@link StreamOutput} to write to
+ * @throws IOException if an error occurs while writing to the StreamOutput
+ */
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeArray(allStats);
}
+ /**
+ * Returns inner stats instances for all circuit breakers
+ * @return inner stats instances for all circuit breakers
+ */
public CircuitBreakerStats[] getAllStats() {
return this.allStats;
}
+ /**
+ * Returns the stats for a specific circuit breaker
+ * @param name the name of the circuit breaker
+ * @return the {@link CircuitBreakerStats} for the circuit breaker, null if the circuit breaker with such name does not exist
+ */
public CircuitBreakerStats getStats(String name) {
for (CircuitBreakerStats stats : allStats) {
if (stats.getName().equals(name)) {
diff --git a/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java
index ee9c94f432a36..dedeb0803271f 100644
--- a/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java
@@ -32,8 +32,7 @@
package org.opensearch.core.indices.breaker;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
import org.opensearch.core.common.breaker.CircuitBreaker;
@@ -41,11 +40,10 @@
* Interface for Circuit Breaker services, which provide breakers to classes
* that load field data.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public abstract class CircuitBreakerService extends AbstractLifecycleComponent {
- private static final Logger logger = LogManager.getLogger(CircuitBreakerService.class);
-
protected CircuitBreakerService() {}
/**
diff --git a/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java
index 0e53a38908a96..ee71cf8d2ac0e 100644
--- a/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java
@@ -32,6 +32,7 @@
package org.opensearch.core.indices.breaker;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
@@ -43,18 +44,34 @@
import java.util.Locale;
/**
- * Class encapsulating stats about the circuit breaker
+ * Class encapsulating stats about the {@link org.opensearch.core.common.breaker.CircuitBreaker}
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public class CircuitBreakerStats implements Writeable, ToXContentObject {
+ /** The name of the circuit breaker */
private final String name;
+ /** The limit size in bytes of the circuit breaker. Field : "limit_size_in_bytes" */
private final long limit;
+ /** The estimated size in bytes of the breaker. Field : "estimated_size_in_bytes" */
private final long estimated;
+ /** The number of times the breaker has been tripped. Field : "tripped" */
private final long trippedCount;
+ /** The overhead of the breaker. Field : "overhead" */
private final double overhead;
+ /**
+ * Constructs new instance
+ *
+ * @param name The name of the circuit breaker
+ * @param limit The limit size in bytes of the circuit breaker
+ * @param estimated The estimated size in bytes of the breaker
+ * @param overhead The overhead of the breaker
+ * @param trippedCount The number of times the breaker has been tripped
+ * @see org.opensearch.core.common.breaker.CircuitBreaker
+ */
public CircuitBreakerStats(String name, long limit, long estimated, double overhead, long trippedCount) {
this.name = name;
this.limit = limit;
@@ -63,6 +80,14 @@ public CircuitBreakerStats(String name, long limit, long estimated, double overh
this.overhead = overhead;
}
+ /**
+ * Constructs new instance from the {@link StreamInput}
+ *
+ * @param in The StreamInput
+ * @throws IOException if an error occurs while reading from the StreamInput
+ * @see org.opensearch.core.common.breaker.CircuitBreaker
+ * @see #writeTo(StreamOutput)
+ */
public CircuitBreakerStats(StreamInput in) throws IOException {
this.limit = in.readLong();
this.estimated = in.readLong();
@@ -71,6 +96,13 @@ public CircuitBreakerStats(StreamInput in) throws IOException {
this.name = in.readString();
}
+ /**
+ * Writes this instance into a {@link StreamOutput}
+ *
+ * @param out The StreamOutput
+ * @throws IOException if an error occurs while writing to the StreamOutput
+ * @see #CircuitBreakerStats(StreamInput)
+ */
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeLong(limit);
@@ -80,22 +112,42 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
}
+ /**
+ * Returns the name of the circuit breaker
+ * @return The name of the circuit breaker
+ */
public String getName() {
return this.name;
}
+ /**
+ * Returns the limit size in bytes of the circuit breaker
+ * @return The limit size in bytes of the circuit breaker
+ */
public long getLimit() {
return this.limit;
}
+ /**
+ * Returns the estimated size in bytes of the breaker
+ * @return The estimated size in bytes of the breaker
+ */
public long getEstimated() {
return this.estimated;
}
+ /**
+ * Returns the number of times the breaker has been tripped
+ * @return The number of times the breaker has been tripped
+ */
public long getTrippedCount() {
return this.trippedCount;
}
+ /**
+ * Returns the overhead of the breaker
+ * @return The overhead of the breaker
+ */
public double getOverhead() {
return this.overhead;
}
@@ -113,6 +165,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}
+ /**
+ * Returns a String representation of this CircuitBreakerStats
+ * @return "[name,limit=limit/limit_human,estimated=estimated/estimated_human,overhead=overhead,tripped=trippedCount]"
+ */
@Override
public String toString() {
return "["
diff --git a/libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java
index 4095fd32b6d3c..49c5a393328b9 100644
--- a/libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java
@@ -36,8 +36,9 @@
import org.opensearch.core.common.breaker.NoopCircuitBreaker;
/**
- * Class that returns a breaker that never breaks
+ * Class that returns a breaker that uses the NoopCircuitBreaker and never breaks
*
+ * @see org.opensearch.core.common.breaker.NoopCircuitBreaker
* @opensearch.internal
*/
public class NoneCircuitBreakerService extends CircuitBreakerService {
@@ -48,6 +49,12 @@ public NoneCircuitBreakerService() {
super();
}
+ /**
+ * Returns a breaker that uses the NoopCircuitBreaker and never breaks
+ *
+ * @param name name of the breaker (ignored)
+ * @return a NoopCircuitBreaker
+ */
@Override
public CircuitBreaker getBreaker(String name) {
return breaker;
@@ -58,6 +65,12 @@ public AllCircuitBreakerStats stats() {
return new AllCircuitBreakerStats(new CircuitBreakerStats[] { stats(CircuitBreaker.FIELDDATA) });
}
+ /**
+ * Always returns the same stats, a NoopCircuitBreaker never breaks and all operations are noops.
+ *
+ * @param name name of the breaker (ignored)
+ * @return always "fielddata", limit: -1, estimated: -1, overhead: 0, trippedCount: 0
+ */
@Override
public CircuitBreakerStats stats(String name) {
return new CircuitBreakerStats(CircuitBreaker.FIELDDATA, -1, -1, 0, 0);
diff --git a/libs/core/src/main/java/org/opensearch/core/rest/RestStatus.java b/libs/core/src/main/java/org/opensearch/core/rest/RestStatus.java
index 313bc23bedc90..8441ce8b1b622 100644
--- a/libs/core/src/main/java/org/opensearch/core/rest/RestStatus.java
+++ b/libs/core/src/main/java/org/opensearch/core/rest/RestStatus.java
@@ -527,6 +527,15 @@ public int getStatus() {
return status;
}
+ /**
+ * Get category class of a rest status code.
+ *
+ * @return Integer representing class category of the concrete rest status code
+ */
+ public int getStatusFamilyCode() {
+ return status / 100;
+ }
+
public static RestStatus readFrom(StreamInput in) throws IOException {
return RestStatus.valueOf(in.readString());
}
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceStats.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceStats.java
index d65f75581dd1b..e99afbb759031 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceStats.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceStats.java
@@ -8,11 +8,14 @@
package org.opensearch.core.tasks.resourcetracker;
+import org.opensearch.common.annotation.PublicApi;
+
/**
* Different resource stats are defined.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.1.0")
public enum ResourceStats {
CPU("cpu_time_in_nanos"),
MEMORY("memory_in_bytes");
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceStatsType.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceStatsType.java
index fce8cc65e9bc5..2aedff2940d83 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceStatsType.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceStatsType.java
@@ -8,11 +8,14 @@
package org.opensearch.core.tasks.resourcetracker;
+import org.opensearch.common.annotation.PublicApi;
+
/**
* Defines the different types of resource stats.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.1.0")
public enum ResourceStatsType {
// resource stats of the worker thread reported directly from runnable.
WORKER_STATS("worker_stats", false);
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
index 2cbc3d4b2f5c3..a278b61894a65 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
@@ -10,6 +10,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import org.opensearch.common.annotation.PublicApi;
import java.util.Collections;
import java.util.EnumMap;
@@ -22,8 +23,9 @@
* It captures the resource usage information like memory, CPU about a particular execution of thread
* for a specific stats type.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.1.0")
public class ResourceUsageInfo {
private static final Logger logger = LogManager.getLogger(ResourceUsageInfo.class);
private final EnumMap statsInfo = new EnumMap<>(ResourceStats.class);
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageMetric.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageMetric.java
index 262dbe20dabda..f4cce2de820a0 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageMetric.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageMetric.java
@@ -8,11 +8,14 @@
package org.opensearch.core.tasks.resourcetracker;
+import org.opensearch.common.annotation.PublicApi;
+
/**
* Information about resource usage
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.1.0")
public class ResourceUsageMetric {
private final ResourceStats stats;
private final long value;
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceStats.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceStats.java
index d0d26550a4742..048c4a228fbd5 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceStats.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceStats.java
@@ -9,6 +9,7 @@
package org.opensearch.core.tasks.resourcetracker;
import org.opensearch.Version;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
@@ -29,8 +30,9 @@
* Writeable TaskResourceStats objects are used to represent resource
* snapshot information about currently running task.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.1.0")
public class TaskResourceStats implements Writeable, ToXContentFragment {
private final Map resourceUsage;
private final TaskThreadUsage threadUsage;
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceUsage.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceUsage.java
index 7d6cadbef23d7..654f1c5695937 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceUsage.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceUsage.java
@@ -8,6 +8,7 @@
package org.opensearch.core.tasks.resourcetracker;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.ParseField;
import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -30,8 +31,9 @@
* Writeable TaskResourceUsage objects are used to represent resource usage
* information of running tasks.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.1.0")
public class TaskResourceUsage implements Writeable, ToXContentFragment {
private static final ParseField CPU_TIME_IN_NANOS = new ParseField("cpu_time_in_nanos");
private static final ParseField MEMORY_IN_BYTES = new ParseField("memory_in_bytes");
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskThreadUsage.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskThreadUsage.java
index b593ec96e5996..abe03e3c520e0 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskThreadUsage.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskThreadUsage.java
@@ -8,6 +8,7 @@
package org.opensearch.core.tasks.resourcetracker;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.ParseField;
import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -29,8 +30,9 @@
* Writeable TaskThreadExecutions objects are used to represent thread related resource usage of running tasks.
* asd
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.1.0")
public class TaskThreadUsage implements Writeable, ToXContentFragment {
private static final String THREAD_EXECUTIONS = "thread_executions";
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ThreadResourceInfo.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ThreadResourceInfo.java
index 4b341a94256c4..703fdfdf8a784 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ThreadResourceInfo.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ThreadResourceInfo.java
@@ -8,14 +8,17 @@
package org.opensearch.core.tasks.resourcetracker;
+import org.opensearch.common.annotation.PublicApi;
+
/**
* Resource consumption information about a particular execution of thread.
*
* It captures the resource usage information about a particular execution of thread
* for a specific stats type like worker_stats or response_stats etc.,
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "2.1.0")
public class ThreadResourceInfo {
private final long threadId;
private volatile boolean isActive = true;
diff --git a/libs/core/src/main/java/org/opensearch/core/util/BytesRefUtils.java b/libs/core/src/main/java/org/opensearch/core/util/BytesRefUtils.java
index 30c9f182fcae6..2aad068534b9d 100644
--- a/libs/core/src/main/java/org/opensearch/core/util/BytesRefUtils.java
+++ b/libs/core/src/main/java/org/opensearch/core/util/BytesRefUtils.java
@@ -32,6 +32,7 @@
package org.opensearch.core.util;
+import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefArray;
import org.apache.lucene.util.BytesRefBuilder;
@@ -103,12 +104,12 @@ public static int sortAndDedup(final BytesRefArray bytes, final int[] indices) {
return uniqueCount;
}
+ /**
+ * Decodes a long value written as bytes in big endian order.
+ * @param bytes in big endian order
+ * @return long value
+ */
public static long bytesToLong(BytesRef bytes) {
- int high = (bytes.bytes[bytes.offset + 0] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) | ((bytes.bytes[bytes.offset + 2]
- & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
- int low = (bytes.bytes[bytes.offset + 4] << 24) | ((bytes.bytes[bytes.offset + 5] & 0xff) << 16) | ((bytes.bytes[bytes.offset + 6]
- & 0xff) << 8) | (bytes.bytes[bytes.offset + 7] & 0xff);
- return (((long) high) << 32) | (low & 0x0ffffffffL);
+ return (long) BitUtil.VH_BE_LONG.get(bytes.bytes, bytes.offset);
}
-
}
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/AbstractObjectParser.java b/libs/core/src/main/java/org/opensearch/core/xcontent/AbstractObjectParser.java
index a0e2a54fce91c..32bbfc600f1f0 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/AbstractObjectParser.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/AbstractObjectParser.java
@@ -33,6 +33,7 @@
package org.opensearch.core.xcontent;
import org.opensearch.common.CheckedFunction;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.ParseField;
import org.opensearch.core.xcontent.ObjectParser.NamedObjectParser;
import org.opensearch.core.xcontent.ObjectParser.ValueType;
@@ -47,8 +48,9 @@
/**
* Superclass for {@link ObjectParser} and {@link ConstructingObjectParser}. Defines most of the "declare" methods so they can be shared.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public abstract class AbstractObjectParser<Value, Context> {
/**
@@ -108,7 +110,7 @@ public abstract void declareNamedObject(
*
* Unlike the other version of this method, "ordered" mode (arrays of
* objects) is not supported.
- *
+ *
* See NamedObjectHolder in ObjectParserTests for examples of how to invoke
* this.
*
@@ -163,7 +165,7 @@ public abstract void declareNamedObjects(
* the order sent but tools that generate json are free to put object
* members in an unordered Map, jumbling them. Thus, if you care about order
* you can send the object in the second way.
- *
+ *
* See NamedObjectHolder in ObjectParserTests for examples of how to invoke
* this.
*
@@ -366,10 +368,10 @@ public void declareFieldArray(
/**
* Declares a set of fields that are required for parsing to succeed. Only one of the values
* provided per String[] must be matched.
- *
+ *
* E.g. {@code declareRequiredFieldSet("foo", "bar");} means at least one of "foo" or
* "bar" fields must be present. If neither of those fields are present, an exception will be thrown.
- *
+ *
* Multiple required sets can be configured:
*
*
@@ -379,7 +381,7 @@ public void declareFieldArray(
*
* requires that one of "foo" or "bar" fields are present, and also that one of "bizz" or
* "buzz" fields are present.
- *
+ *
* In JSON, it means any of these combinations are acceptable:
*
*
@@ -415,12 +417,12 @@ public void declareFieldArray(
/**
* Declares a set of fields of which at most one must appear for parsing to succeed
- *
+ *
* E.g. {@code declareExclusiveFieldSet("foo", "bar");} means that only one of 'foo'
* or 'bar' must be present, and if both appear then an exception will be thrown. Note
* that this does not make 'foo' or 'bar' required - see {@link #declareRequiredFieldSet(String...)}
* for required fields.
- *
+ *
* Multiple exclusive sets may be declared
*
* @param exclusiveSet a set of field names, at most one of which must appear
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/ContextParser.java b/libs/core/src/main/java/org/opensearch/core/xcontent/ContextParser.java
index d50dd2e68d890..f6e5647532bee 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/ContextParser.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/ContextParser.java
@@ -32,11 +32,16 @@
package org.opensearch.core.xcontent;
+import org.opensearch.common.annotation.PublicApi;
+
import java.io.IOException;
/**
* Reads an object from a parser using some context.
+ *
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
@FunctionalInterface
public interface ContextParser<Context, T> {
T parse(XContentParser p, Context c) throws IOException;
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/DeprecationHandler.java b/libs/core/src/main/java/org/opensearch/core/xcontent/DeprecationHandler.java
index 570a13ad8e093..a0e4027290742 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/DeprecationHandler.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/DeprecationHandler.java
@@ -32,12 +32,17 @@
package org.opensearch.core.xcontent;
+import org.opensearch.common.annotation.PublicApi;
+
import java.util.function.Supplier;
/**
* Callback for notifying the creator of the {@link XContentParser} that
* parsing hit a deprecated field.
+ *
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public interface DeprecationHandler {
/**
* Throws an {@link UnsupportedOperationException} when parsing hits a
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MapXContentParser.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MapXContentParser.java
index 254c340f8836f..0a5cda324ddb7 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/MapXContentParser.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/MapXContentParser.java
@@ -277,7 +277,7 @@ public Token currentToken() {
/**
* field name that the child element needs to inherit.
- *
+ *
* In most cases this is the same as currentName() except with embedded arrays. In "foo": [[42]] the first START_ARRAY
* token will have the name "foo", but the second START_ARRAY will have no name.
*/
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/ObjectParser.java b/libs/core/src/main/java/org/opensearch/core/xcontent/ObjectParser.java
index 365b36c755dd2..04d0bce27c04f 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/ObjectParser.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/ObjectParser.java
@@ -32,6 +32,7 @@
package org.opensearch.core.xcontent;
import org.opensearch.common.Nullable;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.ParseField;
import java.io.IOException;
@@ -83,8 +84,9 @@
* It's highly recommended to use the high level declare methods like {@link #declareString(BiConsumer, ParseField)} instead of
* {@link #declareField} which can be used to implement exceptional parsing operations not covered by the high level methods.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context>
    implements
        BiFunction<XContentParser, Context, Value>,
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/ToXContent.java b/libs/core/src/main/java/org/opensearch/core/xcontent/ToXContent.java
index 90dd0cbfb9a1a..ee8dad198df09 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/ToXContent.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/ToXContent.java
@@ -33,6 +33,7 @@
package org.opensearch.core.xcontent;
import org.opensearch.common.Booleans;
+import org.opensearch.common.annotation.PublicApi;
import java.io.IOException;
import java.util.Map;
@@ -42,15 +43,17 @@
* The output may or may not be a value object. Objects implementing {@link ToXContentObject} output a valid value
* but those that don't may or may not require emitting a startObject and an endObject.
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public interface ToXContent {
/**
* Base parameters class
*
- * @opensearch.internal
+ * @opensearch.api
*/
+ @PublicApi(since = "1.0.0")
interface Params {
String param(String key);
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContent.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContent.java
index dbc0041af42b5..1ebdd69d2b7a3 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContent.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContent.java
@@ -32,6 +32,8 @@
package org.opensearch.core.xcontent;
+import org.opensearch.common.annotation.PublicApi;
+
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -41,7 +43,10 @@
/**
* A generic abstraction on top of handling content, inspired by JSON and pull parsing.
+ *
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public interface XContent {
/**
* The type this content handles and produces.
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
index a38bdd049ee88..976f353100c55 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
@@ -726,7 +726,7 @@ public XContentBuilder value(byte[] value, int offset, int length) throws IOExce
/**
* Writes the binary content of the given byte array as UTF-8 bytes.
- *
+ *
* Use {@link XContentParser#charBuffer()} to read the value back
*/
public XContentBuilder utf8Value(byte[] bytes, int offset, int length) throws IOException {
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilderExtension.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilderExtension.java
index 0535da1a584be..9b13ebb23be86 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilderExtension.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilderExtension.java
@@ -37,7 +37,7 @@
/**
* This interface provides a way for non-JDK classes to plug in a way to serialize to xcontent.
- *
+ *
* It is greatly preferred that you implement {@link ToXContentFragment}
* in the class for encoding, however, in some situations you may not own the
* class, in which case you can add an implementation here for encoding it.
@@ -63,7 +63,7 @@ public interface XContentBuilderExtension {
* Used for plugging in a human readable version of a class's encoding. It is assumed that
* the human readable equivalent is always behind the {@code toString()} method, so
* this transformer returns the raw value to be used.
- *
+ *
* An example implementation:
*
*
@@ -79,7 +79,7 @@ public interface XContentBuilderExtension {
/**
* Used for plugging a transformer for a date or time type object into a String (or other
* encodable object).
- *
+ *
* For example:
*
*
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentParser.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentParser.java
index a2f16209a5b7f..85c3579b74cd5 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentParser.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentParser.java
@@ -33,6 +33,7 @@
package org.opensearch.core.xcontent;
import org.opensearch.common.CheckedFunction;
+import org.opensearch.common.annotation.PublicApi;
import java.io.Closeable;
import java.io.IOException;
@@ -44,7 +45,7 @@
/**
* Interface for pull - parsing {@link XContent} see {@code XContentType} for supported types.
- *
+ *
* To obtain an instance of this class use the following pattern:
*
*
@@ -53,8 +54,9 @@
* NamedXContentRegistry.EMPTY, ParserField."{\"key\" : \"value\"}");
*
*
- * @opensearch.internal
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public interface XContentParser extends Closeable {
/**
@@ -202,11 +204,11 @@ Map<String, Object> map(Supplier<Map<String, Object>> mapFactory, CheckedFunction
* Default implementation simply returns false since only actual
* implementation class has knowledge of its internal buffering
* state.
- *
+ *
* This method shouldn't be used to check if the token contains text or not.
*/
boolean hasTextCharacters();
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentParserUtils.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentParserUtils.java
index 13e2f6a695d1b..b10be393f9adb 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentParserUtils.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentParserUtils.java
@@ -142,10 +142,10 @@ public static Object parseFieldsValue(XContentParser parser) throws IOException
* This method expects that the current field name is the concatenation of a type, a delimiter and a name
* (ex: terms#foo where "terms" refers to the type of a registered {@link NamedXContentRegistry.Entry},
* "#" is the delimiter and "foo" the name of the object to parse).
- *
+ *
* It also expected that following this field name is either an Object or an array xContent structure and
* the cursor points to the start token of this structure.
- *
+ *
* The method splits the field's name to extract the type and name and then parses the object
* using the {@link XContentParser#namedObject(Class, String, Object)} method.
*
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentSubParser.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentSubParser.java
index d1cdda4aeb8be..337cf9f95fe5f 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentSubParser.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentSubParser.java
@@ -43,7 +43,7 @@
/**
* Wrapper for a XContentParser that makes a single object/array look like a complete document.
- *
+ *
* The wrapper prevents the parsing logic to consume tokens outside of the wrapped object as well
* as skipping to the end of the object in case of a parsing error. The wrapper is intended to be
* used for parsing objects that should be ignored if they are malformed.
diff --git a/libs/core/src/test/java/org/opensearch/core/util/BytesRefUtilsTests.java b/libs/core/src/test/java/org/opensearch/core/util/BytesRefUtilsTests.java
index 421263b883f2a..214f9292ae3a5 100644
--- a/libs/core/src/test/java/org/opensearch/core/util/BytesRefUtilsTests.java
+++ b/libs/core/src/test/java/org/opensearch/core/util/BytesRefUtilsTests.java
@@ -12,7 +12,6 @@
import org.apache.lucene.util.BytesRefArray;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.Counter;
-import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.test.OpenSearchTestCase;
import java.nio.ByteBuffer;
@@ -90,8 +89,12 @@ public void testSortByteRefArray() {
}
public void testBytesToLong() {
- final long value = randomLong();
- final BytesReference buffer = BytesReference.fromByteBuffer(ByteBuffer.allocate(8).putLong(value).flip());
- assertThat(BytesRefUtils.bytesToLong(buffer.toBytesRef()), equalTo(value));
+ long value = randomLong();
+ int paddingStart = randomIntBetween(0, 10);
+ int paddingEnd = randomIntBetween(0, 10);
+ byte[] bytes = new byte[paddingStart + Long.BYTES + paddingEnd];
+ ByteBuffer.wrap(bytes).putLong(paddingStart, value);
+ BytesRef bytesRef = new BytesRef(bytes, paddingStart, Long.BYTES);
+ assertThat(BytesRefUtils.bytesToLong(bytesRef), equalTo(value));
}
}
diff --git a/libs/dissect/src/main/java/org/opensearch/dissect/DissectParser.java b/libs/dissect/src/main/java/org/opensearch/dissect/DissectParser.java
index 9861847c9e1ea..828d4b7de450e 100644
--- a/libs/dissect/src/main/java/org/opensearch/dissect/DissectParser.java
+++ b/libs/dissect/src/main/java/org/opensearch/dissect/DissectParser.java
@@ -194,26 +194,25 @@ public DissectParser(String pattern, String appendSeparator) {
* @throws DissectException if unable to dissect a pair into it's parts.
*/
public Map<String, String> parse(String inputString) {
- /**
- *
- * This implements a naive string matching algorithm. The string is walked left to right, comparing each byte against
- * another string's bytes looking for matches. If the bytes match, then a second cursor looks ahead to see if all the bytes
- * of the other string matches. If they all match, record it and advances the primary cursor to the match point. If it can not match
- * all of the bytes then progress the main cursor. Repeat till the end of the input string. Since the string being searching for
- * (the delimiter) is generally small and rare the naive approach is efficient.
- *
- * In this case the string that is walked is the input string, and the string being searched for is the current delimiter.
- * For example for a dissect pattern of {@code %{a},%{b}:%{c}} the delimiters (comma then colon) are searched for in the
- * input string. At class construction the list of keys+delimiters are found (dissectPairs), which allows the use of that ordered
- * list to know which delimiter to use for the search. The delimiters is progressed once the current delimiter is matched.
- *
- * There are two special cases that requires additional parsing beyond the standard naive algorithm. Consecutive delimiters should
- * results in a empty matches unless the {@code ->} is provided. For example given the dissect pattern of
- * {@code %{a},%{b},%{c},%{d}} and input string of {@code foo,,,} the match should be successful with empty values for b,c and d.
- * However, if the key modifier {@code ->}, is present it will simply skip over any delimiters just to the right of the key
- * without assigning any values. For example {@code %{a->},{%b}} will match the input string of {@code foo,,,,,,bar} with a=foo and
- * b=bar.
- *
+ /*
+
+ This implements a naive string matching algorithm. The string is walked left to right, comparing each byte against
+ another string's bytes looking for matches. If the bytes match, then a second cursor looks ahead to see if all the bytes
+ of the other string matches. If they all match, record it and advances the primary cursor to the match point. If it can not match
+ all of the bytes then progress the main cursor. Repeat till the end of the input string. Since the string being searching for
+ (the delimiter) is generally small and rare the naive approach is efficient.
+
+ In this case the string that is walked is the input string, and the string being searched for is the current delimiter.
+ For example for a dissect pattern of {@code %{a},%{b}:%{c}} the delimiters (comma then colon) are searched for in the
+ input string. At class construction the list of keys+delimiters are found (dissectPairs), which allows the use of that ordered
+ list to know which delimiter to use for the search. The delimiters is progressed once the current delimiter is matched.
+
+ There are two special cases that requires additional parsing beyond the standard naive algorithm. Consecutive delimiters should
+ results in a empty matches unless the {@code ->} is provided. For example given the dissect pattern of
+ {@code %{a},%{b},%{c},%{d}} and input string of {@code foo,,,} the match should be successful with empty values for b,c and d.
+ However, if the key modifier {@code ->}, is present it will simply skip over any delimiters just to the right of the key
+ without assigning any values. For example {@code %{a->},{%b}} will match the input string of {@code foo,,,,,,bar} with a=foo and
+ b=bar.
*/
DissectMatch dissectMatch = new DissectMatch(appendSeparator, maxMatches, maxResults, appendCount, referenceCount);
Iterator<DissectPair> it = matchPairs.iterator();
@@ -232,7 +231,10 @@ public Map parse(String inputString) {
int lookAheadMatches;
// start walking the input string byte by byte, look ahead for matches where needed
// if a match is found jump forward to the end of the match
- for (; i < input.length; i++) {
+ while (i < input.length) {
+ // start is only used to record the value of i
+ int start = i;
+
lookAheadMatches = 0;
// potential match between delimiter and input string
if (delimiter.length > 0 && input[i] == delimiter[0]) {
@@ -284,8 +286,14 @@ public Map parse(String inputString) {
delimiter = dissectPair.getDelimiter().getBytes(StandardCharsets.UTF_8);
// i is always one byte after the last found delimiter, aka the start of the next value
valueStart = i;
+ } else {
+ i++;
}
+ } else {
+ i++;
}
+ // i should change anyway
+ assert (i != start);
}
// the last key, grab the rest of the input (unless consecutive delimiters already grabbed the last key)
// and there is no trailing delimiter
diff --git a/libs/geo/src/main/java/org/opensearch/geometry/Circle.java b/libs/geo/src/main/java/org/opensearch/geometry/Circle.java
index 6f8b0dc6929cc..c05f316b53b9c 100644
--- a/libs/geo/src/main/java/org/opensearch/geometry/Circle.java
+++ b/libs/geo/src/main/java/org/opensearch/geometry/Circle.java
@@ -39,12 +39,19 @@
* and optional altitude in meters.
*/
public class Circle implements Geometry {
+
+ /** Empty circle : x=0, y=0, z=NaN radius=-1 */
public static final Circle EMPTY = new Circle();
+ /** Latitude of the center of the circle in degrees */
private final double y;
+ /** Longitude of the center of the circle in degrees */
private final double x;
+ /** Altitude of the center of the circle in meters (NaN if irrelevant) */
private final double z;
+ /** Radius of the circle in meters */
private final double radiusMeters;
+ /** Create an {@link #EMPTY} circle */
private Circle() {
y = 0;
x = 0;
@@ -52,10 +59,23 @@ private Circle() {
radiusMeters = -1;
}
+ /**
+ * Create a circle with no altitude.
+ * @param x Longitude of the center of the circle in degrees
+ * @param y Latitude of the center of the circle in degrees
+ * @param radiusMeters Radius of the circle in meters
+ */
public Circle(final double x, final double y, final double radiusMeters) {
this(x, y, Double.NaN, radiusMeters);
}
+ /**
+ * Create a circle with altitude.
+ * @param x Longitude of the center of the circle in degrees
+ * @param y Latitude of the center of the circle in degrees
+ * @param z Altitude of the center of the circle in meters
+ * @param radiusMeters Radius of the circle in meters
+ */
public Circle(final double x, final double y, final double z, final double radiusMeters) {
this.y = y;
this.x = x;
@@ -66,39 +86,68 @@ public Circle(final double x, final double y, final double z, final double radiu
}
}
+ /**
+ * @return The type of this geometry (always {@link ShapeType#CIRCLE})
+ */
@Override
public ShapeType type() {
return ShapeType.CIRCLE;
}
+ /**
+ * @return The y (latitude) of the center of the circle in degrees
+ */
public double getY() {
return y;
}
+ /**
+ * @return The x (longitude) of the center of the circle in degrees
+ */
public double getX() {
return x;
}
+ /**
+ * @return The radius of the circle in meters
+ */
public double getRadiusMeters() {
return radiusMeters;
}
+ /**
+ * @return The altitude of the center of the circle in meters (NaN if irrelevant)
+ */
public double getZ() {
return z;
}
+ /**
+ * @return The latitude (y) of the center of the circle in degrees
+ */
public double getLat() {
return y;
}
+ /**
+ * @return The longitude (x) of the center of the circle in degrees
+ */
public double getLon() {
return x;
}
+ /**
+ * @return The altitude (z) of the center of the circle in meters (NaN if irrelevant)
+ */
public double getAlt() {
return z;
}
+ /**
+ * Compare this circle to another circle.
+ * @param o The other circle
+ * @return True if the two circles are equal in all their properties. False if null or different.
+ */
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -111,6 +160,9 @@ public boolean equals(Object o) {
return (Double.compare(circle.z, z) == 0);
}
+ /**
+ * @return The hashcode of this circle.
+ */
@Override
public int hashCode() {
int result;
@@ -126,11 +178,23 @@ public int hashCode() {
return result;
}
+ /**
+ * Visit this circle with a {@link GeometryVisitor}.
+ *
+ * @param visitor The visitor
+     * @param <T> The return type of the visitor
+     * @param <E> The exception type of the visitor
+ * @return The result of the visitor
+ * @throws E The exception thrown by the visitor
+ */
@Override
public <T, E extends Exception> T visit(GeometryVisitor<T, E> visitor) throws E {
return visitor.visit(this);
}
+ /**
+ * @return True if this circle is empty (radius less than 0)
+ */
@Override
public boolean isEmpty() {
return radiusMeters < 0;
@@ -141,6 +205,9 @@ public String toString() {
return WellKnownText.INSTANCE.toWKT(this);
}
+ /**
+ * @return True if this circle has an altitude. False if NaN.
+ */
@Override
public boolean hasZ() {
return Double.isNaN(z) == false;
diff --git a/libs/geo/src/main/java/org/opensearch/geometry/utils/BitUtil.java b/libs/geo/src/main/java/org/opensearch/geometry/utils/BitUtil.java
index 664e7e68d96a5..c946cc2473202 100644
--- a/libs/geo/src/main/java/org/opensearch/geometry/utils/BitUtil.java
+++ b/libs/geo/src/main/java/org/opensearch/geometry/utils/BitUtil.java
@@ -48,8 +48,8 @@ public class BitUtil { // magic numbers for bit interleaving
/**
* Interleaves the first 32 bits of each long value
- *
- * Adapted from: http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
+ *
+ * Adapted from: bithacks.html#InterleaveBMN
*/
public static long interleave(int even, int odd) {
long v1 = 0x00000000FFFFFFFFL & even;
diff --git a/libs/geo/src/main/java/org/opensearch/geometry/utils/Geohash.java b/libs/geo/src/main/java/org/opensearch/geometry/utils/Geohash.java
index 8b3b841e221e5..33c423e136613 100644
--- a/libs/geo/src/main/java/org/opensearch/geometry/utils/Geohash.java
+++ b/libs/geo/src/main/java/org/opensearch/geometry/utils/Geohash.java
@@ -39,12 +39,12 @@
/**
* Utilities for converting to/from the GeoHash standard
- *
+ *
* The geohash long format is represented as lon/lat (x/y) interleaved with the 4 least significant bits
* representing the level (1-12) [xyxy...xyxyllll]
- *
+ *
* This differs from a morton encoded value which interleaves lat/lon (y/x).
- *
+ *
* NOTE: this will replace {@code org.opensearch.common.geo.GeoHashUtils}
*/
public class Geohash {
diff --git a/libs/grok/src/main/java/org/opensearch/grok/Grok.java b/libs/grok/src/main/java/org/opensearch/grok/Grok.java
index cd786b74be039..7aa3347ba4f4b 100644
--- a/libs/grok/src/main/java/org/opensearch/grok/Grok.java
+++ b/libs/grok/src/main/java/org/opensearch/grok/Grok.java
@@ -151,7 +151,7 @@ private void validatePatternBank() {
/**
* Checks whether patterns reference each other in a circular manner and, if so, fail with an exception.
* Also checks for malformed pattern definitions and fails with an exception.
- *
+ *
* In a pattern, anything between <code>%{</code> and <code>}</code> or <code>:</code> is considered
* a reference to another named pattern. This method will navigate to all these named patterns and
* check for a circular reference.
diff --git a/libs/grok/src/main/java/org/opensearch/grok/MatcherWatchdog.java b/libs/grok/src/main/java/org/opensearch/grok/MatcherWatchdog.java
index 5c7eaca2a634a..d5b7566ecc90f 100644
--- a/libs/grok/src/main/java/org/opensearch/grok/MatcherWatchdog.java
+++ b/libs/grok/src/main/java/org/opensearch/grok/MatcherWatchdog.java
@@ -44,7 +44,7 @@
* Protects against long running operations that happen between the register and unregister invocations.
* Threads that invoke {@link #register(Matcher)}, but take too long to invoke the {@link #unregister(Matcher)} method
* will be interrupted.
- *
+ *
* This is needed for Joni's {@link org.joni.Matcher#search(int, int, int)} method, because
* it can end up spinning endlessly if the regular expression is too complex. Joni has checks
* that for every 30k iterations it checks if the current thread is interrupted and if so
diff --git a/libs/nio/src/main/java/org/opensearch/nio/ChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/ChannelContext.java
index 797dfe859fa6c..0e29661978716 100644
--- a/libs/nio/src/main/java/org/opensearch/nio/ChannelContext.java
+++ b/libs/nio/src/main/java/org/opensearch/nio/ChannelContext.java
@@ -116,10 +116,10 @@ protected void handleException(Exception e) {
/**
* Schedules a channel to be closed by the selector event loop with which it is registered.
- *
+ *
* If the channel is open and the state can be transitioned to closed, the close operation will
* be scheduled with the event loop.
- *
+ *
* Depending on the underlying protocol of the channel, a close operation might simply close the socket
* channel or may involve reading and writing messages.
*/
diff --git a/libs/nio/src/main/java/org/opensearch/nio/NioSelector.java b/libs/nio/src/main/java/org/opensearch/nio/NioSelector.java
index a38a33182afea..4ed745723515c 100644
--- a/libs/nio/src/main/java/org/opensearch/nio/NioSelector.java
+++ b/libs/nio/src/main/java/org/opensearch/nio/NioSelector.java
@@ -512,12 +512,12 @@ private void handleQueuedWrites() {
* This is a convenience method to be called after some object (normally channels) are enqueued with this
* selector. This method will check if the selector is still open. If it is open, normal operation can
* proceed.
- *
+ *
* If the selector is closed, then we attempt to remove the object from the queue. If the removal
* succeeds then we throw an {@link IllegalStateException} indicating that normal operation failed. If
* the object cannot be removed from the queue, then the object has already been handled by the selector
* and operation can proceed normally.
- *
+ *
* If this method is called from the selector thread, we will not allow the queuing to occur as the
* selector thread can manipulate its queues internally even if it is no longer open.
*
diff --git a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
index 12a1e80055823..3df8e42fe4f14 100644
--- a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
+++ b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
@@ -59,7 +59,7 @@
* that it is ready to perform certain operations (read, write, etc) the {@link SocketChannelContext} will
* be called. This context will need to implement all protocol related logic. Additionally, if any special
* close behavior is required, it should be implemented in this context.
- *
+ *
* The only methods of the context that should ever be called from a non-selector thread are
* {@link #closeChannel()} and {@link #sendMessage(Object, BiConsumer)}.
*/
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Counter.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Counter.java
new file mode 100644
index 0000000000000..c62288d280e2f
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Counter.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * Counter adds the value to the existing metric.
+ * {@opensearch.experimental}
+ */
+@ExperimentalApi
+public interface Counter {
+
+ /**
+ * add value.
+ * @param value value to be added.
+ */
+ void add(double value);
+
+ /**
+ * add value along with the attributes.
+ *
+ * @param value value to be added.
+ * @param tags attributes/dimensions of the metric.
+ */
+ void add(double value, Tags tags);
+
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
new file mode 100644
index 0000000000000..d57def9406b17
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
@@ -0,0 +1,41 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import java.io.IOException;
+
+/**
+ * Default implementation for {@link MetricsRegistry}
+ */
+class DefaultMetricsRegistry implements MetricsRegistry {
+ private final MetricsTelemetry metricsTelemetry;
+
+ /**
+ * Constructor
+ * @param metricsTelemetry metrics telemetry.
+ */
+ public DefaultMetricsRegistry(MetricsTelemetry metricsTelemetry) {
+ this.metricsTelemetry = metricsTelemetry;
+ }
+
+ @Override
+ public Counter createCounter(String name, String description, String unit) {
+ return metricsTelemetry.createCounter(name, description, unit);
+ }
+
+ @Override
+ public Counter createUpDownCounter(String name, String description, String unit) {
+ return metricsTelemetry.createUpDownCounter(name, description, unit);
+ }
+
+ @Override
+ public void close() throws IOException {
+ metricsTelemetry.close();
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
new file mode 100644
index 0000000000000..61b3df089928b
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
@@ -0,0 +1,39 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+
+import java.io.Closeable;
+
+/**
+ * MetricsRegistry helps in creating the metric instruments.
+ * @opensearch.experimental
+ */
+@ExperimentalApi
+public interface MetricsRegistry extends Closeable {
+
+ /**
+ * Creates the counter.
+ * @param name name of the counter.
+ * @param description any description about the metric.
+ * @param unit unit of the metric.
+ * @return counter.
+ */
+ Counter createCounter(String name, String description, String unit);
+
+ /**
+ * Creates the upDown counter.
+ * @param name name of the upDown counter.
+ * @param description any description about the metric.
+ * @param unit unit of the metric.
+ * @return counter.
+ */
+ Counter createUpDownCounter(String name, String description, String unit);
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsTelemetry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsTelemetry.java
index 0bf9482fe58d8..fb3dec8152b4f 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsTelemetry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsTelemetry.java
@@ -16,6 +16,6 @@
* @opensearch.experimental
*/
@ExperimentalApi
-public interface MetricsTelemetry {
+public interface MetricsTelemetry extends MetricsRegistry {
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopCounter.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopCounter.java
new file mode 100644
index 0000000000000..c1daf564dd3bc
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopCounter.java
@@ -0,0 +1,38 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics.noop;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.telemetry.metrics.Counter;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * No-op {@link Counter}
+ * @opensearch.internal
+ */
+@InternalApi
+public class NoopCounter implements Counter {
+
+ /**
+ * No-op Counter instance
+ */
+ public final static NoopCounter INSTANCE = new NoopCounter();
+
+ private NoopCounter() {}
+
+ @Override
+ public void add(double value) {
+
+ }
+
+ @Override
+ public void add(double value, Tags tags) {
+
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
new file mode 100644
index 0000000000000..640c6842a8960
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
@@ -0,0 +1,45 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics.noop;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.telemetry.metrics.Counter;
+import org.opensearch.telemetry.metrics.MetricsRegistry;
+
+import java.io.IOException;
+
+/**
+ * No-op {@link MetricsRegistry}
+ * @opensearch.internal
+ */
+@InternalApi
+public class NoopMetricsRegistry implements MetricsRegistry {
+
+ /**
+ * No-op MetricsRegistry instance
+ */
+ public final static NoopMetricsRegistry INSTANCE = new NoopMetricsRegistry();
+
+ private NoopMetricsRegistry() {}
+
+ @Override
+ public Counter createCounter(String name, String description, String unit) {
+ return NoopCounter.INSTANCE;
+ }
+
+ @Override
+ public Counter createUpDownCounter(String name, String description, String unit) {
+ return NoopCounter.INSTANCE;
+ }
+
+ @Override
+ public void close() throws IOException {
+
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/package-info.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/package-info.java
new file mode 100644
index 0000000000000..7c7ed08044993
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/package-info.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Contains metrics related classes
+ * @opensearch.internal
+ */
+@InternalApi
+package org.opensearch.telemetry.metrics.noop;
+
+import org.opensearch.common.annotation.InternalApi;
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/tags/Tags.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/tags/Tags.java
new file mode 100644
index 0000000000000..f2a8764f8021d
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/tags/Tags.java
@@ -0,0 +1,99 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics.tags;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Class to create tags for a meter.
+ *
+ * @opensearch.experimental
+ */
+@ExperimentalApi
+public class Tags {
+ private final Map tagsMap;
+ /**
+ * Empty value.
+ */
+ public final static Tags EMPTY = new Tags(Collections.emptyMap());
+
+ /**
+ * Factory method.
+ * @return tags.
+ */
+ public static Tags create() {
+ return new Tags(new HashMap<>());
+ }
+
+ /**
+ * Constructor.
+ */
+ private Tags(Map tagsMap) {
+ this.tagsMap = tagsMap;
+ }
+
+ /**
+ * Add String attribute.
+ * @param key key
+ * @param value value
+ * @return Same instance.
+ */
+ public Tags addTag(String key, String value) {
+ Objects.requireNonNull(value, "value cannot be null");
+ tagsMap.put(key, value);
+ return this;
+ }
+
+ /**
+ * Add long attribute.
+ * @param key key
+ * @param value value
+ * @return Same instance.
+ */
+ public Tags addTag(String key, long value) {
+ tagsMap.put(key, value);
+ return this;
+ };
+
+ /**
+ * Add double attribute.
+ * @param key key
+ * @param value value
+ * @return Same instance.
+ */
+ public Tags addTag(String key, double value) {
+ tagsMap.put(key, value);
+ return this;
+ };
+
+ /**
+ * Add boolean attribute.
+ * @param key key
+ * @param value value
+ * @return Same instance.
+ */
+ public Tags addTag(String key, boolean value) {
+ tagsMap.put(key, value);
+ return this;
+ };
+
+ /**
+ * Returns the attribute map.
+ * @return tags map
+ */
+ public Map getTagsMap() {
+ return Collections.unmodifiableMap(tagsMap);
+ }
+
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/tags/package-info.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/tags/package-info.java
new file mode 100644
index 0000000000000..70bc9be992b32
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/tags/package-info.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Contains metrics related classes
+ * @opensearch.experimental
+ */
+@ExperimentalApi
+package org.opensearch.telemetry.metrics.tags;
+
+import org.opensearch.common.annotation.ExperimentalApi;
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
index a5d515443b54d..93600da510977 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
@@ -21,6 +21,7 @@
class DefaultSpanScope implements SpanScope {
private final Span span;
private final SpanScope previousSpanScope;
+ private final Span beforeSpan;
private static final ThreadLocal spanScopeThreadLocal = new ThreadLocal<>();
private final TracerContextStorage tracerContextStorage;
@@ -29,8 +30,14 @@ class DefaultSpanScope implements SpanScope {
* @param span span
* @param previousSpanScope before attached span scope.
*/
- private DefaultSpanScope(Span span, SpanScope previousSpanScope, TracerContextStorage tracerContextStorage) {
+ private DefaultSpanScope(
+ Span span,
+ final Span beforeSpan,
+ SpanScope previousSpanScope,
+ TracerContextStorage tracerContextStorage
+ ) {
this.span = Objects.requireNonNull(span);
+ this.beforeSpan = beforeSpan;
this.previousSpanScope = previousSpanScope;
this.tracerContextStorage = tracerContextStorage;
}
@@ -43,26 +50,27 @@ private DefaultSpanScope(Span span, SpanScope previousSpanScope, TracerContextSt
*/
public static SpanScope create(Span span, TracerContextStorage tracerContextStorage) {
final SpanScope beforeSpanScope = spanScopeThreadLocal.get();
- SpanScope newSpanScope = new DefaultSpanScope(span, beforeSpanScope, tracerContextStorage);
- spanScopeThreadLocal.set(newSpanScope);
+ final Span beforeSpan = tracerContextStorage.get(TracerContextStorage.CURRENT_SPAN);
+ SpanScope newSpanScope = new DefaultSpanScope(span, beforeSpan, beforeSpanScope, tracerContextStorage);
return newSpanScope;
}
@Override
public void close() {
detach();
- spanScopeThreadLocal.set(previousSpanScope);
}
@Override
public SpanScope attach() {
+ spanScopeThreadLocal.set(this);
tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, this.span);
return this;
}
private void detach() {
- if (previousSpanScope != null) {
- tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, previousSpanScope.getSpan());
+ spanScopeThreadLocal.set(previousSpanScope);
+ if (beforeSpan != null) {
+ tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, beforeSpan);
} else {
tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, null);
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultTracer.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultTracer.java
index 5b7795a5647f5..8f1a26d99e725 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultTracer.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultTracer.java
@@ -9,11 +9,10 @@
package org.opensearch.telemetry.tracing;
import org.opensearch.common.annotation.InternalApi;
-import org.opensearch.telemetry.tracing.attributes.Attributes;
import java.io.Closeable;
import java.io.IOException;
-import java.util.List;
+import java.util.Collection;
import java.util.Map;
import java.util.Optional;
@@ -26,7 +25,10 @@
*/
@InternalApi
class DefaultTracer implements Tracer {
- static final String THREAD_NAME = "th_name";
+ /**
+ * Current thread name.
+ */
+ static final String THREAD_NAME = "thread.name";
private final TracingTelemetry tracingTelemetry;
private final TracerContextStorage tracerContextStorage;
@@ -44,28 +46,13 @@ public DefaultTracer(TracingTelemetry tracingTelemetry, TracerContextStorage> headers) {
+ public Span startSpan(SpanCreationContext spanCreationContext, Map> headers) {
Optional propagatedSpan = tracingTelemetry.getContextPropagator().extractFromHeaders(headers);
- return startSpan(
- spanCreationContext.getSpanName(),
- propagatedSpan.map(SpanContext::new).orElse(null),
- spanCreationContext.getAttributes()
- );
+ return startSpan(spanCreationContext.parent(propagatedSpan.map(SpanContext::new).orElse(null)));
}
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/SpanCreationContext.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/SpanCreationContext.java
index 10cb665e83b01..cbbcfe7a85d57 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/SpanCreationContext.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/SpanCreationContext.java
@@ -18,17 +18,74 @@
*/
@ExperimentalApi
public final class SpanCreationContext {
- private final String spanName;
- private final Attributes attributes;
+ private String spanName;
+ private Attributes attributes;
+ private SpanKind spanKind = SpanKind.INTERNAL;
+ private SpanContext parent;
/**
* Constructor.
+ */
+ private SpanCreationContext() {}
+
+ /**
+ * Sets the span type to server.
+ * @return spanCreationContext
+ */
+ public static SpanCreationContext server() {
+ SpanCreationContext spanCreationContext = new SpanCreationContext();
+ spanCreationContext.spanKind = SpanKind.SERVER;
+ return spanCreationContext;
+ }
+
+ /**
+ * Sets the span type to client.
+ * @return spanCreationContext
+ */
+ public static SpanCreationContext client() {
+ SpanCreationContext spanCreationContext = new SpanCreationContext();
+ spanCreationContext.spanKind = SpanKind.CLIENT;
+ return spanCreationContext;
+ }
+
+ /**
+ * Sets the span type to internal.
+ * @return spanCreationContext
+ */
+ public static SpanCreationContext internal() {
+ SpanCreationContext spanCreationContext = new SpanCreationContext();
+ spanCreationContext.spanKind = SpanKind.INTERNAL;
+ return spanCreationContext;
+ }
+
+ /**
+ * Sets the span name.
* @param spanName span name.
- * @param attributes attributes.
+ * @return spanCreationContext
*/
- public SpanCreationContext(String spanName, Attributes attributes) {
+ public SpanCreationContext name(String spanName) {
this.spanName = spanName;
+ return this;
+ }
+
+ /**
+ * Sets the span attributes.
+ * @param attributes attributes.
+ * @return spanCreationContext
+ */
+ public SpanCreationContext attributes(Attributes attributes) {
this.attributes = attributes;
+ return this;
+ }
+
+ /**
+ * Sets the parent for the span.
+ * @param parent parent
+ * @return spanCreationContext
+ */
+ public SpanCreationContext parent(SpanContext parent) {
+ this.parent = parent;
+ return this;
}
/**
@@ -46,4 +103,20 @@ public String getSpanName() {
public Attributes getAttributes() {
return attributes;
}
+
+ /**
+ * Returns the span kind.
+ * @return spanKind.
+ */
+ public SpanKind getSpanKind() {
+ return spanKind;
+ }
+
+ /**
+ * Returns the parent span
+ * @return parent.
+ */
+ public SpanContext getParent() {
+ return parent;
+ }
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/SpanKind.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/SpanKind.java
new file mode 100644
index 0000000000000..d674bb2c866f2
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/SpanKind.java
@@ -0,0 +1,31 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.tracing;
+
+import org.opensearch.common.annotation.PublicApi;
+
+/**
+ * Type of Span.
+ */
+@PublicApi(since = "2.11.0")
+public enum SpanKind {
+ /**
+ * Span represents the client side code.
+ */
+ CLIENT,
+ /**
+ * Span represents the server side code.
+ */
+ SERVER,
+
+ /**
+ * Span represents the internal calls. This is the default value of a span type.
+ */
+ INTERNAL;
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/Tracer.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/Tracer.java
index 19ffc68a62df0..9b49ca7668992 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/Tracer.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/Tracer.java
@@ -9,21 +9,20 @@
package org.opensearch.telemetry.tracing;
import org.opensearch.common.annotation.ExperimentalApi;
-import org.opensearch.telemetry.tracing.attributes.Attributes;
-import org.opensearch.telemetry.tracing.http.HttpTracer;
+import org.opensearch.telemetry.tracing.transport.TransportTracer;
import java.io.Closeable;
/**
* Tracer is the interface used to create a {@link Span}
* It automatically handles the context propagation between threads, tasks, nodes etc.
- *
+ *
* All methods on the Tracer object are multi-thread safe.
*
* @opensearch.experimental
*/
@ExperimentalApi
-public interface Tracer extends HttpTracer, Closeable {
+public interface Tracer extends TransportTracer, Closeable {
/**
* Starts the {@link Span} with given {@link SpanCreationContext}
*
@@ -32,34 +31,6 @@ public interface Tracer extends HttpTracer, Closeable {
*/
Span startSpan(SpanCreationContext context);
- /**
- * Starts the {@link Span} with given name
- *
- * @param spanName span name
- * @return span, must be closed.
- */
- Span startSpan(String spanName);
-
- /**
- * Starts the {@link Span} with given name and attributes. This is required in cases when some attribute based
- * decision needs to be made before starting the span. Very useful in the case of Sampling.
- *
- * @param spanName span name.
- * @param attributes attributes to be added.
- * @return span, must be closed.
- */
- Span startSpan(String spanName, Attributes attributes);
-
- /**
- * Starts the {@link Span} with the given name, parent and attributes.
- *
- * @param spanName span name.
- * @param parentSpan parent span.
- * @param attributes attributes to be added.
- * @return span, must be closed.
- */
- Span startSpan(String spanName, SpanContext parentSpan, Attributes attributes);
-
/**
* Returns the current span.
* @return current wrapped span.
@@ -74,15 +45,6 @@ public interface Tracer extends HttpTracer, Closeable {
*/
ScopedSpan startScopedSpan(SpanCreationContext spanCreationContext);
- /**
- * Start the span and scoped it. This must be used for scenarios where {@link SpanScope} and {@link Span} lifecycles
- * are same and ends within the same thread where created.
- * @param spanCreationContext span creation context
- * @param parentSpan parent span.
- * @return scope of the span, must be closed with explicit close or with try-with-resource
- */
- ScopedSpan startScopedSpan(SpanCreationContext spanCreationContext, SpanContext parentSpan);
-
/**
* Creates the Span Scope for a current thread. It's mandatory to scope the span just after creation so that it will
* automatically manage the attach /detach to the current thread.
@@ -91,4 +53,10 @@ public interface Tracer extends HttpTracer, Closeable {
*/
SpanScope withSpanInScope(Span span);
+ /**
+ * Tells if the traces are being recorded or not
+ * @return boolean
+ */
+ boolean isRecording();
+
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/TracingContextPropagator.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/TracingContextPropagator.java
index 5fbc5d329e227..d7d48d1db10d6 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/TracingContextPropagator.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/TracingContextPropagator.java
@@ -10,7 +10,7 @@
import org.opensearch.common.annotation.ExperimentalApi;
-import java.util.List;
+import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiConsumer;
@@ -36,7 +36,7 @@ public interface TracingContextPropagator {
* @param headers request headers to extract the context from
* @return current span
*/
- Optional extractFromHeaders(Map> headers);
+ Optional extractFromHeaders(Map> headers);
/**
* Injects tracing context
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/TracingTelemetry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/TracingTelemetry.java
index e1811e85f8890..f04a505088424 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/TracingTelemetry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/TracingTelemetry.java
@@ -9,7 +9,6 @@
package org.opensearch.telemetry.tracing;
import org.opensearch.common.annotation.ExperimentalApi;
-import org.opensearch.telemetry.tracing.attributes.Attributes;
import java.io.Closeable;
@@ -24,12 +23,11 @@ public interface TracingTelemetry extends Closeable {
/**
* Creates span with provided arguments
*
- * @param spanName name of the span
- * @param parentSpan span's parent span
- * @param attributes attributes to be added.
+ * @param spanCreationContext span creation context.
+ * @param parentSpan parent span.
* @return span instance
*/
- Span createSpan(String spanName, Span parentSpan, Attributes attributes);
+ Span createSpan(SpanCreationContext spanCreationContext, Span parentSpan);
/**
* provides tracing context propagator
@@ -37,9 +35,4 @@ public interface TracingTelemetry extends Closeable {
*/
TracingContextPropagator getContextPropagator();
- /**
- * closes the resource
- */
- void close();
-
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/noop/NoopTracer.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/noop/NoopTracer.java
index d7206a3c6b094..c57eaccf1f3df 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/noop/NoopTracer.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/noop/NoopTracer.java
@@ -15,9 +15,8 @@
import org.opensearch.telemetry.tracing.SpanCreationContext;
import org.opensearch.telemetry.tracing.SpanScope;
import org.opensearch.telemetry.tracing.Tracer;
-import org.opensearch.telemetry.tracing.attributes.Attributes;
-import java.util.List;
+import java.util.Collection;
import java.util.Map;
/**
@@ -40,21 +39,6 @@ public Span startSpan(SpanCreationContext context) {
return NoopSpan.INSTANCE;
}
- @Override
- public Span startSpan(String spanName) {
- return NoopSpan.INSTANCE;
- }
-
- @Override
- public Span startSpan(String spanName, Attributes attributes) {
- return NoopSpan.INSTANCE;
- }
-
- @Override
- public Span startSpan(String spanName, SpanContext parentSpan, Attributes attributes) {
- return NoopSpan.INSTANCE;
- }
-
@Override
public SpanContext getCurrentSpan() {
return new SpanContext(NoopSpan.INSTANCE);
@@ -66,13 +50,13 @@ public ScopedSpan startScopedSpan(SpanCreationContext spanCreationContext) {
}
@Override
- public ScopedSpan startScopedSpan(SpanCreationContext spanCreationContext, SpanContext parentSpan) {
- return ScopedSpan.NO_OP;
+ public SpanScope withSpanInScope(Span span) {
+ return SpanScope.NO_OP;
}
@Override
- public SpanScope withSpanInScope(Span span) {
- return SpanScope.NO_OP;
+ public boolean isRecording() {
+ return false;
}
@Override
@@ -81,7 +65,7 @@ public void close() {
}
@Override
- public Span startSpan(SpanCreationContext spanCreationContext, Map> header) {
+ public Span startSpan(SpanCreationContext spanCreationContext, Map> header) {
return NoopSpan.INSTANCE;
}
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/runnable/TraceableRunnable.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/runnable/TraceableRunnable.java
index 4672574e9f4ca..8a61dd70d6d54 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/runnable/TraceableRunnable.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/runnable/TraceableRunnable.java
@@ -9,40 +9,32 @@
package org.opensearch.telemetry.tracing.runnable;
import org.opensearch.telemetry.tracing.ScopedSpan;
-import org.opensearch.telemetry.tracing.SpanContext;
import org.opensearch.telemetry.tracing.SpanCreationContext;
import org.opensearch.telemetry.tracing.Tracer;
-import org.opensearch.telemetry.tracing.attributes.Attributes;
/**
* Wraps the runnable and add instrumentation to trace the {@link Runnable}
*/
public class TraceableRunnable implements Runnable {
private final Runnable runnable;
- private final SpanContext parent;
+ private final SpanCreationContext spanCreationContext;
private final Tracer tracer;
- private final String spanName;
- private final Attributes attributes;
/**
* Constructor.
* @param tracer tracer
- * @param spanName spanName
- * @param parent parent Span.
- * @param attributes attributes.
+ * @param spanCreationContext spanCreationContext
* @param runnable runnable.
*/
- public TraceableRunnable(Tracer tracer, String spanName, SpanContext parent, Attributes attributes, Runnable runnable) {
+ public TraceableRunnable(Tracer tracer, SpanCreationContext spanCreationContext, Runnable runnable) {
this.tracer = tracer;
- this.spanName = spanName;
- this.parent = parent;
- this.attributes = attributes;
+ this.spanCreationContext = spanCreationContext;
this.runnable = runnable;
}
@Override
public void run() {
- try (ScopedSpan spanScope = tracer.startScopedSpan(new SpanCreationContext(spanName, attributes), parent)) {
+ try (ScopedSpan spanScope = tracer.startScopedSpan(spanCreationContext)) {
runnable.run();
}
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/http/HttpTracer.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/transport/TransportTracer.java
similarity index 64%
rename from libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/http/HttpTracer.java
rename to libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/transport/TransportTracer.java
index b0692f1b62a48..5883d7de8e83a 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/http/HttpTracer.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/transport/TransportTracer.java
@@ -6,31 +6,31 @@
* compatible open source license.
*/
-package org.opensearch.telemetry.tracing.http;
+package org.opensearch.telemetry.tracing.transport;
import org.opensearch.common.annotation.ExperimentalApi;
import org.opensearch.telemetry.tracing.Span;
import org.opensearch.telemetry.tracing.SpanCreationContext;
-import java.util.List;
+import java.util.Collection;
import java.util.Map;
/**
- * HttpTracer helps in creating a {@link Span} which reads the incoming tracing information
- * from the HttpRequest header and propagate the span accordingly.
- *
+ * TransportTracer helps in creating a {@link Span} which reads the incoming tracing information
+ * from the HTTP or TCP transport headers and propagate the span accordingly.
+ *
* All methods on the Tracer object are multi-thread safe.
*
* @opensearch.experimental
*/
@ExperimentalApi
-public interface HttpTracer {
+public interface TransportTracer {
/**
* Start the span with propagating the tracing info from the HttpRequest header.
*
* @param spanCreationContext span creation context.
- * @param header http request header.
- * @return span.
+ * @param headers transport headers
+ * @return the span instance
*/
- Span startSpan(SpanCreationContext spanCreationContext, Map> header);
+ Span startSpan(SpanCreationContext spanCreationContext, Map> headers);
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/transport/package-info.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/transport/package-info.java
new file mode 100644
index 0000000000000..87ffcc43184bb
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/transport/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Contains HTTP or TCP transport related tracer capabilities
+ */
+package org.opensearch.telemetry.tracing.transport;
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
new file mode 100644
index 0000000000000..6171641db5f07
--- /dev/null
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
@@ -0,0 +1,51 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.test.OpenSearchTestCase;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class DefaultMetricsRegistryTests extends OpenSearchTestCase {
+
+ private MetricsTelemetry metricsTelemetry;
+ private DefaultMetricsRegistry defaultMeterRegistry;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ metricsTelemetry = mock(MetricsTelemetry.class);
+ defaultMeterRegistry = new DefaultMetricsRegistry(metricsTelemetry);
+ }
+
+ public void testCounter() {
+ Counter mockCounter = mock(Counter.class);
+ when(defaultMeterRegistry.createCounter(any(String.class), any(String.class), any(String.class))).thenReturn(mockCounter);
+ Counter counter = defaultMeterRegistry.createCounter(
+ "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testCounter",
+ "test counter",
+ "1"
+ );
+ assertSame(mockCounter, counter);
+ }
+
+ public void testUpDownCounter() {
+ Counter mockCounter = mock(Counter.class);
+ when(defaultMeterRegistry.createUpDownCounter(any(String.class), any(String.class), any(String.class))).thenReturn(mockCounter);
+ Counter counter = defaultMeterRegistry.createUpDownCounter(
+ "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testUpDownCounter",
+ "test up-down counter",
+ "1"
+ );
+ assertSame(mockCounter, counter);
+ }
+
+}
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/tracing/DefaultTracerTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/tracing/DefaultTracerTests.java
index 48b72e1f673fe..2182b3ea28ac8 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/tracing/DefaultTracerTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/tracing/DefaultTracerTests.java
@@ -22,7 +22,8 @@
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
-import static org.mockito.ArgumentMatchers.any;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.nullValue;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
@@ -36,9 +37,9 @@ public class DefaultTracerTests extends OpenSearchTestCase {
private Span mockSpan;
private Span mockParentSpan;
- private SpanScope mockSpanScope;
private ThreadPool threadPool;
private ExecutorService executorService;
+ private SpanCreationContext spanCreationContext;
@Override
public void setUp() throws Exception {
@@ -58,27 +59,31 @@ public void tearDown() throws Exception {
public void testCreateSpan() {
DefaultTracer defaultTracer = new DefaultTracer(mockTracingTelemetry, mockTracerContextStorage);
- defaultTracer.startSpan("span_name");
+ defaultTracer.startSpan(spanCreationContext);
- assertEquals("span_name", defaultTracer.getCurrentSpan().getSpan().getSpanName());
+ String spanName = defaultTracer.getCurrentSpan().getSpan().getSpanName();
+ assertEquals("span_name", spanName);
+ assertTrue(defaultTracer.isRecording());
}
@SuppressWarnings("unchecked")
public void testCreateSpanWithAttributesWithMock() {
DefaultTracer defaultTracer = new DefaultTracer(mockTracingTelemetry, mockTracerContextStorage);
Attributes attributes = Attributes.create().addAttribute("name", "value");
- when(mockTracingTelemetry.createSpan(eq("span_name"), eq(mockParentSpan), eq(attributes))).thenReturn(mockSpan);
- defaultTracer.startSpan("span_name", attributes);
- verify(mockTracingTelemetry).createSpan(eq("span_name"), eq(mockParentSpan), eq(attributes));
+ SpanCreationContext spanCreationContext = buildSpanCreationContext("span_name", attributes, mockParentSpan);
+ when(mockTracingTelemetry.createSpan(eq(spanCreationContext), eq(mockParentSpan))).thenReturn(mockSpan);
+ defaultTracer.startSpan(spanCreationContext);
+ verify(mockTracingTelemetry).createSpan(eq(spanCreationContext), eq(mockParentSpan));
}
@SuppressWarnings("unchecked")
public void testCreateSpanWithAttributesWithParentMock() {
DefaultTracer defaultTracer = new DefaultTracer(mockTracingTelemetry, mockTracerContextStorage);
Attributes attributes = Attributes.create().addAttribute("name", "value");
- when(mockTracingTelemetry.createSpan(eq("span_name"), eq(mockParentSpan), eq(attributes))).thenReturn(mockSpan);
- defaultTracer.startSpan("span_name", new SpanContext(mockParentSpan), attributes);
- verify(mockTracingTelemetry).createSpan(eq("span_name"), eq(mockParentSpan), eq(attributes));
+ SpanCreationContext spanCreationContext = buildSpanCreationContext("span_name", attributes, mockParentSpan);
+ when(mockTracingTelemetry.createSpan(eq(spanCreationContext), eq(mockParentSpan))).thenReturn(mockSpan);
+ defaultTracer.startSpan(spanCreationContext);
+ verify(mockTracingTelemetry).createSpan(eq(spanCreationContext), eq(mockParentSpan));
verify(mockTracerContextStorage, never()).get(TracerContextStorage.CURRENT_SPAN);
}
@@ -90,16 +95,19 @@ public void testCreateSpanWithAttributes() {
new ThreadContextBasedTracerContextStorage(threadContext, tracingTelemetry)
);
- Span span = defaultTracer.startSpan(
+ SpanCreationContext spanCreationContext = buildSpanCreationContext(
"span_name",
- Attributes.create().addAttribute("key1", 1.0).addAttribute("key2", 2l).addAttribute("key3", true).addAttribute("key4", "key4")
+ Attributes.create().addAttribute("key1", 1.0).addAttribute("key2", 2l).addAttribute("key3", true).addAttribute("key4", "key4"),
+ null
);
- assertEquals("span_name", defaultTracer.getCurrentSpan().getSpan().getSpanName());
- assertEquals(1.0, ((MockSpan) defaultTracer.getCurrentSpan().getSpan()).getAttribute("key1"));
- assertEquals(2l, ((MockSpan) defaultTracer.getCurrentSpan().getSpan()).getAttribute("key2"));
- assertEquals(true, ((MockSpan) defaultTracer.getCurrentSpan().getSpan()).getAttribute("key3"));
- assertEquals("key4", ((MockSpan) defaultTracer.getCurrentSpan().getSpan()).getAttribute("key4"));
+ Span span = defaultTracer.startSpan(spanCreationContext);
+
+ assertThat(defaultTracer.getCurrentSpan(), is(nullValue()));
+ assertEquals(1.0, ((MockSpan) span).getAttribute("key1"));
+ assertEquals(2l, ((MockSpan) span).getAttribute("key2"));
+ assertEquals(true, ((MockSpan) span).getAttribute("key3"));
+ assertEquals("key4", ((MockSpan) span).getAttribute("key4"));
span.endSpan();
}
@@ -110,25 +118,32 @@ public void testCreateSpanWithParent() {
new ThreadContextBasedTracerContextStorage(new ThreadContext(Settings.EMPTY), tracingTelemetry)
);
- Span span = defaultTracer.startSpan("span_name", null);
+ SpanCreationContext spanCreationContext = buildSpanCreationContext("span_name", null, null);
- SpanContext parentSpan = defaultTracer.getCurrentSpan();
+ Span span = defaultTracer.startSpan(spanCreationContext, null);
- Span span1 = defaultTracer.startSpan("span_name_1", parentSpan, Attributes.EMPTY);
+ try (final SpanScope scope = defaultTracer.withSpanInScope(span)) {
+ SpanContext parentSpan = defaultTracer.getCurrentSpan();
- assertEquals("span_name_1", defaultTracer.getCurrentSpan().getSpan().getSpanName());
- assertEquals(parentSpan.getSpan(), defaultTracer.getCurrentSpan().getSpan().getParentSpan());
- span1.endSpan();
- span.endSpan();
+ SpanCreationContext spanCreationContext1 = buildSpanCreationContext("span_name_1", Attributes.EMPTY, parentSpan.getSpan());
+
+ try (final ScopedSpan span1 = defaultTracer.startScopedSpan(spanCreationContext1)) {
+ assertEquals("span_name_1", defaultTracer.getCurrentSpan().getSpan().getSpanName());
+ assertEquals(parentSpan.getSpan(), defaultTracer.getCurrentSpan().getSpan().getParentSpan());
+ }
+ } finally {
+ span.endSpan();
+ }
}
@SuppressWarnings("unchecked")
public void testCreateSpanWithContext() {
DefaultTracer defaultTracer = new DefaultTracer(mockTracingTelemetry, mockTracerContextStorage);
Attributes attributes = Attributes.create().addAttribute("name", "value");
- when(mockTracingTelemetry.createSpan(eq("span_name"), eq(mockParentSpan), eq(attributes))).thenReturn(mockSpan);
- defaultTracer.startSpan(new SpanCreationContext("span_name", attributes));
- verify(mockTracingTelemetry).createSpan(eq("span_name"), eq(mockParentSpan), eq(attributes));
+ SpanCreationContext spanCreationContext = buildSpanCreationContext("span_name", attributes, mockParentSpan);
+ when(mockTracingTelemetry.createSpan(eq(spanCreationContext), eq(mockParentSpan))).thenReturn(mockSpan);
+ defaultTracer.startSpan(spanCreationContext);
+ verify(mockTracingTelemetry).createSpan(eq(spanCreationContext), eq(mockParentSpan));
}
public void testCreateSpanWithNullParent() {
@@ -139,10 +154,11 @@ public void testCreateSpanWithNullParent() {
new ThreadContextBasedTracerContextStorage(threadContext, tracingTelemetry)
);
- Span span = defaultTracer.startSpan("span_name", (SpanContext) null, Attributes.EMPTY);
+ SpanCreationContext spanCreationContext = buildSpanCreationContext("span_name", Attributes.EMPTY, null);
- assertEquals("span_name", defaultTracer.getCurrentSpan().getSpan().getSpanName());
- assertEquals(null, defaultTracer.getCurrentSpan().getSpan().getParentSpan());
+ Span span = defaultTracer.startSpan(spanCreationContext);
+
+ assertThat(defaultTracer.getCurrentSpan(), is(nullValue()));
span.endSpan();
}
@@ -154,7 +170,10 @@ public void testEndSpanByClosingScopedSpan() {
tracingTelemetry
);
DefaultTracer defaultTracer = new DefaultTracer(tracingTelemetry, spanTracerStorage);
- ScopedSpan scopedSpan = defaultTracer.startScopedSpan(new SpanCreationContext("span_name", Attributes.EMPTY));
+ SpanCreationContext spanCreationContext = buildSpanCreationContext("span_name", Attributes.EMPTY, null);
+
+ ScopedSpan scopedSpan = defaultTracer.startScopedSpan(spanCreationContext);
+
assertEquals("span_name", defaultTracer.getCurrentSpan().getSpan().getSpanName());
assertEquals(((DefaultScopedSpan) scopedSpan).getSpanScope(), DefaultSpanScope.getCurrentSpanScope());
scopedSpan.close();
@@ -172,8 +191,11 @@ public void testEndSpanByClosingScopedSpanMultiple() {
tracingTelemetry
);
DefaultTracer defaultTracer = new DefaultTracer(tracingTelemetry, spanTracerStorage);
- ScopedSpan scopedSpan = defaultTracer.startScopedSpan(new SpanCreationContext("span_name", Attributes.EMPTY));
- ScopedSpan scopedSpan1 = defaultTracer.startScopedSpan(new SpanCreationContext("span_name_1", Attributes.EMPTY));
+ SpanCreationContext spanCreationContext = buildSpanCreationContext("span_name", Attributes.EMPTY, null);
+ SpanCreationContext spanCreationContext1 = buildSpanCreationContext("span_name_1", Attributes.EMPTY, null);
+
+ ScopedSpan scopedSpan = defaultTracer.startScopedSpan(spanCreationContext);
+ ScopedSpan scopedSpan1 = defaultTracer.startScopedSpan(spanCreationContext1);
assertEquals("span_name_1", defaultTracer.getCurrentSpan().getSpan().getSpanName());
assertEquals(((DefaultScopedSpan) scopedSpan1).getSpanScope(), DefaultSpanScope.getCurrentSpanScope());
@@ -198,7 +220,8 @@ public void testEndSpanByClosingSpanScope() {
tracingTelemetry
);
DefaultTracer defaultTracer = new DefaultTracer(tracingTelemetry, spanTracerStorage);
- Span span = defaultTracer.startSpan(new SpanCreationContext("span_name", Attributes.EMPTY));
+ SpanCreationContext spanCreationContext = buildSpanCreationContext("span_name", Attributes.EMPTY, null);
+ Span span = defaultTracer.startSpan(spanCreationContext);
SpanScope spanScope = defaultTracer.withSpanInScope(span);
assertEquals("span_name", defaultTracer.getCurrentSpan().getSpan().getSpanName());
assertEquals(spanScope, DefaultSpanScope.getCurrentSpanScope());
@@ -218,8 +241,8 @@ public void testEndSpanByClosingSpanScopeMultiple() {
tracingTelemetry
);
DefaultTracer defaultTracer = new DefaultTracer(tracingTelemetry, spanTracerStorage);
- Span span = defaultTracer.startSpan(new SpanCreationContext("span_name", Attributes.EMPTY));
- Span span1 = defaultTracer.startSpan(new SpanCreationContext("span_name_1", Attributes.EMPTY));
+ Span span = defaultTracer.startSpan(buildSpanCreationContext("span_name", Attributes.EMPTY, null));
+ Span span1 = defaultTracer.startSpan(buildSpanCreationContext("span_name_1", Attributes.EMPTY, null));
SpanScope spanScope = defaultTracer.withSpanInScope(span);
SpanScope spanScope1 = defaultTracer.withSpanInScope(span1);
assertEquals("span_name_1", defaultTracer.getCurrentSpan().getSpan().getSpanName());
@@ -261,11 +284,11 @@ public void testSpanAcrossThreads() {
CompletableFuture> asyncTask = CompletableFuture.runAsync(() -> {
// create a span
- Span span = defaultTracer.startSpan(new SpanCreationContext("span_name_t_1", Attributes.EMPTY));
+ Span span = defaultTracer.startSpan(buildSpanCreationContext("span_name_t_1", Attributes.EMPTY, null));
SpanScope spanScope = defaultTracer.withSpanInScope(span);
CompletableFuture> asyncTask1 = CompletableFuture.runAsync(() -> {
- Span spanT2 = defaultTracer.startSpan(new SpanCreationContext("span_name_t_2", Attributes.EMPTY));
+ Span spanT2 = defaultTracer.startSpan(buildSpanCreationContext("span_name_t_2", Attributes.EMPTY, null));
SpanScope spanScopeT2 = defaultTracer.withSpanInScope(spanT2);
assertEquals(spanT2, defaultTracer.getCurrentSpan().getSpan());
@@ -289,7 +312,7 @@ public void testSpanCloseOnThread2() {
tracingTelemetry
);
DefaultTracer defaultTracer = new DefaultTracer(tracingTelemetry, spanTracerStorage);
- final Span span = defaultTracer.startSpan(new SpanCreationContext("span_name_t1", Attributes.EMPTY));
+ final Span span = defaultTracer.startSpan(buildSpanCreationContext("span_name_t1", Attributes.EMPTY, null));
try (SpanScope spanScope = defaultTracer.withSpanInScope(span)) {
CompletableFuture> asyncTask = CompletableFuture.runAsync(() -> async(new ActionListener() {
@Override
@@ -337,16 +360,16 @@ public void testSpanAcrossThreadsMultipleSpans() {
CompletableFuture> asyncTask = CompletableFuture.runAsync(() -> {
// create a parent span
- Span parentSpan = defaultTracer.startSpan(new SpanCreationContext("p_span_name", Attributes.EMPTY));
+ Span parentSpan = defaultTracer.startSpan(buildSpanCreationContext("p_span_name", Attributes.EMPTY, null));
SpanScope parentSpanScope = defaultTracer.withSpanInScope(parentSpan);
// create a span
- Span span = defaultTracer.startSpan(new SpanCreationContext("span_name_t_1", Attributes.EMPTY));
+ Span span = defaultTracer.startSpan(buildSpanCreationContext("span_name_t_1", Attributes.EMPTY, null));
SpanScope spanScope = defaultTracer.withSpanInScope(span);
CompletableFuture> asyncTask1 = CompletableFuture.runAsync(() -> {
- Span spanT2 = defaultTracer.startSpan(new SpanCreationContext("span_name_t_2", Attributes.EMPTY));
+ Span spanT2 = defaultTracer.startSpan(buildSpanCreationContext("span_name_t_2", Attributes.EMPTY, null));
SpanScope spanScopeT2 = defaultTracer.withSpanInScope(spanT2);
- Span spanT21 = defaultTracer.startSpan(new SpanCreationContext("span_name_t_2", Attributes.EMPTY));
+ Span spanT21 = defaultTracer.startSpan(buildSpanCreationContext("span_name_t_2", Attributes.EMPTY, null));
SpanScope spanScopeT21 = defaultTracer.withSpanInScope(spanT21);
assertEquals(spanT21, defaultTracer.getCurrentSpan().getSpan());
spanScopeT21.close();
@@ -382,7 +405,6 @@ private void setupMocks() {
mockTracingTelemetry = mock(TracingTelemetry.class);
mockSpan = mock(Span.class);
mockParentSpan = mock(Span.class);
- mockSpanScope = mock(SpanScope.class);
mockTracerContextStorage = mock(TracerContextStorage.class);
when(mockSpan.getSpanName()).thenReturn("span_name");
when(mockSpan.getSpanId()).thenReturn("span_id");
@@ -390,7 +412,16 @@ private void setupMocks() {
when(mockSpan.getParentSpan()).thenReturn(mockParentSpan);
when(mockParentSpan.getSpanId()).thenReturn("parent_span_id");
when(mockParentSpan.getTraceId()).thenReturn("trace_id");
- when(mockTracerContextStorage.get(TracerContextStorage.CURRENT_SPAN)).thenReturn(mockParentSpan, mockSpan);
- when(mockTracingTelemetry.createSpan(eq("span_name"), eq(mockParentSpan), any(Attributes.class))).thenReturn(mockSpan);
+ spanCreationContext = buildSpanCreationContext("span_name", Attributes.EMPTY, mockParentSpan);
+ when(mockTracerContextStorage.get(TracerContextStorage.CURRENT_SPAN)).thenReturn(mockSpan, mockParentSpan);
+ when(mockTracingTelemetry.createSpan(eq(spanCreationContext), eq(mockParentSpan))).thenReturn(mockSpan);
+ }
+
+ private SpanCreationContext buildSpanCreationContext(String spanName, Attributes attributes, Span parentSpan) {
+ SpanCreationContext spanCreationContext = SpanCreationContext.internal().name(spanName).attributes(attributes);
+ if (parentSpan != null) {
+ spanCreationContext.parent(new SpanContext(parentSpan));
+ }
+ return spanCreationContext;
}
}
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/tracing/TraceableRunnableTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/tracing/TraceableRunnableTests.java
index a67d9b22ca738..4c4f762653d57 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/tracing/TraceableRunnableTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/tracing/TraceableRunnableTests.java
@@ -34,9 +34,7 @@ public void testRunnableWithNullParent() throws Exception {
final AtomicReference attributeValue = new AtomicReference<>();
TraceableRunnable traceableRunnable = new TraceableRunnable(
defaultTracer,
- spanName,
- null,
- Attributes.create().addAttribute("name", "value"),
+ SpanCreationContext.internal().name(spanName).attributes(Attributes.create().addAttribute("name", "value")),
() -> {
spanNameCaptured.set(defaultTracer.getCurrentSpan().getSpan().getSpanName());
attributeValue.set((String) ((MockSpan) defaultTracer.getCurrentSpan().getSpan()).getAttribute("name"));
@@ -55,14 +53,23 @@ public void testRunnableWithParent() throws Exception {
String spanName = "testRunnable";
String parentSpanName = "parentSpan";
DefaultTracer defaultTracer = new DefaultTracer(new MockTracingTelemetry(), contextStorage);
- ScopedSpan scopedSpan = defaultTracer.startScopedSpan(new SpanCreationContext(parentSpanName, Attributes.EMPTY));
+ ScopedSpan scopedSpan = defaultTracer.startScopedSpan(
+ SpanCreationContext.internal().name(parentSpanName).attributes(Attributes.EMPTY)
+ );
SpanContext parentSpanContext = defaultTracer.getCurrentSpan();
AtomicReference currentSpan = new AtomicReference<>();
final AtomicBoolean isRunnableCompleted = new AtomicBoolean(false);
- TraceableRunnable traceableRunnable = new TraceableRunnable(defaultTracer, spanName, parentSpanContext, Attributes.EMPTY, () -> {
- isRunnableCompleted.set(true);
- currentSpan.set(defaultTracer.getCurrentSpan());
- });
+ TraceableRunnable traceableRunnable = new TraceableRunnable(
+ defaultTracer,
+ SpanCreationContext.internal()
+ .name(spanName)
+ .attributes(Attributes.create().addAttribute("name", "value"))
+ .parent(parentSpanContext),
+ () -> {
+ isRunnableCompleted.set(true);
+ currentSpan.set(defaultTracer.getCurrentSpan());
+ }
+ );
traceableRunnable.run();
assertTrue(isRunnableCompleted.get());
assertEquals(spanName, currentSpan.get().getSpan().getSpanName());
diff --git a/libs/x-content/licenses/jackson-core-2.15.2.jar.sha1 b/libs/x-content/licenses/jackson-core-2.15.2.jar.sha1
deleted file mode 100644
index ec6781b968eed..0000000000000
--- a/libs/x-content/licenses/jackson-core-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a6fe1836469a69b3ff66037c324d75fc66ef137c
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..c2b70fb4ae202
--- /dev/null
+++ b/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
@@ -0,0 +1 @@
+899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.15.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.15.2.jar.sha1
deleted file mode 100644
index 0022265a84b68..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-cbor-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-baafc85c70765594add14bd93f3efd68e1945b76
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..8da478fc6013d
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
@@ -0,0 +1 @@
+35e8b7bf4fc1d078766bb155103d433ed5bb1627
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.15.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.15.2.jar.sha1
deleted file mode 100644
index 2b8caad846fec..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-smile-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-16d1dd22f7d641459ed056399d4f7df0220f1176
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..3e952ffe92418
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
@@ -0,0 +1 @@
+3c422d7f3901c9a1becf9df3cf41efc68a5ab95c
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.15.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.15.2.jar.sha1
deleted file mode 100644
index 4ad7255e2318f..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-yaml-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-58194ff9f51915ad6bf6b6f24818232d7566418a
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..d62b5874ab023
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
@@ -0,0 +1 @@
+2033e2c5f531785d17f3a2bc31842e3bbb7983b2
\ No newline at end of file
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java
index 9d1581a3a1517..453107fe4ff65 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java
@@ -35,6 +35,7 @@
import com.fasterxml.jackson.dataformat.cbor.CBORConstants;
import com.fasterxml.jackson.dataformat.smile.SmileConstants;
+import org.opensearch.common.annotation.PublicApi;
import org.opensearch.common.xcontent.cbor.CborXContent;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.smile.SmileXContent;
@@ -49,7 +50,9 @@
/**
* The content type of {@link XContent}.
+ * @opensearch.api
*/
+@PublicApi(since = "1.0.0")
public enum XContentType implements MediaType {
/**
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
index 3b857a2c690b9..d3d9ea174cf1b 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
@@ -628,7 +628,7 @@ public void testCreateRootSubParser() throws IOException {
/**
* Generates a random object {"first_field": "foo", "marked_field": {...random...}, "last_field": "bar}
- *
+ *
* Returns the number of tokens in the marked field
*/
private static int generateRandomObjectForMarking(XContentBuilder builder) throws IOException {
diff --git a/modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/stats/RunningStats.java b/modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/stats/RunningStats.java
index de67cc2930652..de6b59b1546a5 100644
--- a/modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/stats/RunningStats.java
+++ b/modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/stats/RunningStats.java
@@ -46,7 +46,7 @@
/**
* Descriptive stats gathered per shard. Coordinating node computes final correlation and covariance stats
* based on these descriptive stats. This single pass, parallel approach is based on:
- *
+ *
* http://prod.sandia.gov/techlib/access-control.cgi/2008/086212.pdf
*/
public class RunningStats implements Writeable, Cloneable {
@@ -222,7 +222,7 @@ private void updateCovariance(final String[] fieldNames, final Map
* running computations taken from: http://prod.sandia.gov/techlib/access-control.cgi/2008/086212.pdf
**/
public void merge(final RunningStats other) {
diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
index 785e597857825..71af708f2e1dc 100644
--- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
+++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
@@ -32,20 +32,42 @@
package org.opensearch.analysis.common;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.index.query.Operator;
import org.opensearch.plugins.Plugin;
-import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
import java.util.Arrays;
import java.util.Collection;
import static org.opensearch.index.query.QueryBuilders.queryStringQuery;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
-public class QueryStringWithAnalyzersIT extends OpenSearchIntegTestCase {
+public class QueryStringWithAnalyzersIT extends ParameterizedOpenSearchIntegTestCase {
+
+ public QueryStringWithAnalyzersIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection parameters() {
+ return Arrays.asList(
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ );
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+ }
+
@Override
protected Collection> nodePlugins() {
return Arrays.asList(CommonAnalysisModulePlugin.class);
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java
index b0d9c1765190a..cf2736a8583d2 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java
@@ -394,7 +394,17 @@ public Map> getTokenizers() {
// TODO deprecate and remove in API
tokenizers.put("lowercase", XLowerCaseTokenizerFactory::new);
tokenizers.put("path_hierarchy", PathHierarchyTokenizerFactory::new);
- tokenizers.put("PathHierarchy", PathHierarchyTokenizerFactory::new);
+ tokenizers.put("PathHierarchy", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
+ // TODO Remove "PathHierarchy" tokenizer name in 4.0 and throw exception
+ if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) {
+ deprecationLogger.deprecate(
+ "PathHierarchy_tokenizer_deprecation",
+ "The [PathHierarchy] tokenizer name is deprecated and will be removed in a future version. "
+ + "Please change the tokenizer name to [path_hierarchy] instead."
+ );
+ }
+ return new PathHierarchyTokenizerFactory(indexSettings, environment, name, settings);
+ });
tokenizers.put("pattern", PatternTokenizerFactory::new);
tokenizers.put("uax_url_email", UAX29URLEmailTokenizerFactory::new);
tokenizers.put("whitespace", WhitespaceTokenizerFactory::new);
@@ -555,7 +565,7 @@ public List getPreConfiguredTokenFilters() {
filters.add(PreConfiguredTokenFilter.singleton("scandinavian_normalization", true, ScandinavianNormalizationFilter::new));
filters.add(PreConfiguredTokenFilter.singleton("shingle", false, false, input -> {
TokenStream ts = new ShingleFilter(input);
- /**
+ /*
* We disable the graph analysis on this token stream
* because it produces shingles of different size.
* Graph analysis on such token stream is useless and dangerous as it may create too many paths
@@ -662,8 +672,17 @@ public List getPreConfiguredTokenizers() {
}
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}));
- tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new));
-
+ tokenizers.add(PreConfiguredTokenizer.openSearchVersion("PathHierarchy", (version) -> {
+ // TODO Remove "PathHierarchy" tokenizer name in 4.0 and throw exception
+ if (version.onOrAfter(Version.V_3_0_0)) {
+ deprecationLogger.deprecate(
+ "PathHierarchy_tokenizer_deprecation",
+ "The [PathHierarchy] tokenizer name is deprecated and will be removed in a future version. "
+ + "Please change the tokenizer name to [path_hierarchy] instead."
+ );
+ }
+ return new PathHierarchyTokenizer();
+ }));
return tokenizers;
}
}
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/KeywordMarkerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/KeywordMarkerTokenFilterFactory.java
index ad968aeee62cb..e9f3fd96dd69d 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/KeywordMarkerTokenFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/KeywordMarkerTokenFilterFactory.java
@@ -49,12 +49,12 @@
* A factory for creating keyword marker token filters that prevent tokens from
* being modified by stemmers. Two types of keyword marker filters are available:
* the {@link SetKeywordMarkerFilter} and the {@link PatternKeywordMarkerFilter}.
- *
+ *
* The {@link SetKeywordMarkerFilter} uses a set of keywords to denote which tokens
* should be excluded from stemming. This filter is created if the settings include
* {@code keywords}, which contains the list of keywords, or {@code `keywords_path`},
* which contains a path to a file in the config directory with the keywords.
- *
+ *
* The {@link PatternKeywordMarkerFilter} uses a regular expression pattern to match
* against tokens that should be excluded from stemming. This filter is created if
* the settings include {@code keywords_pattern}, which contains the regular expression
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MappingCharFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MappingCharFilterFactory.java
index bd241de749f11..d6d9f8975f2fc 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MappingCharFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MappingCharFilterFactory.java
@@ -54,7 +54,7 @@ public class MappingCharFilterFactory extends AbstractCharFilterFactory implemen
MappingCharFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name);
- List> rules = Analysis.parseWordList(env, settings, "mappings", this::parse, false);
+ List> rules = Analysis.parseWordList(env, settings, "mappings", this::parse);
if (rules == null) {
throw new IllegalArgumentException("mapping requires either `mappings` or `mappings_path` to be configured");
}
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/SnowballAnalyzer.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/SnowballAnalyzer.java
index 78d151ee16c3b..04786689b50f0 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/SnowballAnalyzer.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/SnowballAnalyzer.java
@@ -45,7 +45,7 @@
/** Filters {@link StandardTokenizer} with {@link
* LowerCaseFilter}, {@link StopFilter} and {@link SnowballFilter}.
- *
+ *
* Available stemmers are listed in org.tartarus.snowball.ext. The name of a
* stemmer is the part of the class name before "Stemmer", e.g., the stemmer in
* {@link org.tartarus.snowball.ext.EnglishStemmer} is named "English".
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java
index 94c7d63f2bee7..f37d5862b9d3f 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java
@@ -211,8 +211,8 @@ private void createTokenFilterFactoryWithTypeTable(String[] rules) throws IOExce
}
public void testTypeTableParsingError() {
- String[] rules = { "# This is a comment", "$ => DIGIT", "\\u200D => ALPHANUM", "abc => ALPHA" };
+ String[] rules = { "# This is a comment", "# => ALPHANUM", "$ => DIGIT", "\\u200D => ALPHANUM", "abc => ALPHA" };
RuntimeException ex = expectThrows(RuntimeException.class, () -> createTokenFilterFactoryWithTypeTable(rules));
- assertEquals("Line [4]: Invalid mapping rule: [abc => ALPHA]. Only a single character is allowed.", ex.getMessage());
+ assertEquals("Line [5]: Invalid mapping rule: [abc => ALPHA]. Only a single character is allowed.", ex.getMessage());
}
}
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
index 46e57faec3a69..26f4acb2b1e6a 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
@@ -32,8 +32,11 @@
package org.opensearch.analysis.common;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.IndexSettings;
@@ -41,7 +44,7 @@
import org.opensearch.plugins.Plugin;
import org.opensearch.search.builder.SearchSourceBuilder;
import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
-import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
import java.io.IOException;
import java.util.Arrays;
@@ -55,6 +58,7 @@
import static org.opensearch.index.query.QueryBuilders.matchPhraseQuery;
import static org.opensearch.index.query.QueryBuilders.matchQuery;
import static org.opensearch.index.query.QueryBuilders.termQuery;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
import static org.opensearch.search.builder.SearchSourceBuilder.highlight;
import static org.opensearch.search.builder.SearchSourceBuilder.searchSource;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
@@ -64,7 +68,25 @@
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.startsWith;
-public class HighlighterWithAnalyzersTests extends OpenSearchIntegTestCase {
+public class HighlighterWithAnalyzersTests extends ParameterizedOpenSearchIntegTestCase {
+
+ public HighlighterWithAnalyzersTests(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ );
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+ }
+
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(CommonAnalysisModulePlugin.class);
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MappingCharFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MappingCharFilterFactoryTests.java
index 387eb4a377007..28e041ac8c92d 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MappingCharFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MappingCharFilterFactoryTests.java
@@ -37,6 +37,7 @@ public static CharFilterFactory create(String... rules) throws IOException {
public void testRulesOk() throws IOException {
MappingCharFilterFactory mappingCharFilterFactory = (MappingCharFilterFactory) create(
+ "# This is a comment",
"# => _hashtag_",
":) => _happy_",
":( => _sad_"
@@ -64,7 +65,10 @@ public void testRuleError() {
}
public void testRulePartError() {
- RuntimeException ex = expectThrows(RuntimeException.class, () -> create("# => _hashtag_", ":) => _happy_", "a:b"));
- assertEquals("Line [3]: Invalid mapping rule : [a:b]", ex.getMessage());
+ RuntimeException ex = expectThrows(
+ RuntimeException.class,
+ () -> create("# This is a comment", "# => _hashtag_", ":) => _happy_", "a:b")
+ );
+ assertEquals("Line [4]: Invalid mapping rule : [a:b]", ex.getMessage());
}
}
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PathHierarchyTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PathHierarchyTokenizerFactoryTests.java
index 1fe7c582449ec..555d6c78b6ec5 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PathHierarchyTokenizerFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PathHierarchyTokenizerFactoryTests.java
@@ -35,16 +35,61 @@
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.analysis.Tokenizer;
+import org.opensearch.Version;
+import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.index.Index;
+import org.opensearch.env.Environment;
+import org.opensearch.env.TestEnvironment;
+import org.opensearch.index.IndexSettings;
+import org.opensearch.index.analysis.IndexAnalyzers;
+import org.opensearch.index.analysis.NamedAnalyzer;
+import org.opensearch.indices.analysis.AnalysisModule;
import org.opensearch.test.IndexSettingsModule;
import org.opensearch.test.OpenSearchTokenStreamTestCase;
+import org.opensearch.test.VersionUtils;
import java.io.IOException;
import java.io.StringReader;
+import java.util.Collections;
public class PathHierarchyTokenizerFactoryTests extends OpenSearchTokenStreamTestCase {
+ private IndexAnalyzers buildAnalyzers(Version version, String tokenizer) throws IOException {
+ Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
+ Settings indexSettings = Settings.builder()
+ .put(IndexMetadata.SETTING_VERSION_CREATED, version)
+ .put("index.analysis.analyzer.my_analyzer.tokenizer", tokenizer)
+ .build();
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
+ return new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(new CommonAnalysisModulePlugin()))
+ .getAnalysisRegistry()
+ .build(idxSettings);
+ }
+
+ /**
+ * Test that deprecated "PathHierarchy" tokenizer name is still available via {@link CommonAnalysisModulePlugin} starting in 3.x.
+ */
+ public void testPreConfiguredTokenizer() throws IOException {
+
+ {
+ try (
+ IndexAnalyzers indexAnalyzers = buildAnalyzers(
+ VersionUtils.randomVersionBetween(random(), Version.V_3_0_0, Version.CURRENT),
+ "PathHierarchy"
+ )
+ ) {
+ NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
+ assertNotNull(analyzer);
+ assertTokenStreamContents(analyzer.tokenStream("dummy", "/a/b/c"), new String[] { "/a", "/a/b", "/a/b/c" });
+ // Once LUCENE-12750 is fixed we can use the following testing method instead.
+ // Similar testing approach has been used for deprecation of (Edge)NGrams tokenizers as well.
+ // assertAnalyzesTo(analyzer, "/a/b/c", new String[] { "/a", "/a/b", "/a/b/c" });
+
+ }
+ }
+ }
+
public void testDefaults() throws IOException {
final Index index = new Index("test", "_na_");
final Settings indexSettings = newAnalysisSettingsBuilder().build();
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml
index 56ed2175df60a..179de835a4105 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml
@@ -298,6 +298,9 @@
---
"path_hierarchy":
+ - skip:
+ features: "allowed_warnings"
+
- do:
indices.analyze:
body:
@@ -312,6 +315,8 @@
- match: { detail.tokenizer.tokens.2.token: a/b/c }
- do:
+ allowed_warnings:
+ - 'The [PathHierarchy] tokenizer name is deprecated and will be removed in a future version. Please change the tokenizer name to [path_hierarchy] instead.'
indices.analyze:
body:
text: "a/b/c"
@@ -337,11 +342,13 @@
- match: { detail.tokenizer.tokens.2.token: a/b/c }
- do:
+ allowed_warnings:
+ - 'The [PathHierarchy] tokenizer name is deprecated and will be removed in a future version. Please change the tokenizer name to [path_hierarchy] instead.'
indices.analyze:
body:
text: "a/b/c"
explain: true
- tokenizer: PathHierarchy
+ tokenizer: PathHierarchy
- length: { detail.tokenizer.tokens: 3 }
- match: { detail.tokenizer.name: PathHierarchy }
- match: { detail.tokenizer.tokens.0.token: a }
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/40_token_filters.yml
index e92cc0c4838c7..802c79c780689 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/40_token_filters.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/40_token_filters.yml
@@ -127,6 +127,69 @@
- match: { tokens.2.token: brown }
- match: { tokens.3.token: fox }
+ - do:
+ indices.analyze:
+ body:
+ text: 'text1 #text2'
+ tokenizer: whitespace
+ filter:
+ - type: word_delimiter
+ split_on_numerics: false
+ type_table:
+ - "\\u0023 => ALPHANUM"
+ - length: { tokens: 2 }
+ - match: { tokens.0.token: text1 }
+ - match: { tokens.0.start_offset: 0 }
+ - match: { tokens.0.end_offset: 5 }
+ - match: { tokens.0.position: 0 }
+ - match: { tokens.1.token: "#text2" }
+ - match: { tokens.1.start_offset: 6 }
+ - match: { tokens.1.end_offset: 12 }
+ - match: { tokens.1.position: 1 }
+
+ - do:
+ indices.analyze:
+ body:
+ text: 'text1 #text2'
+ tokenizer: whitespace
+ filter:
+ - type: word_delimiter
+ split_on_numerics: false
+ type_table:
+ - "# This is a comment"
+ - "# => ALPHANUM"
+ - length: { tokens: 2 }
+ - match: { tokens.0.token: text1 }
+ - match: { tokens.0.start_offset: 0 }
+ - match: { tokens.0.end_offset: 5 }
+ - match: { tokens.0.position: 0 }
+ - match: { tokens.1.token: "#text2" }
+ - match: { tokens.1.start_offset: 6 }
+ - match: { tokens.1.end_offset: 12 }
+ - match: { tokens.1.position: 1 }
+
+ - do:
+ indices.analyze:
+ body:
+ text: 'text1 #text2'
+ tokenizer: whitespace
+ filter:
+ - type: word_delimiter
+ split_on_numerics: false
+ type_table:
+ - "# This is a comment"
+ - "# => ALPHANUM"
+ - "@ => ALPHANUM"
+ - length: { tokens: 2 }
+ - match: { tokens.0.token: text1 }
+ - match: { tokens.0.start_offset: 0 }
+ - match: { tokens.0.end_offset: 5 }
+ - match: { tokens.0.position: 0 }
+ - match: { tokens.1.token: "#text2" }
+ - match: { tokens.1.start_offset: 6 }
+ - match: { tokens.1.end_offset: 12 }
+ - match: { tokens.1.position: 1 }
+
---
"word_delimiter_graph":
- do:
@@ -231,6 +294,69 @@
- match: { detail.tokenfilters.0.tokens.5.end_offset: 19 }
- match: { detail.tokenfilters.0.tokens.5.position: 5 }
+ - do:
+ indices.analyze:
+ body:
+ text: 'text1 #text2'
+ tokenizer: whitespace
+ filter:
+ - type: word_delimiter_graph
+ split_on_numerics: false
+ type_table:
+ - "\\u0023 => ALPHANUM"
+ - length: { tokens: 2 }
+ - match: { tokens.0.token: text1 }
+ - match: { tokens.0.start_offset: 0 }
+ - match: { tokens.0.end_offset: 5 }
+ - match: { tokens.0.position: 0 }
+ - match: { tokens.1.token: "#text2" }
+ - match: { tokens.1.start_offset: 6 }
+ - match: { tokens.1.end_offset: 12 }
+ - match: { tokens.1.position: 1 }
+
+ - do:
+ indices.analyze:
+ body:
+ text: 'text1 #text2'
+ tokenizer: whitespace
+ filter:
+ - type: word_delimiter_graph
+ split_on_numerics: false
+ type_table:
+ - "# This is a comment"
+ - "# => ALPHANUM"
+ - length: { tokens: 2 }
+ - match: { tokens.0.token: text1 }
+ - match: { tokens.0.start_offset: 0 }
+ - match: { tokens.0.end_offset: 5 }
+ - match: { tokens.0.position: 0 }
+ - match: { tokens.1.token: "#text2" }
+ - match: { tokens.1.start_offset: 6 }
+ - match: { tokens.1.end_offset: 12 }
+ - match: { tokens.1.position: 1 }
+
+ - do:
+ indices.analyze:
+ body:
+ text: 'text1 #text2'
+ tokenizer: whitespace
+ filter:
+ - type: word_delimiter_graph
+ split_on_numerics: false
+ type_table:
+ - "# This is a comment"
+ - "# => ALPHANUM"
+ - "@ => ALPHANUM"
+ - length: { tokens: 2 }
+ - match: { tokens.0.token: text1 }
+ - match: { tokens.0.start_offset: 0 }
+ - match: { tokens.0.end_offset: 5 }
+ - match: { tokens.0.position: 0 }
+ - match: { tokens.1.token: "#text2" }
+ - match: { tokens.1.start_offset: 6 }
+ - match: { tokens.1.end_offset: 12 }
+ - match: { tokens.1.position: 1 }
+
---
"unique":
- do:
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/50_char_filters.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/50_char_filters.yml
index 0078575ae8e57..5e266c10cba8f 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/50_char_filters.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/50_char_filters.yml
@@ -69,6 +69,7 @@
char_filter:
- type: mapping
mappings:
+ - "# This is a comment"
- "# => _hashsign_"
- "@ => _atsign_"
- length: { tokens: 3 }
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
index b17f4804d4d50..c38b29502e282 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
@@ -8,26 +8,50 @@
package org.opensearch.geo;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.geometry.utils.StandardValidator;
import org.opensearch.geometry.utils.WellKnownText;
import org.opensearch.index.mapper.GeoShapeFieldMapper;
import org.opensearch.plugins.Plugin;
-import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
import org.opensearch.test.TestGeoShapeFieldMapperPlugin;
+import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
+
/**
* This is the base class for all the Geo related integration tests. Use this class to add the features and settings
* for the test cluster on which integration tests are running.
*/
-public abstract class GeoModulePluginIntegTestCase extends OpenSearchIntegTestCase {
+public abstract class GeoModulePluginIntegTestCase extends ParameterizedOpenSearchIntegTestCase {
protected static final double GEOHASH_TOLERANCE = 1E-5D;
protected static final WellKnownText WKT = new WellKnownText(true, new StandardValidator(true));
+ public GeoModulePluginIntegTestCase(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ );
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+ }
+
/**
* Returns a collection of plugins that should be loaded on each node for doing the integration tests. As this
* geo plugin is not getting packaged in a zip, we need to load it before the tests run.
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
index a9dd7d1fd22e7..7344903fd5220 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
@@ -10,6 +10,7 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.common.settings.Settings;
import org.opensearch.geo.GeoModulePluginIntegTestCase;
import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper;
import org.opensearch.geo.search.aggregations.metrics.GeoBounds;
@@ -43,6 +44,10 @@ public class MissingValueIT extends GeoModulePluginIntegTestCase {
private GeoPoint bottomRight;
private GeoPoint topLeft;
+ public MissingValueIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
@Override
protected void setupSuiteScopeCluster() throws Exception {
assertAcked(
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/AbstractGeoBucketAggregationIntegTest.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/AbstractGeoBucketAggregationIntegTest.java
index d9ff3e8f473ef..7316847ac6046 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/AbstractGeoBucketAggregationIntegTest.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/AbstractGeoBucketAggregationIntegTest.java
@@ -67,6 +67,10 @@ public abstract class AbstractGeoBucketAggregationIntegTest extends GeoModulePlu
protected final Version version = VersionUtils.randomIndexCompatibleVersion(random());
+ public AbstractGeoBucketAggregationIntegTest(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
@Override
protected boolean forbidPrivateIndexSettings() {
return false;
@@ -83,7 +87,7 @@ protected boolean forbidPrivateIndexSettings() {
*/
protected void prepareGeoShapeIndexForAggregations(final Random random) throws Exception {
expectedDocsCountForGeoShapes = new HashMap<>();
- final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
+ final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
final List geoshapes = new ArrayList<>();
assertAcked(prepareCreate(GEO_SHAPE_INDEX_NAME).setSettings(settings).setMapping(GEO_SHAPE_FIELD_NAME, "type" + "=geo_shape"));
boolean isShapeIntersectingBB = false;
@@ -132,7 +136,7 @@ protected void prepareSingleValueGeoPointIndex(final Random random) throws Excep
expectedDocCountsForSingleGeoPoint = new HashMap<>();
createIndex("idx_unmapped");
final Settings settings = Settings.builder()
- .put(IndexMetadata.SETTING_VERSION_CREATED, version)
+ .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.number_of_shards", 4)
.put("index.number_of_replicas", 0)
.build();
@@ -156,7 +160,7 @@ protected void prepareSingleValueGeoPointIndex(final Random random) throws Excep
protected void prepareMultiValuedGeoPointIndex(final Random random) throws Exception {
multiValuedExpectedDocCountsGeoPoint = new HashMap<>();
- final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
+ final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
final List cities = new ArrayList<>();
assertAcked(
prepareCreate("multi_valued_idx").setSettings(settings)
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridIT.java
index 459a0986d3103..4048bb62f8818 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridIT.java
@@ -35,6 +35,7 @@
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.geo.GeoShapeDocValue;
+import org.opensearch.common.settings.Settings;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper;
@@ -64,6 +65,10 @@ public class GeoHashGridIT extends AbstractGeoBucketAggregationIntegTest {
private static final String AGG_NAME = "geohashgrid";
+ public GeoHashGridIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
@Override
public void setupSuiteScopeCluster() throws Exception {
Random random = random();
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoTileGridIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoTileGridIT.java
index 6b09a843af566..2a5772d417530 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoTileGridIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoTileGridIT.java
@@ -12,6 +12,7 @@
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.geo.GeoShapeDocValue;
+import org.opensearch.common.settings.Settings;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper;
@@ -38,6 +39,10 @@ public class GeoTileGridIT extends AbstractGeoBucketAggregationIntegTest {
private static final String AGG_NAME = "geotilegrid";
+ public GeoTileGridIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
@Override
public void setupSuiteScopeCluster() throws Exception {
final Random random = random();
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/ShardReduceIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/ShardReduceIT.java
index d22d2089a3ae3..85541c60f133c 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/ShardReduceIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/ShardReduceIT.java
@@ -10,6 +10,7 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchResponse;
+import org.opensearch.common.settings.Settings;
import org.opensearch.geo.GeoModulePluginIntegTestCase;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid;
import org.opensearch.geo.tests.common.AggregationBuilders;
@@ -34,6 +35,10 @@
@OpenSearchIntegTestCase.SuiteScopeTestCase
public class ShardReduceIT extends GeoModulePluginIntegTestCase {
+ public ShardReduceIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
private IndexRequestBuilder indexDoc(String date, int value) throws Exception {
return client().prepareIndex("idx")
.setSource(
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java
index d76104882d676..711744b944ce3 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java
@@ -65,6 +65,10 @@ public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModul
protected static Map<String, Integer> expectedDocCountsForGeoHash = null;
protected static Map<String, GeoPoint> expectedCentroidsForGeoHash = null;
+ public AbstractGeoAggregatorModulePluginTestCase(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
@Override
public void setupSuiteScopeCluster() throws Exception {
createIndex(UNMAPPED_IDX_NAME);
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java
index d95cd85b49cd4..1c28df6bc4ea2 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java
@@ -34,6 +34,7 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.common.settings.Settings;
import org.opensearch.core.common.util.BigArray;
import org.opensearch.search.aggregations.InternalAggregation;
import org.opensearch.search.aggregations.bucket.global.Global;
@@ -61,6 +62,10 @@
public class GeoBoundsITTestCase extends AbstractGeoAggregatorModulePluginTestCase {
private static final String aggName = "geoBounds";
+ public GeoBoundsITTestCase(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
public void testSingleValuedField() throws Exception {
SearchResponse response = client().prepareSearch(IDX_NAME)
.addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false))
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoCentroidITTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoCentroidITTestCase.java
index 01d2656adb750..2dc8a91600419 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoCentroidITTestCase.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoCentroidITTestCase.java
@@ -34,6 +34,7 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.common.settings.Settings;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid;
import org.opensearch.geo.tests.common.AggregationBuilders;
import org.opensearch.search.aggregations.metrics.GeoCentroid;
@@ -51,6 +52,10 @@
public class GeoCentroidITTestCase extends AbstractGeoAggregatorModulePluginTestCase {
private static final String aggName = "geoCentroid";
+ public GeoCentroidITTestCase(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
public void testSingleValueFieldAsSubAggToGeohashGrid() throws Exception {
SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME)
.addAggregation(
diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileValuesSource.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileValuesSource.java
index 9149b8939b739..665ea6c5f2f37 100644
--- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileValuesSource.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileValuesSource.java
@@ -48,7 +48,7 @@
/**
* A {@link SingleDimensionValuesSource} for geotile values.
- *
+ *
* Since geotile values can be represented as long values, this class is almost the same as {@link LongValuesSource}
* The main differences is {@link GeoTileValuesSource#setAfter(Comparable)} as it needs to accept geotile string values i.e. "zoom/x/y".
*
diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/cells/BoundedCellValues.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/cells/BoundedCellValues.java
index 588c8bc59c2e0..6ff38fa28978e 100644
--- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/cells/BoundedCellValues.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/cells/BoundedCellValues.java
@@ -37,7 +37,7 @@
/**
* Class representing {@link CellValues} whose values are filtered
* according to whether they are within the specified {@link GeoBoundingBox}.
- *
+ *
* The specified bounding box is assumed to be bounded.
*
* @opensearch.internal
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DotExpanderProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DotExpanderProcessor.java
index 39c2d67ac0b85..0eab6334854ab 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DotExpanderProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DotExpanderProcessor.java
@@ -118,25 +118,15 @@ public Processor create(
) throws Exception {
String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
if (field.contains(".") == false) {
- throw ConfigurationUtils.newConfigurationException(
- ConfigurationUtils.TAG_KEY,
- tag,
- "field",
- "field does not contain a dot"
- );
+ throw ConfigurationUtils.newConfigurationException(TYPE, tag, "field", "field does not contain a dot");
}
if (field.indexOf('.') == 0 || field.lastIndexOf('.') == field.length() - 1) {
- throw ConfigurationUtils.newConfigurationException(
- ConfigurationUtils.TAG_KEY,
- tag,
- "field",
- "Field can't start or end with a dot"
- );
+ throw ConfigurationUtils.newConfigurationException(TYPE, tag, "field", "Field can't start or end with a dot");
}
int firstIndex = -1;
for (int index = field.indexOf('.'); index != -1; index = field.indexOf('.', index + 1)) {
if (index - firstIndex == 1) {
- throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field", "No space between dots");
+ throw ConfigurationUtils.newConfigurationException(TYPE, tag, "field", "No space between dots");
}
firstIndex = index;
}
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessorException.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessorException.java
index 37320c0e900a5..7e114023fb86f 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessorException.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessorException.java
@@ -37,7 +37,7 @@
/**
* Exception class thrown by {@link FailProcessor}.
- *
+ *
* This exception is caught in the {@link CompoundProcessor} and
* then changes the state of {@link IngestDocument}. This
* exception should get serialized.
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ForEachProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ForEachProcessor.java
index 741a4fb29cfb8..b7c417f5f44a5 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ForEachProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ForEachProcessor.java
@@ -53,10 +53,10 @@
/**
* A processor that for each value in a list executes a one or more processors.
- *
+ *
* This can be useful in cases to do string operations on json array of strings,
* or remove a field from objects inside a json array.
- *
+ *
* Note that this processor is experimental.
*/
public final class ForEachProcessor extends AbstractProcessor implements WrappingProcessor {
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
index 0f8422ea474d2..a2a51d968e078 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
@@ -98,7 +98,7 @@ public Map getProcessors(Processor.Parameters paramet
processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService));
processors.put(DotExpanderProcessor.TYPE, new DotExpanderProcessor.Factory());
processors.put(JsonProcessor.TYPE, new JsonProcessor.Factory());
- processors.put(KeyValueProcessor.TYPE, new KeyValueProcessor.Factory());
+ processors.put(KeyValueProcessor.TYPE, new KeyValueProcessor.Factory(parameters.scriptService));
processors.put(URLDecodeProcessor.TYPE, new URLDecodeProcessor.Factory());
processors.put(BytesProcessor.TYPE, new BytesProcessor.Factory());
processors.put(PipelineProcessor.TYPE, new PipelineProcessor.Factory(parameters.ingestService));
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/KeyValueProcessor.java
index ff3cca4ce111f..73f03b3cb2e0f 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/KeyValueProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/KeyValueProcessor.java
@@ -33,10 +33,13 @@
package org.opensearch.ingest.common;
import org.opensearch.common.util.set.Sets;
+import org.opensearch.core.common.Strings;
import org.opensearch.ingest.AbstractProcessor;
import org.opensearch.ingest.ConfigurationUtils;
import org.opensearch.ingest.IngestDocument;
import org.opensearch.ingest.Processor;
+import org.opensearch.script.ScriptService;
+import org.opensearch.script.TemplateScript;
import java.util.Collections;
import java.util.List;
@@ -56,24 +59,24 @@ public final class KeyValueProcessor extends AbstractProcessor {
private static final Pattern STRIP_BRACKETS = Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)");
- private final String field;
+ private final TemplateScript.Factory field;
private final String fieldSplit;
private final String valueSplit;
private final Set includeKeys;
private final Set excludeKeys;
- private final String targetField;
+ private final TemplateScript.Factory targetField;
private final boolean ignoreMissing;
private final Consumer execution;
KeyValueProcessor(
String tag,
String description,
- String field,
+ TemplateScript.Factory field,
String fieldSplit,
String valueSplit,
Set includeKeys,
Set excludeKeys,
- String targetField,
+ TemplateScript.Factory targetField,
boolean ignoreMissing,
String trimKey,
String trimValue,
@@ -106,10 +109,10 @@ public final class KeyValueProcessor extends AbstractProcessor {
private static Consumer buildExecution(
String fieldSplit,
String valueSplit,
- String field,
+ TemplateScript.Factory field,
Set includeKeys,
Set excludeKeys,
- String targetField,
+ TemplateScript.Factory targetField,
boolean ignoreMissing,
String trimKey,
String trimValue,
@@ -130,41 +133,62 @@ private static Consumer buildExecution(
keyFilter = key -> includeKeys.contains(key) && excludeKeys.contains(key) == false;
}
}
- final String fieldPathPrefix;
- String keyPrefix = prefix == null ? "" : prefix;
- if (targetField == null) {
- fieldPathPrefix = keyPrefix;
- } else {
- fieldPathPrefix = targetField + "." + keyPrefix;
- }
- final Function keyPrefixer;
- if (fieldPathPrefix.isEmpty()) {
- keyPrefixer = val -> val;
- } else {
- keyPrefixer = val -> fieldPathPrefix + val;
- }
- final Function fieldSplitter = buildSplitter(fieldSplit, true);
- Function valueSplitter = buildSplitter(valueSplit, false);
- final Function keyTrimmer = buildTrimmer(trimKey);
- final Function bracketStrip;
- if (stripBrackets) {
- bracketStrip = val -> STRIP_BRACKETS.matcher(val).replaceAll("");
- } else {
- bracketStrip = val -> val;
- }
- final Function valueTrimmer = buildTrimmer(trimValue);
+
return document -> {
- String value = document.getFieldValue(field, String.class, ignoreMissing);
+ final String fieldPathPrefix;
+ String keyPrefix = prefix == null ? "" : prefix;
+ if (targetField != null) {
+ String targetFieldPath = document.renderTemplate(targetField);
+ if (!Strings.isNullOrEmpty(targetFieldPath)) {
+ fieldPathPrefix = targetFieldPath + "." + keyPrefix;
+ } else {
+ fieldPathPrefix = keyPrefix;
+ }
+ } else {
+ fieldPathPrefix = keyPrefix;
+ }
+
+ final Function keyPrefixer;
+ if (fieldPathPrefix.isEmpty()) {
+ keyPrefixer = val -> val;
+ } else {
+ keyPrefixer = val -> fieldPathPrefix + val;
+ }
+ final Function fieldSplitter = buildSplitter(fieldSplit, true);
+ Function valueSplitter = buildSplitter(valueSplit, false);
+ final Function keyTrimmer = buildTrimmer(trimKey);
+ final Function bracketStrip;
+ if (stripBrackets) {
+ bracketStrip = val -> STRIP_BRACKETS.matcher(val).replaceAll("");
+ } else {
+ bracketStrip = val -> val;
+ }
+ final Function valueTrimmer = buildTrimmer(trimValue);
+
+ String path = document.renderTemplate(field);
+ final boolean fieldPathNullOrEmpty = Strings.isNullOrEmpty(path);
+ if (fieldPathNullOrEmpty || document.hasField(path, true) == false) {
+ if (ignoreMissing) {
+ return;
+ } else if (fieldPathNullOrEmpty) {
+ throw new IllegalArgumentException("field path cannot be null nor empty");
+ } else {
+ throw new IllegalArgumentException("field [" + path + "] doesn't exist");
+ }
+ }
+
+ String value = document.getFieldValue(path, String.class, ignoreMissing);
if (value == null) {
if (ignoreMissing) {
return;
}
- throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs.");
+ throw new IllegalArgumentException("field [" + path + "] is null, cannot extract key-value pairs.");
}
+
for (String part : fieldSplitter.apply(value)) {
String[] kv = valueSplitter.apply(part);
if (kv.length != 2) {
- throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]");
+ throw new IllegalArgumentException("field [" + path + "] does not contain value_split [" + valueSplit + "]");
}
String key = keyTrimmer.apply(kv[0]);
if (keyFilter.test(key)) {
@@ -193,7 +217,7 @@ private static Function buildSplitter(String split, boolean fi
}
}
- String getField() {
+ TemplateScript.Factory getField() {
return field;
}
@@ -213,7 +237,7 @@ Set getExcludeKeys() {
return excludeKeys;
}
- String getTargetField() {
+ TemplateScript.Factory getTargetField() {
return targetField;
}
@@ -241,6 +265,12 @@ public String getType() {
}
public static class Factory implements Processor.Factory {
+ private final ScriptService scriptService;
+
+ public Factory(ScriptService scriptService) {
+ this.scriptService = scriptService;
+ }
+
@Override
public KeyValueProcessor create(
Map registry,
@@ -249,7 +279,13 @@ public KeyValueProcessor create(
Map config
) throws Exception {
String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ TemplateScript.Factory fieldTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", field, scriptService);
String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field");
+ TemplateScript.Factory targetFieldTemplate = null;
+ if (!Strings.isNullOrEmpty(targetField)) {
+ targetFieldTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, "target_field", targetField, scriptService);
+ }
+
String fieldSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field_split");
String valueSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "value_split");
String trimKey = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_key");
@@ -270,12 +306,12 @@ public KeyValueProcessor create(
return new KeyValueProcessor(
processorTag,
description,
- field,
+ fieldTemplate,
fieldSplit,
valueSplit,
includeKeys,
excludeKeys,
- targetField,
+ targetFieldTemplate,
ignoreMissing,
trimKey,
trimValue,
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java
index 5da3b6bea7bc2..bb3d4bca47859 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java
@@ -32,6 +32,8 @@
package org.opensearch.ingest.common;
+import org.opensearch.core.common.Strings;
+import org.opensearch.index.VersionType;
import org.opensearch.ingest.AbstractProcessor;
import org.opensearch.ingest.ConfigurationUtils;
import org.opensearch.ingest.IngestDocument;
@@ -42,6 +44,7 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.stream.Collectors;
/**
@@ -66,16 +69,38 @@ public List getFields() {
@Override
public IngestDocument execute(IngestDocument document) {
- if (ignoreMissing) {
- fields.forEach(field -> {
- String path = document.renderTemplate(field);
- if (document.hasField(path)) {
- document.removeField(path);
+ fields.forEach(field -> {
+ String path = document.renderTemplate(field);
+ final boolean fieldPathIsNullOrEmpty = Strings.isNullOrEmpty(path);
+ if (fieldPathIsNullOrEmpty || document.hasField(path) == false) {
+ if (ignoreMissing) {
+ return;
+ } else if (fieldPathIsNullOrEmpty) {
+ throw new IllegalArgumentException("field path cannot be null nor empty");
+ } else {
+ throw new IllegalArgumentException("field [" + path + "] doesn't exist");
}
- });
- } else {
- fields.forEach(document::removeField);
- }
+ }
+ // cannot remove _index, _version and _version_type.
+ if (path.equals(IngestDocument.Metadata.INDEX.getFieldName())
+ || path.equals(IngestDocument.Metadata.VERSION.getFieldName())
+ || path.equals(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) {
+ throw new IllegalArgumentException("cannot remove metadata field [" + path + "]");
+ }
+ // removing _id is disallowed when there's an external version specified in the request
+ String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class);
+ if (path.equals(IngestDocument.Metadata.ID.getFieldName())
+ && !Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) {
+ Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class);
+ throw new IllegalArgumentException(
+ "cannot remove metadata field [_id] when specifying external version for the document, version: "
+ + version
+ + ", version_type: "
+ + versionType
+ );
+ }
+ document.removeField(path);
+ });
return document;
}
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RenameProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RenameProcessor.java
index af356eb10d79c..7564bbdf95f45 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RenameProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RenameProcessor.java
@@ -32,6 +32,7 @@
package org.opensearch.ingest.common;
+import org.opensearch.core.common.Strings;
import org.opensearch.ingest.AbstractProcessor;
import org.opensearch.ingest.ConfigurationUtils;
import org.opensearch.ingest.IngestDocument;
@@ -80,9 +81,12 @@ boolean isIgnoreMissing() {
@Override
public IngestDocument execute(IngestDocument document) {
String path = document.renderTemplate(field);
- if (document.hasField(path, true) == false) {
+ final boolean fieldPathIsNullOrEmpty = Strings.isNullOrEmpty(path);
+ if (fieldPathIsNullOrEmpty || document.hasField(path, true) == false) {
if (ignoreMissing) {
return document;
+ } else if (fieldPathIsNullOrEmpty) {
+ throw new IllegalArgumentException("field path cannot be null nor empty");
} else {
throw new IllegalArgumentException("field [" + path + "] doesn't exist");
}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java
index ca0c0df40f009..e42a1147825d1 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java
@@ -155,4 +155,28 @@ public void testNullValueWithOutIgnoreMissing() {
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
}
+
+ public void testMatchEmptyBrackets() {
+ IngestDocument ingestDocument = new IngestDocument(
+ "_index",
+ "_id",
+ null,
+ null,
+ null,
+ Collections.singletonMap("message", "[foo],[bar],[]")
+ );
+ DissectProcessor dissectProcessor = new DissectProcessor("", null, "message", "[%{a}],[%{b}],[%{c}]", "", true);
+ dissectProcessor.execute(ingestDocument);
+ assertEquals("foo", ingestDocument.getFieldValue("a", String.class));
+ assertEquals("bar", ingestDocument.getFieldValue("b", String.class));
+ assertEquals("", ingestDocument.getFieldValue("c", String.class));
+
+ ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("message", "{}{}{}{baz}"));
+ dissectProcessor = new DissectProcessor("", null, "message", "{%{a}}{%{b}}{%{c}}{%{d}}", "", true);
+ dissectProcessor.execute(ingestDocument);
+ assertEquals("", ingestDocument.getFieldValue("a", String.class));
+ assertEquals("", ingestDocument.getFieldValue("b", String.class));
+ assertEquals("", ingestDocument.getFieldValue("c", String.class));
+ assertEquals("baz", ingestDocument.getFieldValue("d", String.class));
+ }
}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorFactoryTests.java
index 62060a682c0cb..78972ff8d5dea 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorFactoryTests.java
@@ -35,7 +35,9 @@
import org.opensearch.OpenSearchException;
import org.opensearch.OpenSearchParseException;
import org.opensearch.common.util.set.Sets;
+import org.opensearch.ingest.TestTemplateService;
import org.opensearch.test.OpenSearchTestCase;
+import org.junit.Before;
import java.util.Arrays;
import java.util.Collections;
@@ -48,8 +50,14 @@
public class KeyValueProcessorFactoryTests extends OpenSearchTestCase {
+ private KeyValueProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new KeyValueProcessor.Factory(TestTemplateService.instance());
+ }
+
public void testCreateWithDefaults() throws Exception {
- KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory();
Map config = new HashMap<>();
config.put("field", "field1");
config.put("field_split", "&");
@@ -57,7 +65,7 @@ public void testCreateWithDefaults() throws Exception {
String processorTag = randomAlphaOfLength(10);
KeyValueProcessor processor = factory.create(null, processorTag, null, config);
assertThat(processor.getTag(), equalTo(processorTag));
- assertThat(processor.getField(), equalTo("field1"));
+ assertThat(processor.getField().newInstance(Collections.emptyMap()).execute(), equalTo("field1"));
assertThat(processor.getFieldSplit(), equalTo("&"));
assertThat(processor.getValueSplit(), equalTo("="));
assertThat(processor.getIncludeKeys(), is(nullValue()));
@@ -66,7 +74,6 @@ public void testCreateWithDefaults() throws Exception {
}
public void testCreateWithAllFieldsSet() throws Exception {
- KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory();
Map config = new HashMap<>();
config.put("field", "field1");
config.put("field_split", "&");
@@ -78,17 +85,16 @@ public void testCreateWithAllFieldsSet() throws Exception {
String processorTag = randomAlphaOfLength(10);
KeyValueProcessor processor = factory.create(null, processorTag, null, config);
assertThat(processor.getTag(), equalTo(processorTag));
- assertThat(processor.getField(), equalTo("field1"));
+ assertThat(processor.getField().newInstance(Collections.emptyMap()).execute(), equalTo("field1"));
assertThat(processor.getFieldSplit(), equalTo("&"));
assertThat(processor.getValueSplit(), equalTo("="));
assertThat(processor.getIncludeKeys(), equalTo(Sets.newHashSet("a", "b")));
assertThat(processor.getExcludeKeys(), equalTo(Collections.emptySet()));
- assertThat(processor.getTargetField(), equalTo("target"));
+ assertThat(processor.getTargetField().newInstance(Collections.emptyMap()).execute(), equalTo("target"));
assertTrue(processor.isIgnoreMissing());
}
public void testCreateWithMissingField() {
- KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory();
Map config = new HashMap<>();
String processorTag = randomAlphaOfLength(10);
OpenSearchException exception = expectThrows(
@@ -99,7 +105,6 @@ public void testCreateWithMissingField() {
}
public void testCreateWithMissingFieldSplit() {
- KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory();
Map config = new HashMap<>();
config.put("field", "field1");
String processorTag = randomAlphaOfLength(10);
@@ -111,7 +116,6 @@ public void testCreateWithMissingFieldSplit() {
}
public void testCreateWithMissingValueSplit() {
- KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory();
Map config = new HashMap<>();
config.put("field", "field1");
config.put("field_split", "&");
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorTests.java
index 685a78e2e769b..5f71ea6f16a4f 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorTests.java
@@ -36,6 +36,7 @@
import org.opensearch.ingest.IngestDocument;
import org.opensearch.ingest.Processor;
import org.opensearch.ingest.RandomDocumentPicks;
+import org.opensearch.ingest.TestTemplateService;
import org.opensearch.test.OpenSearchTestCase;
import java.util.ArrayList;
@@ -51,7 +52,7 @@
public class KeyValueProcessorTests extends OpenSearchTestCase {
- private static final KeyValueProcessor.Factory FACTORY = new KeyValueProcessor.Factory();
+ private static final KeyValueProcessor.Factory FACTORY = new KeyValueProcessor.Factory(TestTemplateService.instance());
public void test() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
@@ -123,7 +124,12 @@ public void testMissingField() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
Processor processor = createKvProcessor("unknown", "&", "=", null, null, "target", false);
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
- assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]"));
+ assertThat(exception.getMessage(), equalTo("field [unknown] doesn't exist"));
+
+ // when using template snippet, the resolved field path may be empty
+ Processor processorWithEmptyFieldPath = createKvProcessor("", "&", "=", null, null, "target", false);
+ exception = expectThrows(IllegalArgumentException.class, () -> processorWithEmptyFieldPath.execute(ingestDocument));
+ assertThat(exception.getMessage(), equalTo("field path cannot be null nor empty"));
}
public void testNullValueWithIgnoreMissing() throws Exception {
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
index cf65236157111..1a5630a4730f2 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
@@ -32,6 +32,7 @@
package org.opensearch.ingest.common;
+import org.opensearch.index.VersionType;
import org.opensearch.ingest.IngestDocument;
import org.opensearch.ingest.Processor;
import org.opensearch.ingest.RandomDocumentPicks;
@@ -40,16 +41,17 @@
import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import java.util.stream.Collectors;
-import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class RemoveProcessorTests extends OpenSearchTestCase {
public void testRemoveFields() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
- String field = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+ String field = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomAlphaOfLength(10));
Processor processor = new RemoveProcessor(
randomAlphaOfLength(10),
null,
@@ -67,12 +69,44 @@ public void testRemoveNonExistingField() throws Exception {
config.put("field", fieldName);
String processorTag = randomAlphaOfLength(10);
Processor processor = new RemoveProcessor.Factory(TestTemplateService.instance()).create(null, processorTag, null, config);
- try {
- processor.execute(ingestDocument);
- fail("remove field should have failed");
- } catch (IllegalArgumentException e) {
- assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]"));
- }
+ assertThrows(
+ "field [" + fieldName + "] doesn't exist",
+ IllegalArgumentException.class,
+ () -> { processor.execute(ingestDocument); }
+ );
+
+ Map configWithEmptyField = new HashMap<>();
+ configWithEmptyField.put("field", "");
+ processorTag = randomAlphaOfLength(10);
+ Processor removeProcessorWithEmptyField = new RemoveProcessor.Factory(TestTemplateService.instance()).create(
+ null,
+ processorTag,
+ null,
+ configWithEmptyField
+ );
+ assertThrows(
+ "field path cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> removeProcessorWithEmptyField.execute(ingestDocument)
+ );
+ }
+
+ public void testRemoveEmptyField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ Map config = new HashMap<>();
+ config.put("field", "");
+ String processorTag = randomAlphaOfLength(10);
+ Processor removeProcessorWithEmptyField = new RemoveProcessor.Factory(TestTemplateService.instance()).create(
+ null,
+ processorTag,
+ null,
+ config
+ );
+ assertThrows(
+ "field path cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> removeProcessorWithEmptyField.execute(ingestDocument)
+ );
}
public void testIgnoreMissing() throws Exception {
@@ -84,5 +118,67 @@ public void testIgnoreMissing() throws Exception {
String processorTag = randomAlphaOfLength(10);
Processor processor = new RemoveProcessor.Factory(TestTemplateService.instance()).create(null, processorTag, null, config);
processor.execute(ingestDocument);
+
+ // when using template snippet, the resolved field path may be empty
+ Map configWithEmptyField = new HashMap<>();
+ configWithEmptyField.put("field", "");
+ configWithEmptyField.put("ignore_missing", true);
+ processorTag = randomAlphaOfLength(10);
+ processor = new RemoveProcessor.Factory(TestTemplateService.instance()).create(null, processorTag, null, configWithEmptyField);
+ processor.execute(ingestDocument);
+ }
+
+ public void testRemoveMetadataField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ List metadataFields = ingestDocument.getMetadata()
+ .keySet()
+ .stream()
+ .map(IngestDocument.Metadata::getFieldName)
+ .collect(Collectors.toList());
+
+ for (String metadataFieldName : metadataFields) {
+ Map config = new HashMap<>();
+ config.put("field", metadataFieldName);
+ String processorTag = randomAlphaOfLength(10);
+ Processor processor = new RemoveProcessor.Factory(TestTemplateService.instance()).create(null, processorTag, null, config);
+ // _if_seq_no and _if_primary_term do not exist in the enriched document, removing them will throw IllegalArgumentException
+ if (metadataFieldName.equals(IngestDocument.Metadata.IF_SEQ_NO.getFieldName())
+ || metadataFieldName.equals(IngestDocument.Metadata.IF_PRIMARY_TERM.getFieldName())) {
+ assertThrows(
+ "field [" + metadataFieldName + "] doesn't exist",
+ IllegalArgumentException.class,
+ () -> processor.execute(ingestDocument)
+ );
+ } else if (metadataFieldName.equals(IngestDocument.Metadata.INDEX.getFieldName())
+ || metadataFieldName.equals(IngestDocument.Metadata.VERSION.getFieldName())
+ || metadataFieldName.equals(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) {
+ // _index, _version and _version_type cannot be removed
+ assertThrows(
+ "cannot remove metadata field [" + metadataFieldName + "]",
+ IllegalArgumentException.class,
+ () -> processor.execute(ingestDocument)
+ );
+ } else if (metadataFieldName.equals(IngestDocument.Metadata.ID.getFieldName())) {
+ Long version = ingestDocument.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class);
+ String versionType = ingestDocument.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class);
+ if (!versionType.equals(VersionType.toString(VersionType.INTERNAL))) {
+ assertThrows(
+ "cannot remove metadata field [_id] when specifying external version for the document, version: "
+ + version
+ + ", version_type: "
+ + versionType,
+ IllegalArgumentException.class,
+ () -> processor.execute(ingestDocument)
+ );
+ } else {
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(metadataFieldName), equalTo(false));
+ }
+ } else if (metadataFieldName.equals(IngestDocument.Metadata.ROUTING.getFieldName())
+ && ingestDocument.hasField(IngestDocument.Metadata.ROUTING.getFieldName())) {
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(metadataFieldName), equalTo(false));
+ }
+ }
}
}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorTests.java
index fc95693024cb0..a600464371af8 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorTests.java
@@ -112,6 +112,15 @@ public void testRenameNonExistingField() throws Exception {
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [" + fieldName + "] doesn't exist"));
}
+
+ // when using template snippet, the resolved field path may be empty
+ processor = createRenameProcessor("", RandomDocumentPicks.randomFieldName(random()), false);
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field path cannot be null nor empty"));
+ }
}
public void testRenameNonExistingFieldWithIgnoreMissing() throws Exception {
@@ -121,6 +130,11 @@ public void testRenameNonExistingFieldWithIgnoreMissing() throws Exception {
Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), true);
processor.execute(ingestDocument);
assertIngestDocument(originalIngestDocument, ingestDocument);
+
+ // when using template snippet, the resolved field path may be empty
+ processor = createRenameProcessor("", RandomDocumentPicks.randomFieldName(random()), true);
+ processor.execute(ingestDocument);
+ assertIngestDocument(originalIngestDocument, ingestDocument);
}
public void testRenameNewFieldAlreadyExists() throws Exception {
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml
index 836243652b2e0..30a0a520b5c40 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml
@@ -39,3 +39,151 @@ teardown:
id: 1
- match: { _source.goodbye: "everybody" }
- match: { _source.hello: "world" }
+
+---
+"Test KV Processor with template snippets":
+ - skip:
+ version: " - 2.11.99"
+ reason: "KV Processor with template snippets is only supported since 2.12.0"
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "kv" : {
+ "field" : "{{source}}",
+ "target_field" : "{{target}}",
+ "field_split": " ",
+ "value_split": "="
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "foo",
+ target: "zoo",
+ foo: "goodbye=everybody hello=world"
+ }
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.zoo.goodbye: "everybody" }
+ - match: { _source.zoo.hello: "world" }
+
+---
+"Test KV Processor with non-existing field and without ignore_missing":
+ - skip:
+ version: " - 2.11.99"
+ reason: "KV Processor with template snippets is only supported since 2.12.0"
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "kv" : {
+ "field" : "{{source}}",
+ "target_field" : "{{target}}",
+ "field_split": " ",
+ "value_split": "="
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /field path cannot be null nor empty/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ target: "zoo",
+ foo: "goodbye=everybody hello=world"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "kv" : {
+ "field" : "{{source}}",
+ "target_field" : "{{target}}",
+ "field_split": " ",
+ "value_split": "="
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /field \[unknown\] doesn\'t exist/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "unknown",
+ target: "zoo",
+ foo: "goodbye=everybody hello=world"
+ }
+
+---
+"Test KV Processor with non-existing field and ignore_missing":
+ - skip:
+ version: " - 2.11.99"
+ reason: "KV Processor with template snippets is only supported since 2.12.0"
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "kv" : {
+ "field" : "{{source}}",
+ "target_field" : "{{target}}",
+ "field_split": " ",
+ "value_split": "=",
+ "ignore_missing": true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ target: "zoo",
+ foo: "goodbye=everybody hello=world"
+ }
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { target: "zoo", foo: "goodbye=everybody hello=world"}}
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
index 916a7fe656cc2..d90e5fbf2362b 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
@@ -84,3 +84,38 @@ teardown:
}
]
}
+
+---
+"Test dissect processor can match empty brackets":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "dissect" : {
+ "field" : "message",
+ "pattern" : "[%{a}][%{b}][%{c}]"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {message: "[foo][bar][]"}
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.message: "[foo][bar][]" }
+ - match: { _source.a: "foo" }
+ - match: { _source.b: "bar" }
+ - match: { _source.c: "" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename_processor.yml
new file mode 100644
index 0000000000000..96b2256bcc1dc
--- /dev/null
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename_processor.yml
@@ -0,0 +1,66 @@
+---
+teardown:
+ - do:
+ ingest.delete_pipeline:
+ id: "my_pipeline"
+ ignore: 404
+
+---
+"Test rename processor with non-existing field and without ignore_missing":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "rename" : {
+ "field" : "{{field_foo}}",
+ "target_field" : "bar"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: '/field path cannot be null nor empty/'
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: { message: "foo bar baz" }
+
+---
+"Test rename processor with non-existing field and ignore_missing":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "rename" : {
+ "field" : "{{field_foo}}",
+ "target_field" : "bar",
+ "ignore_missing" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: { message: "foo bar baz" }
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.message: "foo bar baz" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml
new file mode 100644
index 0000000000000..4811769d04f0e
--- /dev/null
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml
@@ -0,0 +1,229 @@
+---
+teardown:
+ - do:
+ ingest.delete_pipeline:
+ id: "my_pipeline"
+ ignore: 404
+
+---
+"Test remove processor with non-existing field and without ignore_missing":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "field" : "{{unknown}}"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /field path cannot be null nor empty/
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: { message: "foo bar baz" }
+
+---
+"Test remove processor with resolved field path doesn't exist":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "field" : "{{foo}}"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /field \[bar\] doesn\'t exist/
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {
+ message: "foo bar baz",
+ foo: "bar"
+ }
+
+---
+"Test remove processor with non-existing field and ignore_missing":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "field" : "{{unknown}}",
+ "ignore_missing" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: { message: "foo bar baz" }
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.message: "foo bar baz" }
+
+# Related issue: https://github.com/opensearch-project/OpenSearch/issues/10732
+---
+"Test remove metadata field":
+ - skip:
+ version: " - 2.11.99"
+ reason: "The bug was fixed in 2.12"
+
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "field" : "{{foo}}"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /cannot remove metadata field \[\_index\]/
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {
+ foo: "_index"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "field" : "_version"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /cannot remove metadata field \[\_version\]/
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {
+ foo: "bar"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "field" : "_version_type"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /cannot remove metadata field \[\_version\_type\]/
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {
+ foo: "bar"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "field" : ["_id", "_routing"]
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ routing: abc
+ pipeline: "my_pipeline"
+ body: { message: "foo bar baz" }
+ - match: { result: created }
+
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "field" : "_id"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /cannot remove metadata field \[\_id\] when specifying external version for the document/
+ index:
+ index: test
+ id: "test_id_10000"
+ pipeline: "my_pipeline"
+ version: 1
+ version_type: "external"
+ body: { message: "foo bar baz" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
index e012a82b15927..7c073739f6a1f 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
@@ -976,3 +976,140 @@ teardown:
}
- match: { error.root_cause.0.type: "illegal_argument_exception" }
- match: { error.root_cause.0.reason: "Pipeline processor configured for non-existent pipeline [____pipeline_doesnot_exist___]" }
+
+---
+"Test simulate with docs containing metadata fields":
+ - do:
+ ingest.simulate:
+ body: >
+ {
+ "pipeline": {
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field": "field2",
+ "value": "foo"
+ }
+ }
+ ]
+ },
+ "docs": [
+ {
+ "_index": "index",
+ "_id": "id",
+ "_routing": "foo",
+ "_version": 100,
+ "_if_seq_no": 12333333333333333,
+ "_if_primary_term": 1,
+ "_source": {
+ "foo": "bar"
+ }
+ }
+ ]
+ }
+
+ - length: { docs: 1 }
+ - match: { docs.0.doc._index: "index" }
+ - match: { docs.0.doc._id: "id" }
+ - match: { docs.0.doc._routing: "foo" }
+ - match: { docs.0.doc._version: "100" }
+ - match: { docs.0.doc._if_seq_no: "12333333333333333" }
+ - match: { docs.0.doc._if_primary_term: "1" }
+ - match: { docs.0.doc._source.foo: "bar" }
+
+ - do:
+ catch: bad_request
+ ingest.simulate:
+ body: >
+ {
+ "pipeline": {
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "foo"
+ }
+ }
+ ]
+ },
+ "docs": [
+ {
+ "_index": "index",
+ "_id": "id",
+ "_routing": "foo",
+ "_version": "bar",
+ "_source": {
+ "foo": "bar"
+ }
+ }
+ ]
+ }
+ - match: { status: 400 }
+ - match: { error.root_cause.0.type: "illegal_argument_exception" }
+ - match: { error.root_cause.0.reason: "Failed to parse parameter [_version], only int or long is accepted" }
+
+ - do:
+ catch: bad_request
+ ingest.simulate:
+ body: >
+ {
+ "pipeline": {
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "foo"
+ }
+ }
+ ]
+ },
+ "docs": [
+ {
+ "_index": "index",
+ "_id": "id",
+ "_routing": "foo",
+ "_if_seq_no": "123",
+ "_source": {
+ "foo": "bar"
+ }
+ }
+ ]
+ }
+ - match: { status: 400 }
+ - match: { error.root_cause.0.type: "illegal_argument_exception" }
+ - match: { error.root_cause.0.reason: "Failed to parse parameter [_if_seq_no], only int or long is accepted" }
+
+ - do:
+ catch: bad_request
+ ingest.simulate:
+ body: >
+ {
+ "pipeline": {
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "foo"
+ }
+ }
+ ]
+ },
+ "docs": [
+ {
+ "_index": "index",
+ "_id": "id",
+ "_routing": "foo",
+ "_if_primary_term": "1",
+ "_source": {
+ "foo": "bar"
+ }
+ }
+ ]
+ }
+ - match: { status: 400 }
+ - match: { error.root_cause.0.type: "illegal_argument_exception" }
+ - match: { error.root_cause.0.reason: "Failed to parse parameter [_if_primary_term], only int or long is accepted" }
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.15.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.15.2.jar.sha1
deleted file mode 100644
index f63416ddb8ceb..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-annotations-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4724a65ac8e8d156a24898d50fd5dbd3642870b8
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..79ed9e0c63fc8
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1
@@ -0,0 +1 @@
+dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.15.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.15.2.jar.sha1
deleted file mode 100644
index f16d80af8dce6..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-2.15.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9353b021f10c307c00328f52090de2bdb4b6ff9c
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..da00d281934b1
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1
@@ -0,0 +1 @@
+3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/asm-9.5.jar.sha1 b/modules/lang-expression/licenses/asm-9.5.jar.sha1
deleted file mode 100644
index ea4aa3581dc87..0000000000000
--- a/modules/lang-expression/licenses/asm-9.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc6ea1875f4d64fbc85e1691c95b96a3d8569c90
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/asm-9.6.jar.sha1 b/modules/lang-expression/licenses/asm-9.6.jar.sha1
new file mode 100644
index 0000000000000..2d9e6a9d3cfd6
--- /dev/null
+++ b/modules/lang-expression/licenses/asm-9.6.jar.sha1
@@ -0,0 +1 @@
+aa205cf0a06dbd8e04ece91c0b37c3f5d567546a
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/asm-commons-9.5.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.5.jar.sha1
deleted file mode 100644
index 5be792660c19f..0000000000000
--- a/modules/lang-expression/licenses/asm-commons-9.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-19ab5b5800a3910d30d3a3e64fdb00fd0cb42de0
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/asm-commons-9.6.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.6.jar.sha1
new file mode 100644
index 0000000000000..a0814f495771f
--- /dev/null
+++ b/modules/lang-expression/licenses/asm-commons-9.6.jar.sha1
@@ -0,0 +1 @@
+f1a9e5508eff490744144565c47326c8648be309
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/asm-tree-9.5.jar.sha1 b/modules/lang-expression/licenses/asm-tree-9.5.jar.sha1
deleted file mode 100644
index fb42db6a9d15c..0000000000000
--- a/modules/lang-expression/licenses/asm-tree-9.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fd33c8b6373abaa675be407082fdfda35021254a
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/asm-tree-9.6.jar.sha1 b/modules/lang-expression/licenses/asm-tree-9.6.jar.sha1
new file mode 100644
index 0000000000000..101eb03b4b736
--- /dev/null
+++ b/modules/lang-expression/licenses/asm-tree-9.6.jar.sha1
@@ -0,0 +1 @@
+c0cdda9d211e965d2a4448aa3fd86110f2f8c2de
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.8.0-snapshot-4373c3b.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.8.0-snapshot-4373c3b.jar.sha1
deleted file mode 100644
index 6eaa40708e4ae..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.8.0-snapshot-4373c3b.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9f8a34fc3d450343ab05ccb5af318a836a6a5fb3
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1
new file mode 100644
index 0000000000000..892865a017f48
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1
@@ -0,0 +1 @@
+7725476acfcb9bdfeff1b813ce15c39c6b857dc2
\ No newline at end of file
diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java
index 46cac9afa38fc..8ca28a905f216 100644
--- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java
+++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java
@@ -32,12 +32,16 @@
package org.opensearch.script.expression;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchRequestBuilder;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.search.SearchType;
import org.opensearch.action.update.UpdateRequestBuilder;
import org.opensearch.common.lucene.search.function.CombineFunction;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.query.QueryBuilders;
@@ -53,9 +57,10 @@
import org.opensearch.search.aggregations.pipeline.SimpleValue;
import org.opensearch.search.sort.SortBuilders;
import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
import org.opensearch.test.hamcrest.OpenSearchAssertions;
+import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@@ -64,6 +69,7 @@
import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
import static org.opensearch.search.aggregations.AggregationBuilders.histogram;
import static org.opensearch.search.aggregations.AggregationBuilders.sum;
import static org.opensearch.search.aggregations.PipelineAggregatorBuilders.bucketScript;
@@ -74,7 +80,24 @@
import static org.hamcrest.Matchers.notNullValue;
// TODO: please convert to unit tests!
-public class MoreExpressionIT extends OpenSearchIntegTestCase {
+public class MoreExpressionIT extends ParameterizedOpenSearchIntegTestCase {
+
+ public MoreExpressionIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ );
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+ }
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
index 665ebf3c2caea..b1cb5356a4405 100644
--- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
+++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
@@ -32,7 +32,10 @@
package org.opensearch.script.expression;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.plugins.Plugin;
@@ -40,16 +43,36 @@
import org.opensearch.script.ScriptType;
import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
import java.io.IOException;
+import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
import static org.hamcrest.Matchers.containsString;
//TODO: please convert to unit tests!
-public class StoredExpressionIT extends OpenSearchIntegTestCase {
+public class StoredExpressionIT extends ParameterizedOpenSearchIntegTestCase {
+
+ public StoredExpressionIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ );
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+ }
+
@Override
protected Settings nodeSettings(int nodeOrdinal) {
Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal));
diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionAggregationScript.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionAggregationScript.java
index 5eebb9c4d60ad..a2af636ffdc8a 100644
--- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionAggregationScript.java
+++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionAggregationScript.java
@@ -53,9 +53,9 @@ class ExpressionAggregationScript implements AggregationScript.LeafFactory {
final SimpleBindings bindings;
final DoubleValuesSource source;
final boolean needsScore;
- final ReplaceableConstDoubleValueSource specialValue; // _value
+ final PerThreadReplaceableConstDoubleValueSource specialValue; // _value
- ExpressionAggregationScript(Expression e, SimpleBindings b, boolean n, ReplaceableConstDoubleValueSource v) {
+ ExpressionAggregationScript(Expression e, SimpleBindings b, boolean n, PerThreadReplaceableConstDoubleValueSource v) {
exprScript = e;
bindings = b;
source = exprScript.getDoubleValuesSource(bindings);
diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java
index 035d2402857e0..5629b3b4a6972 100644
--- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java
+++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java
@@ -73,7 +73,7 @@
/**
* Provides the infrastructure for Lucene expressions as a scripting language for OpenSearch.
- *
+ * <p>
* Only contexts returning numeric types or {@link Object} are supported.
*/
public class ExpressionScriptEngine implements ScriptEngine {
@@ -316,14 +316,14 @@ private static AggregationScript.LeafFactory newAggregationScript(
// instead of complicating SimpleBindings (which should stay simple)
SimpleBindings bindings = new SimpleBindings();
boolean needsScores = false;
- ReplaceableConstDoubleValueSource specialValue = null;
+ PerThreadReplaceableConstDoubleValueSource specialValue = null;
for (String variable : expr.variables) {
try {
if (variable.equals("_score")) {
bindings.add("_score", DoubleValuesSource.SCORES);
needsScores = true;
} else if (variable.equals("_value")) {
- specialValue = new ReplaceableConstDoubleValueSource();
+ specialValue = new PerThreadReplaceableConstDoubleValueSource();
bindings.add("_value", specialValue);
// noop: _value is special for aggregations, and is handled in ExpressionScriptBindings
// TODO: if some uses it in a scoring expression, they will get a nasty failure when evaluating...need a
@@ -388,7 +388,7 @@ private static ScoreScript.LeafFactory newScoreScript(Expression expr, SearchLoo
// NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings,
// instead of complicating SimpleBindings (which should stay simple)
SimpleBindings bindings = new SimpleBindings();
- ReplaceableConstDoubleValueSource specialValue = null;
+ PerThreadReplaceableConstDoubleValueSource specialValue = null;
boolean needsScores = false;
for (String variable : expr.variables) {
try {
@@ -396,7 +396,7 @@ private static ScoreScript.LeafFactory newScoreScript(Expression expr, SearchLoo
bindings.add("_score", DoubleValuesSource.SCORES);
needsScores = true;
} else if (variable.equals("_value")) {
- specialValue = new ReplaceableConstDoubleValueSource();
+ specialValue = new PerThreadReplaceableConstDoubleValueSource();
bindings.add("_value", specialValue);
// noop: _value is special for aggregations, and is handled in ExpressionScriptBindings
// TODO: if some uses it in a scoring expression, they will get a nasty failure when evaluating...need a
diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ReplaceableConstDoubleValueSource.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/PerThreadReplaceableConstDoubleValueSource.java
similarity index 62%
rename from modules/lang-expression/src/main/java/org/opensearch/script/expression/ReplaceableConstDoubleValueSource.java
rename to modules/lang-expression/src/main/java/org/opensearch/script/expression/PerThreadReplaceableConstDoubleValueSource.java
index 28e4707a07192..40bb957c248f2 100644
--- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ReplaceableConstDoubleValueSource.java
+++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/PerThreadReplaceableConstDoubleValueSource.java
@@ -39,20 +39,25 @@
import org.apache.lucene.search.IndexSearcher;
import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
/**
- * A {@link DoubleValuesSource} which has a stub {@link DoubleValues} that holds a dynamically replaceable constant double.
+ * A {@link DoubleValuesSource} which has a stub {@link DoubleValues} that holds a dynamically replaceable constant double. This is made
+ * thread-safe for concurrent segment search use case by keeping the {@link DoubleValues} per thread. Any update to the value happens in
+ * thread specific {@link DoubleValuesSource} instance.
*/
-final class ReplaceableConstDoubleValueSource extends DoubleValuesSource {
- final ReplaceableConstDoubleValues fv;
+final class PerThreadReplaceableConstDoubleValueSource extends DoubleValuesSource {
+ // Multiple slices can be processed by same thread but that will be sequential, so keeping per thread is fine
+ final Map<Long, ReplaceableConstDoubleValues> perThreadDoubleValues;
- ReplaceableConstDoubleValueSource() {
- fv = new ReplaceableConstDoubleValues();
+ PerThreadReplaceableConstDoubleValueSource() {
+ perThreadDoubleValues = new ConcurrentHashMap<>();
}
@Override
public DoubleValues getValues(LeafReaderContext ctx, DoubleValues scores) throws IOException {
- return fv;
+ return perThreadDoubleValues.computeIfAbsent(Thread.currentThread().getId(), threadId -> new ReplaceableConstDoubleValues());
}
@Override
@@ -62,7 +67,11 @@ public boolean needsScores() {
@Override
public Explanation explain(LeafReaderContext ctx, int docId, Explanation scoreExplanation) throws IOException {
- if (fv.advanceExact(docId)) return Explanation.match((float) fv.doubleValue(), "ReplaceableConstDoubleValues");
+ final ReplaceableConstDoubleValues currentFv = perThreadDoubleValues.computeIfAbsent(
+ Thread.currentThread().getId(),
+ threadId -> new ReplaceableConstDoubleValues()
+ );
+ if (currentFv.advanceExact(docId)) return Explanation.match((float) currentFv.doubleValue(), "ReplaceableConstDoubleValues");
else return Explanation.noMatch("ReplaceableConstDoubleValues");
}
@@ -77,7 +86,11 @@ public int hashCode() {
}
public void setValue(double v) {
- fv.setValue(v);
+ final ReplaceableConstDoubleValues currentFv = perThreadDoubleValues.computeIfAbsent(
+ Thread.currentThread().getId(),
+ threadId -> new ReplaceableConstDoubleValues()
+ );
+ currentFv.setValue(v);
}
@Override
diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
index bb11e493ba3d1..e480fbbd22ad2 100644
--- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
+++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
@@ -32,12 +32,16 @@
package org.opensearch.script.mustache;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchRequest;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.plugins.Plugin;
import org.opensearch.script.ScriptType;
-import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
import java.util.Arrays;
import java.util.Collection;
@@ -46,6 +50,7 @@
import java.util.Map;
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo;
@@ -53,7 +58,24 @@
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
-public class MultiSearchTemplateIT extends OpenSearchIntegTestCase {
+public class MultiSearchTemplateIT extends ParameterizedOpenSearchIntegTestCase {
+
+ public MultiSearchTemplateIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ );
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+ }
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheModulePlugin.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheModulePlugin.java
index 434a117d9b47e..6b33ac3b6be08 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheModulePlugin.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheModulePlugin.java
@@ -65,6 +65,7 @@ public ScriptEngine getScriptEngine(Settings settings, Collection> getActions() {
return Arrays.asList(
new ActionHandler<>(SearchTemplateAction.INSTANCE, TransportSearchTemplateAction.class),
+ new ActionHandler<>(RenderSearchTemplateAction.INSTANCE, TransportRenderSearchTemplateAction.class),
new ActionHandler<>(MultiSearchTemplateAction.INSTANCE, TransportMultiSearchTemplateAction.class)
);
}
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java
index f4d7198dc2124..ec84475b70bb6 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java
@@ -59,7 +59,7 @@
/**
* Main entry point handling template registration, compilation and
* execution.
- *
+ * <p>
* Template handling is based on Mustache. Template handling is a two step
* process: First compile the string representing the template, the resulting
* {@link Mustache} object can then be re-used for subsequent executions.
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RenderSearchTemplateAction.java
new file mode 100644
index 0000000000000..1feb916c4ce73
--- /dev/null
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RenderSearchTemplateAction.java
@@ -0,0 +1,21 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.script.mustache;
+
+import org.opensearch.action.ActionType;
+
+public class RenderSearchTemplateAction extends ActionType<SearchTemplateResponse> {
+
+ public static final RenderSearchTemplateAction INSTANCE = new RenderSearchTemplateAction();
+ public static final String NAME = "indices:data/read/search/template/render";
+
+ private RenderSearchTemplateAction() {
+ super(NAME, SearchTemplateResponse::new);
+ }
+}
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java
index 7a94fc45837d9..9ffa2c94cb56f 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java
@@ -81,6 +81,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client
renderRequest.setScript(id);
}
- return channel -> client.execute(SearchTemplateAction.INSTANCE, renderRequest, new RestToXContentListener<>(channel));
+ return channel -> client.execute(RenderSearchTemplateAction.INSTANCE, renderRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java
index 1aabea30fc651..d02c5f1efa591 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java
@@ -259,16 +259,25 @@ public void writeTo(StreamOutput out) throws IOException {
@Override
public String[] indices() {
+ if (request == null) {
+ return new String[0];
+ }
return request.indices();
}
@Override
public IndicesOptions indicesOptions() {
+ if (request == null) {
+ return SearchRequest.DEFAULT_INDICES_OPTIONS;
+ }
return request.indicesOptions();
}
@Override
public IndicesRequest indices(String... indices) {
+ if (request == null) {
+ return new SearchRequest(new String[0]).indices(indices);
+ }
return request.indices(indices);
}
}
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportRenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportRenderSearchTemplateAction.java
new file mode 100644
index 0000000000000..993d77ffaa75c
--- /dev/null
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportRenderSearchTemplateAction.java
@@ -0,0 +1,30 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.script.mustache;
+
+import org.opensearch.action.support.ActionFilters;
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.inject.Inject;
+import org.opensearch.core.xcontent.NamedXContentRegistry;
+import org.opensearch.script.ScriptService;
+import org.opensearch.transport.TransportService;
+
+public class TransportRenderSearchTemplateAction extends TransportSearchTemplateAction {
+
+ @Inject
+ public TransportRenderSearchTemplateAction(
+ TransportService transportService,
+ ActionFilters actionFilters,
+ ScriptService scriptService,
+ NamedXContentRegistry xContentRegistry,
+ NodeClient client
+ ) {
+ super(RenderSearchTemplateAction.NAME, transportService, actionFilters, scriptService, xContentRegistry, client);
+ }
+}
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
index 6e8b9d059b583..d75cc0337b66c 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
@@ -61,9 +61,9 @@ public class TransportSearchTemplateAction extends HandledTransportAction listener) {
final SearchTemplateResponse response = new SearchTemplateResponse();
diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheScriptEngineTests.java
index 9e97863306148..fbb7d09709a91 100644
--- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheScriptEngineTests.java
+++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheScriptEngineTests.java
@@ -200,7 +200,7 @@ private String getChars() {
/**
* From https://www.ietf.org/rfc/rfc4627.txt:
- *
+ *
* All Unicode characters may be placed within the
* quotation marks except for the characters that must be escaped:
* quotation mark, reverse solidus, and the control characters (U+0000
diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java
index 72443d1323b44..71ce616fd5d94 100644
--- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java
+++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java
@@ -32,6 +32,7 @@
package org.opensearch.script.mustache;
+import org.opensearch.action.search.SearchRequest;
import org.opensearch.core.common.io.stream.Writeable;
import org.opensearch.script.ScriptType;
import org.opensearch.search.RandomSearchRequestGenerator;
@@ -110,4 +111,19 @@ public static SearchTemplateRequest createRandomRequest() {
request.setRequest(RandomSearchRequestGenerator.randomSearchRequest(SearchSourceBuilder::searchSource));
return request;
}
+
+ public void testSimulatedSearchTemplateRequest() {
+ SearchTemplateRequest request = new SearchTemplateRequest();
+ request.setSimulate(true);
+
+ assertEquals(0, request.indices().length);
+ assertEquals(SearchRequest.DEFAULT_INDICES_OPTIONS, request.indicesOptions());
+ assertEquals(2, request.indices("index1", "index2").indices().length);
+
+ SearchTemplateRequest randomRequest = createRandomRequest();
+ int expectedIndicesLength = randomRequest.indices().length;
+ request.setSimulate(true);
+
+ assertEquals(expectedIndicesLength, randomRequest.indices().length);
+ }
}
diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle
index d7af8621c478a..fb51a0bb7f157 100644
--- a/modules/lang-painless/build.gradle
+++ b/modules/lang-painless/build.gradle
@@ -33,7 +33,6 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowBasePlugin
apply plugin: 'opensearch.validate-rest-spec'
apply plugin: 'opensearch.yaml-rest-test'
-apply plugin: 'com.github.johnrengelman.shadow'
opensearchplugin {
description 'An easy, safe and fast scripting language for OpenSearch'
@@ -62,30 +61,6 @@ dependencies {
api project('spi')
}
-test {
- doFirst {
- test.classpath -= project.files(project.tasks.named('shadowJar'))
- test.classpath -= project.configurations.getByName(ShadowBasePlugin.CONFIGURATION_NAME)
- test.classpath += project.extensions.getByType(SourceSetContainer).getByName(SourceSet.MAIN_SOURCE_SET_NAME).runtimeClasspath
- }
-}
-
-shadowJar {
- archiveClassifier.set('')
- relocate 'org.objectweb', 'org.opensearch.repackage.org.objectweb'
- dependencies {
- include(dependency("org.ow2.asm:asm:${versions.asm}"))
- include(dependency("org.ow2.asm:asm-util:${versions.asm}"))
- include(dependency("org.ow2.asm:asm-tree:${versions.asm}"))
- include(dependency("org.ow2.asm:asm-commons:${versions.asm}"))
- include(dependency("org.ow2.asm:asm-analysis:${versions.asm}"))
- }
-}
-
-tasks.validateNebulaPom.dependsOn tasks.generatePomFileForShadowPublication
-tasks.validateShadowPom.dependsOn tasks.generatePomFileForNebulaPublication
-tasks.withType(AbstractPublishToMaven)*.dependsOn "generatePomFileForShadowPublication", "generatePomFileForNebulaPublication"
-
tasks.named("dependencyLicenses").configure {
mapping from: /asm-.*/, to: 'asm'
}
diff --git a/modules/lang-painless/licenses/asm-9.5.jar.sha1 b/modules/lang-painless/licenses/asm-9.5.jar.sha1
deleted file mode 100644
index ea4aa3581dc87..0000000000000
--- a/modules/lang-painless/licenses/asm-9.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc6ea1875f4d64fbc85e1691c95b96a3d8569c90
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-9.6.jar.sha1 b/modules/lang-painless/licenses/asm-9.6.jar.sha1
new file mode 100644
index 0000000000000..2d9e6a9d3cfd6
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-9.6.jar.sha1
@@ -0,0 +1 @@
+aa205cf0a06dbd8e04ece91c0b37c3f5d567546a
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-analysis-9.5.jar.sha1 b/modules/lang-painless/licenses/asm-analysis-9.5.jar.sha1
deleted file mode 100644
index 9e87d3ce7d719..0000000000000
--- a/modules/lang-painless/licenses/asm-analysis-9.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-490bacc77de7cbc0be1a30bb3471072d705be4a4
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-analysis-9.6.jar.sha1 b/modules/lang-painless/licenses/asm-analysis-9.6.jar.sha1
new file mode 100644
index 0000000000000..fa42ea1198165
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-analysis-9.6.jar.sha1
@@ -0,0 +1 @@
+9ce6c7b174bd997fc2552dff47964546bd7a5ec3
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-commons-9.5.jar.sha1 b/modules/lang-painless/licenses/asm-commons-9.5.jar.sha1
deleted file mode 100644
index 5be792660c19f..0000000000000
--- a/modules/lang-painless/licenses/asm-commons-9.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-19ab5b5800a3910d30d3a3e64fdb00fd0cb42de0
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-commons-9.6.jar.sha1 b/modules/lang-painless/licenses/asm-commons-9.6.jar.sha1
new file mode 100644
index 0000000000000..a0814f495771f
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-commons-9.6.jar.sha1
@@ -0,0 +1 @@
+f1a9e5508eff490744144565c47326c8648be309
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-tree-9.5.jar.sha1 b/modules/lang-painless/licenses/asm-tree-9.5.jar.sha1
deleted file mode 100644
index fb42db6a9d15c..0000000000000
--- a/modules/lang-painless/licenses/asm-tree-9.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fd33c8b6373abaa675be407082fdfda35021254a
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-tree-9.6.jar.sha1 b/modules/lang-painless/licenses/asm-tree-9.6.jar.sha1
new file mode 100644
index 0000000000000..101eb03b4b736
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-tree-9.6.jar.sha1
@@ -0,0 +1 @@
+c0cdda9d211e965d2a4448aa3fd86110f2f8c2de
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-util-9.5.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.5.jar.sha1
deleted file mode 100644
index 5fffbfe655deb..0000000000000
--- a/modules/lang-painless/licenses/asm-util-9.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-64b5a1fc8c1b15ed2efd6a063e976bc8d3dc5ffe
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-util-9.6.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.6.jar.sha1
new file mode 100644
index 0000000000000..1f42ac62dc69c
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-util-9.6.jar.sha1
@@ -0,0 +1 @@
+f77caf84eb93786a749b2baa40865b9613e3eaee
\ No newline at end of file
diff --git a/modules/lang-painless/spi/build.gradle b/modules/lang-painless/spi/build.gradle
index 59a77870b4987..32556f907fdc0 100644
--- a/modules/lang-painless/spi/build.gradle
+++ b/modules/lang-painless/spi/build.gradle
@@ -33,7 +33,7 @@ apply plugin: 'opensearch.publish'
base {
group = 'org.opensearch.plugin'
- archivesBaseName = 'opensearch-scripting-painless-spi'
+ archivesName = 'opensearch-scripting-painless-spi'
}
dependencies {
diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Allowlist.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Allowlist.java
index 56ade63efa5e2..265263b41ca89 100644
--- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Allowlist.java
+++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Allowlist.java
@@ -42,7 +42,7 @@
* Allowlist contains data structures designed to be used to generate an allowlist of Java classes,
* constructors, methods, and fields that can be used within a Painless script at both compile-time
* and run-time.
- *
+ *
* A Allowlist consists of several pieces with {@link AllowlistClass}s as the top level. Each
* {@link AllowlistClass} will contain zero-to-many {@link AllowlistConstructor}s, {@link AllowlistMethod}s, and
* {@link AllowlistField}s which are what will be available with a Painless script. See each individual
diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistClass.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistClass.java
index 17e6814addf3b..67f5a07846c53 100644
--- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistClass.java
+++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistClass.java
@@ -45,12 +45,12 @@
* classes. Though, since multiple allowlists may be combined into a single allowlist for a
* specific context, as long as multiple classes representing the same Java class have the same
* class name and have legal constructor/method overloading they can be merged together.
- *
+ *
* Classes in Painless allow for arity overloading for constructors and methods. Arity overloading
* means that multiple constructors are allowed for a single class as long as they have a different
* number of parameters, and multiples methods with the same name are allowed for a single class
* as long as they have the same return type and a different number of parameters.
- *
+ *
* Classes will automatically extend other allowlisted classes if the Java class they represent is a
* subclass of other classes including Java interfaces.
*/
diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java
index 71265f82acacc..632fee9187eba 100644
--- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java
+++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java
@@ -67,11 +67,11 @@ public static Allowlist loadFromResourceFiles(Class> resource, String... filep
* is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java
* reflection objects for each individual {@link Class}, {@link Constructor}, {@link Method}, and {@link Field}
* specified as part of the allowlist in the text file.
- *
+ *
* A single pass is made through each file to collect all the information about each class, constructor, method,
* and field. Most validation will be done at a later point after all allowlists have been gathered and their
* merging takes place.
- *
+ *
* A painless type name is one of the following:
*
* def - The Painless dynamic type which is automatically included without a need to be
@@ -129,13 +129,13 @@ public static Allowlist loadFromResourceFiles(Class<?> resource, String... filep
* be appropriately parsed and handled. Painless complex types must be specified with the
* fully-qualified Java class name. Method argument types, method return types, and field types
* must be specified with Painless type names (def, fully-qualified, or short) as described earlier.
- *
+ *
* The following example is used to create a single allowlist text file:
*
- * {@code
+ *
* # primitive types
*
- * class int -> int {
+ * class int -> int {
* }
*
* # complex types
@@ -161,7 +161,7 @@ public static Allowlist loadFromResourceFiles(Class<?> resource, String... filep
* int value1
* def value2
* }
- * }
+ *
*/
 public static Allowlist loadFromResourceFiles(Class<?> resource, Map<String, AllowlistAnnotationParser> parsers, String... filepaths) {
 List<AllowlistClass> allowlistClasses = new ArrayList<>();
diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistMethod.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistMethod.java
index 8bb0231ff3f4f..9fcaec3dbf7b6 100644
--- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistMethod.java
+++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistMethod.java
@@ -45,7 +45,7 @@
* are using the '.' operator on an existing class variable/field. Painless classes may have multiple
* methods with the same name as long as they comply with arity overloading described in
* {@link AllowlistClass}.
- *
+ *
* Classes may also have additional methods that are not part of the Java class the class represents -
* these are known as augmented methods. An augmented method can be added to a class as a part of any
* Java class as long as the method is static and the first parameter of the method is the Java class
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java
index 35c676653fdc3..c19d4f361b2b6 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java
@@ -73,9 +73,7 @@ final class Compiler {
*/
private static final CodeSource CODESOURCE;
- /**
- * Setup the code privileges.
- */
+ /* Setup the code privileges. */
static {
try {
// Setup the code privileges.
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupUtility.java
index cae425ad1fe3b..3164f5e6388c7 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupUtility.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupUtility.java
@@ -42,13 +42,13 @@
/**
* PainlessLookupUtility contains methods shared by {@link PainlessLookupBuilder}, {@link PainlessLookup}, and other classes within
* Painless for conversion between type names and types along with some other various utility methods.
- *
+ *
* The following terminology is used for variable names throughout the lookup package:
- *
+ *
* A class is a set of methods and fields under a specific class name. A type is either a class or an array under a specific type name.
* Note the distinction between class versus type is class means that no array classes will be be represented whereas type allows array
* classes to be represented. The set of available classes will always be a subset of the available types.
- *
+ *
* Under ambiguous circumstances most variable names are prefixed with asm, java, or painless. If the variable value is the same for asm,
* java, and painless, no prefix is used. Target is used as a prefix to represent if a constructor, method, or field is being
* called/accessed on that specific class. Parameter is often a postfix used to represent if a type is used as a parameter to a
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticAnalysisPhase.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticAnalysisPhase.java
index 04165f44ba212..8a05d6742af97 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticAnalysisPhase.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticAnalysisPhase.java
@@ -126,9 +126,9 @@ public void visitFunction(SFunction userFunctionNode, ScriptScope scriptScope) {
/**
* Visits an expression that is also considered a statement.
- *
+ *
* If the statement is a return from the execute method, performs return value conversion.
- *
+ *
* Checks: control flow, type validation
*/
@Override
@@ -168,9 +168,9 @@ public void visitExpression(SExpression userExpressionNode, SemanticScope semant
/**
* Visits a return statement and casts the value to the return type if possible.
- *
+ *
* If the statement is a return from the execute method, performs return value conversion.
- *
+ *
* Checks: type validation
*/
@Override
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/SemanticScope.java b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/SemanticScope.java
index e27530d745e8f..5ac802038afa6 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/SemanticScope.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/SemanticScope.java
@@ -49,7 +49,7 @@
* Tracks information within a scope required for compilation during the
* semantic phase in the user tree. There are three types of scopes -
* {@link FunctionScope}, {@link LambdaScope}, and {@link BlockScope}.
- *
+ *
* Scopes are stacked as they are created during the user tree's semantic
* phase with each scope beyond the top-level containing a reference to
* its parent. As a scope is no longer necessary, it's dropped automatically
diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt
index 5533f0bc55522..9bce617099c6f 100644
--- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt
+++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt
@@ -24,7 +24,6 @@ class org.opensearch.script.ScoreScript @no_import {
static_import {
int termFreq(org.opensearch.script.ScoreScript, String, String) bound_to org.opensearch.script.ScoreScriptUtils$TermFreq
- float tf(org.opensearch.script.ScoreScript, String, String) bound_to org.opensearch.script.ScoreScriptUtils$TF
long totalTermFreq(org.opensearch.script.ScoreScript, String, String) bound_to org.opensearch.script.ScoreScriptUtils$TotalTermFreq
long sumTotalTermFreq(org.opensearch.script.ScoreScript, String) bound_to org.opensearch.script.ScoreScriptUtils$SumTotalTermFreq
double saturation(double, double) from_class org.opensearch.script.ScoreScriptUtils
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
index 397a7b48b472a..366e848416328 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
@@ -82,7 +82,7 @@
/**
* Mapper for a text field that optimizes itself for as-you-type completion by indexing its content into subfields. Each subfield
* modifies the analysis chain of the root field to index terms the user would create as they type out the value in the root field
- *
+ *
* The structure of these fields is
*
*
diff --git a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java
index 1a51259e9e4e4..e930780613ed6 100644
--- a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java
+++ b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java
@@ -373,7 +373,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException {
* A query that rewrites into another query using
* {@link JoinUtil#createJoinQuery(String, Query, Query, IndexSearcher, ScoreMode, OrdinalMap, int, int)}
* that executes the actual join.
- *
+ *
* This query is exclusively used by the {@link HasChildQueryBuilder} and {@link HasParentQueryBuilder} to get access
* to the {@link DirectoryReader} used by the current search in order to retrieve the {@link OrdinalMap}.
* The {@link OrdinalMap} is required by {@link JoinUtil} to execute the join.
diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
index b5c082a5667c1..c8763c2f3f749 100644
--- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
+++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
@@ -31,6 +31,8 @@
package org.opensearch.percolator;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.apache.lucene.search.join.ScoreMode;
import org.opensearch.OpenSearchException;
import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
@@ -39,6 +41,7 @@
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.DistanceUnit;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.common.bytes.BytesArray;
@@ -54,7 +57,7 @@
import org.opensearch.plugins.Plugin;
import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
import java.io.IOException;
import java.util.Arrays;
@@ -77,6 +80,7 @@
import static org.opensearch.index.query.QueryBuilders.spanNotQuery;
import static org.opensearch.index.query.QueryBuilders.spanTermQuery;
import static org.opensearch.index.query.QueryBuilders.termQuery;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchHits;
@@ -86,7 +90,24 @@
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.IsNull.notNullValue;
-public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase {
+public class PercolatorQuerySearchIT extends ParameterizedOpenSearchIntegTestCase {
+
+ public PercolatorQuerySearchIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ );
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+ }
@Override
protected boolean addMockGeoShapeFieldMapper() {
diff --git a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java
index 6eb974c77a5f3..cdc3cac1a1f06 100644
--- a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java
+++ b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java
@@ -32,10 +32,14 @@
package org.opensearch.index.rankeval;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.opensearch.OpenSearchException;
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.support.IndicesOptions;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.index.query.MatchAllQueryBuilder;
import org.opensearch.index.query.QueryBuilders;
@@ -43,7 +47,7 @@
import org.opensearch.indices.IndexClosedException;
import org.opensearch.plugins.Plugin;
import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
import org.junit.Before;
import java.util.ArrayList;
@@ -54,15 +58,33 @@
import java.util.Set;
import static org.opensearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.hamcrest.Matchers.instanceOf;
-public class RankEvalRequestIT extends OpenSearchIntegTestCase {
+public class RankEvalRequestIT extends ParameterizedOpenSearchIntegTestCase {
private static final String TEST_INDEX = "test";
private static final String INDEX_ALIAS = "alias0";
private static final int RELEVANT_RATING_1 = 1;
+ public RankEvalRequestIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ );
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+ }
+
@Override
 protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(RankEvalModulePlugin.class);
diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java
index ffdea14e0873d..8e72c6ef06849 100644
--- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java
+++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java
@@ -71,10 +71,10 @@
* supplied query parameters) against a set of possible search requests (read:
* search specifications, expressed as query/search request templates) and
* compares the result against a set of annotated documents per search intent.
- *
+ *
* If any documents are returned that haven't been annotated the document id of
* those is returned per search intent.
- *
+ *
* The resulting search quality is computed in terms of precision at n and
* returned for each search specification for the full set of search intents as
* averaged precision at n.
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
index fcf1fa489f740..d96e3212e05a2 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
@@ -67,7 +67,7 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase {
/**
* Assuming the docs are ranked in the following order:
- *
+ *
* rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
* -------------------------------------------------------------------------------------------
* 1 | 3 | 7.0 | 1.0 | 7.0 | 7.0 |
@@ -76,7 +76,7 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase {
* 4 | 0 | 0.0 | 2.321928094887362 | 0.0
* 5 | 1 | 1.0 | 2.584962500721156 | 0.38685280723454163
* 6 | 2 | 3.0 | 2.807354922057604 | 1.0686215613240666
- *
+ *
* dcg = 13.84826362927298 (sum of last column)
*/
public void testDCGAt() {
@@ -91,20 +91,20 @@ public void testDCGAt() {
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
assertEquals(EXPECTED_DCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA);
- /**
- * Check with normalization: to get the maximal possible dcg, sort documents by
- * relevance in descending order
- *
- * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
- * ---------------------------------------------------------------------------------------
- * 1 | 3 | 7.0 | 1.0 | 7.0
- * 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202
- * 3 | 2 | 3.0 | 2.0 | 1.5
- * 4 | 2 | 3.0 | 2.321928094887362 | 1.2920296742201793
- * 5 | 1 | 1.0 | 2.584962500721156 | 0.38685280723454163
- * 6 | 0 | 0.0 | 2.807354922057604 | 0.0
- *
- * idcg = 14.595390756454922 (sum of last column)
+ /*
+ Check with normalization: to get the maximal possible dcg, sort documents by
+ relevance in descending order
+
+ rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
+ ---------------------------------------------------------------------------------------
+ 1 | 3 | 7.0 | 1.0 | 7.0
+ 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202
+ 3 | 2 | 3.0 | 2.0 | 1.5
+ 4 | 2 | 3.0 | 2.321928094887362 | 1.2920296742201793
+ 5 | 1 | 1.0 | 2.584962500721156 | 0.38685280723454163
+ 6 | 0 | 0.0 | 2.807354922057604 | 0.0
+
+ idcg = 14.595390756454922 (sum of last column)
*/
dcg = new DiscountedCumulativeGain(true, null, 10);
assertEquals(EXPECTED_NDCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA);
@@ -113,7 +113,7 @@ public void testDCGAt() {
/**
* This tests metric when some documents in the search result don't have a
* rating provided by the user.
- *
+ *
* rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
* -------------------------------------------------------------------------------------------
* 1 | 3 | 7.0 | 1.0 | 7.0 2 |
@@ -122,7 +122,7 @@ public void testDCGAt() {
* 4 | n/a | n/a | n/a | n/a
* 5 | 1 | 1.0 | 2.584962500721156 | 0.38685280723454163
* 6 | n/a | n/a | n/a | n/a
- *
+ *
* dcg = 12.779642067948913 (sum of last column)
*/
public void testDCGAtSixMissingRatings() {
@@ -143,20 +143,20 @@ public void testDCGAtSixMissingRatings() {
assertEquals(12.779642067948913, result.metricScore(), DELTA);
assertEquals(2, filterUnratedDocuments(result.getHitsAndRatings()).size());
- /**
- * Check with normalization: to get the maximal possible dcg, sort documents by
- * relevance in descending order
- *
- * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
- * ----------------------------------------------------------------------------------------
- * 1 | 3 | 7.0 | 1.0 | 7.0
- * 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202
- * 3 | 2 | 3.0 | 2.0 | 1.5
- * 4 | 1 | 1.0 | 2.321928094887362 | 0.43067655807339
- * 5 | n.a | n.a | n.a. | n.a.
- * 6 | n.a | n.a | n.a | n.a
- *
- * idcg = 13.347184833073591 (sum of last column)
+ /*
+ Check with normalization: to get the maximal possible dcg, sort documents by
+ relevance in descending order
+
+ rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
+ ----------------------------------------------------------------------------------------
+ 1 | 3 | 7.0 | 1.0 | 7.0
+ 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202
+ 3 | 2 | 3.0 | 2.0 | 1.5
+ 4 | 1 | 1.0 | 2.321928094887362 | 0.43067655807339
+ 5 | n.a | n.a | n.a. | n.a.
+ 6 | n.a | n.a | n.a | n.a
+
+ idcg = 13.347184833073591 (sum of last column)
*/
dcg = new DiscountedCumulativeGain(true, null, 10);
assertEquals(12.779642067948913 / 13.347184833073591, dcg.evaluate("id", hits, rated).metricScore(), DELTA);
@@ -166,7 +166,7 @@ public void testDCGAtSixMissingRatings() {
* This tests that normalization works as expected when there are more rated
* documents than search hits because we restrict DCG to be calculated at the
* fourth position
- *
+ *
* rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
* -------------------------------------------------------------------------------------------
* 1 | 3 | 7.0 | 1.0 | 7.0 2 |
@@ -176,7 +176,7 @@ public void testDCGAtSixMissingRatings() {
* -----------------------------------------------------------------
* 5 | 1 | 1.0 | 2.584962500721156 | 0.38685280723454163
* 6 | n/a | n/a | n/a | n/a
- *
+ *
* dcg = 12.392789260714371 (sum of last column until position 4)
*/
public void testDCGAtFourMoreRatings() {
@@ -200,21 +200,21 @@ public void testDCGAtFourMoreRatings() {
assertEquals(12.392789260714371, result.metricScore(), DELTA);
assertEquals(1, filterUnratedDocuments(result.getHitsAndRatings()).size());
- /**
- * Check with normalization: to get the maximal possible dcg, sort documents by
- * relevance in descending order
- *
- * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
- * ---------------------------------------------------------------------------------------
- * 1 | 3 | 7.0 | 1.0 | 7.0
- * 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202
- * 3 | 2 | 3.0 | 2.0 | 1.5
- * 4 | 1 | 1.0 | 2.321928094887362 | 0.43067655807339
- * ---------------------------------------------------------------------------------------
- * 5 | n.a | n.a | n.a. | n.a.
- * 6 | n.a | n.a | n.a | n.a
- *
- * idcg = 13.347184833073591 (sum of last column)
+ /*
+ Check with normalization: to get the maximal possible dcg, sort documents by
+ relevance in descending order
+
+ rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1)
+ ---------------------------------------------------------------------------------------
+ 1 | 3 | 7.0 | 1.0 | 7.0
+ 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202
+ 3 | 2 | 3.0 | 2.0 | 1.5
+ 4 | 1 | 1.0 | 2.321928094887362 | 0.43067655807339
+ ---------------------------------------------------------------------------------------
+ 5 | n.a | n.a | n.a. | n.a.
+ 6 | n.a | n.a | n.a | n.a
+
+ idcg = 13.347184833073591 (sum of last column)
*/
dcg = new DiscountedCumulativeGain(true, null, 10);
assertEquals(12.392789260714371 / 13.347184833073591, dcg.evaluate("id", hits, ratedDocs).metricScore(), DELTA);
diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecReindexIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecReindexIT.java
deleted file mode 100644
index 604c233ca49c4..0000000000000
--- a/modules/reindex/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecReindexIT.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch.index.codec;
-
-import org.opensearch.action.admin.indices.flush.FlushResponse;
-import org.opensearch.action.admin.indices.refresh.RefreshResponse;
-import org.opensearch.action.admin.indices.segments.IndicesSegmentsRequest;
-import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
-import org.opensearch.action.support.ActiveShardCount;
-import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.index.engine.Segment;
-import org.opensearch.index.reindex.BulkByScrollResponse;
-import org.opensearch.index.reindex.ReindexAction;
-import org.opensearch.index.reindex.ReindexModulePlugin;
-import org.opensearch.index.reindex.ReindexRequestBuilder;
-import org.opensearch.index.reindex.ReindexTestCase;
-import org.opensearch.plugins.Plugin;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ExecutionException;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-
-import static java.util.stream.Collectors.toList;
-import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_METADATA;
-import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_READ;
-import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_WRITE;
-import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY;
-import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
-
-public class MultiCodecReindexIT extends ReindexTestCase {
-
- @Override
- protected Collection> nodePlugins() {
- return List.of(ReindexModulePlugin.class);
- }
-
- public void testReindexingMultipleCodecs() throws InterruptedException, ExecutionException {
- internalCluster().ensureAtLeastNumDataNodes(1);
- Map codecMap = Map.of(
- "best_compression",
- "BEST_COMPRESSION",
- "zlib",
- "BEST_COMPRESSION",
- "default",
- "BEST_SPEED",
- "lz4",
- "BEST_SPEED"
- );
-
- for (Map.Entry codec : codecMap.entrySet()) {
- assertReindexingWithMultipleCodecs(codec.getKey(), codec.getValue(), codecMap);
- }
-
- }
-
- private void assertReindexingWithMultipleCodecs(String destCodec, String destCodecMode, Map codecMap)
- throws ExecutionException, InterruptedException {
-
- final String index = "test-index" + destCodec;
- final String destIndex = "dest-index" + destCodec;
-
- // creating source index
- createIndex(
- index,
- Settings.builder()
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
- .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
- .put("index.codec", "default")
- .put("index.merge.policy.max_merged_segment", "1b")
- .build()
- );
- ensureGreen(index);
-
- final int nbDocs = randomIntBetween(2, 5);
-
- // indexing with all 4 codecs
- for (Map.Entry codec : codecMap.entrySet()) {
- useCodec(index, codec.getKey());
- ingestDocs(index, nbDocs);
- }
-
- assertTrue(
- getSegments(index).stream()
- .flatMap(s -> s.getAttributes().values().stream())
- .collect(Collectors.toSet())
- .containsAll(codecMap.values())
- );
-
- // creating destination index with destination codec
- createIndex(
- destIndex,
- Settings.builder()
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
- .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
- .put("index.codec", destCodec)
- .build()
- );
-
- BulkByScrollResponse bulkResponse = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source(index)
- .destination(destIndex)
- .refresh(true)
- .waitForActiveShards(ActiveShardCount.ONE)
- .get();
-
- assertEquals(codecMap.size() * nbDocs, bulkResponse.getCreated());
- assertEquals(codecMap.size() * nbDocs, bulkResponse.getTotal());
- assertEquals(0, bulkResponse.getDeleted());
- assertEquals(0, bulkResponse.getNoops());
- assertEquals(0, bulkResponse.getVersionConflicts());
- assertEquals(1, bulkResponse.getBatches());
- assertTrue(bulkResponse.getTook().getMillis() > 0);
- assertEquals(0, bulkResponse.getBulkFailures().size());
- assertEquals(0, bulkResponse.getSearchFailures().size());
- assertTrue(getSegments(destIndex).stream().allMatch(segment -> segment.attributes.containsValue(destCodecMode)));
- }
-
- private void useCodec(String index, String codec) throws ExecutionException, InterruptedException {
- assertAcked(client().admin().indices().prepareClose(index).setWaitForActiveShards(1));
-
- assertAcked(
- client().admin()
- .indices()
- .updateSettings(new UpdateSettingsRequest(index).settings(Settings.builder().put("index.codec", codec)))
- .get()
- );
-
- assertAcked(client().admin().indices().prepareOpen(index).setWaitForActiveShards(1));
- }
-
- private void flushAndRefreshIndex(String index) {
-
- // Request is not blocked
- for (String blockSetting : Arrays.asList(
- SETTING_BLOCKS_READ,
- SETTING_BLOCKS_WRITE,
- SETTING_READ_ONLY,
- SETTING_BLOCKS_METADATA,
- SETTING_READ_ONLY_ALLOW_DELETE
- )) {
- try {
- enableIndexBlock(index, blockSetting);
- // flush
- FlushResponse flushResponse = client().admin().indices().prepareFlush(index).setForce(true).execute().actionGet();
- assertNoFailures(flushResponse);
-
- // refresh
- RefreshResponse refreshResponse = client().admin().indices().prepareRefresh(index).execute().actionGet();
- assertNoFailures(refreshResponse);
- } finally {
- disableIndexBlock(index, blockSetting);
- }
- }
- }
-
- private void ingestDocs(String index, int nbDocs) throws InterruptedException {
-
- indexRandom(
- randomBoolean(),
- false,
- randomBoolean(),
- IntStream.range(0, nbDocs)
- .mapToObj(i -> client().prepareIndex(index).setId(UUID.randomUUID().toString()).setSource("num", i))
- .collect(toList())
- );
- flushAndRefreshIndex(index);
- }
-
- private ArrayList getSegments(String index) {
-
- return new ArrayList<>(
- client().admin()
- .indices()
- .segments(new IndicesSegmentsRequest(index))
- .actionGet()
- .getIndices()
- .get(index)
- .getShards()
- .get(0)
- .getShards()[0].getSegments()
- );
- }
-
-}
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
index e2442e1f483f0..6ed486fbdb33b 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
@@ -206,7 +206,7 @@ public abstract class AbstractAsyncBulkByScrollAction<
/**
* Build the {@link BiFunction} to apply to all {@link RequestWrapper}.
- *
+ *
* Public for testings....
*/
public BiFunction, ScrollableHitSource.Hit, RequestWrapper>> buildScriptApplier() {
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelper.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelper.java
index d5a6e392f2019..7534de1408bcc 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelper.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelper.java
@@ -63,14 +63,14 @@ private BulkByScrollParallelizationHelper() {}
/**
* Takes an action created by a {@link BulkByScrollTask} and runs it with regard to whether the request is sliced or not.
- *
+ *
* If the request is not sliced (i.e. the number of slices is 1), the worker action in the given {@link Runnable} will be started on
* the local node. If the request is sliced (i.e. the number of slices is more than 1), then a subrequest will be created for each
* slice and sent.
- *
+ *
* If slices are set as {@code "auto"}, this class will resolve that to a specific number based on characteristics of the source
* indices. A request with {@code "auto"} slices may end up being sliced or unsliced.
- *
+ *
* This method is equivalent to calling {@link #initTaskState} followed by {@link #executeSlicedAction}
*/
static > void startSlicedAction(
@@ -98,11 +98,11 @@ public void onFailure(Exception e) {
/**
* Takes an action and a {@link BulkByScrollTask} and runs it with regard to whether this task is a
* leader or worker.
- *
+ *
* If this task is a worker, the worker action in the given {@link Runnable} will be started on the local
* node. If the task is a leader (i.e. the number of slices is more than 1), then a subrequest will be
* created for each slice and sent.
- *
+ *
* This method can only be called after the task state is initialized {@link #initTaskState}.
*/
static > void executeSlicedAction(
@@ -125,7 +125,7 @@ static > void executeSliced
/**
* Takes a {@link BulkByScrollTask} and ensures that its initial task state (leader or worker) is set.
- *
+ *
* If slices are set as {@code "auto"}, this method will resolve that to a specific number based on
* characteristics of the source indices. A request with {@code "auto"} slices may end up being sliced or
* unsliced. This method does not execute the action. In order to execute the action see
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/MultiCodecReindexTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/MultiCodecReindexTests.java
new file mode 100644
index 0000000000000..53a0545fd2ff7
--- /dev/null
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/MultiCodecReindexTests.java
@@ -0,0 +1,160 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.reindex;
+
+import org.opensearch.action.admin.indices.segments.IndicesSegmentsRequest;
+import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
+import org.opensearch.action.support.ActiveShardCount;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.index.MergePolicyProvider;
+import org.opensearch.index.engine.Segment;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.test.InternalSettingsPlugin;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+import static java.util.stream.Collectors.toList;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+
+public class MultiCodecReindexTests extends ReindexTestCase {
+ final static Map codecMap = Map.of(
+ "best_compression",
+ "BEST_COMPRESSION",
+ "zlib",
+ "BEST_COMPRESSION",
+ "default",
+ "BEST_SPEED",
+ "lz4",
+ "BEST_SPEED"
+ );
+ final static String[] codecChoices = codecMap.keySet().toArray(String[]::new);
+
+ @Override
+ protected Collection> nodePlugins() {
+ return List.of(InternalSettingsPlugin.class, ReindexModulePlugin.class);
+ }
+
+ public void testReindexingMultipleCodecs() throws InterruptedException, ExecutionException {
+ for (Map.Entry candidate : codecMap.entrySet()) {
+ final int nbDocs = randomIntBetween(2, 5);
+
+ final String destCodec = candidate.getKey();
+ final String destCodecMode = candidate.getValue();
+
+ final String index = "test-index-" + destCodec;
+ final String destIndex = "dest-index-" + destCodec;
+
+ // create source index
+ createIndex(
+ index,
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put("index.codec", randomFrom(codecChoices))
+ .put(MergePolicyProvider.INDEX_MERGE_ENABLED, false)
+ .build()
+ );
+ ensureGreen(index);
+
+ // index using all codecs
+ for (String codec : codecMap.keySet()) {
+ useCodec(index, codec);
+ ingestDocs(index, nbDocs);
+ }
+
+ assertTrue(
+ getSegments(index).stream()
+ .flatMap(s -> s.getAttributes().values().stream())
+ .collect(Collectors.toSet())
+ .containsAll(codecMap.values())
+ );
+
+ // create destination index with destination codec
+ createIndex(
+ destIndex,
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put("index.codec", destCodec)
+ .build()
+ );
+ ensureGreen(destIndex);
+
+ // perform reindex
+ BulkByScrollResponse response = reindex().source(index)
+ .destination(destIndex)
+ .refresh(true)
+ .waitForActiveShards(ActiveShardCount.ONE)
+ .get();
+ final int expectedResponseSize = codecMap.size() * nbDocs;
+
+ // assertions
+ assertEquals(0, response.getNoops());
+ assertEquals(1, response.getBatches());
+ assertEquals(0, response.getDeleted());
+ assertEquals(0, response.getVersionConflicts());
+ assertEquals(0, response.getBulkFailures().size());
+ assertEquals(0, response.getSearchFailures().size());
+
+ assertEquals(expectedResponseSize, response.getTotal());
+ assertEquals(expectedResponseSize, response.getCreated());
+
+ assertTrue(response.getTook().getMillis() > 0);
+ assertTrue(getSegments(destIndex).stream().allMatch(segment -> segment.attributes.containsValue(destCodecMode)));
+ }
+ }
+
+ private void useCodec(String index, String codec) throws ExecutionException, InterruptedException {
+ assertAcked(client().admin().indices().prepareClose(index).setWaitForActiveShards(1));
+
+ assertAcked(
+ client().admin()
+ .indices()
+ .updateSettings(new UpdateSettingsRequest(index).settings(Settings.builder().put("index.codec", codec)))
+ .get()
+ );
+
+ assertAcked(client().admin().indices().prepareOpen(index).setWaitForActiveShards(1));
+ }
+
+ private void ingestDocs(String index, int nbDocs) throws InterruptedException {
+ indexRandom(
+ randomBoolean(),
+ false,
+ randomBoolean(),
+ IntStream.range(0, nbDocs)
+ .mapToObj(i -> client().prepareIndex(index).setId(UUID.randomUUID().toString()).setSource("num", i))
+ .collect(toList())
+ );
+
+ flushAndRefresh(index);
+ }
+
+ private ArrayList getSegments(String index) {
+ return new ArrayList<>(
+ client().admin()
+ .indices()
+ .segments(new IndicesSegmentsRequest(index))
+ .actionGet()
+ .getIndices()
+ .get(index)
+ .getShards()
+ .get(0)
+ .getShards()[0].getSegments()
+ );
+ }
+}
diff --git a/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java b/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java
index 9e9d94c8e8fc0..4c8d8aab4532b 100644
--- a/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java
+++ b/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java
@@ -113,7 +113,7 @@ public URLRepository(
ClusterService clusterService,
RecoverySettings recoverySettings
) {
- super(metadata, false, namedXContentRegistry, clusterService, recoverySettings);
+ super(metadata, namedXContentRegistry, clusterService, recoverySettings);
if (URL_SETTING.exists(metadata.settings()) == false && REPOSITORIES_URL_SETTING.exists(environment.settings()) == false) {
throw new RepositoryException(metadata.name(), "missing url");
diff --git a/modules/search-pipeline-common/README.md b/modules/search-pipeline-common/README.md
new file mode 100644
index 0000000000000..70615d846987b
--- /dev/null
+++ b/modules/search-pipeline-common/README.md
@@ -0,0 +1,202 @@
+- [Search Pipelines](#search-pipelines)
+ - [Architecture](#architecture)
+ - [Search Processors](#search-processors)
+ - [Creating a Search Processor](#creating-a-search-processor)
+ - [Creating a Pipeline](#creating-a-search-pipeline)
+
+# Search Pipelines
+
+This README briefly covers the two types of search processors, explains how you can use them to create search pipelines, and walks through the creation of a new processor.
+
+## Architecture
+
+Search pipelines allow cluster operators to create and reuse [components](#search-processors) to transform search queries and results.
+
+With search pipelines, the operator can combine multiple [search processors](#search-processors) to create a transform which acts on the search request and/or search response.
+
+Search pipelines offer numerous benefits:
+
+1. search processors living in OpenSearch can be used by _all_ calling applications;
+2. search pipeline operations occur inside the OpenSearch cluster, so large results can be processed before returning to the calling application\*;
+3. search processors can be distributed in plugins to be shared with other OpenSearch users;
+4. search pipelines only need to be modified once (and without changing or redeploying any calling applications) to have a change occur on all incoming queries\*\*;
+5. search pipelines support standard APIs for accessing metrics and disaster recovery.
+
+*Within a cluster, results are passed using a more efficient, but version-specific binary protocol. You can pass result information back to a coordinator, allow it to post-process (e.g. rerank or collapse), and finally truncate it before sending it to the client over the less efficient but flexible JSON API.
+
+**For example, the `FilterQueryRequestProcessor` could be used to exclude search results immediately, without needing to make a code change in the application layer and deploy the change across your fleet.
+
+## Search Processors
+
+You can create many search pipelines by combining search processors in various orders. There are two types of search processors:
+
+1. search request processors which transform a request _before_ it is executed;
+2. search response processors which transform the output of a request _after_ it is executed.
+
+You can find all existing search processors registered in `SearchPipelineCommonModulePlugin.java` and described on the documentation website.
+
+### Creating a search processor
+
+New search processors can be created in two different ways.
+
+Generally, a search processor can be created in your own `SearchPipelinePlugin`. This method is best for when you are creating a unique search
+processor for your niche application. This method should also be used when your processor relies on an outside service. To get started creating a search processor in a `SearchPipelinePlugin`, you can use the [plugin template](https://github.com/opensearch-project/opensearch-plugin-template-java).
+
+Alternatively, if you think your processor may be valuable to _all_ OpenSearch users you can follow these steps:
+
+1. Create a new class in `org.opensearch.search.pipeline.common`, this class will hold your new processor and should include whether it is a request or response processor. For example, a response processor which deleted a target field could be called `DeleteFieldResponseProcessor`.
+
+2. Make the class extend the generic `AbstractProcessor` class as well as implement either the `SearchRequestProcessor` or `SearchResponseProcessor` class depending on what type of processor it is. In the `DeleteFieldResponseProcessor` example, this would look like:
+
+```public class DeleteFieldResponseProcessor extends AbstractProcessor implements SearchResponseProcessor```
+
+3. Create the main functionality of your processor and implement the methods required by the implemented interface. This will be `SearchRequest processRequest(SearchRequest request) throws Exception;` for a search request processor or `SearchResponse processResponse(SearchRequest request, SearchResponse response) throws Exception;` for a search response processor.
+
+For the example `DeleteFieldResponseProcessor`, this will look like:
+
+```
+@Override
+public SearchResponse processResponse(SearchRequest request, SearchResponse response) throws Exception {
+
+ boolean foundField = false;
+ SearchHit[] hits = response.getHits().getHits();
+ for (SearchHit hit : hits) {
+
+ // Process each hit as desired
+
+ if (hit.hasSource()) {
+ // Change hit source if needed
+ );
+
+ Map sourceAsMap = typeAndSourceMap.v2();
+ if (sourceAsMap.containsKey(field)) {
+ // Handle source as map
+ }
+ }
+
+ if (!foundField && !ignoreMissing) {
+ // Handle error scenarios
+ }
+
+ return response;
+}
+```
+
+4. Create a factory to parse processor-specific JSON configurations. These are used for constructing a processor instance.
+
+In the `DeleteFieldResponseProcessor`, this would look something like:
+
+```
+public static final class Factory implements Processor.Factory {
+
+ /**
+ * Constructor for factory
+ */
+ Factory() {}
+
+ @Override
+ public DeleteFieldResponseProcessor create(
+ Map> processorFactories,
+ String tag,
+ String description,
+ boolean ignoreFailure,
+ Map config,
+ PipelineContext pipelineContext
+ ) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
+ boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, tag, config, "ignore_missing", false);
+ return new DeleteFieldResponseProcessor(tag, description, ignoreFailure, field, ignoreMissing);
+ }
+}
+```
+
+In this example, we provide specific configurations for which field should be deleted and whether the processor should ignore attempts to remove a non-existent field.
+
+5. Add the newly added search processor to the `SearchPipelineCommonModulePlugin` getter for the corresponding processor type.
+
+For the `DeleteFieldResponseProcessor`, you would modify the response processor getter to have:
+
+```
+@Override
+public Map> getResponseProcessors(Parameters parameters) {
+ return Map.of(
+ RenameFieldResponseProcessor.TYPE,
+ new RenameFieldResponseProcessor.Factory(),
+ DeleteFieldResponseProcessor.TYPE,
+ new DeleteFieldResponseProcessor.Factory()
+ );
+}
+```
+
+6. After creating a search processor, the processor is ready to be tested in a search pipeline.
+
+To test your new search processor, you can make use of the test [`SearchPipelineCommonYamlTestSuiteIT`](src/yamlRestTest/java/org/opensearch/search/pipeline/common).
+
+Following the format of the YAML files in [`rest-api-spec.test.search_pipeline`](src/yamlRestTest/resources/rest-api-spec/test/search_pipeline), you should be able to create your own YAML test file to exercise your new processor.
+
+To run the tests, from the root of the OpenSearch repository, you can run `./gradlew :modules:search-pipeline-common:yamlRestTest`.
+
+7. Finally, the processor is ready to be used in a cluster.
+
+To use the new processor, make sure the cluster is reloaded and that the new processor is accessible.
+
+The new processor should show when calling `GET /_nodes/search_pipelines`.
+
+If the new processor is shown in the cURL response, the new processor should be available for use in a search pipeline.
+
+## Creating a Search Pipeline
+
+To create a search pipeline, you must create an ordered list of search processors in the OpenSearch cluster.
+
+An example creation request is:
+
+```
+PUT /_search/pipeline/my_pipeline
+{
+ "request_processors": [
+ {
+ "filter_query" : {
+ "tag" : "tag1",
+ "description" : "This processor is going to restrict to publicly visible documents",
+ "query" : {
+ "term": {
+ "visibility": "public"
+ }
+ }
+ }
+ }
+ ],
+ "response_processors": [
+ {
+ "rename_field": {
+ "field": "message",
+ "target_field": "notification"
+ }
+ }
+ ]
+}
+```
+
+Alternatively, if you want to use just the `DeleteFieldResponseProcessor` created before, you would use:
+
+```
+PUT /_search/pipeline/my_pipeline2
+
+{
+ "response_processors": [
+ {
+ "delete_field": {
+ "field": "message"
+ }
+ }
+ ]
+}
+```
+
+## Running a search request using a search pipeline
+
+To run a search request using a search pipeline, you first need to create the pipeline using the request format shown above.
+
+After that is completed, you can run a request using the format: `POST /myindex/_search?search_pipeline=`.
+
+In the example of the `DeleteFieldResponseProcessor` this would be called with `POST /myindex/_search?search_pipeline=my_pipeline2`.
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/BasicMap.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/BasicMap.java
new file mode 100644
index 0000000000000..6ddc22420416b
--- /dev/null
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/BasicMap.java
@@ -0,0 +1,126 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import java.util.Collection;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+
+/**
+ * Helper for map abstractions passed to scripting processors. Throws {@link UnsupportedOperationException} for almost
+ * all methods. Subclasses just need to implement get and put.
+ */
+abstract class BasicMap implements Map {
+
+ /**
+ * No-args constructor.
+ */
+ protected BasicMap() {}
+
+ private static final String UNSUPPORTED_OP_ERR = " Method not supported in Search pipeline script";
+
+ @Override
+ public boolean isEmpty() {
+ throw new UnsupportedOperationException("isEmpty" + UNSUPPORTED_OP_ERR);
+ }
+
+ public int size() {
+ throw new UnsupportedOperationException("size" + UNSUPPORTED_OP_ERR);
+ }
+
+ public boolean containsKey(Object key) {
+ return get(key) != null;
+ }
+
+ public boolean containsValue(Object value) {
+ throw new UnsupportedOperationException("containsValue" + UNSUPPORTED_OP_ERR);
+ }
+
+ public Object remove(Object key) {
+ throw new UnsupportedOperationException("remove" + UNSUPPORTED_OP_ERR);
+ }
+
+ public void putAll(Map extends String, ?> m) {
+ throw new UnsupportedOperationException("putAll" + UNSUPPORTED_OP_ERR);
+ }
+
+ public void clear() {
+ throw new UnsupportedOperationException("clear" + UNSUPPORTED_OP_ERR);
+ }
+
+ public Set keySet() {
+ throw new UnsupportedOperationException("keySet" + UNSUPPORTED_OP_ERR);
+ }
+
+ public Collection values() {
+ throw new UnsupportedOperationException("values" + UNSUPPORTED_OP_ERR);
+ }
+
+ public Set> entrySet() {
+ throw new UnsupportedOperationException("entrySet" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public Object getOrDefault(Object key, Object defaultValue) {
+ throw new UnsupportedOperationException("getOrDefault" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public void forEach(BiConsumer super String, ? super Object> action) {
+ throw new UnsupportedOperationException("forEach" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public void replaceAll(BiFunction super String, ? super Object, ?> function) {
+ throw new UnsupportedOperationException("replaceAll" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public Object putIfAbsent(String key, Object value) {
+ throw new UnsupportedOperationException("putIfAbsent" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public boolean remove(Object key, Object value) {
+ throw new UnsupportedOperationException("remove" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public boolean replace(String key, Object oldValue, Object newValue) {
+ throw new UnsupportedOperationException("replace" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public Object replace(String key, Object value) {
+ throw new UnsupportedOperationException("replace" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public Object computeIfAbsent(String key, Function super String, ?> mappingFunction) {
+ throw new UnsupportedOperationException("computeIfAbsent" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public Object computeIfPresent(String key, BiFunction super String, ? super Object, ?> remappingFunction) {
+ throw new UnsupportedOperationException("computeIfPresent" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public Object compute(String key, BiFunction super String, ? super Object, ?> remappingFunction) {
+ throw new UnsupportedOperationException("compute" + UNSUPPORTED_OP_ERR);
+ }
+
+ @Override
+ public Object merge(String key, Object value, BiFunction super Object, ? super Object, ?> remappingFunction) {
+ throw new UnsupportedOperationException("merge" + UNSUPPORTED_OP_ERR);
+ }
+}
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/CollapseResponseProcessor.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/CollapseResponseProcessor.java
new file mode 100644
index 0000000000000..3e6c4fef6a559
--- /dev/null
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/CollapseResponseProcessor.java
@@ -0,0 +1,122 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.common.document.DocumentField;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.search.SearchHit;
+import org.opensearch.search.SearchHits;
+import org.opensearch.search.pipeline.AbstractProcessor;
+import org.opensearch.search.pipeline.Processor;
+import org.opensearch.search.pipeline.SearchResponseProcessor;
+import org.opensearch.search.pipeline.common.helpers.SearchResponseUtil;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A simple implementation of field collapsing on search responses. Note that this is not going to work as well as
+ * field collapsing at the shard level, as implemented with the "collapse" parameter in a search request. Mostly
+ * just using this to demo the oversample / truncate_hits processors.
+ */
+public class CollapseResponseProcessor extends AbstractProcessor implements SearchResponseProcessor {
+ /**
+ * Key to reference this processor type from a search pipeline.
+ */
+ public static final String TYPE = "collapse";
+ static final String COLLAPSE_FIELD = "field";
+ private final String collapseField;
+
+ private CollapseResponseProcessor(String tag, String description, boolean ignoreFailure, String collapseField) {
+ super(tag, description, ignoreFailure);
+ this.collapseField = Objects.requireNonNull(collapseField);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ @Override
+ public SearchResponse processResponse(SearchRequest request, SearchResponse response) {
+
+ if (response.getHits() != null) {
+ if (response.getHits().getCollapseField() != null) {
+ throw new IllegalStateException(
+ "Cannot collapse on " + collapseField + ". Results already collapsed on " + response.getHits().getCollapseField()
+ );
+ }
+ Map collapsedHits = new LinkedHashMap<>();
+ List collapseValues = new ArrayList<>();
+ for (SearchHit hit : response.getHits()) {
+ Object fieldValue = null;
+ DocumentField docField = hit.getFields().get(collapseField);
+ if (docField != null) {
+ if (docField.getValues().size() > 1) {
+ throw new IllegalStateException(
+ "Failed to collapse " + hit.getId() + ": doc has multiple values for field " + collapseField
+ );
+ }
+ fieldValue = docField.getValues().get(0);
+ } else if (hit.getSourceAsMap() != null) {
+ fieldValue = hit.getSourceAsMap().get(collapseField);
+ }
+ String fieldValueString;
+ if (fieldValue == null) {
+ fieldValueString = "__missing__";
+ } else {
+ fieldValueString = fieldValue.toString();
+ }
+
+ // Results are already sorted by sort criterion. Only keep the first hit for each field.
+ if (collapsedHits.containsKey(fieldValueString) == false) {
+ collapsedHits.put(fieldValueString, hit);
+ collapseValues.add(fieldValue);
+ }
+ }
+ SearchHit[] newHits = new SearchHit[collapsedHits.size()];
+ int i = 0;
+ for (SearchHit collapsedHit : collapsedHits.values()) {
+ newHits[i++] = collapsedHit;
+ }
+ SearchHits searchHits = new SearchHits(
+ newHits,
+ response.getHits().getTotalHits(),
+ response.getHits().getMaxScore(),
+ response.getHits().getSortFields(),
+ collapseField,
+ collapseValues.toArray()
+ );
+ return SearchResponseUtil.replaceHits(searchHits, response);
+ }
+ return response;
+ }
+
+ static class Factory implements Processor.Factory {
+
+ @Override
+ public CollapseResponseProcessor create(
+ Map> processorFactories,
+ String tag,
+ String description,
+ boolean ignoreFailure,
+ Map config,
+ PipelineContext pipelineContext
+ ) {
+ String collapseField = ConfigurationUtils.readStringProperty(TYPE, tag, config, COLLAPSE_FIELD);
+ return new CollapseResponseProcessor(tag, description, ignoreFailure, collapseField);
+ }
+ }
+
+}
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/OversampleRequestProcessor.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/OversampleRequestProcessor.java
new file mode 100644
index 0000000000000..182cf6ba79504
--- /dev/null
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/OversampleRequestProcessor.java
@@ -0,0 +1,83 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.search.SearchService;
+import org.opensearch.search.pipeline.AbstractProcessor;
+import org.opensearch.search.pipeline.PipelineProcessingContext;
+import org.opensearch.search.pipeline.Processor;
+import org.opensearch.search.pipeline.SearchRequestProcessor;
+import org.opensearch.search.pipeline.StatefulSearchRequestProcessor;
+import org.opensearch.search.pipeline.common.helpers.ContextUtils;
+
+import java.util.Map;
+
+import static org.opensearch.search.pipeline.common.helpers.ContextUtils.applyContextPrefix;
+
+/**
+ * Multiplies the "size" parameter on the {@link SearchRequest} by the given scaling factor, storing the original value
+ * in the request context as "original_size".
+ */
+public class OversampleRequestProcessor extends AbstractProcessor implements StatefulSearchRequestProcessor {
+
+ /**
+ * Key to reference this processor type from a search pipeline.
+ */
+ public static final String TYPE = "oversample";
+ static final String SAMPLE_FACTOR = "sample_factor";
+ static final String ORIGINAL_SIZE = "original_size";
+ private final double sampleFactor;
+ private final String contextPrefix;
+
+ private OversampleRequestProcessor(String tag, String description, boolean ignoreFailure, double sampleFactor, String contextPrefix) {
+ super(tag, description, ignoreFailure);
+ this.sampleFactor = sampleFactor;
+ this.contextPrefix = contextPrefix;
+ }
+
+ @Override
+ public SearchRequest processRequest(SearchRequest request, PipelineProcessingContext requestContext) {
+ if (request.source() != null) {
+ int originalSize = request.source().size();
+ if (originalSize == -1) {
+ originalSize = SearchService.DEFAULT_SIZE;
+ }
+ requestContext.setAttribute(applyContextPrefix(contextPrefix, ORIGINAL_SIZE), originalSize);
+ int newSize = (int) Math.ceil(originalSize * sampleFactor);
+ request.source().size(newSize);
+ }
+ return request;
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ static class Factory implements Processor.Factory {
+ @Override
+ public OversampleRequestProcessor create(
+ Map> processorFactories,
+ String tag,
+ String description,
+ boolean ignoreFailure,
+ Map config,
+ PipelineContext pipelineContext
+ ) {
+ double sampleFactor = ConfigurationUtils.readDoubleProperty(TYPE, tag, config, SAMPLE_FACTOR);
+ if (sampleFactor < 1.0) {
+ throw ConfigurationUtils.newConfigurationException(TYPE, tag, SAMPLE_FACTOR, "Value must be >= 1.0");
+ }
+ String contextPrefix = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, ContextUtils.CONTEXT_PREFIX_PARAMETER);
+ return new OversampleRequestProcessor(tag, description, ignoreFailure, sampleFactor, contextPrefix);
+ }
+ }
+}
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/ScriptRequestProcessor.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/ScriptRequestProcessor.java
index 90f71fd1754e4..a4052d0892ee6 100644
--- a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/ScriptRequestProcessor.java
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/ScriptRequestProcessor.java
@@ -23,9 +23,10 @@
import org.opensearch.script.ScriptType;
import org.opensearch.script.SearchScript;
import org.opensearch.search.pipeline.AbstractProcessor;
+import org.opensearch.search.pipeline.PipelineProcessingContext;
import org.opensearch.search.pipeline.Processor;
import org.opensearch.search.pipeline.SearchRequestProcessor;
-import org.opensearch.search.pipeline.common.helpers.SearchRequestMap;
+import org.opensearch.search.pipeline.StatefulSearchRequestProcessor;
import java.io.InputStream;
import java.util.HashMap;
@@ -38,7 +39,7 @@
* Processor that evaluates a script with a search request in its context
* and then returns the modified search request.
*/
-public final class ScriptRequestProcessor extends AbstractProcessor implements SearchRequestProcessor {
+public final class ScriptRequestProcessor extends AbstractProcessor implements StatefulSearchRequestProcessor {
/**
* Key to reference this processor type from a search pipeline.
*/
@@ -72,15 +73,8 @@ public final class ScriptRequestProcessor extends AbstractProcessor implements S
this.scriptService = scriptService;
}
- /**
- * Executes the script with the search request in context.
- *
- * @param request The search request passed into the script context.
- * @return The modified search request.
- * @throws Exception if an error occurs while processing the request.
- */
@Override
- public SearchRequest processRequest(SearchRequest request) throws Exception {
+ public SearchRequest processRequest(SearchRequest request, PipelineProcessingContext requestContext) throws Exception {
// assert request is not null and source is not null
if (request == null || request.source() == null) {
throw new IllegalArgumentException("search request must not be null");
@@ -93,10 +87,33 @@ public SearchRequest processRequest(SearchRequest request) throws Exception {
searchScript = precompiledSearchScript;
}
// execute the script with the search request in context
- searchScript.execute(Map.of("_source", new SearchRequestMap(request)));
+ searchScript.execute(Map.of("_source", new SearchRequestMap(request), "request_context", new RequestContextMap(requestContext)));
return request;
}
+ private static class RequestContextMap extends BasicMap {
+ private final PipelineProcessingContext pipelinedRequestContext;
+
+ private RequestContextMap(PipelineProcessingContext pipelinedRequestContext) {
+ this.pipelinedRequestContext = pipelinedRequestContext;
+ }
+
+ @Override
+ public Object get(Object key) {
+ if (key instanceof String) {
+ return pipelinedRequestContext.getAttribute(key.toString());
+ }
+ return null;
+ }
+
+ @Override
+ public Object put(String key, Object value) {
+ Object originalValue = get(key);
+ pipelinedRequestContext.setAttribute(key, value);
+ return originalValue;
+ }
+ }
+
/**
* Returns the type of the processor.
*
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java
index 49681b80fdead..5378a6721efb2 100644
--- a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java
@@ -38,12 +38,21 @@ public Map> getRequestProcesso
FilterQueryRequestProcessor.TYPE,
new FilterQueryRequestProcessor.Factory(parameters.namedXContentRegistry),
ScriptRequestProcessor.TYPE,
- new ScriptRequestProcessor.Factory(parameters.scriptService)
+ new ScriptRequestProcessor.Factory(parameters.scriptService),
+ OversampleRequestProcessor.TYPE,
+ new OversampleRequestProcessor.Factory()
);
}
@Override
public Map> getResponseProcessors(Parameters parameters) {
- return Map.of(RenameFieldResponseProcessor.TYPE, new RenameFieldResponseProcessor.Factory());
+ return Map.of(
+ RenameFieldResponseProcessor.TYPE,
+ new RenameFieldResponseProcessor.Factory(),
+ TruncateHitsResponseProcessor.TYPE,
+ new TruncateHitsResponseProcessor.Factory(),
+ CollapseResponseProcessor.TYPE,
+ new CollapseResponseProcessor.Factory()
+ );
}
}
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchRequestMap.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchRequestMap.java
new file mode 100644
index 0000000000000..c6430b96dcbed
--- /dev/null
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchRequestMap.java
@@ -0,0 +1,140 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.search.builder.SearchSourceBuilder;
+
+import java.util.Map;
+
+/**
+ * A custom implementation of {@link Map} that provides access to the properties of a {@link SearchRequest}'s
+ * {@link SearchSourceBuilder}. The class allows retrieving and modifying specific properties of the search request.
+ */
+class SearchRequestMap extends BasicMap implements Map {
+
+ private final SearchSourceBuilder source;
+
+ /**
+ * Constructs a new instance of the {@link SearchRequestMap} with the provided {@link SearchRequest}.
+ *
+ * @param searchRequest The SearchRequest containing the SearchSourceBuilder to be accessed.
+ */
+ public SearchRequestMap(SearchRequest searchRequest) {
+ source = searchRequest.source();
+ }
+
+ /**
+ * Checks if the SearchSourceBuilder is empty.
+ *
+ * @return {@code true} if the SearchSourceBuilder is empty, {@code false} otherwise.
+ */
+ @Override
+ public boolean isEmpty() {
+ return source == null;
+ }
+
+ /**
+ * Retrieves the value associated with the specified property from the SearchSourceBuilder.
+ *
+ * @param key The SearchSourceBuilder property whose value is to be retrieved.
+ * @return The value associated with the specified property or null if the property has not been initialized.
+ * @throws IllegalArgumentException if the property name is not a String.
+ * @throws SearchRequestMapProcessingException if the property is not supported.
+ */
+ @Override
+ public Object get(Object key) {
+ if (!(key instanceof String)) {
+ throw new IllegalArgumentException("key must be a String");
+ }
+ // This is the explicit implementation of fetch value from source
+ switch ((String) key) {
+ case "from":
+ return source.from();
+ case "size":
+ return source.size();
+ case "explain":
+ return source.explain();
+ case "version":
+ return source.version();
+ case "seq_no_primary_term":
+ return source.seqNoAndPrimaryTerm();
+ case "track_scores":
+ return source.trackScores();
+ case "track_total_hits":
+ return source.trackTotalHitsUpTo();
+ case "min_score":
+ return source.minScore();
+ case "terminate_after":
+ return source.terminateAfter();
+ case "profile":
+ return source.profile();
+ default:
+ throw new SearchRequestMapProcessingException("Unsupported key: " + key);
+ }
+ }
+
+ /**
+ * Sets the value for the specified property in the SearchSourceBuilder.
+ *
+ * @param key The property whose value is to be set.
+ * @param value The value to be set for the specified property.
+ * @return The original value associated with the property, or null if none existed.
+ * @throws IllegalArgumentException if the property is not a String.
+ * @throws SearchRequestMapProcessingException if the property is not supported or an error occurs during the setting.
+ */
+ @Override
+ public Object put(String key, Object value) {
+ Object originalValue = get(key);
+ try {
+ switch (key) {
+ case "from":
+ source.from((Integer) value);
+ break;
+ case "size":
+ source.size((Integer) value);
+ break;
+ case "explain":
+ source.explain((Boolean) value);
+ break;
+ case "version":
+ source.version((Boolean) value);
+ break;
+ case "seq_no_primary_term":
+ source.seqNoAndPrimaryTerm((Boolean) value);
+ break;
+ case "track_scores":
+ source.trackScores((Boolean) value);
+ break;
+ case "track_total_hits":
+ source.trackTotalHitsUpTo((Integer) value);
+ break;
+ case "min_score":
+ source.minScore((Float) value);
+ break;
+ case "terminate_after":
+ source.terminateAfter((Integer) value);
+ break;
+ case "profile":
+ source.profile((Boolean) value);
+ break;
+ case "stats": // Not modifying stats, sorts, docvalue_fields, etc. as they require more complex handling
+ case "sort":
+ case "timeout":
+ case "docvalue_fields":
+ case "indices_boost":
+ default:
+ throw new SearchRequestMapProcessingException("Unsupported SearchRequest source property: " + key);
+ }
+ } catch (Exception e) {
+ throw new SearchRequestMapProcessingException("Error while setting value for SearchRequest source property: " + key, e);
+ }
+ return originalValue;
+ }
+}
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMapProcessingException.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchRequestMapProcessingException.java
similarity index 76%
rename from modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMapProcessingException.java
rename to modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchRequestMapProcessingException.java
index cb1e45a20b624..2f00d0f82c2f1 100644
--- a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMapProcessingException.java
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchRequestMapProcessingException.java
@@ -6,7 +6,7 @@
* compatible open source license.
*/
-package org.opensearch.search.pipeline.common.helpers;
+package org.opensearch.search.pipeline.common;
import org.opensearch.OpenSearchException;
import org.opensearch.OpenSearchWrapperException;
@@ -14,12 +14,12 @@
/**
* An exception that indicates an error occurred while processing a {@link SearchRequestMap}.
*/
-public class SearchRequestMapProcessingException extends OpenSearchException implements OpenSearchWrapperException {
+class SearchRequestMapProcessingException extends OpenSearchException implements OpenSearchWrapperException {
/**
* Constructs a new SearchRequestMapProcessingException with the specified message.
*
- * @param msg The error message.
+ * @param msg The error message.
* @param args Arguments to substitute in the error message.
*/
public SearchRequestMapProcessingException(String msg, Object... args) {
@@ -29,9 +29,9 @@ public SearchRequestMapProcessingException(String msg, Object... args) {
/**
* Constructs a new SearchRequestMapProcessingException with the specified message and cause.
*
- * @param msg The error message.
+ * @param msg The error message.
* @param cause The cause of the exception.
- * @param args Arguments to substitute in the error message.
+ * @param args Arguments to substitute in the error message.
*/
public SearchRequestMapProcessingException(String msg, Throwable cause, Object... args) {
super(msg, cause, args);
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/TruncateHitsResponseProcessor.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/TruncateHitsResponseProcessor.java
new file mode 100644
index 0000000000000..e3413bf41720f
--- /dev/null
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/TruncateHitsResponseProcessor.java
@@ -0,0 +1,96 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.search.SearchHit;
+import org.opensearch.search.pipeline.AbstractProcessor;
+import org.opensearch.search.pipeline.PipelineProcessingContext;
+import org.opensearch.search.pipeline.Processor;
+import org.opensearch.search.pipeline.SearchResponseProcessor;
+import org.opensearch.search.pipeline.StatefulSearchResponseProcessor;
+import org.opensearch.search.pipeline.common.helpers.ContextUtils;
+import org.opensearch.search.pipeline.common.helpers.SearchResponseUtil;
+
+import java.util.Map;
+
+import static org.opensearch.search.pipeline.common.helpers.ContextUtils.applyContextPrefix;
+
+/**
+ * Truncates the returned search hits from the {@link SearchResponse}. If no target size is specified in the pipeline, then
+ * we try using the "original_size" value from the request context, which may have been set by {@link OversampleRequestProcessor}.
+ */
+public class TruncateHitsResponseProcessor extends AbstractProcessor implements StatefulSearchResponseProcessor {
+ /**
+ * Key to reference this processor type from a search pipeline.
+ */
+ public static final String TYPE = "truncate_hits";
+ static final String TARGET_SIZE = "target_size";
+ private final int targetSize;
+ private final String contextPrefix;
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ private TruncateHitsResponseProcessor(String tag, String description, boolean ignoreFailure, int targetSize, String contextPrefix) {
+ super(tag, description, ignoreFailure);
+ this.targetSize = targetSize;
+ this.contextPrefix = contextPrefix;
+ }
+
+ @Override
+ public SearchResponse processResponse(SearchRequest request, SearchResponse response, PipelineProcessingContext requestContext) {
+ int size;
+ if (targetSize < 0) { // No value specified in processor config. Use context value instead.
+ String key = applyContextPrefix(contextPrefix, OversampleRequestProcessor.ORIGINAL_SIZE);
+ Object o = requestContext.getAttribute(key);
+ if (o == null) {
+ throw new IllegalStateException("Must specify " + TARGET_SIZE + " unless an earlier processor set " + key);
+ }
+ size = (int) o;
+ } else {
+ size = targetSize;
+ }
+ if (response.getHits() != null && response.getHits().getHits().length > size) {
+ SearchHit[] newHits = new SearchHit[size];
+ System.arraycopy(response.getHits().getHits(), 0, newHits, 0, size);
+ return SearchResponseUtil.replaceHits(newHits, response);
+ }
+ return response;
+ }
+
+ static class Factory implements Processor.Factory {
+ @Override
+ public TruncateHitsResponseProcessor create(
+ Map> processorFactories,
+ String tag,
+ String description,
+ boolean ignoreFailure,
+ Map config,
+ PipelineContext pipelineContext
+ ) {
+ Integer targetSize = ConfigurationUtils.readIntProperty(TYPE, tag, config, TARGET_SIZE, null);
+ if (targetSize == null) {
+ // Use -1 as an "unset" marker to avoid repeated unboxing of an Integer.
+ targetSize = -1;
+ } else {
+ // Explicitly set values must be >= 0.
+ if (targetSize < 0) {
+ throw ConfigurationUtils.newConfigurationException(TYPE, tag, TARGET_SIZE, "Value must be >= 0");
+ }
+ }
+ String contextPrefix = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, ContextUtils.CONTEXT_PREFIX_PARAMETER);
+ return new TruncateHitsResponseProcessor(tag, description, ignoreFailure, targetSize, contextPrefix);
+ }
+ }
+}
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/ContextUtils.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/ContextUtils.java
new file mode 100644
index 0000000000000..9697da85dbecf
--- /dev/null
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/ContextUtils.java
@@ -0,0 +1,38 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common.helpers;
+
+/**
+ * Helpers for working with request-scoped context.
+ */
+public final class ContextUtils {
+ private ContextUtils() {}
+
+ /**
+ * Parameter that can be passed to a stateful processor to avoid collisions between contextual variables by
+ * prefixing them with distinct qualifiers.
+ */
+ public static final String CONTEXT_PREFIX_PARAMETER = "context_prefix";
+
+ /**
+ * Replaces a "global" variable name with one scoped to a given context prefix (unless prefix is null or empty).
+ * @param contextPrefix the prefix qualifier for the variable
+ * @param variableName the generic "global" form of the context variable
+ * @return the variableName prefixed with contextPrefix followed by ".", or just variableName if contextPrefix is null or empty
+ */
+ public static String applyContextPrefix(String contextPrefix, String variableName) {
+ String contextVariable;
+ if (contextPrefix != null && contextPrefix.isEmpty() == false) {
+ contextVariable = contextPrefix + "." + variableName;
+ } else {
+ contextVariable = variableName;
+ }
+ return contextVariable;
+ }
+}
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMap.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMap.java
deleted file mode 100644
index 7af3ac66be146..0000000000000
--- a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMap.java
+++ /dev/null
@@ -1,395 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch.search.pipeline.common.helpers;
-
-import org.opensearch.action.search.SearchRequest;
-import org.opensearch.search.builder.SearchSourceBuilder;
-
-import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.BiConsumer;
-import java.util.function.BiFunction;
-import java.util.function.Function;
-
-/**
- * A custom implementation of {@link Map} that provides access to the properties of a {@link SearchRequest}'s
- * {@link SearchSourceBuilder}. The class allows retrieving and modifying specific properties of the search request.
- */
-public class SearchRequestMap implements Map {
- private static final String UNSUPPORTED_OP_ERR = " Method not supported in Search pipeline script";
-
- private final SearchSourceBuilder source;
-
- /**
- * Constructs a new instance of the {@link SearchRequestMap} with the provided {@link SearchRequest}.
- *
- * @param searchRequest The SearchRequest containing the SearchSourceBuilder to be accessed.
- */
- public SearchRequestMap(SearchRequest searchRequest) {
- source = searchRequest.source();
- }
-
- /**
- * Retrieves the number of properties in the SearchSourceBuilder.
- *
- * @return The number of properties in the SearchSourceBuilder.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public int size() {
- throw new UnsupportedOperationException("size" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Checks if the SearchSourceBuilder is empty.
- *
- * @return {@code true} if the SearchSourceBuilder is empty, {@code false} otherwise.
- */
- @Override
- public boolean isEmpty() {
- return source == null;
- }
-
- /**
- * Checks if the SearchSourceBuilder contains the specified property.
- *
- * @param key The property to check for.
- * @return {@code true} if the SearchSourceBuilder contains the specified property, {@code false} otherwise.
- */
- @Override
- public boolean containsKey(Object key) {
- return get(key) != null;
- }
-
- /**
- * Checks if the SearchSourceBuilder contains the specified value.
- *
- * @param value The value to check for.
- * @return {@code true} if the SearchSourceBuilder contains the specified value, {@code false} otherwise.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public boolean containsValue(Object value) {
- throw new UnsupportedOperationException("containsValue" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Retrieves the value associated with the specified property from the SearchSourceBuilder.
- *
- * @param key The SearchSourceBuilder property whose value is to be retrieved.
- * @return The value associated with the specified property or null if the property has not been initialized.
- * @throws IllegalArgumentException if the property name is not a String.
- * @throws SearchRequestMapProcessingException if the property is not supported.
- */
- @Override
- public Object get(Object key) {
- if (!(key instanceof String)) {
- throw new IllegalArgumentException("key must be a String");
- }
- // This is the explicit implementation of fetch value from source
- switch ((String) key) {
- case "from":
- return source.from();
- case "size":
- return source.size();
- case "explain":
- return source.explain();
- case "version":
- return source.version();
- case "seq_no_primary_term":
- return source.seqNoAndPrimaryTerm();
- case "track_scores":
- return source.trackScores();
- case "track_total_hits":
- return source.trackTotalHitsUpTo();
- case "min_score":
- return source.minScore();
- case "terminate_after":
- return source.terminateAfter();
- case "profile":
- return source.profile();
- default:
- throw new SearchRequestMapProcessingException("Unsupported key: " + key);
- }
- }
-
- /**
- * Sets the value for the specified property in the SearchSourceBuilder.
- *
- * @param key The property whose value is to be set.
- * @param value The value to be set for the specified property.
- * @return The original value associated with the property, or null if none existed.
- * @throws IllegalArgumentException if the property is not a String.
- * @throws SearchRequestMapProcessingException if the property is not supported or an error occurs during the setting.
- */
- @Override
- public Object put(String key, Object value) {
- Object originalValue = get(key);
- try {
- switch (key) {
- case "from":
- source.from((Integer) value);
- break;
- case "size":
- source.size((Integer) value);
- break;
- case "explain":
- source.explain((Boolean) value);
- break;
- case "version":
- source.version((Boolean) value);
- break;
- case "seq_no_primary_term":
- source.seqNoAndPrimaryTerm((Boolean) value);
- break;
- case "track_scores":
- source.trackScores((Boolean) value);
- break;
- case "track_total_hits":
- source.trackTotalHitsUpTo((Integer) value);
- break;
- case "min_score":
- source.minScore((Float) value);
- break;
- case "terminate_after":
- source.terminateAfter((Integer) value);
- break;
- case "profile":
- source.profile((Boolean) value);
- break;
- case "stats": // Not modifying stats, sorts, docvalue_fields, etc. as they require more complex handling
- case "sort":
- case "timeout":
- case "docvalue_fields":
- case "indices_boost":
- default:
- throw new SearchRequestMapProcessingException("Unsupported SearchRequest source property: " + key);
- }
- } catch (Exception e) {
- throw new SearchRequestMapProcessingException("Error while setting value for SearchRequest source property: " + key, e);
- }
- return originalValue;
- }
-
- /**
- * Removes the specified property from the SearchSourceBuilder.
- *
- * @param key The name of the property that will be removed.
- * @return The value associated with the property before it was removed, or null if the property was not found.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Object remove(Object key) {
- throw new UnsupportedOperationException("remove" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Sets all the properties from the specified map to the SearchSourceBuilder.
- *
- * @param m The map containing the properties to be set.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public void putAll(Map<? extends String, ?> m) {
- throw new UnsupportedOperationException("putAll" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Removes all properties from the SearchSourceBuilder.
- *
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public void clear() {
- throw new UnsupportedOperationException("clear" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Returns a set view of the property names in the SearchSourceBuilder.
- *
- * @return A set view of the property names in the SearchSourceBuilder.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Set<String> keySet() {
- throw new UnsupportedOperationException("keySet" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Returns a collection view of the property values in the SearchSourceBuilder.
- *
- * @return A collection view of the property values in the SearchSourceBuilder.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Collection<Object> values() {
- throw new UnsupportedOperationException("values" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Returns a set view of the properties in the SearchSourceBuilder.
- *
- * @return A set view of the properties in the SearchSourceBuilder.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Set<Entry<String, Object>> entrySet() {
- throw new UnsupportedOperationException("entrySet" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Returns the value to which the specified property has, or the defaultValue if the property is not present in the
- * SearchSourceBuilder.
- *
- * @param key The property whose associated value is to be returned.
- * @param defaultValue The default value to be returned if the property is not present.
- * @return The value to which the specified property has, or the defaultValue if the property is not present.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Object getOrDefault(Object key, Object defaultValue) {
- throw new UnsupportedOperationException("getOrDefault" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Performs the given action for each property in the SearchSourceBuilder until all properties have been processed or the
- * action throws an exception
- *
- * @param action The action to be performed for each property.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public void forEach(BiConsumer<? super String, ? super Object> action) {
- throw new UnsupportedOperationException("forEach" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Replaces each property's value with the result of invoking the given function on that property until all properties have
- * been processed or the function throws an exception.
- *
- * @param function The function to apply to each property.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public void replaceAll(BiFunction<? super String, ? super Object, ?> function) {
- throw new UnsupportedOperationException("replaceAll" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * If the specified property is not already associated with a value, associates it with the given value and returns null,
- * else returns the current value.
- *
- * @param key The property whose value is to be set if absent.
- * @param value The value to be associated with the specified property.
- * @return The current value associated with the property, or null if the property is not present.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Object putIfAbsent(String key, Object value) {
- throw new UnsupportedOperationException("putIfAbsent" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Removes the property only if it has the given value.
- *
- * @param key The property to be removed.
- * @param value The value expected to be associated with the property.
- * @return {@code true} if the entry was removed, {@code false} otherwise.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public boolean remove(Object key, Object value) {
- throw new UnsupportedOperationException("remove" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Replaces the specified property only if it has the given value.
- *
- * @param key The property to be replaced.
- * @param oldValue The value expected to be associated with the property.
- * @param newValue The value to be associated with the property.
- * @return {@code true} if the property was replaced, {@code false} otherwise.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public boolean replace(String key, Object oldValue, Object newValue) {
- throw new UnsupportedOperationException("replace" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * Replaces the specified property only if it has the given value.
- *
- * @param key The property to be replaced.
- * @param value The value to be associated with the property.
- * @return The previous value associated with the property, or null if the property was not found.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Object replace(String key, Object value) {
- throw new UnsupportedOperationException("replace" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * The computed value associated with the property, or null if the property is not present.
- *
- * @param key The property whose value is to be computed if absent.
- * @param mappingFunction The function to compute a value based on the property.
- * @return The computed value associated with the property, or null if the property is not present.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Object computeIfAbsent(String key, Function<? super String, ?> mappingFunction) {
- throw new UnsupportedOperationException("computeIfAbsent" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * If the value for the specified property is present, attempts to compute a new mapping given the property and its current
- * mapped value.
- *
- * @param key The property for which the mapping is to be computed.
- * @param remappingFunction The function to compute a new mapping.
- * @return The new value associated with the property, or null if the property is not present.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Object computeIfPresent(String key, BiFunction<? super String, ? super Object, ?> remappingFunction) {
- throw new UnsupportedOperationException("computeIfPresent" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * If the value for the specified property is present, attempts to compute a new mapping given the property and its current
- * mapped value, or removes the property if the computed value is null.
- *
- * @param key The property for which the mapping is to be computed.
- * @param remappingFunction The function to compute a new mapping.
- * @return The new value associated with the property, or null if the property is not present.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Object compute(String key, BiFunction<? super String, ? super Object, ?> remappingFunction) {
- throw new UnsupportedOperationException("compute" + UNSUPPORTED_OP_ERR);
- }
-
- /**
- * If the specified property is not already associated with a value or is associated with null, associates it with the
- * given non-null value. Otherwise, replaces the associated value with the results of applying the given
- * remapping function to the current and new values.
- *
- * @param key The property for which the mapping is to be merged.
- * @param value The non-null value to be merged with the existing value.
- * @param remappingFunction The function to merge the existing and new values.
- * @return The new value associated with the property, or null if the property is not present.
- * @throws UnsupportedOperationException always, as the method is not supported.
- */
- @Override
- public Object merge(String key, Object value, BiFunction<? super Object, ? super Object, ?> remappingFunction) {
- throw new UnsupportedOperationException("merge" + UNSUPPORTED_OP_ERR);
- }
-}
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchResponseUtil.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchResponseUtil.java
new file mode 100644
index 0000000000000..0710548c6429f
--- /dev/null
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/helpers/SearchResponseUtil.java
@@ -0,0 +1,93 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common.helpers;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.action.search.SearchResponseSections;
+import org.opensearch.search.SearchHit;
+import org.opensearch.search.SearchHits;
+import org.opensearch.search.aggregations.InternalAggregations;
+import org.opensearch.search.internal.InternalSearchResponse;
+import org.opensearch.search.profile.SearchProfileShardResults;
+
+/**
+ * Helper methods for manipulating {@link SearchResponse}.
+ */
+public final class SearchResponseUtil {
+ private SearchResponseUtil() {
+
+ }
+
+ /**
+ * Construct a new {@link SearchResponse} based on an existing one, replacing just the {@link SearchHits}.
+ * @param newHits new {@link SearchHits}
+ * @param response the existing search response
+ * @return a new search response where the {@link SearchHits} has been replaced
+ */
+ public static SearchResponse replaceHits(SearchHits newHits, SearchResponse response) {
+ SearchResponseSections searchResponseSections;
+ if (response.getAggregations() == null || response.getAggregations() instanceof InternalAggregations) {
+ // We either have no aggregations, or we have Writeable InternalAggregations.
+ // Either way, we can produce a Writeable InternalSearchResponse.
+ searchResponseSections = new InternalSearchResponse(
+ newHits,
+ (InternalAggregations) response.getAggregations(),
+ response.getSuggest(),
+ new SearchProfileShardResults(response.getProfileResults()),
+ response.isTimedOut(),
+ response.isTerminatedEarly(),
+ response.getNumReducePhases()
+ );
+ } else {
+ // We have non-Writeable Aggregations, so the whole SearchResponseSections is non-Writeable.
+ searchResponseSections = new SearchResponseSections(
+ newHits,
+ response.getAggregations(),
+ response.getSuggest(),
+ response.isTimedOut(),
+ response.isTerminatedEarly(),
+ new SearchProfileShardResults(response.getProfileResults()),
+ response.getNumReducePhases()
+ );
+ }
+
+ return new SearchResponse(
+ searchResponseSections,
+ response.getScrollId(),
+ response.getTotalShards(),
+ response.getSuccessfulShards(),
+ response.getSkippedShards(),
+ response.getTook().millis(),
+ response.getShardFailures(),
+ response.getClusters(),
+ response.pointInTimeId()
+ );
+ }
+
+ /**
+ * Convenience method when only replacing the {@link SearchHit} array within the {@link SearchHits} in a {@link SearchResponse}.
+ * @param newHits the new array of {@link SearchHit} elements.
+ * @param response the search response to update
+ * @return a {@link SearchResponse} where the underlying array of {@link SearchHit} within the {@link SearchHits} has been replaced.
+ */
+ public static SearchResponse replaceHits(SearchHit[] newHits, SearchResponse response) {
+ if (response.getHits() == null) {
+ throw new IllegalStateException("Response must have hits");
+ }
+ SearchHits searchHits = new SearchHits(
+ newHits,
+ response.getHits().getTotalHits(),
+ response.getHits().getMaxScore(),
+ response.getHits().getSortFields(),
+ response.getHits().getCollapseField(),
+ response.getHits().getCollapseValues()
+ );
+ return replaceHits(searchHits, response);
+ }
+}
diff --git a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/CollapseResponseProcessorTests.java b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/CollapseResponseProcessorTests.java
new file mode 100644
index 0000000000000..cda011f24fea1
--- /dev/null
+++ b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/CollapseResponseProcessorTests.java
@@ -0,0 +1,86 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import org.apache.lucene.search.TotalHits;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.common.document.DocumentField;
+import org.opensearch.core.common.bytes.BytesArray;
+import org.opensearch.search.SearchHit;
+import org.opensearch.search.SearchHits;
+import org.opensearch.search.internal.InternalSearchResponse;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class CollapseResponseProcessorTests extends OpenSearchTestCase {
+ public void testWithDocumentFields() {
+ testProcessor(true);
+ }
+
+ public void testWithSourceField() {
+ testProcessor(false);
+ }
+
+ private void testProcessor(boolean includeDocField) {
+ Map<String, Object> config = new HashMap<>(Map.of(CollapseResponseProcessor.COLLAPSE_FIELD, "groupid"));
+ CollapseResponseProcessor processor = new CollapseResponseProcessor.Factory().create(
+ Collections.emptyMap(),
+ null,
+ null,
+ false,
+ config,
+ null
+ );
+ int numHits = randomIntBetween(1, 100);
+ SearchResponse inputResponse = generateResponse(numHits, includeDocField);
+
+ SearchResponse processedResponse = processor.processResponse(new SearchRequest(), inputResponse);
+ if (numHits % 2 == 0) {
+ assertEquals(numHits / 2, processedResponse.getHits().getHits().length);
+ } else {
+ assertEquals(numHits / 2 + 1, processedResponse.getHits().getHits().length);
+ }
+ for (SearchHit collapsedHit : processedResponse.getHits()) {
+ assertEquals(0, collapsedHit.docId() % 2);
+ }
+ assertEquals("groupid", processedResponse.getHits().getCollapseField());
+ assertEquals(processedResponse.getHits().getHits().length, processedResponse.getHits().getCollapseValues().length);
+ for (int i = 0; i < processedResponse.getHits().getHits().length; i++) {
+ assertEquals(i, processedResponse.getHits().getCollapseValues()[i]);
+ }
+ }
+
+ private static SearchResponse generateResponse(int numHits, boolean includeDocField) {
+ SearchHit[] hitsArray = new SearchHit[numHits];
+ for (int i = 0; i < numHits; i++) {
+ Map<String, DocumentField> docFields;
+ int groupValue = i / 2;
+ if (includeDocField) {
+ docFields = Map.of("groupid", new DocumentField("groupid", List.of(groupValue)));
+ } else {
+ docFields = Collections.emptyMap();
+ }
+ SearchHit hit = new SearchHit(i, Integer.toString(i), docFields, Collections.emptyMap());
+ hit.sourceRef(new BytesArray("{\"groupid\": " + groupValue + "}"));
+ hitsArray[i] = hit;
+ }
+ SearchHits searchHits = new SearchHits(
+ hitsArray,
+ new TotalHits(Math.max(numHits, 1000), TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO),
+ 1.0f
+ );
+ InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, null, false, false, 0);
+ return new SearchResponse(internalSearchResponse, null, 1, 1, 0, 10, null, null);
+ }
+}
diff --git a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/OversampleRequestProcessorTests.java b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/OversampleRequestProcessorTests.java
new file mode 100644
index 0000000000000..96e99dff9cc03
--- /dev/null
+++ b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/OversampleRequestProcessorTests.java
@@ -0,0 +1,62 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.search.builder.SearchSourceBuilder;
+import org.opensearch.search.pipeline.PipelineProcessingContext;
+import org.opensearch.search.pipeline.common.helpers.ContextUtils;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+public class OversampleRequestProcessorTests extends OpenSearchTestCase {
+
+ public void testEmptySource() {
+ OversampleRequestProcessor.Factory factory = new OversampleRequestProcessor.Factory();
+ Map<String, Object> config = new HashMap<>(Map.of(OversampleRequestProcessor.SAMPLE_FACTOR, 3.0));
+ OversampleRequestProcessor processor = factory.create(Collections.emptyMap(), null, null, false, config, null);
+
+ SearchRequest request = new SearchRequest();
+ PipelineProcessingContext context = new PipelineProcessingContext();
+ SearchRequest transformedRequest = processor.processRequest(request, context);
+ assertEquals(request, transformedRequest);
+ assertNull(context.getAttribute("original_size"));
+ }
+
+ public void testBasicBehavior() {
+ OversampleRequestProcessor.Factory factory = new OversampleRequestProcessor.Factory();
+ Map<String, Object> config = new HashMap<>(Map.of(OversampleRequestProcessor.SAMPLE_FACTOR, 3.0));
+ OversampleRequestProcessor processor = factory.create(Collections.emptyMap(), null, null, false, config, null);
+
+ SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(10);
+ SearchRequest request = new SearchRequest().source(sourceBuilder);
+ PipelineProcessingContext context = new PipelineProcessingContext();
+ SearchRequest transformedRequest = processor.processRequest(request, context);
+ assertEquals(30, transformedRequest.source().size());
+ assertEquals(10, context.getAttribute("original_size"));
+ }
+
+ public void testContextPrefix() {
+ OversampleRequestProcessor.Factory factory = new OversampleRequestProcessor.Factory();
+ Map<String, Object> config = new HashMap<>(
+ Map.of(OversampleRequestProcessor.SAMPLE_FACTOR, 3.0, ContextUtils.CONTEXT_PREFIX_PARAMETER, "foo")
+ );
+ OversampleRequestProcessor processor = factory.create(Collections.emptyMap(), null, null, false, config, null);
+
+ SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(10);
+ SearchRequest request = new SearchRequest().source(sourceBuilder);
+ PipelineProcessingContext context = new PipelineProcessingContext();
+ SearchRequest transformedRequest = processor.processRequest(request, context);
+ assertEquals(30, transformedRequest.source().size());
+ assertEquals(10, context.getAttribute("foo.original_size"));
+ }
+}
diff --git a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/ScriptRequestProcessorTests.java b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/ScriptRequestProcessorTests.java
index fde9757312e30..b372b220b71ac 100644
--- a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/ScriptRequestProcessorTests.java
+++ b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/ScriptRequestProcessorTests.java
@@ -18,7 +18,7 @@
import org.opensearch.script.ScriptType;
import org.opensearch.script.SearchScript;
import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.search.pipeline.common.helpers.SearchRequestMap;
+import org.opensearch.search.pipeline.PipelineProcessingContext;
import org.opensearch.test.OpenSearchTestCase;
import org.junit.Before;
@@ -27,8 +27,6 @@
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import static org.hamcrest.core.Is.is;
-
public class ScriptRequestProcessorTests extends OpenSearchTestCase {
private ScriptService scriptService;
@@ -87,7 +85,7 @@ public void testScriptingWithoutPrecompiledScriptFactory() throws Exception {
searchRequest.source(createSearchSourceBuilder());
assertNotNull(searchRequest);
- processor.processRequest(searchRequest);
+ processor.processRequest(searchRequest, new PipelineProcessingContext());
assertSearchRequest(searchRequest);
}
@@ -104,7 +102,7 @@ public void testScriptingWithPrecompiledIngestScript() throws Exception {
searchRequest.source(createSearchSourceBuilder());
assertNotNull(searchRequest);
- processor.processRequest(searchRequest);
+ processor.processRequest(searchRequest, new PipelineProcessingContext());
assertSearchRequest(searchRequest);
}
@@ -124,15 +122,15 @@ private SearchSourceBuilder createSearchSourceBuilder() {
}
private void assertSearchRequest(SearchRequest searchRequest) {
- assertThat(searchRequest.source().from(), is(20));
- assertThat(searchRequest.source().size(), is(30));
- assertThat(searchRequest.source().explain(), is(false));
- assertThat(searchRequest.source().version(), is(false));
- assertThat(searchRequest.source().seqNoAndPrimaryTerm(), is(false));
- assertThat(searchRequest.source().trackScores(), is(false));
- assertThat(searchRequest.source().trackTotalHitsUpTo(), is(4));
- assertThat(searchRequest.source().minScore(), is(2.0f));
- assertThat(searchRequest.source().timeout(), is(new TimeValue(60, TimeUnit.SECONDS)));
- assertThat(searchRequest.source().terminateAfter(), is(6));
+ assertEquals(20, searchRequest.source().from());
+ assertEquals(30, searchRequest.source().size());
+ assertFalse(searchRequest.source().explain());
+ assertFalse(searchRequest.source().version());
+ assertFalse(searchRequest.source().seqNoAndPrimaryTerm());
+ assertFalse(searchRequest.source().trackScores());
+ assertEquals(4, searchRequest.source().trackTotalHitsUpTo().intValue());
+ assertEquals(2.0f, searchRequest.source().minScore(), 0.0001);
+ assertEquals(new TimeValue(60, TimeUnit.SECONDS), searchRequest.source().timeout());
+ assertEquals(6, searchRequest.source().terminateAfter());
}
}
diff --git a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMapTests.java b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/SearchRequestMapTests.java
similarity index 99%
rename from modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMapTests.java
rename to modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/SearchRequestMapTests.java
index 5572f28335e1c..c982ada7b5ea5 100644
--- a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/helpers/SearchRequestMapTests.java
+++ b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/SearchRequestMapTests.java
@@ -5,7 +5,7 @@
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
-package org.opensearch.search.pipeline.common.helpers;
+package org.opensearch.search.pipeline.common;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.search.builder.SearchSourceBuilder;
diff --git a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/TruncateHitsResponseProcessorTests.java b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/TruncateHitsResponseProcessorTests.java
new file mode 100644
index 0000000000000..7615225c7f77e
--- /dev/null
+++ b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/TruncateHitsResponseProcessorTests.java
@@ -0,0 +1,91 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import org.apache.lucene.search.TotalHits;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.search.SearchHit;
+import org.opensearch.search.SearchHits;
+import org.opensearch.search.internal.InternalSearchResponse;
+import org.opensearch.search.pipeline.PipelineProcessingContext;
+import org.opensearch.search.pipeline.common.helpers.ContextUtils;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+public class TruncateHitsResponseProcessorTests extends OpenSearchTestCase {
+
+ public void testBasicBehavior() {
+ int targetSize = randomInt(50);
+ TruncateHitsResponseProcessor.Factory factory = new TruncateHitsResponseProcessor.Factory();
+ Map<String, Object> config = new HashMap<>(Map.of(TruncateHitsResponseProcessor.TARGET_SIZE, targetSize));
+ TruncateHitsResponseProcessor processor = factory.create(Collections.emptyMap(), null, null, false, config, null);
+
+ int numHits = randomInt(100);
+ SearchResponse response = constructResponse(numHits);
+ SearchResponse transformedResponse = processor.processResponse(new SearchRequest(), response, new PipelineProcessingContext());
+ assertEquals(Math.min(targetSize, numHits), transformedResponse.getHits().getHits().length);
+ }
+
+ public void testTargetSizePassedViaContext() {
+ TruncateHitsResponseProcessor.Factory factory = new TruncateHitsResponseProcessor.Factory();
+ TruncateHitsResponseProcessor processor = factory.create(Collections.emptyMap(), null, null, false, Collections.emptyMap(), null);
+
+ int targetSize = randomInt(50);
+ int numHits = randomInt(100);
+ SearchResponse response = constructResponse(numHits);
+ PipelineProcessingContext requestContext = new PipelineProcessingContext();
+ requestContext.setAttribute("original_size", targetSize);
+ SearchResponse transformedResponse = processor.processResponse(new SearchRequest(), response, requestContext);
+ assertEquals(Math.min(targetSize, numHits), transformedResponse.getHits().getHits().length);
+ }
+
+ public void testTargetSizePassedViaContextWithPrefix() {
+ TruncateHitsResponseProcessor.Factory factory = new TruncateHitsResponseProcessor.Factory();
+ Map<String, Object> config = new HashMap<>(Map.of(ContextUtils.CONTEXT_PREFIX_PARAMETER, "foo"));
+ TruncateHitsResponseProcessor processor = factory.create(Collections.emptyMap(), null, null, false, config, null);
+
+ int targetSize = randomInt(50);
+ int numHits = randomInt(100);
+ SearchResponse response = constructResponse(numHits);
+ PipelineProcessingContext requestContext = new PipelineProcessingContext();
+ requestContext.setAttribute("foo.original_size", targetSize);
+ SearchResponse transformedResponse = processor.processResponse(new SearchRequest(), response, requestContext);
+ assertEquals(Math.min(targetSize, numHits), transformedResponse.getHits().getHits().length);
+ }
+
+ public void testTargetSizeMissing() {
+ TruncateHitsResponseProcessor.Factory factory = new TruncateHitsResponseProcessor.Factory();
+ TruncateHitsResponseProcessor processor = factory.create(Collections.emptyMap(), null, null, false, Collections.emptyMap(), null);
+
+ int numHits = randomInt(100);
+ SearchResponse response = constructResponse(numHits);
+ assertThrows(
+ IllegalStateException.class,
+ () -> processor.processResponse(new SearchRequest(), response, new PipelineProcessingContext())
+ );
+ }
+
+ private static SearchResponse constructResponse(int numHits) {
+ SearchHit[] hitsArray = new SearchHit[numHits];
+ for (int i = 0; i < numHits; i++) {
+ hitsArray[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
+ }
+ SearchHits searchHits = new SearchHits(
+ hitsArray,
+ new TotalHits(Math.max(numHits, 1000), TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO),
+ 1.0f
+ );
+ InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, null, false, false, 0);
+ return new SearchResponse(internalSearchResponse, null, 1, 1, 0, 10, null, null);
+ }
+}
diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/60_oversample_truncate.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/60_oversample_truncate.yml
new file mode 100644
index 0000000000000..1f9e95084322d
--- /dev/null
+++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/60_oversample_truncate.yml
@@ -0,0 +1,105 @@
+---
+teardown:
+ - do:
+ search_pipeline.delete:
+ id: "my_pipeline"
+ ignore: 404
+
+---
+"Test state propagating from oversample to truncate_hits processor":
+ - do:
+ search_pipeline.put:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "request_processors": [
+ {
+ "oversample" : {
+ "sample_factor" : 2
+ }
+ }
+ ],
+ "response_processors": [
+ {
+ "collapse" : {
+ "field" : "group_id"
+ }
+ },
+ {
+ "truncate_hits" : {}
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ "group_id": "a",
+ "popularity" : 1
+ }
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ "group_id": "a",
+ "popularity" : 2
+ }
+ - do:
+ index:
+ index: test
+ id: 3
+ body: {
+ "group_id": "b",
+ "popularity" : 3
+ }
+ - do:
+ index:
+ index: test
+ id: 4
+ body: {
+ "group_id": "b",
+ "popularity" : 4
+ }
+ - do:
+ indices.refresh:
+ index: test
+
+ - do:
+ search:
+ body: {
+ "query" : {
+ "function_score" : {
+ "field_value_factor" : {
+ "field" : "popularity"
+ }
+ }
+ },
+ "size" : 2
+ }
+ - match: { hits.total.value: 4 }
+ - length: { hits.hits: 2 }
+ - match: { hits.hits.0._id: "4" }
+ - match: { hits.hits.1._id: "3" }
+
+ - do:
+ search:
+ search_pipeline: my_pipeline
+ body: {
+ "query" : {
+ "function_score" : {
+ "field_value_factor" : {
+ "field" : "popularity"
+ }
+ }
+ },
+ "size" : 2
+ }
+ - match: { hits.total.value: 4 }
+ - length: { hits.hits: 2 }
+ - match: { hits.hits.0._id: "4" }
+ - match: { hits.hits.1._id: "2" }
diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/70_script_truncate.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/70_script_truncate.yml
new file mode 100644
index 0000000000000..9c9f6747e9bdc
--- /dev/null
+++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/70_script_truncate.yml
@@ -0,0 +1,70 @@
+---
+teardown:
+ - do:
+ search_pipeline.delete:
+ id: "my_pipeline"
+ ignore: 404
+
+---
+"Test state propagating from script request to truncate_hits processor":
+ - do:
+ search_pipeline.put:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "request_processors": [
+ {
+ "script" : {
+ "source" : "ctx.request_context['foo.original_size'] = 2"
+ }
+ }
+ ],
+ "response_processors": [
+ {
+ "truncate_hits" : {
+ "context_prefix" : "foo"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {}
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {}
+ - do:
+ index:
+ index: test
+ id: 3
+ body: {}
+ - do:
+ index:
+ index: test
+ id: 4
+ body: {}
+ - do:
+ indices.refresh:
+ index: test
+
+ - do:
+ search:
+ body: {
+ }
+ - match: { hits.total.value: 4 }
+ - length: { hits.hits: 4 }
+
+ - do:
+ search:
+ search_pipeline: my_pipeline
+ body: {
+ }
+ - match: { hits.total.value: 4 }
+ - length: { hits.hits: 2 }
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..7bb27bbfcb87d
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+a4d94fd6cdf7a37b15237e32434afd6b955cc591
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.97.Final.jar.sha1
deleted file mode 100644
index 8430355365996..0000000000000
--- a/modules/transport-netty4/licenses/netty-buffer-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f8f3d8644afa5e6e1a40a3a6aeb9d9aa970ecb4f
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..c792b4e834030
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+642523c57c4be15b8b461be7b1532262d193bbb3
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.97.Final.jar.sha1
deleted file mode 100644
index 7a36dc1f2724f..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-384ba4d75670befbedb45c4d3b497a93639c206d
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..8bf31f6c3a2c7
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+c648f863b301e3fc62d0de098ec61be7628b8ea2
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.97.Final.jar.sha1
deleted file mode 100644
index 37b78a32f741f..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-af78acec783ffd77c63d8aeecc21041fd39ac54f
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..48b42bfab9287
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+40590da6c615b852384542051330e9fd51d4c4b1
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.97.Final.jar.sha1
deleted file mode 100644
index cbf685a6d79d3..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-893888d09a7bef0d0ba973d7471943e765d0fd08
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..f4fe86799eaff
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+bdb16e4d308b5757123decc886896815d1daf7a3
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.97.Final.jar.sha1
deleted file mode 100644
index 1bdfec3aae6ba..0000000000000
--- a/modules/transport-netty4/licenses/netty-common-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7cceacaf11df8dc63f23d0fb58e9d4640fc88404
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..cfc0befee237a
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+7800aea949bf95f63df73166d144e49405b279dd
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.97.Final.jar.sha1
deleted file mode 100644
index 8b7b50a6fc9c6..0000000000000
--- a/modules/transport-netty4/licenses/netty-handler-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-abb86c6906bf512bf2b797a41cd7d2e8d3cd7c36
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..f4425d5ac8e5f
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+5e7eddfa4dfffcbd375d265d1fd08297df94f82f
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.97.Final.jar.sha1
deleted file mode 100644
index 032959e98d009..0000000000000
--- a/modules/transport-netty4/licenses/netty-resolver-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cec8348108dc76c47cf87c669d514be52c922144
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..fd9199092452b
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+0b36cc2337b4a4135df7ee2f2d77431c3b541dc5
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.97.Final.jar.sha1
deleted file mode 100644
index 107863c1b3c9d..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f37380d23c9bb079bc702910833b2fd532c9abd0
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1
new file mode 100644
index 0000000000000..291c87ad7987b
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1
@@ -0,0 +1 @@
+d0697ef1d71c6111a1b18a387fa985fd6398ace2
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.97.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.97.Final.jar.sha1
deleted file mode 100644
index f736d37d071b7..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.97.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d469d84265ab70095b01b40886cabdd433b6e664
\ No newline at end of file
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HeaderVerifierIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HeaderVerifierIT.java
new file mode 100644
index 0000000000000..c39567a005fd1
--- /dev/null
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HeaderVerifierIT.java
@@ -0,0 +1,75 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.http.netty4;
+
+import org.opensearch.OpenSearchNetty4IntegTestCase;
+import org.opensearch.core.common.transport.TransportAddress;
+import org.opensearch.http.HttpServerTransport;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
+import org.opensearch.test.OpenSearchIntegTestCase.Scope;
+import org.opensearch.transport.Netty4BlockingPlugin;
+
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import io.netty.buffer.ByteBufUtil;
+import io.netty.handler.codec.http.DefaultFullHttpRequest;
+import io.netty.handler.codec.http.FullHttpRequest;
+import io.netty.handler.codec.http.FullHttpResponse;
+import io.netty.handler.codec.http.HttpMethod;
+import io.netty.handler.codec.http.HttpVersion;
+import io.netty.handler.codec.http2.HttpConversionUtil;
+import io.netty.util.ReferenceCounted;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static io.netty.handler.codec.http.HttpHeaderNames.HOST;
+
+@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1)
+public class Netty4HeaderVerifierIT extends OpenSearchNetty4IntegTestCase {
+
+ @Override
+ protected boolean addMockHttpTransport() {
+ return false; // enable http
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Collections.singletonList(Netty4BlockingPlugin.class);
+ }
+
+ public void testThatNettyHttpServerRequestBlockedWithHeaderVerifier() throws Exception {
+ HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class);
+ TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses();
+ TransportAddress transportAddress = randomFrom(boundAddresses);
+
+ final FullHttpRequest blockedRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
+ blockedRequest.headers().add("blockme", "Not Allowed");
+ blockedRequest.headers().add(HOST, "localhost");
+ blockedRequest.headers().add(HttpConversionUtil.ExtensionHeaderNames.SCHEME.text(), "http");
+
+ final List<FullHttpResponse> responses = new ArrayList<>();
+ try (Netty4HttpClient nettyHttpClient = Netty4HttpClient.http2()) {
+ try {
+ FullHttpResponse blockedResponse = nettyHttpClient.send(transportAddress.address(), blockedRequest);
+ responses.add(blockedResponse);
+ String blockedResponseContent = new String(ByteBufUtil.getBytes(blockedResponse.content()), StandardCharsets.UTF_8);
+ assertThat(blockedResponseContent, containsString("Hit header_verifier"));
+ assertThat(blockedResponse.status().code(), equalTo(401));
+ } finally {
+ responses.forEach(ReferenceCounted::release);
+ }
+ }
+ }
+
+}
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
index 6c8ca665424a6..826d4a7e5d61e 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
@@ -57,7 +57,7 @@
/**
* This test checks that in-flight requests are limited on HTTP level and that requests that are excluded from limiting can pass.
- *
+ *
* As the same setting is also used to limit in-flight requests on transport level, we avoid transport messages by forcing
* a single node "cluster".
*/
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/Netty4BlockingPlugin.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/Netty4BlockingPlugin.java
new file mode 100644
index 0000000000000..d5fe49952add3
--- /dev/null
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/Netty4BlockingPlugin.java
@@ -0,0 +1,127 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.transport;
+
+import org.opensearch.common.network.NetworkService;
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.BigArrays;
+import org.opensearch.common.util.PageCacheRecycler;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.xcontent.NamedXContentRegistry;
+import org.opensearch.http.HttpServerTransport;
+import org.opensearch.http.netty4.Netty4HttpServerTransport;
+import org.opensearch.telemetry.tracing.Tracer;
+import org.opensearch.threadpool.ThreadPool;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Collections;
+import java.util.Map;
+import java.util.function.Supplier;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.Unpooled;
+import io.netty.channel.ChannelFutureListener;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.http.DefaultFullHttpResponse;
+import io.netty.handler.codec.http.DefaultHttpRequest;
+import io.netty.handler.codec.http.FullHttpResponse;
+import io.netty.handler.codec.http.HttpRequest;
+import io.netty.handler.codec.http.HttpResponseStatus;
+import io.netty.util.ReferenceCountUtil;
+
+public class Netty4BlockingPlugin extends Netty4ModulePlugin {
+
+ public class Netty4BlockingHttpServerTransport extends Netty4HttpServerTransport {
+
+ public Netty4BlockingHttpServerTransport(
+ Settings settings,
+ NetworkService networkService,
+ BigArrays bigArrays,
+ ThreadPool threadPool,
+ NamedXContentRegistry xContentRegistry,
+ Dispatcher dispatcher,
+ ClusterSettings clusterSettings,
+ SharedGroupFactory sharedGroupFactory,
+ Tracer tracer
+ ) {
+ super(
+ settings,
+ networkService,
+ bigArrays,
+ threadPool,
+ xContentRegistry,
+ dispatcher,
+ clusterSettings,
+ sharedGroupFactory,
+ tracer
+ );
+ }
+
+ @Override
+ protected ChannelInboundHandlerAdapter createHeaderVerifier() {
+ return new ExampleBlockingNetty4HeaderVerifier();
+ }
+ }
+
+ @Override
+ public Map<String, Supplier<HttpServerTransport>> getHttpTransports(
+ Settings settings,
+ ThreadPool threadPool,
+ BigArrays bigArrays,
+ PageCacheRecycler pageCacheRecycler,
+ CircuitBreakerService circuitBreakerService,
+ NamedXContentRegistry xContentRegistry,
+ NetworkService networkService,
+ HttpServerTransport.Dispatcher dispatcher,
+ ClusterSettings clusterSettings,
+ Tracer tracer
+ ) {
+ return Collections.singletonMap(
+ NETTY_HTTP_TRANSPORT_NAME,
+ () -> new Netty4BlockingHttpServerTransport(
+ settings,
+ networkService,
+ bigArrays,
+ threadPool,
+ xContentRegistry,
+ dispatcher,
+ clusterSettings,
+ getSharedGroupFactory(settings),
+ tracer
+ )
+ );
+ }
+
+ /** POC for how an external header verifier would be implemented */
+ public class ExampleBlockingNetty4HeaderVerifier extends SimpleChannelInboundHandler<DefaultHttpRequest> {
+
+ @Override
+ public void channelRead0(ChannelHandlerContext ctx, DefaultHttpRequest msg) throws Exception {
+ ReferenceCountUtil.retain(msg);
+ if (isBlocked(msg)) {
+ ByteBuf buf = Unpooled.copiedBuffer("Hit header_verifier".getBytes(StandardCharsets.UTF_8));
+ final FullHttpResponse response = new DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.UNAUTHORIZED, buf);
+ ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
+ ReferenceCountUtil.release(msg);
+ } else {
+ // Lets the request pass to the next channel handler
+ ctx.fireChannelRead(msg);
+ }
+ }
+
+ private boolean isBlocked(HttpRequest request) {
+ final boolean shouldBlock = request.headers().contains("blockme");
+
+ return shouldBlock;
+ }
+ }
+}
diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java
index a83330356e35e..6475a0b744c60 100644
--- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java
+++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java
@@ -40,6 +40,7 @@
import org.opensearch.transport.netty4.Netty4TcpChannel;
import java.net.InetSocketAddress;
+import java.util.Optional;
import io.netty.channel.Channel;
import io.netty.channel.ChannelPipeline;
@@ -98,6 +99,22 @@ public Channel getNettyChannel() {
return channel;
}
+ @SuppressWarnings("unchecked")
+ @Override
+ public <T> Optional<T> get(String name, Class<T> clazz) {
+ Object handler = getNettyChannel().pipeline().get(name);
+
+ if (handler == null && inboundPipeline() != null) {
+ handler = inboundPipeline().get(name);
+ }
+
+ if (handler != null && clazz.isInstance(handler) == true) {
+ return Optional.of((T) handler);
+ }
+
+ return Optional.empty();
+ }
+
@Override
public String toString() {
return "Netty4HttpChannel{" + "localAddress=" + getLocalAddress() + ", remoteAddress=" + getRemoteAddress() + '}';
diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpRequest.java
index 7d937157c1034..3c96affb7adf7 100644
--- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpRequest.java
+++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpRequest.java
@@ -258,7 +258,7 @@ public FullHttpRequest nettyRequest() {
/**
* A wrapper of {@link HttpHeaders} that implements a map to prevent copying unnecessarily. This class does not support modifications
* and due to the underlying implementation, it performs case insensitive lookups of key to values.
- *
+ *