Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor bwc test suite to re-use existing resources between tests #1171

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
d15b8a5
refactor bwc test suite to re-use existing resources between tests
will-hwang Feb 5, 2025
d17a88f
make pipeline nullable
will-hwang Feb 5, 2025
6ed41fc
leave change in model load
will-hwang Feb 5, 2025
6422734
encapsulate get response in try catch clause
will-hwang Feb 5, 2025
be97663
share pipeline names between tests
will-hwang Feb 5, 2025
c937cd5
Revert "share pipeline names between tests"
will-hwang Feb 5, 2025
6c3a7cf
Revert "encapsulate get response in try catch clause"
will-hwang Feb 5, 2025
fe4b191
Revert "leave change in model load"
will-hwang Feb 5, 2025
2bf77e0
Revert "make pipeline nullable"
will-hwang Feb 5, 2025
cebc456
Revert "refactor bwc test suite to re-use existing resources between …
will-hwang Feb 5, 2025
256f928
initializing model before tests
will-hwang Feb 5, 2025
dea54b7
remove before annotation
will-hwang Feb 5, 2025
3f1917f
remove model clean up for now
will-hwang Feb 5, 2025
925b3d9
try loading just text embedding model
will-hwang Feb 5, 2025
77b4a9c
fix typo
will-hwang Feb 5, 2025
cec61d2
name method differently to avoid conflict
will-hwang Feb 5, 2025
08b4e2a
Revert "name method differently to avoid conflict"
will-hwang Feb 6, 2025
55cc255
Revert "fix typo"
will-hwang Feb 6, 2025
95a3aa0
Revert "try loading just text embedding model"
will-hwang Feb 6, 2025
c90d1ed
Revert "remove model clean up for now"
will-hwang Feb 6, 2025
adde51c
Revert "remove before annotation"
will-hwang Feb 6, 2025
a75011f
Revert "initializing model before tests"
will-hwang Feb 6, 2025
f22143e
make text embedding model enum
will-hwang Feb 6, 2025
af5d6de
remove getters and setters
will-hwang Feb 6, 2025
01c3372
add enums for all processors
will-hwang Feb 6, 2025
33f12b2
don't remove model id at the end
will-hwang Feb 6, 2025
fab7290
add getter setter annotations
will-hwang Feb 6, 2025
6037c68
remove getter setter
will-hwang Feb 6, 2025
99ec4f1
separate encoding and embedding classes
will-hwang Feb 6, 2025
b0481b3
synchronize get instance call
will-hwang Feb 6, 2025
45ccf32
adding a fail message to debug
will-hwang Feb 6, 2025
30fca43
add debug line in new cluster
will-hwang Feb 6, 2025
bf58371
add more debug line
will-hwang Feb 6, 2025
6b30b2b
get model id
will-hwang Feb 6, 2025
5c994cf
replace in new cluster
will-hwang Feb 6, 2025
ad29cb6
get through cluster in new cluster
will-hwang Feb 6, 2025
48235dc
fix typo
will-hwang Feb 6, 2025
2d0f6a1
change to image
will-hwang Feb 6, 2025
3e0f337
removing model from clean-up
will-hwang Feb 6, 2025
0122ffc
checking against removing finally clause
will-hwang Feb 6, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import java.nio.file.Path;
import java.util.Locale;
import java.util.Optional;

import org.junit.Before;
import org.opensearch.common.settings.Settings;
import org.opensearch.neuralsearch.BaseNeuralSearchIT;
Expand All @@ -16,6 +17,9 @@
import static org.opensearch.neuralsearch.util.TestUtils.RESTART_UPGRADE_OLD_CLUSTER;
import static org.opensearch.neuralsearch.util.TestUtils.BWC_VERSION;
import static org.opensearch.neuralsearch.util.TestUtils.generateModelId;

import org.opensearch.neuralsearch.util.SparseEncodingModel;
import org.opensearch.neuralsearch.util.TextEmbeddingModel;
import org.opensearch.test.rest.OpenSearchRestTestCase;

public abstract class AbstractRestartUpgradeRestTestCase extends BaseNeuralSearchIT {
Expand Down Expand Up @@ -61,9 +65,16 @@ protected final Optional<String> getBWCVersion() {
return Optional.ofNullable(System.getProperty(BWC_VERSION, null));
}

protected String uploadTextEmbeddingModel() throws Exception {
String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI()));
return registerModelGroupAndGetModelId(requestBody);
/**
 * Returns the shared text embedding model id, uploading the model on first use.
 * The id is cached on the {@code TextEmbeddingModel} singleton so subsequent BWC
 * tests re-use the already registered model instead of uploading a new one.
 *
 * NOTE(review): the check-then-set on the singleton is not atomic — assumes BWC
 * tests run sequentially; confirm if tests are ever parallelized.
 *
 * @return the cached model id, or the id returned by a fresh registration
 * @throws Exception if the request body resource cannot be read or registration fails
 */
protected String getOrUploadTextEmbeddingModel() throws Exception {
    TextEmbeddingModel model = TextEmbeddingModel.getInstance();
    String cachedId = model.getModelId();
    if (cachedId != null) {
        return cachedId;
    }
    // First caller: register the model group and remember the resulting id.
    String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI()));
    String newId = registerModelGroupAndGetModelId(requestBody);
    model.setModelId(newId);
    return newId;
}

protected String registerModelGroupAndGetModelId(final String requestBody) throws Exception {
Expand All @@ -79,11 +90,18 @@ protected void createPipelineProcessor(final String modelId, final String pipeli
createPipelineProcessor(requestBody, pipelineName, modelId, null);
}

protected String uploadSparseEncodingModel() throws Exception {
String requestBody = Files.readString(
Path.of(classLoader.getResource("processor/UploadSparseEncodingModelRequestBody.json").toURI())
);
return registerModelGroupAndGetModelId(requestBody);
/**
 * Returns the shared sparse encoding model id, uploading the model on first use.
 * The id is cached on the {@code SparseEncodingModel} singleton so subsequent BWC
 * tests re-use the already registered model instead of uploading a new one.
 *
 * NOTE(review): the check-then-set on the singleton is not atomic — assumes BWC
 * tests run sequentially; confirm if tests are ever parallelized.
 *
 * @return the cached model id, or the id returned by a fresh registration
 * @throws Exception if the request body resource cannot be read or registration fails
 */
protected String getOrUploadSparseEncodingModel() throws Exception {
    SparseEncodingModel model = SparseEncodingModel.getInstance();
    String cachedId = model.getModelId();
    if (cachedId != null) {
        return cachedId;
    }
    // First caller: register the model group and remember the resulting id.
    String requestBody = Files.readString(
        Path.of(classLoader.getResource("processor/UploadSparseEncodingModelRequestBody.json").toURI())
    );
    String newId = registerModelGroupAndGetModelId(requestBody);
    model.setModelId(newId);
    return newId;
}

protected void createPipelineForTextImageProcessor(final String modelId, final String pipelineName) throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ public void testBatchIngestionWithNeuralSparseProcessor_E2EFlow() throws Excepti
waitForClusterHealthGreen(NODES_BWC_CLUSTER);
String indexName = getIndexNameForTest();
if (isRunningAgainstOldCluster()) {
String modelId = uploadSparseEncodingModel();
String modelId = getOrUploadSparseEncodingModel();
loadModel(modelId);
createPipelineForSparseEncodingProcessor(modelId, PIPELINE_NAME, batchSize);
createIndexWithConfiguration(
Expand All @@ -45,7 +45,7 @@ public void testBatchIngestionWithNeuralSparseProcessor_E2EFlow() throws Excepti
bulkAddDocuments(indexName, TEXT_FIELD_NAME, PIPELINE_NAME, docs);
validateDocCountAndInfo(indexName, 10, () -> getDocById(indexName, "9"), EMBEDDING_FIELD_NAME, Map.class);
} finally {
wipeOfTestResources(indexName, PIPELINE_NAME, modelId, null);
// wipeOfTestResources(indexName, PIPELINE_NAME, null, null);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,16 @@

import org.opensearch.index.query.MatchQueryBuilder;

import static org.opensearch.neuralsearch.util.TestUtils.getModelId;
import static org.opensearch.neuralsearch.util.TestUtils.NODES_BWC_CLUSTER;
import static org.opensearch.neuralsearch.util.TestUtils.PARAM_NAME_WEIGHTS;
import static org.opensearch.neuralsearch.util.TestUtils.TEXT_EMBEDDING_PROCESSOR;
import static org.opensearch.neuralsearch.util.TestUtils.DEFAULT_NORMALIZATION_METHOD;
import static org.opensearch.neuralsearch.util.TestUtils.DEFAULT_COMBINATION_METHOD;
import static org.opensearch.neuralsearch.util.TestUtils.TEXT_EMBEDDING_PROCESSOR;

import org.opensearch.knn.index.query.rescore.RescoreContext;
import org.opensearch.neuralsearch.query.HybridQueryBuilder;
import org.opensearch.neuralsearch.query.NeuralQueryBuilder;
import org.opensearch.neuralsearch.util.TestUtils;

public class HybridSearchIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "nlp-hybrid-pipeline";
Expand Down Expand Up @@ -56,7 +56,7 @@ private void validateNormalizationProcessor(final String fileName, final String
throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);
if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel();
loadModel(modelId);
createPipelineProcessor(modelId, pipelineName);
createIndexWithConfiguration(
Expand All @@ -69,15 +69,15 @@ private void validateNormalizationProcessor(final String fileName, final String
} else {
String modelId = null;
try {
modelId = getModelId(getIngestionPipeline(pipelineName), TEXT_EMBEDDING_PROCESSOR);
modelId = TestUtils.getModelId(getIngestionPipeline(pipelineName), TEXT_EMBEDDING_PROCESSOR);
loadModel(modelId);
addDocuments(getIndexNameForTest(), false);
HybridQueryBuilder hybridQueryBuilder = getQueryBuilder(modelId, null, null, null);
validateTestIndex(getIndexNameForTest(), searchPipelineName, hybridQueryBuilder);
hybridQueryBuilder = getQueryBuilder(modelId, Boolean.FALSE, Map.of("ef_search", 100), RescoreContext.getDefault());
validateTestIndex(getIndexNameForTest(), searchPipelineName, hybridQueryBuilder);
} finally {
wipeOfTestResources(getIndexNameForTest(), pipelineName, modelId, searchPipelineName);
// wipeOfTestResources(getIndexNameForTest(), pipelineName, null, searchPipelineName);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,14 @@
import static org.opensearch.neuralsearch.util.TestUtils.NODES_BWC_CLUSTER;
import static org.opensearch.neuralsearch.util.TestUtils.PARAM_NAME_WEIGHTS;
import static org.opensearch.neuralsearch.util.TestUtils.TEXT_EMBEDDING_PROCESSOR;
import static org.opensearch.neuralsearch.util.TestUtils.getModelId;

import org.opensearch.index.query.MatchQueryBuilder;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.knn.index.query.rescore.RescoreContext;
import org.opensearch.neuralsearch.query.HybridQueryBuilder;
import org.opensearch.neuralsearch.query.NeuralQueryBuilder;
import org.opensearch.neuralsearch.util.TestUtils;

public class HybridSearchWithRescoreIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "nlp-hybrid-with-rescore-pipeline";
Expand All @@ -43,7 +43,7 @@ public void testHybridQueryWithRescore_whenIndexWithMultipleShards_E2EFlow() thr
waitForClusterHealthGreen(NODES_BWC_CLUSTER);

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel();
loadModel(modelId);
createPipelineProcessor(modelId, PIPELINE_NAME);
createIndexWithConfiguration(
Expand All @@ -61,7 +61,8 @@ public void testHybridQueryWithRescore_whenIndexWithMultipleShards_E2EFlow() thr
} else {
String modelId = null;
try {
modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR);
modelId = TestUtils.getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR);
;
loadModel(modelId);
addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_UPGRADED, null, null);
HybridQueryBuilder hybridQueryBuilder = getQueryBuilder(modelId, null, null);
Expand All @@ -70,7 +71,7 @@ public void testHybridQueryWithRescore_whenIndexWithMultipleShards_E2EFlow() thr
hybridQueryBuilder = getQueryBuilder(modelId, Map.of("ef_search", 100), RescoreContext.getDefault());
validateTestIndex(getIndexNameForTest(), hybridQueryBuilder, rescorer);
} finally {
wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null);
// wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, null, null);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,10 @@
import java.nio.file.Path;
import java.util.Map;
import static org.opensearch.neuralsearch.util.TestUtils.NODES_BWC_CLUSTER;
import static org.opensearch.neuralsearch.util.TestUtils.TEXT_IMAGE_EMBEDDING_PROCESSOR;
import static org.opensearch.neuralsearch.util.TestUtils.getModelId;
import static org.opensearch.neuralsearch.util.TestUtils.TEXT_EMBEDDING_PROCESSOR;

import org.opensearch.neuralsearch.query.NeuralQueryBuilder;
import org.opensearch.neuralsearch.util.TestUtils;

public class KnnRadialSearchIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "radial-search-pipeline";
Expand All @@ -28,7 +29,7 @@ public void testKnnRadialSearch_E2EFlow() throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel();
loadModel(modelId);
createPipelineForTextImageProcessor(modelId, PIPELINE_NAME);
createIndexWithConfiguration(
Expand All @@ -40,12 +41,13 @@ public void testKnnRadialSearch_E2EFlow() throws Exception {
} else {
String modelId = null;
try {
modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_IMAGE_EMBEDDING_PROCESSOR);
modelId = TestUtils.getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR);
;
loadModel(modelId);
addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_1, TEST_IMAGE_FIELD, TEST_IMAGE_TEXT_1);
validateIndexQuery(modelId);
} finally {
wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null);
// wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, null, null);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import static org.opensearch.neuralsearch.util.TestUtils.NODES_BWC_CLUSTER;
import static org.opensearch.neuralsearch.util.TestUtils.TEXT_IMAGE_EMBEDDING_PROCESSOR;
import static org.opensearch.neuralsearch.util.TestUtils.getModelId;

import org.opensearch.neuralsearch.query.NeuralQueryBuilder;

public class MultiModalSearchIT extends AbstractRestartUpgradeRestTestCase {
Expand All @@ -28,7 +29,7 @@ public void testTextImageEmbeddingProcessor_E2EFlow() throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel();
loadModel(modelId);
createPipelineForTextImageProcessor(modelId, PIPELINE_NAME);
createIndexWithConfiguration(
Expand All @@ -45,7 +46,7 @@ public void testTextImageEmbeddingProcessor_E2EFlow() throws Exception {
addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_1, TEST_IMAGE_FIELD, TEST_IMAGE_TEXT_1);
validateTestIndex(modelId);
} finally {
wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null);
// wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, null, null);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@
import static org.opensearch.neuralsearch.util.TestUtils.TEXT_EMBEDDING_PROCESSOR;

import org.opensearch.common.settings.Settings;
import org.opensearch.neuralsearch.util.TestUtils;
import org.opensearch.neuralsearch.query.NeuralQueryBuilder;
import org.opensearch.neuralsearch.query.NeuralSparseQueryBuilder;
import org.opensearch.neuralsearch.util.TestUtils;

import java.nio.file.Files;
import java.nio.file.Path;
Expand All @@ -37,7 +37,7 @@ public void testNeuralQueryEnricherProcessor_NeuralSparseSearch_E2EFlow() throws
.queryText(TEXT_1);

if (isRunningAgainstOldCluster()) {
String modelId = uploadSparseEncodingModel();
String modelId = getOrUploadSparseEncodingModel();
loadModel(modelId);
sparseEncodingQueryBuilderWithModelId.modelId(modelId);
createPipelineForSparseEncodingProcessor(modelId, SPARSE_INGEST_PIPELINE_NAME);
Expand Down Expand Up @@ -69,7 +69,7 @@ public void testNeuralQueryEnricherProcessor_NeuralSparseSearch_E2EFlow() throws
search(getIndexNameForTest(), sparseEncodingQueryBuilderWithModelId, 1).get("hits")
);
} finally {
wipeOfTestResources(getIndexNameForTest(), SPARSE_INGEST_PIPELINE_NAME, modelId, SPARSE_SEARCH_PIPELINE_NAME);
// wipeOfTestResources(getIndexNameForTest(), SPARSE_INGEST_PIPELINE_NAME, null, SPARSE_SEARCH_PIPELINE_NAME);
}
}
}
Expand All @@ -86,7 +86,7 @@ public void testNeuralQueryEnricherProcessor_NeuralSearch_E2EFlow() throws Excep
.build();

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel();
loadModel(modelId);
neuralQueryBuilderWithModelId.modelId(modelId);
createPipelineProcessor(modelId, DENSE_INGEST_PIPELINE_NAME);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ public class NeuralSparseSearchIT extends AbstractRestartUpgradeRestTestCase {
public void testSparseEncodingProcessor_E2EFlow() throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);
if (isRunningAgainstOldCluster()) {
String modelId = uploadSparseEncodingModel();
String modelId = getOrUploadSparseEncodingModel();
loadModel(modelId);
createPipelineForSparseEncodingProcessor(modelId, PIPELINE_NAME);
createIndexWithConfiguration(
Expand Down Expand Up @@ -65,7 +65,7 @@ public void testSparseEncodingProcessor_E2EFlow() throws Exception {
);
validateTestIndex(modelId);
} finally {
wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null);
// wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, null, null);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ public void testNeuralSparseQueryTwoPhaseProcessor_NeuralSearch_E2EFlow() throws
waitForClusterHealthGreen(NODES_BWC_CLUSTER);
NeuralSparseQueryBuilder neuralSparseQueryBuilder = new NeuralSparseQueryBuilder().fieldName(TEST_ENCODING_FIELD).queryText(TEXT_1);
if (isRunningAgainstOldCluster()) {
String modelId = uploadSparseEncodingModel();
String modelId = getOrUploadSparseEncodingModel();
loadModel(modelId);
neuralSparseQueryBuilder.modelId(modelId);
createPipelineForSparseEncodingProcessor(modelId, NEURAL_SPARSE_INGEST_PIPELINE_NAME);
Expand All @@ -53,12 +53,12 @@ public void testNeuralSparseQueryTwoPhaseProcessor_NeuralSearch_E2EFlow() throws
Object resultWith2PhasePipeline = search(getIndexNameForTest(), neuralSparseQueryBuilder, 1).get("hits");
assertNotNull(resultWith2PhasePipeline);
} finally {
wipeOfTestResources(
getIndexNameForTest(),
NEURAL_SPARSE_INGEST_PIPELINE_NAME,
modelId,
NEURAL_SPARSE_TWO_PHASE_SEARCH_PIPELINE_NAME
);
// wipeOfTestResources(
// getIndexNameForTest(),
// NEURAL_SPARSE_INGEST_PIPELINE_NAME,
// null,
// NEURAL_SPARSE_TWO_PHASE_SEARCH_PIPELINE_NAME
// );
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,10 @@
import java.nio.file.Path;
import java.util.Map;
import static org.opensearch.neuralsearch.util.TestUtils.NODES_BWC_CLUSTER;
import static org.opensearch.neuralsearch.util.TestUtils.getModelId;
import static org.opensearch.neuralsearch.util.TestUtils.TEXT_EMBEDDING_PROCESSOR;

import org.opensearch.neuralsearch.query.NeuralQueryBuilder;
import org.opensearch.neuralsearch.util.TestUtils;

public class SemanticSearchIT extends AbstractRestartUpgradeRestTestCase {

Expand All @@ -24,9 +25,8 @@ public class SemanticSearchIT extends AbstractRestartUpgradeRestTestCase {
// Validate process , pipeline and document count in restart-upgrade scenario
public void testTextEmbeddingProcessor_E2EFlow() throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel();
loadModel(modelId);
createPipelineProcessor(modelId, PIPELINE_NAME);
createIndexWithConfiguration(
Expand All @@ -38,12 +38,12 @@ public void testTextEmbeddingProcessor_E2EFlow() throws Exception {
} else {
String modelId = null;
try {
modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR);
modelId = TestUtils.getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR);
loadModel(modelId);
addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_1, null, null);
validateTestIndex(modelId);
} finally {
wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null);
// wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, null, null);
}
}
}
Expand Down
Loading
Loading