Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

refactor bwc test suite to re-use existing resources between tests #1171

Draft
wants to merge 16 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import org.junit.Before;
import org.opensearch.common.settings.Settings;
Expand All @@ -16,6 +17,8 @@
import static org.opensearch.neuralsearch.util.TestUtils.RESTART_UPGRADE_OLD_CLUSTER;
import static org.opensearch.neuralsearch.util.TestUtils.BWC_VERSION;
import static org.opensearch.neuralsearch.util.TestUtils.generateModelId;
import static org.opensearch.neuralsearch.util.TestUtils.getModelId;

import org.opensearch.test.rest.OpenSearchRestTestCase;

public abstract class AbstractRestartUpgradeRestTestCase extends BaseNeuralSearchIT {
Expand Down Expand Up @@ -61,7 +64,11 @@ protected final Optional<String> getBWCVersion() {
return Optional.ofNullable(System.getProperty(BWC_VERSION, null));
}

protected String uploadTextEmbeddingModel() throws Exception {
/**
 * Returns the text-embedding model id already referenced by the given ingest pipeline,
 * or uploads a new model when the pipeline has none. Re-using the existing model lets
 * restart-upgrade tests share resources between rounds instead of uploading per test.
 *
 * @param pipeline  the ingest pipeline definition to inspect (may reference a model)
 * @param processor the processor key under which the model id is stored in the pipeline
 * @return an existing model id from the pipeline, or the id of a freshly registered model
 * @throws Exception if the upload request body cannot be read or the registration fails
 */
protected String getOrUploadTextEmbeddingModel(final Map<String, Object> pipeline, final String processor) throws Exception {
    String modelId = getModelId(pipeline, processor);
    if (modelId != null) {
        return modelId;
    }
    // No model registered yet for this pipeline/processor — upload one from the canned request body.
    String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI()));
    return registerModelGroupAndGetModelId(requestBody);
}
Expand All @@ -74,35 +81,57 @@ protected String registerModelGroupAndGetModelId(final String requestBody) throw
return uploadModel(String.format(LOCALE, requestBody, modelGroupId));
}

protected void createPipelineProcessor(final String modelId, final String pipelineName) throws Exception {
/**
 * Creates (or re-uses) an ingest pipeline wired to the given model. When the pipeline
 * already exists and its processor references exactly this model id, the call is a no-op
 * so repeated test rounds do not recreate identical pipelines.
 *
 * @param modelId      the model the pipeline's processor should reference
 * @param pipelineName the name of the ingest pipeline to create or re-use
 * @param processor    the processor key used to look up the model id in an existing pipeline
 * @throws Exception if the pipeline configuration cannot be read or the create request fails
 */
protected void createPipelineProcessor(final String modelId, final String pipelineName, final String processor) throws Exception {
    String modelIdInPipeline = getModelId(getIngestionPipeline(pipelineName), processor);
    if (modelIdInPipeline != null && modelIdInPipeline.equals(modelId)) {
        // Pipeline already points at this model — nothing to do.
        return;
    }
    String requestBody = Files.readString(Path.of(classLoader.getResource("processor/PipelineConfiguration.json").toURI()));
    createPipelineProcessor(requestBody, pipelineName, modelId, null);
}

protected String uploadSparseEncodingModel() throws Exception {
/**
 * Returns the sparse-encoding model id already referenced by the given ingest pipeline,
 * or uploads a new model when the pipeline has none. Mirrors
 * {@code getOrUploadTextEmbeddingModel} but uses the sparse-encoding upload request body.
 *
 * @param pipeline  the ingest pipeline definition to inspect (may reference a model)
 * @param processor the processor key under which the model id is stored in the pipeline
 * @return an existing model id from the pipeline, or the id of a freshly registered model
 * @throws Exception if the upload request body cannot be read or the registration fails
 */
protected String getOrUploadSparseEncodingModel(final Map<String, Object> pipeline, final String processor) throws Exception {
    String modelId = getModelId(pipeline, processor);
    if (modelId != null) {
        return modelId;
    }
    // No model registered yet — upload one from the sparse-encoding request body fixture.
    String requestBody = Files.readString(
        Path.of(classLoader.getResource("processor/UploadSparseEncodingModelRequestBody.json").toURI())
    );
    return registerModelGroupAndGetModelId(requestBody);
}

protected void createPipelineForTextImageProcessor(final String modelId, final String pipelineName) throws Exception {
/**
 * Creates an ingest pipeline for the text-image processor, skipping creation when the
 * existing pipeline already references the given model id.
 *
 * @param modelId      the model the text-image processor should reference
 * @param pipelineName the name of the ingest pipeline to create or re-use
 * @param processor    the processor key used to look up the model id in an existing pipeline
 * @throws Exception if the pipeline configuration cannot be read or the create request fails
 */
protected void createPipelineForTextImageProcessor(final String modelId, final String pipelineName, final String processor)
    throws Exception {
    final String existingModelId = getModelId(getIngestionPipeline(pipelineName), processor);
    final boolean alreadyConfigured = existingModelId != null && existingModelId.equals(modelId);
    if (alreadyConfigured) {
        // The pipeline is already wired to this exact model; re-use it as-is.
        return;
    }
    final String pipelineConfiguration = Files.readString(
        Path.of(classLoader.getResource("processor/PipelineForTextImageProcessorConfiguration.json").toURI())
    );
    createPipelineProcessor(pipelineConfiguration, pipelineName, modelId, null);
}

protected void createPipelineForSparseEncodingProcessor(final String modelId, final String pipelineName, final Integer batchSize)
throws Exception {
/**
 * Creates an ingest pipeline for the sparse-encoding processor with an optional batch
 * size, skipping creation when the existing pipeline already references the given model.
 *
 * @param modelId      the model the sparse-encoding processor should reference
 * @param pipelineName the name of the ingest pipeline to create or re-use
 * @param processor    the processor key used to look up the model id in an existing pipeline
 * @param batchSize    the ingest batch size, or {@code null} for the default
 * @throws Exception if the pipeline configuration cannot be read or the create request fails
 */
protected void createPipelineForSparseEncodingProcessor(
    final String modelId,
    final String pipelineName,
    final String processor,
    final Integer batchSize
) throws Exception {
    final String existingModelId = getModelId(getIngestionPipeline(pipelineName), processor);
    final boolean alreadyConfigured = existingModelId != null && existingModelId.equals(modelId);
    if (alreadyConfigured) {
        // The pipeline is already wired to this exact model; re-use it as-is.
        return;
    }
    final String pipelineConfiguration = Files.readString(
        Path.of(classLoader.getResource("processor/PipelineForSparseEncodingProcessorConfiguration.json").toURI())
    );
    createPipelineProcessor(pipelineConfiguration, pipelineName, modelId, batchSize);
}

protected void createPipelineForSparseEncodingProcessor(final String modelId, final String pipelineName) throws Exception {
createPipelineForSparseEncodingProcessor(modelId, pipelineName, null);
/**
 * Convenience overload of
 * {@link #createPipelineForSparseEncodingProcessor(String, String, String, Integer)}
 * that uses the default ingest batch size.
 *
 * @param modelId      the model the sparse-encoding processor should reference
 * @param pipelineName the name of the ingest pipeline to create or re-use
 * @param processor    the processor key used to look up the model id in an existing pipeline
 * @throws Exception if pipeline creation fails
 */
protected void createPipelineForSparseEncodingProcessor(final String modelId, final String pipelineName, final String processor)
    throws Exception {
    // null batch size delegates the choice of batching to the pipeline defaults.
    createPipelineForSparseEncodingProcessor(modelId, pipelineName, processor, null);
}

protected void createPipelineForTextChunkingProcessor(String pipelineName) throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
import static org.opensearch.neuralsearch.util.TestUtils.SPARSE_ENCODING_PROCESSOR;

public class BatchIngestionIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "pipeline-BatchIngestionIT";
private static final String PIPELINE_NAME = "nlp-pipeline-sparse";
private static final String TEXT_FIELD_NAME = "passage_text";
private static final String EMBEDDING_FIELD_NAME = "passage_embedding";
private static final int batchSize = 3;
Expand All @@ -25,9 +25,9 @@ public void testBatchIngestionWithNeuralSparseProcessor_E2EFlow() throws Excepti
waitForClusterHealthGreen(NODES_BWC_CLUSTER);
String indexName = getIndexNameForTest();
if (isRunningAgainstOldCluster()) {
String modelId = uploadSparseEncodingModel();
String modelId = getOrUploadSparseEncodingModel(getIngestionPipeline(PIPELINE_NAME), SPARSE_ENCODING_PROCESSOR);
loadModel(modelId);
createPipelineForSparseEncodingProcessor(modelId, PIPELINE_NAME, batchSize);
createPipelineForSparseEncodingProcessor(modelId, PIPELINE_NAME, SPARSE_ENCODING_PROCESSOR, batchSize);
createIndexWithConfiguration(
indexName,
Files.readString(Path.of(classLoader.getResource("processor/SparseIndexMappings.json").toURI())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import org.opensearch.neuralsearch.query.NeuralQueryBuilder;

public class HybridSearchIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "nlp-hybrid-pipeline";
private static final String PIPELINE_NAME = "nlp-pipeline-text-embedding";
private static final String PIPELINE1_NAME = "nlp-hybrid-1-pipeline";
private static final String SEARCH_PIPELINE_NAME = "nlp-search-pipeline";
private static final String SEARCH_PIPELINE1_NAME = "nlp-search-1-pipeline";
Expand Down Expand Up @@ -56,9 +56,9 @@ private void validateNormalizationProcessor(final String fileName, final String
throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);
if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel(getIngestionPipeline(pipelineName), TEXT_EMBEDDING_PROCESSOR);
loadModel(modelId);
createPipelineProcessor(modelId, pipelineName);
createPipelineProcessor(modelId, pipelineName, TEXT_EMBEDDING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource(fileName).toURI())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
import org.opensearch.neuralsearch.query.NeuralQueryBuilder;

public class HybridSearchWithRescoreIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "nlp-hybrid-with-rescore-pipeline";
private static final String PIPELINE_NAME = "nlp-pipeline-text-embedding";
private static final String SEARCH_PIPELINE_NAME = "nlp-search-with_rescore-pipeline";
private static final String TEST_FIELD = "passage_text";
private static final String TEXT = "Hello world";
Expand All @@ -43,9 +43,9 @@ public void testHybridQueryWithRescore_whenIndexWithMultipleShards_E2EFlow() thr
waitForClusterHealthGreen(NODES_BWC_CLUSTER);

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR);
loadModel(modelId);
createPipelineProcessor(modelId, PIPELINE_NAME);
createPipelineProcessor(modelId, PIPELINE_NAME, TEXT_EMBEDDING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource("processor/IndexMappingMultipleShard.json").toURI())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import org.opensearch.neuralsearch.query.NeuralQueryBuilder;

public class KnnRadialSearchIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "radial-search-pipeline";
private static final String PIPELINE_NAME = "nlp-pipeline-text-image";
private static final String TEST_FIELD = "passage_text";
private static final String TEST_IMAGE_FIELD = "passage_image";
private static final String TEXT = "Hello world";
Expand All @@ -28,9 +28,9 @@ public void testKnnRadialSearch_E2EFlow() throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel(getIngestionPipeline(PIPELINE_NAME), TEXT_IMAGE_EMBEDDING_PROCESSOR);
loadModel(modelId);
createPipelineForTextImageProcessor(modelId, PIPELINE_NAME);
createPipelineForTextImageProcessor(modelId, PIPELINE_NAME, TEXT_IMAGE_EMBEDDING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource("processor/IndexMappingMultipleShard.json").toURI())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import org.opensearch.neuralsearch.query.NeuralQueryBuilder;

public class MultiModalSearchIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "nlp-ingest-pipeline";
private static final String PIPELINE_NAME = "nlp-pipeline-text-image";
private static final String TEST_FIELD = "passage_text";
private static final String TEST_IMAGE_FIELD = "passage_image";
private static final String TEXT = "Hello world";
Expand All @@ -28,9 +28,9 @@ public void testTextImageEmbeddingProcessor_E2EFlow() throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel(getIngestionPipeline(PIPELINE_NAME), TEXT_IMAGE_EMBEDDING_PROCESSOR);
loadModel(modelId);
createPipelineForTextImageProcessor(modelId, PIPELINE_NAME);
createPipelineForTextImageProcessor(modelId, PIPELINE_NAME, TEXT_IMAGE_EMBEDDING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource("processor/IndexMappingMultipleShard.json").toURI())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

public class NeuralQueryEnricherProcessorIT extends AbstractRestartUpgradeRestTestCase {
// add prefix to avoid conflicts with other IT class, since we don't wipe resources after first round
private static final String SPARSE_INGEST_PIPELINE_NAME = "nqep-nlp-ingest-pipeline-sparse";
private static final String SPARSE_INGEST_PIPELINE_NAME = "nlp-pipeline-sparse";
private static final String DENSE_INGEST_PIPELINE_NAME = "nqep-nlp-ingest-pipeline-dense";
private static final String SPARSE_SEARCH_PIPELINE_NAME = "nqep-nlp-search-pipeline-sparse";
private static final String DENSE_SEARCH_PIPELINE_NAME = "nqep-nlp-search-pipeline-dense";
Expand All @@ -37,10 +37,10 @@ public void testNeuralQueryEnricherProcessor_NeuralSparseSearch_E2EFlow() throws
.queryText(TEXT_1);

if (isRunningAgainstOldCluster()) {
String modelId = uploadSparseEncodingModel();
String modelId = getOrUploadSparseEncodingModel(getIngestionPipeline(SPARSE_INGEST_PIPELINE_NAME), SPARSE_ENCODING_PROCESSOR);
loadModel(modelId);
sparseEncodingQueryBuilderWithModelId.modelId(modelId);
createPipelineForSparseEncodingProcessor(modelId, SPARSE_INGEST_PIPELINE_NAME);
createPipelineForSparseEncodingProcessor(modelId, SPARSE_INGEST_PIPELINE_NAME, SPARSE_ENCODING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource("processor/SparseIndexMappings.json").toURI())),
Expand Down Expand Up @@ -86,10 +86,10 @@ public void testNeuralQueryEnricherProcessor_NeuralSearch_E2EFlow() throws Excep
.build();

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel(getIngestionPipeline(DENSE_INGEST_PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR);
loadModel(modelId);
neuralQueryBuilderWithModelId.modelId(modelId);
createPipelineProcessor(modelId, DENSE_INGEST_PIPELINE_NAME);
createPipelineProcessor(modelId, DENSE_INGEST_PIPELINE_NAME, TEXT_EMBEDDING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource("processor/IndexMappingMultipleShard.json").toURI())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
import org.opensearch.neuralsearch.query.NeuralSparseQueryBuilder;

public class NeuralSparseSearchIT extends AbstractRestartUpgradeRestTestCase {
private static final String PIPELINE_NAME = "nlp-ingest-pipeline-sparse";
private static final String PIPELINE_NAME = "nlp-pipeline-sparse";
private static final String TEST_SPARSE_ENCODING_FIELD = "passage_embedding";
private static final String TEST_TEXT_FIELD = "passage_text";
private static final String TEXT_1 = "Hello world a b";
Expand All @@ -33,9 +33,9 @@ public class NeuralSparseSearchIT extends AbstractRestartUpgradeRestTestCase {
public void testSparseEncodingProcessor_E2EFlow() throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);
if (isRunningAgainstOldCluster()) {
String modelId = uploadSparseEncodingModel();
String modelId = getOrUploadSparseEncodingModel(getIngestionPipeline(PIPELINE_NAME), SPARSE_ENCODING_PROCESSOR);
loadModel(modelId);
createPipelineForSparseEncodingProcessor(modelId, PIPELINE_NAME);
createPipelineForSparseEncodingProcessor(modelId, PIPELINE_NAME, SPARSE_ENCODING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource("processor/SparseIndexMappings.json").toURI())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

public class NeuralSparseTwoPhaseProcessorIT extends AbstractRestartUpgradeRestTestCase {

private static final String NEURAL_SPARSE_INGEST_PIPELINE_NAME = "nstp-nlp-ingest-pipeline-dense";
private static final String NEURAL_SPARSE_INGEST_PIPELINE_NAME = "nlp-pipeline-sparse";
private static final String NEURAL_SPARSE_TWO_PHASE_SEARCH_PIPELINE_NAME = "nstp-nlp-two-phase-search-pipeline-sparse";
private static final String TEST_ENCODING_FIELD = "passage_embedding";
private static final String TEST_TEXT_FIELD = "passage_text";
Expand All @@ -27,10 +27,13 @@ public void testNeuralSparseQueryTwoPhaseProcessor_NeuralSearch_E2EFlow() throws
waitForClusterHealthGreen(NODES_BWC_CLUSTER);
NeuralSparseQueryBuilder neuralSparseQueryBuilder = new NeuralSparseQueryBuilder().fieldName(TEST_ENCODING_FIELD).queryText(TEXT_1);
if (isRunningAgainstOldCluster()) {
String modelId = uploadSparseEncodingModel();
String modelId = getOrUploadSparseEncodingModel(
getIngestionPipeline(NEURAL_SPARSE_INGEST_PIPELINE_NAME),
SPARSE_ENCODING_PROCESSOR
);
loadModel(modelId);
neuralSparseQueryBuilder.modelId(modelId);
createPipelineForSparseEncodingProcessor(modelId, NEURAL_SPARSE_INGEST_PIPELINE_NAME);
createPipelineForSparseEncodingProcessor(modelId, NEURAL_SPARSE_INGEST_PIPELINE_NAME, SPARSE_ENCODING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource("processor/SparseIndexMappings.json").toURI())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

public class SemanticSearchIT extends AbstractRestartUpgradeRestTestCase {

private static final String PIPELINE_NAME = "nlp-pipeline";
private static final String PIPELINE_NAME = "nlp-pipeline-text-embedding";
private static final String TEST_FIELD = "passage_text";
private static final String TEXT = "Hello world";
private static final String TEXT_1 = "Hello world a";
Expand All @@ -26,9 +26,9 @@ public void testTextEmbeddingProcessor_E2EFlow() throws Exception {
waitForClusterHealthGreen(NODES_BWC_CLUSTER);

if (isRunningAgainstOldCluster()) {
String modelId = uploadTextEmbeddingModel();
String modelId = getOrUploadTextEmbeddingModel(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR);
loadModel(modelId);
createPipelineProcessor(modelId, PIPELINE_NAME);
createPipelineProcessor(modelId, PIPELINE_NAME, TEXT_EMBEDDING_PROCESSOR);
createIndexWithConfiguration(
getIndexNameForTest(),
Files.readString(Path.of(classLoader.getResource("processor/IndexMappingMultipleShard.json").toURI())),
Expand Down
Loading
Loading