Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update model to return correct model for CHAT_COMPLETION task type #120326

Merged
merged 12 commits into from
Jan 17, 2025
Original file line number Diff line number Diff line change
Expand Up @@ -92,9 +92,9 @@ public ActionRequestValidationException validate() {
return e;
}

if (taskType.isAnyOrSame(TaskType.COMPLETION) == false) {
if (taskType.isAnyOrSame(TaskType.CHAT_COMPLETION) == false) {
var e = new ActionRequestValidationException();
e.addValidationError("Field [taskType] must be [completion]");
e.addValidationError("Field [taskType] must be [chat_completion]");
return e;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ public void testValidation_ReturnsException_When_TaskType_IsNot_Completion() {
TimeValue.timeValueSeconds(10)
);
var exception = request.validate();
assertThat(exception.getMessage(), is("Validation Failed: 1: Field [taskType] must be [completion];"));
assertThat(exception.getMessage(), is("Validation Failed: 1: Field [taskType] must be [chat_completion];"));
}

public void testValidation_ReturnsNull_When_TaskType_IsAny() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -272,9 +272,9 @@ public void testGetServicesWithChatCompletionTaskType() throws IOException {
List<Object> services = getServices(TaskType.CHAT_COMPLETION);
if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()
|| ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) {
assertThat(services.size(), equalTo(2));
assertThat(services.size(), equalTo(3));
} else {
assertThat(services.size(), equalTo(1));
assertThat(services.size(), equalTo(2));
}

String[] providers = new String[services.size()];
Expand All @@ -283,7 +283,7 @@ public void testGetServicesWithChatCompletionTaskType() throws IOException {
providers[i] = (String) serviceConfig.get("service");
}

var providerList = new ArrayList<>(List.of("openai"));
var providerList = new ArrayList<>(List.of("openai", "streaming_completion_test_service"));

if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()
|| ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) {
Expand Down Expand Up @@ -519,14 +519,19 @@ public void testSupportedStream() throws Exception {

public void testUnifiedCompletionInference() throws Exception {
String modelId = "streaming";
putModel(modelId, mockCompletionServiceModelConfig(TaskType.COMPLETION));
putModel(modelId, mockCompletionServiceModelConfig(TaskType.CHAT_COMPLETION));
var singleModel = getModel(modelId);
assertEquals(modelId, singleModel.get("inference_id"));
assertEquals(TaskType.COMPLETION.toString(), singleModel.get("task_type"));
assertEquals(TaskType.CHAT_COMPLETION.toString(), singleModel.get("task_type"));

var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphanumericOfLength(5)).toList();
try {
var events = unifiedCompletionInferOnMockService(modelId, TaskType.COMPLETION, input, VALIDATE_ELASTIC_PRODUCT_HEADER_CONSUMER);
var events = unifiedCompletionInferOnMockService(
modelId,
TaskType.CHAT_COMPLETION,
input,
VALIDATE_ELASTIC_PRODUCT_HEADER_CONSUMER
);
var expectedResponses = expectedResultsIterator(input);
assertThat(events.size(), equalTo((input.size() + 1) * 2));
events.forEach(event -> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,9 +55,9 @@ public List<Factory> getInferenceServiceFactories() {

public static class TestInferenceService extends AbstractTestInferenceService {
private static final String NAME = "streaming_completion_test_service";
private static final Set<TaskType> supportedStreamingTasks = Set.of(TaskType.COMPLETION);
private static final Set<TaskType> supportedStreamingTasks = Set.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION);

private static final EnumSet<TaskType> supportedTaskTypes = EnumSet.of(TaskType.COMPLETION);
private static final EnumSet<TaskType> supportedTaskTypes = EnumSet.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION);

// The factory context is accepted to satisfy the extension contract but is unused:
// this mock service requires no external configuration.
public TestInferenceService(InferenceServiceExtension.InferenceServiceFactoryContext context) {}

Expand Down Expand Up @@ -129,7 +129,7 @@ public void unifiedCompletionInfer(
ActionListener<InferenceServiceResults> listener
) {
switch (model.getConfigurations().getTaskType()) {
case COMPLETION -> listener.onResponse(makeUnifiedResults(request));
case CHAT_COMPLETION -> listener.onResponse(makeUnifiedResults(request));
default -> listener.onFailure(
new ElasticsearchStatusException(
TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name()),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ public TransportUnifiedCompletionInferenceAction(

@Override
protected boolean isInvalidTaskTypeForInferenceEndpoint(UnifiedCompletionAction.Request request, UnparsedModel unparsedModel) {
    // The unified completion API only serves CHAT_COMPLETION endpoints: the request's
    // task type must be compatible (ANY or CHAT_COMPLETION) AND the stored model must
    // actually be a CHAT_COMPLETION model. Either mismatch makes the request invalid.
    // NOTE: the pre-merge duplicate line referencing TaskType.COMPLETION (diff residue)
    // has been removed; only the merged CHAT_COMPLETION check remains.
    return request.getTaskType().isAnyOrSame(TaskType.CHAT_COMPLETION) == false || unparsedModel.taskType() != TaskType.CHAT_COMPLETION;
}

@Override
Expand All @@ -64,7 +64,7 @@ protected ElasticsearchStatusException createInvalidTaskTypeException(
"Incompatible task_type for unified API, the requested type [{}] must be one of [{}]",
RestStatus.BAD_REQUEST,
request.getTaskType(),
TaskType.COMPLETION.toString()
TaskType.CHAT_COMPLETION.toString()
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ public class OpenAiCompletionRequestManager extends OpenAiRequestManager {

private static final Logger logger = LogManager.getLogger(OpenAiCompletionRequestManager.class);
private static final ResponseHandler HANDLER = createCompletionHandler();
static final String USER_ROLE = "user";
public static final String USER_ROLE = "user";

public static OpenAiCompletionRequestManager of(OpenAiChatCompletionModel model, ThreadPool threadPool) {
return new OpenAiCompletionRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ private static ElasticInferenceServiceModel createModel(
eisServiceComponents,
context
);
case COMPLETION -> new ElasticInferenceServiceCompletionModel(
case CHAT_COMPLETION -> new ElasticInferenceServiceCompletionModel(
inferenceEntityId,
taskType,
NAME,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ public static ModelValidator buildModelValidator(TaskType taskType) {
case COMPLETION -> {
return new ChatCompletionModelValidator(new SimpleServiceIntegrationValidator());
}
case CHAT_COMPLETION -> {
return new ChatCompletionModelValidator(new SimpleChatCompletionServiceIntegrationValidator());
}
case SPARSE_EMBEDDING, RERANK, ANY -> {
return new SimpleModelValidator(new SimpleServiceIntegrationValidator());
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@

/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

package org.elasticsearch.xpack.inference.services.validation;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.inference.InferenceService;
import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.inference.Model;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.inference.action.InferenceAction;
import org.elasticsearch.xpack.inference.external.http.sender.UnifiedChatInput;

import java.util.List;

import static org.elasticsearch.xpack.inference.external.http.sender.OpenAiCompletionRequestManager.USER_ROLE;

/**
* This class uses the unified chat completion method to perform validation.
*/
public class SimpleChatCompletionServiceIntegrationValidator implements ServiceIntegrationValidator {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we have a test class SimpleChatCompletionServiceIntegrationValidatorTests for this?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Good idea, let me take care of that 👍

private static final List<String> TEST_INPUT = List.of("how big");

@Override
public void validate(InferenceService service, Model model, ActionListener<InferenceServiceResults> listener) {
    // Build a minimal unified chat-completion request from the canned test input.
    var validationRequest = new UnifiedChatInput(TEST_INPUT, USER_ROLE, false).getRequest();
    service.unifiedCompletionInfer(
        model,
        validationRequest,
        InferenceAction.Request.DEFAULT_TIMEOUT,
        ActionListener.wrap(results -> {
            // A null result means the service misbehaved; surface that as a 400 so
            // endpoint creation fails with an actionable message.
            if (results == null) {
                listener.onFailure(
                    new ElasticsearchStatusException(
                        "Could not complete inference endpoint creation as validation call to service returned null response.",
                        RestStatus.BAD_REQUEST
                    )
                );
            } else {
                listener.onResponse(results);
            }
        }, exception -> listener.onFailure(
            // Wrap any service failure, preserving the original cause for diagnostics.
            new ElasticsearchStatusException(
                "Could not complete inference endpoint creation as validation call to service threw an exception.",
                RestStatus.BAD_REQUEST,
                exception
            )
        ))
    );
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -57,11 +57,15 @@ public abstract class BaseTransportInferenceActionTestCase<Request extends BaseI
private BaseTransportInferenceAction<Request> action;

protected static final String serviceId = "serviceId";
protected static final TaskType taskType = TaskType.COMPLETION;
protected final TaskType taskType;
protected static final String inferenceId = "inferenceEntityId";
protected InferenceServiceRegistry serviceRegistry;
protected InferenceStats inferenceStats;

// Subclasses supply the task type they exercise (e.g. COMPLETION for the plain
// inference action, CHAT_COMPLETION for the unified completion action).
public BaseTransportInferenceActionTestCase(TaskType taskType) {
    this.taskType = taskType;
}

@Before
public void setUp() throws Exception {
super.setUp();
Expand Down Expand Up @@ -377,7 +381,7 @@ protected void mockModelAndServiceRegistry(InferenceService service) {
when(serviceRegistry.getService(any())).thenReturn(Optional.of(service));
}

protected void mockValidLicenseState(){
protected void mockValidLicenseState() {
when(licenseState.isAllowed(InferencePlugin.INFERENCE_API_FEATURE)).thenReturn(true);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.inference.InferenceServiceRegistry;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.license.MockLicenseState;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.inference.action.InferenceAction;
Expand All @@ -20,6 +21,10 @@

public class TransportInferenceActionTests extends BaseTransportInferenceActionTestCase<InferenceAction.Request> {

public TransportInferenceActionTests() {
    // The plain inference action is exercised with the COMPLETION task type.
    super(TaskType.COMPLETION);
}

@Override
protected BaseTransportInferenceAction<InferenceAction.Request> createAction(
TransportService transportService,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,10 @@

public class TransportUnifiedCompletionActionTests extends BaseTransportInferenceActionTestCase<UnifiedCompletionAction.Request> {

public TransportUnifiedCompletionActionTests() {
    // The unified completion action is exercised with the CHAT_COMPLETION task type.
    super(TaskType.CHAT_COMPLETION);
}

@Override
protected BaseTransportInferenceAction<UnifiedCompletionAction.Request> createAction(
TransportService transportService,
Expand Down Expand Up @@ -71,7 +75,7 @@ public void testThrows_IncompatibleTaskTypeException_WhenUsingATextEmbeddingInfe
assertThat(e, isA(ElasticsearchStatusException.class));
assertThat(
e.getMessage(),
is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [completion]")
is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]")
);
assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST));
}));
Expand All @@ -96,7 +100,7 @@ public void testThrows_IncompatibleTaskTypeException_WhenUsingRequestIsAny_Model
assertThat(e, isA(ElasticsearchStatusException.class));
assertThat(
e.getMessage(),
is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [completion]")
is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]")
);
assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST));
}));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@
import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion;
import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat;
import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings;
import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel;
import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel;
import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap;
import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests.createModel;
import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap;
Expand Down Expand Up @@ -325,7 +325,7 @@ public void testCreate_OpenAiChatCompletionModel() throws IOException {

webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

var model = createChatCompletionModel(getUrl(webServer), "org", "secret", "model", "user");
var model = createCompletionModel(getUrl(webServer), "org", "secret", "model", "user");
var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool));
var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user");
var action = actionCreator.create(model, overriddenTaskSettings);
Expand Down Expand Up @@ -389,7 +389,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutUser() throws IOExceptio

webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

var model = createChatCompletionModel(getUrl(webServer), "org", "secret", "model", null);
var model = createCompletionModel(getUrl(webServer), "org", "secret", "model", null);
var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool));
var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap(null);
var action = actionCreator.create(model, overriddenTaskSettings);
Expand Down Expand Up @@ -452,7 +452,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutOrganization() throws IO

webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

var model = createChatCompletionModel(getUrl(webServer), null, "secret", "model", null);
var model = createCompletionModel(getUrl(webServer), null, "secret", "model", null);
var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool));
var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user");
var action = actionCreator.create(model, overriddenTaskSettings);
Expand Down Expand Up @@ -521,7 +521,7 @@ public void testCreate_OpenAiChatCompletionModel_FailsFromInvalidResponseFormat(
""";
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

var model = createChatCompletionModel(getUrl(webServer), null, "secret", "model", null);
var model = createCompletionModel(getUrl(webServer), null, "secret", "model", null);
var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool));
var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user");
var action = actionCreator.create(model, overriddenTaskSettings);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@
import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER;
import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion;
import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings;
import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel;
import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
Expand Down Expand Up @@ -284,7 +284,7 @@ public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOExc
}

private ExecutableAction createAction(String url, String org, String apiKey, String modelName, @Nullable String user, Sender sender) {
var model = createChatCompletionModel(url, org, apiKey, modelName, user);
var model = createCompletionModel(url, org, apiKey, modelName, user);
var requestCreator = OpenAiCompletionRequestManager.of(model, threadPool);
var errorMessage = constructFailedToSendRequestMessage(model.getServiceSettings().uri(), "OpenAI chat completions");
return new SingleInputSenderExecutableAction(sender, requestCreator, errorMessage, "OpenAI chat completions");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import java.util.ArrayList;

import static org.elasticsearch.xpack.inference.Utils.assertJsonEquals;
import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel;
import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel;

public class ElasticInferenceServiceUnifiedChatCompletionRequestEntityTests extends ESTestCase {

Expand All @@ -40,7 +40,7 @@ public void testModelUserFieldsSerialization() throws IOException {
var unifiedRequest = UnifiedCompletionRequest.of(messageList);

UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true);
OpenAiChatCompletionModel model = createChatCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null);
OpenAiChatCompletionModel model = createCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null);

OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model);

Expand Down
Loading
Loading