OPIK-610 removed unnecessary interface
idoberko2 committed Dec 24, 2024
1 parent ab343fe commit 8b7025b
Showing 3 changed files with 49 additions and 62 deletions.
com/comet/opik/domain/ChatCompletionService.java
@@ -1,7 +1,7 @@
 package com.comet.opik.domain;
 
-import com.comet.opik.domain.llmproviders.DefaultLlmProviderStreamHandler;
 import com.comet.opik.domain.llmproviders.LlmProviderFactory;
+import com.comet.opik.domain.llmproviders.LlmProviderStreamHandler;
 import dev.ai4j.openai4j.chat.ChatCompletionRequest;
 import dev.ai4j.openai4j.chat.ChatCompletionResponse;
 import jakarta.inject.Inject;
@@ -14,13 +14,12 @@
 @Slf4j
 public class ChatCompletionService {
     private final LlmProviderFactory llmProviderFactory;
-    private final DefaultLlmProviderStreamHandler defaultStreamHandler;
+    private final LlmProviderStreamHandler streamHandler;
 
     @Inject
-    public ChatCompletionService(LlmProviderFactory llmProviderFactory,
-            DefaultLlmProviderStreamHandler defaultStreamHandler) {
+    public ChatCompletionService(LlmProviderFactory llmProviderFactory, LlmProviderStreamHandler streamHandler) {
         this.llmProviderFactory = llmProviderFactory;
-        this.defaultStreamHandler = defaultStreamHandler;
+        this.streamHandler = streamHandler;
     }
 
     public ChatCompletionResponse create(@NonNull ChatCompletionRequest request, @NonNull String workspaceId) {
@@ -35,7 +34,7 @@ public ChunkedOutput<String> createAndStreamResponse(
             @NonNull ChatCompletionRequest request, @NonNull String workspaceId) {
         log.info("Creating and streaming chat completions, workspaceId '{}', model '{}'", workspaceId, request.model());
         var llmProviderClient = llmProviderFactory.getService(workspaceId, request.model());
-        var chunkedOutput = llmProviderClient.generateStream(request, workspaceId, defaultStreamHandler);
+        var chunkedOutput = llmProviderClient.generateStream(request, workspaceId, streamHandler);
         log.info("Created and streaming chat completions, workspaceId '{}', model '{}'", workspaceId, request.model());
         return chunkedOutput;
     }
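
With the interface removed, LlmProviderStreamHandler is a plain concrete class, so ChatCompletionService receives it directly rather than through a separate default implementation. A minimal wiring sketch under that assumption (hypothetical code, not part of the commit; the factory is assumed to come from DI or a test double):

package com.comet.opik.domain;

import com.comet.opik.domain.llmproviders.LlmProviderFactory;
import com.comet.opik.domain.llmproviders.LlmProviderStreamHandler;

// Hypothetical sketch, not part of the commit: with the interface gone, the
// handler is instantiated directly; only the factory still comes from DI.
class ChatCompletionServiceWiringSketch {

    ChatCompletionService build(LlmProviderFactory llmProviderFactory) {
        var streamHandler = new LlmProviderStreamHandler(); // concrete class, implicit no-arg constructor
        return new ChatCompletionService(llmProviderFactory, streamHandler); // constructor shown in the diff above
    }
}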

This file was deleted.

com/comet/opik/domain/llmproviders/LlmProviderStreamHandler.java
@@ -1,9 +1,49 @@
 package com.comet.opik.domain.llmproviders;
 
+import com.comet.opik.utils.JsonUtils;
+import dev.ai4j.openai4j.OpenAiHttpException;
+import io.dropwizard.jersey.errors.ErrorMessage;
+import lombok.extern.slf4j.Slf4j;
 import org.glassfish.jersey.server.ChunkedOutput;
 
-public interface LlmProviderStreamHandler {
-    void handleMessage(Object item, ChunkedOutput<String> chunkedOutput);
-    void handleClose(ChunkedOutput<String> chunkedOutput);
-    void handleError(Throwable throwable, ChunkedOutput<String> chunkedOutput);
+import java.io.IOException;
+import java.io.UncheckedIOException;
+
+@Slf4j
+public class LlmProviderStreamHandler {
+    private static final String UNEXPECTED_ERROR_CALLING_LLM_PROVIDER = "Unexpected error calling LLM provider";
+
+    public void handleMessage(Object item, ChunkedOutput<String> chunkedOutput) {
+        if (chunkedOutput.isClosed()) {
+            log.warn("Output stream is already closed");
+            return;
+        }
+        try {
+            chunkedOutput.write(JsonUtils.writeValueAsString(item));
+        } catch (IOException ioException) {
+            throw new UncheckedIOException(ioException);
+        }
+    }
+
+    public void handleClose(ChunkedOutput<String> chunkedOutput) {
+        try {
+            chunkedOutput.close();
+        } catch (IOException ioException) {
+            log.error("Failed to close output stream", ioException);
+        }
+    }
+
+    public void handleError(Throwable throwable, ChunkedOutput<String> chunkedOutput) {
+        log.error(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER, throwable);
+        var errorMessage = new ErrorMessage(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER);
+        if (throwable instanceof OpenAiHttpException openAiHttpException) {
+            errorMessage = new ErrorMessage(openAiHttpException.code(), openAiHttpException.getMessage());
+        }
+        try {
+            handleMessage(errorMessage, chunkedOutput);
+        } catch (UncheckedIOException uncheckedIOException) {
+            log.error("Failed to stream error message to client", uncheckedIOException);
+        }
+        handleClose(chunkedOutput);
+    }
 }
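
The handler's three callbacks are intended to be driven by whichever provider client the factory returns while it streams chunks into a Jersey ChunkedOutput. The sketch below (hypothetical, not part of the commit) shows one way that wiring could look; "StreamingSdk" is a stand-in for a real provider SDK's asynchronous streaming API, not an actual Opik type:

package com.comet.opik.domain.llmproviders;

import dev.ai4j.openai4j.chat.ChatCompletionRequest;
import org.glassfish.jersey.server.ChunkedOutput;

import java.util.function.Consumer;

// Hypothetical sketch, not part of the commit: one way a provider client could
// reuse the shared LlmProviderStreamHandler.
class ExampleStreamingProviderClient {

    interface StreamingSdk {
        void stream(ChatCompletionRequest request,
                Consumer<Object> onPartialResponse,
                Runnable onComplete,
                Consumer<Throwable> onError);
    }

    private final StreamingSdk sdk;

    ExampleStreamingProviderClient(StreamingSdk sdk) {
        this.sdk = sdk;
    }

    // Mirrors the generateStream(request, workspaceId, streamHandler) call shape used by
    // ChatCompletionService; how workspaceId is used by a real client is an assumption.
    ChunkedOutput<String> generateStream(ChatCompletionRequest request, String workspaceId,
            LlmProviderStreamHandler streamHandler) {
        ChunkedOutput<String> chunkedOutput = new ChunkedOutput<>(String.class);
        sdk.stream(request,
                partial -> streamHandler.handleMessage(partial, chunkedOutput), // each chunk is serialized to JSON and written
                () -> streamHandler.handleClose(chunkedOutput),                 // normal completion closes the output
                error -> streamHandler.handleError(error, chunkedOutput));      // failures are reported to the client, then closed
        return chunkedOutput;
    }
}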
