feat(BE-190): As a user, I want to be able to use the ollama-client
 - support llama3 - streamEventsFrom
 - refactor
hanrw committed Apr 19, 2024
1 parent 257867f commit 2fc98d7
Showing 4 changed files with 17 additions and 12 deletions.
@@ -15,23 +15,30 @@ private const val STREAM_END_TOKEN = "$STREAM_PREFIX [DONE]"
  */
 suspend inline fun <reified T> FlowCollector<T>.streamEventsFrom(response: HttpResponse) {
     val channel: ByteReadChannel = response.body()
+
+    // Continue to read until the channel is closed.
     while (!channel.isClosedForRead) {
         channel.readUTF8Line()?.let { streamResponse ->
             if (notEndStreamResponse(streamResponse)) {
-                emit(json().decodeFromString(streamResponse.removePrefix(STREAM_PREFIX)))
-            } else {
-                // for like ollama api it's returning json string without prefix "data:"
-                emit(json().decodeFromString(streamResponse))
+                // If the response indicates streaming data, decode and emit it.
+                emit(json().decodeFromString<T>(streamResponse.removePrefix(STREAM_PREFIX)))
+            } else if (isJson(streamResponse)) {
+                emit(json().decodeFromString<T>(streamResponse))
             }
-        } ?: break
+        } ?: break // If `readUTF8Line()` returns null, exit the loop (end of input).
     }
 }
 
 fun json(): Json {
     return getInstance()
 }
 
 private fun isStreamResponse(line: String) = line.startsWith(STREAM_PREFIX)
 
-fun notEndStreamResponse(line: String) = line != STREAM_END_TOKEN && isStreamResponse(line)
+fun notEndStreamResponse(line: String) = line != STREAM_END_TOKEN && isStreamResponse(line)
+
+fun isJson(line: String) = line.startsWith("{") && line.endsWith("}")
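For context, a minimal sketch of how the updated streamEventsFrom extension might be consumed from a Ktor client call. The /api/chat path, the ChatChunk payload type, and the surrounding streamChat function are illustrative assumptions, not part of this commit.

import io.ktor.client.HttpClient
import io.ktor.client.request.post
import io.ktor.client.request.setBody
import io.ktor.client.statement.HttpResponse
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import kotlinx.serialization.Serializable

// Placeholder payload type for illustration only.
@Serializable
data class ChatChunk(val response: String? = null, val done: Boolean = false)

// Hypothetical call site (endpoint path and request body are assumptions);
// streamEventsFrom is the extension updated in the diff above.
fun streamChat(client: HttpClient, requestBody: String): Flow<ChatChunk> = flow {
    val response: HttpResponse = client.post("/api/chat") { setBody(requestBody) }
    // Reads the response line by line and emits each decoded chunk into this flow,
    // handling both "data:"-prefixed SSE lines and the bare JSON lines that the
    // Ollama API returns.
    streamEventsFrom<ChatChunk>(response)
}

Because the new else-if branch requires a line to look like a JSON object, blank lines and the "data: [DONE]" terminator are simply skipped instead of failing deserialization.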
@@ -11,6 +11,6 @@ value class OllamaModel(val value: String) {
         val LLAMA3 = OllamaModel("llama3")
         val CODE_LLAMA = OllamaModel("codellama")
         val MISTRAL = OllamaModel("mistral")
-        val availableModels = listOf(LLAMA2, CODE_LLAMA, MISTRAL)
+        val availableModels = listOf(LLAMA2, LLAMA3, CODE_LLAMA, MISTRAL)
     }
 }
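A small, hypothetical usage example of the newly listed model; it assumes LLAMA3 and availableModels are companion-object members of OllamaModel, as the surrounding context suggests.

fun main() {
    // Pick the model added in this commit and confirm it is advertised as available.
    val model = OllamaModel.LLAMA3
    check(model in OllamaModel.availableModels) { "Unsupported model: ${model.value}" }
    println("Requesting completions with ${model.value}") // -> llama3
}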
@@ -30,9 +30,7 @@ class AnthropicOpenAIProvider(private val client: Anthropic) : OpenAIProvider {
      */
     override suspend fun completions(request: ChatCompletionRequest): OpenAIChatCompletion {
         val anthropicRequest = request.toAnthropicRequest()
-        return client.create(anthropicRequest).let {
-            it.toOpenAIChatCompletion()
-        }
+        return client.create(anthropicRequest).toOpenAIChatCompletion()
     }
 
     /**
@@ -28,8 +28,8 @@ fun initOpenAIGateway(
             commonModule(false) +
                 anthropicModules(anthropicConfig) +
                 openAIModules(openAIConfig) +
-                openAIGatewayModules() +
-                ollamaModules(ollamaConfig)
+                ollamaModules(ollamaConfig) +
+                openAIGatewayModules()
         )
     }.koin.get<OpenAIGateway>()
 
