diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Model.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Model.kt new file mode 100644 index 0000000..414d7a0 --- /dev/null +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Model.kt @@ -0,0 +1,38 @@ +package com.tddworks.ollama.api + +import kotlinx.serialization.Serializable +import kotlin.jvm.JvmInline + +/** + * Type-safe wrapper for a model identifier. + * NOTE(review): the doc text and the model constants below were copied from the Anthropic/Claude client — they define Claude model IDs, not Ollama model IDs; replace them with Ollama model identifiers (e.g. "llama2") or move this file to the anthropic client module. 
+ * + */ +@Serializable +@JvmInline +value class Model(val value: String) { + companion object { + /** + * Most powerful model for highly complex tasks + * Max output length: 4096 tokens + * Cost (Input / Output per MTok^) $15.00 / $75.00 + */ + val CLAUDE_3_OPUS = Model("claude-3-opus-20240229") + + /** + * Ideal balance of intelligence and speed for enterprise workloads + * Max output length: 4096 tokens + * Cost (Input / Output per MTok^) $3.00 / $15.00 + */ + val CLAUDE_3_Sonnet = Model("claude-3-sonnet-20240229") + + /** + * Fastest and most compact model for near-instant responsiveness + * Max output length: 4096 tokens + * Cost (Input / Output per MTok^) $0.25 / $1.25 + */ + val CLAUDE_3_HAIKU = Model("claude-3-haiku-20240307") + + val availableModels = listOf(CLAUDE_3_OPUS, CLAUDE_3_Sonnet, CLAUDE_3_HAIKU) + } +} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaApi.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaApi.kt new file mode 100644 index 0000000..39a7e59 --- /dev/null +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaApi.kt @@ -0,0 +1,9 @@ +package com.tddworks.ollama.api + + +class OllamaApi( + private val apiKey: String, + private val apiURL: String, + private val anthropicVersion: String, +) + diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaConfig.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaConfig.kt new file mode 100644 index 0000000..84ade41 --- /dev/null +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaConfig.kt @@ -0,0 +1,9 @@ +package com.tddworks.ollama.api + +import org.koin.core.component.KoinComponent + +data class OllamaConfig( + val apiKey: () -> String = { "CONFIG_API_KEY" }, + val baseUrl: () -> String = { Ollama.BASE_URL }, + val 
ollamaVersion: () -> String = { Ollama.ANTHROPIC_VERSION }, +) : KoinComponent \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt new file mode 100644 index 0000000..82e3b61 --- /dev/null +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt @@ -0,0 +1,23 @@ +package com.tddworks.ollama.di + +import com.tddworks.di.commonModule +import com.tddworks.ollama.api.Ollama +import com.tddworks.ollama.api.OllamaConfig +import org.koin.core.context.startKoin +import org.koin.dsl.KoinAppDeclaration +import org.koin.dsl.module + +fun iniOllamaKoin(config: OllamaConfig, appDeclaration: KoinAppDeclaration = {}) = + startKoin { + appDeclaration() + modules(commonModule(false) + ollamaModules(config)) + } + +fun ollamaModules( + config: OllamaConfig, +) = module { + single { + Ollama( + ) + } +} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/implement.md b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/implement.md new file mode 100644 index 0000000..9f94304 --- /dev/null +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/implement.md @@ -0,0 +1,75 @@ +Here's an example of the Kotlin API definition for the chat completion endpoint: + +```kotlin +package com.tddworks.ollama.api.internal + +import com.tddworks.ollama.api.internal.model.ChatRequest +import com.tddworks.ollama.api.internal.model.ChatResponse +import retrofit2.http.Body +import retrofit2.http.POST + +interface ChatApi { + @POST("/api/chat") + suspend fun generateChat( + @Body request: ChatRequest + ): ChatResponse +} +``` + +The `ChatApi` interface defines a single method, `generateChat`, which takes a `ChatRequest` object as an input parameter and returns a `ChatResponse` object. 
+ +The `ChatRequest` class would look like this: + +```kotlin +data class ChatRequest( + @field:JsonProperty("model") val model: String, + @field:JsonProperty("messages") val messages: List<Message>, + @field:JsonProperty("format") val format: String? = null, + @field:JsonProperty("options") val options: Map<String, Any>? = null, + @field:JsonProperty("stream") val stream: Boolean? = null, + @field:JsonProperty("keep_alive") val keepAlive: String? = null +) + +data class Message( + @field:JsonProperty("role") val role: String, + @field:JsonProperty("content") val content: String, + @field:JsonProperty("images") val images: List<String>? = null +) +``` + +The `ChatResponse` class would look like this: + +```kotlin +data class ChatResponse( + @field:JsonProperty("model") val model: String, + @field:JsonProperty("created_at") val createdAt: String, + @field:JsonProperty("message") val message: Message?, + @field:JsonProperty("done") val done: Boolean?, + @field:JsonProperty("total_duration") val totalDuration: Long?, + @field:JsonProperty("load_duration") val loadDuration: Long?, + @field:JsonProperty("prompt_eval_count") val promptEvalCount: Int?, + @field:JsonProperty("prompt_eval_duration") val promptEvalDuration: Long?, + @field:JsonProperty("eval_count") val evalCount: Int?, + @field:JsonProperty("eval_duration") val evalDuration: Long? +) +``` + +This API definition assumes that you are using Retrofit for making the HTTP requests. You can then use the `ChatApi` interface in your Kotlin code to generate chat completions: + +```kotlin +val chatApi = retrofit.create(ChatApi::class.java) + +val request = ChatRequest( + model = "llama2", + messages = listOf( + Message( + role = "user", + content = "why is the sky blue?" + ) + ) +) + +val response = chatApi.generateChat(request) +``` + +The `generateChat` function will return a `ChatResponse` object, which you can then process and handle as needed in your application. \ No newline at end of file