From 619da0cd6879233ac78ae6364a38fdd2f81275b8 Mon Sep 17 00:00:00 2001
From: slam
Date: Tue, 16 Apr 2024 20:09:14 +0800
Subject: [PATCH] feat(BE-190): As a user, I want to be able to use the
 ollama-client

- add OllamaApi
- add unit tests
- code cleanup & refactoring
---
 .../kotlin/com/tddworks/anthropic/di/Koin.kt  |  2 +-
 build.gradle.kts                              |  2 +
 .../ktor/internal/DefaultHttpRequester.jvm.kt |  2 +-
 .../internal/DefaultHttpRequester.macos.kt    |  2 +-
 .../kotlin/com/tddworks/ollama/api/Model.kt   | 38 --------
 .../kotlin/com/tddworks/ollama/api/Ollama.kt  | 47 ++++++++-
 .../com/tddworks/ollama/api/OllamaApi.kt      |  9 --
 .../com/tddworks/ollama/api/OllamaConfig.kt   |  4 +-
 .../com/tddworks/ollama/api/OllamaModel.kt    | 14 +++
 .../chat/{OllamaChatApi.kt => OllamaChat.kt}  |  2 +-
 .../ollama/api/chat/OllamaChatRequest.kt      | 14 ++-
 .../ollama/api/chat/OllamaChatResponse.kt     | 52 +++++-----
 .../api/chat/internal/DefaultOllamaChatApi.kt |  6 +-
 .../tddworks/ollama/api/internal/OllamaApi.kt | 25 +++++
 .../kotlin/com/tddworks/ollama/di/Koin.kt     | 32 +++++++
 .../tddworks/ollama/api/OllamaConfigTest.kt   | 34 +++++++
 .../tddworks/ollama/api/OllamaModelTest.kt    | 14 +++
 .../com/tddworks/ollama/api/OllamaTest.kt     | 45 +++++++++
 .../ollama/api/chat/OllamaChatResponseTest.kt | 96 +++++++++++++++++++
 .../api/internal/DefaultOllamaChatApiITest.kt | 73 --------------
 .../api/internal/DefaultOllamaChatITest.kt    | 77 +++++++++++++++
 ...hatApiTest.kt => DefaultOllamaChatTest.kt} |  2 +-
 .../kotlin/com/tddworks/openai/di/Koin.kt     |  2 +-
 settings.gradle.kts                           |  3 +
 24 files changed, 435 insertions(+), 162 deletions(-)
 delete mode 100644 ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Model.kt
 delete mode 100644 ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaApi.kt
 create mode 100644 ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaModel.kt
 rename ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/{OllamaChatApi.kt => OllamaChat.kt} (89%)
 create mode 100644 ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/internal/OllamaApi.kt
 create mode 100644 ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaConfigTest.kt
 create mode 100644 ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaModelTest.kt
 create mode 100644 ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaTest.kt
 create mode 100644 ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/chat/OllamaChatResponseTest.kt
 delete mode 100644 ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatApiITest.kt
 create mode 100644 ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatITest.kt
 rename ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/{DefaultOllamaChatApiTest.kt => DefaultOllamaChatTest.kt} (99%)

diff --git a/anthropic-client/anthropic-client-core/src/commonMain/kotlin/com/tddworks/anthropic/di/Koin.kt b/anthropic-client/anthropic-client-core/src/commonMain/kotlin/com/tddworks/anthropic/di/Koin.kt
index 05b55fe..566096a 100644
--- a/anthropic-client/anthropic-client-core/src/commonMain/kotlin/com/tddworks/anthropic/di/Koin.kt
+++ b/anthropic-client/anthropic-client-core/src/commonMain/kotlin/com/tddworks/anthropic/di/Koin.kt
@@ -40,7 +40,7 @@ fun anthropicModules(
single(named("anthropicHttpRequester")) { HttpRequester.default( createHttpClient( - url = config.baseUrl, + host = config.baseUrl, json = get(), ) ) diff --git a/build.gradle.kts b/build.gradle.kts index 1957bbe..cc72186 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -22,6 +22,8 @@ sonatypePortalPublisher { dependencies { kover(projects.openaiClient.openaiClientCore) kover(projects.anthropicClient.anthropicClientCore) + kover(projects.openaiGateway.openaiGatewayCore) + kover(projects.ollamaClient.ollamaClientCore) } val autoVersion = project.property( diff --git a/common/src/jvmMain/kotlin/com/tddworks/common/network/api/ktor/internal/DefaultHttpRequester.jvm.kt b/common/src/jvmMain/kotlin/com/tddworks/common/network/api/ktor/internal/DefaultHttpRequester.jvm.kt index 93b2918..aef1d0d 100644 --- a/common/src/jvmMain/kotlin/com/tddworks/common/network/api/ktor/internal/DefaultHttpRequester.jvm.kt +++ b/common/src/jvmMain/kotlin/com/tddworks/common/network/api/ktor/internal/DefaultHttpRequester.jvm.kt @@ -8,7 +8,7 @@ actual fun HttpRequester.Companion.default( ): HttpRequester { return DefaultHttpRequester( createHttpClient( - url = { url }, + host = { url }, authToken = { token }, json = JsonLenient ) diff --git a/common/src/macosMain/kotlin/com/tddworks/common/network/api/ktor/internal/DefaultHttpRequester.macos.kt b/common/src/macosMain/kotlin/com/tddworks/common/network/api/ktor/internal/DefaultHttpRequester.macos.kt index 403997c..a5cd2fc 100644 --- a/common/src/macosMain/kotlin/com/tddworks/common/network/api/ktor/internal/DefaultHttpRequester.macos.kt +++ b/common/src/macosMain/kotlin/com/tddworks/common/network/api/ktor/internal/DefaultHttpRequester.macos.kt @@ -9,7 +9,7 @@ actual fun HttpRequester.Companion.default( ): HttpRequester { return DefaultHttpRequester( createHttpClient( - url = { url }, + host = { url }, authToken = { token }, json = JsonLenient ) diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Model.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Model.kt deleted file mode 100644 index 414d7a0..0000000 --- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Model.kt +++ /dev/null @@ -1,38 +0,0 @@ -package com.tddworks.ollama.api - -import kotlinx.serialization.Serializable -import kotlin.jvm.JvmInline - -/** - * https://docs.anthropic.com/claude/docs/models-overview - * Claude is a family of state-of-the-art large language models developed by Anthropic. Our models are designed to provide you with the best possible experience when interacting with AI, offering a range of capabilities and performance levels to suit your needs and make it easy to deploy high performing, safe, and steerable models. In this guide, we'll introduce you to our latest and greatest models, the Claude 3 family, as well as our legacy models, which are still available for those who need them. 
- * - */ -@Serializable -@JvmInline -value class Model(val value: String) { - companion object { - /** - * Most powerful model for highly complex tasks - * Max output length: 4096 tokens - * Cost (Input / Output per MTok^) $15.00 / $75.00 - */ - val CLAUDE_3_OPUS = Model("claude-3-opus-20240229") - - /** - * Ideal balance of intelligence and speed for enterprise workloads - * Max output length: 4096 tokens - * Cost (Input / Output per MTok^) $3.00 / $15.00 - */ - val CLAUDE_3_Sonnet = Model("claude-3-sonnet-20240229") - - /** - * Fastest and most compact model for near-instant responsiveness - * Max output length: 4096 tokens - * Cost (Input / Output per MTok^) $0.25 / $1.25 - */ - val CLAUDE_3_HAIKU = Model("claude-3-haiku-20240307") - - val availableModels = listOf(CLAUDE_3_OPUS, CLAUDE_3_Sonnet, CLAUDE_3_HAIKU) - } -} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Ollama.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Ollama.kt index 08ee94c..fc0b76c 100644 --- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Ollama.kt +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Ollama.kt @@ -1,12 +1,49 @@ package com.tddworks.ollama.api +import com.tddworks.ollama.api.chat.OllamaChat +import com.tddworks.ollama.api.internal.OllamaApi + /** - * @author hanrw - * @date 2024/4/14 17:32 + * Interface for interacting with the Ollama API. */ -class Ollama { +interface Ollama : OllamaChat { + companion object { - const val BASE_URL = "https://ollama.com" - const val ANTHROPIC_VERSION = "1.0.0" + const val BASE_URL = "localhost" + const val PORT = 11434 + const val PROTOCOL = "http" } + + /** + * This function returns the base URL as a string. + * + * @return a string representing the base URL + */ + fun baseUrl(): String + + /** + * This function returns the port as an integer. + * + * @return an integer representing the port + */ + fun port(): Int + + /** + * This function returns the protocol as a string. 
+     *
+     * @return a string representing the protocol
+     */
+    fun protocol(): String
+}
+
+fun Ollama(
+    baseUrl: () -> String = { Ollama.BASE_URL },
+    port: () -> Int = { Ollama.PORT },
+    protocol: () -> String = { Ollama.PROTOCOL },
+): Ollama {
+    return OllamaApi(
+        baseUrl = baseUrl(),
+        port = port(),
+        protocol = protocol()
+    )
 }
\ No newline at end of file
diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaApi.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaApi.kt
deleted file mode 100644
index 39a7e59..0000000
--- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaApi.kt
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.tddworks.ollama.api
-
-
-class OllamaApi(
-    private val apiKey: String,
-    private val apiURL: String,
-    private val anthropicVersion: String,
-)
-
diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaConfig.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaConfig.kt
index 84ade41..5176d65 100644
--- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaConfig.kt
+++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaConfig.kt
@@ -3,7 +3,7 @@ package com.tddworks.ollama.api
 import org.koin.core.component.KoinComponent
 
 data class OllamaConfig(
-    val apiKey: () -> String = { "CONFIG_API_KEY" },
     val baseUrl: () -> String = { Ollama.BASE_URL },
-    val ollamaVersion: () -> String = { Ollama.ANTHROPIC_VERSION },
+    val protocol: () -> String = { Ollama.PROTOCOL },
+    val port: () -> Int = { Ollama.PORT },
 ) : KoinComponent
\ No newline at end of file
diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaModel.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaModel.kt
new file mode 100644
index 0000000..afef334
--- /dev/null
+++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/OllamaModel.kt
@@ -0,0 +1,14 @@
+package com.tddworks.ollama.api
+
+import kotlinx.serialization.Serializable
+import kotlin.jvm.JvmInline
+
+@Serializable
+@JvmInline
+value class OllamaModel(val value: String) {
+    companion object {
+        val LLAMA2 = OllamaModel("llama2")
+        val CODE_LLAMA = OllamaModel("codellama")
+        val MISTRAL = OllamaModel("mistral")
+    }
+}
\ No newline at end of file
diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatApi.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChat.kt
similarity index 89%
rename from ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatApi.kt
rename to ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChat.kt
index 85617f0..6928089 100644
--- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatApi.kt
+++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChat.kt
@@ -2,7 +2,7 @@ package com.tddworks.ollama.api.chat
 
 import kotlinx.coroutines.flow.Flow
 
-interface OllamaChatApi {
+interface OllamaChat {
     suspend fun stream(request: OllamaChatRequest): Flow<OllamaChatResponse>
     suspend fun request(request: OllamaChatRequest): OllamaChatResponse
 }
\ No newline at end of file
diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatRequest.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatRequest.kt
index 5a6acec..b755067 100644
--- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatRequest.kt
+++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatRequest.kt
@@ -1,8 +1,10 @@
 package com.tddworks.ollama.api.chat
 
 import com.tddworks.common.network.api.StreamableRequest
+import com.tddworks.common.network.api.StreamableRequest.Companion.STREAM
 import kotlinx.serialization.SerialName
 import kotlinx.serialization.Serializable
+import kotlinx.serialization.json.*
 
 
 @Serializable
@@ -11,9 +13,17 @@ data class OllamaChatRequest(
     @SerialName("messages") val messages: List<OllamaChatMessage>,
     @SerialName("format") val format: String? = null,
//    @SerialName("options") val options: Map? = null,
-//    @SerialName("stream") val stream: Boolean? = null,
     @SerialName("keep_alive") val keepAlive: String? = null,
-) : StreamableRequest
+) : StreamableRequest {
+    fun asNonStreaming(jsonLenient: Json): JsonElement {
+        return jsonLenient.encodeToJsonElement(this)
+            .jsonObject.toMutableMap()
+            .apply {
+                put(STREAM, JsonPrimitive(false))
+            }
+            .let { JsonObject(it) }
+    }
+}
 
 
 @Serializable
diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatResponse.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatResponse.kt
index c28c56a..264ca0b 100644
--- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatResponse.kt
+++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/OllamaChatResponse.kt
@@ -13,22 +13,7 @@ import kotlinx.serialization.Serializable
  *   },
  *   "done": false
  * }
- */
-@Serializable
-data class OllamaChatResponse(
-    @SerialName("model") val model: String,
-    @SerialName("created_at") val createdAt: String,
-    @SerialName("message") val message: OllamaChatMessage? = null,
-    @SerialName("done") val done: Boolean?,
-    @SerialName("total_duration") val totalDuration: Long? = null,
-    @SerialName("load_duration") val loadDuration: Long? = null,
-    @SerialName("prompt_eval_count") val promptEvalCount: Int? = null,
-    @SerialName("prompt_eval_duration") val promptEvalDuration: Long? = null,
-    @SerialName("eval_count") val evalCount: Int? = null,
-    @SerialName("eval_duration") val evalDuration: Long? = null,
-)
-
-/**
+ * ======== final response ========
 * {
 *   "model": "llama2",
 *   "created_at": "2023-08-04T19:22:45.499127Z",
 *   "done": true,
 *   "total_duration": 4883583458,
 *   "load_duration": 1334875,
 *   "prompt_eval_count": 26,
 *   "prompt_eval_duration": 342546000,
 *   "eval_count": 468,
 *   "eval_duration": 7701267000
 * }
+ *
+ * ======= Non-streaming response =======
+ * {
+ *   "model": "llama2",
+ *   "created_at": "2023-12-12T14:13:43.416799Z",
+ *   "message": {
+ *     "role": "assistant",
+ *     "content": "Hello! How are you today?"
+ * }, + * "done": true, + * "total_duration": 5191566416, + * "load_duration": 2154458, + * "prompt_eval_count": 26, + * "prompt_eval_duration": 383809000, + * "eval_count": 298, + * "eval_duration": 4799921000 + * } */ @Serializable -data class FinalOllamaChatResponse( +data class OllamaChatResponse( @SerialName("model") val model: String, @SerialName("created_at") val createdAt: String, - @SerialName("done") val done: Boolean?, - @SerialName("total_duration") val totalDuration: Long?, - @SerialName("load_duration") val loadDuration: Long?, - @SerialName("prompt_eval_count") val promptEvalCount: Int?, - @SerialName("prompt_eval_duration") val promptEvalDuration: Long?, - @SerialName("eval_count") val evalCount: Int?, - @SerialName("eval_duration") val evalDuration: Long?, + @SerialName("message") val message: OllamaChatMessage? = null, + @SerialName("done") val done: Boolean, + // Below are the fields that are for final response or non-streaming response + @SerialName("total_duration") val totalDuration: Long? = null, + @SerialName("load_duration") val loadDuration: Long? = null, + @SerialName("prompt_eval_count") val promptEvalCount: Int? = null, + @SerialName("prompt_eval_duration") val promptEvalDuration: Long? = null, + @SerialName("eval_count") val evalCount: Int? = null, + @SerialName("eval_duration") val evalDuration: Long? = null, ) \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/internal/DefaultOllamaChatApi.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/internal/DefaultOllamaChatApi.kt index 13d6b8a..86f36cf 100644 --- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/internal/DefaultOllamaChatApi.kt +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/chat/internal/DefaultOllamaChatApi.kt @@ -3,7 +3,7 @@ package com.tddworks.ollama.api.chat.internal import com.tddworks.common.network.api.ktor.api.HttpRequester import com.tddworks.common.network.api.ktor.api.performRequest import com.tddworks.common.network.api.ktor.api.streamRequest -import com.tddworks.ollama.api.chat.OllamaChatApi +import com.tddworks.ollama.api.chat.OllamaChat import com.tddworks.ollama.api.chat.OllamaChatRequest import com.tddworks.ollama.api.chat.OllamaChatResponse import io.ktor.client.request.* @@ -14,7 +14,7 @@ import kotlinx.serialization.json.Json class DefaultOllamaChatApi( private val requester: HttpRequester, private val jsonLenient: Json = JsonLenient, -) : OllamaChatApi { +) : OllamaChat { override suspend fun stream(request: OllamaChatRequest): Flow { return requester.streamRequest { method = HttpMethod.Post @@ -33,7 +33,7 @@ class DefaultOllamaChatApi( return requester.performRequest { method = HttpMethod.Post url(path = CHAT_API_PATH) - setBody(request) + setBody(request.asNonStreaming(jsonLenient)) contentType(ContentType.Application.Json) } } diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/internal/OllamaApi.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/internal/OllamaApi.kt new file mode 100644 index 0000000..1e15c46 --- /dev/null +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/internal/OllamaApi.kt @@ -0,0 +1,25 @@ +package com.tddworks.ollama.api.internal + +import com.tddworks.di.getInstance +import com.tddworks.ollama.api.Ollama +import com.tddworks.ollama.api.chat.OllamaChat + +class 
OllamaApi( + private val baseUrl: String, + private val port: Int, + private val protocol: String, +) : Ollama, OllamaChat by getInstance() { + + override fun baseUrl(): String { + return baseUrl + } + + override fun port(): Int { + return port + } + + override fun protocol(): String { + return protocol + } + +} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt index 82e3b61..4269387 100644 --- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt +++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt @@ -1,9 +1,17 @@ package com.tddworks.ollama.di +import com.tddworks.common.network.api.ktor.api.HttpRequester +import com.tddworks.common.network.api.ktor.internal.createHttpClient +import com.tddworks.common.network.api.ktor.internal.default import com.tddworks.di.commonModule import com.tddworks.ollama.api.Ollama import com.tddworks.ollama.api.OllamaConfig +import com.tddworks.ollama.api.chat.OllamaChat +import com.tddworks.ollama.api.chat.internal.DefaultOllamaChatApi +import com.tddworks.ollama.api.chat.internal.JsonLenient +import kotlinx.serialization.json.Json import org.koin.core.context.startKoin +import org.koin.core.qualifier.named import org.koin.dsl.KoinAppDeclaration import org.koin.dsl.module @@ -16,8 +24,32 @@ fun iniOllamaKoin(config: OllamaConfig, appDeclaration: KoinAppDeclaration = {}) fun ollamaModules( config: OllamaConfig, ) = module { + single { Ollama( + baseUrl = config.baseUrl, + port = config.port, + protocol = config.protocol + ) + } + + single(named("ollamaJson")) { JsonLenient } + + single(named("ollamaHttpRequester")) { + HttpRequester.default( + createHttpClient( + protocol = config.protocol, + port = config.port, + host = config.baseUrl, + json = get(named("ollamaJson")), + ) + ) + } + + single { + DefaultOllamaChatApi( + jsonLenient = get(named("ollamaJson")), + requester = get(named("ollamaHttpRequester")) ) } } \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaConfigTest.kt b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaConfigTest.kt new file mode 100644 index 0000000..7ed0719 --- /dev/null +++ b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaConfigTest.kt @@ -0,0 +1,34 @@ +package com.tddworks.ollama.api + +import org.junit.jupiter.api.Assertions.* +import org.junit.jupiter.api.Test + + +class OllamaConfigTest { + + @Test + fun `should return overridden settings`() { + val target = OllamaConfig( + baseUrl = { "some-url" }, + port = { 8080 }, + protocol = { "https" } + ) + + assertEquals("some-url", target.baseUrl()) + + assertEquals(8080, target.port()) + + assertEquals("https", target.protocol()) + } + + @Test + fun `should return default settings`() { + val target = OllamaConfig() + + assertEquals("localhost", target.baseUrl()) + + assertEquals(11434, target.port()) + + assertEquals("http", target.protocol()) + } +} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaModelTest.kt b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaModelTest.kt new file mode 100644 index 0000000..a3b8c8d --- /dev/null +++ 
b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaModelTest.kt @@ -0,0 +1,14 @@ +package com.tddworks.ollama.api + +import org.junit.jupiter.api.Assertions.* +import org.junit.jupiter.api.Test + +class OllamaModelTest { + + @Test + fun `should return correct latest API model name`() { + assertEquals("llama2", OllamaModel.LLAMA2.value) + assertEquals("codellama", OllamaModel.CODE_LLAMA.value) + assertEquals("mistral", OllamaModel.MISTRAL.value) + } +} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaTest.kt b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaTest.kt new file mode 100644 index 0000000..e889c68 --- /dev/null +++ b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/OllamaTest.kt @@ -0,0 +1,45 @@ +package com.tddworks.ollama.api + +import com.tddworks.di.getInstance +import com.tddworks.ollama.di.iniOllamaKoin +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.koin.test.junit5.AutoCloseKoinTest + +class OllamaTestTest : AutoCloseKoinTest() { + + @BeforeEach + fun setUp() { + iniOllamaKoin( + config = OllamaConfig( + baseUrl = { "127.0.0.1" }, + port = { 8080 }, + protocol = { "https" } + ) + ) + } + + @Test + fun `should return overridden settings`() { + val target = getInstance() + + assertEquals("127.0.0.1", target.baseUrl()) + + assertEquals(8080, target.port()) + + assertEquals("https", target.protocol()) + } + + @Test + fun `should return default settings`() { + val target = Ollama() + + assertEquals("localhost", target.baseUrl()) + + assertEquals(11434, target.port()) + + assertEquals("http", target.protocol()) + } + +} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/chat/OllamaChatResponseTest.kt b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/chat/OllamaChatResponseTest.kt new file mode 100644 index 0000000..0e0580e --- /dev/null +++ b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/chat/OllamaChatResponseTest.kt @@ -0,0 +1,96 @@ +package com.tddworks.ollama.api.chat + +import com.tddworks.ollama.api.chat.internal.JsonLenient +import org.junit.jupiter.api.Assertions.* +import org.junit.jupiter.api.Test + +class OllamaChatResponseTest { + @Test + fun `should decode response to non-streaming OllamaChatResponse`() { + val response = """ + { + "model": "registry.ollama.ai/library/llama2:latest", + "created_at": "2023-12-12T14:13:43.416799Z", + "message": { + "role": "assistant", + "content": "Hello! How are you today?" + }, + "done": true, + "total_duration": 5191566416, + "load_duration": 2154458, + "prompt_eval_count": 26, + "prompt_eval_duration": 383809000, + "eval_count": 298, + "eval_duration": 4799921000 + } + """.trimIndent() + + JsonLenient.decodeFromString(response).apply { + assertEquals("registry.ollama.ai/library/llama2:latest", model) + assertEquals("2023-12-12T14:13:43.416799Z", createdAt) + assertEquals("assistant", message?.role) + assertEquals("Hello! 
How are you today?", message?.content) + assertTrue(done) + assertEquals(5191566416, totalDuration) + assertEquals(2154458, loadDuration) + assertEquals(26, promptEvalCount) + assertEquals(383809000, promptEvalDuration) + assertEquals(298, evalCount) + assertEquals(4799921000, evalDuration) + } + } + + @Test + fun `should decode response to Final OllamaChatResponse`() { + val response = """ + { + "model": "llama2", + "created_at": "2023-08-04T19:22:45.499127Z", + "done": true, + "total_duration": 4883583458, + "load_duration": 1334875, + "prompt_eval_count": 26, + "prompt_eval_duration": 342546000, + "eval_count": 282, + "eval_duration": 4535599000 + } + """.trimIndent() + + JsonLenient.decodeFromString(response).apply { + assertEquals("llama2", model) + assertEquals("2023-08-04T19:22:45.499127Z", createdAt) + assertTrue(done) + assertEquals(4883583458, totalDuration) + assertEquals(1334875, loadDuration) + assertEquals(26, promptEvalCount) + assertEquals(342546000, promptEvalDuration) + assertEquals(282, evalCount) + assertEquals(4535599000, evalDuration) + } + } + + @Test + fun `should decode response to OllamaChatResponse`() { + val response = """ + { + "model": "llama2", + "created_at": "2023-08-04T08:52:19.385406455-07:00", + "message": { + "role": "assistant", + "content": "The", + "images": null + }, + "done": false + } + """.trimIndent() + + JsonLenient.decodeFromString(response).apply { + assertEquals("llama2", model) + assertEquals("2023-08-04T08:52:19.385406455-07:00", createdAt) + assertEquals("assistant", message?.role) + assertEquals("The", message?.content) + assertNull(message?.images) + assertFalse(done) + } + } +} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatApiITest.kt b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatApiITest.kt deleted file mode 100644 index f855832..0000000 --- a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatApiITest.kt +++ /dev/null @@ -1,73 +0,0 @@ -package com.tddworks.ollama.api.internal - -import com.tddworks.common.network.api.ktor.internal.DefaultHttpRequester -import com.tddworks.common.network.api.ktor.internal.createHttpClient -import com.tddworks.di.initKoin -import com.tddworks.ollama.api.Ollama -import com.tddworks.ollama.api.chat.OllamaChatMessage -import com.tddworks.ollama.api.chat.OllamaChatRequest -import com.tddworks.ollama.api.chat.internal.DefaultOllamaChatApi -import com.tddworks.ollama.api.chat.internal.JsonLenient -import kotlinx.coroutines.test.runTest -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test -import org.koin.test.junit5.AutoCloseKoinTest - -class DefaultOllamaChatApiITest : AutoCloseKoinTest() { - - @BeforeEach - fun setUp() { - initKoin() - } - - - @Test - fun `should return correct base url`() { - assertEquals("api.anthropic.com", Ollama.BASE_URL) - } - - - @Test - fun `should return stream response`() = runTest { - val ollamaChatApi = DefaultOllamaChatApi( - requester = DefaultHttpRequester( - createHttpClient( - url = { "localhost" }, - json = JsonLenient, - ) - ) - ) - - ollamaChatApi.stream( - OllamaChatRequest( - model = "llama2", - messages = listOf( - OllamaChatMessage( - role = "user", - content = "hello" - ) - ) - ) - ).collect { - println("stream response: $it") - } - } - -// @Test -// fun `should return create response`() 
= runTest { -// //Client request(POST https://klaude.asusual.life/v1/messages) invalid: 401 Unauthorized. Text: "{"type":"error","error":{"type":"authentication_error","message":"invalid x-api-key"}}" -// //Client request(POST https://klaude.asusual.life/v1/messages) invalid: 400 Bad Request. Text: "{"type":"error","error":{"type":"invalid_request_error","message":"anthropic-version: header is required"}}" -// val anthropic = getInstance() -// -// val r = anthropic.create( -// CreateMessageRequest( -// messages = listOf(Message.user("hello")), -// maxTokens = 1024, -// model = Model.CLAUDE_3_HAIKU -// ) -// ) -// -// assertNotNull(r.content[0].text) -// } -} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatITest.kt b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatITest.kt new file mode 100644 index 0000000..ca7fb49 --- /dev/null +++ b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatITest.kt @@ -0,0 +1,77 @@ +package com.tddworks.ollama.api.internal + +import com.tddworks.di.getInstance +import com.tddworks.ollama.api.Ollama +import com.tddworks.ollama.api.OllamaConfig +import com.tddworks.ollama.api.chat.OllamaChatMessage +import com.tddworks.ollama.api.chat.OllamaChatRequest +import com.tddworks.ollama.di.iniOllamaKoin +import kotlinx.coroutines.test.runTest +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test +import org.koin.test.junit5.AutoCloseKoinTest + +@Disabled +class DefaultOllamaChatITest : AutoCloseKoinTest() { + + @BeforeEach + fun setUp() { + iniOllamaKoin( + config = OllamaConfig( + protocol = { "http" }, + baseUrl = { "localhost" }, + port = { 11434 } + ) + ) + } + + + @Test + fun `should return correct base url`() { + assertEquals("localhost", Ollama.BASE_URL) + } + + + @Test + fun `should return stream response`() = runTest { + val ollama = getInstance() + + ollama.stream( + OllamaChatRequest( + model = "llama2", + messages = listOf( + OllamaChatMessage( + role = "user", + content = "hello" + ) + ) + ) + ).collect { + println("stream response: $it") + } + } + + @Test + fun `should return create response`() = runTest { + val ollama = getInstance() + + val r = ollama.request( + OllamaChatRequest( + model = "llama2", + messages = listOf( + OllamaChatMessage( + role = "user", + content = "hello" + ) + ) + ) + ) + + println("create response: $r") + + assertNotNull(r.message?.content) + } +} \ No newline at end of file diff --git a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatApiTest.kt b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatTest.kt similarity index 99% rename from ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatApiTest.kt rename to ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatTest.kt index e6b43e4..bb45666 100644 --- a/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatApiTest.kt +++ b/ollama-client/ollama-client-core/src/jvmTest/kotlin/com/tddworks/ollama/api/internal/DefaultOllamaChatTest.kt @@ -30,7 +30,7 @@ class 
TestKoinCoroutineExtension(private val testDispatcher: TestDispatcher = St } } -class DefaultOllamaChatApiTest : KoinTest { +class DefaultOllamaChatTest : KoinTest { @JvmField @RegisterExtension // This extension is used to set the main dispatcher to a test dispatcher diff --git a/openai-client/openai-client-core/src/commonMain/kotlin/com/tddworks/openai/di/Koin.kt b/openai-client/openai-client-core/src/commonMain/kotlin/com/tddworks/openai/di/Koin.kt index a6bbf18..9b3ac27 100644 --- a/openai-client/openai-client-core/src/commonMain/kotlin/com/tddworks/openai/di/Koin.kt +++ b/openai-client/openai-client-core/src/commonMain/kotlin/com/tddworks/openai/di/Koin.kt @@ -34,7 +34,7 @@ fun openAIModules( single(named("openAIHttpRequester")) { HttpRequester.default( createHttpClient( - url = config.baseUrl, + host = config.baseUrl, authToken = config.apiKey, // get from commonModule json = get(), diff --git a/settings.gradle.kts b/settings.gradle.kts index c6a0788..df18582 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -32,4 +32,7 @@ include(":anthropic-client:anthropic-client-core") include(":openai-gateway") include(":openai-gateway:openai-gateway-core") +include(":ollama-client") +include(":ollama-client:ollama-client-core") + //include(":gemini-client")
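
Usage sketch (not part of the diff): a minimal example of how the client added by this patch is expected to be wired up and called, based only on the APIs introduced above (iniOllamaKoin, OllamaConfig, getInstance, OllamaChatRequest, OllamaChatMessage). The runBlocking wrapper, main function, and the explicit localhost/11434 values are illustrative assumptions, not code from this change.

import com.tddworks.di.getInstance
import com.tddworks.ollama.api.Ollama
import com.tddworks.ollama.api.OllamaConfig
import com.tddworks.ollama.api.chat.OllamaChatMessage
import com.tddworks.ollama.api.chat.OllamaChatRequest
import com.tddworks.ollama.di.iniOllamaKoin
import kotlinx.coroutines.runBlocking

fun main() = runBlocking {
    // Wire up the client against a local Ollama server (defaults made explicit here).
    iniOllamaKoin(
        config = OllamaConfig(
            protocol = { "http" },
            baseUrl = { "localhost" },
            port = { 11434 }
        )
    )

    val ollama = getInstance<Ollama>()

    val request = OllamaChatRequest(
        model = "llama2",
        messages = listOf(OllamaChatMessage(role = "user", content = "hello"))
    )

    // Non-streaming call: returns a single OllamaChatResponse.
    val response = ollama.request(request)
    println(response.message?.content)

    // Streaming call: emits partial OllamaChatResponse chunks until done is true.
    ollama.stream(request).collect { chunk ->
        print(chunk.message?.content.orEmpty())
    }
}

The same flow is exercised by DefaultOllamaChatITest in this patch; request() forces stream=false via OllamaChatRequest.asNonStreaming, while stream() consumes the chunked streaming responses.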