From 2023a56c35b70690656b2e645f128fbd6fc0a01f Mon Sep 17 00:00:00 2001 From: Georgios Andrianakis Date: Mon, 11 Nov 2024 12:37:04 +0200 Subject: [PATCH] Introduce a global temperature property This makes it easier to have a single configuration that does not need to change when the LLM provider changes (in the same way as we do for timeout). Originally reported at: https://quarkusio.zulipchat.com/#narrow/channel/187038-dev/topic/temperature.20not.20honored.20.3F --- .../runtime/config/LangChain4jConfig.java | 6 ++++++ .../quarkus-langchain4j-azure-openai.adoc | 4 ++-- ...hain4j-azure-openai_quarkus.langchain4j.adoc | 4 ++-- .../includes/quarkus-langchain4j-core.adoc | 17 +++++++++++++++++ ...us-langchain4j-core_quarkus.langchain4j.adoc | 17 +++++++++++++++++ .../quarkus-langchain4j-hugging-face.adoc | 4 ++-- ...hain4j-hugging-face_quarkus.langchain4j.adoc | 4 ++-- .../includes/quarkus-langchain4j-ollama.adoc | 16 ++++++++-------- ...-langchain4j-ollama_quarkus.langchain4j.adoc | 16 ++++++++-------- .../includes/quarkus-langchain4j-openai.adoc | 4 ++-- ...-langchain4j-openai_quarkus.langchain4j.adoc | 4 ++-- .../quarkus-langchain4j-vertex-ai-gemini.adoc | 4 ++-- ...4j-vertex-ai-gemini_quarkus.langchain4j.adoc | 4 ++-- .../includes/quarkus-langchain4j-vertex-ai.adoc | 4 ++-- ...ngchain4j-vertex-ai_quarkus.langchain4j.adoc | 4 ++-- .../includes/quarkus-langchain4j-watsonx.adoc | 8 ++++---- ...langchain4j-watsonx_quarkus.langchain4j.adoc | 8 ++++---- .../runtime/config/ChatModelConfig.java | 1 + .../runtime/config/ChatModelConfig.java | 2 +- .../jlama/runtime/config/ChatModelConfig.java | 2 ++ .../llama3/runtime/config/ChatModelConfig.java | 9 +++++++-- .../runtime/config/ChatModelConfig.java | 1 + .../ollama/runtime/config/ChatModelConfig.java | 2 +- .../runtime/config/EmbeddingModelConfig.java | 2 +- .../openai/runtime/config/ChatModelConfig.java | 2 +- .../openai/runtime/config/ChatModelConfig.java | 2 +- .../runtime/gemini/VertexAiGeminiRecorder.java | 2 +- .../runtime/gemini/config/ChatModelConfig.java | 2 +- .../runtime/config/ChatModelConfig.java | 2 +- .../watsonx/runtime/config/ChatModelConfig.java | 2 +- .../runtime/config/GenerationModelConfig.java | 2 +- 31 files changed, 105 insertions(+), 56 deletions(-) diff --git a/core/runtime/src/main/java/io/quarkiverse/langchain4j/runtime/config/LangChain4jConfig.java b/core/runtime/src/main/java/io/quarkiverse/langchain4j/runtime/config/LangChain4jConfig.java index e73035640..b2a69408f 100644 --- a/core/runtime/src/main/java/io/quarkiverse/langchain4j/runtime/config/LangChain4jConfig.java +++ b/core/runtime/src/main/java/io/quarkiverse/langchain4j/runtime/config/LangChain4jConfig.java @@ -4,6 +4,7 @@ import java.time.Duration; import java.util.Optional; +import java.util.OptionalDouble; import io.quarkus.runtime.annotations.ConfigDocDefault; import io.quarkus.runtime.annotations.ConfigRoot; @@ -31,6 +32,11 @@ public interface LangChain4jConfig { @ConfigDocDefault("10s") Optional timeout(); + /** + * Global temperature for LLM APIs + */ + OptionalDouble temperature(); + /** * Guardrails configuration */ diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-azure-openai.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-azure-openai.adoc index b33899acc..aea19ddc0 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-azure-openai.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-azure-openai.adoc @@ -371,7 +371,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AZURE_OPENAI_CHAT_MODEL_TEMPERATUR 
endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-chat-model-top-p[`quarkus.langchain4j.azure-openai.chat-model.top-p`]## @@ -1166,7 +1166,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AZURE_OPENAI__MODEL_NAME__CHAT_MOD endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-model-name-chat-model-top-p[`quarkus.langchain4j.azure-openai."model-name".chat-model.top-p`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-azure-openai_quarkus.langchain4j.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-azure-openai_quarkus.langchain4j.adoc index b33899acc..aea19ddc0 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-azure-openai_quarkus.langchain4j.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-azure-openai_quarkus.langchain4j.adoc @@ -371,7 +371,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AZURE_OPENAI_CHAT_MODEL_TEMPERATUR endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-chat-model-top-p[`quarkus.langchain4j.azure-openai.chat-model.top-p`]## @@ -1166,7 +1166,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AZURE_OPENAI__MODEL_NAME__CHAT_MOD endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-model-name-chat-model-top-p[`quarkus.langchain4j.azure-openai."model-name".chat-model.top-p`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-core.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-core.adoc index ec8cd6501..81bc3709b 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-core.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-core.adoc @@ -186,6 +186,23 @@ endif::add-copy-button-to-env-var[] |link:https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/time/Duration.html[Duration] link:#duration-note-anchor-{summaryTableId}[icon:question-circle[title=More information about the Duration format]] |`10s` +a| [[quarkus-langchain4j-core_quarkus-langchain4j-temperature]] [.property-path]##link:#quarkus-langchain4j-core_quarkus-langchain4j-temperature[`quarkus.langchain4j.temperature`]## + +[.description] +-- +Global temperature for LLM APIs + + +ifdef::add-copy-button-to-env-var[] +Environment variable: env_var_with_copy_button:+++QUARKUS_LANGCHAIN4J_TEMPERATURE+++[] +endif::add-copy-button-to-env-var[] +ifndef::add-copy-button-to-env-var[] +Environment variable: `+++QUARKUS_LANGCHAIN4J_TEMPERATURE+++` +endif::add-copy-button-to-env-var[] +-- +|double +| + a| [[quarkus-langchain4j-core_quarkus-langchain4j-guardrails-max-retries]] 
[.property-path]##link:#quarkus-langchain4j-core_quarkus-langchain4j-guardrails-max-retries[`quarkus.langchain4j.guardrails.max-retries`]## [.description] diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-core_quarkus.langchain4j.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-core_quarkus.langchain4j.adoc index ec8cd6501..81bc3709b 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-core_quarkus.langchain4j.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-core_quarkus.langchain4j.adoc @@ -186,6 +186,23 @@ endif::add-copy-button-to-env-var[] |link:https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/time/Duration.html[Duration] link:#duration-note-anchor-{summaryTableId}[icon:question-circle[title=More information about the Duration format]] |`10s` +a| [[quarkus-langchain4j-core_quarkus-langchain4j-temperature]] [.property-path]##link:#quarkus-langchain4j-core_quarkus-langchain4j-temperature[`quarkus.langchain4j.temperature`]## + +[.description] +-- +Global temperature for LLM APIs + + +ifdef::add-copy-button-to-env-var[] +Environment variable: env_var_with_copy_button:+++QUARKUS_LANGCHAIN4J_TEMPERATURE+++[] +endif::add-copy-button-to-env-var[] +ifndef::add-copy-button-to-env-var[] +Environment variable: `+++QUARKUS_LANGCHAIN4J_TEMPERATURE+++` +endif::add-copy-button-to-env-var[] +-- +|double +| + a| [[quarkus-langchain4j-core_quarkus-langchain4j-guardrails-max-retries]] [.property-path]##link:#quarkus-langchain4j-core_quarkus-langchain4j-guardrails-max-retries[`quarkus.langchain4j.guardrails.max-retries`]## [.description] diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-hugging-face.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-hugging-face.adoc index 55dcbb5d8..f43a15bcc 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-hugging-face.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-hugging-face.adoc @@ -129,7 +129,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_HUGGINGFACE_CHAT_MODEL_TEMPERATURE endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-chat-model-max-new-tokens]] [.property-path]##link:#quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-chat-model-max-new-tokens[`quarkus.langchain4j.huggingface.chat-model.max-new-tokens`]## @@ -447,7 +447,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_HUGGINGFACE__MODEL_NAME__CHAT_MODE endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-model-name-chat-model-max-new-tokens]] [.property-path]##link:#quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-model-name-chat-model-max-new-tokens[`quarkus.langchain4j.huggingface."model-name".chat-model.max-new-tokens`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-hugging-face_quarkus.langchain4j.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-hugging-face_quarkus.langchain4j.adoc index 55dcbb5d8..f43a15bcc 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-hugging-face_quarkus.langchain4j.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-hugging-face_quarkus.langchain4j.adoc @@ -129,7 +129,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_HUGGINGFACE_CHAT_MODEL_TEMPERATURE endif::add-copy-button-to-env-var[] -- |double -|`1.0` 
+|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-chat-model-max-new-tokens]] [.property-path]##link:#quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-chat-model-max-new-tokens[`quarkus.langchain4j.huggingface.chat-model.max-new-tokens`]## @@ -447,7 +447,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_HUGGINGFACE__MODEL_NAME__CHAT_MODE endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-model-name-chat-model-max-new-tokens]] [.property-path]##link:#quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-model-name-chat-model-max-new-tokens[`quarkus.langchain4j.huggingface."model-name".chat-model.max-new-tokens`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama.adoc index 736d39089..f06920732 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama.adoc @@ -46,7 +46,7 @@ a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-lang [.description] -- -Model to use. According to link:https://github.com/jmorganca/ollama/blob/main/docs/api.md#model-names[Ollama docs], the default value is `llama3` +Model to use ifdef::add-copy-button-to-env-var[] @@ -57,7 +57,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_CHAT_MODEL_MODEL_ID+++` endif::add-copy-button-to-env-var[] -- |string -|`llama3.1` +|`llama3.2` a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-model-id]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-model-id[`quarkus.langchain4j.ollama.embedding-model.model-id`]## @@ -193,7 +193,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_CHAT_MODEL_TEMPERATURE+++` endif::add-copy-button-to-env-var[] -- |double -|`0.8` +|`${quarkus.langchain4j.temperature:0.8}` a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-chat-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-chat-model-num-predict[`quarkus.langchain4j.ollama.chat-model.num-predict`]## @@ -352,7 +352,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_EMBEDDING_MODEL_TEMPERATURE endif::add-copy-button-to-env-var[] -- |double -|`0.8` +|`${quarkus.langchain4j.temperature:0.8}` a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-num-predict[`quarkus.langchain4j.ollama.embedding-model.num-predict`]## @@ -464,7 +464,7 @@ a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-lang [.description] -- -Model to use. 
According to link:https://github.com/jmorganca/ollama/blob/main/docs/api.md#model-names[Ollama docs], the default value is `llama3` +Model to use ifdef::add-copy-button-to-env-var[] @@ -475,7 +475,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__CHAT_MODEL_MOD endif::add-copy-button-to-env-var[] -- |string -|`llama3.1` +|`llama3.2` a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-model-id]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-model-id[`quarkus.langchain4j.ollama."model-name".embedding-model.model-id`]## @@ -611,7 +611,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__CHAT_MODEL_TEM endif::add-copy-button-to-env-var[] -- |double -|`0.8` +|`${quarkus.langchain4j.temperature:0.8}` a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-chat-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-chat-model-num-predict[`quarkus.langchain4j.ollama."model-name".chat-model.num-predict`]## @@ -770,7 +770,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__EMBEDDING_MODE endif::add-copy-button-to-env-var[] -- |double -|`0.8` +|`${quarkus.langchain4j.temperature:0.8}` a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-num-predict[`quarkus.langchain4j.ollama."model-name".embedding-model.num-predict`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama_quarkus.langchain4j.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama_quarkus.langchain4j.adoc index 736d39089..f06920732 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama_quarkus.langchain4j.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama_quarkus.langchain4j.adoc @@ -46,7 +46,7 @@ a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-lang [.description] -- -Model to use. 
According to link:https://github.com/jmorganca/ollama/blob/main/docs/api.md#model-names[Ollama docs], the default value is `llama3` +Model to use ifdef::add-copy-button-to-env-var[] @@ -57,7 +57,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_CHAT_MODEL_MODEL_ID+++` endif::add-copy-button-to-env-var[] -- |string -|`llama3.1` +|`llama3.2` a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-model-id]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-model-id[`quarkus.langchain4j.ollama.embedding-model.model-id`]## @@ -193,7 +193,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_CHAT_MODEL_TEMPERATURE+++` endif::add-copy-button-to-env-var[] -- |double -|`0.8` +|`${quarkus.langchain4j.temperature:0.8}` a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-chat-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-chat-model-num-predict[`quarkus.langchain4j.ollama.chat-model.num-predict`]## @@ -352,7 +352,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_EMBEDDING_MODEL_TEMPERATURE endif::add-copy-button-to-env-var[] -- |double -|`0.8` +|`${quarkus.langchain4j.temperature:0.8}` a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-num-predict[`quarkus.langchain4j.ollama.embedding-model.num-predict`]## @@ -464,7 +464,7 @@ a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-lang [.description] -- -Model to use. According to link:https://github.com/jmorganca/ollama/blob/main/docs/api.md#model-names[Ollama docs], the default value is `llama3` +Model to use ifdef::add-copy-button-to-env-var[] @@ -475,7 +475,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__CHAT_MODEL_MOD endif::add-copy-button-to-env-var[] -- |string -|`llama3.1` +|`llama3.2` a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-model-id]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-model-id[`quarkus.langchain4j.ollama."model-name".embedding-model.model-id`]## @@ -611,7 +611,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__CHAT_MODEL_TEM endif::add-copy-button-to-env-var[] -- |double -|`0.8` +|`${quarkus.langchain4j.temperature:0.8}` a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-chat-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-chat-model-num-predict[`quarkus.langchain4j.ollama."model-name".chat-model.num-predict`]## @@ -770,7 +770,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__EMBEDDING_MODE endif::add-copy-button-to-env-var[] -- |double -|`0.8` +|`${quarkus.langchain4j.temperature:0.8}` a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-num-predict[`quarkus.langchain4j.ollama."model-name".embedding-model.num-predict`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-openai.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-openai.adoc index b70634da0..388d31c78 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-openai.adoc +++ 
b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-openai.adoc @@ -312,7 +312,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OPENAI_CHAT_MODEL_TEMPERATURE+++` endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-openai_quarkus-langchain4j-openai-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-openai_quarkus-langchain4j-openai-chat-model-top-p[`quarkus.langchain4j.openai.chat-model.top-p`]## @@ -1014,7 +1014,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OPENAI__MODEL_NAME__CHAT_MODEL_TEM endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-openai_quarkus-langchain4j-openai-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-openai_quarkus-langchain4j-openai-model-name-chat-model-top-p[`quarkus.langchain4j.openai."model-name".chat-model.top-p`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-openai_quarkus.langchain4j.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-openai_quarkus.langchain4j.adoc index b70634da0..388d31c78 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-openai_quarkus.langchain4j.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-openai_quarkus.langchain4j.adoc @@ -312,7 +312,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OPENAI_CHAT_MODEL_TEMPERATURE+++` endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-openai_quarkus-langchain4j-openai-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-openai_quarkus-langchain4j-openai-chat-model-top-p[`quarkus.langchain4j.openai.chat-model.top-p`]## @@ -1014,7 +1014,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OPENAI__MODEL_NAME__CHAT_MODEL_TEM endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-openai_quarkus-langchain4j-openai-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-openai_quarkus-langchain4j-openai-model-name-chat-model-top-p[`quarkus.langchain4j.openai."model-name".chat-model.top-p`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini.adoc index 0ec763e78..956fcfeda 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini.adoc @@ -203,7 +203,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI_CHAT_MODEL_TEMPERA endif::add-copy-button-to-env-var[] -- |double -|`0.0` +|`${quarkus.langchain4j.temperature}` a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.gemini.chat-model.max-output-tokens`]## @@ -505,7 +505,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI__MODEL_NAME__CHAT_ endif::add-copy-button-to-env-var[] -- |double -|`0.0` +|`${quarkus.langchain4j.temperature}` a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-max-output-tokens]] 
[.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.gemini."model-name".chat-model.max-output-tokens`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini_quarkus.langchain4j.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini_quarkus.langchain4j.adoc index 0ec763e78..956fcfeda 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini_quarkus.langchain4j.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini_quarkus.langchain4j.adoc @@ -203,7 +203,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI_CHAT_MODEL_TEMPERA endif::add-copy-button-to-env-var[] -- |double -|`0.0` +|`${quarkus.langchain4j.temperature}` a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.gemini.chat-model.max-output-tokens`]## @@ -505,7 +505,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI__MODEL_NAME__CHAT_ endif::add-copy-button-to-env-var[] -- |double -|`0.0` +|`${quarkus.langchain4j.temperature}` a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.gemini."model-name".chat-model.max-output-tokens`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai.adoc index 42595c6e2..09ba83b00 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai.adoc @@ -178,7 +178,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_CHAT_MODEL_TEMPERATURE+++ endif::add-copy-button-to-env-var[] -- |double -|`0.0` +|`${quarkus.langchain4j.temperature:0.0}` a| [[quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.chat-model.max-output-tokens`]## @@ -428,7 +428,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI__MODEL_NAME__CHAT_MODEL_T endif::add-copy-button-to-env-var[] -- |double -|`0.0` +|`${quarkus.langchain4j.temperature:0.0}` a| [[quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-model-name-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-model-name-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai."model-name".chat-model.max-output-tokens`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai_quarkus.langchain4j.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai_quarkus.langchain4j.adoc index 42595c6e2..09ba83b00 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai_quarkus.langchain4j.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai_quarkus.langchain4j.adoc @@ -178,7 +178,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_CHAT_MODEL_TEMPERATURE+++ endif::add-copy-button-to-env-var[] -- 
|double -|`0.0` +|`${quarkus.langchain4j.temperature:0.0}` a| [[quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.chat-model.max-output-tokens`]## @@ -428,7 +428,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI__MODEL_NAME__CHAT_MODEL_T endif::add-copy-button-to-env-var[] -- |double -|`0.0` +|`${quarkus.langchain4j.temperature:0.0}` a| [[quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-model-name-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-model-name-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai."model-name".chat-model.max-output-tokens`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-watsonx.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-watsonx.adoc index 958dca0b9..682618304 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-watsonx.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-watsonx.adoc @@ -435,7 +435,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_WATSONX_CHAT_MODEL_TEMPERATURE+++` endif::add-copy-button-to-env-var[] -- |double -|`1` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-chat-model-top-p[`quarkus.langchain4j.watsonx.chat-model.top-p`]## @@ -682,7 +682,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_WATSONX_GENERATION_MODEL_TEMPERATU endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-generation-model-top-k]] [.property-path]##link:#quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-generation-model-top-k[`quarkus.langchain4j.watsonx.generation-model.top-k`]## @@ -1347,7 +1347,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_WATSONX__MODEL_NAME__CHAT_MODEL_TE endif::add-copy-button-to-env-var[] -- |double -|`1` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-model-name-chat-model-top-p[`quarkus.langchain4j.watsonx."model-name".chat-model.top-p`]## @@ -1594,7 +1594,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_WATSONX__MODEL_NAME__GENERATION_MO endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-model-name-generation-model-top-k]] [.property-path]##link:#quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-model-name-generation-model-top-k[`quarkus.langchain4j.watsonx."model-name".generation-model.top-k`]## diff --git a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-watsonx_quarkus.langchain4j.adoc b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-watsonx_quarkus.langchain4j.adoc index 958dca0b9..682618304 100644 --- a/docs/modules/ROOT/pages/includes/quarkus-langchain4j-watsonx_quarkus.langchain4j.adoc +++ b/docs/modules/ROOT/pages/includes/quarkus-langchain4j-watsonx_quarkus.langchain4j.adoc @@ -435,7 +435,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_WATSONX_CHAT_MODEL_TEMPERATURE+++` endif::add-copy-button-to-env-var[] -- |double -|`1` 
+|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-chat-model-top-p[`quarkus.langchain4j.watsonx.chat-model.top-p`]## @@ -682,7 +682,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_WATSONX_GENERATION_MODEL_TEMPERATU endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-generation-model-top-k]] [.property-path]##link:#quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-generation-model-top-k[`quarkus.langchain4j.watsonx.generation-model.top-k`]## @@ -1347,7 +1347,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_WATSONX__MODEL_NAME__CHAT_MODEL_TE endif::add-copy-button-to-env-var[] -- |double -|`1` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-model-name-chat-model-top-p[`quarkus.langchain4j.watsonx."model-name".chat-model.top-p`]## @@ -1594,7 +1594,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_WATSONX__MODEL_NAME__GENERATION_MO endif::add-copy-button-to-env-var[] -- |double -|`1.0` +|`${quarkus.langchain4j.temperature:1.0}` a| [[quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-model-name-generation-model-top-k]] [.property-path]##link:#quarkus-langchain4j-watsonx_quarkus-langchain4j-watsonx-model-name-generation-model-top-k[`quarkus.langchain4j.watsonx."model-name".generation-model.top-k`]## diff --git a/model-providers/anthropic/runtime/src/main/java/io/quarkiverse/langchain4j/anthropic/runtime/config/ChatModelConfig.java b/model-providers/anthropic/runtime/src/main/java/io/quarkiverse/langchain4j/anthropic/runtime/config/ChatModelConfig.java index beb632323..6cb143b68 100644 --- a/model-providers/anthropic/runtime/src/main/java/io/quarkiverse/langchain4j/anthropic/runtime/config/ChatModelConfig.java +++ b/model-providers/anthropic/runtime/src/main/java/io/quarkiverse/langchain4j/anthropic/runtime/config/ChatModelConfig.java @@ -23,6 +23,7 @@ public interface ChatModelConfig { * It is generally recommended to set this or the {@code top-k} property but not both. */ @ConfigDocDefault("0.7") + @WithDefault("${quarkus.langchain4j.temperature}") OptionalDouble temperature(); /** diff --git a/model-providers/hugging-face/runtime/src/main/java/io/quarkiverse/langchain4j/huggingface/runtime/config/ChatModelConfig.java b/model-providers/hugging-face/runtime/src/main/java/io/quarkiverse/langchain4j/huggingface/runtime/config/ChatModelConfig.java index 98212d599..4a10eb3d1 100644 --- a/model-providers/hugging-face/runtime/src/main/java/io/quarkiverse/langchain4j/huggingface/runtime/config/ChatModelConfig.java +++ b/model-providers/hugging-face/runtime/src/main/java/io/quarkiverse/langchain4j/huggingface/runtime/config/ChatModelConfig.java @@ -31,7 +31,7 @@ public interface ChatModelConfig { * Float (0.0-100.0). The temperature of the sampling operation. 
1 means regular sampling, 0 means always take the highest * score, 100.0 is getting closer to uniform probability */ - @WithDefault("1.0") + @WithDefault("${quarkus.langchain4j.temperature:1.0}") Double temperature(); /** diff --git a/model-providers/jlama/runtime/src/main/java/io/quarkiverse/langchain4j/jlama/runtime/config/ChatModelConfig.java b/model-providers/jlama/runtime/src/main/java/io/quarkiverse/langchain4j/jlama/runtime/config/ChatModelConfig.java index 2c85042c0..79bdee384 100644 --- a/model-providers/jlama/runtime/src/main/java/io/quarkiverse/langchain4j/jlama/runtime/config/ChatModelConfig.java +++ b/model-providers/jlama/runtime/src/main/java/io/quarkiverse/langchain4j/jlama/runtime/config/ChatModelConfig.java @@ -5,6 +5,7 @@ import io.quarkus.runtime.annotations.ConfigDocDefault; import io.quarkus.runtime.annotations.ConfigGroup; +import io.smallrye.config.WithDefault; @ConfigGroup public interface ChatModelConfig { @@ -18,6 +19,7 @@ public interface ChatModelConfig { * both. */ @ConfigDocDefault("0.3f") + @WithDefault("${quarkus.langchain4j.temperature}") OptionalDouble temperature(); /** diff --git a/model-providers/llama3-java/runtime/src/main/java/io/quarkiverse/langchain4j/llama3/runtime/config/ChatModelConfig.java b/model-providers/llama3-java/runtime/src/main/java/io/quarkiverse/langchain4j/llama3/runtime/config/ChatModelConfig.java index 6953c3dd7..5113b33b3 100644 --- a/model-providers/llama3-java/runtime/src/main/java/io/quarkiverse/langchain4j/llama3/runtime/config/ChatModelConfig.java +++ b/model-providers/llama3-java/runtime/src/main/java/io/quarkiverse/langchain4j/llama3/runtime/config/ChatModelConfig.java @@ -3,19 +3,24 @@ import java.util.OptionalDouble; import java.util.OptionalInt; +import io.quarkus.runtime.annotations.ConfigDocDefault; import io.quarkus.runtime.annotations.ConfigGroup; +import io.smallrye.config.WithDefault; @ConfigGroup public interface ChatModelConfig { /** - * TODO + * Temperature in the range [0, inf) */ + @ConfigDocDefault("0.1") + @WithDefault("${quarkus.langchain4j.temperature}") OptionalDouble temperature(); /** - * TODO + * Number of steps to run for; a value < 0 means limited by the context length */ + @ConfigDocDefault("512") OptionalInt maxTokens(); } diff --git a/model-providers/mistral/runtime/src/main/java/io/quarkiverse/langchain4j/mistralai/runtime/config/ChatModelConfig.java b/model-providers/mistral/runtime/src/main/java/io/quarkiverse/langchain4j/mistralai/runtime/config/ChatModelConfig.java index df8abfef8..118cf70b8 100644 --- a/model-providers/mistral/runtime/src/main/java/io/quarkiverse/langchain4j/mistralai/runtime/config/ChatModelConfig.java +++ b/model-providers/mistral/runtime/src/main/java/io/quarkiverse/langchain4j/mistralai/runtime/config/ChatModelConfig.java @@ -24,6 +24,7 @@ public interface ChatModelConfig { * It is generally recommended to set this or the {@code top-k} property but not both. 
*/ @ConfigDocDefault("0.7") + @WithDefault("${quarkus.langchain4j.temperature}") OptionalDouble temperature(); /** diff --git a/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/runtime/config/ChatModelConfig.java b/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/runtime/config/ChatModelConfig.java index 13095b4a6..fd306bf5e 100644 --- a/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/runtime/config/ChatModelConfig.java +++ b/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/runtime/config/ChatModelConfig.java @@ -15,7 +15,7 @@ public interface ChatModelConfig { * The temperature of the model. Increasing the temperature will make the model answer with * more variability. A lower temperature will make the model answer more conservatively. */ - @WithDefault("0.8") + @WithDefault("${quarkus.langchain4j.temperature:0.8}") Double temperature(); /** diff --git a/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/runtime/config/EmbeddingModelConfig.java b/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/runtime/config/EmbeddingModelConfig.java index 306070538..66eff1e6e 100644 --- a/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/runtime/config/EmbeddingModelConfig.java +++ b/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/runtime/config/EmbeddingModelConfig.java @@ -14,7 +14,7 @@ public interface EmbeddingModelConfig { * The temperature of the model. Increasing the temperature will make the model answer with * more variability. A lower temperature will make the model answer more conservatively. */ - @WithDefault("0.8") + @WithDefault("${quarkus.langchain4j.temperature:0.8}") Double temperature(); /** diff --git a/model-providers/openai/azure-openai/runtime/src/main/java/io/quarkiverse/langchain4j/azure/openai/runtime/config/ChatModelConfig.java b/model-providers/openai/azure-openai/runtime/src/main/java/io/quarkiverse/langchain4j/azure/openai/runtime/config/ChatModelConfig.java index e3bccdb38..b7ad8266f 100644 --- a/model-providers/openai/azure-openai/runtime/src/main/java/io/quarkiverse/langchain4j/azure/openai/runtime/config/ChatModelConfig.java +++ b/model-providers/openai/azure-openai/runtime/src/main/java/io/quarkiverse/langchain4j/azure/openai/runtime/config/ChatModelConfig.java @@ -43,7 +43,7 @@ public interface ChatModelConfig { * answer. * It is recommended to alter this or topP, but not both. */ - @WithDefault("1.0") + @WithDefault("${quarkus.langchain4j.temperature:1.0}") Double temperature(); /** diff --git a/model-providers/openai/openai-vanilla/runtime/src/main/java/io/quarkiverse/langchain4j/openai/runtime/config/ChatModelConfig.java b/model-providers/openai/openai-vanilla/runtime/src/main/java/io/quarkiverse/langchain4j/openai/runtime/config/ChatModelConfig.java index 3e49f9c98..292aad127 100644 --- a/model-providers/openai/openai-vanilla/runtime/src/main/java/io/quarkiverse/langchain4j/openai/runtime/config/ChatModelConfig.java +++ b/model-providers/openai/openai-vanilla/runtime/src/main/java/io/quarkiverse/langchain4j/openai/runtime/config/ChatModelConfig.java @@ -23,7 +23,7 @@ public interface ChatModelConfig { * answer. * It is recommended to alter this or topP, but not both. 
*/ - @WithDefault("1.0") + @WithDefault("${quarkus.langchain4j.temperature:1.0}") Double temperature(); /** diff --git a/model-providers/vertex-ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/gemini/VertexAiGeminiRecorder.java b/model-providers/vertex-ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/gemini/VertexAiGeminiRecorder.java index 0ed54ea59..8628f0d09 100644 --- a/model-providers/vertex-ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/gemini/VertexAiGeminiRecorder.java +++ b/model-providers/vertex-ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/gemini/VertexAiGeminiRecorder.java @@ -41,7 +41,7 @@ public Supplier chatModel(LangChain4jVertexAiGeminiConfig con .logRequests(firstOrDefault(false, chatModelConfig.logRequests(), vertexAiConfig.logRequests())) .logResponses(firstOrDefault(false, chatModelConfig.logResponses(), vertexAiConfig.logResponses())); - if (chatModelConfig.temperature().isEmpty()) { + if (chatModelConfig.temperature().isPresent()) { builder.temperature(chatModelConfig.temperature().getAsDouble()); } if (chatModelConfig.topK().isPresent()) { diff --git a/model-providers/vertex-ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/gemini/config/ChatModelConfig.java b/model-providers/vertex-ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/gemini/config/ChatModelConfig.java index 9c7231fc2..edddf5e32 100644 --- a/model-providers/vertex-ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/gemini/config/ChatModelConfig.java +++ b/model-providers/vertex-ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/gemini/config/ChatModelConfig.java @@ -36,7 +36,7 @@ public interface ChatModelConfig { *

* Default for gemini-1.0-pro-001: 0.9 */ - @WithDefault("0.0") + @WithDefault("${quarkus.langchain4j.temperature}") OptionalDouble temperature(); /** diff --git a/model-providers/vertex-ai/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/config/ChatModelConfig.java b/model-providers/vertex-ai/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/config/ChatModelConfig.java index d9182b7ec..b30e2b49e 100644 --- a/model-providers/vertex-ai/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/config/ChatModelConfig.java +++ b/model-providers/vertex-ai/runtime/src/main/java/io/quarkiverse/langchain4j/vertexai/runtime/config/ChatModelConfig.java @@ -26,7 +26,7 @@ public interface ChatModelConfig { * If the model returns a response that's too generic, too short, or the model gives a fallback response, try increasing the * temperature. */ - @WithDefault("0.0") + @WithDefault("${quarkus.langchain4j.temperature:0.0}") Double temperature(); /** diff --git a/model-providers/watsonx/runtime/src/main/java/io/quarkiverse/langchain4j/watsonx/runtime/config/ChatModelConfig.java b/model-providers/watsonx/runtime/src/main/java/io/quarkiverse/langchain4j/watsonx/runtime/config/ChatModelConfig.java index 6ce587876..26496775c 100644 --- a/model-providers/watsonx/runtime/src/main/java/io/quarkiverse/langchain4j/watsonx/runtime/config/ChatModelConfig.java +++ b/model-providers/watsonx/runtime/src/main/java/io/quarkiverse/langchain4j/watsonx/runtime/config/ChatModelConfig.java @@ -78,7 +78,7 @@ public interface ChatModelConfig { *

* Possible values: 0 < value < 2 */ - @WithDefault("1") + @WithDefault("${quarkus.langchain4j.temperature:1.0}") Double temperature(); /** diff --git a/model-providers/watsonx/runtime/src/main/java/io/quarkiverse/langchain4j/watsonx/runtime/config/GenerationModelConfig.java b/model-providers/watsonx/runtime/src/main/java/io/quarkiverse/langchain4j/watsonx/runtime/config/GenerationModelConfig.java index 9d27cbeed..7ddcedc93 100644 --- a/model-providers/watsonx/runtime/src/main/java/io/quarkiverse/langchain4j/watsonx/runtime/config/GenerationModelConfig.java +++ b/model-providers/watsonx/runtime/src/main/java/io/quarkiverse/langchain4j/watsonx/runtime/config/GenerationModelConfig.java @@ -90,7 +90,7 @@ public interface GenerationModelConfig { *

* Possible values: 0 ≤ value ≤ 2 */ - @WithDefault("1.0") + @WithDefault("${quarkus.langchain4j.temperature:1.0}") Double temperature(); /**
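/**

Usage sketch (illustration only; the property names below come from this patch, while the numeric values and the choice of the OpenAI provider are hypothetical). Because each provider-level temperature default is now expressed as ${quarkus.langchain4j.temperature:<provider default>}, a single global value can be set once, and an explicitly set provider property still takes precedence. A minimal application.properties example:

    # Global default, resolved by every provider default that references quarkus.langchain4j.temperature
    quarkus.langchain4j.temperature=0.2

    # An explicit provider-level setting overrides the global default
    quarkus.langchain4j.openai.chat-model.temperature=0.7

When neither property is set, the provider's own fallback applies (e.g. 1.0 for OpenAI, 0.8 for Ollama).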