Merge pull request #1068 from quarkiverse/global-temperature
Introduce a global temperature property
geoand authored Nov 11, 2024
2 parents 2039dde + 2023a56 commit 0b5cf23
Showing 31 changed files with 105 additions and 56 deletions.
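At a glance, the change adds a single global configuration property, `quarkus.langchain4j.temperature`, and wires every provider's temperature default to fall back to it. A minimal, purely illustrative `application.properties` sketch (the 0.2 and 0.9 values are made up for the example):

[source,properties]
----
# Illustrative values only.
# One temperature shared by every configured model provider:
quarkus.langchain4j.temperature=0.2

# An explicit provider-specific setting still wins, since the global value
# only feeds that property's default:
quarkus.langchain4j.openai.chat-model.temperature=0.9
----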
@@ -4,6 +4,7 @@

import java.time.Duration;
import java.util.Optional;
import java.util.OptionalDouble;

import io.quarkus.runtime.annotations.ConfigDocDefault;
import io.quarkus.runtime.annotations.ConfigRoot;
@@ -31,6 +32,11 @@ public interface LangChain4jConfig {
@ConfigDocDefault("10s")
Optional<Duration> timeout();

/**
* Global temperature for LLM APIs
*/
OptionalDouble temperature();

/**
* Guardrails configuration
*/
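In the regenerated reference documentation below, each provider's documented temperature default changes from a literal value to a Quarkus property expression such as `${quarkus.langchain4j.temperature:1.0}`: the global property when it is set, otherwise the provider's previous default (1.0 for OpenAI and Azure OpenAI, 0.8 for Ollama, 0.0 for Vertex AI). A small sketch of how that resolution plays out, with an illustrative value:

[source,properties]
----
# With nothing set, OpenAI still resolves ${quarkus.langchain4j.temperature:1.0} to 1.0
# and Ollama resolves ${quarkus.langchain4j.temperature:0.8} to 0.8.
# Setting the global property (illustrative value) makes both resolve to 0.5:
quarkus.langchain4j.temperature=0.5
----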
@@ -371,7 +371,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AZURE_OPENAI_CHAT_MODEL_TEMPERATUR
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-chat-model-top-p[`quarkus.langchain4j.azure-openai.chat-model.top-p`]##

@@ -1166,7 +1166,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AZURE_OPENAI__MODEL_NAME__CHAT_MOD
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-model-name-chat-model-top-p[`quarkus.langchain4j.azure-openai."model-name".chat-model.top-p`]##

@@ -371,7 +371,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AZURE_OPENAI_CHAT_MODEL_TEMPERATUR
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-chat-model-top-p[`quarkus.langchain4j.azure-openai.chat-model.top-p`]##

@@ -1166,7 +1166,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AZURE_OPENAI__MODEL_NAME__CHAT_MOD
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-azure-openai_quarkus-langchain4j-azure-openai-model-name-chat-model-top-p[`quarkus.langchain4j.azure-openai."model-name".chat-model.top-p`]##

17 changes: 17 additions & 0 deletions docs/modules/ROOT/pages/includes/quarkus-langchain4j-core.adoc
@@ -186,6 +186,23 @@ endif::add-copy-button-to-env-var[]
|link:https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/time/Duration.html[Duration] link:#duration-note-anchor-{summaryTableId}[icon:question-circle[title=More information about the Duration format]]
|`10s`

a| [[quarkus-langchain4j-core_quarkus-langchain4j-temperature]] [.property-path]##link:#quarkus-langchain4j-core_quarkus-langchain4j-temperature[`quarkus.langchain4j.temperature`]##

[.description]
--
Global temperature for LLM APIs


ifdef::add-copy-button-to-env-var[]
Environment variable: env_var_with_copy_button:+++QUARKUS_LANGCHAIN4J_TEMPERATURE+++[]
endif::add-copy-button-to-env-var[]
ifndef::add-copy-button-to-env-var[]
Environment variable: `+++QUARKUS_LANGCHAIN4J_TEMPERATURE+++`
endif::add-copy-button-to-env-var[]
--
|double
|

a| [[quarkus-langchain4j-core_quarkus-langchain4j-guardrails-max-retries]] [.property-path]##link:#quarkus-langchain4j-core_quarkus-langchain4j-guardrails-max-retries[`quarkus.langchain4j.guardrails.max-retries`]##

[.description]
@@ -186,6 +186,23 @@ endif::add-copy-button-to-env-var[]
|link:https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/time/Duration.html[Duration] link:#duration-note-anchor-{summaryTableId}[icon:question-circle[title=More information about the Duration format]]
|`10s`

a| [[quarkus-langchain4j-core_quarkus-langchain4j-temperature]] [.property-path]##link:#quarkus-langchain4j-core_quarkus-langchain4j-temperature[`quarkus.langchain4j.temperature`]##

[.description]
--
Global temperature for LLM APIs


ifdef::add-copy-button-to-env-var[]
Environment variable: env_var_with_copy_button:+++QUARKUS_LANGCHAIN4J_TEMPERATURE+++[]
endif::add-copy-button-to-env-var[]
ifndef::add-copy-button-to-env-var[]
Environment variable: `+++QUARKUS_LANGCHAIN4J_TEMPERATURE+++`
endif::add-copy-button-to-env-var[]
--
|double
|

a| [[quarkus-langchain4j-core_quarkus-langchain4j-guardrails-max-retries]] [.property-path]##link:#quarkus-langchain4j-core_quarkus-langchain4j-guardrails-max-retries[`quarkus.langchain4j.guardrails.max-retries`]##

[.description]
@@ -129,7 +129,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_HUGGINGFACE_CHAT_MODEL_TEMPERATURE
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-chat-model-max-new-tokens]] [.property-path]##link:#quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-chat-model-max-new-tokens[`quarkus.langchain4j.huggingface.chat-model.max-new-tokens`]##

@@ -447,7 +447,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_HUGGINGFACE__MODEL_NAME__CHAT_MODE
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-model-name-chat-model-max-new-tokens]] [.property-path]##link:#quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-model-name-chat-model-max-new-tokens[`quarkus.langchain4j.huggingface."model-name".chat-model.max-new-tokens`]##

@@ -129,7 +129,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_HUGGINGFACE_CHAT_MODEL_TEMPERATURE
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-chat-model-max-new-tokens]] [.property-path]##link:#quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-chat-model-max-new-tokens[`quarkus.langchain4j.huggingface.chat-model.max-new-tokens`]##

@@ -447,7 +447,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_HUGGINGFACE__MODEL_NAME__CHAT_MODE
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-model-name-chat-model-max-new-tokens]] [.property-path]##link:#quarkus-langchain4j-hugging-face_quarkus-langchain4j-huggingface-model-name-chat-model-max-new-tokens[`quarkus.langchain4j.huggingface."model-name".chat-model.max-new-tokens`]##

16 changes: 8 additions & 8 deletions docs/modules/ROOT/pages/includes/quarkus-langchain4j-ollama.adoc
@@ -46,7 +46,7 @@ a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-lang

[.description]
--
Model to use. According to link:https://github.com/jmorganca/ollama/blob/main/docs/api.md#model-names[Ollama docs], the default value is `llama3`
Model to use


ifdef::add-copy-button-to-env-var[]
@@ -57,7 +57,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_CHAT_MODEL_MODEL_ID+++`
endif::add-copy-button-to-env-var[]
--
|string
|`llama3.1`
|`llama3.2`

a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-model-id]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-model-id[`quarkus.langchain4j.ollama.embedding-model.model-id`]##

@@ -193,7 +193,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_CHAT_MODEL_TEMPERATURE+++`
endif::add-copy-button-to-env-var[]
--
|double
|`0.8`
|`${quarkus.langchain4j.temperature:0.8}`

a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-chat-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-chat-model-num-predict[`quarkus.langchain4j.ollama.chat-model.num-predict`]##

@@ -352,7 +352,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_EMBEDDING_MODEL_TEMPERATURE
endif::add-copy-button-to-env-var[]
--
|double
|`0.8`
|`${quarkus.langchain4j.temperature:0.8}`

a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-num-predict[`quarkus.langchain4j.ollama.embedding-model.num-predict`]##

@@ -464,7 +464,7 @@ a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-lang

[.description]
--
Model to use. According to link:https://github.com/jmorganca/ollama/blob/main/docs/api.md#model-names[Ollama docs], the default value is `llama3`
Model to use


ifdef::add-copy-button-to-env-var[]
@@ -475,7 +475,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__CHAT_MODEL_MOD
endif::add-copy-button-to-env-var[]
--
|string
|`llama3.1`
|`llama3.2`

a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-model-id]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-model-id[`quarkus.langchain4j.ollama."model-name".embedding-model.model-id`]##

@@ -611,7 +611,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__CHAT_MODEL_TEM
endif::add-copy-button-to-env-var[]
--
|double
|`0.8`
|`${quarkus.langchain4j.temperature:0.8}`

a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-chat-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-chat-model-num-predict[`quarkus.langchain4j.ollama."model-name".chat-model.num-predict`]##

@@ -770,7 +770,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__EMBEDDING_MODE
endif::add-copy-button-to-env-var[]
--
|double
|`0.8`
|`${quarkus.langchain4j.temperature:0.8}`

a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-num-predict[`quarkus.langchain4j.ollama."model-name".embedding-model.num-predict`]##

@@ -46,7 +46,7 @@ a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-lang

[.description]
--
Model to use. According to link:https://github.com/jmorganca/ollama/blob/main/docs/api.md#model-names[Ollama docs], the default value is `llama3`
Model to use


ifdef::add-copy-button-to-env-var[]
@@ -57,7 +57,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_CHAT_MODEL_MODEL_ID+++`
endif::add-copy-button-to-env-var[]
--
|string
|`llama3.1`
|`llama3.2`

a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-model-id]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-model-id[`quarkus.langchain4j.ollama.embedding-model.model-id`]##

@@ -193,7 +193,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_CHAT_MODEL_TEMPERATURE+++`
endif::add-copy-button-to-env-var[]
--
|double
|`0.8`
|`${quarkus.langchain4j.temperature:0.8}`

a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-chat-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-chat-model-num-predict[`quarkus.langchain4j.ollama.chat-model.num-predict`]##

@@ -352,7 +352,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA_EMBEDDING_MODEL_TEMPERATURE
endif::add-copy-button-to-env-var[]
--
|double
|`0.8`
|`${quarkus.langchain4j.temperature:0.8}`

a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-embedding-model-num-predict[`quarkus.langchain4j.ollama.embedding-model.num-predict`]##

@@ -464,7 +464,7 @@ a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-lang

[.description]
--
Model to use. According to link:https://github.com/jmorganca/ollama/blob/main/docs/api.md#model-names[Ollama docs], the default value is `llama3`
Model to use


ifdef::add-copy-button-to-env-var[]
@@ -475,7 +475,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__CHAT_MODEL_MOD
endif::add-copy-button-to-env-var[]
--
|string
|`llama3.1`
|`llama3.2`

a|icon:lock[title=Fixed at build time] [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-model-id]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-model-id[`quarkus.langchain4j.ollama."model-name".embedding-model.model-id`]##

@@ -611,7 +611,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__CHAT_MODEL_TEM
endif::add-copy-button-to-env-var[]
--
|double
|`0.8`
|`${quarkus.langchain4j.temperature:0.8}`

a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-chat-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-chat-model-num-predict[`quarkus.langchain4j.ollama."model-name".chat-model.num-predict`]##

@@ -770,7 +770,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OLLAMA__MODEL_NAME__EMBEDDING_MODE
endif::add-copy-button-to-env-var[]
--
|double
|`0.8`
|`${quarkus.langchain4j.temperature:0.8}`

a| [[quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-num-predict]] [.property-path]##link:#quarkus-langchain4j-ollama_quarkus-langchain4j-ollama-model-name-embedding-model-num-predict[`quarkus.langchain4j.ollama."model-name".embedding-model.num-predict`]##

@@ -312,7 +312,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OPENAI_CHAT_MODEL_TEMPERATURE+++`
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-openai_quarkus-langchain4j-openai-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-openai_quarkus-langchain4j-openai-chat-model-top-p[`quarkus.langchain4j.openai.chat-model.top-p`]##

@@ -1014,7 +1014,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OPENAI__MODEL_NAME__CHAT_MODEL_TEM
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-openai_quarkus-langchain4j-openai-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-openai_quarkus-langchain4j-openai-model-name-chat-model-top-p[`quarkus.langchain4j.openai."model-name".chat-model.top-p`]##

@@ -312,7 +312,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OPENAI_CHAT_MODEL_TEMPERATURE+++`
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-openai_quarkus-langchain4j-openai-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-openai_quarkus-langchain4j-openai-chat-model-top-p[`quarkus.langchain4j.openai.chat-model.top-p`]##

@@ -1014,7 +1014,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_OPENAI__MODEL_NAME__CHAT_MODEL_TEM
endif::add-copy-button-to-env-var[]
--
|double
|`1.0`
|`${quarkus.langchain4j.temperature:1.0}`

a| [[quarkus-langchain4j-openai_quarkus-langchain4j-openai-model-name-chat-model-top-p]] [.property-path]##link:#quarkus-langchain4j-openai_quarkus-langchain4j-openai-model-name-chat-model-top-p[`quarkus.langchain4j.openai."model-name".chat-model.top-p`]##

@@ -203,7 +203,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI_CHAT_MODEL_TEMPERA
endif::add-copy-button-to-env-var[]
--
|double
|`0.0`
|`${quarkus.langchain4j.temperature}`

a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.gemini.chat-model.max-output-tokens`]##

@@ -505,7 +505,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI__MODEL_NAME__CHAT_
endif::add-copy-button-to-env-var[]
--
|double
|`0.0`
|`${quarkus.langchain4j.temperature}`

a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.gemini."model-name".chat-model.max-output-tokens`]##

@@ -203,7 +203,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI_CHAT_MODEL_TEMPERA
endif::add-copy-button-to-env-var[]
--
|double
|`0.0`
|`${quarkus.langchain4j.temperature}`

a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.gemini.chat-model.max-output-tokens`]##

@@ -505,7 +505,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI__MODEL_NAME__CHAT_
endif::add-copy-button-to-env-var[]
--
|double
|`0.0`
|`${quarkus.langchain4j.temperature}`

a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.gemini."model-name".chat-model.max-output-tokens`]##

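Note that the Vertex AI Gemini entries above reference the global property without a fallback value (`${quarkus.langchain4j.temperature}`), unlike the other providers. If you want to keep the previous explicit 0.0 behaviour, one option would be to pin it yourself, for example:

[source,properties]
----
# Illustrative: reproduce the former Gemini default of 0.0 explicitly,
# either globally or just for the Gemini chat model.
quarkus.langchain4j.temperature=0.0
# quarkus.langchain4j.vertexai.gemini.chat-model.temperature=0.0
----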
@@ -178,7 +178,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_CHAT_MODEL_TEMPERATURE+++
endif::add-copy-button-to-env-var[]
--
|double
|`0.0`
|`${quarkus.langchain4j.temperature:0.0}`

a| [[quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.chat-model.max-output-tokens`]##

@@ -428,7 +428,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI__MODEL_NAME__CHAT_MODEL_T
endif::add-copy-button-to-env-var[]
--
|double
|`0.0`
|`${quarkus.langchain4j.temperature:0.0}`

a| [[quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-model-name-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-model-name-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai."model-name".chat-model.max-output-tokens`]##

@@ -178,7 +178,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_CHAT_MODEL_TEMPERATURE+++
endif::add-copy-button-to-env-var[]
--
|double
|`0.0`
|`${quarkus.langchain4j.temperature:0.0}`

a| [[quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai.chat-model.max-output-tokens`]##

@@ -428,7 +428,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI__MODEL_NAME__CHAT_MODEL_T
endif::add-copy-button-to-env-var[]
--
|double
|`0.0`
|`${quarkus.langchain4j.temperature:0.0}`

a| [[quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-model-name-chat-model-max-output-tokens]] [.property-path]##link:#quarkus-langchain4j-vertex-ai_quarkus-langchain4j-vertexai-model-name-chat-model-max-output-tokens[`quarkus.langchain4j.vertexai."model-name".chat-model.max-output-tokens`]##

