mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Set temperature to 0 for promql builder llm features (#77520)
set temperature for openai calls
This commit is contained in:
@@ -111,6 +111,7 @@ export async function promQailExplain(
       .streamChatCompletions({
         model: OPENAI_MODEL_NAME,
         messages: promptMessages,
+        temperature: 0,
       })
       .pipe(llms.openai.accumulateContent())
       .subscribe((response) => {
@@ -371,6 +372,7 @@ export async function promQailSuggest(
       .streamChatCompletions({
         model: OPENAI_MODEL_NAME,
         messages: promptMessages,
+        temperature: 0,
       })
       .pipe(llms.openai.accumulateContent())
       .subscribe((response) => {
Reference in New Issue
Block a user