Set temperature to 0 for promql builder llm features (#77520)

Set the temperature to 0 on the OpenAI calls used by the PromQL builder, so the generated explanations and suggestions are (near-)deterministic across runs.
Author: Edward Qian
Date:   2023-11-07 22:27:05 -05:00
Committed by: GitHub
Parent: 08f6abe4ac
Commit: 9e346616d0


@@ -111,6 +111,7 @@ export async function promQailExplain(
     .streamChatCompletions({
       model: OPENAI_MODEL_NAME,
       messages: promptMessages,
+      temperature: 0,
     })
     .pipe(llms.openai.accumulateContent())
     .subscribe((response) => {
@@ -371,6 +372,7 @@ export async function promQailSuggest(
     .streamChatCompletions({
       model: OPENAI_MODEL_NAME,
       messages: promptMessages,
+      temperature: 0,
     })
    .pipe(llms.openai.accumulateContent())
    .subscribe((response) => {
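
For context, below is a minimal sketch of the call pattern this commit changes, based on the `llms.openai` helper visible in the diff. The model constant value, the prompt content, and the `explainQuery` wrapper are illustrative stand-ins, not the actual Grafana source.

```typescript
import { llms } from '@grafana/experimental';

// Assumption: the real constant is defined elsewhere in the promQail code.
const OPENAI_MODEL_NAME = 'gpt-3.5-turbo';

// Hypothetical wrapper showing the streaming call shape used above.
function explainQuery(prompt: string): void {
  llms.openai
    .streamChatCompletions({
      model: OPENAI_MODEL_NAME,
      messages: [{ role: 'user', content: prompt }],
      // temperature: 0 makes token sampling greedy, so the same prompt
      // yields (near-)identical PromQL explanations/suggestions each run.
      temperature: 0,
    })
    // Accumulate streamed chunks into the full response text.
    .pipe(llms.openai.accumulateContent())
    .subscribe((response) => {
      console.log(response);
    });
}
```

Without an explicit `temperature`, the OpenAI API defaults to 1, which injects sampling randomness; pinning it to 0 trades response variety for reproducibility, which suits a query-builder feature where users expect stable output.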