Prometheus: improved PromQAIL v1 prompt (#79232)

* PromQail: update prompt and model

- Split up system and user prompt
- Use latest gpt-3.5-turbo-1106 model
- Add metric type to user prompt
- Simpler formatting for templates

* PromQail: fix fetching all available labels

Previously we were rendering only the user-selected labels. The prompt expects all available label keys for a given metric (see the sketch below).
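
A minimal sketch of the fix, using the names from this diff (assuming fetchSeriesLabelsMatch resolves to a record keyed by label name, as the code below implies):

```ts
// Before: only the labels the user had already selected in the query builder
// labels: promQueryModeller.renderLabels(query.labels)

// After: every label key Prometheus reports for the metric, minus __name__,
// which only repeats the metric name.
const metricLabels = await datasource.languageProvider.fetchSeriesLabelsMatch(query.metric);
const labels = Object.keys(metricLabels)
  .filter((label) => label !== '__name__')
  .join(',');
```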

* use fetchSeriesLabels instead

* user prompt: fix trailing newline
Author: Yasir Ekinci, 2023-12-08 15:01:09 +01:00 (committed by GitHub)
parent 50f4e78a39
commit 5e03223ea9
2 changed files with 60 additions and 31 deletions


@@ -67,36 +67,48 @@ export function GetExplainUserPrompt({
 `;
 }
+export const SuggestSystemPrompt = `You are a Prometheus Query Language (PromQL) expert assistant inside Grafana.
+When the user asks a question, respond with a valid PromQL query and only the query.
+To help you answer the question, you will receive:
+- List of potentially relevant PromQL templates with descriptions, ranked by semantic search score
+- Prometheus metric
+- Metric type
+- Available Prometheus metric labels
+- User question
+Policy:
+- Do not invent labels names, you can only use the available labels
+- For rate queries, use the $__rate_interval variable`;
 // rewrite with a type
-export type SuggestSystemPromptParams = {
+export type SuggestUserPromptParams = {
   promql: string;
   question: string;
+  metricType: string;
   labels: string;
   templates: string;
 };
-export function GetSuggestSystemPrompt({ promql, question, labels, templates }: SuggestSystemPromptParams): string {
+export function GetSuggestUserPrompt({
+  promql,
+  question,
+  metricType,
+  labels,
+  templates,
+}: SuggestUserPromptParams): string {
   if (templates === '') {
     templates = 'No templates provided.';
   } else {
     templates = templates.replace(/\n/g, '\n  ');
   }
-  return `You are an PromQL expert assistant. You will be is given a PromQL expression and a user question.
-You are to edit the PromQL expression so that it answers the user question. Show only the edited PromQL.
-The initial PromQL query is
-\`\`\`
-${promql}
-\`\`\`
-The user question is: "${question}"
-To help you answer the question, here are 2 pieces of information:
-1. List of labels to use: ${labels}
-2. Here is a list of possibly relevant PromQL template expressions with descriptions to help target your answer:
-${templates}
-Rules:
-- Do not invent labels names, you must use only the labels provided.
-Answer:
-\`\`\``;
+  return `Relevant PromQL templates:
+  ${templates}
+Prometheus metric: ${promql}
+Metric type: ${metricType}
+Available Prometheus metric labels: ${labels}
+User question: ${question}
+\`\`\`promql`;
 }
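
For illustration, a sketch of what the new user prompt renders to; every value below is invented:

```ts
// Hypothetical inputs, for illustration only.
const userPrompt = GetSuggestUserPrompt({
  promql: 'http_requests_total',
  question: 'error rate by status code',
  metricType: 'counter',
  labels: 'instance,job,method,status',
  templates: 'sum(rate(http_requests_total[$__rate_interval])) | Total request rate (score=87.3)',
});
// Renders the templates, metric, metric type, labels, and question in order,
// ending with an opening ```promql fence so the model answers inside a
// fenced PromQL block.
```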


@@ -11,15 +11,16 @@ import { PromVisualQuery } from '../../../types';
 import {
   ExplainSystemPrompt,
   GetExplainUserPrompt,
-  GetSuggestSystemPrompt,
-  SuggestSystemPromptParams,
+  SuggestSystemPrompt,
+  GetSuggestUserPrompt,
+  SuggestUserPromptParams,
 } from '../prompts';
 import { Interaction, QuerySuggestion, SuggestionType } from '../types';
 import { createInteraction, stateSlice } from './state';
 import { getTemplateSuggestions } from './templates';
-const OPENAI_MODEL_NAME = 'gpt-3.5-turbo';
+const OPENAI_MODEL_NAME = 'gpt-3.5-turbo-1106';
 const promQLTemplatesCollection = 'grafana.promql.templates';
 // actions to update the state
 const { updateInteraction } = stateSlice.actions;
@@ -77,8 +78,17 @@ export function getExplainMessage(
   ];
 }
-function getSuggestMessage({ promql, question, labels, templates }: SuggestSystemPromptParams): llms.openai.Message[] {
-  return [{ role: 'system', content: GetSuggestSystemPrompt({ promql, question, labels, templates }) }];
+function getSuggestMessages({
+  promql,
+  question,
+  metricType,
+  labels,
+  templates,
+}: SuggestUserPromptParams): llms.openai.Message[] {
+  return [
+    { role: 'system', content: SuggestSystemPrompt },
+    { role: 'user', content: GetSuggestUserPrompt({ promql, question, metricType, labels, templates }) },
+  ];
 }
 /**
@@ -343,9 +353,15 @@ export async function promQailSuggest(
     prompt?: string;
   };
+  // get all available labels
+  const metricLabels = await datasource.languageProvider.fetchSeriesLabelsMatch(query.metric);
   let feedTheAI: SuggestionBody = {
     metric: query.metric,
-    labels: promQueryModeller.renderLabels(query.labels),
+    // drop __name__ label because it's not useful
+    labels: Object.keys(metricLabels)
+      .filter((label) => label !== '__name__')
+      .join(','),
   };
   // @ts-ignore llms types issue
@@ -372,13 +388,14 @@
     const resultsString = results
       .map((r) => {
-        return `PromQL: ${r.payload.promql}\nDescription: ${r.payload.description}`;
+        return `${r.payload.promql} | ${r.payload.description} (score=${(r.score * 100).toFixed(1)})`;
       })
       .join('\n');
-    const promptMessages = getSuggestMessage({
+    const promptMessages = getSuggestMessages({
       promql: query.metric,
       question: interaction ? interaction.prompt : '',
+      metricType: metricType,
       labels: labelNames.join(', '),
       templates: resultsString,
     });
@@ -387,7 +404,7 @@
       .streamChatCompletions({
         model: OPENAI_MODEL_NAME,
         messages: promptMessages,
-        temperature: 0,
+        temperature: 0.5,
       })
       .pipe(llms.openai.accumulateContent())
       .subscribe((response) => {
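
For context, a minimal sketch of how this stream is consumed, assuming llms is imported from @grafana/experimental as elsewhere in this file at the time:

```ts
import { llms } from '@grafana/experimental'; // assumed import path

// accumulateContent() folds streamed deltas together, so each emission is the
// full completion so far rather than a single token; temperature 0.5 trades
// the old fully deterministic output for some variety between suggestions.
llms.openai
  .streamChatCompletions({
    model: 'gpt-3.5-turbo-1106',
    messages: promptMessages, // [system, user] pair built by getSuggestMessages
    temperature: 0.5,
  })
  .pipe(llms.openai.accumulateContent())
  .subscribe((response) => {
    console.log(response); // the suggestion text accumulated so far
  });
```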