Dashboards: Auto-generate dashboard changes description (#75003)

Co-authored-by: Aaron Sanders <aaron.sanders@grafana.com>
Co-authored-by: nmarrs <nathanielmarrs@gmail.com>
Authored by Ivan Ortega Alba on 2023-09-21 14:41:49 +02:00, committed by GitHub
parent 9fca10bfc3
commit 224e2c9efb
6 changed files with 202 additions and 24 deletions

File: GenAI/GenAIButton.test.tsx

@@ -112,16 +112,16 @@ describe('GenAIButton', () => {
       expect(onReply).toHaveBeenCalledTimes(1);
     });

-    it('should call the LLM service with the messages configured', async () => {
+    it('should call the LLM service with the messages configured and the right temperature', async () => {
       const onReply = jest.fn();
       const messages = [{ content: 'Generate X', role: 'system' as Role }];
-      setup({ onReply, messages });
+      setup({ onReply, messages, temperature: 3 });

       const generateButton = await screen.findByRole('button');
       await fireEvent.click(generateButton);

       await waitFor(() => expect(generateTextWithLLM).toHaveBeenCalledTimes(1));
-      await waitFor(() => expect(generateTextWithLLM).toHaveBeenCalledWith(messages, expect.any(Function)));
+      await waitFor(() => expect(generateTextWithLLM).toHaveBeenCalledWith(messages, expect.any(Function), 3));
     });
   });
 });

File: GenAI/GenAIButton.tsx

@@ -12,6 +12,7 @@ export interface GenAIButtonProps {
   onClick?: (e: React.MouseEvent<HTMLButtonElement>) => void;
   messages: Message[];
   onReply: (response: string, isDone: boolean) => void;
+  temperature?: number;
 }

 export const GenAIButton = ({
@@ -20,6 +21,7 @@ export const GenAIButton = ({
   onClick,
   messages,
   onReply,
+  temperature = 1,
 }: GenAIButtonProps) => {
   const styles = useStyles2(getStyles);
   const [enabled, setEnabled] = useState(true);
@@ -33,7 +35,7 @@ export const GenAIButton = ({
   const onGenerate = (e: React.MouseEvent<HTMLButtonElement>) => {
     onClick?.(e);
     setLoading(true);
-    generateTextWithLLM(messages, replyHandler);
+    generateTextWithLLM(messages, replyHandler, temperature);
   };

   useEffect(() => {
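
For context, a minimal usage sketch of the new prop (not part of the commit): the wrapper component, prompt text, and state handling below are illustrative. Because the prop is optional, existing callers keep the default temperature of 1.

import React, { useState } from 'react';

import { GenAIButton } from './GenAIButton';
import { Role } from './utils';

// Hypothetical consumer: pins temperature to 0 for a more deterministic reply
// and keeps only the final streamed text once the LLM signals it is done.
export const DashboardSummaryButton = () => {
  const [summary, setSummary] = useState('');

  return (
    <>
      <GenAIButton
        messages={[{ role: Role.system, content: 'Summarize this dashboard' }]}
        onReply={(response, isDone) => isDone && setSummary(response)}
        loadingText="Generating summary"
        temperature={0}
      />
      <span>{summary}</span>
    </>
  );
};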

File: GenAI/GenAIDashboardChangesButton.tsx (new file)

@@ -0,0 +1,60 @@
+import React, { useMemo } from 'react';
+
+import { DashboardModel } from '../../state';
+
+import { GenAIButton } from './GenAIButton';
+import { getDashboardChanges, Message, Role } from './utils';
+
+interface GenAIDashboardChangesButtonProps {
+  dashboard: DashboardModel;
+  onGenerate: (title: string, isDone: boolean) => void;
+}
+
+const CHANGES_GENERATION_STANDARD_PROMPT = [
+  'You are an expert in Grafana Dashboards',
+  'Your goal is to write a description of the changes for a dashboard',
+  'When referring to panel changes, use the panel title',
+  'When using panel title, wrap it with double quotes',
+  'When the panel changes the position, just mention the panel title has changed position',
+  'When an entire panel is added or removed, use the panel title and only say it was added or removed and disregard the rest of the changes for that panel',
+  'Group changes when all panels are affected',
+  'Do not mention line number',
+  'Refer to templating elements as variables',
+  'Ignore and never mention changes about plugin version',
+  'Try to make it as short as possible.',
+].join('. ');
+
+export const GenAIDashboardChangesButton = ({ dashboard, onGenerate }: GenAIDashboardChangesButtonProps) => {
+  const messages = useMemo(() => getMessages(dashboard), [dashboard]);
+
+  return (
+    <GenAIButton messages={messages} onReply={onGenerate} loadingText={'Generating changes summary'} temperature={0} />
+  );
+};
+
+function getMessages(dashboard: DashboardModel): Message[] {
+  const { userChanges, migrationChanges } = getDashboardChanges(dashboard);
+
+  return [
+    {
+      content: CHANGES_GENERATION_STANDARD_PROMPT,
+      role: Role.system,
+    },
+    {
+      content: `This is the list of panel names, when referring to a panel, please use the title: ${JSON.stringify(
+        dashboard.panels.map((panel) => panel.title)
+      )}`,
+      role: Role.system,
+    },
+    {
+      content: `Group the following diff under "User changes" as a bullet list: ${JSON.stringify(userChanges)}`,
+      role: Role.system,
+    },
+    {
+      content: `Group the following diff under "Migration changes" as a bullet list: ${JSON.stringify(
+        migrationChanges
+      )}`,
+      role: Role.system,
+    },
+  ];
+}
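
To make the prompt structure concrete: for a dashboard where a single panel was renamed, the four system messages assembled by getMessages look roughly like the sketch below. Everything here is an abbreviated illustration, not the exact runtime strings; the real diff payloads come from getDashboardChanges.

import { Message, Role } from './utils';

// Illustrative only: a stand-in for CHANGES_GENERATION_STANDARD_PROMPT plus the
// three context messages (panel titles, user diff, migration diff).
const examplePrompt = 'You are an expert in Grafana Dashboards. ... Try to make it as short as possible.';

export const exampleMessages: Message[] = [
  { role: Role.system, content: examplePrompt },
  { role: Role.system, content: 'This is the list of panel names, when referring to a panel, please use the title: ["CPU usage"]' },
  { role: Role.system, content: 'Group the following diff under "User changes" as a bullet list: {"panels":[{"op":"replace","path":["panels","0","title"],"value":"CPU usage"}]}' },
  { role: Role.system, content: 'Group the following diff under "Migration changes" as a bullet list: {}' },
];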

File: GenAI/utils.test.ts

@@ -1,5 +1,7 @@
 import { llms } from '@grafana/experimental';

+import { createDashboardModelFixture, createPanelJSONFixture } from '../../state/__fixtures__/dashboardFixtures';
+
 import {
   generateTextWithLLM,
   isLLMPluginEnabled,
@@ -8,6 +10,7 @@ import {
   Role,
   DONE_MESSAGE,
   OPEN_AI_MODEL,
+  getDashboardChanges,
 } from './utils';

 // Mock the llms.openai module
@@ -42,8 +45,9 @@ describe('generateTextWithLLM', () => {
     const messages = [{ role: Role.user, content: 'Hello' }];
     const onReply = jest.fn();
+    const temperature = 0.5;

-    await generateTextWithLLM(messages, onReply);
+    await generateTextWithLLM(messages, onReply, temperature);

     expect(llms.openai.streamChatCompletions).toHaveBeenCalledWith({
       model: OPEN_AI_MODEL,
@@ -52,6 +56,7 @@ describe('generateTextWithLLM', () => {
         DONE_MESSAGE,
         ...messages,
       ],
+      temperature,
     });
   });
 });
@@ -103,3 +108,54 @@ describe('cleanupResponse', () => {
     expect(cleanedResponse).toBe('This is a response');
   });
 });
+
+describe('getDashboardChanges', () => {
+  it('should correctly split user changes and migration changes', () => {
+    // Mock data for testing
+    const deprecatedOptions = {
+      legend: { displayMode: 'hidden', showLegend: false },
+    };
+    const deprecatedVersion = 37;
+    const dashboard = createDashboardModelFixture({
+      schemaVersion: deprecatedVersion,
+      panels: [createPanelJSONFixture({ title: 'Panel 1', options: deprecatedOptions })],
+    });
+
+    // Update title for the first panel
+    dashboard.updatePanels([
+      {
+        ...dashboard.panels[0],
+        title: 'New title',
+      },
+      ...dashboard.panels.slice(1),
+    ]);
+
+    // Call the function to test
+    const result = getDashboardChanges(dashboard);
+
+    // Assertions
+    expect(result.userChanges).toEqual({
+      panels: [
+        {
+          op: 'replace',
+          originalValue: 'Panel 1',
+          value: 'New title',
+          startLineNumber: expect.any(Number),
+          path: ['panels', '0', 'title'],
+        },
+      ],
+    });
+    expect(result.migrationChanges).toBeDefined();
+    expect(result.userChanges).not.toContain({
+      panels: [
+        {
+          op: 'replace',
+          originalValue: 'Panel 1',
+          value: 'New title',
+          startLineNumber: expect.any(Number),
+          path: ['panels', '0', 'title'],
+        },
+      ],
+    });
+  });
+});

File: GenAI/utils.ts

@@ -1,5 +1,8 @@
 import { llms } from '@grafana/experimental';

+import { DashboardModel } from '../../state';
+import { Diffs, jsonDiff } from '../VersionHistory/utils';
+
 export interface Message {
   role: Role;
   content: string;
@@ -39,11 +42,13 @@ export const OPEN_AI_MODEL = 'gpt-4';
  *
  * @param messages messages to send to LLM
  * @param onReply callback to call when LLM replies. The reply will be streamed, so it will be called for every token received.
+ * @param temperature what temperature to use when calling the llm. default 1.
  * @returns The subscription to the stream.
  */
 export const generateTextWithLLM = async (
   messages: Message[],
-  onReply: (response: string, isDone: boolean) => void
+  onReply: (response: string, isDone: boolean) => void,
+  temperature = 1
 ) => {
   const enabled = await isLLMPluginEnabled();
@@ -55,6 +60,7 @@ export const generateTextWithLLM = async (
     .streamChatCompletions({
       model: OPEN_AI_MODEL,
       messages: [DONE_MESSAGE, ...messages],
+      temperature,
     })
     .pipe(
       // Accumulate the stream content into a stream of strings, where each
@@ -93,3 +99,27 @@ export function isResponseCompleted(response: string) {
 export function cleanupResponse(response: string) {
   return response.replace(SPECIAL_DONE_TOKEN, '').replace(/"/g, '');
 }
+
+/**
+ * Diff the current dashboard with the original dashboard and the dashboard after migration
+ * to split the changes into user changes and migration changes.
+ * * User changes: changes made by the user
+ * * Migration changes: changes made by the DashboardMigrator after opening the dashboard
+ *
+ * @param dashboard current dashboard to be saved
+ * @returns user changes and migration changes
+ */
+export function getDashboardChanges(dashboard: DashboardModel): {
+  userChanges: Diffs;
+  migrationChanges: Diffs;
+} {
+  // Re-parse the dashboard to remove functions and other non-serializable properties
+  const currentDashboard = JSON.parse(JSON.stringify(dashboard.getSaveModelClone()));
+  const originalDashboard = dashboard.getOriginalDashboard()!;
+  const dashboardAfterMigration = JSON.parse(JSON.stringify(new DashboardModel(originalDashboard).getSaveModelClone()));
+
+  return {
+    userChanges: jsonDiff(dashboardAfterMigration, currentDashboard),
+    migrationChanges: jsonDiff(originalDashboard, dashboardAfterMigration),
+  };
+}
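
Because both helpers are exported, they can also be combined outside the React components. A minimal sketch (the function name and prompt text are illustrative, not part of this commit); temperature 0 keeps the wording stable between runs, and omitting the argument falls back to the default of 1:

import { DashboardModel } from '../../state';

import { generateTextWithLLM, getDashboardChanges, Message, Role } from './utils';

// Illustrative only: summarize just the user-made edits and ignore migration noise.
export async function describeUserChanges(
  dashboard: DashboardModel,
  onReply: (text: string, isDone: boolean) => void
) {
  const { userChanges } = getDashboardChanges(dashboard);

  const messages: Message[] = [
    { role: Role.system, content: 'Summarize the following dashboard diff as a short bullet list' },
    { role: Role.system, content: JSON.stringify(userChanges) },
  ];

  return generateTextWithLLM(messages, onReply, 0);
}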

File: SaveDashboard/forms/SaveDashboardForm.tsx

@@ -1,10 +1,14 @@
+import { css } from '@emotion/css';
 import React, { useMemo, useState } from 'react';

+import { GrafanaTheme2 } from '@grafana/data';
 import { selectors } from '@grafana/e2e-selectors';
 import { Stack } from '@grafana/experimental';
-import { Button, Checkbox, Form, TextArea } from '@grafana/ui';
+import { config } from '@grafana/runtime';
+import { Button, Checkbox, Form, TextArea, useStyles2 } from '@grafana/ui';
 import { DashboardModel } from 'app/features/dashboard/state';

+import { GenAIDashboardChangesButton } from '../../GenAI/GenAIDashboardChangesButton';
 import { SaveDashboardData, SaveDashboardOptions } from '../types';

 interface FormDTO {
@@ -36,6 +40,8 @@ export const SaveDashboardForm = ({
   const hasVariableChanged = useMemo(() => dashboard.hasVariableValuesChanged(), [dashboard]);

   const [saving, setSaving] = useState(false);
+  const [message, setMessage] = useState(options.message);
+  const styles = useStyles2(getStyles);

   return (
     <Form
@@ -44,7 +50,7 @@
           return;
         }
         setSaving(true);
-        options = { ...options, message: data.message };
+        options = { ...options, message };
         const result = await onSubmit(saveModel.clone, options, dashboard);
         if (result.status === 'success') {
           if (options.saveVariables) {
@@ -60,7 +66,6 @@
       }}
     >
       {({ register, errors }) => {
-        const messageProps = register('message');
         return (
           <Stack gap={2} direction="column" alignItems="flex-start">
             {hasTimeChanged && (
@@ -89,21 +94,35 @@
                 aria-label={selectors.pages.SaveDashboardModal.saveVariables}
               />
             )}
-            <TextArea
-              {...messageProps}
-              aria-label="message"
-              value={options.message}
-              onChange={(e) => {
-                onOptionsChange({
-                  ...options,
-                  message: e.currentTarget.value,
-                });
-                messageProps.onChange(e);
-              }}
-              placeholder="Add a note to describe your changes."
-              autoFocus
-              rows={5}
-            />
+            <div className={styles.message}>
+              {config.featureToggles.dashgpt && (
+                <GenAIDashboardChangesButton
+                  dashboard={dashboard}
+                  onGenerate={(text) => {
+                    onOptionsChange({
+                      ...options,
+                      message: text,
+                    });
+                    setMessage(text);
+                  }}
+                />
+              )}
+              <TextArea
+                aria-label="message"
+                value={message}
+                onChange={(e) => {
+                  onOptionsChange({
+                    ...options,
+                    message: e.currentTarget.value,
+                  });
+                  setMessage(e.currentTarget.value);
+                }}
+                placeholder="Add a note to describe your changes."
+                autoFocus
+                rows={5}
+              />
+            </div>
             <Stack alignItems="center">
               <Button variant="secondary" onClick={onCancel} fill="outline">
                 Cancel
@@ -124,3 +143,14 @@
     </Form>
   );
 };
+
+function getStyles(theme: GrafanaTheme2) {
+  return {
+    message: css`
+      display: flex;
+      align-items: end;
+      flex-direction: column;
+      width: 100%;
+    `,
+  };
+}
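
The new controls stay hidden unless the dashgpt feature toggle is enabled. A stripped-down sketch of that gate (the wrapper component and its props are hypothetical; the toggle check mirrors the form above):

import React from 'react';

import { config } from '@grafana/runtime';
import { DashboardModel } from 'app/features/dashboard/state';

import { GenAIDashboardChangesButton } from '../../GenAI/GenAIDashboardChangesButton';

interface Props {
  dashboard: DashboardModel;
  onGenerate: (text: string) => void;
}

// Hypothetical helper: when config.featureToggles.dashgpt is falsy, nothing
// GenAI-related is rendered and the save form behaves as before.
export const GenAIChangesNote = ({ dashboard, onGenerate }: Props) =>
  config.featureToggles.dashgpt ? <GenAIDashboardChangesButton dashboard={dashboard} onGenerate={onGenerate} /> : null;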