Dashboards: Auto-generate dashboard changes description (#75003)
Co-authored-by: Aaron Sanders <aaron.sanders@grafana.com>
Co-authored-by: nmarrs <nathanielmarrs@gmail.com>

parent: 9fca10bfc3
commit: 224e2c9efb
@@ -112,16 +112,16 @@ describe('GenAIButton', () => {
       expect(onReply).toHaveBeenCalledTimes(1);
     });
 
-    it('should call the LLM service with the messages configured', async () => {
+    it('should call the LLM service with the messages configured and the right temperature', async () => {
       const onReply = jest.fn();
       const messages = [{ content: 'Generate X', role: 'system' as Role }];
-      setup({ onReply, messages });
+      setup({ onReply, messages, temperature: 3 });
 
       const generateButton = await screen.findByRole('button');
       await fireEvent.click(generateButton);
 
       await waitFor(() => expect(generateTextWithLLM).toHaveBeenCalledTimes(1));
-      await waitFor(() => expect(generateTextWithLLM).toHaveBeenCalledWith(messages, expect.any(Function)));
+      await waitFor(() => expect(generateTextWithLLM).toHaveBeenCalledWith(messages, expect.any(Function), 3));
     });
   });
 });
@@ -12,6 +12,7 @@ export interface GenAIButtonProps {
   onClick?: (e: React.MouseEvent<HTMLButtonElement>) => void;
   messages: Message[];
   onReply: (response: string, isDone: boolean) => void;
+  temperature?: number;
 }
 
 export const GenAIButton = ({
@@ -20,6 +21,7 @@ export const GenAIButton = ({
   onClick,
   messages,
   onReply,
+  temperature = 1,
 }: GenAIButtonProps) => {
   const styles = useStyles2(getStyles);
   const [enabled, setEnabled] = useState(true);
@@ -33,7 +35,7 @@ export const GenAIButton = ({
   const onGenerate = (e: React.MouseEvent<HTMLButtonElement>) => {
     onClick?.(e);
     setLoading(true);
-    generateTextWithLLM(messages, replyHandler);
+    generateTextWithLLM(messages, replyHandler, temperature);
   };
 
   useEffect(() => {
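
For context, a minimal sketch (not part of this commit) of how a consumer of GenAIButton might pass the new optional temperature prop. The wrapper component, prompt text, and state handling below are illustrative assumptions; any props of the button defined outside this hunk are omitted.

import React, { useState } from 'react';

import { GenAIButton } from './GenAIButton';
import { Message, Role } from './utils';

// Hypothetical usage only: a single system message and a low temperature for
// more deterministic output. Omitting `temperature` keeps the default of 1.
export const ExampleTitleSuggestion = () => {
  const [title, setTitle] = useState('');
  const messages: Message[] = [{ role: Role.system, content: 'Suggest a short dashboard title' }];

  return (
    <GenAIButton
      messages={messages}
      loadingText={'Generating title'}
      // onReply streams: it is called for every chunk, with isDone true on the last one.
      onReply={(response, isDone) => {
        if (isDone) {
          setTitle(response);
        }
      }}
      temperature={0.2}
    />
  );
};
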
@@ -0,0 +1,60 @@
+import React, { useMemo } from 'react';
+
+import { DashboardModel } from '../../state';
+
+import { GenAIButton } from './GenAIButton';
+import { getDashboardChanges, Message, Role } from './utils';
+
+interface GenAIDashboardChangesButtonProps {
+  dashboard: DashboardModel;
+  onGenerate: (title: string, isDone: boolean) => void;
+}
+
+const CHANGES_GENERATION_STANDARD_PROMPT = [
+  'You are an expert in Grafana Dashboards',
+  'Your goal is to write a description of the changes for a dashboard',
+  'When referring to panel changes, use the panel title',
+  'When using panel title, wrap it with double quotes',
+  'When the panel changes the position, just mention the panel title has changed position',
+  'When an entire panel is added or removed, use the panel title and only say it was added or removed and disregard the rest of the changes for that panel',
+  'Group changes when all panels are affected',
+  'Do not mention line number',
+  'Refer to templating elements as variables',
+  'Ignore and never mention changes about plugin version',
+  'Try to make it as short as possible.',
+].join('. ');
+
+export const GenAIDashboardChangesButton = ({ dashboard, onGenerate }: GenAIDashboardChangesButtonProps) => {
+  const messages = useMemo(() => getMessages(dashboard), [dashboard]);
+
+  return (
+    <GenAIButton messages={messages} onReply={onGenerate} loadingText={'Generating changes summary'} temperature={0} />
+  );
+};
+
+function getMessages(dashboard: DashboardModel): Message[] {
+  const { userChanges, migrationChanges } = getDashboardChanges(dashboard);
+
+  return [
+    {
+      content: CHANGES_GENERATION_STANDARD_PROMPT,
+      role: Role.system,
+    },
+    {
+      content: `This is the list of panel names, when referring to a panel, please use the title: ${JSON.stringify(
+        dashboard.panels.map((panel) => panel.title)
+      )}`,
+      role: Role.system,
+    },
+    {
+      content: `Group the following diff under "User changes" as a bullet list: ${JSON.stringify(userChanges)}`,
+      role: Role.system,
+    },
+    {
+      content: `Group the following diff under "Migration changes" as a bullet list: ${JSON.stringify(
+        migrationChanges
+      )}`,
+      role: Role.system,
+    },
+  ];
+}
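
Purely for illustration (not part of the commit): for a one-panel dashboard whose only edit was a title change, the array that getMessages assembles would look roughly like the sketch below. The literal values are abbreviated, the first entry stands in for CHANGES_GENERATION_STANDARD_PROMPT, and the exact diff JSON depends on jsonDiff output.

// Abbreviated sketch of the prompt that GenAIDashboardChangesButton hands to GenAIButton.
const exampleMessages: Message[] = [
  { role: Role.system, content: '<CHANGES_GENERATION_STANDARD_PROMPT>' },
  { role: Role.system, content: 'This is the list of panel names, when referring to a panel, please use the title: ["New title"]' },
  { role: Role.system, content: 'Group the following diff under "User changes" as a bullet list: {"panels":[...]}' },
  { role: Role.system, content: 'Group the following diff under "Migration changes" as a bullet list: {...}' },
];
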
@@ -1,5 +1,7 @@
 import { llms } from '@grafana/experimental';
 
+import { createDashboardModelFixture, createPanelJSONFixture } from '../../state/__fixtures__/dashboardFixtures';
+
 import {
   generateTextWithLLM,
   isLLMPluginEnabled,
@@ -8,6 +10,7 @@ import {
   Role,
   DONE_MESSAGE,
   OPEN_AI_MODEL,
+  getDashboardChanges,
 } from './utils';
 
 // Mock the llms.openai module
@@ -42,8 +45,9 @@ describe('generateTextWithLLM', () => {
 
     const messages = [{ role: Role.user, content: 'Hello' }];
     const onReply = jest.fn();
+    const temperature = 0.5;
 
-    await generateTextWithLLM(messages, onReply);
+    await generateTextWithLLM(messages, onReply, temperature);
 
     expect(llms.openai.streamChatCompletions).toHaveBeenCalledWith({
       model: OPEN_AI_MODEL,
@@ -52,6 +56,7 @@ describe('generateTextWithLLM', () => {
         DONE_MESSAGE,
         ...messages,
       ],
+      temperature,
     });
   });
 });
@@ -103,3 +108,54 @@ describe('cleanupResponse', () => {
     expect(cleanedResponse).toBe('This is a response');
   });
 });
+
+describe('getDashboardChanges', () => {
+  it('should correctly split user changes and migration changes', () => {
+    // Mock data for testing
+    const deprecatedOptions = {
+      legend: { displayMode: 'hidden', showLegend: false },
+    };
+    const deprecatedVersion = 37;
+    const dashboard = createDashboardModelFixture({
+      schemaVersion: deprecatedVersion,
+      panels: [createPanelJSONFixture({ title: 'Panel 1', options: deprecatedOptions })],
+    });
+
+    // Update title for the first panel
+    dashboard.updatePanels([
+      {
+        ...dashboard.panels[0],
+        title: 'New title',
+      },
+      ...dashboard.panels.slice(1),
+    ]);
+
+    // Call the function to test
+    const result = getDashboardChanges(dashboard);
+
+    // Assertions
+    expect(result.userChanges).toEqual({
+      panels: [
+        {
+          op: 'replace',
+          originalValue: 'Panel 1',
+          value: 'New title',
+          startLineNumber: expect.any(Number),
+          path: ['panels', '0', 'title'],
+        },
+      ],
+    });
+    expect(result.migrationChanges).toBeDefined();
+    expect(result.userChanges).not.toContain({
+      panels: [
+        {
+          op: 'replace',
+          originalValue: 'Panel 1',
+          value: 'New title',
+          startLineNumber: expect.any(Number),
+          path: ['panels', '0', 'title'],
+        },
+      ],
+    });
+  });
+});
@@ -1,5 +1,8 @@
 import { llms } from '@grafana/experimental';
 
+import { DashboardModel } from '../../state';
+import { Diffs, jsonDiff } from '../VersionHistory/utils';
+
 export interface Message {
   role: Role;
   content: string;
@@ -39,11 +42,13 @@ export const OPEN_AI_MODEL = 'gpt-4';
  *
  * @param messages messages to send to LLM
  * @param onReply callback to call when LLM replies. The reply will be streamed, so it will be called for every token received.
+ * @param temperature what temperature to use when calling the llm. default 1.
  * @returns The subscription to the stream.
  */
 export const generateTextWithLLM = async (
   messages: Message[],
-  onReply: (response: string, isDone: boolean) => void
+  onReply: (response: string, isDone: boolean) => void,
+  temperature = 1
 ) => {
   const enabled = await isLLMPluginEnabled();
 
@@ -55,6 +60,7 @@ export const generateTextWithLLM = async (
     .streamChatCompletions({
       model: OPEN_AI_MODEL,
       messages: [DONE_MESSAGE, ...messages],
+      temperature,
     })
     .pipe(
       // Accumulate the stream content into a stream of strings, where each
@@ -93,3 +99,27 @@ export function isResponseCompleted(response: string) {
 export function cleanupResponse(response: string) {
   return response.replace(SPECIAL_DONE_TOKEN, '').replace(/"/g, '');
 }
+
+/**
+ * Diff the current dashboard with the original dashboard and the dashboard after migration
+ * to split the changes into user changes and migration changes.
+ * * User changes: changes made by the user
+ * * Migration changes: changes made by the DashboardMigrator after opening the dashboard
+ *
+ * @param dashboard current dashboard to be saved
+ * @returns user changes and migration changes
+ */
+export function getDashboardChanges(dashboard: DashboardModel): {
+  userChanges: Diffs;
+  migrationChanges: Diffs;
+} {
+  // Re-parse the dashboard to remove functions and other non-serializable properties
+  const currentDashboard = JSON.parse(JSON.stringify(dashboard.getSaveModelClone()));
+  const originalDashboard = dashboard.getOriginalDashboard()!;
+  const dashboardAfterMigration = JSON.parse(JSON.stringify(new DashboardModel(originalDashboard).getSaveModelClone()));
+
+  return {
+    userChanges: jsonDiff(dashboardAfterMigration, currentDashboard),
+    migrationChanges: jsonDiff(originalDashboard, dashboardAfterMigration),
+  };
+}
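
As a rough sketch (not taken from the commit) of how the two exported utils compose outside of React: split the save-model diffs, then stream a deterministic summary. The helper name, prompt wording, and the `dashboard` instance are assumptions; only the imported functions and their signatures come from the diff above.

import { DashboardModel } from '../../state';

import { generateTextWithLLM, getDashboardChanges, Message, Role } from './utils';

// Hypothetical helper: summarize pending changes for an existing DashboardModel.
export async function describeChanges(dashboard: DashboardModel) {
  // userChanges: migrated original vs. current save model (edits made by the user)
  // migrationChanges: stored original vs. its migrated form (edits made by DashboardMigrator)
  const { userChanges, migrationChanges } = getDashboardChanges(dashboard);

  const messages: Message[] = [
    { role: Role.system, content: `Summarize the user changes: ${JSON.stringify(userChanges)}` },
    { role: Role.system, content: `Summarize the migration changes: ${JSON.stringify(migrationChanges)}` },
  ];

  // temperature 0 keeps the output deterministic; onReply is invoked per streamed chunk.
  return generateTextWithLLM(messages, (response, isDone) => {
    if (isDone) {
      console.log(response);
    }
  }, 0);
}
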
@@ -1,10 +1,14 @@
+import { css } from '@emotion/css';
 import React, { useMemo, useState } from 'react';
 
+import { GrafanaTheme2 } from '@grafana/data';
 import { selectors } from '@grafana/e2e-selectors';
 import { Stack } from '@grafana/experimental';
-import { Button, Checkbox, Form, TextArea } from '@grafana/ui';
+import { config } from '@grafana/runtime';
+import { Button, Checkbox, Form, TextArea, useStyles2 } from '@grafana/ui';
 import { DashboardModel } from 'app/features/dashboard/state';
 
+import { GenAIDashboardChangesButton } from '../../GenAI/GenAIDashboardChangesButton';
 import { SaveDashboardData, SaveDashboardOptions } from '../types';
 
 interface FormDTO {
@@ -36,6 +40,8 @@ export const SaveDashboardForm = ({
   const hasVariableChanged = useMemo(() => dashboard.hasVariableValuesChanged(), [dashboard]);
 
   const [saving, setSaving] = useState(false);
+  const [message, setMessage] = useState(options.message);
+  const styles = useStyles2(getStyles);
 
   return (
     <Form
@@ -44,7 +50,7 @@ export const SaveDashboardForm = ({
         return;
       }
       setSaving(true);
-      options = { ...options, message: data.message };
+      options = { ...options, message };
       const result = await onSubmit(saveModel.clone, options, dashboard);
       if (result.status === 'success') {
         if (options.saveVariables) {
@@ -60,7 +66,6 @@ export const SaveDashboardForm = ({
       }}
     >
       {({ register, errors }) => {
-        const messageProps = register('message');
         return (
           <Stack gap={2} direction="column" alignItems="flex-start">
             {hasTimeChanged && (
@@ -89,21 +94,35 @@ export const SaveDashboardForm = ({
                 aria-label={selectors.pages.SaveDashboardModal.saveVariables}
               />
             )}
-            <TextArea
-              {...messageProps}
-              aria-label="message"
-              value={options.message}
-              onChange={(e) => {
-                onOptionsChange({
-                  ...options,
-                  message: e.currentTarget.value,
-                });
-                messageProps.onChange(e);
-              }}
-              placeholder="Add a note to describe your changes."
-              autoFocus
-              rows={5}
-            />
+            <div className={styles.message}>
+              {config.featureToggles.dashgpt && (
+                <GenAIDashboardChangesButton
+                  dashboard={dashboard}
+                  onGenerate={(text) => {
+                    onOptionsChange({
+                      ...options,
+                      message: text,
+                    });
+                    setMessage(text);
+                  }}
+                />
+              )}
+              <TextArea
+                aria-label="message"
+                value={message}
+                onChange={(e) => {
+                  onOptionsChange({
+                    ...options,
+                    message: e.currentTarget.value,
+                  });
+                  setMessage(e.currentTarget.value);
+                }}
+                placeholder="Add a note to describe your changes."
+                autoFocus
+                rows={5}
+              />
+            </div>
+
             <Stack alignItems="center">
               <Button variant="secondary" onClick={onCancel} fill="outline">
                 Cancel
@@ -124,3 +143,14 @@ export const SaveDashboardForm = ({
     </Form>
   );
 };
+
+function getStyles(theme: GrafanaTheme2) {
+  return {
+    message: css`
+      display: flex;
+      align-items: end;
+      flex-direction: column;
+      width: 100%;
+    `,
+  };
+}