mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Dashgpt: Implement panel title and description generation (#74284)
* Add "addon" prop to options panel item descriptor; add placeholder generate buttons to panel title + description * Add basic `ai` icon; add separate wrapping component for panel title generation button * Add basic panel title generation * Add basic panel description generation * Add configuration error state for GenAI button * Add GenAI button generic component --------- Co-authored-by: Ivan Ortega <ivanortegaalba@gmail.com> Co-authored-by: Adela Almasan <adela.almasan@grafana.com>
This commit is contained in:
parent
a52fff4e86
commit
8a127c4351
@ -33,6 +33,7 @@ export const availableIconsIndex = {
|
||||
'arrows-v': true,
|
||||
'expand-arrows': true,
|
||||
at: true,
|
||||
ai: true,
|
||||
backward: true,
|
||||
bars: true,
|
||||
bell: true,
|
||||
|
@ -0,0 +1,127 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import React from 'react';
import { Router } from 'react-router-dom';
import { Subscription } from 'rxjs';

import { selectors } from '@grafana/e2e-selectors';
import { locationService } from '@grafana/runtime';

import { GenAIButton, GenAIButtonProps } from './GenAIButton';
import { isLLMPluginEnabled, generateTextWithLLM, Role } from './utils';

// Stub the LLM helpers so no plugin/network access happens in tests.
jest.mock('./utils', () => ({
  generateTextWithLLM: jest.fn(),
  isLLMPluginEnabled: jest.fn(),
}));

describe('GenAIButton', () => {
  const onReply = jest.fn();

  function setup(props: GenAIButtonProps = { onReply, messages: [] }) {
    return render(
      <Router history={locationService.getHistory()}>
        <GenAIButton text="Auto-generate" {...props} />
      </Router>
    );
  }

  describe('when LLM plugin is not configured', () => {
    beforeAll(() => {
      jest.mocked(isLLMPluginEnabled).mockResolvedValue(false);
    });

    it('should renders text ', async () => {
      const { getByText } = setup();
      // Fix: the waitFor promise must be awaited — without the await the
      // assertion result was discarded and the test passed vacuously.
      await waitFor(() => expect(getByText('Auto-generate')).toBeInTheDocument());
    });

    it('should disable the button', async () => {
      const { getByRole } = setup();
      // Fix: await the waitFor promise (see above).
      await waitFor(() => expect(getByRole('button')).toBeDisabled());
    });

    it('should display an error message when hovering', async () => {
      const { getByRole, getByTestId } = setup();

      // Wait for the enablement check to be completed
      const button = getByRole('button');
      await waitFor(() => expect(button).toBeDisabled());
      await userEvent.hover(button);

      const tooltip = await waitFor(() => getByTestId(selectors.components.Tooltip.container));
      expect(tooltip).toBeVisible();

      // The tooltip keeps interactive to be able to click the link
      await userEvent.hover(tooltip);
      expect(tooltip).toBeVisible();
    });
  });

  describe('when LLM plugin is properly configured', () => {
    beforeEach(() => {
      jest.resetAllMocks();
      jest.mocked(isLLMPluginEnabled).mockResolvedValue(true);
    });

    it('should renders text ', async () => {
      setup();

      // Fix: awaited so the assertion actually runs before the test ends.
      await waitFor(async () => expect(await screen.findByText('Auto-generate')).toBeInTheDocument());
    });

    it('should enable the button', async () => {
      setup();
      // Fix: awaited so the assertion actually runs before the test ends.
      await waitFor(async () => expect(await screen.findByRole('button')).toBeEnabled());
    });

    it('disables the button while generating', async () => {
      const isDoneGeneratingMessage = false;
      // Stream one partial reply and never complete, keeping the button loading.
      jest.mocked(generateTextWithLLM).mockImplementationOnce((messages = [], replyHandler) => {
        replyHandler('Generated text', isDoneGeneratingMessage);
        return new Promise(() => new Subscription());
      });

      const { getByText, getByRole } = setup();
      const generateButton = getByText('Auto-generate');

      // Click the button (fireEvent is synchronous — no await needed)
      fireEvent.click(generateButton);

      // The loading text should be visible and the button disabled
      expect(await screen.findByText('Generating')).toBeVisible();
      await waitFor(() => expect(getByRole('button')).toBeDisabled());
    });

    it('handles the response and re-enables the button', async () => {
      const isDoneGeneratingMessage = true;
      jest.mocked(generateTextWithLLM).mockImplementationOnce((messages = [], replyHandler) => {
        replyHandler('Generated text', isDoneGeneratingMessage);
        return new Promise(() => new Subscription());
      });
      const onReply = jest.fn();
      setup({ onReply, messages: [] });
      const generateButton = await screen.findByRole('button');

      // Click the button
      fireEvent.click(generateButton);
      await waitFor(() => expect(generateButton).toBeEnabled());
      // waitFor above already proves this; the original asserted it twice.
      await waitFor(() => expect(onReply).toHaveBeenCalledTimes(1));
    });

    it('should call the LLM service with the messages configured', async () => {
      const onReply = jest.fn();
      const messages = [{ content: 'Generate X', role: 'system' as Role }];
      setup({ onReply, messages });

      const generateButton = await screen.findByRole('button');
      fireEvent.click(generateButton);

      await waitFor(() => expect(generateTextWithLLM).toHaveBeenCalledTimes(1));
      await waitFor(() => expect(generateTextWithLLM).toHaveBeenCalledWith(messages, expect.any(Function)));
    });
  });
});
|
@ -0,0 +1,80 @@
|
||||
import { css } from '@emotion/css';
import React, { useEffect, useState } from 'react';

import { GrafanaTheme2 } from '@grafana/data';
import { Button, Spinner, useStyles2, Link, Tooltip } from '@grafana/ui';

import { Message, generateTextWithLLM, isLLMPluginEnabled } from './utils';

export interface GenAIButtonProps {
  /** Idle button label. Defaults to 'Auto-generate'. */
  text?: string;
  /** Label shown while a generation is in flight. Defaults to 'Generating'. */
  loadingText?: string;
  /** Optional extra click handler, invoked before the generation starts. */
  onClick?: (e: React.MouseEvent<HTMLButtonElement>) => void;
  /** Ordered prompt messages forwarded to the LLM. */
  messages: Message[];
  /** Streamed reply callback; `isDone` is true for the final chunk. */
  onReply: (response: string, isDone: boolean) => void;
}

/**
 * Button that triggers an LLM text generation and streams the reply to `onReply`.
 * When the LLM plugin is not configured, the button is disabled and a tooltip
 * links to the plugin settings page.
 */
export const GenAIButton = ({
  text = 'Auto-generate',
  loadingText = 'Generating',
  onClick,
  messages,
  onReply,
}: GenAIButtonProps) => {
  const styles = useStyles2(getStyles);
  const [enabled, setEnabled] = useState(true);
  const [loading, setLoading] = useState(false);

  // Clears the loading state once the final chunk arrives.
  const replyHandler = (response: string, isDone: boolean) => {
    setLoading(!isDone);
    onReply(response, isDone);
  };

  const onGenerate = (e: React.MouseEvent<HTMLButtonElement>) => {
    onClick?.(e);
    setLoading(true);
    // Fix: generateTextWithLLM rejects when the plugin is disabled. Without a
    // catch this was an unhandled promise rejection and the button stayed
    // stuck in the "Generating" state.
    generateTextWithLLM(messages, replyHandler).catch(() => setLoading(false));
  };

  // Check plugin availability once on mount; treat a failed check as disabled.
  useEffect(() => {
    isLLMPluginEnabled()
      .then(setEnabled)
      .catch(() => setEnabled(false));
  }, []);

  const getIcon = () => {
    if (loading) {
      // The spinner is rendered separately while loading.
      return undefined;
    }
    if (!enabled) {
      return 'exclamation-circle';
    }
    return 'ai';
  };

  return (
    <div className={styles.wrapper}>
      {loading && <Spinner size={14} />}
      <Tooltip
        show={enabled ? false : undefined}
        interactive
        content={
          <span>
            The LLM plugin is not correctly configured. See your <Link href={`/plugins/grafana-llm-app`}>settings</Link>{' '}
            and enable your plugin.
          </span>
        }
      >
        <Button icon={getIcon()} onClick={onGenerate} fill="text" size="sm" disabled={loading || !enabled}>
          {!loading ? text : loadingText}
        </Button>
      </Tooltip>
    </div>
  );
};

const getStyles = (theme: GrafanaTheme2) => ({
  wrapper: css`
    display: flex;
  `,
});
|
@ -0,0 +1,44 @@
|
||||
import React from 'react';

import { getDashboardSrv } from '../../services/DashboardSrv';
import { PanelModel } from '../../state';

import { GenAIButton } from './GenAIButton';
import { Message, Role } from './utils';

interface GenAIPanelDescriptionButtonProps {
  /** Receives the streamed description; `isDone` is true for the final chunk. */
  onGenerate: (description: string, isDone: boolean) => void;
  panel: PanelModel;
}

// Fix: the concatenated fragments had no separators, producing
// "Panels.Your goal ... declaration The description ..." in the prompt.
const DESCRIPTION_GENERATION_STANDARD_PROMPT =
  'You are an expert in creating Grafana Panels. ' +
  'Your goal is to write short, descriptive, and concise panel description using a JSON object with the panel declaration. ' +
  'The description should be shorter than 140 characters.';

/** Renders a GenAI button that asks the LLM for a panel description. */
export const GenAIPanelDescriptionButton = ({ onGenerate, panel }: GenAIPanelDescriptionButtonProps) => {
  function getMessages(): Message[] {
    const dashboard = getDashboardSrv().getCurrent()!;

    return [
      {
        content: DESCRIPTION_GENERATION_STANDARD_PROMPT,
        role: Role.system,
      },
      {
        content: `The panel is part of a dashboard with the title: ${dashboard.title}`,
        role: Role.system,
      },
      {
        // Fix: this message is about the dashboard *description* but
        // interpolated `dashboard.title`.
        content: `The panel is part of a dashboard with the description: ${dashboard.description}`,
        role: Role.system,
      },
      {
        content: `Use this JSON object which defines the panel: ${JSON.stringify(panel.getSaveModel())}`,
        role: Role.user,
      },
    ];
  }

  return <GenAIButton messages={getMessages()} onReply={onGenerate} loadingText={'Generating description'} />;
};
|
@ -0,0 +1,44 @@
|
||||
import React from 'react';

import { getDashboardSrv } from '../../services/DashboardSrv';
import { PanelModel } from '../../state';

import { GenAIButton } from './GenAIButton';
import { Message, Role } from './utils';

interface GenAIPanelTitleButtonProps {
  /** Receives the streamed title; `isDone` is true for the final chunk. */
  onGenerate: (title: string, isDone: boolean) => void;
  panel: PanelModel;
}

// Fix: the concatenated fragments had no separators, producing
// "Panels.Your goal ... panel.The title ..." in the prompt.
const TITLE_GENERATION_STANDARD_PROMPT =
  'You are an expert in creating Grafana Panels. ' +
  'Your goal is to write short, descriptive, and concise panel title for a panel. ' +
  'The title should be shorter than 50 characters.';

/** Renders a GenAI button that asks the LLM for a panel title. */
export const GenAIPanelTitleButton = ({ onGenerate, panel }: GenAIPanelTitleButtonProps) => {
  function getMessages(): Message[] {
    const dashboard = getDashboardSrv().getCurrent()!;

    return [
      {
        content: TITLE_GENERATION_STANDARD_PROMPT,
        role: Role.system,
      },
      {
        content: `The panel is part of a dashboard with the title: ${dashboard.title}`,
        role: Role.system,
      },
      {
        // Fix: this message is about the dashboard *description* but
        // interpolated `dashboard.title`.
        content: `The panel is part of a dashboard with the description: ${dashboard.description}`,
        role: Role.system,
      },
      {
        content: `Use this JSON object which defines the panel: ${JSON.stringify(panel.getSaveModel())}`,
        role: Role.user,
      },
    ];
  }

  return <GenAIButton messages={getMessages()} onReply={onGenerate} loadingText={'Generating title'} />;
};
|
105
public/app/features/dashboard/components/GenAI/utils.test.ts
Normal file
105
public/app/features/dashboard/components/GenAI/utils.test.ts
Normal file
@ -0,0 +1,105 @@
|
||||
import { llms } from '@grafana/experimental';
|
||||
|
||||
import {
|
||||
generateTextWithLLM,
|
||||
isLLMPluginEnabled,
|
||||
isResponseCompleted,
|
||||
cleanupResponse,
|
||||
Role,
|
||||
DONE_MESSAGE,
|
||||
OPEN_AI_MODEL,
|
||||
} from './utils';
|
||||
|
||||
// Mock the llms.openai module
|
||||
jest.mock('@grafana/experimental', () => ({
|
||||
llms: {
|
||||
openai: {
|
||||
streamChatCompletions: jest.fn(),
|
||||
accumulateContent: jest.fn(),
|
||||
enabled: jest.fn(),
|
||||
},
|
||||
},
|
||||
}));
|
||||
|
||||
describe('generateTextWithLLM', () => {
|
||||
it('should throw an error if LLM plugin is not enabled', async () => {
|
||||
jest.mocked(llms.openai.enabled).mockResolvedValue(false);
|
||||
|
||||
await expect(generateTextWithLLM([{ role: Role.user, content: 'Hello' }], jest.fn())).rejects.toThrow(
|
||||
'LLM plugin is not enabled'
|
||||
);
|
||||
});
|
||||
|
||||
it('should call llms.openai.streamChatCompletions with the correct parameters', async () => {
|
||||
// Mock llms.openai.enabled to return true
|
||||
jest.mocked(llms.openai.enabled).mockResolvedValue(true);
|
||||
|
||||
// Mock llms.openai.streamChatCompletions to return a mock observable (types not exported from library)
|
||||
const mockObservable = { pipe: jest.fn().mockReturnValue({ subscribe: jest.fn() }) } as unknown as ReturnType<
|
||||
typeof llms.openai.streamChatCompletions
|
||||
>;
|
||||
jest.mocked(llms.openai.streamChatCompletions).mockReturnValue(mockObservable);
|
||||
|
||||
const messages = [{ role: Role.user, content: 'Hello' }];
|
||||
const onReply = jest.fn();
|
||||
|
||||
await generateTextWithLLM(messages, onReply);
|
||||
|
||||
expect(llms.openai.streamChatCompletions).toHaveBeenCalledWith({
|
||||
model: OPEN_AI_MODEL,
|
||||
messages: [
|
||||
// It will always includes the DONE_MESSAGE by default as the first message
|
||||
DONE_MESSAGE,
|
||||
...messages,
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('isLLMPluginEnabled', () => {
|
||||
it('should return true if LLM plugin is enabled', async () => {
|
||||
// Mock llms.openai.enabled to return true
|
||||
jest.mocked(llms.openai.enabled).mockResolvedValue(true);
|
||||
|
||||
const enabled = await isLLMPluginEnabled();
|
||||
|
||||
expect(enabled).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if LLM plugin is not enabled', async () => {
|
||||
// Mock llms.openai.enabled to return false
|
||||
jest.mocked(llms.openai.enabled).mockResolvedValue(false);
|
||||
|
||||
const enabled = await isLLMPluginEnabled();
|
||||
|
||||
expect(enabled).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isResponseCompleted', () => {
|
||||
it('should return true if response ends with the special done token', () => {
|
||||
const response = 'This is a response¬';
|
||||
|
||||
const completed = isResponseCompleted(response);
|
||||
|
||||
expect(completed).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if response does not end with the special done token', () => {
|
||||
const response = 'This is a response';
|
||||
|
||||
const completed = isResponseCompleted(response);
|
||||
|
||||
expect(completed).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cleanupResponse', () => {
|
||||
it('should remove the special done token and quotes from the response', () => {
|
||||
const response = 'This is a "response¬"';
|
||||
|
||||
const cleanedResponse = cleanupResponse(response);
|
||||
|
||||
expect(cleanedResponse).toBe('This is a response');
|
||||
});
|
||||
});
|
95
public/app/features/dashboard/components/GenAI/utils.ts
Normal file
95
public/app/features/dashboard/components/GenAI/utils.ts
Normal file
@ -0,0 +1,95 @@
|
||||
import { llms } from '@grafana/experimental';
|
||||
|
||||
export interface Message {
|
||||
role: Role;
|
||||
content: string;
|
||||
}
|
||||
|
||||
export enum Role {
|
||||
// System content cannot be overwritten by user propmts.
|
||||
'system' = 'system',
|
||||
// User content is the content that the user has entered.
|
||||
// This content can be overwritten by following propmt.
|
||||
'user' = 'user',
|
||||
}
|
||||
|
||||
// TODO: Replace this approach with more stable approach
|
||||
export const SPECIAL_DONE_TOKEN = '¬';
|
||||
|
||||
/**
|
||||
* The llm library doesn't indicate when the stream is done, so we need to ask the LLM to add an special token to indicate that the stream is done at the end of the message.
|
||||
*/
|
||||
export const DONE_MESSAGE = {
|
||||
role: Role.system,
|
||||
content: `When you are done with the response, write "${SPECIAL_DONE_TOKEN}" always at the end of the response.`,
|
||||
};
|
||||
|
||||
/**
|
||||
* The OpenAI model to be used.
|
||||
*/
|
||||
export const OPEN_AI_MODEL = 'gpt-4';
|
||||
|
||||
/**
|
||||
* Generate a text with the instructions for LLM to follow.
|
||||
* Every message will be sent to LLM as a prompt. The messages will be sent in order. The messages will be composed by the content and the role.
|
||||
*
|
||||
* The role can be system or user.
|
||||
* - System messages cannot be overwritten by user input. They are used to send instructions to LLM about how to behave or how to format the response.
|
||||
* - User messages can be overwritten by user input and they will be used to send manually user input.
|
||||
*
|
||||
* @param messages messages to send to LLM
|
||||
* @param onReply callback to call when LLM replies. The reply will be streamed, so it will be called for every token received.
|
||||
* @returns The subscription to the stream.
|
||||
*/
|
||||
export const generateTextWithLLM = async (
|
||||
messages: Message[],
|
||||
onReply: (response: string, isDone: boolean) => void
|
||||
) => {
|
||||
const enabled = await isLLMPluginEnabled();
|
||||
|
||||
if (!enabled) {
|
||||
throw Error('LLM plugin is not enabled');
|
||||
}
|
||||
|
||||
return llms.openai
|
||||
.streamChatCompletions({
|
||||
model: OPEN_AI_MODEL,
|
||||
messages: [DONE_MESSAGE, ...messages],
|
||||
})
|
||||
.pipe(
|
||||
// Accumulate the stream content into a stream of strings, where each
|
||||
// element contains the accumulated message so far.
|
||||
llms.openai.accumulateContent()
|
||||
)
|
||||
.subscribe((response) => {
|
||||
return onReply(cleanupResponse(response), isResponseCompleted(response));
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Check if the LLM plugin is enabled and configured.
|
||||
* @returns true if the LLM plugin is enabled and configured.
|
||||
*/
|
||||
export async function isLLMPluginEnabled() {
|
||||
// Check if the LLM plugin is enabled and configured.
|
||||
// If not, we won't be able to make requests, so return early.
|
||||
return await llms.openai.enabled();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the response is completed using the special done token.
|
||||
* @param response The response to check.
|
||||
* @returns true if the response is completed.
|
||||
*/
|
||||
export function isResponseCompleted(response: string) {
|
||||
return response.endsWith(SPECIAL_DONE_TOKEN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove the special done token and quotes from the response.
|
||||
* @param response The response to clean up.
|
||||
* @returns The cleaned up response.
|
||||
*/
|
||||
export function cleanupResponse(response: string) {
|
||||
return response.replace(SPECIAL_DONE_TOKEN, '').replace(/"/g, '');
|
||||
}
|
@ -1,8 +1,10 @@
|
||||
import { css } from '@emotion/css';
|
||||
import React, { ReactNode } from 'react';
|
||||
import Highlighter from 'react-highlight-words';
|
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { selectors } from '@grafana/e2e-selectors';
|
||||
import { Field, Label } from '@grafana/ui';
|
||||
import { Field, Label, useStyles2 } from '@grafana/ui';
|
||||
|
||||
import { OptionsPaneCategoryDescriptor } from './OptionsPaneCategoryDescriptor';
|
||||
import { OptionsPaneItemOverrides } from './OptionsPaneItemOverrides';
|
||||
@ -17,6 +19,7 @@ export interface OptionsPaneItemProps {
|
||||
skipField?: boolean;
|
||||
showIf?: () => boolean;
|
||||
overrides?: OptionPaneItemOverrideInfo[];
|
||||
addon?: ReactNode;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -28,7 +31,7 @@ export class OptionsPaneItemDescriptor {
|
||||
constructor(public props: OptionsPaneItemProps) {}
|
||||
|
||||
getLabel(searchQuery?: string): ReactNode {
|
||||
const { title, description, overrides } = this.props;
|
||||
const { title, description, overrides, addon } = this.props;
|
||||
|
||||
if (!searchQuery) {
|
||||
// Do not render label for categories with only one child
|
||||
@ -36,12 +39,7 @@ export class OptionsPaneItemDescriptor {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Label description={description}>
|
||||
{title}
|
||||
{overrides && overrides.length > 0 && <OptionsPaneItemOverrides overrides={overrides} />}
|
||||
</Label>
|
||||
);
|
||||
return <OptionPaneLabel title={title} description={description} overrides={overrides} addon={addon} />;
|
||||
}
|
||||
|
||||
const categories: React.ReactNode[] = [];
|
||||
@ -99,3 +97,32 @@ export class OptionsPaneItemDescriptor {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
interface OptionPanelLabelProps {
  title: string;
  description?: string;
  overrides?: OptionPaneItemOverrideInfo[];
  // Fix: optional to match OptionsPaneItemProps.addon, which may be undefined.
  addon?: ReactNode;
}

/** Label row for an options-pane item: title, override badges, and an optional right-aligned addon. */
function OptionPaneLabel({ title, description, overrides, addon }: OptionPanelLabelProps) {
  const styles = useStyles2(getLabelStyles);
  return (
    <div className={styles.container}>
      <Label description={description}>
        {title}
        {overrides && overrides.length > 0 && <OptionsPaneItemOverrides overrides={overrides} />}
      </Label>
      {addon}
    </div>
  );
}

function getLabelStyles(theme: GrafanaTheme2) {
  return {
    container: css`
      display: flex;
      justify-content: space-between;
    `,
  };
}
|
||||
|
@ -1,8 +1,11 @@
|
||||
import React from 'react';
|
||||
|
||||
import { config } from '@grafana/runtime';
|
||||
import { DataLinksInlineEditor, Input, RadioButtonGroup, Select, Switch, TextArea } from '@grafana/ui';
|
||||
import { getPanelLinksVariableSuggestions } from 'app/features/panel/panellinks/link_srv';
|
||||
|
||||
import { GenAIPanelDescriptionButton } from '../GenAI/GenAIPanelDescriptionButton';
|
||||
import { GenAIPanelTitleButton } from '../GenAI/GenAIPanelTitleButton';
|
||||
import { RepeatRowSelect } from '../RepeatRowSelect/RepeatRowSelect';
|
||||
|
||||
import { OptionsPaneCategoryDescriptor } from './OptionsPaneCategoryDescriptor';
|
||||
@ -17,6 +20,22 @@ export function getPanelFrameCategory(props: OptionPaneRenderProps): OptionsPane
|
||||
isOpenDefault: true,
|
||||
});
|
||||
|
||||
const setPanelTitle = (title: string) => {
|
||||
const input = document.getElementById('PanelFrameTitle');
|
||||
if (input instanceof HTMLInputElement) {
|
||||
input.value = title;
|
||||
onPanelConfigChange('title', title);
|
||||
}
|
||||
};
|
||||
|
||||
const setPanelDescription = (description: string) => {
|
||||
const input = document.getElementById('description-text-area');
|
||||
if (input instanceof HTMLTextAreaElement) {
|
||||
input.value = description;
|
||||
onPanelConfigChange('description', description);
|
||||
}
|
||||
};
|
||||
|
||||
return descriptor
|
||||
.addItem(
|
||||
new OptionsPaneItemDescriptor({
|
||||
@ -32,6 +51,7 @@ export function getPanelFrameCategory(props: OptionPaneRenderProps): OptionsPane
|
||||
/>
|
||||
);
|
||||
},
|
||||
addon: config.featureToggles.dashgpt && <GenAIPanelTitleButton onGenerate={setPanelTitle} panel={panel} />,
|
||||
})
|
||||
)
|
||||
.addItem(
|
||||
@ -48,6 +68,9 @@ export function getPanelFrameCategory(props: OptionPaneRenderProps): OptionsPane
|
||||
/>
|
||||
);
|
||||
},
|
||||
addon: config.featureToggles.dashgpt && (
|
||||
<GenAIPanelDescriptionButton onGenerate={setPanelDescription} panel={panel} />
|
||||
),
|
||||
})
|
||||
)
|
||||
.addItem(
|
||||
|
27
public/img/icons/unicons/ai.svg
Normal file
27
public/img/icons/unicons/ai.svg
Normal file
@ -0,0 +1,27 @@
|
||||
<svg width="10" height="10" viewBox="0 0 10 10" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g id="Group 204">
|
||||
<path id="Vector" d="M7.5 5C5.56722 5 4.00015 7.0148 4.00015 9.5C4.00015 7.0148 2.43309 5 0.5 5C2.43309 5 4.00015 2.9855 4.00015 0.5C4.00015 2.9855 5.56722 5 7.5 5Z" fill="url(#paint0_linear_344_481)"/>
|
||||
<path id="Vector_2" d="M9.5 2.5C8.39555 2.5 7.50009 3.39547 7.50009 4.5C7.50009 3.39547 6.60462 2.5 5.5 2.5C6.60462 2.5 7.50009 1.60467 7.50009 0.5C7.50009 1.60467 8.39555 2.5 9.5 2.5Z" fill="url(#paint1_linear_344_481)"/>
|
||||
<path id="Vector_3" d="M9.5 7C8.67166 7 8.00007 7.6716 8.00007 8.5C8.00007 7.6716 7.32847 7 6.5 7C7.32847 7 8.00007 6.3285 8.00007 5.5C8.00007 6.3285 8.67166 7 9.5 7Z" fill="url(#paint2_linear_344_481)"/>
|
||||
</g>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_344_481" x1="0.5" y1="0.5" x2="20.6236" y2="0.33439" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#6E9FFF" stop-opacity="0"/>
|
||||
<stop offset="0.164022" stop-color="#6E9FFF"/>
|
||||
<stop offset="0.197917" stop-color="#6E9FFF" stop-opacity="0.8075"/>
|
||||
<stop offset="0.426692" stop-color="#6E9FFF" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear_344_481" x1="5.5" y1="0.5" x2="16.9987" y2="0.378333" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#6E9FFF" stop-opacity="0"/>
|
||||
<stop offset="0.164022" stop-color="#6E9FFF"/>
|
||||
<stop offset="0.197917" stop-color="#6E9FFF" stop-opacity="0.8075"/>
|
||||
<stop offset="0.426692" stop-color="#6E9FFF" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint2_linear_344_481" x1="6.5" y1="5.5" x2="15.124" y2="5.40875" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#6E9FFF" stop-opacity="0"/>
|
||||
<stop offset="0.164022" stop-color="#6E9FFF"/>
|
||||
<stop offset="0.197917" stop-color="#6E9FFF" stop-opacity="0.8075"/>
|
||||
<stop offset="0.426692" stop-color="#6E9FFF" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
After Width: | Height: | Size: 1.8 KiB |
Loading…
Reference in New Issue
Block a user