From d8b6a5bde2737dadbf730f3ccd31d275c94e9968 Mon Sep 17 00:00:00 2001 From: kay delaney <45561153+kaydelaney@users.noreply.github.com> Date: Wed, 4 Sep 2024 16:56:20 +0100 Subject: [PATCH] DashGPT: Fixes issue with generation on Safari (#90337) * DashGPT: Fixes issue with generation on Safari * Fix infinite loop issue + more refactoring * Solve linter --------- Co-authored-by: Ivan Ortega --- .betterer.results | 4 +- .../components/GenAI/GenAIButton.test.tsx | 32 +++--- .../components/GenAI/GenAIButton.tsx | 69 ++++++------ .../components/GenAI/GenAIHistory.tsx | 102 ++++++++---------- .../GenAI/GenerationHistoryCarousel.tsx | 21 +--- .../components/GenAI/QuickFeedback.tsx | 4 +- .../dashboard/components/GenAI/hooks.ts | 78 +++++++------- .../dashboard/components/GenAI/utils.ts | 2 +- public/locales/en-US/grafana.json | 5 + public/locales/pseudo-LOCALE/grafana.json | 5 + 10 files changed, 147 insertions(+), 175 deletions(-) diff --git a/.betterer.results b/.betterer.results index 7dfcd646d68..2edb5a615c0 100644 --- a/.betterer.results +++ b/.betterer.results @@ -3119,9 +3119,7 @@ exports[`better eslint`] = { ], "public/app/features/dashboard/components/GenAI/GenAIHistory.tsx:5381": [ [0, 0, 0, "No untranslated strings. Wrap text with ", "0"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "2"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "3"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "1"] ], "public/app/features/dashboard/components/GenAI/MinimalisticPagination.tsx:5381": [ [0, 0, 0, "No untranslated strings. Wrap text with ", "0"] diff --git a/public/app/features/dashboard/components/GenAI/GenAIButton.test.tsx b/public/app/features/dashboard/components/GenAI/GenAIButton.test.tsx index 9c711d9fa94..b6c94f6fa87 100644 --- a/public/app/features/dashboard/components/GenAI/GenAIButton.test.tsx +++ b/public/app/features/dashboard/components/GenAI/GenAIButton.test.tsx @@ -48,7 +48,7 @@ describe('GenAIButton', () => { streamStatus: StreamStatus.IDLE, reply: 'Some completed genereated text', setMessages: jest.fn(), - setStopGeneration: jest.fn(), + stopGeneration: jest.fn(), value: { enabled: false, stream: new Observable().subscribe(), @@ -74,9 +74,9 @@ describe('GenAIButton', () => { messages: [], error: undefined, streamStatus: StreamStatus.IDLE, - reply: 'Some completed genereated text', + reply: 'Some completed generated text', setMessages: setMessagesMock, - setStopGeneration: setShouldStopMock, + stopGeneration: setShouldStopMock, value: { enabled: true, stream: new Observable().subscribe(), @@ -84,7 +84,7 @@ describe('GenAIButton', () => { }); }); - it('should render text ', async () => { + it('should render text', async () => { setup(); waitFor(async () => expect(await screen.findByText('Auto-generate')).toBeInTheDocument()); @@ -162,7 +162,7 @@ describe('GenAIButton', () => { streamStatus: StreamStatus.GENERATING, reply: 'Some incomplete generated text', setMessages: jest.fn(), - setStopGeneration: setShouldStopMock, + stopGeneration: setShouldStopMock, value: { enabled: true, stream: new Observable().subscribe(), @@ -170,7 +170,7 @@ describe('GenAIButton', () => { }); }); - it('should render loading text ', async () => { + it('should render loading text', async () => { setup(); waitFor(async () => expect(await screen.findByText('Auto-generate')).toBeInTheDocument()); @@ -204,7 +204,6 @@ describe('GenAIButton', () => { await fireEvent.click(generateButton); 
expect(setShouldStopMock).toHaveBeenCalledTimes(1); - expect(setShouldStopMock).toHaveBeenCalledWith(true); expect(onGenerate).not.toHaveBeenCalled(); }); }); @@ -213,18 +212,27 @@ describe('GenAIButton', () => { const setShouldStopMock = jest.fn(); beforeEach(() => { - jest.mocked(useOpenAIStream).mockReturnValue({ + const reply = 'Some completed generated text'; + const returnValue = { messages: [], error: undefined, streamStatus: StreamStatus.COMPLETED, - reply: 'Some completed generated text', + reply, setMessages: jest.fn(), - setStopGeneration: setShouldStopMock, + stopGeneration: setShouldStopMock, value: { enabled: true, stream: new Observable().subscribe(), }, - }); + }; + + jest + .mocked(useOpenAIStream) + .mockImplementationOnce((options) => { + options?.onResponse?.(reply); + return returnValue; + }) + .mockImplementation(() => returnValue); }); it('should render improve text ', async () => { @@ -260,7 +268,7 @@ describe('GenAIButton', () => { streamStatus: StreamStatus.IDLE, reply: '', setMessages: setMessagesMock, - setStopGeneration: setShouldStopMock, + stopGeneration: setShouldStopMock, value: { enabled: true, stream: new Observable().subscribe(), diff --git a/public/app/features/dashboard/components/GenAI/GenAIButton.tsx b/public/app/features/dashboard/components/GenAI/GenAIButton.tsx index 336afb7f16d..6b6f55823ae 100644 --- a/public/app/features/dashboard/components/GenAI/GenAIButton.tsx +++ b/public/app/features/dashboard/components/GenAI/GenAIButton.tsx @@ -1,5 +1,5 @@ import { css } from '@emotion/css'; -import { useCallback, useEffect, useState } from 'react'; +import { useCallback, useState } from 'react'; import * as React from 'react'; import { GrafanaTheme2 } from '@grafana/data'; @@ -52,16 +52,36 @@ export const GenAIButton = ({ }: GenAIButtonProps) => { const styles = useStyles2(getStyles); - const { setMessages, setStopGeneration, reply, value, error, streamStatus } = useOpenAIStream(model, temperature); - const [history, setHistory] = useState([]); - const [showHistory, setShowHistory] = useState(false); + const unshiftHistoryEntry = useCallback((historyEntry: string) => { + setHistory((h) => [historyEntry, ...h]); + }, []); + const onResponse = useCallback( + (reply: string) => { + const sanitizedReply = sanitizeReply(reply); + onGenerate(sanitizedReply); + unshiftHistoryEntry(sanitizedReply); + }, + [onGenerate, unshiftHistoryEntry] + ); + + const { setMessages, stopGeneration, value, error, streamStatus } = useOpenAIStream({ + model, + temperature, + onResponse, + }); + + const [showHistory, setShowHistory] = useState(false); const hasHistory = history.length > 0; - const isGenerating = streamStatus === StreamStatus.GENERATING; const isFirstHistoryEntry = !hasHistory; + + const isGenerating = streamStatus === StreamStatus.GENERATING; const isButtonDisabled = disabled || (value && !value.enabled && !error); - const reportInteraction = (item: AutoGenerateItem) => reportAutoGenerateInteraction(eventTrackingSrc, item); + const reportInteraction = useCallback( + (item: AutoGenerateItem) => reportAutoGenerateInteraction(eventTrackingSrc, item), + [eventTrackingSrc] + ); const showTooltip = error || tooltip ? 
undefined : false; const tooltipContent = error @@ -70,7 +90,7 @@ export const GenAIButton = ({ const onClick = (e: React.MouseEvent) => { if (streamStatus === StreamStatus.GENERATING) { - setStopGeneration(true); + stopGeneration(); } else { if (!hasHistory) { onClickProp?.(e); @@ -90,28 +110,6 @@ export const GenAIButton = ({ reportInteraction(buttonItem); }; - const pushHistoryEntry = useCallback( - (historyEntry: string) => { - if (history.indexOf(historyEntry) === -1) { - setHistory([historyEntry, ...history]); - } - }, - [history] - ); - - useEffect(() => { - // Todo: Consider other options for `"` sanitation - if (streamStatus === StreamStatus.COMPLETED && reply) { - onGenerate(sanitizeReply(reply)); - } - }, [streamStatus, reply, onGenerate]); - - useEffect(() => { - if (streamStatus === StreamStatus.COMPLETED) { - pushHistoryEntry(sanitizeReply(reply)); - } - }, [history, streamStatus, reply, pushHistoryEntry]); - // The button is disabled if the plugin is not installed or enabled if (!value?.enabled) { return null; @@ -127,9 +125,11 @@ export const GenAIButton = ({ if (isGenerating) { return undefined; } - if (error || (value && !value?.enabled)) { + + if (error || (value && !value.enabled)) { return 'exclamation-circle'; } + return 'ai'; }; @@ -164,12 +164,7 @@ export const GenAIButton = ({ ); - const getMessages = () => { - if (typeof messages === 'function') { - return messages(); - } - return messages; - }; + const getMessages = () => (typeof messages === 'function' ? messages() : messages); const renderButtonWithToggletip = () => { if (hasHistory) { @@ -183,7 +178,7 @@ export const GenAIButton = ({ history={history} messages={getMessages()} onApplySuggestion={onApplySuggestion} - updateHistory={pushHistoryEntry} + updateHistory={unshiftHistoryEntry} eventTrackingSrc={eventTrackingSrc} /> } diff --git a/public/app/features/dashboard/components/GenAI/GenAIHistory.tsx b/public/app/features/dashboard/components/GenAI/GenAIHistory.tsx index 08e286eabb9..68cf5d68712 100644 --- a/public/app/features/dashboard/components/GenAI/GenAIHistory.tsx +++ b/public/app/features/dashboard/components/GenAI/GenAIHistory.tsx @@ -1,9 +1,9 @@ import { css } from '@emotion/css'; -import { useEffect, useState } from 'react'; -import * as React from 'react'; +import { useState, useCallback } from 'react'; import { GrafanaTheme2 } from '@grafana/data'; import { Alert, Button, Icon, Input, Stack, Text, TextLink, useStyles2 } from '@grafana/ui'; +import { Trans } from 'app/core/internationalization'; import { STOP_GENERATION_TEXT } from './GenAIButton'; import { GenerationHistoryCarousel } from './GenerationHistoryCarousel'; @@ -32,55 +32,36 @@ export const GenAIHistory = ({ const styles = useStyles2(getStyles); const [currentIndex, setCurrentIndex] = useState(1); - const [showError, setShowError] = useState(false); const [customFeedback, setCustomPrompt] = useState(''); - const { setMessages, setStopGeneration, reply, streamStatus, error } = useOpenAIStream( - DEFAULT_OAI_MODEL, - temperature + const onResponse = useCallback( + (response: string) => { + updateHistory(sanitizeReply(response)); + }, + [updateHistory] ); - const isStreamGenerating = streamStatus === StreamStatus.GENERATING; + const { setMessages, stopGeneration, reply, streamStatus, error } = useOpenAIStream({ + model: DEFAULT_OAI_MODEL, + temperature, + onResponse, + }); const reportInteraction = (item: AutoGenerateItem, otherMetadata?: object) => reportAutoGenerateInteraction(eventTrackingSrc, item, otherMetadata); - useEffect(() => { - 
if (!isStreamGenerating && reply !== '') { - setCurrentIndex(1); - } - }, [isStreamGenerating, reply]); - - useEffect(() => { - if (streamStatus === StreamStatus.COMPLETED) { - updateHistory(sanitizeReply(reply)); - } - }, [streamStatus, reply, updateHistory]); - - useEffect(() => { - if (error) { - setShowError(true); - } - - if (streamStatus === StreamStatus.GENERATING) { - setShowError(false); - } - }, [error, streamStatus]); - const onSubmitCustomFeedback = (text: string) => { onGenerateWithFeedback(text); reportInteraction(AutoGenerateItem.customFeedback, { customFeedback: text }); }; + const onStopGeneration = () => { + stopGeneration(); + reply && onResponse(reply); + }; + const onApply = () => { - if (isStreamGenerating) { - setStopGeneration(true); - if (reply !== '') { - updateHistory(sanitizeReply(reply)); - } - } else { - onApplySuggestion(history[currentIndex - 1]); - } + onApplySuggestion(history[currentIndex - 1]); }; const onNavigate = (index: number) => { @@ -88,14 +69,8 @@ export const GenAIHistory = ({ reportInteraction(index > currentIndex ? AutoGenerateItem.backHistoryItem : AutoGenerateItem.forwardHistoryItem); }; - const onGenerateWithFeedback = (suggestion: string | QuickFeedbackType) => { - if (suggestion !== QuickFeedbackType.Regenerate) { - messages = [...messages, ...getFeedbackMessage(history[currentIndex - 1], suggestion)]; - } else { - messages = [...messages, ...getFeedbackMessage(history[currentIndex - 1], 'Please, regenerate')]; - } - - setMessages(messages); + const onGenerateWithFeedback = (suggestion: string) => { + setMessages((messages) => [...messages, ...getFeedbackMessage(history[currentIndex - 1], suggestion)]); if (suggestion in QuickFeedbackType) { reportInteraction(AutoGenerateItem.quickFeedback, { quickFeedbackItem: suggestion }); @@ -111,23 +86,24 @@ export const GenAIHistory = ({ const onClickDocs = () => reportInteraction(AutoGenerateItem.linkToDocs); + const isStreamGenerating = streamStatus === StreamStatus.GENERATING; + const showError = error && !isStreamGenerating; + return (
      {showError && (
-
-
          Sorry, I was unable to complete your request. Please try again.
+
+
+
+
          Sorry, I was unable to complete your request. Please try again.
+
+
)} - +
@@ -142,9 +118,9 @@ export const GenAIHistory = ({ fill="text" aria-label="Send custom feedback" onClick={onClickSubmitCustomFeedback} - disabled={customFeedback === ''} + disabled={!customFeedback} > - Send + Send } value={customFeedback} @@ -153,10 +129,16 @@ export const GenAIHistory = ({ />
- - + + {isStreamGenerating ? ( + + ) : ( + + )}
diff --git a/public/app/features/dashboard/components/GenAI/GenerationHistoryCarousel.tsx b/public/app/features/dashboard/components/GenAI/GenerationHistoryCarousel.tsx index 4390ab00949..dce9764456c 100644 --- a/public/app/features/dashboard/components/GenAI/GenerationHistoryCarousel.tsx +++ b/public/app/features/dashboard/components/GenAI/GenerationHistoryCarousel.tsx @@ -4,39 +4,22 @@ import { GrafanaTheme2 } from '@grafana/data'; import { Text, useStyles2 } from '@grafana/ui'; import { MinimalisticPagination } from './MinimalisticPagination'; -import { StreamStatus } from './hooks'; export interface GenerationHistoryCarouselProps { history: string[]; index: number; - reply: string; - streamStatus: StreamStatus; onNavigate: (index: number) => void; } -export const GenerationHistoryCarousel = ({ - history, - index, - reply, - streamStatus, - onNavigate, -}: GenerationHistoryCarouselProps) => { +export const GenerationHistoryCarousel = ({ history, index, onNavigate }: GenerationHistoryCarouselProps) => { const styles = useStyles2(getStyles); const historySize = history.length; - const getHistoryText = () => { - if (reply && streamStatus !== StreamStatus.IDLE) { - return reply; - } - - return history[index - 1]; - }; - return ( <>
- {getHistoryText()} + {history[index - 1]}
void; + onSuggestionClick: (suggestion: string) => void; isGenerating: boolean; } @@ -40,7 +40,7 @@ export const QuickFeedback = ({ onSuggestionClick, isGenerating }: QuickActionsP variant="secondary" disabled={isGenerating} > - {QuickFeedbackType.Regenerate} + {'Regenerate'}
); diff --git a/public/app/features/dashboard/components/GenAI/hooks.ts b/public/app/features/dashboard/components/GenAI/hooks.ts index 80bcc806d3f..eb0dc9ca9ae 100644 --- a/public/app/features/dashboard/components/GenAI/hooks.ts +++ b/public/app/features/dashboard/components/GenAI/hooks.ts @@ -1,4 +1,4 @@ -import { useCallback, useEffect, useState } from 'react'; +import { Dispatch, SetStateAction, useCallback, useEffect, useState } from 'react'; import { useAsync } from 'react-use'; import { Subscription } from 'rxjs'; @@ -22,31 +22,35 @@ export enum StreamStatus { export const TIMEOUT = 10000; -// TODO: Add tests -export function useOpenAIStream( - model = DEFAULT_OAI_MODEL, - temperature = 1 -): { - setMessages: React.Dispatch>; - setStopGeneration: React.Dispatch>; +interface Options { + model: string; + temperature: number; + onResponse?: (response: string) => void; +} + +const defaultOptions = { + model: DEFAULT_OAI_MODEL, + temperature: 1, +}; + +interface UseOpenAIStreamResponse { + setMessages: Dispatch>; + stopGeneration: () => void; messages: Message[]; reply: string; streamStatus: StreamStatus; - error: Error | undefined; - value: - | { - enabled: boolean | undefined; - stream?: undefined; - } - | { - enabled: boolean | undefined; - stream: Subscription; - } - | undefined; -} { + error?: Error; + value?: { + enabled?: boolean | undefined; + stream?: Subscription; + }; +} + +// TODO: Add tests +export function useOpenAIStream({ model, temperature, onResponse }: Options = defaultOptions): UseOpenAIStreamResponse { // The messages array to send to the LLM, updated when the button is clicked. const [messages, setMessages] = useState([]); - const [stopGeneration, setStopGeneration] = useState(false); + // The latest reply from the LLM. const [reply, setReply] = useState(''); const [streamStatus, setStreamStatus] = useState(StreamStatus.IDLE); @@ -59,11 +63,10 @@ export function useOpenAIStream( (e: Error) => { setStreamStatus(StreamStatus.IDLE); setMessages([]); - setStopGeneration(false); setError(e); notifyError( 'Failed to generate content using OpenAI', - `Please try again or if the problem persists, contact your organization admin.` + 'Please try again or if the problem persists, contact your organization admin.' ); console.error(e); genAILogger.logError(e, { messages: JSON.stringify(messages), model, temperature: String(temperature) }); @@ -117,11 +120,8 @@ export function useOpenAIStream( complete: () => { setReply(partialReply); setStreamStatus(StreamStatus.COMPLETED); - setTimeout(() => { - setStreamStatus(StreamStatus.IDLE); - }); + onResponse?.(partialReply); setMessages([]); - setStopGeneration(false); setError(undefined); }, }), @@ -131,22 +131,17 @@ export function useOpenAIStream( // Unsubscribe from the stream when the component unmounts. useEffect(() => { return () => { - if (value?.stream) { - value.stream.unsubscribe(); - } + value?.stream?.unsubscribe(); }; }, [value]); // Unsubscribe from the stream when user stops the generation. - useEffect(() => { - if (stopGeneration) { - value?.stream?.unsubscribe(); - setStreamStatus(StreamStatus.IDLE); - setStopGeneration(false); - setError(undefined); - setMessages([]); - } - }, [stopGeneration, value?.stream]); + const stopGeneration = useCallback(() => { + value?.stream?.unsubscribe(); + setStreamStatus(StreamStatus.IDLE); + setError(undefined); + setMessages([]); + }, [value]); // If the stream is generating and we haven't received a reply, it times out. 
useEffect(() => { @@ -156,8 +151,9 @@ export function useOpenAIStream( onError(new Error(`OpenAI stream timed out after ${TIMEOUT}ms`)); }, TIMEOUT); } + return () => { - timeout && clearTimeout(timeout); + clearTimeout(timeout); }; }, [streamStatus, reply, onError]); @@ -167,7 +163,7 @@ export function useOpenAIStream( return { setMessages, - setStopGeneration, + stopGeneration, messages, reply, streamStatus, diff --git a/public/app/features/dashboard/components/GenAI/utils.ts b/public/app/features/dashboard/components/GenAI/utils.ts index 78a2cede187..4f641d2c614 100644 --- a/public/app/features/dashboard/components/GenAI/utils.ts +++ b/public/app/features/dashboard/components/GenAI/utils.ts @@ -22,7 +22,7 @@ export type Message = llms.openai.Message; export enum QuickFeedbackType { Shorter = 'Even shorter', MoreDescriptive = 'More descriptive', - Regenerate = 'Regenerate', + Regenerate = 'Please, regenerate', } /** diff --git a/public/locales/en-US/grafana.json b/public/locales/en-US/grafana.json index a5fca3f498c..b4f006446c3 100644 --- a/public/locales/en-US/grafana.json +++ b/public/locales/en-US/grafana.json @@ -922,6 +922,11 @@ "folder-picker": { "loading": "Loading folders..." }, + "gen-ai": { + "apply-suggestion": "Apply", + "incomplete-request-error": "Sorry, I was unable to complete your request. Please try again.", + "send-custom-feedback": "Send" + }, "grafana-ui": { "drawer": { "close": "Close" diff --git a/public/locales/pseudo-LOCALE/grafana.json b/public/locales/pseudo-LOCALE/grafana.json index ef8e4b6bf35..e13ef14d739 100644 --- a/public/locales/pseudo-LOCALE/grafana.json +++ b/public/locales/pseudo-LOCALE/grafana.json @@ -922,6 +922,11 @@ "folder-picker": { "loading": "Ŀőäđįʼnģ ƒőľđęřş..." }, + "gen-ai": { + "apply-suggestion": "Åppľy", + "incomplete-request-error": "Ŝőřřy, Ĩ ŵäş ūʼnäþľę ŧő čőmpľęŧę yőūř řęqūęşŧ. Pľęäşę ŧřy äģäįʼn.", + "send-custom-feedback": "Ŝęʼnđ" + }, "grafana-ui": { "drawer": { "close": "Cľőşę"
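
Note (not part of the patch): the diff above changes `useOpenAIStream` from positional arguments plus a `setStopGeneration` state flag to an options object with an `onResponse` callback and a returned `stopGeneration()` function. The sketch below is a minimal, hypothetical consumer written against only the signatures visible in this diff; the component, its props, the `temperature` value, and the import path assumed for `DEFAULT_OAI_MODEL` are illustrative assumptions, not code from the PR.

```tsx
// Sketch only: illustrates the refactored useOpenAIStream API from this patch.
// Names marked "illustrative" below are assumptions for the example.
import { useCallback, useState } from 'react';

import { Button } from '@grafana/ui';

import { StreamStatus, useOpenAIStream } from './hooks';
import { DEFAULT_OAI_MODEL, Message } from './utils'; // DEFAULT_OAI_MODEL path assumed

interface ExampleGenerateButtonProps {
  // Illustrative: builds the prompt messages to send when the user clicks the button.
  buildMessages: () => Message[];
  // Receives the completed reply, delivered once via the hook's onResponse callback.
  onGenerate: (reply: string) => void;
}

export const ExampleGenerateButton = ({ buildMessages, onGenerate }: ExampleGenerateButtonProps) => {
  const [history, setHistory] = useState<string[]>([]);

  // Completed replies arrive through onResponse instead of a useEffect that watches
  // streamStatus === COMPLETED, matching the refactor in GenAIButton/GenAIHistory.
  const onResponse = useCallback(
    (reply: string) => {
      onGenerate(reply);
      setHistory((h) => [reply, ...h]);
    },
    [onGenerate]
  );

  const { setMessages, stopGeneration, streamStatus, value } = useOpenAIStream({
    model: DEFAULT_OAI_MODEL,
    temperature: 1,
    onResponse,
  });

  // Hidden when the LLM plugin is not installed or enabled, as in GenAIButton.
  if (!value?.enabled) {
    return null;
  }

  const isGenerating = streamStatus === StreamStatus.GENERATING;

  return (
    <Button
      icon={isGenerating ? undefined : 'ai'}
      onClick={() => (isGenerating ? stopGeneration() : setMessages(buildMessages()))}
    >
      {isGenerating ? 'Stop generating' : history.length > 0 ? 'Improve' : 'Auto-generate'}
    </Button>
  );
};
```

Compared with the previous flag-and-effect pattern, the callback delivers each completed reply exactly once and leaves no `streamStatus`-watching effect behind, which appears to be the refactoring the "infinite loop" bullet in the commit message refers to.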