useOpenAIStream: Improve error handling (#76212)

* Simplify error message

* Add timeout

* Unsubscribe when unmount

---------

Co-authored-by: Nathan Marrs <nathanielmarrs@gmail.com>
This commit is contained in:
Ivan Ortega Alba 2023-10-10 16:23:08 +02:00 committed by GitHub
parent 473f8899c5
commit 2a2401e673
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 51 additions and 10 deletions

View File

@ -208,7 +208,9 @@ describe('GenAIButton', () => {
// The tooltip stays interactive so the user can click the link inside it
await userEvent.hover(tooltip);
expect(tooltip).toBeVisible();
expect(tooltip).toHaveTextContent('Something went wrong');
expect(tooltip).toHaveTextContent(
'Failed to generate content using OpenAI. Please try again or if the problem persist, contact your organization admin.'
);
});
it('should call the onClick callback', async () => {

View File

@ -173,7 +173,13 @@ export const GenAIButton = ({
<div className={styles.wrapper}>
{isFirstHistoryEntry && <Spinner size={14} />}
{!hasHistory && (
<Tooltip show={error ? undefined : false} interactive content={`OpenAI error: ${error?.message}`}>
<Tooltip
show={error ? undefined : false}
interactive
content={
'Failed to generate content using OpenAI. Please try again or if the problem persist, contact your organization admin.'
}
>
{button}
</Tooltip>
)}

View File

@ -1,4 +1,4 @@
import { useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { useAsync } from 'react-use';
import { Subscription } from 'rxjs';
@ -18,6 +18,8 @@ export enum StreamStatus {
COMPLETED = 'completed',
}
export const TIMEOUT = 10000;
// TODO: Add tests
export function useOpenAIStream(
model = OPEN_AI_MODEL,
@ -46,6 +48,21 @@ export function useOpenAIStream(
const [error, setError] = useState<Error>();
const { error: notifyError } = useAppNotification();
// Shared failure handler used both by the stream subscription and the
// generation-timeout watchdog: reset the hook to idle, expose the error to the
// consuming component, notify the user, and record the failure for diagnostics.
const onError = useCallback(
(e: Error) => {
// Drop back to IDLE and clear the pending messages so a retry starts clean.
setStreamStatus(StreamStatus.IDLE);
setMessages([]);
setError(e);
notifyError(
'Failed to generate content using OpenAI',
`Please try again or if the problem persists, contact your organization admin.`
);
// Log locally and report to the backend together with the request context.
console.error(e);
logError(e, { messages: JSON.stringify(messages), model, temperature: String(temperature) });
},
[messages, model, temperature, notifyError]
);
const { error: enabledError, value: enabled } = useAsync(
async () => await isLLMPluginEnabled(),
[isLLMPluginEnabled]
@ -80,13 +97,7 @@ export function useOpenAIStream(
enabled,
stream: stream.subscribe({
next: setReply,
error: (e: Error) => {
setStreamStatus(StreamStatus.IDLE);
setMessages([]);
setError(e);
notifyError('OpenAI Error', `${e.message}`);
logError(e, { messages: JSON.stringify(messages), model, temperature: String(temperature) });
},
error: onError,
complete: () => {
setStreamStatus(StreamStatus.COMPLETED);
setTimeout(() => {
@ -99,6 +110,28 @@ export function useOpenAIStream(
};
}, [messages, enabled]);
// Tear down the rxjs subscription when the component unmounts (the cleanup
// also runs whenever `value` is replaced), so late stream events cannot
// update state on an unmounted component.
useEffect(() => {
return () => {
if (value?.stream) {
value.stream.unsubscribe();
}
};
}, [value]);
// Watchdog: if the stream has been GENERATING for TIMEOUT ms without a single
// reply chunk arriving, treat it as a failure via onError. The timer is only
// armed while status is GENERATING and reply is still empty; the cleanup
// disarms it, so the first received chunk (reply !== '') cancels the timeout.
useEffect(() => {
let timeout: NodeJS.Timeout | undefined;
if (streamStatus === StreamStatus.GENERATING && reply === '') {
timeout = setTimeout(() => {
onError(new Error(`OpenAI stream timed out after ${TIMEOUT}ms`));
}, TIMEOUT);
}
return () => {
// Clear any armed timer on re-run/unmount to avoid a stale timeout firing.
timeout && clearTimeout(timeout);
};
}, [streamStatus, reply, onError]);
// Surface errors from the LLM-plugin enablement check to consumers.
// NOTE(review): this calls setError during render rather than in an effect;
// presumably it avoids a re-render loop only because the same Error instance
// is set again on subsequent renders — confirm, and consider moving into a
// useEffect.
if (asyncError || enabledError) {
setError(asyncError || enabledError);
}