2023-10-25 16:38:55 +01:00
import { llms } from '@grafana/experimental' ;
2023-09-28 09:42:56 -05:00
import { DashboardModel , PanelModel } from '../../state' ;
2023-09-21 14:41:49 +02:00
2023-10-14 02:02:45 +02:00
import { getDashboardStringDiff } from './jsonDiffText' ;
2023-09-28 09:42:56 -05:00
2023-09-19 15:40:33 +02:00
export enum Role {
2023-09-27 09:47:06 -05:00
// System content cannot be overwritten by user prompts.
2023-09-19 15:40:33 +02:00
'system' = 'system' ,
// User content is the content that the user has entered.
2023-09-27 09:47:06 -05:00
// This content can be overwritten by following prompt.
2023-09-19 15:40:33 +02:00
'user' = 'user' ,
}
2023-10-25 16:38:55 +01:00
/** Chat message shape re-exported from the LLM plugin's OpenAI client. */
export type Message = llms.openai.Message;
2023-09-19 15:40:33 +02:00
2023-10-05 08:25:35 -05:00
/** Canned feedback options offered alongside a generated response; the value is the user-facing label. */
export enum QuickFeedbackType {
  /** Request a shorter version of the previous answer. */
  Shorter = 'Even shorter',
  /** Request a more detailed version of the previous answer. */
  MoreDescriptive = 'More descriptive',
  /** Request a fresh answer from scratch. */
  Regenerate = 'Regenerate',
}
2023-09-19 15:40:33 +02:00
/ * *
* The OpenAI model to be used .
* /
2023-10-28 00:10:47 -05:00
export const DEFAULT_OAI_MODEL = 'gpt-4' ;
export type OAI_MODEL = 'gpt-4' | 'gpt-4-32k' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' ;
2023-09-19 15:40:33 +02:00
2023-10-06 14:11:15 +02:00
/ * *
* Sanitize the reply from OpenAI by removing the leading and trailing quotes .
* /
export const sanitizeReply = ( reply : string ) = > {
return reply . replace ( /^"|"$/g , '' ) ;
} ;
2023-09-21 14:41:49 +02:00
/ * *
* Diff the current dashboard with the original dashboard and the dashboard after migration
* to split the changes into user changes and migration changes .
* * User changes : changes made by the user
* * Migration changes : changes made by the DashboardMigrator after opening the dashboard
*
* @param dashboard current dashboard to be saved
* @returns user changes and migration changes
* /
export function getDashboardChanges ( dashboard : DashboardModel ) : {
2023-10-14 02:02:45 +02:00
userChanges : string ;
migrationChanges : string ;
2023-09-21 14:41:49 +02:00
} {
2023-10-14 02:02:45 +02:00
const { migrationDiff , userDiff } = getDashboardStringDiff ( dashboard ) ;
2023-09-21 14:41:49 +02:00
return {
2023-10-14 02:02:45 +02:00
userChanges : userDiff ,
migrationChanges : migrationDiff ,
2023-09-21 14:41:49 +02:00
} ;
}
2023-09-27 09:47:06 -05:00
/ * *
2023-10-25 16:38:55 +01:00
* Check if the LLM plugin is enabled .
* @returns true if the LLM plugin is enabled .
2023-09-27 09:47:06 -05:00
* /
export async function isLLMPluginEnabled() {
2023-10-25 16:38:55 +01:00
// Check if the LLM plugin is enabled.
2023-09-27 09:47:06 -05:00
// If not, we won't be able to make requests, so return early.
2023-10-25 16:38:55 +01:00
return llms . openai . enabled ( ) . then ( ( response ) = > response . ok ) ;
2023-09-28 09:42:56 -05:00
}
2023-10-31 13:52:46 -06:00
/ * *
* Get the message to be sent to OpenAI to generate a new response .
* @param previousResponse
* @param feedback
* @returns Message [ ] to be sent to OpenAI to generate a new response
* /
export const getFeedbackMessage = ( previousResponse : string , feedback : string | QuickFeedbackType ) : Message [ ] = > {
return [
{
role : Role.system ,
content : ` Your previous response was: ${ previousResponse } . The user has provided the following feedback: ${ feedback } . Re-generate your response according to the provided feedback. ` ,
} ,
] ;
} ;
2023-09-28 09:42:56 -05:00
/ * *
*
* @param dashboard Dashboard to generate a title or description for
* @returns String for inclusion in prompts stating what the dashboard ' s panels are
* /
export function getDashboardPanelPrompt ( dashboard : DashboardModel ) : string {
2023-11-07 17:18:07 -06:00
const getPanelString = ( panel : PanelModel , idx : number ) = >
` - Panel ${ idx }
- Title : $ { panel . title } $ { panel . description ? ` \ n- Description: ${ panel . description } ` : '' } ` ;
2023-09-28 09:42:56 -05:00
const panelStrings : string [ ] = dashboard . panels . map ( getPanelString ) ;
let panelPrompt : string ;
if ( panelStrings . length <= 10 ) {
panelPrompt = ` The panels in the dashboard are: \ n ${ panelStrings . join ( '\n' ) } ` ;
} else {
const withDescriptions = dashboard . panels . filter ( ( panel ) = > panel . description ) ;
const withoutDescriptions = dashboard . panels . filter ( ( panel ) = > ! panel . description ) ;
let concatenatedPanelStrings ;
if ( withDescriptions . length >= 10 ) {
concatenatedPanelStrings = withDescriptions . slice ( 10 ) . map ( getPanelString ) . join ( '\n' ) ;
} else {
concatenatedPanelStrings = withDescriptions . map ( getPanelString ) . join ( '\n' ) ;
concatenatedPanelStrings += '\n' ;
concatenatedPanelStrings += withoutDescriptions
. slice ( 10 - withDescriptions . length )
. map ( getPanelString )
. join ( 'n' ) ;
}
panelPrompt =
` There are ${ panelStrings . length } panels. \ n ` +
'Due to space constraints, only the information for ten of them is presented.\n' +
'These ten are not necessarily the first ten, but prioritized to those with descriptions.\n' +
` The panels in the dashboard are: \ n ${ concatenatedPanelStrings } ` ;
} // This truncation should prevent exceeding the allowed size for GPT calls.
// Additionally, context windows that are too long degrade performance,
// So it is possibly that if we can condense it further it would be better
return panelPrompt ;
2023-09-27 09:47:06 -05:00
}
2023-11-07 17:18:07 -06:00
export function getFilteredPanelString ( panel : PanelModel ) : string {
const panelObj = panel . getSaveModel ( ) ;
const keysToKeep = new Set ( [
'id' ,
'datasource' ,
'title' ,
'description' ,
'targets' ,
'thresholds' ,
'type' ,
'xaxis' ,
'yaxes' ,
] ) ;
2023-11-16 12:39:12 +00:00
const panelObjFiltered = Object . keys ( panelObj ) . reduce ( ( obj : { [ key : string ] : unknown } , key ) = > {
2023-11-07 17:18:07 -06:00
if ( keysToKeep . has ( key ) ) {
obj [ key ] = panelObj [ key ] ;
}
return obj ;
} , { } ) ;
return JSON . stringify ( panelObjFiltered , null , 2 ) ;
}