From f83366fcddd07eeeee8e55f2418b131c5bb564e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Fri, 10 May 2024 20:52:59 +0200 Subject: [PATCH] GenAI: Fixes multiple calls to settings and health (#87623) * GenAI: Fixes multiple calls to settings and health * swap order of tests given new caching --------- Co-authored-by: nmarrs --- .../dashboard/components/GenAI/utils.test.ts | 18 ++++++++-------- .../dashboard/components/GenAI/utils.ts | 21 +++++++++++++++++-- 2 files changed, 28 insertions(+), 11 deletions(-) diff --git a/public/app/features/dashboard/components/GenAI/utils.test.ts b/public/app/features/dashboard/components/GenAI/utils.test.ts index 95ffa9a4e9b..8b3c06003a8 100644 --- a/public/app/features/dashboard/components/GenAI/utils.test.ts +++ b/public/app/features/dashboard/components/GenAI/utils.test.ts @@ -98,15 +98,6 @@ describe('getDashboardChanges', () => { }); describe('isLLMPluginEnabled', () => { - it('should return true if LLM plugin is enabled', async () => { - // Mock llms.openai.health to return true - jest.mocked(llms.openai.health).mockResolvedValue({ ok: true, configured: false }); - - const enabled = await isLLMPluginEnabled(); - - expect(enabled).toBe(true); - }); - it('should return false if LLM plugin is not enabled', async () => { // Mock llms.openai.health to return false jest.mocked(llms.openai.health).mockResolvedValue({ ok: false, configured: false }); @@ -115,6 +106,15 @@ describe('isLLMPluginEnabled', () => { expect(enabled).toBe(false); }); + + it('should return true if LLM plugin is enabled', async () => { + // Mock llms.openai.health to return true + jest.mocked(llms.openai.health).mockResolvedValue({ ok: true, configured: false }); + + const enabled = await isLLMPluginEnabled(); + + expect(enabled).toBe(true); + }); }); describe('sanitizeReply', () => { diff --git a/public/app/features/dashboard/components/GenAI/utils.ts b/public/app/features/dashboard/components/GenAI/utils.ts index 
a0caf3065b4..2b1707caacc 100644 --- a/public/app/features/dashboard/components/GenAI/utils.ts +++ b/public/app/features/dashboard/components/GenAI/utils.ts @@ -59,18 +59,35 @@ export function getDashboardChanges(dashboard: DashboardModel): { }; } +// Shared healthcheck promise to avoid multiple calls to the llm app settings and health check APIs +let llmHealthCheck: Promise<boolean> | undefined; + /** * Check if the LLM plugin is enabled. * @returns true if the LLM plugin is enabled. */ -export async function isLLMPluginEnabled() { +export async function isLLMPluginEnabled(): Promise<boolean> { if (!config.apps['grafana-llm-app']) { return false; } + if (llmHealthCheck) { + return llmHealthCheck; + } + // Check if the LLM plugin is enabled. // If not, we won't be able to make requests, so return early. - return llms.openai.health().then((response) => response.ok); + llmHealthCheck = new Promise((resolve) => { + llms.openai.health().then((response) => { + if (!response.ok) { + // Health check failed; clear cached promise so we can try again later + llmHealthCheck = undefined; + } + resolve(response.ok); + }); + }); + + return llmHealthCheck; } /**