diff --git a/public/app/features/dashboard/components/GenAI/utils.test.ts b/public/app/features/dashboard/components/GenAI/utils.test.ts index 95ffa9a4e9b..8b3c06003a8 100644 --- a/public/app/features/dashboard/components/GenAI/utils.test.ts +++ b/public/app/features/dashboard/components/GenAI/utils.test.ts @@ -98,15 +98,6 @@ describe('getDashboardChanges', () => { }); describe('isLLMPluginEnabled', () => { - it('should return true if LLM plugin is enabled', async () => { - // Mock llms.openai.health to return true - jest.mocked(llms.openai.health).mockResolvedValue({ ok: true, configured: false }); - - const enabled = await isLLMPluginEnabled(); - - expect(enabled).toBe(true); - }); - it('should return false if LLM plugin is not enabled', async () => { // Mock llms.openai.health to return false jest.mocked(llms.openai.health).mockResolvedValue({ ok: false, configured: false }); @@ -115,6 +106,15 @@ describe('isLLMPluginEnabled', () => { expect(enabled).toBe(false); }); + + it('should return true if LLM plugin is enabled', async () => { + // Mock llms.openai.health to return true + jest.mocked(llms.openai.health).mockResolvedValue({ ok: true, configured: false }); + + const enabled = await isLLMPluginEnabled(); + + expect(enabled).toBe(true); + }); }); describe('sanitizeReply', () => { diff --git a/public/app/features/dashboard/components/GenAI/utils.ts b/public/app/features/dashboard/components/GenAI/utils.ts index a0caf3065b4..2b1707caacc 100644 --- a/public/app/features/dashboard/components/GenAI/utils.ts +++ b/public/app/features/dashboard/components/GenAI/utils.ts @@ -59,18 +59,35 @@ export function getDashboardChanges(dashboard: DashboardModel): { }; } +// Shared healthcheck promise to avoid making multiple calls to the LLM app settings and health check APIs +let llmHealthCheck: Promise<boolean> | undefined; + /** * Check if the LLM plugin is enabled. * @returns true if the LLM plugin is enabled. 
*/ -export async function isLLMPluginEnabled() { +export async function isLLMPluginEnabled(): Promise<boolean> { if (!config.apps['grafana-llm-app']) { return false; } + if (llmHealthCheck) { + return llmHealthCheck; + } + // Check if the LLM plugin is enabled. // If not, we won't be able to make requests, so return early. - return llms.openai.health().then((response) => response.ok); + llmHealthCheck = new Promise((resolve) => { + llms.openai.health().then((response) => { + if (!response.ok) { + // If the health check fails, clear the cached promise so we can try again later + llmHealthCheck = undefined; + } + resolve(response.ok); + }); + }); + + return llmHealthCheck; } /**