Prometheus: Fix PromQail health check + add test (#77259)

Yasir Ekinci · 2023-12-06 14:51:33 +01:00 · committed by GitHub
parent d7641b0ecb
commit 399d0708da
2 changed files with 60 additions and 2 deletions

@@ -1,4 +1,47 @@
-import { guessMetricType } from './helpers';
+import { llms } from '@grafana/experimental';
+
+import { guessMetricType, isLLMPluginEnabled } from './helpers';
+
+// Mock the grafana-experimental llms module
+jest.mock('@grafana/experimental', () => ({
+  llms: {
+    openai: {
+      enabled: jest.fn(),
+    },
+    vector: {
+      enabled: jest.fn(),
+    },
+  },
+}));
+
+describe('isLLMPluginEnabled', () => {
+  it('should return true if LLM plugin is enabled', async () => {
+    jest.mocked(llms.openai.enabled).mockResolvedValue({ ok: true, configured: true });
+    jest.mocked(llms.vector.enabled).mockResolvedValue({ ok: true, enabled: true });
+
+    const enabled = await isLLMPluginEnabled();
+
+    expect(enabled).toBe(true);
+  });
+
+  it('should return false if LLM plugin is not enabled', async () => {
+    jest.mocked(llms.openai.enabled).mockResolvedValue({ ok: false, configured: false });
+    jest.mocked(llms.vector.enabled).mockResolvedValue({ ok: false, enabled: false });
+
+    const enabled = await isLLMPluginEnabled();
+
+    expect(enabled).toBe(false);
+  });
+
+  it('should return false if LLM plugin is enabled but health check fails', async () => {
+    jest.mocked(llms.openai.enabled).mockResolvedValue({ ok: false, configured: true });
+    jest.mocked(llms.vector.enabled).mockResolvedValue({ ok: false, enabled: true });
+
+    const enabled = await isLLMPluginEnabled();
+
+    expect(enabled).toBe(false);
+  });
+});
+
 const metricListWithType = [
   // below is summary metric family

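Aside: the mocked payloads above mirror the differing response shapes of the two health endpoints, of which the helper only consults ok. Inferred from this diff (not an authoritative @grafana/experimental type), the shapes are roughly:

  type OpenAIHealth = { ok: boolean; configured: boolean };
  type VectorHealth = { ok: boolean; enabled: boolean };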
@@ -261,6 +261,21 @@ function guessMetricFamily(metric: string): string {
   return metric;
 }
 
+/**
+ * Check if the LLM plugin is enabled.
+ * @returns true if the LLM plugin is enabled.
+ */
+export async function isLLMPluginEnabled(): Promise<boolean> {
+  // Check if the LLM plugin is enabled.
+  // If not, we won't be able to make requests, so return early.
+  const openaiEnabled = llms.openai.enabled().then((response) => response.ok);
+  const vectorEnabled = llms.vector.enabled().then((response) => response.ok);
+  // Combine the two health checks: report enabled only if both pass.
+  return Promise.all([openaiEnabled, vectorEnabled]).then((results) => {
+    return results.every((result) => result);
+  });
+}
+
 /**
  * Calls the API and adds suggestions to the interaction
  *
@@ -279,7 +294,7 @@ export async function promQailSuggest(
 ) {
   // when you're not running promqail
   // @ts-ignore llms types issue
-  const check = (await llms.openai.enabled()) && (await llms.vector.enabled());
+  const check = await isLLMPluginEnabled();
 
   const interactionToUpdate = interaction ? interaction : createInteraction(SuggestionType.Historical);
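
Why the fix works: llms.openai.enabled() and llms.vector.enabled() resolve to response objects, so the old expression (await llms.openai.enabled()) && (await llms.vector.enabled()) was always truthy regardless of health; the new helper inspects response.ok on both checks before combining them. A minimal sketch of a call site, assuming the helper is imported from './helpers' (the surrounding function is illustrative, not part of this commit):

  import { isLLMPluginEnabled } from './helpers';

  async function suggestIfAvailable(): Promise<string[]> {
    // Bail out early when the LLM plugin is missing or unhealthy,
    // mirroring the check promQailSuggest now performs.
    if (!(await isLLMPluginEnabled())) {
      return [];
    }
    // ...safe to call llms.openai / llms.vector from here
    return [];
  }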