Skip to content

Commit

Permalink
Merge pull request #96 from grafana/health-check-separate-enabled
Browse files Browse the repository at this point in the history
llms: separate health and enabled methods
  • Loading branch information
yoziru authored Nov 29, 2023
2 parents 7e89554 + 5850ac3 commit a4c35ad
Show file tree
Hide file tree
Showing 4 changed files with 87 additions and 6 deletions.
37 changes: 37 additions & 0 deletions src/llms/openai.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import { enabled } from './openai';
import { LLM_PLUGIN_ROUTE } from './constants';
import { getBackendSrv } from '@grafana/runtime';

jest.mock('@grafana/runtime', () => ({
  getBackendSrv: jest.fn(),
}));

describe('enabled', () => {
  it('should return false if not configured', async () => {
    // Settings endpoint reports the plugin as disabled.
    (getBackendSrv as jest.Mock).mockImplementation(() => ({
      get: jest.fn().mockResolvedValue({ enabled: false }),
    }));

    expect(await enabled()).toBe(false);
  });

  it('should return true if configured', async () => {
    // Map each known endpoint to its canned response; anything else is a bug.
    const responses: Record<string, unknown> = {
      [`${LLM_PLUGIN_ROUTE}/settings`]: { enabled: true },
      [`${LLM_PLUGIN_ROUTE}/health`]: { details: { openAI: { configured: true, ok: true } } },
    };
    (getBackendSrv as jest.Mock).mockImplementation(() => ({
      get: jest.fn().mockImplementation((url: string) => {
        const body = responses[url];
        if (body === undefined) {
          // raise an error if we get here
          throw new Error('unexpected url');
        }
        return Promise.resolve(body);
      }),
    }));

    expect(await enabled()).toBe(true);
  });
});
12 changes: 7 additions & 5 deletions src/llms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,7 @@ export function streamChatCompletions(request: ChatCompletionsRequest): Observab
let loggedWarning = false;

/** Check if the OpenAI API is enabled via the LLM plugin. */
export const enabled = async (): Promise<OpenAIHealthDetails> => {
export const health = async (): Promise<OpenAIHealthDetails> => {
// First check if the plugin is enabled.
try {
const settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
Expand Down Expand Up @@ -391,6 +391,11 @@ export const enabled = async (): Promise<OpenAIHealthDetails> => {
details.openAI;
}

/**
 * Convenience wrapper around {@link health}: resolves to true only when the
 * OpenAI integration is both configured and passing its health check.
 */
export const enabled = async (): Promise<boolean> => {
  const { configured, ok } = await health();
  return configured && ok;
};

/**
* Enum representing different states for a stream.
* @enum {string}
Expand Down Expand Up @@ -482,10 +487,7 @@ export function useOpenAIStream(
[notifyError]
);

const { error: enabledError, value: isEnabled } = useAsync(
async () => await enabled().then((response) => response.ok),
[enabled]
);
const { error: enabledError, value: isEnabled } = useAsync(async () => await enabled(), [enabled]);

const { error: asyncError, value } = useAsync(async () => {
if (!isEnabled || !messages.length) {
Expand Down
37 changes: 37 additions & 0 deletions src/llms/vector.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import { enabled } from './vector';
import { LLM_PLUGIN_ROUTE } from './constants';
import { getBackendSrv } from '@grafana/runtime';

jest.mock('@grafana/runtime', () => ({
  getBackendSrv: jest.fn(),
}));

describe('enabled', () => {
  it('should return false if not configured', async () => {
    // Settings endpoint reports the plugin as disabled.
    (getBackendSrv as jest.Mock).mockImplementation(() => ({
      get: jest.fn().mockResolvedValue({ enabled: false }),
    }));

    expect(await enabled()).toBe(false);
  });

  it('should return true if configured', async () => {
    // Map each known endpoint to its canned response; anything else is a bug.
    const responses: Record<string, unknown> = {
      [`${LLM_PLUGIN_ROUTE}/settings`]: { enabled: true },
      [`${LLM_PLUGIN_ROUTE}/health`]: { details: { vector: { enabled: true, ok: true } } },
    };
    (getBackendSrv as jest.Mock).mockImplementation(() => ({
      get: jest.fn().mockImplementation((url: string) => {
        const body = responses[url];
        if (body === undefined) {
          // raise an error if we get here
          throw new Error('unexpected url');
        }
        return Promise.resolve(body);
      }),
    }));

    expect(await enabled()).toBe(true);
  });
});
7 changes: 6 additions & 1 deletion src/llms/vector.ts
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ export async function search<T extends SearchResultPayload>(request: SearchReque
let loggedWarning = false;

/** Check if the vector API is enabled and configured via the LLM plugin. */
export const enabled = async (): Promise<VectorHealthDetails> => {
export const health = async (): Promise<VectorHealthDetails> => {
// First check if the plugin is enabled.
try {
const settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
Expand Down Expand Up @@ -123,3 +123,8 @@ export const enabled = async (): Promise<VectorHealthDetails> => {
{ enabled: details.vector, ok: details.vector } :
details.vector;
};

/**
 * Convenience wrapper around {@link health}: resolves to true only when the
 * vector service is enabled and passing its health check.
 */
export const enabled = async (): Promise<boolean> => {
  const { enabled: isEnabled, ok } = await health();
  return isEnabled && ok;
};

0 comments on commit a4c35ad

Please sign in to comment.