Run prettier across llms module
sd2k committed Oct 23, 2023
1 parent ace60c4 commit f12cebf
Showing 3 changed files with 88 additions and 54 deletions.
6 changes: 3 additions & 3 deletions src/llms/constants.ts
@@ -1,6 +1,6 @@
-import { logWarning } from "@grafana/runtime";
+import { SemVer } from 'semver';
 
-import { SemVer } from "semver";
+import { logWarning } from '@grafana/runtime';
 
 export const LLM_PLUGIN_ID = 'grafana-llm-app';
 export const LLM_PLUGIN_ROUTE = `/api/plugins/${LLM_PLUGIN_ID}`;
@@ -14,6 +14,6 @@ export function setLLMPluginVersion(version: string) {
   try {
     LLM_PLUGIN_VERSION = new SemVer(version);
   } catch (e) {
-    logWarning('Failed to parse version of grafana-llm-app; assuming old version is present.')
+    logWarning('Failed to parse version of grafana-llm-app; assuming old version is present.');
   }
 }
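A note on the hunk above: setLLMPluginVersion deliberately swallows the SemVer parse error, so an unparseable version string downgrades to "old plugin present" instead of throwing. A minimal standalone sketch of that contract, using console.warn in place of Grafana's logWarning and a module-level LLM_PLUGIN_VERSION declaration that these hunks don't show (both are assumptions):

import { SemVer } from 'semver';

let LLM_PLUGIN_VERSION: SemVer | undefined;

function setLLMPluginVersion(version: string) {
  try {
    LLM_PLUGIN_VERSION = new SemVer(version);
  } catch (e) {
    // Parse failures are non-fatal: callers treat the version as unknown
    // and assume an old plugin rather than surfacing an error.
    console.warn('Failed to parse version of grafana-llm-app; assuming old version is present.');
  }
}

setLLMPluginVersion('0.2.0'); // parses into a SemVer instance
setLLMPluginVersion('dev'); // warns; LLM_PLUGIN_VERSION keeps its previous value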
90 changes: 58 additions & 32 deletions src/llms/openai.ts
@@ -8,14 +8,19 @@
  * The {@link enabled} function can be used to check if the plugin is enabled and configured.
  */
 
-import { isLiveChannelMessageEvent, LiveChannelAddress, LiveChannelMessageEvent, LiveChannelScope } from "@grafana/data";
-import { getBackendSrv, getGrafanaLiveSrv, logDebug } from "@grafana/runtime";
+import { pipe, Observable, UnaryFunction } from 'rxjs';
+import { filter, map, scan, takeWhile, tap } from 'rxjs/operators';
 
-import { pipe, Observable, UnaryFunction } from "rxjs";
-import { filter, map, scan, takeWhile, tap } from "rxjs/operators";
+import {
+  isLiveChannelMessageEvent,
+  LiveChannelAddress,
+  LiveChannelMessageEvent,
+  LiveChannelScope,
+} from '@grafana/data';
+import { getBackendSrv, getGrafanaLiveSrv, logDebug } from '@grafana/runtime';
 
-import { LLM_PLUGIN_ID, LLM_PLUGIN_ROUTE, setLLMPluginVersion } from "./constants";
-import { HealthCheckResponse, OpenAIHealthDetails } from "./types";
+import { LLM_PLUGIN_ID, LLM_PLUGIN_ROUTE, setLLMPluginVersion } from './constants';
+import { HealthCheckResponse, OpenAIHealthDetails } from './types';
 
 const OPENAI_CHAT_COMPLETIONS_PATH = 'openai/v1/chat/completions';
 
@@ -227,17 +232,18 @@ export interface ChatCompletionsChunk {
 
 /** Return true if the message is a 'content' message. */
 export function isContentMessage(message: ChatCompletionsDelta): message is ContentMessage {
-  return 'content' in message
+  return 'content' in message;
 }
 
-
 /** Return true if the message is a 'done' message. */
 export function isDoneMessage(message: ChatCompletionsDelta): message is DoneMessage {
   return 'done' in message && message.done != null;
 }
 
 /** Return true if the response is an error response. */
-export function isErrorResponse<T>(response: ChatCompletionsResponse<T> | ChatCompletionsErrorResponse): response is ChatCompletionsErrorResponse {
+export function isErrorResponse<T>(
+  response: ChatCompletionsResponse<T> | ChatCompletionsErrorResponse
+): response is ChatCompletionsErrorResponse {
   return 'error' in response;
 }
 
@@ -255,12 +261,17 @@ export function isErrorResponse<T>(response: ChatCompletionsResponse<T> | ChatCo
  * // Output:
  * // ['Hello', '? ', 'How ', 'are ', 'you', '?']
  */
-export function extractContent(): UnaryFunction<Observable<ChatCompletionsResponse<ChatCompletionsChunk>>, Observable<string>> {
+export function extractContent(): UnaryFunction<
+  Observable<ChatCompletionsResponse<ChatCompletionsChunk>>,
+  Observable<string>
+> {
   return pipe(
     filter((response: ChatCompletionsResponse<ChatCompletionsChunk>) => isContentMessage(response.choices[0].delta)),
     // The type assertion is needed here because the type predicate above doesn't seem to propagate.
-    map((response: ChatCompletionsResponse<ChatCompletionsChunk>) => (response.choices[0].delta as ContentMessage).content),
-  )
+    map(
+      (response: ChatCompletionsResponse<ChatCompletionsChunk>) => (response.choices[0].delta as ContentMessage).content
+    )
+  );
 }
 
 /**
@@ -277,20 +288,27 @@ export function extractContent(): UnaryFunction<Observable<ChatCompletionsRespon
  * // Output:
  * // ['Hello', 'Hello! ', 'Hello! How ', 'Hello! How are ', 'Hello! How are you', 'Hello! How are you?']
  */
-export function accumulateContent(): UnaryFunction<Observable<ChatCompletionsResponse<ChatCompletionsChunk>>, Observable<string>> {
+export function accumulateContent(): UnaryFunction<
+  Observable<ChatCompletionsResponse<ChatCompletionsChunk>>,
+  Observable<string>
+> {
   return pipe(
     extractContent(),
-    scan((acc, curr) => acc + curr, ''),
+    scan((acc, curr) => acc + curr, '')
   );
 }
 
 /**
  * Make a request to OpenAI's chat-completions API via the Grafana LLM plugin proxy.
  */
 export async function chatCompletions(request: ChatCompletionsRequest): Promise<ChatCompletionsResponse> {
-  const response = await getBackendSrv().post<ChatCompletionsResponse>('/api/plugins/grafana-llm-app/resources/openai/v1/chat/completions', request, {
-    headers: { 'Content-Type': 'application/json' }
-  });
+  const response = await getBackendSrv().post<ChatCompletionsResponse>(
+    '/api/plugins/grafana-llm-app/resources/openai/v1/chat/completions',
+    request,
+    {
+      headers: { 'Content-Type': 'application/json' },
+    }
+  );
   return response;
 }
 
@@ -321,7 +339,9 @@ export async function chatCompletions(request: ChatCompletionsRequest): Promise<
  * // Output:
  * // ['Hello', 'Hello! ', 'Hello! How ', 'Hello! How are ', 'Hello! How are you', 'Hello! How are you?']
  */
-export function streamChatCompletions(request: ChatCompletionsRequest): Observable<ChatCompletionsResponse<ChatCompletionsChunk>> {
+export function streamChatCompletions(
+  request: ChatCompletionsRequest
+): Observable<ChatCompletionsResponse<ChatCompletionsChunk>> {
   const channel: LiveChannelAddress = {
     scope: LiveChannelScope.Plugin,
     namespace: LLM_PLUGIN_ID,
@@ -330,15 +350,17 @@ export function streamChatCompletions(request: ChatCompletionsRequest): Observab
   };
   const messages = getGrafanaLiveSrv()
     .getStream(channel)
-    .pipe(filter((event) => isLiveChannelMessageEvent(event))) as Observable<LiveChannelMessageEvent<ChatCompletionsResponse<ChatCompletionsChunk>>>
+    .pipe(filter((event) => isLiveChannelMessageEvent(event))) as Observable<
+      LiveChannelMessageEvent<ChatCompletionsResponse<ChatCompletionsChunk>>
+    >;
   return messages.pipe(
     tap((event) => {
       if (isErrorResponse(event.message)) {
         throw new Error(event.message.error);
       }
     }),
     takeWhile((event) => isErrorResponse(event.message) || !isDoneMessage(event.message.choices[0].delta)),
-    map((event) => event.message),
+    map((event) => event.message)
   );
 }
 
@@ -349,31 +371,37 @@ export const enabled = async (): Promise<OpenAIHealthDetails> => {
   // First check if the plugin is enabled.
   try {
     const settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
-      showSuccessAlert: false, showErrorAlert: false,
+      showSuccessAlert: false,
+      showErrorAlert: false,
     });
     if (!settings.enabled) {
-      return { configured: false, ok: false, error: 'The Grafana LLM plugin is not enabled.' }
+      return { configured: false, ok: false, error: 'The Grafana LLM plugin is not enabled.' };
     }
   } catch (e) {
     logDebug(String(e));
-    logDebug('Failed to check if OpenAI is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
+    logDebug(
+      'Failed to check if OpenAI is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.'
+    );
     loggedWarning = true;
-    return { configured: false, ok: false, error: 'The Grafana LLM plugin is not installed.' }
+    return { configured: false, ok: false, error: 'The Grafana LLM plugin is not installed.' };
   }
 
   // Run a health check to see if OpenAI is configured on the plugin.
   let response: HealthCheckResponse;
   try {
     response = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/health`, undefined, undefined, {
-      showSuccessAlert: false, showErrorAlert: false,
+      showSuccessAlert: false,
+      showErrorAlert: false,
     });
   } catch (e) {
     if (!loggedWarning) {
       logDebug(String(e));
-      logDebug('Failed to check if OpenAI is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
+      logDebug(
+        'Failed to check if OpenAI is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.'
+      );
       loggedWarning = true;
     }
-    return { configured: false, ok: false, error: 'The Grafana LLM plugin is not installed.' }
+    return { configured: false, ok: false, error: 'The Grafana LLM plugin is not installed.' };
   }
 
   const { details } = response;
@@ -382,9 +410,7 @@ export const enabled = async (): Promise<OpenAIHealthDetails> => {
     setLLMPluginVersion(details.version);
   }
   if (details?.openAI === undefined) {
-    return { configured: false, ok: false, error: 'The Grafana LLM plugin is outdated; please update it.' }
+    return { configured: false, ok: false, error: 'The Grafana LLM plugin is outdated; please update it.' };
   }
-  return typeof details.openAI === 'boolean' ?
-    { configured: details.openAI, ok: details.openAI } :
-    details.openAI;
-}
+  return typeof details.openAI === 'boolean' ? { configured: details.openAI, ok: details.openAI } : details.openAI;
+};
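Taken together, the reformatted openai.ts surface is: enabled() as the health gate, chatCompletions() for one-shot requests, and streamChatCompletions() piped through accumulateContent() for streaming, as the JSDoc examples above describe. A hedged usage sketch; the model and messages fields on ChatCompletionsRequest are assumed to follow OpenAI's shape, since that interface sits outside these hunks:

import { enabled, chatCompletions, streamChatCompletions, accumulateContent } from './openai';

async function askOpenAI() {
  // Gate on the plugin being installed and configured.
  const health = await enabled();
  if (!health.ok) {
    console.error(health.error ?? 'OpenAI is not configured');
    return;
  }

  // Assumed OpenAI-style request; ChatCompletionsRequest is defined
  // elsewhere in this module and not shown in the diff.
  const request = {
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user' as const, content: 'Hello!' }],
  };

  // One-shot completion via the Grafana LLM plugin proxy.
  const response = await chatCompletions(request);
  console.log(response);

  // Streaming completion: accumulateContent() folds the content deltas
  // into progressively longer strings ('Hello', 'Hello! ', ...).
  streamChatCompletions(request)
    .pipe(accumulateContent())
    .subscribe((partial) => console.log(partial));
}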
46 changes: 27 additions & 19 deletions src/llms/vector.ts
@@ -8,11 +8,11 @@
  * The {@link enabled} function can be used to check if the plugin is enabled and configured.
  */
 
-import { getBackendSrv, logDebug } from "@grafana/runtime";
-import { LLM_PLUGIN_ROUTE, setLLMPluginVersion } from "./constants";
-import { HealthCheckResponse, VectorHealthDetails } from "./types";
+import { getBackendSrv, logDebug } from '@grafana/runtime';
+import { LLM_PLUGIN_ROUTE, setLLMPluginVersion } from './constants';
+import { HealthCheckResponse, VectorHealthDetails } from './types';
 
-interface SearchResultPayload extends Record<string, any> { }
+interface SearchResultPayload extends Record<string, any> {}
 
 /**
  * A request to search for resources in the vector database.
@@ -28,7 +28,7 @@ export interface SearchRequest {
 
   /**
    * Limit the number of results returned to the top `topK` results.
-   * 
+   *
    * Defaults to 10.
    **/
   topK?: number;
@@ -69,9 +69,13 @@ interface SearchResultResponse<T extends SearchResultPayload> {
  * Search for resources in the configured vector database.
  */
 export async function search<T extends SearchResultPayload>(request: SearchRequest): Promise<Array<SearchResult<T>>> {
-  const response = await getBackendSrv().post<SearchResultResponse<T>>('/api/plugins/grafana-llm-app/resources/vector/search', request, {
-    headers: { 'Content-Type': 'application/json' }
-  });
+  const response = await getBackendSrv().post<SearchResultResponse<T>>(
+    '/api/plugins/grafana-llm-app/resources/vector/search',
+    request,
+    {
+      headers: { 'Content-Type': 'application/json' },
+    }
+  );
   return response.results;
 }
 
@@ -82,33 +86,39 @@ export const enabled = async (): Promise<VectorHealthDetails> => {
   // First check if the plugin is enabled.
   try {
     const settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
-      showSuccessAlert: false, showErrorAlert: false,
+      showSuccessAlert: false,
+      showErrorAlert: false,
    });
     if (!settings.enabled) {
-      return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not enabled.' }
+      return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not enabled.' };
     }
   } catch (e) {
     logDebug(String(e));
-    logDebug('Failed to check if the vector service is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
+    logDebug(
+      'Failed to check if the vector service is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.'
+    );
     loggedWarning = true;
-    return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not installed.' }
+    return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not installed.' };
   }
 
   // Run a health check to see if the vector service is configured on the plugin.
   let response: HealthCheckResponse;
   try {
     response = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/health`, undefined, undefined, {
-      showSuccessAlert: false, showErrorAlert: false,
+      showSuccessAlert: false,
+      showErrorAlert: false,
     });
   } catch (e) {
     // We shouldn't really get here if we managed to get the plugin's settings above,
     // but catch this just in case.
     if (!loggedWarning) {
       logDebug(String(e));
-      logDebug('Failed to check if vector service is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
+      logDebug(
+        'Failed to check if vector service is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.'
+      );
       loggedWarning = true;
     }
-    return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not installed.' }
+    return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not installed.' };
   }
 
   const { details } = response;
@@ -117,9 +127,7 @@ export const enabled = async (): Promise<VectorHealthDetails> => {
     setLLMPluginVersion(details.version);
   }
   if (details?.vector === undefined) {
-    return { enabled: false, ok: false, error: 'The Grafana LLM plugin is outdated; please update it.' }
+    return { enabled: false, ok: false, error: 'The Grafana LLM plugin is outdated; please update it.' };
   }
-  return typeof details.vector === 'boolean' ?
-    { enabled: details.vector, ok: details.vector } :
-    details.vector;
+  return typeof details.vector === 'boolean' ? { enabled: details.vector, ok: details.vector } : details.vector;
 };
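vector.ts mirrors the openai.ts layout: an enabled() health gate plus a single search() call that posts to the plugin proxy and unwraps response.results. A sketch of a typed search; only topK (default 10) is visible in these hunks, so the query field and the payload interface below are assumptions:

import { enabled, search } from './vector';

// Hypothetical payload shape: SearchResultPayload is just Record<string, any>,
// so the concrete fields depend on what was stored in the vector database.
interface DashboardPayload {
  title: string;
  uid: string;
}

async function findDashboards(query: string) {
  const health = await enabled();
  if (!health.ok) {
    throw new Error(health.error ?? 'The vector service is not available.');
  }

  // `query` is an assumed SearchRequest field; the diff only shows `topK`.
  const results = await search<DashboardPayload>({ query, topK: 5 });
  results.forEach((result) => console.log(result));
}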
