diff --git a/.config/tsconfig.json b/.config/tsconfig.json index 64b37690..a606c5cf 100644 --- a/.config/tsconfig.json +++ b/.config/tsconfig.json @@ -2,7 +2,7 @@ * ⚠️⚠️⚠️ THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY. ⚠️⚠️⚠️ * * In order to extend the configuration follow the steps in - * https://grafana.github.io/plugin-tools/docs/advanced-configuration#extending-the-typescript-config + * https://grafana.com/developers/plugin-tools/create-a-plugin/extend-a-plugin/extend-configurations#extend-the-typescript-config */ { "compilerOptions": { diff --git a/README.md b/README.md index 2e5cdee5..23d5b23f 100644 --- a/README.md +++ b/README.md @@ -143,7 +143,7 @@ const MyComponent = (): JSX.Element => { 1. Install dependencies ```bash - npm run install + npm install ``` 2. Build plugin in development mode and run in watch mode diff --git a/src/utils/utils.api.ts b/src/utils/utils.api.ts index 56c52c7a..041a8c70 100644 --- a/src/utils/utils.api.ts +++ b/src/utils/utils.api.ts @@ -6,10 +6,14 @@ import { filter, map, takeWhile } from "rxjs/operators"; import pluginJson from '../plugin.json'; +export interface Message { + role: string, + content: string, +} + export interface ChatCompletionsProps { model: string; - systemPrompt: string; - userPrompt: string; + messages: Message[]; } interface Choice { @@ -22,13 +26,10 @@ interface ChatCompletionsResponse<T = Choice> { choices: T[]; } -export const chatCompletions = async ({ model, systemPrompt, userPrompt }: ChatCompletionsProps): Promise<string> => { +export const chatCompletions = async ({ model, messages }: ChatCompletionsProps): Promise<string> => { const response = await getBackendSrv().post('/api/plugins/grafana-llm-app/resources/openai/v1/chat/completions', { model, - messages: [ - { role: 'system', content: systemPrompt }, - { role: 'user', content: userPrompt }, - ], + messages, }, { headers: { 'Content-Type': 'application/json' } }); return response.choices[0].message.content; } @@ -59,23 +60,20 @@ const
isDoneMessage = (message: any): message is DoneMessage => { return message.done !== undefined; } -export const streamChatCompletions = ({ model, systemPrompt, userPrompt }: ChatCompletionsProps): Observable<string> => { +export const streamChatCompletions = ({ model, messages }: ChatCompletionsProps): Observable<string> => { const channel: LiveChannelAddress = { scope: LiveChannelScope.Plugin, namespace: pluginJson.id, path: `/openai/v1/chat/completions`, data: { model, - messages: [ - { role: 'system', content: systemPrompt }, - { role: 'user', content: userPrompt }, - ], + messages, }, }; - const messages = getGrafanaLiveSrv() + const responses = getGrafanaLiveSrv() .getStream(channel) .pipe(filter((event) => isLiveChannelMessageEvent(event))) as Observable<LiveChannelMessageEvent<ChatCompletionsResponse<{ delta: ContentMessage | DoneMessage }>>> - return messages.pipe( + return responses.pipe( takeWhile((event) => !isDoneMessage(event.message.choices[0].delta)), map((event) => event.message.choices[0].delta), filter((delta) => isContentMessage(delta)), map((delta) => delta.content), ); } diff --git a/tsconfig.json b/tsconfig.json index d294745a..098b9046 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,3 +1,7 @@ { + "compilerOptions": { + "jsx": "react", + "lib": [ "es2015" ] + }, "extends": "./.config/tsconfig.json" -} \ No newline at end of file +}