From e4b61c4a2a4eefb4794f93ed4287370a9d8a3cb5 Mon Sep 17 00:00:00 2001
From: Ben Sully
Date: Tue, 1 Aug 2023 11:50:29 +0100
Subject: [PATCH] Log a debug message the first time checking OpenAI settings
 fails with e.g. a NetworkError

This will help users debug connectivity or LLM related issues.
---
 src/llms/openai.ts | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/src/llms/openai.ts b/src/llms/openai.ts
index ce60fe8..6ee7c85 100644
--- a/src/llms/openai.ts
+++ b/src/llms/openai.ts
@@ -9,7 +9,7 @@
  */
 
 import { isLiveChannelMessageEvent, LiveChannelAddress, LiveChannelMessageEvent, LiveChannelScope } from "@grafana/data";
-import { getBackendSrv, getGrafanaLiveSrv } from "@grafana/runtime";
+import { getBackendSrv, getGrafanaLiveSrv, logDebug } from "@grafana/runtime";
 import { Observable } from "rxjs";
 import { filter, map, takeWhile } from "rxjs/operators";
 
@@ -227,6 +227,8 @@ export function streamChatCompletions(request: ChatCompletionsRequest): Observab
   );
 }
 
+let loggedWarning = false;
+
 /**
  * Check if the OpenAI API is enabled via the LLM plugin.
  */
@@ -236,7 +238,12 @@ export const enabled = async () => {
       showSuccessAlert: false, showErrorAlert: false,
     });
     return settings.enabled && (settings?.secureJsonFields?.openAIKey ?? false);
-  } catch (_e) {
+  } catch (e) {
+    if (!loggedWarning) {
+      logDebug(String(e));
+      logDebug('Failed to check if OpenAI is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
+      loggedWarning = true;
+    }
     return false;
   }
 }