From 39ce3c2b145f1abd266d183a9f570eb03002b72c Mon Sep 17 00:00:00 2001
From: Paul Coghlan
Date: Mon, 4 Nov 2024 15:56:55 +0000
Subject: [PATCH] Add Assistant healthcheck

---
 packages/grafana-llm-app/go.mod               |  4 ++--
 packages/grafana-llm-app/go.sum               |  4 ++--
 .../grafana-llm-app/llmclient/llmclient.go    |  1 +
 .../pkg/plugin/grafana_provider.go            |  4 ++++
 packages/grafana-llm-app/pkg/plugin/health.go | 21 +++++++++++++++++++
 .../pkg/plugin/llm_provider.go                |  2 ++
 .../pkg/plugin/openai_provider.go             |  4 ++++
 .../pkg/plugin/test_provider.go               |  4 ++++
 packages/grafana-llm-frontend/src/types.ts    |  2 ++
 9 files changed, 42 insertions(+), 4 deletions(-)

diff --git a/packages/grafana-llm-app/go.mod b/packages/grafana-llm-app/go.mod
index 46d313ac..dc5a19ba 100644
--- a/packages/grafana-llm-app/go.mod
+++ b/packages/grafana-llm-app/go.mod
@@ -5,9 +5,9 @@ go 1.22.2
 toolchain go1.23.2
 
 require (
-	github.com/grafana/grafana-plugin-sdk-go v0.252.0
-	github.com/qdrant/go-client v1.12.0
 	github.com/sashabaranov/go-openai v1.32.0
+	github.com/grafana/grafana-plugin-sdk-go v0.251.0
+	github.com/qdrant/go-client v1.11.0
 	github.com/stretchr/testify v1.9.0
 	google.golang.org/grpc v1.67.1
 )
diff --git a/packages/grafana-llm-app/go.sum b/packages/grafana-llm-app/go.sum
index e0afba1c..6ea02610 100644
--- a/packages/grafana-llm-app/go.sum
+++ b/packages/grafana-llm-app/go.sum
@@ -65,8 +65,8 @@ github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e h1:JKmoR8x90Iww1
 github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
 github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
 github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
-github.com/grafana/grafana-plugin-sdk-go v0.252.0 h1:GEnMZOQl+yN96Cg9EPZAcPRKI65LeYvryAQzkYckES0=
-github.com/grafana/grafana-plugin-sdk-go v0.252.0/go.mod h1:gCGN9kHY3KeX4qyni3+Kead38Q+85pYOrsDcxZp6AIk=
+github.com/grafana/grafana-plugin-sdk-go v0.251.0 h1:gnOtxrC/1rqFvpSbQYyoZqkr47oWDlz4Q2L6Ozmsi3w=
+github.com/grafana/grafana-plugin-sdk-go v0.251.0/go.mod h1:gCGN9kHY3KeX4qyni3+Kead38Q+85pYOrsDcxZp6AIk=
 github.com/grafana/otel-profiling-go v0.5.1 h1:stVPKAFZSa7eGiqbYuG25VcqYksR6iWvF3YH66t4qL8=
 github.com/grafana/otel-profiling-go v0.5.1/go.mod h1:ftN/t5A/4gQI19/8MoWurBEtC6gFw8Dns1sJZ9W4Tls=
 github.com/grafana/pyroscope-go/godeltaprof v0.1.8 h1:iwOtYXeeVSAeYefJNaxDytgjKtUuKQbJqgAIjlnicKg=
diff --git a/packages/grafana-llm-app/llmclient/llmclient.go b/packages/grafana-llm-app/llmclient/llmclient.go
index 57f691d5..6efb515f 100644
--- a/packages/grafana-llm-app/llmclient/llmclient.go
+++ b/packages/grafana-llm-app/llmclient/llmclient.go
@@ -128,6 +128,7 @@ type openAIHealthDetails struct {
 	OK     bool                        `json:"ok"`
 	Error  string                      `json:"error,omitempty"`
 	Models map[Model]openAIModelHealth `json:"models"`
+	Assistant openAIModelHealth        `json:"assistant"`
 }
 
 type vectorHealthDetails struct {
diff --git a/packages/grafana-llm-app/pkg/plugin/grafana_provider.go b/packages/grafana-llm-app/pkg/plugin/grafana_provider.go
index f62faebe..455a621e 100644
--- a/packages/grafana-llm-app/pkg/plugin/grafana_provider.go
+++ b/packages/grafana-llm-app/pkg/plugin/grafana_provider.go
@@ -73,3 +73,7 @@ func (p *grafanaProvider) ChatCompletionStream(ctx context.Context, req ChatComp
 	r.Model = req.Model.toOpenAI(DEFAULT_MODEL_SETTINGS)
 	return streamOpenAIRequest(ctx, r, p.oc)
 }
+
+func (p *grafanaProvider) ListAssistants(ctx context.Context, limit *int, order *string, after *string, before *string) (openai.AssistantsList, error) {
+	return p.oc.ListAssistants(ctx, limit, order, after, before)
+}
diff --git a/packages/grafana-llm-app/pkg/plugin/health.go b/packages/grafana-llm-app/pkg/plugin/health.go
index 75ec1ed0..386dad67 100644
--- a/packages/grafana-llm-app/pkg/plugin/health.go
+++ b/packages/grafana-llm-app/pkg/plugin/health.go
@@ -22,6 +22,7 @@ type openAIHealthDetails struct {
 	OK     bool                        `json:"ok"`
 	Error  string                      `json:"error,omitempty"`
 	Models map[Model]openAIModelHealth `json:"models"`
+	Assistant openAIModelHealth        `json:"assistant"`
 }
 
 type vectorHealthDetails struct {
@@ -66,6 +67,17 @@ func (a *App) testOpenAIModel(ctx context.Context, model Model) error {
 	return nil
 }
 
+func (a *App) testOpenAIAssistant(ctx context.Context) error {
+	llmProvider, err := createProvider(a.settings)
+	if err != nil {
+		return err
+	}
+
+	limit := 1
+	_, err = llmProvider.ListAssistants(ctx, &limit, nil, nil, nil)
+	return err
+}
+
 // openAIHealth checks the health of the OpenAI configuration and caches the
 // result if successful. The caller must lock a.healthCheckMutex.
 func (a *App) openAIHealth(ctx context.Context, req *backend.CheckHealthRequest) (openAIHealthDetails, error) {
@@ -86,6 +98,7 @@ func (a *App) openAIHealth(ctx context.Context, req *backend.CheckHealthRequest)
 		OK:         true,
 		Configured: a.settings.OpenAI.Configured(),
 		Models:     map[Model]openAIModelHealth{},
+		Assistant:  openAIModelHealth{OK: false, Error: "Assistant not present"},
 	}
 
 	for _, model := range openAIModels {
@@ -113,6 +126,14 @@ func (a *App) openAIHealth(ctx context.Context, req *backend.CheckHealthRequest)
 		d.Error = "No functioning models are available"
 	}
 
+	if d.Configured {
+		err := a.testOpenAIAssistant(ctx)
+		if err == nil {
+			d.Assistant.OK = true
+			d.Assistant.Error = ""
+		}
+	}
+
 	// Only cache result if openAI is ok to use.
 	if d.OK {
 		a.healthOpenAI = &d
diff --git a/packages/grafana-llm-app/pkg/plugin/llm_provider.go b/packages/grafana-llm-app/pkg/plugin/llm_provider.go
index ed23783d..aed43952 100644
--- a/packages/grafana-llm-app/pkg/plugin/llm_provider.go
+++ b/packages/grafana-llm-app/pkg/plugin/llm_provider.go
@@ -133,4 +133,6 @@ type LLMProvider interface {
 	// ChatCompletionStream provides text completion in a chat-like interface with
 	// tokens being sent as they are ready.
 	ChatCompletionStream(context.Context, ChatCompletionRequest) (<-chan ChatCompletionStreamResponse, error)
+	// ListAssistants lists assistants.
+	ListAssistants(context.Context, *int, *string, *string, *string) (openai.AssistantsList, error)
 }
diff --git a/packages/grafana-llm-app/pkg/plugin/openai_provider.go b/packages/grafana-llm-app/pkg/plugin/openai_provider.go
index 937cdb89..fd414771 100644
--- a/packages/grafana-llm-app/pkg/plugin/openai_provider.go
+++ b/packages/grafana-llm-app/pkg/plugin/openai_provider.go
@@ -98,3 +98,7 @@ func streamOpenAIRequest(ctx context.Context, r openai.ChatCompletionRequest, oc
 	}()
 	return c, nil
 }
+
+func (p *openAI) ListAssistants(ctx context.Context, limit *int, order *string, after *string, before *string) (openai.AssistantsList, error) {
+	return p.oc.ListAssistants(ctx, limit, order, after, before)
+}
diff --git a/packages/grafana-llm-app/pkg/plugin/test_provider.go b/packages/grafana-llm-app/pkg/plugin/test_provider.go
index 7171b585..3b65761d 100644
--- a/packages/grafana-llm-app/pkg/plugin/test_provider.go
+++ b/packages/grafana-llm-app/pkg/plugin/test_provider.go
@@ -139,3 +139,7 @@ func (p *testProvider) ChatCompletionStream(ctx context.Context, req ChatComplet
 	}
 	return c, nil
 }
+
+func (p *testProvider) ListAssistants(ctx context.Context, limit *int, order *string, after *string, before *string) (openai.AssistantsList, error) {
+	return openai.AssistantsList{}, nil
+}
diff --git a/packages/grafana-llm-frontend/src/types.ts b/packages/grafana-llm-frontend/src/types.ts
index 35ab19ba..51637343 100644
--- a/packages/grafana-llm-frontend/src/types.ts
+++ b/packages/grafana-llm-frontend/src/types.ts
@@ -21,6 +21,8 @@ export interface OpenAIHealthDetails {
   // The health check attempts to call the OpenAI API with each
   // of a few models and records the result of each call here.
   models?: Record<string, OpenAIModelHealthDetails>;
+  // Health details for the OpenAI assistant model.
+  assistant?: OpenAIModelHealthDetails;
 }
 
 export interface OpenAIModelHealthDetails {
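Reviewer note (illustrative sketch, not part of the patch): the new assistant field rides along in the same health-check details JSON as the per-model results. The snippet below marshals local mirror types to show the resulting shape; the real structs live in pkg/plugin/health.go and llmclient/llmclient.go, their Models map is keyed by the plugin's Model type rather than plain strings, and the "base" key used here is only an example.

package main

import (
	"encoding/json"
	"fmt"
)

// modelHealth mirrors the plugin's openAIModelHealth; the ok/error JSON tags
// come from the patch context.
type modelHealth struct {
	OK    bool   `json:"ok"`
	Error string `json:"error,omitempty"`
}

// healthDetails mirrors only the openAIHealthDetails fields visible in the
// patch; the real struct has additional fields (e.g. Configured) omitted here.
type healthDetails struct {
	OK        bool                   `json:"ok"`
	Error     string                 `json:"error,omitempty"`
	Models    map[string]modelHealth `json:"models"`
	Assistant modelHealth            `json:"assistant"`
}

func main() {
	// The patch initialises Assistant to {OK: false, Error: "Assistant not
	// present"} and only flips it to OK once ListAssistants succeeds.
	d := healthDetails{
		OK:        true,
		Models:    map[string]modelHealth{"base": {OK: true}},
		Assistant: modelHealth{OK: false, Error: "Assistant not present"},
	}
	out, err := json.MarshalIndent(d, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}

On the frontend the same object should arrive as OpenAIHealthDetails, so code there can check details.assistant?.ok to see whether the Assistants API call succeeded.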