Skip to content

Commit

Permalink
Add Assistant healthcheck
Browse files Browse the repository at this point in the history
  • Loading branch information
paulcoghlan committed Nov 4, 2024
1 parent c0f9070 commit 39ce3c2
Show file tree
Hide file tree
Showing 9 changed files with 42 additions and 4 deletions.
4 changes: 2 additions & 2 deletions packages/grafana-llm-app/go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@ go 1.22.2
toolchain go1.23.2

require (
github.com/grafana/grafana-plugin-sdk-go v0.252.0
github.com/qdrant/go-client v1.12.0
github.com/sashabaranov/go-openai v1.32.0
github.com/grafana/grafana-plugin-sdk-go v0.251.0
github.com/qdrant/go-client v1.11.0
github.com/stretchr/testify v1.9.0
google.golang.org/grpc v1.67.1
)
Expand Down
4 changes: 2 additions & 2 deletions packages/grafana-llm-app/go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,8 @@ github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e h1:JKmoR8x90Iww1
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/grafana/grafana-plugin-sdk-go v0.252.0 h1:GEnMZOQl+yN96Cg9EPZAcPRKI65LeYvryAQzkYckES0=
github.com/grafana/grafana-plugin-sdk-go v0.252.0/go.mod h1:gCGN9kHY3KeX4qyni3+Kead38Q+85pYOrsDcxZp6AIk=
github.com/grafana/grafana-plugin-sdk-go v0.251.0 h1:gnOtxrC/1rqFvpSbQYyoZqkr47oWDlz4Q2L6Ozmsi3w=
github.com/grafana/grafana-plugin-sdk-go v0.251.0/go.mod h1:gCGN9kHY3KeX4qyni3+Kead38Q+85pYOrsDcxZp6AIk=
github.com/grafana/otel-profiling-go v0.5.1 h1:stVPKAFZSa7eGiqbYuG25VcqYksR6iWvF3YH66t4qL8=
github.com/grafana/otel-profiling-go v0.5.1/go.mod h1:ftN/t5A/4gQI19/8MoWurBEtC6gFw8Dns1sJZ9W4Tls=
github.com/grafana/pyroscope-go/godeltaprof v0.1.8 h1:iwOtYXeeVSAeYefJNaxDytgjKtUuKQbJqgAIjlnicKg=
Expand Down
1 change: 1 addition & 0 deletions packages/grafana-llm-app/llmclient/llmclient.go
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ type openAIHealthDetails struct {
OK bool `json:"ok"`
Error string `json:"error,omitempty"`
Models map[Model]openAIModelHealth `json:"models"`
Assistant openAIModelHealth `json:"assistant"`
}

type vectorHealthDetails struct {
Expand Down
4 changes: 4 additions & 0 deletions packages/grafana-llm-app/pkg/plugin/grafana_provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -73,3 +73,7 @@ func (p *grafanaProvider) ChatCompletionStream(ctx context.Context, req ChatComp
r.Model = req.Model.toOpenAI(DEFAULT_MODEL_SETTINGS)
return streamOpenAIRequest(ctx, r, p.oc)
}

// ListAssistants lists assistants by delegating directly to the underlying
// OpenAI client; all paging parameters are passed through unchanged.
func (p *grafanaProvider) ListAssistants(ctx context.Context, limit *int, order *string, after *string, before *string) (openai.AssistantsList, error) {
	assistants, err := p.oc.ListAssistants(ctx, limit, order, after, before)
	return assistants, err
}
21 changes: 21 additions & 0 deletions packages/grafana-llm-app/pkg/plugin/health.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ type openAIHealthDetails struct {
OK bool `json:"ok"`
Error string `json:"error,omitempty"`
Models map[Model]openAIModelHealth `json:"models"`
Assistant openAIModelHealth `json:"assistant"`
}

type vectorHealthDetails struct {
Expand Down Expand Up @@ -66,6 +67,17 @@ func (a *App) testOpenAIModel(ctx context.Context, model Model) error {
return nil
}

// testOpenAIAssistant checks that the Assistants API is reachable for the
// configured provider by requesting a single assistant. A nil return means
// the call succeeded; any provider-creation or API error is returned as-is.
func (a *App) testOpenAIAssistant(ctx context.Context) error {
	provider, err := createProvider(a.settings)
	if err != nil {
		return err
	}

	// One assistant is enough to prove the endpoint works.
	limit := 1
	if _, err := provider.ListAssistants(ctx, &limit, nil, nil, nil); err != nil {
		return err
	}
	return nil
}

// openAIHealth checks the health of the OpenAI configuration and caches the
// result if successful. The caller must lock a.healthCheckMutex.
func (a *App) openAIHealth(ctx context.Context, req *backend.CheckHealthRequest) (openAIHealthDetails, error) {
Expand All @@ -86,6 +98,7 @@ func (a *App) openAIHealth(ctx context.Context, req *backend.CheckHealthRequest)
OK: true,
Configured: a.settings.OpenAI.Configured(),
Models: map[Model]openAIModelHealth{},
Assistant: openAIModelHealth{OK: false, Error: "Assistant not present"},
}

for _, model := range openAIModels {
Expand Down Expand Up @@ -113,6 +126,14 @@ func (a *App) openAIHealth(ctx context.Context, req *backend.CheckHealthRequest)
d.Error = "No functioning models are available"
}

if d.Configured {
err := a.testOpenAIAssistant(ctx)
if err == nil {
d.Assistant.OK = true
d.Assistant.Error = ""
}
}

// Only cache result if openAI is ok to use.
if d.OK {
a.healthOpenAI = &d
Expand Down
2 changes: 2 additions & 0 deletions packages/grafana-llm-app/pkg/plugin/llm_provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -133,4 +133,6 @@ type LLMProvider interface {
// ChatCompletionStream provides text completion in a chat-like interface with
// tokens being sent as they are ready.
ChatCompletionStream(context.Context, ChatCompletionRequest) (<-chan ChatCompletionStreamResponse, error)
// ListAssistants lists assistants.
ListAssistants(context.Context, *int, *string, *string, *string) (openai.AssistantsList, error)
}
4 changes: 4 additions & 0 deletions packages/grafana-llm-app/pkg/plugin/openai_provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -98,3 +98,7 @@ func streamOpenAIRequest(ctx context.Context, r openai.ChatCompletionRequest, oc
}()
return c, nil
}

// ListAssistants lists assistants via the wrapped go-openai client,
// forwarding the paging parameters verbatim.
func (p *openAI) ListAssistants(ctx context.Context, limit *int, order *string, after *string, before *string) (openai.AssistantsList, error) {
	list, err := p.oc.ListAssistants(ctx, limit, order, after, before)
	return list, err
}
4 changes: 4 additions & 0 deletions packages/grafana-llm-app/pkg/plugin/test_provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -139,3 +139,7 @@ func (p *testProvider) ChatCompletionStream(ctx context.Context, req ChatComplet
}
return c, nil
}

// ListAssistants is the test stub: it ignores its arguments and always
// reports success with an empty assistant list.
func (p *testProvider) ListAssistants(ctx context.Context, limit *int, order *string, after *string, before *string) (openai.AssistantsList, error) {
	var empty openai.AssistantsList
	return empty, nil
}
2 changes: 2 additions & 0 deletions packages/grafana-llm-frontend/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ export interface OpenAIHealthDetails {
// The health check attempts to call the OpenAI API with each
// of a few models and records the result of each call here.
models?: Record<string, OpenAIModelHealthDetails>;
// Health details for the OpenAI assistant model.
assistant?: OpenAIModelHealthDetails;
}

export interface OpenAIModelHealthDetails {
Expand Down

0 comments on commit 39ce3c2

Please sign in to comment.