Skip to content

Commit

Permalink
Merge pull request #59 from grafana/add-client
Browse files Browse the repository at this point in the history
Add client package
  • Loading branch information
sd2k authored Sep 26, 2023
2 parents db52792 + 9755245 commit 456179b
Show file tree
Hide file tree
Showing 5 changed files with 107 additions and 0 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

## Unreleased

* Add Go package providing an OpenAI client to use the LLM app from backend Go code

## 0.2.1

* Improve health check endpoint to include status of various features
Expand Down
1 change: 1 addition & 0 deletions cspell.config.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
"grafana",
"httpadapter",
"instancemgmt",
"llmclient",
"llms",
"nolint",
"openai",
Expand Down
5 changes: 5 additions & 0 deletions llmclient/go.mod
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
module github.com/grafana/grafana-llm-app/llmclient

go 1.19

require github.com/sashabaranov/go-openai v1.15.3
2 changes: 2 additions & 0 deletions llmclient/go.sum
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
github.com/sashabaranov/go-openai v1.15.3 h1:rzoNK9n+Cak+PM6OQ9puxDmFllxfnVea9StlmhglXqA=
github.com/sashabaranov/go-openai v1.15.3/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
97 changes: 97 additions & 0 deletions llmclient/llmclient.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
// Package llmclient provides a client for the Grafana LLM app.
// It is used to communicate with LLM providers via the Grafana LLM app
// using the configuration stored in the app to handle authentication.
package llmclient

import (
"context"
"encoding/json"
"fmt"
"net/http"
"strings"

"github.com/sashabaranov/go-openai"
)

const (
	// appResourcesPrefix is the URL path prefix under which the Grafana LLM
	// app plugin exposes its resource endpoints (health check, OpenAI proxy).
	appResourcesPrefix = "/api/plugins/grafana-llm-app/resources"
)

// OpenAI is an interface for talking to OpenAI via the Grafana LLM app.
// Requests made using this interface will be routed to the OpenAI backend
// configured in the Grafana LLM app's settings, with authentication handled
// by the LLM app.
type OpenAI interface {
	// Enabled returns true if the Grafana LLM app has been configured for use
	// with OpenAI.
	Enabled(ctx context.Context) (bool, error)
	// ChatCompletions makes a request to the OpenAI Chat Completion API.
	ChatCompletions(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error)
	// ChatCompletionsStream makes a streaming request to the OpenAI Chat Completion API.
	// The caller is responsible for closing the returned stream.
	ChatCompletionsStream(ctx context.Context, req openai.ChatCompletionRequest) (*openai.ChatCompletionStream, error)
}

// openAI is the concrete implementation of the OpenAI interface.
type openAI struct {
	// httpClient is used for direct requests to the Grafana API (health check).
	httpClient *http.Client
	// client is an OpenAI SDK client whose base URL points at the LLM app's
	// OpenAI proxy resource endpoint.
	client *openai.Client

	// grafanaURL and grafanaAPIKey identify and authenticate against the
	// Grafana instance hosting the LLM app.
	grafanaURL, grafanaAPIKey string
}

// NewOpenAI creates a new OpenAI client talking to the Grafana LLM app installed
// on the given Grafana instance.
func NewOpenAI(grafanaURL, grafanaAPIKey string) OpenAI {
	// Delegate to NewOpenAIWithClient with a default HTTP client.
	return NewOpenAIWithClient(grafanaURL, grafanaAPIKey, &http.Client{})
}

// NewOpenAIWithClient creates a new OpenAI client talking to the Grafana LLM app installed
// on the given Grafana instance, using the given HTTP client.
func NewOpenAIWithClient(grafanaURL, grafanaAPIKey string, httpClient *http.Client) OpenAI {
	// Point the OpenAI SDK at the LLM app's proxy endpoint so that the app
	// handles provider authentication; the Grafana API key authenticates us
	// against Grafana itself.
	cfg := openai.DefaultConfig(grafanaAPIKey)
	cfg.HTTPClient = httpClient
	cfg.BaseURL = strings.TrimRight(grafanaURL, "/") + appResourcesPrefix + "/openai/v1"
	return &openAI{
		httpClient:    httpClient,
		client:        openai.NewClientWithConfig(cfg),
		grafanaURL:    grafanaURL,
		grafanaAPIKey: grafanaAPIKey,
	}
}

// healthCheckResponse models the subset of the LLM app's health check
// endpoint response that this client cares about.
type healthCheckResponse struct {
	Details struct {
		// OpenAIEnabled reports whether the app is configured for OpenAI.
		OpenAIEnabled bool `json:"openAI"`
		// VectorEnabled reports whether the app's vector features are configured.
		VectorEnabled bool `json:"vector"`
	} `json:"details"`
}

// Enabled returns true if the Grafana LLM app has been configured for use
// with OpenAI, determined by querying the app's health check endpoint.
// A non-200 response is treated as "not enabled" rather than an error.
func (o *openAI) Enabled(ctx context.Context) (bool, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, o.grafanaURL+appResourcesPrefix+"/health", nil)
	if err != nil {
		return false, fmt.Errorf("create request: %w", err)
	}
	req.Header.Set("Authorization", "Bearer "+o.grafanaAPIKey)
	resp, err := o.httpClient.Do(req)
	if err != nil {
		return false, fmt.Errorf("make request: %w", err)
	}
	// Always close the body so the underlying connection can be reused;
	// the original code leaked it on every call.
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return false, nil
	}
	var response healthCheckResponse
	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
		return false, fmt.Errorf("unmarshal response: %w", err)
	}
	return response.Details.OpenAIEnabled, nil
}

// ChatCompletions makes a request to the OpenAI Chat Completion API via the
// Grafana LLM app proxy.
func (o *openAI) ChatCompletions(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
	resp, err := o.client.CreateChatCompletion(ctx, req)
	return resp, err
}

// ChatCompletionsStream makes a streaming request to the OpenAI Chat
// Completion API via the Grafana LLM app proxy. The caller must close the
// returned stream when done.
func (o *openAI) ChatCompletionsStream(ctx context.Context, req openai.ChatCompletionRequest) (*openai.ChatCompletionStream, error) {
	stream, err := o.client.CreateChatCompletionStream(ctx, req)
	return stream, err
}

0 comments on commit 456179b

Please sign in to comment.