-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #59 from grafana/add-client
Add client package
- Loading branch information
Showing
5 changed files
with
107 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -33,6 +33,7 @@ | |
"grafana", | ||
"httpadapter", | ||
"instancemgmt", | ||
"llmclient", | ||
"llms", | ||
"nolint", | ||
"openai", | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
module github.com/grafana/grafana-llm-app/llmclient | ||
|
||
go 1.19 | ||
|
||
require github.com/sashabaranov/go-openai v1.15.3 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,2 @@ | ||
github.com/sashabaranov/go-openai v1.15.3 h1:rzoNK9n+Cak+PM6OQ9puxDmFllxfnVea9StlmhglXqA= | ||
github.com/sashabaranov/go-openai v1.15.3/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg= |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,97 @@ | ||
// Package llmclient provides a client for the Grafana LLM app.
// It is used to communicate with LLM providers via the Grafana LLM app
// using the configuration stored in the app to handle authentication.
package llmclient

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"

	"github.com/sashabaranov/go-openai"
)

const (
	// appResourcesPrefix is the URL path prefix under which the Grafana
	// LLM app plugin serves its resource endpoints on a Grafana instance.
	appResourcesPrefix = "/api/plugins/grafana-llm-app/resources"
)
|
||
// OpenAI is an interface for talking to OpenAI via the Grafana LLM app.
// Requests made using this interface will be routed to the OpenAI backend
// configured in the Grafana LLM app's settings, with authentication handled
// by the LLM app.
type OpenAI interface {
	// Enabled returns true if the Grafana LLM app has been configured for use
	// with OpenAI.
	Enabled(ctx context.Context) (bool, error)
	// ChatCompletions makes a request to the OpenAI Chat Completion API.
	ChatCompletions(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error)
	// ChatCompletionsStream makes a streaming request to the OpenAI Chat Completion API.
	// The caller is responsible for closing the returned stream.
	ChatCompletionsStream(ctx context.Context, req openai.ChatCompletionRequest) (*openai.ChatCompletionStream, error)
}
|
||
// openAI is the concrete implementation of the OpenAI interface,
// proxying all LLM traffic through the Grafana LLM app.
type openAI struct {
	// httpClient is used for direct requests to Grafana (e.g. the health check).
	httpClient *http.Client
	// client is a go-openai client configured to point at the Grafana LLM
	// app's OpenAI-compatible resource endpoints.
	client *openai.Client

	// grafanaURL is the base URL of the Grafana instance; grafanaAPIKey is
	// used as a Bearer token for requests made directly to Grafana.
	grafanaURL, grafanaAPIKey string
}
|
||
// NewOpenAI creates a new OpenAI client talking to the Grafana LLM app installed | ||
// on the given Grafana instance. | ||
func NewOpenAI(grafanaURL, grafanaAPIKey string) OpenAI { | ||
httpClient := &http.Client{} | ||
return NewOpenAIWithClient(grafanaURL, grafanaAPIKey, httpClient) | ||
} | ||
|
||
// NewOpenAIWithClient creates a new OpenAI client talking to the Grafana LLM app installed | ||
// on the given Grafana instance, using the given HTTP client. | ||
func NewOpenAIWithClient(grafanaURL, grafanaAPIKey string, httpClient *http.Client) OpenAI { | ||
url := strings.TrimRight(grafanaURL, "/") + appResourcesPrefix + "/openai/v1" | ||
cfg := openai.DefaultConfig(grafanaAPIKey) | ||
cfg.BaseURL = url | ||
cfg.HTTPClient = httpClient | ||
client := openai.NewClientWithConfig(cfg) | ||
return &openAI{ | ||
httpClient: httpClient, | ||
client: client, | ||
grafanaURL: grafanaURL, | ||
grafanaAPIKey: grafanaAPIKey, | ||
} | ||
} | ||
|
||
// healthCheckResponse mirrors the JSON body returned by the LLM app's
// health check resource endpoint; only the fields we read are declared.
type healthCheckResponse struct {
	Details struct {
		// OpenAIEnabled reports whether the app is configured for OpenAI.
		OpenAIEnabled bool `json:"openAI"`
		// VectorEnabled reports whether the app's vector service is configured.
		VectorEnabled bool `json:"vector"`
	} `json:"details"`
}
|
||
func (o *openAI) Enabled(ctx context.Context) (bool, error) { | ||
req, err := http.NewRequestWithContext(ctx, "GET", o.grafanaURL+appResourcesPrefix+"/health", nil) | ||
if err != nil { | ||
return false, fmt.Errorf("create request: %w", err) | ||
} | ||
req.Header.Set("Authorization", "Bearer "+o.grafanaAPIKey) | ||
resp, err := o.httpClient.Do(req) | ||
if err != nil { | ||
return false, fmt.Errorf("make request: %w", err) | ||
} | ||
if resp.StatusCode != http.StatusOK { | ||
return false, nil | ||
} | ||
var response healthCheckResponse | ||
if err := json.NewDecoder(resp.Body).Decode(&response); err != nil { | ||
return false, fmt.Errorf("unmarshal response: %w", err) | ||
} | ||
return response.Details.OpenAIEnabled, nil | ||
} | ||
|
||
// ChatCompletions makes a non-streaming request to the OpenAI Chat Completion
// API, routed through the Grafana LLM app's OpenAI-compatible proxy.
func (o *openAI) ChatCompletions(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
	return o.client.CreateChatCompletion(ctx, req)
}
|
||
// ChatCompletionsStream makes a streaming request to the OpenAI Chat
// Completion API, routed through the Grafana LLM app's OpenAI-compatible
// proxy. The caller must Close the returned stream when done.
func (o *openAI) ChatCompletionsStream(ctx context.Context, req openai.ChatCompletionRequest) (*openai.ChatCompletionStream, error) {
	return o.client.CreateChatCompletionStream(ctx, req)
}