feat: ai-prompt-template plugin #11517

Merged · 16 commits · Aug 29, 2024
1 change: 1 addition & 0 deletions apisix/cli/config.lua
@@ -213,6 +213,7 @@ local _M = {
"authz-keycloak",
"proxy-cache",
"body-transformer",
"ai-prompt-template",
"proxy-mirror",
"proxy-rewrite",
"workflow",
146 changes: 146 additions & 0 deletions apisix/plugins/ai-prompt-template.lua
@@ -0,0 +1,146 @@
--
-- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements. See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
local core = require("apisix.core")
local body_transformer = require("apisix.plugins.body-transformer")
local ipairs = ipairs

local prompt_schema = {
properties = {
role = {
type = "string",
enum = { "system", "user", "assistant" }
},
content = {
type = "string",
minLength = 1,
}
},
required = { "role", "content" }
}

local prompts = {
type = "array",
minItems = 1,
items = prompt_schema
}

local schema = {
type = "object",
properties = {
templates = {
type = "array",
minItems = 1,
items = {
type = "object",
properties = {
name = {
type = "string",
minLength = 1,
},
template = {
type = "object",
properties = {
model = {
type = "string",
minLength = 1,
},
messages = prompts
}
}
},
required = {"name", "template"}
}
},
},
required = {"templates"},
}
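-- For illustration, a minimal conf accepted by this schema looks like:
--   {
--     templates = {
--       {
--         name = "level of detail",
--         template = {
--           model = "gpt-4",
--           messages = { { role = "user", content = "Explain about {{ topic }} in {{ level }}." } }
--         }
--       }
--     }
--   }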


local _M = {
version = 0.1,
priority = 1060,
name = "ai-prompt-template",
schema = schema,
}
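-- NOTE: priority 1060 slots this plugin between body-transformer (1080) and
-- proxy-mirror (1010) in the default plugin ordering (see conf/config.yaml.example).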

local templates_lrucache = core.lrucache.new({
ttl = 300, count = 256
})

local templates_json_lrucache = core.lrucache.new({
ttl = 300, count = 256
})
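-- The two caches above: templates_lrucache memoises the template looked up by name
-- (keyed on the template name and versioned by the plugin conf, so entries are
-- refreshed when the configuration changes); templates_json_lrucache memoises the
-- JSON-encoded form of a matched template so it is not re-encoded on every request.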

function _M.check_schema(conf)
return core.schema.check(schema, conf)
end


local function get_request_body_table()
local body, err = core.request.get_body()
if not body then
return nil, { message = "could not get body: " .. err }
end

local body_tab, err = core.json.decode(body)
if not body_tab then
return nil, { message = "could not get parse JSON request body: ", err }
end

return body_tab
end


local function find_template(conf, template_name)
for _, template in ipairs(conf.templates) do
if template.name == template_name then
return template.template
end
end
return nil
end

function _M.rewrite(conf, ctx)
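-- Read and JSON-decode the client request body; it carries the template name
-- and the values for the template variables.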
local body_tab, err = get_request_body_table()
if not body_tab then
return 400, err
end
local template_name = body_tab.template_name
if not template_name then
return 400, { message = "template name is missing in request." }
end

local template = templates_lrucache(template_name, conf, find_template, conf, template_name)
if not template then
return 400, { message = "template: " .. template_name .. " not configured." }
end

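-- Encode the matched template once (cached) and delegate the actual placeholder
-- substitution to the body-transformer plugin's rewrite phase.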
local template_json = templates_json_lrucache(template, template, core.json.encode, template)
core.log.info("sending template to body_transformer: ", template_json)
return body_transformer.rewrite(
{
request = {
template = template_json,
input_format = "json"
}
},
ctx
)
end


return _M
1 change: 1 addition & 0 deletions conf/config.yaml.example
@@ -476,6 +476,7 @@ plugins: # plugin list (sorted by priority)
#- error-log-logger # priority: 1091
- proxy-cache # priority: 1085
- body-transformer # priority: 1080
- ai-prompt-template # priority: 1060
- proxy-mirror # priority: 1010
- proxy-rewrite # priority: 1008
- workflow # priority: 1006
1 change: 1 addition & 0 deletions docs/en/latest/config.json
@@ -91,6 +91,7 @@
"plugins/proxy-rewrite",
"plugins/grpc-transcode",
"plugins/grpc-web",
"plugins/ai-prompt-template",
"plugins/fault-injection",
"plugins/mocking",
"plugins/degraphql",
102 changes: 102 additions & 0 deletions docs/en/latest/plugins/ai-prompt-template.md
@@ -0,0 +1,102 @@
---
title: ai-prompt-template
keywords:
- Apache APISIX
- API Gateway
- Plugin
- ai-prompt-template
description: This document contains information about the Apache APISIX ai-prompt-template Plugin.
---

<!--
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
-->

## Description

The `ai-prompt-template` plugin simplifies access to AI providers and models by predefining the request format
using a template and allowing users to pass only the values for template variables.

Review suggestion: The `ai-prompt-template` plugin simplifies access to LLM providers, such as OpenAI and Anthropic, and their models by predefining the request format using a template, which only allows users to pass customized values into template variables.

## Plugin Attributes

| **Field** | **Type** | **Description** | **Required** |
| ------------------------------------- | -------- | --------------------------------------------------- | ------------ |
| `templates` | Array | An array of template objects | Yes |
| `templates.name` | String | Name of the template. | Yes |
| `templates.template.model` | String | Model of the AI Model. Example: gpt-4, gpt-3.5 | Yes |
| `templates.template.messages.role` | String | Role of the message (`system`, `user`, `assistant`) | Yes |
| `templates.template.messages.content` | String | Content of the message. | Yes |

Review suggestion (@kayx23, Aug 28, 2024) for the `templates.template.model` description: "Model of the AI Model, for example `gpt-4` or `gpt-3.5`. See your LLM provider API documentation for more available models."

## Example usage

Create a route with the `ai-prompt-template` plugin like so:

```shell
curl "http://127.0.0.1:9180/apisix/admin/routes/1" -X PUT \
-H "X-API-KEY: ${ADMIN_API_KEY}" \
-d '{
"uri": "/v1/chat/completions",
"upstream": {
"type": "roundrobin",
"nodes": {
"api.openai.com:443": 1
},
"scheme": "https",
"pass_host": "node"
},
"plugins": {
"ai-prompt-template": {
"templates": [
{
"name": "level of detail",
"template": {
"model": "gpt-4",
"messages": [
{
"role": "user",
"content": "Explain about {{ topic }} in {{ level }}."
}
]
}
}
]
}
}
}'
```

Now send a request:

```shell
curl http://127.0.0.1:9080/v1/chat/completions -i -XPOST -H 'Content-Type: application/json' -d '{
"template_name": "level of detail,
shreemaan-abhishek marked this conversation as resolved.
Show resolved Hide resolved
"topic": "psychology",
"level": "brief"
}' -H "Authorization: Bearer <your token here>"
```

Then the request body will be modified to something like this:

```json
{
"model": "some model",
"messages": [
{ "role": "user", "content": "Explain about psychology in brief." }
]
}
```
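Conceptually, the plugin fills each `{{ ... }}` placeholder in the configured template with the matching field from the request body and forwards the rendered body upstream. The substitution itself is delegated to the `body-transformer` plugin; the `render` helper in the sketch below is only a hypothetical stand-in used to illustrate the effect, assuming the `lua-cjson` library bundled with OpenResty:

```lua
-- Sketch only: a hypothetical `render` helper that mimics the effect of the
-- template substitution; in the plugin the real work is done by body-transformer.
local cjson = require("cjson.safe")

local function render(template_json, body_tab)
    -- replace each {{ name }} placeholder with the matching request body field
    return (template_json:gsub("{{%s*([%w_]+)%s*}}", function(name)
        return tostring(body_tab[name] or "")
    end))
end

local template_json = cjson.encode({
    model = "gpt-4",
    messages = {
        { role = "user", content = "Explain about {{ topic }} in {{ level }}." },
    },
})

local body = { template_name = "level of detail", topic = "psychology", level = "brief" }
print(render(template_json, body))
-- prints something like:
-- {"model":"gpt-4","messages":[{"role":"user","content":"Explain about psychology in brief."}]}
```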
1 change: 1 addition & 0 deletions t/admin/plugins.t
@@ -93,6 +93,7 @@ opa
authz-keycloak
proxy-cache
body-transformer
ai-prompt-template
proxy-mirror
proxy-rewrite
workflow