feat: ai-prompt-decorator plugin (#11515)
diff --git a/apisix/cli/config.lua b/apisix/cli/config.lua
index 7f15542..6ab10c9 100644
--- a/apisix/cli/config.lua
+++ b/apisix/cli/config.lua
@@ -214,6 +214,7 @@
"proxy-cache",
"body-transformer",
"ai-prompt-template",
+ "ai-prompt-decorator",
"proxy-mirror",
"proxy-rewrite",
"workflow",
diff --git a/apisix/plugins/ai-prompt-decorator.lua b/apisix/plugins/ai-prompt-decorator.lua
new file mode 100644
index 0000000..10b36e8
--- /dev/null
+++ b/apisix/plugins/ai-prompt-decorator.lua
@@ -0,0 +1,117 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements. See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License. You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+local core = require("apisix.core")
+local ngx = ngx
+local pairs = pairs
+local EMPTY = {}
+
+local prompt_schema = {  -- schema for a single chat message object
+    properties = {
+        role = {
+            type = "string",
+            enum = { "system", "user", "assistant" }  -- OpenAI-style chat roles
+        },
+        content = {
+            type = "string",
+            minLength = 1,  -- reject empty prompt content
+        }
+    },
+    required = { "role", "content" }
+}
+
+local prompts = {  -- an array of prompt message objects
+    type = "array",
+    items = prompt_schema
+}
+
+local schema = {
+    type = "object",
+    properties = {
+        prepend = prompts,  -- messages inserted before the request's own messages
+        append = prompts,  -- messages inserted after the request's own messages
+    },
+    anyOf = {  -- at least one of prepend/append must be configured
+        { required = { "prepend" } },
+        { required = { "append" } },
+        { required = { "append", "prepend" } },  -- NOTE(review): redundant; already satisfied by the two cases above
+    },
+}
+
+
+local _M = {
+    version = 0.1,
+    priority = 1070,  -- just below ai-prompt-template (bumped to 1071 in this change)
+    name = "ai-prompt-decorator",
+    schema = schema,
+}
+
+
+function _M.check_schema(conf)
+    return core.schema.check(schema, conf)  -- validate the user's plugin conf against the JSON schema
+end
+
+
+-- Read and JSON-decode the client request body.
+-- Returns the decoded table, or nil plus an error table usable as a response body.
+local function get_request_body_table()
+    local body, err = core.request.get_body()
+    if not body then
+        return nil, { message = "could not get body: " .. err }
+    end
+    local body_tab, err = core.json.decode(body)
+    if not body_tab then
+        return nil, { message = "could not parse JSON request body: " .. err }
+    end
+    return body_tab
+end
+
+
+-- Build the decorated message list: conf.prepend ++ body messages ++ conf.append.
+local function decorate(conf, body_tab)
+    -- deepcopy: never insert into the shared plugin conf (or the EMPTY sentinel);
+    -- the old `conf.prepend or EMPTY` accumulated request messages across requests
+    local new_messages = core.table.deepcopy(conf.prepend) or {}
+    for _, message in ipairs(body_tab.messages) do -- ipairs preserves message order
+        core.table.insert_tail(new_messages, message)
+    end
+    for _, message in ipairs(conf.append or EMPTY) do
+        core.table.insert_tail(new_messages, message)
+    end
+    body_tab.messages = new_messages
+end
+
+
+-- Rewrite phase: inject configured prompts into the JSON request body.
+function _M.rewrite(conf, ctx)
+    local body_tab, err = get_request_body_table()
+    if not body_tab then
+        return 400, err
+    end
+
+    if not body_tab.messages then
+        return 400, { message = "messages missing from request body" }
+    end
+    decorate(conf, body_tab) -- decorates body_tab in place
+
+    local new_jbody, err = core.json.encode(body_tab)
+    if not new_jbody then
+        return 500, { message = "failed to encode modified JSON request body: " .. err }
+    end
+
+    ngx.req.set_body_data(new_jbody)
+end
+
+
+return _M
diff --git a/apisix/plugins/ai-prompt-template.lua b/apisix/plugins/ai-prompt-template.lua
index 0a092c3..d2c3669 100644
--- a/apisix/plugins/ai-prompt-template.lua
+++ b/apisix/plugins/ai-prompt-template.lua
@@ -72,7 +72,7 @@
local _M = {
version = 0.1,
- priority = 1060,
+ priority = 1071,
name = "ai-prompt-template",
schema = schema,
}
diff --git a/conf/config.yaml.example b/conf/config.yaml.example
index 5d22418..17b3852 100644
--- a/conf/config.yaml.example
+++ b/conf/config.yaml.example
@@ -476,7 +476,8 @@
#- error-log-logger # priority: 1091
- proxy-cache # priority: 1085
- body-transformer # priority: 1080
- - ai-prompt-template # priority: 1060
+ - ai-prompt-template # priority: 1071
+ - ai-prompt-decorator # priority: 1070
- proxy-mirror # priority: 1010
- proxy-rewrite # priority: 1008
- workflow # priority: 1006
diff --git a/docs/en/latest/config.json b/docs/en/latest/config.json
index 0998ec7..2195688 100644
--- a/docs/en/latest/config.json
+++ b/docs/en/latest/config.json
@@ -87,6 +87,7 @@
"type": "category",
"label": "Transformation",
"items": [
+ "plugins/ai-prompt-decorator",
"plugins/response-rewrite",
"plugins/proxy-rewrite",
"plugins/grpc-transcode",
diff --git a/docs/en/latest/plugins/ai-prompt-decorator.md b/docs/en/latest/plugins/ai-prompt-decorator.md
new file mode 100644
index 0000000..44ee59e
--- /dev/null
+++ b/docs/en/latest/plugins/ai-prompt-decorator.md
@@ -0,0 +1,109 @@
+---
+title: ai-prompt-decorator
+keywords:
+ - Apache APISIX
+ - API Gateway
+ - Plugin
+ - ai-prompt-decorator
+description: This document contains information about the Apache APISIX ai-prompt-decorator Plugin.
+---
+
+<!--
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+-->
+
+## Description
+
+The `ai-prompt-decorator` plugin adds pre-configured prompts to requests sent to LLM providers, such as OpenAI and Anthropic, by prepending or appending prompt messages to the request's `messages` array.
+
+## Plugin Attributes
+
+| **Field** | **Required** | **Type** | **Description** |
+| ----------------- | --------------- | -------- | --------------------------------------------------- |
+| `prepend` | Conditionally\* | Array | An array of prompt objects to be prepended |
+| `prepend.role` | Yes | String | Role of the message (`system`, `user`, `assistant`) |
+| `prepend.content` | Yes | String | Content of the message. Minimum length: 1 |
+| `append` | Conditionally\* | Array | An array of prompt objects to be appended |
+| `append.role` | Yes | String | Role of the message (`system`, `user`, `assistant`) |
+| `append.content` | Yes | String | Content of the message. Minimum length: 1 |
+
+\* **Conditionally Required**: At least one of `prepend` or `append` must be provided.
+
+## Example usage
+
+Create a route with the `ai-prompt-decorator` plugin like so:
+
+```shell
+curl "http://127.0.0.1:9180/apisix/admin/routes/1" -X PUT \
+ -H "X-API-KEY: ${ADMIN_API_KEY}" \
+ -d '{
+ "uri": "/v1/chat/completions",
+ "plugins": {
+ "ai-prompt-decorator": {
+ "prepend":[
+ {
+ "role": "system",
+ "content": "I have exams tomorrow so explain conceptually and briefly"
+ }
+ ],
+ "append":[
+ {
+ "role": "system",
+ "content": "End the response with an analogy."
+ }
+ ]
+ }
+ },
+ "upstream": {
+ "type": "roundrobin",
+ "nodes": {
+ "api.openai.com:443": 1
+ },
+ "pass_host": "node",
+ "scheme": "https"
+ }
+ }'
+```
+
+Now send a request:
+
+```shell
+curl http://127.0.0.1:9080/v1/chat/completions -i -XPOST -H 'Content-Type: application/json' -d '{
+ "model": "gpt-4",
+ "messages": [{ "role": "user", "content": "What is TLS Handshake?" }]
+}' -H "Authorization: Bearer <your token here>"
+```
+
+Then the request body will be modified to something like this:
+
+```json
+{
+ "model": "gpt-4",
+ "messages": [
+ {
+ "role": "system",
+ "content": "I have exams tomorrow so explain conceptually and briefly"
+ },
+ { "role": "user", "content": "What is TLS Handshake?" },
+ {
+ "role": "system",
+ "content": "End the response with an analogy."
+ }
+ ]
+}
+```
diff --git a/t/admin/plugins.t b/t/admin/plugins.t
index 547b1a3..ef43ea9 100644
--- a/t/admin/plugins.t
+++ b/t/admin/plugins.t
@@ -94,6 +94,7 @@
proxy-cache
body-transformer
ai-prompt-template
+ai-prompt-decorator
proxy-mirror
proxy-rewrite
workflow
diff --git a/t/plugin/ai-prompt-decorator.t b/t/plugin/ai-prompt-decorator.t
new file mode 100644
index 0000000..15f40ee
--- /dev/null
+++ b/t/plugin/ai-prompt-decorator.t
@@ -0,0 +1,293 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+use t::APISIX 'no_plan';
+
+repeat_each(1);
+log_level('info');
+no_root_location();
+no_shuffle();
+
+add_block_preprocessor(sub {
+ my ($block) = @_;
+
+ if (!$block->request) {
+ $block->set_value("request", "GET /t");
+ }
+
+});
+
+run_tests();
+
+__DATA__
+
+=== TEST 1: sanity: configure prepend only
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "uri": "/echo",
+ "upstream": {
+ "type": "roundrobin",
+ "nodes": {
+ "127.0.0.1:1980": 1
+ }
+ },
+ "plugins": {
+ "ai-prompt-decorator": {
+ "prepend":[
+ {
+ "role": "system",
+ "content": "some content"
+ }
+ ]
+ }
+ }
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+}
+--- response_body
+passed
+
+
+
+=== TEST 2: test prepend
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body, actual_resp = t('/echo',
+ ngx.HTTP_POST,
+ [[{
+ "messages": [
+ { "role": "system", "content": "You are a mathematician" },
+ { "role": "user", "content": "What is 1+1?" }
+ ]
+ }]],
+ [[{
+ "messages": [
+ { "role": "system", "content": "some content" },
+ { "role": "system", "content": "You are a mathematician" },
+ { "role": "user", "content": "What is 1+1?" }
+ ]
+ }]]
+ )
+ if code >= 300 then
+ ngx.status = code
+ ngx.say("failed")
+ return
+ end
+ ngx.say("passed")
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 3: sanity: configure append only
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "uri": "/echo",
+ "upstream": {
+ "type": "roundrobin",
+ "nodes": {
+ "127.0.0.1:1980": 1
+ }
+ },
+ "plugins": {
+ "ai-prompt-decorator": {
+ "append":[
+ {
+ "role": "system",
+ "content": "some content"
+ }
+ ]
+ }
+ }
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+}
+--- response_body
+passed
+
+
+
+=== TEST 4: test append
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body, actual_resp = t('/echo',
+ ngx.HTTP_POST,
+ [[{
+ "messages": [
+ { "role": "system", "content": "You are a mathematician" },
+ { "role": "user", "content": "What is 1+1?" }
+ ]
+ }]],
+ [[{
+ "messages": [
+ { "role": "system", "content": "You are a mathematician" },
+ { "role": "user", "content": "What is 1+1?" },
+ { "role": "system", "content": "some content" }
+ ]
+ }]]
+ )
+ if code >= 300 then
+ ngx.status = code
+ ngx.say("failed")
+ return
+ end
+ ngx.say("passed")
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 5: sanity: configure append and prepend both
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "uri": "/echo",
+ "upstream": {
+ "type": "roundrobin",
+ "nodes": {
+ "127.0.0.1:1980": 1
+ }
+ },
+ "plugins": {
+ "ai-prompt-decorator": {
+ "append":[
+ {
+ "role": "system",
+ "content": "some append"
+ }
+ ],
+ "prepend":[
+ {
+ "role": "system",
+ "content": "some prepend"
+ }
+ ]
+ }
+ }
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+}
+--- response_body
+passed
+
+
+
+=== TEST 6: test append and prepend both
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body, actual_resp = t('/echo',
+ ngx.HTTP_POST,
+ [[{
+ "messages": [
+ { "role": "system", "content": "You are a mathematician" },
+ { "role": "user", "content": "What is 1+1?" }
+ ]
+ }]],
+ [[{
+ "messages": [
+ { "role": "system", "content": "some prepend" },
+ { "role": "system", "content": "You are a mathematician" },
+ { "role": "user", "content": "What is 1+1?" },
+ { "role": "system", "content": "some append" }
+ ]
+ }]]
+ )
+ if code >= 300 then
+ ngx.status = code
+ ngx.say("failed")
+ return
+ end
+ ngx.say("passed")
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 7: sanity: configure neither append nor prepend should fail
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "uri": "/echo",
+ "upstream": {
+ "type": "roundrobin",
+ "nodes": {
+ "127.0.0.1:1980": 1
+ }
+ },
+ "plugins": {
+ "ai-prompt-decorator": {
+ }
+ }
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+}
+--- response_body_eval
+qr/.*failed to check the configuration of plugin ai-prompt-decorator err.*/
+--- error_code: 400