
Commit 8e9a6ed

committed
f
1 parent a61a68b commit 8e9a6ed

2 files changed: +20 −13 lines changed


apisix/plugins/ai-request-rewrite.lua

Lines changed: 12 additions & 5 deletions
@@ -49,7 +49,7 @@ local model_options_schema = {
     properties = {
         model = {
             type = "string",
-            description = "Model to execute."
+            description = "Model to execute. Examples: \"gpt-3.5-turbo\" for openai, \"deepseek-chat\" for deepseek, or \"qwen-turbo\" for openai-compatible services"
         }
     },
     additionalProperties = true
@@ -128,15 +128,22 @@ end


 local function parse_llm_response(res_body)
-    local response_table = core.json.decode(res_body)
+    local response_table, err = core.json.decode(res_body)

-    if not response_table then return nil, "failed to decode llm response" end
+    if not response_table then
+        return nil, "failed to decode llm response, err: " .. err
+    end

     if not response_table.choices then
         return nil, "'choices' not in llm response"
     end

-    return response_table.choices[1].message.content, nil
+    local message = response_table.choices[1].message
+    if not message then
+        return nil, "'message' not in llm response choices"
+    end
+
+    return message.content, nil
 end


@@ -185,7 +192,7 @@ function _M.access(conf, ctx)

     -- Handle LLM response
     if res.status >= 400 then
-        core.log.error("LLM service returned error status: ", res.status)
+        core.log.error("LLM service returned error status: ", res.status, ", err: ", err)
         return internal_server_error
     end

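
For context, the hardened parser introduced above can be exercised outside the gateway with a few lines of plain Lua. The sketch below is illustrative only: it assumes lua-cjson's `cjson.safe` module as a stand-in for APISIX's `core.json`, and the sample response bodies are made up. Before this change, a response whose first choice lacked a `message` field would cause the plugin to index a nil value.

```lua
-- Minimal standalone sketch of the hardened parsing logic above.
-- Assumption: cjson.safe stands in for APISIX's core.json so the example runs outside the gateway.
local cjson = require("cjson.safe")

local function parse_llm_response(res_body)
    local response_table, err = cjson.decode(res_body)
    if not response_table then
        return nil, "failed to decode llm response, err: " .. err
    end
    if not response_table.choices then
        return nil, "'choices' not in llm response"
    end
    local message = response_table.choices[1].message
    if not message then
        return nil, "'message' not in llm response choices"
    end
    return message.content, nil
end

-- Well-formed OpenAI-style body: returns the rewritten content.
print(parse_llm_response('{"choices":[{"message":{"content":"rewritten body"}}]}'))
-- First choice carries no "message" field: now reported as an error instead of a nil index.
print(parse_llm_response('{"choices":[{"finish_reason":"stop"}]}'))
-- Invalid JSON: the decode error is propagated into the returned message.
print(parse_llm_response('not json'))
```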

docs/en/latest/plugins/ai-request-rewrite.md

Lines changed: 8 additions & 8 deletions
@@ -5,7 +5,7 @@ keywords:
   - API Gateway
   - Plugin
   - ai-request-rewrite
-description: This document contains information about the Apache APISIX ai-request-rewrite Plugin.
+description: The ai-request-rewrite plugin intercepts client requests before they are forwarded to the upstream service. It sends a predefined prompt, along with the original request body, to a specified LLM service. The LLM processes the input and returns a modified request body, which is then used for the upstream request. This allows dynamic transformation of API requests based on AI-generated content.
 ---

 <!--
@@ -29,20 +29,20 @@ description: This document contains information about the Apache APISIX ai-reque

 ## Description

-The `ai-request-rewrite` plugin leverages predefined prompts and AI services to intelligently modify client requests, enabling AI-powered content transformation before forwarding to upstream services.
+The `ai-request-rewrite` plugin intercepts client requests before they are forwarded to the upstream service. It sends a predefined prompt, along with the original request body, to a specified LLM service. The LLM processes the input and returns a modified request body, which is then used for the upstream request. This allows dynamic transformation of API requests based on AI-generated content.

 ## Plugin Attributes

 | **Field** | **Required** | **Type** | **Description** |
 | ------------------------- | ------------ | -------- | ------------------------------------------------------------------------------------ |
-| prompt | Yes | String | The prompt send to AI service. |
-| provider | Yes | String | Type of the AI service. |
+| prompt | Yes | String | The prompt sent to the LLM service. |
+| provider | Yes | String | Name of the LLM service. Available options: openai, deepseek and openai-compatible |
 | auth | Yes | Object | Authentication configuration |
 | auth.header | No | Object | Authentication headers. Key must match pattern `^[a-zA-Z0-9._-]+$`. |
 | auth.query | No | Object | Authentication query parameters. Key must match pattern `^[a-zA-Z0-9._-]+$`. |
 | options | No | Object | Key/value settings for the model |
-| options.model | No | String | Model to execute. |
-| override.endpoint | No | String | To be specified to override the endpoint of the AI service |
+| options.model | No | String | Model to execute. Examples: "gpt-3.5-turbo" for openai, "deepseek-chat" for deepseek, or "qwen-turbo" for openai-compatible services |
+| override.endpoint | No | String | To be specified to override the endpoint of the LLM service. |
 | timeout | No | Integer | Timeout in milliseconds for requests to AI service. Range: 1 - 60000. Default: 3000 |
 | keepalive | No | Boolean | Enable keepalive for requests to AI service. Default: true |
 | keepalive_timeout | No | Integer | Keepalive timeout in milliseconds for requests to AI service. Minimum: 1000. Default: 60000 |
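
To make the attribute table above concrete, the plugin block of a route configuration might look like the sketch below. It is not taken from this commit: the prompt text, API key placeholder, and chosen values are hypothetical, and only fields listed in the table are used.

```json
{
  "plugins": {
    "ai-request-rewrite": {
      "prompt": "Rewrite the request body and mask any sensitive values",
      "provider": "openai",
      "auth": {
        "header": {
          "Authorization": "Bearer <your-api-key>"
        }
      },
      "options": {
        "model": "gpt-3.5-turbo"
      },
      "timeout": 3000,
      "keepalive": true
    }
  }
}
```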
@@ -99,7 +99,7 @@ curl "http://127.0.0.1:9080/anything" \
 }'
 ```

-The request body for AI Service is as follows:
+The request body sent to the LLM service is as follows:

 ```json
 {
@@ -117,7 +117,7 @@ The request body for AI Service is as follows:

 ```

-The upstream service will receive a request like this:
+The LLM processes the input and returns a modified request body, which replaces detected sensitive values with a masked format and is then used for the upstream request:

 ```json
 {
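
Both files revolve around the OpenAI-style chat completion response returned by the LLM service. For reference, an abbreviated, hypothetical example of that response shape is shown below; the field names follow the standard chat completion format, and the content value is invented. The plugin reads `choices[1].message.content` from this structure and uses it as the new upstream request body, and the added guards return a descriptive error when `choices` or `message` is missing.

```json
{
  "choices": [
    {
      "index": 0,
      "message": {
        "role": "assistant",
        "content": "{\"note\": \"rewritten request body with sensitive values masked\"}"
      },
      "finish_reason": "stop"
    }
  ]
}
```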
