Skip to content

Commit 77ade98

Browse files
hkdeman, claude[bot], and modelorona
authored
feat: implement OpenAI compatible API provider support (#500)
* feat: implement OpenAI compatible API provider support - Add WHODB_OPENAI_COMPATIBLE_LABEL environment variable for custom labeling - Create OpenAI compatible provider when API key and endpoint are configured - Add OpenAI-Compatible LLM type with request/response handling - Support custom models from WHODB_CUSTOM_MODELS environment variable - Reuse existing OpenAI-compatible API structure without requiring new API code Fixes #496 Co-authored-by: modelorona <modelorona@users.noreply.github.com> * fix: handle SSE format in OpenAI-compatible streaming responses - Strip "data: " prefix from SSE lines before JSON parsing - Handle "[DONE]" control message to terminate stream properly - Fixes JSON unmarshaling errors in streaming responses Co-authored-by: modelorona <modelorona@users.noreply.github.com> * fix: prevent truncated streaming responses in OpenAI-compatible client - Send accumulated response before breaking on "[DONE]" message - Ensure complete response is returned when stream ends normally - Fixes issue where last part of response could be lost Co-authored-by: modelorona <modelorona@users.noreply.github.com> * add openai compatible url func * move openai compatible to chatgpt logic --------- Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com> Co-authored-by: modelorona <modelorona@users.noreply.github.com> Co-authored-by: modelorona <angel.hristozov@gmail.com>
1 parent e68ed14 commit 77ade98

File tree

5 files changed

+66
-9
lines changed

5 files changed

+66
-9
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
/.idea
22
/.vscode
33
/dev/elasticsearch
4+
.DS_Store

core/src/env/env.go

Lines changed: 22 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@ var OpenAIEndpoint = os.Getenv("WHODB_OPENAI_ENDPOINT")
4242

4343
var OpenAICompatibleEndpoint = os.Getenv("WHODB_OPENAI_COMPATIBLE_ENDPOINT")
4444
var OpenAICompatibleAPIKey = os.Getenv("WHODB_OPENAI_COMPATIBLE_API_KEY")
45+
var OpenAICompatibleLabel = os.Getenv("WHODB_OPENAI_COMPATIBLE_LABEL")
4546

4647
var CustomModels = common.FilterList(strings.Split(os.Getenv("WHODB_CUSTOM_MODELS"), ","), func(item string) bool {
4748
return strings.TrimSpace(item) != ""
@@ -58,6 +59,7 @@ type ChatProvider struct {
5859
ProviderId string
5960
}
6061

62+
// TODO: need to make this more dynamic so users can configure more than one key for each provider
6163
func GetConfiguredChatProviders() []ChatProvider {
6264
providers := []ChatProvider{}
6365

@@ -79,6 +81,19 @@ func GetConfiguredChatProviders() []ChatProvider {
7981
})
8082
}
8183

84+
if len(OpenAICompatibleAPIKey) > 0 && len(OpenAICompatibleEndpoint) > 0 {
85+
label := OpenAICompatibleLabel
86+
if label == "" {
87+
label = "OpenAI-Compatible API"
88+
}
89+
providers = append(providers, ChatProvider{
90+
Type: "OpenAI-Compatible",
91+
APIKey: OpenAICompatibleAPIKey,
92+
Endpoint: GetOpenAICompatibleEndpoint(),
93+
ProviderId: "openai-compatible-1",
94+
})
95+
}
96+
8297
providers = append(providers, ChatProvider{
8398
Type: "Ollama",
8499
APIKey: "",
@@ -115,16 +130,19 @@ func GetAnthropicEndpoint() string {
115130
}
116131

117132
func GetOpenAIEndpoint() string {
118-
// If the OpenAI compatible endpoint is set, use it. Otherwise, use the OpenAI endpoint.
119-
if OpenAICompatibleEndpoint != "" {
120-
return OpenAICompatibleEndpoint
121-
}
122133
if OpenAIEndpoint != "" {
123134
return OpenAIEndpoint
124135
}
125136
return "https://api.openai.com/v1"
126137
}
127138

139+
func GetOpenAICompatibleEndpoint() string {
140+
if OpenAICompatibleEndpoint != "" && OpenAICompatibleAPIKey != "" {
141+
return OpenAICompatibleEndpoint
142+
}
143+
return "https://api.openai.com/v1"
144+
}
145+
128146
func GetClideyQuickContainerImage() string {
129147
image := os.Getenv("CLIDEY_QUICK_CONTAINER_IMAGE")
130148
if len(image) == 0 {

core/src/llm/chatgpt_client.go

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ import (
2727
"github.com/clidey/whodb/core/src/env"
2828
)
2929

30-
func prepareChatGPTRequest(c *LLMClient, prompt string, model LLMModel, receiverChan *chan string) (string, []byte, map[string]string, error) {
30+
func prepareChatGPTRequest(c *LLMClient, prompt string, model LLMModel, receiverChan *chan string, isOpenAICompatible bool) (string, []byte, map[string]string, error) {
3131
requestBody, err := json.Marshal(map[string]interface{}{
3232
"model": string(model),
3333
"messages": []map[string]string{{"role": "user", "content": prompt}},
@@ -37,6 +37,9 @@ func prepareChatGPTRequest(c *LLMClient, prompt string, model LLMModel, receiver
3737
return "", nil, nil, err
3838
}
3939
url := fmt.Sprintf("%v/chat/completions", env.GetOpenAIEndpoint())
40+
if isOpenAICompatible {
41+
url = fmt.Sprintf("%v/chat/completions", env.GetOpenAICompatibleEndpoint())
42+
}
4043
headers := map[string]string{
4144
"Authorization": fmt.Sprintf("Bearer %s", c.APIKey),
4245
"Content-Type": "application/json",

core/src/llm/llm_client.go

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,10 @@ import (
2626
type LLMType string
2727

2828
const (
29-
Ollama_LLMType LLMType = "Ollama"
30-
ChatGPT_LLMType LLMType = "ChatGPT"
31-
Anthropic_LLMType LLMType = "Anthropic"
29+
Ollama_LLMType LLMType = "Ollama"
30+
ChatGPT_LLMType LLMType = "ChatGPT"
31+
Anthropic_LLMType LLMType = "Anthropic"
32+
OpenAICompatible_LLMType LLMType = "OpenAI-Compatible"
3233
)
3334

3435
type LLMModel string
@@ -49,9 +50,11 @@ func (c *LLMClient) Complete(prompt string, model LLMModel, receiverChan *chan s
4950
case Ollama_LLMType:
5051
url, requestBody, headers, err = prepareOllamaRequest(prompt, model)
5152
case ChatGPT_LLMType:
52-
url, requestBody, headers, err = prepareChatGPTRequest(c, prompt, model, receiverChan)
53+
url, requestBody, headers, err = prepareChatGPTRequest(c, prompt, model, receiverChan, false)
5354
case Anthropic_LLMType:
5455
url, requestBody, headers, err = prepareAnthropicRequest(c, prompt, model)
56+
case OpenAICompatible_LLMType:
57+
url, requestBody, headers, err = prepareChatGPTRequest(c, prompt, model, receiverChan, true)
5558
default:
5659
return nil, errors.New("unsupported LLM type")
5760
}
@@ -88,6 +91,8 @@ func (c *LLMClient) GetSupportedModels() ([]string, error) {
8891
url, headers = prepareChatGPTModelsRequest(c.APIKey)
8992
case Anthropic_LLMType:
9093
return getAnthropicModels(c.APIKey)
94+
case OpenAICompatible_LLMType:
95+
return getOpenAICompatibleModels()
9196
default:
9297
return nil, errors.New("unsupported LLM type")
9398
}
@@ -118,6 +123,8 @@ func (c *LLMClient) parseResponse(body io.ReadCloser, receiverChan *chan string)
118123
return parseChatGPTResponse(body, receiverChan, &responseBuilder)
119124
case Anthropic_LLMType:
120125
return parseAnthropicResponse(body, receiverChan, &responseBuilder)
126+
case OpenAICompatible_LLMType:
127+
return parseChatGPTResponse(body, receiverChan, &responseBuilder)
121128
default:
122129
return nil, errors.New("unsupported LLM type")
123130
}
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
/*
2+
* Copyright 2025 Clidey, Inc.
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* http://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
package llm
18+
19+
import (
20+
"github.com/clidey/whodb/core/src/env"
21+
)
22+
23+
func getOpenAICompatibleModels() ([]string, error) {
24+
if len(env.CustomModels) > 0 {
25+
return env.CustomModels, nil
26+
}
27+
return []string{}, nil
28+
}

0 commit comments

Comments
 (0)