@@ -70,10 +70,10 @@ request_to_completions() {
 		-H 'Content-Type: application/json' \
 		-H "Authorization: Bearer $OPENAI_KEY" \
 		-d '{
-			"model": "'"$MODEL"'",
-			"prompt": "'"$prompt"'",
-			"max_tokens": '$MAX_TOKENS',
-			"temperature": '$TEMPERATURE'
+			"model": "'"$MODEL"'",
+			"prompt": "'"$prompt"'",
+			"max_tokens": '$MAX_TOKENS',
+			"temperature": '$TEMPERATURE'
 		}'
 }

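The -d payload in this hunk uses the usual quote-alternation trick for splicing shell variables into a single-quoted JSON template: the single quote is closed, the variable is expanded inside double quotes (so spaces survive intact), and the single quote is reopened. A minimal standalone sketch of the same pattern, with placeholder values in place of the script's configuration:

MODEL="text-davinci-003"   # placeholder values, not the script's defaults
prompt="Say hello"
MAX_TOKENS=64
TEMPERATURE=0.7

# '"$MODEL"' = close the single quote, expand $MODEL in double quotes, reopen the single quote.
body='{
	"model": "'"$MODEL"'",
	"prompt": "'"$prompt"'",
	"max_tokens": '$MAX_TOKENS',
	"temperature": '$TEMPERATURE'
}'
printf '%s\n' "$body"
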
@@ -86,9 +86,9 @@ request_to_image() {
 		-H 'Content-Type: application/json' \
 		-H "Authorization: Bearer $OPENAI_KEY" \
 		-d '{
-			"prompt": "'"${prompt#*image:}"'",
-			"n": 1,
-			"size": "'"$SIZE"'"
+			"prompt": "'"${prompt#*image:}"'",
+			"n": 1,
+			"size": "'"$SIZE"'"
 		}')
 }

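${prompt#*image:} is plain parameter expansion: it deletes the shortest leading match of the pattern *image:, so only the text after the image: trigger is sent to the images endpoint. A quick illustration with a made-up prompt:

prompt="image: a watercolor lighthouse at dusk"
# Strip everything up to and including the first "image:".
printf '%s\n' "${prompt#*image:}"
# -> " a watercolor lighthouse at dusk" (the leading space is kept)
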
@@ -101,14 +101,14 @@ request_to_chat() {
 		-H 'Content-Type: application/json' \
 		-H "Authorization: Bearer $OPENAI_KEY" \
 		-d '{
-			"model": "'"$MODEL"'",
-			"messages": [
-				{"role": "system", "content": "'"$SYSTEM_PROMPT"'"},
-				'"$message"'
-			],
-			"max_tokens": '$MAX_TOKENS',
-			"temperature": '$TEMPERATURE'
-		}'
+			"model": "'"$MODEL"'",
+			"messages": [
+				{"role": "system", "content": "'"$SYSTEM_PROMPT"'"},
+				'"$message"'
+			],
+			"max_tokens": '$MAX_TOKENS',
+			"temperature": '$TEMPERATURE'
+		}'
 }

 # build chat context before each request for /completions (all models except
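$message is spliced into the messages array with the same quoting trick; it is not shown in this hunk, but from its position it must expand to one or more comma-separated role/content objects that have already been escaped. A hedged sketch of the expanded body, assuming $message carries a single user turn and using a placeholder model name:

SYSTEM_PROMPT="You are a helpful assistant."
# Assumption: $message already holds escaped, comma-separated chat turns.
message='{"role": "user", "content": "What does jq do?"}'

body='{
	"model": "gpt-3.5-turbo",
	"messages": [
		{"role": "system", "content": "'"$SYSTEM_PROMPT"'"},
		'"$message"'
	],
	"max_tokens": 256,
	"temperature": 0.7
}'
printf '%s\n' "$body"
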
@@ -125,7 +125,7 @@ build_chat_context() {
 }

 escape(){
-	echo "$1" | jq -Rrs 'tojson[1:-1]'
+	echo "$1" | jq -Rrs 'tojson[1:-1]'
 }

 # maintain chat context function for /completions (all models except
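The escape helper leans on jq to JSON-escape arbitrary user text: -R reads raw input, -s slurps it into a single string, tojson turns it into a quoted JSON string, and the [1:-1] slice drops the surrounding quotes so the result can be placed directly between existing quotes in the request body. For example:

escape(){
	echo "$1" | jq -Rrs 'tojson[1:-1]'
}

escape 'He said "hi"
on two lines'
# -> He said \"hi\"\non two lines\n   (the trailing \n comes from echo)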