@@ -153,21 +153,21 @@ local config = {
 	},
 	{
 		provider = "anthropic",
-		name = "ChatClaude-3-5-Sonnet",
+		name = "ChatClaude-3-7-Sonnet",
 		chat = true,
 		command = false,
 		-- string with model name or table with model name and parameters
-		model = { model = "claude-3-5-sonnet-latest", temperature = 0.8, top_p = 1 },
+		model = { model = "claude-3-7-sonnet-latest", temperature = 0.8, top_p = 1 },
 		-- system prompt (use this to specify the persona/role of the AI)
 		system_prompt = require("gp.defaults").chat_system_prompt,
 	},
 	{
 		provider = "anthropic",
-		name = "ChatClaude-3-Haiku",
+		name = "ChatClaude-3-5-Haiku",
 		chat = true,
 		command = false,
 		-- string with model name or table with model name and parameters
-		model = { model = "claude-3-haiku-latest", temperature = 0.8, top_p = 1 },
+		model = { model = "claude-3-5-haiku-latest", temperature = 0.8, top_p = 1 },
 		-- system prompt (use this to specify the persona/role of the AI)
 		system_prompt = require("gp.defaults").chat_system_prompt,
 	},
@@ -251,20 +251,20 @@ local config = {
 	},
 	{
 		provider = "anthropic",
-		name = "CodeClaude-3-5-Sonnet",
+		name = "CodeClaude-3-7-Sonnet",
 		chat = false,
 		command = true,
 		-- string with model name or table with model name and parameters
-		model = { model = "claude-3-5-sonnet-latest", temperature = 0.8, top_p = 1 },
+		model = { model = "claude-3-7-sonnet-latest", temperature = 0.8, top_p = 1 },
 		system_prompt = require("gp.defaults").code_system_prompt,
 	},
 	{
 		provider = "anthropic",
-		name = "CodeClaude-3-Haiku",
+		name = "CodeClaude-3-5-Haiku",
 		chat = false,
 		command = true,
 		-- string with model name or table with model name and parameters
-		model = { model = "claude-3-haiku-latest", temperature = 0.8, top_p = 1 },
+		model = { model = "claude-3-5-haiku-latest", temperature = 0.8, top_p = 1 },
 		system_prompt = require("gp.defaults").code_system_prompt,
 	},
 	{