
Commit 8b448c0

feat: ollama and perplexity with llama3.1-8B
Perplexity is deprecating a lot of models on August 12, 2024.
1 parent: 53278ba

File tree: 1 file changed (+16 −17)

lua/gp/config.lua

Lines changed: 16 additions & 17 deletions
@@ -142,11 +142,11 @@ local config = {
   },
   {
     provider = "pplx",
-    name = "ChatPerplexityMixtral",
+    name = "ChatPerplexityLlama3.1-8B",
     chat = true,
     command = false,
     -- string with model name or table with model name and parameters
-    model = { model = "mixtral-8x7b-instruct", temperature = 1.1, top_p = 1 },
+    model = { model = "llama-3.1-sonar-small-128k-chat", temperature = 1.1, top_p = 1 },
     -- system prompt (use this to specify the persona/role of the AI)
     system_prompt = require("gp.defaults").chat_system_prompt,
   },
@@ -172,13 +172,15 @@ local config = {
   },
   {
     provider = "ollama",
-    name = "ChatOllamaLlama3",
+    name = "ChatOllamaLlama3.1-8B",
     chat = true,
     command = false,
     -- string with model name or table with model name and parameters
     model = {
-      model = "llama3",
-      num_ctx = 8192,
+      model = "llama3.1",
+      temperature = 0.6,
+      top_p = 1,
+      min_p = 0.05,
     },
     -- system prompt (use this to specify the persona/role of the AI)
     system_prompt = "You are a general AI assistant.",
@@ -223,7 +225,7 @@ local config = {
     name = "CodeCopilot",
     chat = false,
     command = true,
-    -- string with the Copilot engine name or table with engine name and parameters if applicable
+    -- string with model name or table with model name and parameters
     model = { model = "gpt-4o", temperature = 0.8, top_p = 1, n = 1 },
     -- system prompt (use this to specify the persona/role of the AI)
     system_prompt = require("gp.defaults").code_system_prompt,
@@ -239,11 +241,11 @@ local config = {
   },
   {
     provider = "pplx",
-    name = "CodePerplexityMixtral",
+    name = "CodePerplexityLlama3.1-8B",
     chat = false,
     command = true,
     -- string with model name or table with model name and parameters
-    model = { model = "mixtral-8x7b-instruct", temperature = 0.8, top_p = 1 },
+    model = { model = "llama-3.1-sonar-small-128k-chat", temperature = 0.8, top_p = 1 },
     system_prompt = require("gp.defaults").code_system_prompt,
   },
   {
@@ -266,21 +268,18 @@ local config = {
   },
   {
     provider = "ollama",
-    name = "CodeOllamaLlama3",
+    name = "CodeOllamaLlama3.1-8B",
     chat = false,
     command = true,
-    -- string with the Copilot engine name or table with engine name and parameters if applicable
+    -- string with model name or table with model name and parameters
     model = {
-      model = "llama3",
-      temperature = 1.9,
+      model = "llama3.1",
+      temperature = 0.4,
       top_p = 1,
-      num_ctx = 8192,
+      min_p = 0.05,
     },
     -- system prompt (use this to specify the persona/role of the AI)
-    system_prompt = "You are an AI working as a code editor providing answers.\n\n"
-      .. "Use 4 SPACES FOR INDENTATION.\n"
-      .. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
-      .. "START AND END YOUR ANSWER WITH:\n\n```",
+    system_prompt = require("gp.defaults").code_system_prompt,
   },
 },
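
For reference, a minimal sketch of how a user config might pick up the updated Ollama chat agent from this diff. It assumes gp.nvim's require("gp").setup() entry point and that user-supplied agents entries use the same fields shown above (provider, name, chat, command, model, system_prompt); treat the exact merge behavior as an assumption rather than documented API.

    -- Hedged sketch: assumes require("gp").setup() accepts an `agents` list
    -- whose entries mirror the default agent fields shown in this diff.
    require("gp").setup({
      agents = {
        {
          provider = "ollama",
          name = "ChatOllamaLlama3.1-8B",
          chat = true,
          command = false,
          -- table with model name and sampling parameters, mirroring the new default
          model = {
            model = "llama3.1",
            temperature = 0.6,
            top_p = 1,
            min_p = 0.05,
          },
          system_prompt = "You are a general AI assistant.",
        },
      },
    })

Once defined, such an agent can typically be selected at runtime through gp.nvim's agent-switching command (e.g. :GpAgent ChatOllamaLlama3.1-8B); the exact invocation is assumed from the plugin's interface and is not part of this diff.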
