Skip to content

Commit

Permalink
chore: change AI model for CodeCompanion
Browse files Browse the repository at this point in the history
  • Loading branch information
shubham-cpp committed Jan 2, 2025
1 parent 84a5030 commit 10a12f4
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 35 deletions.
1 change: 0 additions & 1 deletion .config/ghostty/config
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@ keybind = alt+shift+j=resize_split:down,10
keybind = alt+shift+k=resize_split:up,10
keybind = alt+shift+l=resize_split:right,10

keybind = alt+d=close_window
keybind = alt+x=close_surface
keybind = alt+c=close_surface
keybind = alt+s=new_split:down
Expand Down
81 changes: 47 additions & 34 deletions .config/nvim/lua/plugins/avante.lua
Original file line number Diff line number Diff line change
@@ -1,3 +1,38 @@
---Build the Avante opts table used as a fallback when no Gemini API key is
---available: requests are routed to a locally running Ollama server instead.
---@return table # Avante configuration with a single `ollama` vendor
local function get_ollama_setup()
  local ollama_setup = {
    -- This table is only returned when GEMINI_API_KEY is missing, so the
    -- active provider must be the local vendor defined below; leaving it as
    -- 'gemini' would still route to Gemini without credentials.
    ---@type Provider
    provider = 'ollama',
    vendors = {
      ---@type AvanteProvider
      ollama = {
        ['local'] = true,
        -- NOTE(review): endpoint carries no scheme; avante examples use
        -- 'http://127.0.0.1:11434/v1' — confirm curl resolves this form.
        endpoint = '127.0.0.1:11434/v1',
        model = 'llama3.2',
        -- Translate avante's request opts into curl arguments for Ollama's
        -- OpenAI-compatible /chat/completions endpoint.
        parse_curl_args = function(opts, code_opts)
          return {
            url = opts.endpoint .. '/chat/completions',
            headers = {
              ['Accept'] = 'application/json',
              ['Content-Type'] = 'application/json',
            },
            body = {
              model = opts.model,
              messages = require('avante.providers').copilot.parse_message(code_opts), -- you can make your own message, but this is very advanced
              max_tokens = 2048,
              stream = true,
            },
          }
        end,
        -- Ollama's streaming chunks are OpenAI-shaped, so delegate parsing
        -- to the built-in openai provider.
        parse_response_data = function(data_stream, event_state, opts)
          require('avante.providers').openai.parse_response(data_stream, event_state, opts)
        end,
      },
    },
  }
  return ollama_setup
end

---@type LazySpec
return {
{
Expand All @@ -7,39 +42,8 @@ return {
build = 'make',
opts = function()
vim.env.GEMINI_API_KEY = require('plugins.config.util').get_age_credentials 'gemini_api.age'
local ollama_setup = {
-- add any opts here
---@type Provider
provider = 'gemini',
vendors = {
---@type AvanteProvider
ollama = {
['local'] = true,
endpoint = '127.0.0.1:11434/v1',
model = 'llama3.2',
parse_curl_args = function(opts, code_opts)
return {
url = opts.endpoint .. '/chat/completions',
headers = {
['Accept'] = 'application/json',
['Content-Type'] = 'application/json',
},
body = {
model = opts.model,
messages = require('avante.providers').copilot.parse_message(code_opts), -- you can make your own message, but this is very advanced
max_tokens = 2048,
stream = true,
},
}
end,
parse_response_data = function(data_stream, event_state, opts)
require('avante.providers').openai.parse_response(data_stream, event_state, opts)
end,
},
},
}
if not vim.env.GEMINI_API_KEY then
return ollama_setup
return get_ollama_setup()
end
return {
---@type Provider
Expand Down Expand Up @@ -131,8 +135,8 @@ return {
display = { chat = { render_headers = false } },
strategies = {
--NOTE: Change the adapter as required
chat = { adapter = 'openai_compatible' },
inline = { adapter = 'openai_compatible' },
chat = { adapter = 'gemini' },
inline = { adapter = 'gemini' },
},
adapters = {
openai_compatible = function()
Expand All @@ -150,6 +154,15 @@ return {
},
})
end,
gemini = function()
return require('codecompanion.adapters').extend('gemini', {
schema = {
model = {
default = 'gemini-2.0-flash-exp',
},
},
})
end,
},
})
end,
Expand Down

0 comments on commit 10a12f4

Please sign in to comment.