From 694a05c24c82abe4ebe717042d104da9dedccc87 Mon Sep 17 00:00:00 2001 From: Benjamin Michotte Date: Thu, 30 May 2024 12:21:40 +0200 Subject: [PATCH] add support for Codestral by Mistral (#17) --- README.md | 31 ++++++++++++++-- lua/cmp_ai/backends/codestral.lua | 59 +++++++++++++++++++++++++++++++ 2 files changed, 88 insertions(+), 2 deletions(-) create mode 100644 lua/cmp_ai/backends/codestral.lua diff --git a/README.md b/README.md index e648abc..b2fd0e5 100644 --- a/README.md +++ b/README.md @@ -5,14 +5,14 @@ AI source for [hrsh7th/nvim-cmp](https://github.com/hrsh7th/nvim-cmp) This is a general purpose AI source for `cmp`, easily adapted to any restapi supporting remote code completion. -For now, HuggingFace SantaCoder, OpenAI Chat and Google Bard are implemeted. +For now, HuggingFace SantaCoder, OpenAI Chat, Codestral and Google Bard are implemented. ## Install ### Dependencies - You will need `plenary.nvim` to use this plugin. -- For using OpenAI or HuggingFace, you will also need `curl`. +- For using Codestral, OpenAI or HuggingFace, you will also need `curl`. - For using Google Bard, you will need [dsdanielpark/Bard-API](https://github.com/dsdanielpark/Bard-API). ### Using a plugin manager @@ -93,6 +93,33 @@ environment, `OPENAI_API_KEY`. Available models for OpenAI are `gpt-4` and `gpt-3.5-turbo`. +To use Codestral: + +```lua +local cmp_ai = require('cmp_ai.config') + +cmp_ai:setup({ + max_lines = 1000, + provider = 'Codestral', + provider_options = { + model = 'codestral-latest', + }, + notify = true, + notify_callback = function(msg) + vim.notify(msg) + end, + run_on_every_keystroke = true, + ignored_file_types = { + -- default is not to ignore + -- uncomment to ignore in lua: + -- lua = true + }, +}) +``` + +You will also need to make sure you have the Codestral api key in your +environment, `CODESTRAL_API_KEY`. 
+
 To use Google Bard:
 
 ```lua
diff --git a/lua/cmp_ai/backends/codestral.lua b/lua/cmp_ai/backends/codestral.lua
new file mode 100644
index 0000000..9bc8b40
--- /dev/null
+++ b/lua/cmp_ai/backends/codestral.lua
@@ -0,0 +1,68 @@
+local requests = require('cmp_ai.requests')
+
+-- Endpoint for Mistral's Codestral fill-in-the-middle (FIM) completion API.
+local BASE_URL = 'https://codestral.mistral.ai/v1/fim/completions'
+
+local Codestral = requests:new(nil)
+
+--- Create a new Codestral backend instance.
+-- @param o optional table to turn into the instance
+-- @param params request parameters merged over the defaults below
+function Codestral:new(o, params)
+  o = o or {}
+  setmetatable(o, self)
+  self.__index = self
+
+  -- 'keep' makes caller-supplied params win over these defaults.
+  o.params = vim.tbl_deep_extend('keep', params or {}, {
+    model = 'codestral-latest',
+    temperature = 0.1,
+    n = 1,
+    max_tokens = 100,
+  })
+
+  o.api_key = os.getenv('CODESTRAL_API_KEY')
+  if not o.api_key then
+    vim.schedule(function()
+      vim.notify('CODESTRAL_API_KEY environment variable not set', vim.log.levels.ERROR)
+    end)
+    -- Sentinel value so the header concatenation below cannot error.
+    o.api_key = 'NO_KEY'
+  end
+  o.headers = {
+    'Authorization: Bearer ' .. o.api_key,
+  }
+  return o
+end
+
+--- Request completions for the text before the cursor.
+-- @param lines_before text preceding the cursor, sent as the FIM prompt
+-- @param lines_after text following the cursor (not sent to the API yet)
+-- @param cb callback invoked with a list of completion strings
+function Codestral:complete(lines_before, lines_after, cb)
+  -- Bail out (with a reminder) when no real key was found at setup time.
+  if not self.api_key or self.api_key == 'NO_KEY' then
+    vim.schedule(function()
+      vim.notify('CODESTRAL_API_KEY environment variable not set', vim.log.levels.ERROR)
+    end)
+    return
+  end
+  local data = vim.tbl_deep_extend('keep', { prompt = lines_before }, self.params)
+  self:Get(BASE_URL, self.headers, data, function(answer)
+    local results = {}
+    if answer.choices then
+      for _, choice in ipairs(answer.choices) do
+        table.insert(results, choice.message.content)
+      end
+    end
+    cb(results)
+  end)
+end
+
+-- Manual smoke test; relies on a globally available `dump` helper.
+function Codestral:test()
+  self:complete('def factorial(n)\n if', ' return ans\n', function(data)
+    dump(data)
+  end)
+end
+
+return Codestral