nvim: 先暫時關閉會影響工作的東西

This commit is contained in:
Yuan Chiu 2025-06-20 06:39:20 +08:00
parent e6eff2a817
commit 0e03e0a861
Signed by: yuan
GPG Key ID: 50FBE4156404B98D
2 changed files with 29 additions and 29 deletions

View File

@ -117,32 +117,32 @@ return {
end,
},
{
-- 自動補全用
'huggingface/llm.nvim',
dependencies = { 'kevinhwang91/nvim-ufo' }, -- 確保 ufo 先載入
event = 'VeryLazy', -- 啟動後自動載入
opts = {
backend = "openai",
-- chat_mode = true,
-- model = "qwen2.5-coder:1.5b", -- 實測發現不相容,先關掉
-- model = "codellama:7b",
-- model = "starcoder2:3b",
-- model = "starcoder:1b",
model = "stable-code:3b",
url = {{ .ollamaUrl | quote }}, -- llm-ls uses "/api/generate"
{{ if .ollamaKey }}api_token = {{ .ollamaKey | quote }},{{ end }}
-- -- cf https://github.com/ollama/ollama/blob/main/docs/api.md#parameters — you can set any field:value pair here; it will be passed as is to the backend
request_body = {
-- Modelfile options for the model you use
options = {
temperature = 0.2,
top_p = 0.95,
}
},
-- on_response = function(output)
-- return output:gsub("^```[%w]*\n?", ""):gsub("```$", "")
-- end,
},
},
-- {
-- -- 自動補全用
-- 'huggingface/llm.nvim',
-- dependencies = { 'kevinhwang91/nvim-ufo' }, -- 確保 ufo 先載入
-- event = 'VeryLazy', -- 啟動後自動載入
-- opts = {
-- backend = "openai",
-- -- chat_mode = true,
-- -- model = "qwen2.5-coder:1.5b", -- 實測發現不相容,先關掉
-- -- model = "codellama:7b",
-- -- model = "starcoder2:3b",
-- -- model = "starcoder:1b",
-- model = "stable-code:3b",
-- url = {{ .ollamaUrl | quote }}, -- llm-ls uses "/api/generate"
-- {{ if .ollamaKey }}api_token = {{ .ollamaKey | quote }},{{ end }}
-- -- -- cf https://github.com/ollama/ollama/blob/main/docs/api.md#parameters — you can set any field:value pair here; it will be passed as is to the backend
-- request_body = {
-- -- Modelfile options for the model you use
-- options = {
-- temperature = 0.2,
-- top_p = 0.95,
-- }
-- },
-- -- on_response = function(output)
-- -- return output:gsub("^```[%w]*\n?", ""):gsub("```$", "")
-- -- end,
-- },
-- },
}

View File

@ -1,4 +1,4 @@
-- if true then return {} end
if true then return {} end
return {
{