Added set-magit-gptcommit-provider to Doom Emacs
parent 449aef978e, commit d626431d68

.authinfo.gpg (binary file, contents not shown)
@@ -489,7 +489,8 @@ related notes or tasks.
 (:prefix-map ("l" . "LLMs")
  :desc "Aidermacs" "a" #'aidermacs-transient-menu
  :desc "ChatGPT Shell" "c" #'chatgpt-shell-transient
- :desc "Set Forge LLM Provider" "f" #'my/set-forge-llm-provider))
+ :desc "Set Forge LLM Provider" "f" #'my/set-forge-llm-provider
+ :desc "Set Magit GPT Provider" "m" #'my/set-magit-gptcommit-provider))

 (setq chatgpt-shell-model-version "gemini-2.5-pro-exp")
 (setq chatgpt-shell-streaming "t")
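For orientation, here is a sketch of how this prefix map typically sits inside a Doom =map!= block. The enclosing lines are outside the hunk, so the =:leader= wrapper is an assumption, not taken from the commit:

#+begin_src emacs-lisp
;; Hypothetical surrounding context; only the :desc entries come from the diff above.
(map! :leader
      (:prefix-map ("l" . "LLMs")
       :desc "Aidermacs" "a" #'aidermacs-transient-menu
       :desc "ChatGPT Shell" "c" #'chatgpt-shell-transient
       :desc "Set Forge LLM Provider" "f" #'my/set-forge-llm-provider
       :desc "Set Magit GPT Provider" "m" #'my/set-magit-gptcommit-provider))
#+end_src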
@@ -499,6 +500,20 @@ related notes or tasks.
 (setq chatgpt-shell-anthropic-key anthropic-key)
 (setq dall-e-shell-openai-key openai-key)

+(defun my/set-magit-gptcommit-provider (provider)
+  "Set the Magit GPT commit LLM provider dynamically."
+  (interactive
+   (list (completing-read "Choose LLM for Magit GPT Commit: " '("Gemini" "Claude" "Qwen" "Ollama"))))
+  (setq magit-gptcommit-llm-provider
+        (pcase provider
+          ("Gemini" (make-llm-gemini :key gemini-key :chat-model "gemini-2.5-pro-exp-03-25"))
+          ("Claude" (make-llm-claude :key anthropic-key :chat-model "claude-3-7-sonnet-latest"))
+          ("Qwen" (make-llm-openai-compatible :url "https://openrouter.ai/api/v1"
+                                              :chat-model "qwen/qwen3-235b-a22b"
+                                              :key openrouter-api-key))
+          ("Ollama" (make-llm-ollama :scheme "http" :host "192.168.0.122" :chat-model "gemma3:12b"))))
+  (message "Magit GPT provider set to %s" provider))
+
 (require 'llm-ollama)
 (setq magit-gptcommit-llm-provider (make-llm-ollama :scheme "http" :host "192.168.0.122" :chat-model "gemma3:12b"))
 (setq llm-warn-on-nonfree nil)
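A quick usage sketch (illustrative, not part of the commit): with the block above loaded, the provider can be switched through the new =m= binding in the LLMs prefix map or via =M-x my/set-magit-gptcommit-provider=, or directly from Lisp, for example to default to the local Ollama model at startup:

#+begin_src emacs-lisp
;; Illustrative only: select the provider non-interactively.
;; "Ollama" matches the pcase branch above and points at the local gemma3:12b model.
(my/set-magit-gptcommit-provider "Ollama")
#+end_src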
@@ -939,7 +939,8 @@ always expand it later if new tools join the lineup.
 (:prefix-map ("l" . "LLMs")
  :desc "Aidermacs" "a" #'aidermacs-transient-menu
  :desc "ChatGPT Shell" "c" #'chatgpt-shell-transient
- :desc "Set Forge LLM Provider" "f" #'my/set-forge-llm-provider))
+ :desc "Set Forge LLM Provider" "f" #'my/set-forge-llm-provider
+ :desc "Set Magit GPT Provider" "m" #'my/set-magit-gptcommit-provider))
 #+end_src

 *** ChatGPT Shell
@@ -984,6 +985,11 @@ This section configures =magit-gptcommit=, an integration that uses an LLM to au
 well-written Git commit messages based on the current file diffs. I’m using a local model via Ollama (in
 this case, =gemma3:12b=) to keep everything offline and fast.

+To make switching models easier, I also added =my/set-magit-gptcommit-provider=, an interactive command
+that lets me choose the active LLM provider (Gemini, Claude, Qwen, or Ollama) on the fly. This is super
+handy when I want to test different models' output or switch between local and cloud-based models
+depending on the context.
+
 The key part here is the custom prompt. It's designed to enforce a consistent commit message format,
 following the Linux kernel commit style. The prompt instructs the model to:
@@ -994,6 +1000,20 @@ following the Linux kernel commit style. The prompt instructs the model to:
 This gives me consistent, clean, and useful commit messages without having to write them myself.

 #+begin_src emacs-lisp
+(defun my/set-magit-gptcommit-provider (provider)
+  "Set the Magit GPT commit LLM provider dynamically."
+  (interactive
+   (list (completing-read "Choose LLM for Magit GPT Commit: " '("Gemini" "Claude" "Qwen" "Ollama"))))
+  (setq magit-gptcommit-llm-provider
+        (pcase provider
+          ("Gemini" (make-llm-gemini :key gemini-key :chat-model "gemini-2.5-pro-exp-03-25"))
+          ("Claude" (make-llm-claude :key anthropic-key :chat-model "claude-3-7-sonnet-latest"))
+          ("Qwen" (make-llm-openai-compatible :url "https://openrouter.ai/api/v1"
+                                              :chat-model "qwen/qwen3-235b-a22b"
+                                              :key openrouter-api-key))
+          ("Ollama" (make-llm-ollama :scheme "http" :host "192.168.0.122" :chat-model "gemma3:12b"))))
+  (message "Magit GPT provider set to %s" provider))
+
 (require 'llm-ollama)
 (setq magit-gptcommit-llm-provider (make-llm-ollama :scheme "http" :host "192.168.0.122" :chat-model "gemma3:12b"))
 (setq llm-warn-on-nonfree nil)
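The custom prompt described above lies outside the changed lines, so it does not appear in this diff. As a rough sketch of the idea, and assuming the package exposes its prompt through a variable along the lines of =magit-gptcommit-prompt= (name assumed here, check the package's defcustom), a kernel-style prompt could look like this:

#+begin_src emacs-lisp
;; Sketch only: the actual prompt used in this config is not part of the diff,
;; and the variable name is assumed rather than confirmed by the commit.
(setq magit-gptcommit-prompt
      "You are an expert programmer writing a Git commit message in the Linux kernel style.
Write an imperative subject line of at most 50 characters, then a blank line,
then body paragraphs wrapped at 72 columns explaining what changed and why.
The file diffs follow:
%s")
#+end_src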