;;; rcd-llm.el --- RCD LLM Functions -*- lexical-binding: t; -*-

;; Copyright (C) 2024 by Jean Louis

;; Author: Jean Louis
;; Version: 0.1
;; Package-Requires: (rcd-utilities rcd-pg-basics rcd-cf hyperscope)
;; Keywords: convenience help multimedia text tools
;; URL:

;; This file is not part of GNU Emacs.

;; This program is free software: you can redistribute it and/or
;; modify it under the terms of the GNU General Public License as
;; published by the Free Software Foundation, either version 3 of the
;; License, or (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with this program.  If not, see <https://www.gnu.org/licenses/>.

;;; Commentary:
;;
;; RCD LLM Functions

;;; Change Log:

;;; Code:

(require 'rcd-utilities)
(require 'rcd-cf)
(require 'rcd-pg-basics)
(require 'hyperscope)
(require 'rcd-dashboard)

;;; Customization Group

(defgroup rcd-llm nil
  "Customization options for RCD AI functionalities."
  :prefix "rcd-llm-"
  :group 'applications)

;;; Customize Variables

(defcustom rcd-llm-users-llm-function 'rcd-llm-db
  "User's AI function.

This variable determines which AI function is used by the RCD AI
system.  You can customize it to use a different AI backend as
needed."
  :type '(choice (const :tag "ChatGPT Shell" rcd-chatgpt-shell)
                 (const :tag "RCD ChatGPT" rcd-llm-chatgpt)
                 (const :tag "Groq" rcd-llm-groq)
                 (const :tag "RCD LLM Database" rcd-llm-db)
                 (const :tag "Llamafile" rcd-llm-llamafile)
                 (const :tag "HuggingFace" rcd-llm-huggingface)
                 (const :tag "Mistral" rcd-llm-mistral))
  :group 'rcd-llm)

;;; Variables

(defvar rcd-llm-prompt "LLM Prompt: "
  "Defines LLM prompt.")

(defvar rcd-llm-last-response ""
  "Defines last LLM response.")

(defvar rcd-llm-use-users-llm-memory nil
  "When TRUE, use user's AI memory.")

(defvar rcd-llm-add-to-memory nil
  "When TRUE, always add to present AI memory of the user.")

(defvar rcd-llm-speak t
  "Toggle speaking by LLM.")

;; Moved before the LLM backend functions that `setq' it, so it is a
;; declared dynamic variable at the point of first use.
(defvar rcd-llm-last-json nil
  "Last JSON sent to LLM.")

(defvar rcd-llm-pop-to-window nil
  "When non-nil, `rcd-llm' pops the response into a separate window.")

;; FIX: the alias previously pointed to the non-existent function
;; `rcd-db-current-user-llmq-memory-select' (note the stray "q"); the
;; function defined in this file is `rcd-db-current-user-llm-memory-select'.
(defalias 'rcd-llm-memory-select 'rcd-db-current-user-llm-memory-select)

;;; LLM Utilities

(defun rcd-llm-speak-toggle ()
  "Toggle the LLM speech output."
  (interactive)
  (setq rcd-llm-speak (not rcd-llm-speak))
  (rcd-message (format "Variable `rcd-llm-speak': %s" rcd-llm-speak)))

(defun rcd-llm-user-memory ()
  "Return current user AI memory."
  (let ((memory (rcd-db-get-entry-where
                 "usersdefaults" "usersdefaults_aimemory"
                 (format "usersdefaults_users = %s" (rcd-db-current-user))
                 rcd-db)))
    (when memory
      ;; `concat' also normalizes a nil result of
      ;; `hyperscope-text-with-title' to the empty string.
      (concat (hyperscope-text-with-title memory)))))

(defun rcd-parse-http-json-string (string)
  "Parse a JSON STRING preceded by HTTP headers.
Return the parsed JSON object, or nil when no JSON body is found."
  (let ((json-start-index (string-match "{" string)))
    (when json-start-index
      (json-read-from-string (substring string json-start-index)))))

(defun rcd-llm-clipboard-modify ()
  "Modify the clipboard text by using AI.
The clipboard content is used as the LLM prompt and the clipboard
is then replaced with the LLM result.  Useful when the text was
marked and copied in an external application such as a web
browser."
  (interactive)
  (select-frame-set-input-focus (selected-frame))
  (with-temp-buffer
    (clipboard-yank)
    (goto-char (point-min))
    (set-mark (point-max))
    (rcd-region-string)
    (rcd-llm)
    (gui--set-last-clipboard-selection (buffer-string))))

(defun rcd-llm-add-to-memory ()
  "Toggle variable `rcd-llm-add-to-memory'.

When `rcd-llm-add-to-memory' is TRUE, then LLM responses are
added to user's memory."
  (interactive)
  (let ((answer (y-or-n-p "Add to user's LLM memory? ")))
    (setq rcd-llm-add-to-memory (and answer t))
    ;; FIX: the message previously said "is not" instead of "is now".
    (rcd-message (format "Variable `rcd-llm-add-to-memory' is now: %s"
                         rcd-llm-add-to-memory))))

(defun rcd-llm-use-users-memory ()
  "Toggle variable `rcd-llm-use-users-llm-memory'.

When `rcd-llm-use-users-llm-memory' is TRUE, then user's LLM
memory is used with each prompt."
  (interactive)
  (let ((answer (y-or-n-p "Use user's AI memory? ")))
    (setq rcd-llm-use-users-llm-memory (and answer t))))

;;; LLM Logging

(defun rcd-log-llm-list (&optional query)
  "Search LLM log and display report.
Optional QUERY string may be used for search."
  (interactive)
  (let* ((query (or query (rcd-string-nil-if-blank (rcd-ask "Find ChatGPT log: "))))
         (query (when query (sql-escape-string query)))
         ;; FIX: the OR clause is parenthesized; previously SQL
         ;; operator precedence (AND binds tighter than OR) let the
         ;; log_description match bypass the log_logtypes = 8 filter.
         (query-sql (cond (query (concat " AND (log_name ~* " query
                                         " OR log_description ~* " query ")"))
                          (t "")))
         (sql (format "SELECT log_id, coalesce(get_full_contacts_name(log_people), 'UNKNOWN'), REPLACE(coalesce(log_name,''), E'\\n', ' '), REPLACE(coalesce(log_description,''), E'\\n', ' ') FROM log WHERE log_logtypes = 8 %s" query-sql))
         (prompt "RCD Notes ChatGPT Log"))
    (rcd-db-sql-report prompt sql
                       [("ID" 4 t) ("Contact" 20 t) ("Prompt" 20 t) ("Response" 100 t)]
                       "log" nil nil)))

(defun rcd-log-llm-model ()
  "Return current user's default LLM model as (MODEL-NAME ENDPOINT-NAME)."
  ;; NOTE(review): assumes `rcd-sql-list' returns a flat (name endpoint)
  ;; row for a single-row query — confirm against rcd-pg-basics.
  (rcd-sql-list "SELECT llmmodels_name, llmendpoints_name FROM llmendpoints, llmmodels, usersdefaults WHERE llmendpoints_id = llmmodels_llmendpoints AND llmmodels_id = usersdefaults_llmmodels AND usersdefaults_users = $1"
                rcd-db 1))

(defun rcd-log-llm (prompt response)
  "Log PROMPT and RESPONSE in the database.
Both must be non-nil strings; signal `user-error' otherwise.
Return the new log_id on success."
  (cond ((and (stringp prompt) (stringp response))
         (let* ((function (symbol-name rcd-llm-users-llm-function))
                ;; FIX: strings were compared with `eq', which is an
                ;; identity test and is always nil for freshly consed
                ;; strings, so the database branch never executed.
                ;; Use `string=' instead.
                (model-name (cond ((not (string= function (symbol-name 'rcd-llm-db)))
                                   (format "Function: %s" function))
                                  (t (car (rcd-log-llm-model)))))
                ;; FIX: `cdr' of the (NAME ENDPOINT) row is a one
                ;; element list, not the endpoint string; use `cadr'.
                (model-url (cond ((not (string= function (symbol-name 'rcd-llm-db)))
                                  "URL: see function")
                                 (t (cadr (rcd-log-llm-model)))))
                (note (format "Model name: %s\nModel URL: %s\n" model-name model-url)))
           (rcd-sql-first "INSERT INTO log (log_people, log_name, log_description, log_logtypes, log_note) VALUES (1, $1, $2, $3, $4) RETURNING log_id"
                          rcd-db prompt response 8 note)))
        ((not prompt) (user-error "rcd-log-llm: PROMPT missing."))
        ((not response) (user-error "rcd-log-llm: RESPONSE missing."))))

;;; Main LLM Functions

(defun hyperscope-llm-user-new-memory ()
  "Generate new AI memory elementary object for user, and keep adding to it."
  (interactive)
  (let* ((defaults-id (rcd-db-current-user-defaults-id))
         (title (rcd-ask-get "New AI memory title: "))
         (subtype (rcd-llm-users-memory-subtype))
         (description (rcd-ask (format "Describe `%s'" title)))
         (set (rcd-repeat-until-something 'hyperscope-select-set
                                          "Select set for AI memory: "))
         (id (hyperscope-add-generic title nil nil 1 subtype set nil description)))
    (rcd-db-update-entry "usersdefaults" "usersdefaults_aimemory" defaults-id id rcd-db)
    (setq rcd-llm-add-to-memory t)))

(defun rcd-llm-switch-users-memory ()
  "Switch to user's AI memory if such exists."
  (interactive)
  (let ((id (rcd-db-current-user-llm-memory)))
    (when id
      (hyperscope-isolate id))))

(defun rcd-db-current-user-llm-subtype ()
  "Return AI subtype."
  (rcd-db-get-entry "usersdefaults" "usersdefaults_aimemorysubtype"
                    (rcd-db-current-user) rcd-db))

(defun rcd-db-current-user-llm-memory ()
  "Return AI memory."
  (rcd-db-get-entry "usersdefaults" "usersdefaults_aimemory"
                    (rcd-db-current-user) rcd-db))

(defun rcd-llm-users-memory-subtype ()
  "Return LLM memory subtype, asking user to select one when unset."
  (let* ((defaults-id (rcd-db-current-user-defaults-id))
         (memory-subtype
          (or (rcd-db-get-entry "usersdefaults" "usersdefaults_aimemorysubtype"
                                defaults-id rcd-db)
              (let ((subtype (hyperscope-subtype-select "Select AI memory subtype: ")))
                (when (and subtype defaults-id)
                  (rcd-db-update-entry "usersdefaults" "usersdefaults_aimemorysubtype"
                                       defaults-id subtype rcd-db))
                ;; FIX: return the selected subtype itself; previously
                ;; the return value of `rcd-db-update-entry' leaked out
                ;; of this branch instead of the subtype.
                subtype))))
    memory-subtype))

(defun rcd-db-current-user-llm-memory-select ()
  "Select AI memory for current user."
  (interactive)
  (let* ((defaults-id (rcd-db-current-user-defaults-id))
         (memory-subtype (rcd-llm-users-memory-subtype))
         (memory (hyperscope-select-by-subtype "Select AI memory: " memory-subtype)))
    (when (and defaults-id memory memory-subtype)
      (when (rcd-db-update-entry "usersdefaults" "usersdefaults_aimemorysubtype"
                                 defaults-id memory-subtype rcd-db)
        (rcd-db-update-entry "usersdefaults" "usersdefaults_aimemory"
                             defaults-id memory rcd-db)
        (setq rcd-llm-add-to-memory t)))))

(defun rcd-llm (&optional prompt)
  "Send PROMPT to default LLM and return or insert the response.

The backend is the function customized in the variable
`rcd-llm-users-llm-function'.  When a region is active, it is
appended to PROMPT; when PROMPT is empty, the region alone is
used; with neither, a warning is shown.

Interactively, prefix keys change what happens to the response:
- \\[universal-argument]: insert response after the region;
- \\[universal-argument] \\[universal-argument]: put response on the kill ring;
- \\[universal-argument] \\[universal-argument] \\[universal-argument]:
  show prompt and response in a *LLM Response* buffer;
- otherwise the region is replaced (or the response inserted).

Each prompt/response pair is logged with `rcd-log-llm', and, when
`rcd-llm-add-to-memory' and `rcd-llm-use-users-llm-memory' are
enabled, appended to the user's AI memory."
  (interactive)
  (let* ((region (rcd-region-string))
         (rcd-llm-model (map-elt (seq-first chatgpt-shell-models) :version))
         (memory (when rcd-llm-use-users-llm-memory (rcd-llm-user-memory)))
         (prompt (or prompt (rcd-ask rcd-llm-prompt)))
         ;; Combine prompt and region into the effective prompt.
         (prompt (cond ((and region (not (string-empty-p prompt)))
                        (concat (string-add prompt ":\n\n") region))
                       ((not (string-empty-p prompt)) prompt)
                       ((and region (string-empty-p prompt)) region)
                       (t nil)))
         (rcd-message-date nil))
    (cond (prompt
           (rcd-message "Requesting LLM...")
           (let ((response (cond (rcd-llm-model
                                  (funcall rcd-llm-users-llm-function
                                           prompt memory rcd-llm-model))
                                 (t (error "Could not find a model.
Missing model setup?")))))
             (cond (response
                    (rcd-log-llm prompt response)
                    (cond
                     ;; Region + C-u: insert response after the region.
                     ((and region (called-interactively-p 'interactive)
                           (eql (car current-prefix-arg) 4))
                      (goto-char (cdar (region-bounds)))
                      (setq deactivate-mark t)
                      (insert "\n\n" response "\n\n"))
                     ;; Region + C-u C-u: response to kill ring.
                     ((and region (called-interactively-p 'interactive)
                           (eql (car current-prefix-arg) 16))
                      (setq deactivate-mark t)
                      (rcd-kill-new response))
                     ;; C-u C-u C-u (or `rcd-llm-pop-to-window'):
                     ;; open a dedicated response buffer.
                     ((or (and (eql (car current-prefix-arg) 64)
                               (called-interactively-p 'interactive))
                          rcd-llm-pop-to-window)
                      (rcd-pop-to-report
                       (concat (underline-text
                                (concat "LLM Function: "
                                        (upcase (symbol-name rcd-llm-users-llm-function))))
                               (concat "Prompt: " prompt) "\n"
                               (make-string fill-column (string-to-char "=")) "\n\n"
                               response)
                       "*LLM Response*")
                      (switch-to-buffer "*LLM Response*")
                      (markdown-mode)
                      (text-scale-adjust 1)
                      (local-set-key "q" 'kill-buffer-and-window))
                     ;; Region, no prefix: replace the region.
                     ((and region (called-interactively-p 'interactive))
                      (rcd-region-string-replace response))
                     ;; Interactive, no region: insert at point.
                     ((called-interactively-p 'interactive) (insert response))
                     ;; Non-interactive: just the value.
                     (t response))
                    (when (and rcd-llm-add-to-memory rcd-llm-use-users-llm-memory)
                      (hyperscope-add-to-column (rcd-db-current-user-llm-memory)
                                                "hyobjects_text" prompt)
                      (hyperscope-add-to-column (rcd-db-current-user-llm-memory)
                                                "hyobjects_text" response))
                    (setq rcd-llm-last-response response)
                    (when rcd-llm-speak (rcd-tts-and-speak "Finished."))
                    (cond ((called-interactively-p 'interactive)
                           (rcd-kill-new response))
                          (t response)))
                   (t (prog2 (rcd-warning-message "Could not reach AI server") nil)))))
          (t (rcd-warning-message "LLM: Empty Prompt")))))

(defun rcd-llm-response (response-buffer)
  "Parse LLM's RESPONSE-BUFFER and return decoded string.
RESPONSE-BUFFER is a buffer from `url-retrieve-synchronously';
it is killed after parsing.  Return nil on any parse error."
  (when response-buffer
    (with-current-buffer response-buffer
      (condition-case err
          (progn
            ;; Skip HTTP headers.
            (goto-char (point-min))
            (when (search-forward "\n\n" nil t)
              (let ((response (decode-coding-string
                               (buffer-substring-no-properties (point) (point-max))
                               'utf-8)))
                (kill-buffer response-buffer)
                ;; Parse JSON and extract the reply.
                (let* ((json-response (json-parse-string response :object-type 'alist))
                       (choices (alist-get 'choices json-response))
                       (message (alist-get 'message (aref choices 0)))
                       (message (decode-coding-string (alist-get 'content message) 'utf-8)))
                  ;; Strip trailing end-of-text markers emitted by
                  ;; various models.  NOTE(review): the first, empty
                  ;; alternative looks like a stop token lost in
                  ;; transit (perhaps "</s>") — confirm.
                  (replace-regexp-in-string
                   (rx (or "" "<|eot_id|>" "<|end|>" "<|endoftext|>" "|im_end|"
                           "<|end_of_text|>" "<|end_of_role|>")
                       line-end)
                   "\n" message)))))
        (error
         (rcd-message "Error in rcd-llm-response: %s" (error-message-string err))
         nil)))))

;;; LLM Statistics

(defun rcd-llm-usage-by-day ()
  "Chart LLM usage per day for the last 30 days."
  (interactive)
  (cf-chart-bar-quickie
   "SELECT date_part('day', log_datecreated)::int AS day, count(log_name) FROM log WHERE log_logtypes = 8 AND log_datecreated > (current_date - 30) GROUP BY date_part('day', log_datecreated)::int ORDER BY day DESC"
   "LLM usage by day" "Last days" "Totals"))

(defun rcd-llm-usage-by-week ()
  "Chart LLM usage per week for the last year."
  (interactive)
  (rcd-db-chart-by-periods
   "log" "week" "LLM usage by week" "WEEKS" "REQUESTS"
   "WHERE log_logtypes = 8 AND log_datecreated > (current_date - 365)"))

(defun rcd-llm-usage-by-month ()
  "Chart LLM usage per month for the last year."
  (interactive)
  (rcd-db-chart-by-periods
   "log" "month" "LLM usage by month" "MONTHS" "REQUESTS"
   "WHERE log_logtypes = 8 AND log_datecreated > (current_date - 365)"))

;;;; Other LLM

;;; ChatGPT

(defun rcd-llm-chatgpt (prompt &optional memory rcd-llm-model)
  "Send PROMPT to OpenAI API and return the response.
Optional MEMORY and RCD-LLM-MODEL may be used."
  ;; FIX: the model default previously used (boundp 'rcd-llm-model),
  ;; which under lexical binding tests a dynamic variable rather than
  ;; the parameter, so a caller-supplied model was ignored.
  (let* ((rcd-llm-model (or rcd-llm-model "gpt-4o-mini-2024-07-18"))
         (url-request-method "POST")
         (url-request-extra-headers
          '(("Content-Type" . "application/json; charset=utf-8")
            ;; FIXME: placeholder credential; supply a real API key.
            ("Authorization" . "Bearer APIKEY")))
         (url-request-data
          (encode-coding-string
           (json-encode `(("model" . ,rcd-llm-model)
                          ("messages" . [((role . "user") (content . ,prompt))])))
           'utf-8))
         (response-buffer (url-retrieve-synchronously
                           "https://api.openai.com/v1/chat/completions")))
    (rcd-llm-response response-buffer)))

(defun rcd-chatgpt-shell (prompt memory rcd-llm-model)
  "Call function `chatgpt-shell-post'.
PROMPT is LLM's prompt.  MEMORY is string containing user's
memory.  RCD-LLM-MODEL is one of available LLM models by OpenAI."
  (chatgpt-shell-post :context (list (cons memory nil)
                                     (cons prompt nil))
                      :version rcd-llm-model))

;;; Llamafile

(defun rcd-llm-llamafile (prompt &optional memory rcd-llm-model)
  "Send PROMPT to Llamafile server and return the response.
Optional MEMORY and RCD-LLM-MODEL may be used."
  ;; FIX: model default via `or' instead of the broken boundp test.
  (let* ((rcd-llm-model (or rcd-llm-model "LLaMA_CPP"))
         ;; Wrap user's memory in explicit delimiters for the model.
         (memory (cond ((and memory rcd-llm-use-users-llm-memory)
                        (concat "Following is user's memory, until the END-OF-MEMORY-TAG: \n\n"
                                memory
                                "\n\n END-OF-MEMORY-TAG\n\n"))))
         (prompt (cond (memory (concat memory "\n\n" prompt))
                       (t prompt)))
         (temperature 0.8)
         (max-tokens -1)
         (top-p 0.95)
         (stream :json-false)
         (buffer (let ((url-request-method "POST")
                       (url-request-extra-headers
                        '(("Content-Type" . "application/json")
                          ("Authorization" . "Bearer no-key")))
                       (prompt (encode-coding-string prompt 'utf-8))
                       (url-request-data
                        (encode-coding-string
                         (setq rcd-llm-last-json
                               (json-encode
                                `((model . ,rcd-llm-model)
                                  (messages . [((role . "system")
                                                (content . "You are a helpful assistant. Answer short."))
                                               ((role . "user") (content . ,prompt))])
                                  (temperature . ,temperature)
                                  (max_tokens . ,max-tokens)
                                  (top_p . ,top-p)
                                  (stream . ,stream))))
                         'utf-8)))
                   (url-retrieve-synchronously
                    ;; "http://127.0.0.1:8080/v1/chat/completions"
                    "http://192.168.188.140:8080/v1/chat/completions"))))
    (rcd-llm-response buffer)))

;;; Groq

(defun rcd-llm-groq (prompt &optional memory rcd-llm-model)
  "Send PROMPT to Groq and return the response.
Optional MEMORY and RCD-LLM-MODEL may be used."
  ;; FIX: model default via `or' instead of the broken boundp test.
  (let* ((rcd-llm-model (or rcd-llm-model
                            ;; "llama-3.2-1b-preview"
                            ;; "llama-3.3-70b-versatile"
                            "mixtral-8x7b-32768"))
         (buffer (let ((url-request-method "POST")
                       (url-request-extra-headers
                        '(("Content-Type" . "application/json")
                          ;; FIXME: placeholder credential.
                          ("Authorization" . "Bearer APIKEY")))
                       (url-request-data
                        (encode-coding-string
                         (json-encode
                          `((model . ,rcd-llm-model)
                            (messages . [((role . "user") (content . ,prompt))])))
                         'utf-8)))
                   (url-retrieve-synchronously
                    "https://api.groq.com/openai/v1/chat/completions"))))
    (rcd-llm-response buffer)))

;;; DB based LLM

(defun rcd-llm-db (prompt &optional memory rcd-llm-model temperature
                          max-tokens top-p top-k min-p stream)
  "Send PROMPT to API as decided by the database.
Optional MEMORY, RCD-LLM-MODEL, TEMPERATURE, MAX-TOKENS, TOP-P,
TOP-K, MIN-P, and STREAM can be used; unset sampling parameters
fall back to the per-model values stored in the database."
  (let ((rcd-llm-model-id (rcd-db-users-defaults "llmmodels")))
    (cond ((not rcd-llm-model-id)
           (rcd-warning-message "Did not find default user's LLM model. Do `M-x rcd-my-defaults' to set it."))
          (t
           (let* ((rcd-llm-model (rcd-db-get-entry "llmmodels" "llmmodels_name" rcd-llm-model-id rcd-db))
                  (temperature (or temperature (rcd-db-get-entry "llmmodels" "llmmodels_temperature" rcd-llm-model-id rcd-db)))
                  (max-tokens (or max-tokens (rcd-db-get-entry "llmmodels" "llmmodels_maxtokens" rcd-llm-model-id rcd-db)))
                  (top-p (or top-p (rcd-db-get-entry "llmmodels" "llmmodels_topp" rcd-llm-model-id rcd-db)))
                  ;; FIX: previously read `top-p' here instead of `min-p'.
                  (min-p (or min-p (rcd-db-get-entry "llmmodels" "llmmodels_minp" rcd-llm-model-id rcd-db)))
                  (top-k (or top-k (rcd-db-get-entry "llmmodels" "llmmodels_topk" rcd-llm-model-id rcd-db)))
                  (llm-endpoint-id (rcd-db-get-entry "llmmodels" "llmmodels_llmendpoints" rcd-llm-model-id rcd-db))
                  (llm-endpoint (rcd-db-get-entry "llmendpoints" "llmendpoints_name" llm-endpoint-id rcd-db))
                  (llm-provider-id (rcd-db-get-entry "llmendpoints" "llmendpoints_llmproviders" llm-endpoint-id rcd-db))
                  (api-key (rcd-db-get-entry "llmproviders" "llmproviders_apikey" llm-provider-id rcd-db))
                  (system-message (or (rcd-db-get-entry "llmmodels" "llmmodels_systemmessage" rcd-llm-model-id rcd-db)
                                      "You are helpful assistant."))
                  (authorization (concat "Bearer " api-key))
                  (stream (if stream t :json-false))
                  (url-request-method "POST")
                  (prompt (encode-coding-string prompt 'utf-8))
                  (url-request-extra-headers
                   `(("Content-Type" . "application/json")
                     ("Authorization" . ,authorization)))
                  (url-request-data
                   (encode-coding-string
                    (setq rcd-llm-last-json
                          (json-encode
                           `((model . ,rcd-llm-model)
                             (messages . [((role . "system") (content . ,system-message))
                                          ((role . "user") (content . ,prompt))])
                             (temperature . ,temperature)
                             (max_tokens . ,max-tokens)
                             (top_p . ,top-p)
                             (frequency_penalty . 1.2)
                             (repeat_penalty . 1.2)
                             ;; (top_k . ,top-k)
                             ;; (min_p . ,min-p)
                             (stream . ,stream))))
                    'utf-8))
                  (buffer (url-retrieve-synchronously llm-endpoint)))
             (rcd-llm-response buffer))))))

;;; Hugging Face

(defun rcd-llm-huggingface (prompt &optional memory rcd-llm-model
                                   temperature max-tokens top-p stream)
  "Send PROMPT to Hugging Face API with specified parameters.
Optional MEMORY, RCD-LLM-MODEL, TEMPERATURE, MAX-TOKENS, TOP-P,
and STREAM can be used."
  (let* ((rcd-llm-model (or rcd-llm-model "Qwen/Qwen2.5-Coder-32B-Instruct"))
         (temperature (or temperature 0.5))
         (max-tokens (or max-tokens 2048))
         (top-p (or top-p 0.7))
         (stream (if stream t :json-false))
         (url-request-method "POST")
         (url-request-extra-headers
          '(("Content-Type" . "application/json")
            ;; FIXME: placeholder credential.
            ("Authorization" . "Bearer APIKEY")))
         (url-request-data
          (encode-coding-string
           (setq rcd-llm-last-json
                 (json-encode `((model . ,rcd-llm-model)
                                (messages . [((role . "user") (content . ,prompt))])
                                (temperature . ,temperature)
                                (max_tokens . ,max-tokens)
                                (top_p . ,top-p)
                                (stream . ,stream))))
           'utf-8))
         ;; FIX: build the endpoint from RCD-LLM-MODEL instead of
         ;; hard-coding one model in the URL while accepting another
         ;; in the request body.
         (buffer (url-retrieve-synchronously
                  (concat "https://api-inference.huggingface.co/models/"
                          rcd-llm-model "/v1/chat/completions"))))
    (rcd-llm-response buffer)))

;;; Mistral

(defun rcd-llm-mistral (prompt &optional memory rcd-llm-model)
  "Send PROMPT to Mistral and return the response.
Optional MEMORY and RCD-LLM-MODEL may be used."
  ;; FIX: model default via `or' instead of the broken boundp test.
  (let* ((rcd-llm-model (or rcd-llm-model
                            ;; "mistral-large-latest"
                            "open-mistral-7b"))
         (buffer (let ((url-request-method "POST")
                       (url-request-extra-headers
                        '(("Content-Type" . "application/json")
                          ;; FIXME: placeholder credential.
                          ("Authorization" . "Bearer APIKEY")))
                       (url-request-data
                        (encode-coding-string
                         (json-encode
                          `((model . ,rcd-llm-model)
                            ;; (agent_id . "ag:6bf709a1:20250103:helpful-mistral-7b:adb53e32")
                            (messages . [((role . "user") (content . ,prompt))])))
                         'utf-8)))
                   (url-retrieve-synchronously
                    "https://api.mistral.ai/v1/chat/completions"))))
    (rcd-llm-response buffer)))

;; FIXME: the key specification "C-" is incomplete (the key after the
;; modifier appears to have been lost); `kbd' will reject it.  Restore
;; the intended key, e.g. (kbd "C-<f12>").
(global-set-key (kbd "C-") #'rcd-llm)

;;; Modification of other mode maps

(defun rcd-llm-other-window (&optional prompt)
  "Return result of `rcd-llm' in other window."
  (interactive)
  (let ((prompt (or prompt (rcd-ask-get rcd-llm-prompt)))
        ;; Simulate C-u C-u C-u so `rcd-llm' pops a response buffer.
        (current-prefix-arg '(64)))
    (rcd-llm prompt)))

(defun rcd-llm-define-word-or-region-other-window (&optional word)
  "Define current WORD or region by using `rcd-llm' in other window."
  (interactive)
  (let* ((region (rcd-region-string))
         (word (or word (current-word)))
         (prompt (cond (region (concat "What is meaning of: " region))
                       (word (concat "Define this word: " word))
                       (t (rcd-ask-get (concat "No region or word found, "
                                               rcd-llm-prompt))))))
    (rcd-llm-other-window prompt)))

(with-eval-after-load "wordnut"
  (keymap-set wordnut-mode-map "L" #'rcd-llm-define-word-or-region-other-window))

;; RCD LLM Dashboard

(defun rcd-llm-dashboard-header ()
  "RCD LLM Dashboard header."
  (rcd-dashboard-heading
   (concat (format "⭐ %s ⭐ Dashboard ⭐ " rcd-program-name-full)
           (or user-full-name user-login-name user-real-login-name "")
           "\n")))

(defvar rcd-llm-prompts-general-information
  '("What is the definition of `%s'?"
    "Can you provide an overview of %s?"
    "How does %s work?"
    "What are the benefits of %s?"
    "Can you give me some examples of %s?")
  "LLM General Information Prompts")

(defvar rcd-llm-prompts-trends-and-analysis
  '("What are the current trends in %s?"
    "Can you analyze the impact of %s on %s?"
    "What are the top %s factors influencing %s?")
  "LLM Trends and Analysis Prompts")

;; (rcd-button-insert "What is: " (lambda (_)
;;   (let ((current-prefix-arg '(64)))
;;     (rcd-llm (concat "What is?" (rcd-ask-get "What is? ") "?")))))

(defun rcd-llm-dashboard-basics ()
  "Insert the RCD LLM basics section into the dashboard."
  (insert "** Large Language Models\n\n")
  (insert "*** LLM Settings\n\n")
  (insert "**** ")
  (rcd-button-insert "LLM Providers"
                     (lambda (_) (rcd-db-table-edit-by-name "llmproviders")))
  (insert "\n**** ")
  (rcd-button-insert "LLM Endpoints"
                     (lambda (_) (rcd-db-table-edit-by-name "llmendpoints")))
  (insert "\n**** ")
  (rcd-button-insert "LLM Models"
                     (lambda (_) (rcd-db-table-edit-by-name "llmmodels")))
  (insert "\n\n** LLM Prompts\n\n")
  (insert "*** General information\n\n")
  (insert "*** Trends and Analysis\n\n")
  (insert "*** Data and Statistics\n\n")
  (insert "*** Recommendation and Advice\n\n")
  (insert "*** Comparison and Evaluation\n\n")
  (insert "*** Creative and Open-Ended\n\n"))

(defun rcd-llm-dashboard ()
  "RCD Notes Dashboard."
  (interactive)
  (let ((rcd-dashboard-buffer-name "RCD LLM Dashboard")
        (rcd-dashboard-always-refresh (cond ((equal '(4) current-prefix-arg) t)
                                            (t rcd-dashboard-always-refresh))))
    (cond ((equal 0 current-prefix-arg) (help-for-help))
          (t (rcd-dashboard '(rcd-llm-dashboard-header rcd-llm-dashboard-basics)
                            "RCD LLM Dashboard")
             (rcd-speak "Large Language Model Dashboard")))))

(provide 'rcd-llm)

;;; rcd-llm.el ends here