unofficial mirror of guix-patches@gnu.org 
 help / color / mirror / code / Atom feed
From: Nicolas Graves via Guix-patches via <guix-patches@gnu.org>
To: 62443@debbugs.gnu.org
Cc: ngraves@ngraves.fr
Subject: [bug#62443] [PATCH 3/3] gnu: Add llama-cpp.
Date: Sat, 25 Mar 2023 16:32:20 +0100	[thread overview]
Message-ID: <20230325153220.26027-3-ngraves@ngraves.fr> (raw)
In-Reply-To: <20230325153220.26027-1-ngraves@ngraves.fr>

* gnu/packages/machine-learning.scm (llama-cpp): New variable.
---
 gnu/packages/machine-learning.scm | 64 +++++++++++++++++++++++++++++++
 1 file changed, 64 insertions(+)

diff --git a/gnu/packages/machine-learning.scm b/gnu/packages/machine-learning.scm
index df1989d316..6c78b14fc6 100644
--- a/gnu/packages/machine-learning.scm
+++ b/gnu/packages/machine-learning.scm
@@ -400,6 +400,70 @@ (define-public guile-aiscm
 (define-public guile-aiscm-next
   (deprecated-package "guile-aiscm-next" guile-aiscm))
 
+(define-public llama-cpp
+  (let ((commit "3cd8dde0d1357b7f11bdd25c45d5bf5e97e284a0")
+        (revision "0"))
+    (package
+      (name "llama-cpp")
+      (version (git-version "0.0.0" revision commit))
+      (source
+       (origin
+         (method git-fetch)
+         (uri (git-reference
+               (url "https://github.com/ggerganov/llama.cpp")
+               (commit (string-append "master-" (string-take commit 7)))))
+         (file-name (git-file-name name version))
+         (sha256
+          (base32 "0i7c92cxqs31xklrn688978kk29agivgxjgvsb45wzm65gc6hm5c"))))
+      (build-system cmake-build-system)
+      (arguments
+       (list
+        #:modules '((ice-9 textual-ports)
+                    (guix build utils)
+                    ((guix build python-build-system) #:prefix python:)
+                    (guix build cmake-build-system))
+        #:imported-modules `(,@%cmake-build-system-modules
+                             (guix build python-build-system))
+        #:phases
+        #~(modify-phases %standard-phases
+            (add-before 'install 'install-python-scripts
+              (lambda _
+                (let ((bin (string-append #$output "/bin/")))
+                  (define (make-script script)
+                    (let ((suffix (if (string-suffix? ".py" script) "" ".py")))
+                      (call-with-input-file
+                          (string-append "../source/" script suffix)
+                        (lambda (input)
+                          (call-with-output-file (string-append bin script)
+                            (lambda (output)
+                              (format output "#!~a/bin/python3\n~a"
+                                      #$(this-package-input "python")
+                                      (get-string-all input))))))
+                      (chmod (string-append bin script) #o555)))
+                  (mkdir-p bin)
+                  (make-script "convert-pth-to-ggml")
+                  (make-script "convert-gptq-to-ggml")
+                  (make-script "quantize.py")
+                  (substitute* (string-append bin "quantize.py")
+                    (("os\\.getcwd\\(\\), quantize_script_binary")
+                     (string-append "\"" bin "\", quantize_script_binary"))))))
+            (add-after 'install-python-scripts 'wrap-python-scripts
+              (assoc-ref python:%standard-phases 'wrap))
+            (replace 'install
+              (lambda _
+                (let ((bin (string-append #$output "/bin/")))
+                  (install-file "bin/quantize" bin)
+                  (copy-file "bin/main" (string-append bin "llama"))))))))
+      (propagated-inputs
+       (list python-pytorch python-sentencepiece python-numpy))
+      (inputs (list python))
+      (home-page "https://github.com/ggerganov/llama.cpp")
+      (synopsis "Port of Facebook's LLaMA model in C/C++")
+      (description "This package provides a port to Facebook's LLaMA collection
+of foundation language models.  It requires model parameters to be downloaded
+independently to be able to run a LLaMA model.")
+      (license license:expat))))
+
 (define-public mcl
   (package
     (name "mcl")
-- 
2.39.2





  parent reply	other threads:[~2023-03-25 15:33 UTC|newest]

Thread overview: 5+ messages / expand[flat|nested]  mbox.gz  Atom feed  top
2023-03-25 15:05 [bug#62443] LLaMA.cpp Nicolas Graves via Guix-patches via
2023-03-25 15:32 ` [bug#62443] [PATCH 1/3] gnu: Add sentencepiece Nicolas Graves via Guix-patches via
2023-03-25 15:32   ` [bug#62443] [PATCH 2/3] gnu: Add python-sentencepiece Nicolas Graves via Guix-patches via
2023-03-25 15:32   ` Nicolas Graves via Guix-patches via [this message]
2023-04-08 12:07 ` [bug#62443] LLaMA.cpp Nicolas Goaziou

Reply instructions:

You may reply publicly to this message via plain-text email
using any one of the following methods:

* Save the following mbox file, import it into your mail client,
  and reply-to-all from there: mbox

  Avoid top-posting and favor interleaved quoting:
  https://en.wikipedia.org/wiki/Posting_style#Interleaved_style

  List information: https://guix.gnu.org/

* Reply using the --to, --cc, and --in-reply-to
  switches of git-send-email(1):

  git send-email \
    --in-reply-to=20230325153220.26027-3-ngraves@ngraves.fr \
    --to=guix-patches@gnu.org \
    --cc=62443@debbugs.gnu.org \
    --cc=ngraves@ngraves.fr \
    /path/to/YOUR_REPLY

  https://kernel.org/pub/software/scm/git/docs/git-send-email.html

* If your mail client supports setting the In-Reply-To header
  via mailto: links, try the mailto: link
Be sure your reply has a Subject: header at the top and a blank line before the message body.
Code repositories for project(s) associated with this public inbox

	https://git.savannah.gnu.org/cgit/guix.git

This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).