pkgsrc-WIP-changes archive
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index][Old Index]
wip/llama.cpp: Update to 0.0.2.3173
Module Name: pkgsrc-wip
Committed By: Ryo ONODERA <ryoon%NetBSD.org@localhost>
Pushed By: ryoon
Date: Wed Jun 19 01:01:46 2024 +0900
Changeset: ea9be36e93629ebd89d4dfa8d891e5ba680ee05b
Modified Files:
llama.cpp/Makefile
llama.cpp/PLIST
llama.cpp/distinfo
Log Message:
wip/llama.cpp: Update to 0.0.2.3173
* Enable blas backend.
* Disable build and installation of tests.
* Python scripts require additional dependencies; these are not packaged yet.
You can use this package with preconverted models in GGUF.
To see a diff of this commit:
https://wip.pkgsrc.org/cgi-bin/gitweb.cgi?p=pkgsrc-wip.git;a=commitdiff;h=ea9be36e93629ebd89d4dfa8d891e5ba680ee05b
Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.
diffstat:
llama.cpp/Makefile | 13 ++++++---
llama.cpp/PLIST | 84 +++++++++++++++++++++++-------------------------------
llama.cpp/distinfo | 6 ++--
3 files changed, 47 insertions(+), 56 deletions(-)
diffs:
diff --git a/llama.cpp/Makefile b/llama.cpp/Makefile
index 0fc35cac86..22c8780582 100644
--- a/llama.cpp/Makefile
+++ b/llama.cpp/Makefile
@@ -1,24 +1,29 @@
# $NetBSD$
-DISTNAME= llama.cpp-b3091
+DISTNAME= llama.cpp-${GITHUB_TAG}
PKGNAME= ${DISTNAME:S/-b/-0.0.2./}
CATEGORIES= devel
MASTER_SITES= ${MASTER_SITE_GITHUB:=ggerganov/}
-GITHUB_TAG= b3091
+GITHUB_TAG= b3173
MAINTAINER= pkgsrc-users%NetBSD.org@localhost
HOMEPAGE= https://github.com/ggerganov/llama.cpp/
COMMENT= LLM inference in C/C++
LICENSE= mit
-#WRKSRC= ${WRKDIR}/llama.cpp-b3091
USE_TOOLS+= pkg-config
USE_LANGUAGES= c c++
-USE_TOOLS+= gmake
+
+BLAS_C_INTERFACE= yes
PKGCONFIG_OVERRIDE+= cmake/llama.pc.in
REPLACE_PYTHON+= *.py */*.py */*/*.py
+CMAKE_CONFIGURE_ARGS+= -DLLAMA_BLAS=1
+CMAKE_CONFIGURE_ARGS+= -DBLAS_LIBRARIES=${CBLAS_LIBS:Q}
+CMAKE_CONFIGURE_ARGS+= -DLLAMA_BUILD_TESTS=no
+
.include "../../devel/cmake/build.mk"
.include "../../lang/python/application.mk"
+.include "../../mk/blas.buildlink3.mk"
.include "../../mk/bsd.pkg.mk"
diff --git a/llama.cpp/PLIST b/llama.cpp/PLIST
index ebd9640327..f8a1813576 100644
--- a/llama.cpp/PLIST
+++ b/llama.cpp/PLIST
@@ -1,55 +1,41 @@
@comment $NetBSD$
-bin/baby-llama
-bin/batched
-bin/batched-bench
-bin/benchmark
bin/convert-hf-to-gguf.py
-bin/convert-llama2c-to-ggml
-bin/embedding
-bin/eval-callback
-bin/export-lora
-bin/finetune
-bin/gguf
-bin/gguf-split
-bin/gritlm
-bin/imatrix
-bin/infill
+bin/llama-baby-llama
+bin/llama-batched
+bin/llama-batched-bench
bin/llama-bench
-bin/llava-cli
-bin/lookahead
-bin/lookup
-bin/lookup-create
-bin/lookup-merge
-bin/lookup-stats
-bin/main
-bin/parallel
-bin/passkey
-bin/perplexity
-bin/quantize
-bin/quantize-stats
-bin/retrieval
-bin/save-load-state
-bin/server
-bin/simple
-bin/speculative
-bin/test-autorelease
-bin/test-backend-ops
-bin/test-chat-template
-bin/test-grad0
-bin/test-grammar-integration
-bin/test-grammar-parser
-bin/test-json-schema-to-grammar
-bin/test-llama-grammar
-bin/test-model-load-cancel
-bin/test-quantize-fns
-bin/test-quantize-perf
-bin/test-rope
-bin/test-sampling
-bin/test-tokenizer-0
-bin/test-tokenizer-1-bpe
-bin/test-tokenizer-1-spm
-bin/tokenize
-bin/train-text-from-scratch
+bin/llama-bench-matmult
+bin/llama-cli
+bin/llama-convert-llama2c-to-ggml
+bin/llama-cvector-generator
+bin/llama-embedding
+bin/llama-eval-callback
+bin/llama-export-lora
+bin/llama-finetune
+bin/llama-gbnf-validator
+bin/llama-gguf
+bin/llama-gguf-split
+bin/llama-gritlm
+bin/llama-imatrix
+bin/llama-infill
+bin/llama-llava-cli
+bin/llama-lookahead
+bin/llama-lookup
+bin/llama-lookup-create
+bin/llama-lookup-merge
+bin/llama-lookup-stats
+bin/llama-parallel
+bin/llama-passkey
+bin/llama-perplexity
+bin/llama-quantize
+bin/llama-quantize-stats
+bin/llama-retrieval
+bin/llama-save-load-state
+bin/llama-server
+bin/llama-simple
+bin/llama-speculative
+bin/llama-tokenize
+bin/llama-train-text-from-scratch
include/ggml-alloc.h
include/ggml-backend.h
include/ggml.h
diff --git a/llama.cpp/distinfo b/llama.cpp/distinfo
index f929c3a057..8211b500bd 100644
--- a/llama.cpp/distinfo
+++ b/llama.cpp/distinfo
@@ -1,5 +1,5 @@
$NetBSD$
-BLAKE2s (llama.cpp-b3091.tar.gz) = 34af707155ae44c9417ce85b102a432ed665062d87ec6ad36b039268272031f8
-SHA512 (llama.cpp-b3091.tar.gz) = f2dac9b3663dd272c74f7ff9e50b63ddb489d5566a8a2e71102b1ad0b61454221cd0bd2dcce40eca1050f2236bc4ded76916c34371f0375649c3920126665bda
-Size (llama.cpp-b3091.tar.gz) = 20541244 bytes
+BLAKE2s (llama.cpp-b3173.tar.gz) = 74cd3a8c11a8def5f213bdb1afc209abacf89cd39522ca71bf12a6275a9909ac
+SHA512 (llama.cpp-b3173.tar.gz) = acde4758d08f4be9fafa570fcc9ab6700c556381242548a02163c01eaeac30a3680d9f097a8d4b92dcc2783049f80d4647f1c6fe4964c1ddf02303d4e3dc6abf
+Size (llama.cpp-b3173.tar.gz) = 20592194 bytes
Home |
Main Index |
Thread Index |
Old Index