git: b1657d5c89cf - main - misc/llama-cpp: update 5195 → 5280

From: Yuri Victorovich <yuri@FreeBSD.org>
Date: Mon, 05 May 2025 07:40:51 UTC
The branch main has been updated by yuri:

URL: https://cgit.FreeBSD.org/ports/commit/?id=b1657d5c89cf14b056596478207f91098f0f9295

commit b1657d5c89cf14b056596478207f91098f0f9295
Author:     Yuri Victorovich <yuri@FreeBSD.org>
AuthorDate: 2025-05-05 07:39:00 +0000
Commit:     Yuri Victorovich <yuri@FreeBSD.org>
CommitDate: 2025-05-05 07:40:43 +0000

    misc/llama-cpp: update 5195 → 5280
    
    Reported by:    portscout
---
 misc/llama-cpp/Makefile  | 2 +-
 misc/llama-cpp/distinfo  | 6 +++---
 misc/llama-cpp/pkg-plist | 1 -
 3 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/misc/llama-cpp/Makefile b/misc/llama-cpp/Makefile
index cbe71548ca64..c62a1970cbd2 100644
--- a/misc/llama-cpp/Makefile
+++ b/misc/llama-cpp/Makefile
@@ -1,6 +1,6 @@
 PORTNAME=	llama-cpp
 DISTVERSIONPREFIX=	b
-DISTVERSION=	5195
+DISTVERSION=	5280
 CATEGORIES=	misc # machine-learning
 
 MAINTAINER=	yuri@FreeBSD.org
diff --git a/misc/llama-cpp/distinfo b/misc/llama-cpp/distinfo
index 72c808d600d5..6b55ce1adb3b 100644
--- a/misc/llama-cpp/distinfo
+++ b/misc/llama-cpp/distinfo
@@ -1,5 +1,5 @@
-TIMESTAMP = 1745716899
-SHA256 (ggerganov-llama.cpp-b5195_GH0.tar.gz) = 9dee0d0e9a645d232415e1d2b252fd3938f11357b430d268da17bd17db668d95
-SIZE (ggerganov-llama.cpp-b5195_GH0.tar.gz) = 21069357
+TIMESTAMP = 1746426075
+SHA256 (ggerganov-llama.cpp-b5280_GH0.tar.gz) = e067328dcab70d3c0ac23b3121293771f87ff05dfad746e6db9239825823fd0d
+SIZE (ggerganov-llama.cpp-b5280_GH0.tar.gz) = 21100077
 SHA256 (nomic-ai-kompute-4565194_GH0.tar.gz) = 95b52d2f0514c5201c7838348a9c3c9e60902ea3c6c9aa862193a212150b2bfc
 SIZE (nomic-ai-kompute-4565194_GH0.tar.gz) = 13540496
diff --git a/misc/llama-cpp/pkg-plist b/misc/llama-cpp/pkg-plist
index 929a283b5c72..fb952a574b2e 100644
--- a/misc/llama-cpp/pkg-plist
+++ b/misc/llama-cpp/pkg-plist
@@ -26,7 +26,6 @@ bin/convert_hf_to_gguf.py
 %%EXAMPLES%%bin/llama-passkey
 %%EXAMPLES%%bin/llama-perplexity
 %%EXAMPLES%%bin/llama-quantize
-%%EXAMPLES%%bin/llama-qwen2vl-cli
 %%EXAMPLES%%bin/llama-retrieval
 %%EXAMPLES%%bin/llama-run
 %%EXAMPLES%%bin/llama-save-load-state
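
For reference, a minimal sketch of the usual ports-update workflow that produces a change like this one (assuming a writable checkout of the ports tree; the exact steps are not stated in the commit itself):

    cd misc/llama-cpp
    # bump DISTVERSION in the Makefile (5195 -> 5280), then:
    make makesum              # fetch the new distfile and regenerate distinfo
    make stage check-plist    # build into the stage dir and compare it against pkg-plist

A check-plist run against b5280 would flag bin/llama-qwen2vl-cli as listed in pkg-plist but no longer installed, which is why that line is dropped here.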