[package - 133i386-default][misc/llama-cpp] Failed for llama-cpp-3593 in build

From: <pkg-fallout@FreeBSD.org>
Date: Sat, 17 Aug 2024 04:52:07 UTC
You are receiving this mail because a port that you maintain
is failing to build on the FreeBSD package build server.
Please investigate the failure and submit a PR to fix the
build.

Maintainer:     yuri@FreeBSD.org
Log URL:        https://pkg-status.freebsd.org/beefy15/data/133i386-default/1963f6d23107/logs/llama-cpp-3593.log
Build URL:      https://pkg-status.freebsd.org/beefy15/build.html?mastername=133i386-default&build=1963f6d23107
Log:

=>> Building misc/llama-cpp
build started at Sat Aug 17 04:51:14 UTC 2024
port directory: /usr/ports/misc/llama-cpp
package name: llama-cpp-3593
building for: FreeBSD 133i386-default-job-02 13.3-RELEASE-p5 FreeBSD 13.3-RELEASE-p5 i386
maintained by: yuri@FreeBSD.org
Makefile datestamp: -rw-r--r--  1 root  wheel  1116 Aug 17 01:01 /usr/ports/misc/llama-cpp/Makefile
Ports top last git commit: 1963f6d2310
Ports top unclean checkout: no
Port dir last git commit: 784e78e960e
Port dir unclean checkout: no
Poudriere version: poudriere-git-3.4.1-30-g79e3edcd
Host OSVERSION: 1500019
Jail OSVERSION: 1303001
Job Id: 02

---Begin Environment---
SHELL=/bin/csh
BLOCKSIZE=K
MAIL=/var/mail/root
MM_CHARSET=UTF-8
LANG=C.UTF-8
OSVERSION=1303001
STATUS=1
HOME=/root
PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin
MAKE_OBJDIR_CHECK_WRITABLE=0
UNAME_m=i386
UNAME_p=i386
UNAME_r=13.3-RELEASE-p5
LOCALBASE=/usr/local
UNAME_v=FreeBSD 13.3-RELEASE-p5
USER=root
POUDRIERE_NAME=poudriere-git
LIBEXECPREFIX=/usr/local/libexec/poudriere
POUDRIERE_VERSION=3.4.1-30-g79e3edcd
MASTERMNT=/usr/local/poudriere/data/.m/133i386-default/ref
LC_COLLATE=C
POUDRIERE_BUILD_TYPE=bulk
PACKAGE_BUILDING=yes
SAVED_TERM=
OUTPUT_REDIRECTED_STDERR=4
OUTPUT_REDIRECTED=1
PWD=/usr/local/poudriere/data/.m/133i386-default/02/.p
OUTPUT_REDIRECTED_STDOUT=3
P_PORTS_FEATURES=FLAVORS SUBPACKAGES SELECTED_OPTIONS
MASTERNAME=133i386-default
SCRIPTPREFIX=/usr/local/share/poudriere
SCRIPTNAME=bulk.sh
OLDPWD=/usr/local/poudriere/data/.m/133i386-default/ref/.p/pool
POUDRIERE_PKGNAME=poudriere-git-3.4.1-30-g79e3edcd
SCRIPTPATH=/usr/local/share/poudriere/bulk.sh
POUDRIEREPATH=/usr/local/bin/poudriere
---End Environment---

---Begin Poudriere Port Flags/Env---
PORT_FLAGS=
PKGENV=
FLAVOR=
MAKE_ARGS=
---End Poudriere Port Flags/Env---

---Begin OPTIONS List---
===> The following configuration options are available for llama-cpp-3593:
     EXAMPLES=on: Build and/or install examples
     VULKAN=on: Vulkan GPU offload support
===> Use 'make config' to modify these settings
---End OPTIONS List---
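
[Analysis: the VULKAN option shown above is what enables the Vulkan backend
whose compilation fails later in this log. As a hedged local workaround (not
the maintainer's fix), the option can be switched off before rebuilding; the
per-port knob name below follows the usual ports options convention:

    # interactive: deselect VULKAN in the options dialog
    make -C /usr/ports/misc/llama-cpp config
    # or non-interactively, in /etc/make.conf:
    # misc_llama-cpp_UNSET= VULKAN
]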

--MAINTAINER--
yuri@FreeBSD.org
--End MAINTAINER--

--CONFIGURE_ARGS--

--End CONFIGURE_ARGS--

--CONFIGURE_ENV--
PYTHON="/usr/local/bin/python3.11" XDG_DATA_HOME=/wrkdirs/usr/ports/misc/llama-cpp/work  XDG_CONFIG_HOME=/wrkdirs/usr/ports/misc/llama-cpp/work  XDG_CACHE_HOME=/wrkdirs/usr/ports/misc/llama-cpp/work/.cache  HOME=/wrkdirs/usr/ports/misc/llama-cpp/work TMPDIR="/tmp" PATH=/wrkdirs/usr/ports/misc/llama-cpp/work/.bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin PKG_CONFIG_LIBDIR=/wrkdirs/usr/ports/misc/llama-cpp/work/.pkgconfig:/usr/local/libdata/pkgconfig:/usr/local/share/pkgconfig:/usr/libdata/pkgconfig SHELL=/bin/sh CONFIG_SHELL=/bin/sh
--End CONFIGURE_ENV--

--MAKE_ENV--
NINJA_STATUS="[%p %s/%t] " XDG_DATA_HOME=/wrkdirs/usr/ports/misc/llama-cpp/work  XDG_CONFIG_HOME=/wrkdirs/usr/ports/misc/llama-cpp/work  XDG_CACHE_HOME=/wrkdirs/usr/ports/misc/llama-cpp/work/.cache  HOME=/wrkdirs/usr/ports/misc/llama-cpp/work TMPDIR="/tmp" PATH=/wrkdirs/usr/ports/misc/llama-cpp/work/.bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin PKG_CONFIG_LIBDIR=/wrkdirs/usr/ports/misc/llama-cpp/work/.pkgconfig:/usr/local/libdata/pkgconfig:/usr/local/share/pkgconfig:/usr/libdata/pkgconfig MK_DEBUG_FILES=no MK_KERNEL_SYMBOLS=no SHELL=/bin/sh NO_LINT=YES DESTDIR=/wrkdirs/usr/ports/misc/llama-cpp/work/stage PREFIX=/usr/local  LOCALBASE=/usr/local  CC="cc" CFLAGS="-O2 -pipe  -fstack-protector-strong -fno-strict-aliasing "  CPP="cpp" CPPFLAGS=""  LDFLAGS=" -fstack-protector-strong " LIBS=""  CXX="c++" CXXFLAGS="-O2 -pipe -fstack-protector-strong -fno-strict-aliasing  " BSD_INSTALL_PROGRAM="install  -s -m 555"  BSD_INSTALL_LIB="install  -s -m 0644"  B
SD_INSTALL_SCRIPT="install  -m 555"  BSD_INSTALL_DATA="install  -m 0644"  BSD_INSTALL_MAN="install  -m 444"
--End MAKE_ENV--

--PLIST_SUB--
PORTEXAMPLES="" EXAMPLES="" NO_EXAMPLES="@comment " VULKAN="" NO_VULKAN="@comment " CMAKE_BUILD_TYPE="release" PYTHON_INCLUDEDIR=include/python3.11  PYTHON_LIBDIR=lib/python3.11  PYTHON_PLATFORM=freebsd13  PYTHON_SITELIBDIR=lib/python3.11/site-packages  PYTHON_SUFFIX=311  PYTHON_EXT_SUFFIX=.cpython-311  PYTHON_VER=3.11  PYTHON_VERSION=python3.11 PYTHON2="@comment " PYTHON3="" OSREL=13.3 PREFIX=%D LOCALBASE=/usr/local  RESETPREFIX=/usr/local LIB32DIR=lib DOCSDIR="share/doc/llama-cpp"  EXAMPLESDIR="share/examples/llama-cpp"  DATADIR="share/llama-cpp"  WWWDIR="www/llama-cpp"  ETCDIR="etc/llama-cpp"
--End PLIST_SUB--

--SUB_LIST--
 EXAMPLES="" NO_EXAMPLES="@comment " VULKAN="" NO_VULKAN="@comment " PYTHON_INCLUDEDIR=/usr/local/include/python3.11  PYTHON_LIBDIR=/usr/local/lib/python3.11  PYTHON_PLATFORM=freebsd13  PYTHON_SITELIBDIR=/usr/local/lib/python3.11/site-packages  PYTHON_SUFFIX=311  PYTHON_EXT_SUFFIX=.cpython-311  PYTHON_VER=3.11  PYTHON_VERSION=python3.11 PYTHON2="@comment " PYTHON3="" PREFIX=/usr/local LOCALBASE=/usr/local  DATADIR=/usr/local/share/llama-cpp DOCSDIR=/usr/local/share/doc/llama-cpp EXAMPLESDIR=/usr/local/share/examples/llama-cpp  WWWDIR=/usr/local/www/llama-cpp ETCDIR=/usr/local/etc/llama-cpp
--End SUB_LIST--

---Begin make.conf---
USE_PACKAGE_DEPENDS=yes
BATCH=yes
WRKDIRPREFIX=/wrkdirs
PORTSDIR=/usr/ports
PACKAGES=/packages
DISTDIR=/distfiles
PACKAGE_BUILDING=yes
PACKAGE_BUILDING_FLAVORS=yes
MACHINE=i386
MACHINE_ARCH=i386
ARCH=${MACHINE_ARCH}
####  ####
# XXX: We really need this but cannot use it while 'make checksum' does not
# try the next mirror on checksum failure.  It currently retries the same
# failed mirror and then fails rather than trying another.  It *does*
# try the next if the size is mismatched though.
#MASTER_SITE_FREEBSD=yes
# Build ALLOW_MAKE_JOBS_PACKAGES with 3 jobs
MAKE_JOBS_NUMBER=3
#### Misc Poudriere ####
.include "/etc/make.conf.ports_env"
GID=0
UID=0
---End make.conf---
--Resource limits--
cpu time               (seconds, -t)  unlimited
file size           (512-blocks, -f)  unlimited
data seg size           (kbytes, -d)  524288
stack size              (kbytes, -s)  65536
core file size      (512-blocks, -c)  unlimited
max memory size         (kbytes, -m)  unlimited
locked memory           (kbytes, -l)  unlimited
max user processes              (-u)  89999
open files                      (-n)  8192
virtual mem size        (kbytes, -v)  unlimited
swap limit              (kbytes, -w)  unlimited
socket buffer size       (bytes, -b)  unlimited
pseudo-terminals                (-p)  unlimited
kqueues                         (-k)  unlimited
umtx shared locks               (-o)  unlimited
--End resource limits--
=======================<phase: check-sanity   >============================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===>  License MIT accepted by the user
===========================================================================
=======================<phase: pkg-depends    >============================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===>   llama-cpp-3593 depends on file: /usr/local/sbin/pkg - not found
===>   Installing existing package /packages/All/pkg-1.21.3.pkg
[133i386-default-job-02] Installing pkg-1.21.3...
[133i386-default-job-02] Extracting pkg-1.21.3: .......... done
===>   llama-cpp-3593 depends on file: /usr/local/sbin/pkg - found
===>   Returning to build of llama-cpp-3593
===========================================================================
=======================<phase: fetch-depends  >============================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===========================================================================
=======================<phase: fetch          >============================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===>  License MIT accepted by the user
=> ggerganov-llama.cpp-b3593_GH0.tar.gz doesn't seem to exist in /portdistfiles/.
=> Attempting to fetch https://codeload.github.com/ggerganov/llama.cpp/tar.gz/b3593?dummy=/ggerganov-llama.cpp-b3593_GH0.tar.gz
fetch: https://codeload.github.com/ggerganov/llama.cpp/tar.gz/b3593?dummy=/ggerganov-llama.cpp-b3593_GH0.tar.gz: size unknown
fetch: https://codeload.github.com/ggerganov/llama.cpp/tar.gz/b3593?dummy=/ggerganov-llama.cpp-b3593_GH0.tar.gz: size of remote file is not known
ggerganov-llama.cpp-b3593_GH0.tar.gz                    18 MB 7862 kBps    02s
===> Fetching all distfiles required by llama-cpp-3593 for building
===========================================================================
=======================<phase: checksum       >============================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===>  License MIT accepted by the user
===> Fetching all distfiles required by llama-cpp-3593 for building
=> SHA256 Checksum OK for ggerganov-llama.cpp-b3593_GH0.tar.gz.
=> SHA256 Checksum OK for nomic-ai-kompute-4565194_GH0.tar.gz.
===========================================================================
=======================<phase: extract-depends>============================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===========================================================================
=======================<phase: extract        >============================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===>  License MIT accepted by the user
===> Fetching all distfiles required by llama-cpp-3593 for building
===>  Extracting for llama-cpp-3593
=> SHA256 Checksum OK for ggerganov-llama.cpp-b3593_GH0.tar.gz.
=> SHA256 Checksum OK for nomic-ai-kompute-4565194_GH0.tar.gz.
===========================================================================
=======================<phase: patch-depends  >============================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===========================================================================
=======================<phase: patch          >============================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===>  Patching for llama-cpp-3593
===========================================================================
=======================<phase: build-depends  >============================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===>   llama-cpp-3593 depends on executable: glslc - not found
===>   Installing existing package /packages/All/shaderc-2024.1.pkg
[133i386-default-job-02] Installing shaderc-2024.1...
[133i386-default-job-02] Extracting shaderc-2024.1: .......... done
===>   llama-cpp-3593 depends on executable: glslc - found
===>   Returning to build of llama-cpp-3593
===>   llama-cpp-3593 depends on package: vulkan-headers>0 - not found
===>   Installing existing package /packages/All/vulkan-headers-1.3.293.pkg
[133i386-default-job-02] Installing vulkan-headers-1.3.293...
[133i386-default-job-02] Extracting vulkan-headers-1.3.293: .......... done
===>   llama-cpp-3593 depends on package: vulkan-headers>0 - found
===>   Returning to build of llama-cpp-3593
===>   llama-cpp-3593 depends on file: /usr/local/bin/cmake - not found
===>   Installing existing package /packages/All/cmake-core-3.30.2.pkg
[133i386-default-job-02] Installing cmake-core-3.30.2...
[133i386-default-job-02] `-- Installing expat-2.6.2...
[133i386-default-job-02] `-- Extracting expat-2.6.2: .......... done
[133i386-default-job-02] `-- Installing jsoncpp-1.9.5...
[133i386-default-job-02] `-- Extracting jsoncpp-1.9.5: .......... done
[133i386-default-job-02] `-- Installing libuv-1.48.0...
[133i386-default-job-02] `-- Extracting libuv-1.48.0: .......... done
[133i386-default-job-02] `-- Installing rhash-1.4.4_1...
[133i386-default-job-02] |   `-- Installing gettext-runtime-0.22.5...
[133i386-default-job-02] |   | `-- Installing indexinfo-0.3.1...
[133i386-default-job-02] |   | `-- Extracting indexinfo-0.3.1: .... done
[133i386-default-job-02] |   `-- Extracting gettext-runtime-0.22.5: .......... done
[133i386-default-job-02] `-- Extracting rhash-1.4.4_1: .......... done
[133i386-default-job-02] Extracting cmake-core-3.30.2: .......... done
===>   llama-cpp-3593 depends on file: /usr/local/bin/cmake - found
===>   Returning to build of llama-cpp-3593
===>   llama-cpp-3593 depends on executable: ninja - not found
===>   Installing existing package /packages/All/ninja-1.11.1,4.pkg
[133i386-default-job-02] Installing ninja-1.11.1,4...
[133i386-default-job-02] `-- Installing python311-3.11.9_1...
[133i386-default-job-02] |   `-- Installing libffi-3.4.6...
[133i386-default-job-02] |   `-- Extracting libffi-3.4.6: .......... done
[133i386-default-job-02] |   `-- Installing mpdecimal-4.0.0...
[133i386-default-job-02] |   `-- Extracting mpdecimal-4.0.0: .......... done
[133i386-default-job-02] |   `-- Installing readline-8.2.10...
[133i386-default-job-02] |   `-- Extracting readline-8.2.10: .......... done
[133i386-default-job-02] `-- Extracting python311-3.11.9_1: .......... done
[133i386-default-job-02] Extracting ninja-1.11.1,4: ........ done
=====
Message from python311-3.11.9_1:

--
Note that some standard Python modules are provided as separate ports
as they require additional dependencies. They are available as:

py311-gdbm       databases/py-gdbm@py311
py311-sqlite3    databases/py-sqlite3@py311
py311-tkinter    x11-toolkits/py-tkinter@py311
===>   llama-cpp-3593 depends on executable: ninja - found
===>   Returning to build of llama-cpp-3593
===========================================================================
=======================<phase: lib-depends    >============================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===>   llama-cpp-3593 depends on shared library: libvulkan.so - not found
===>   Installing existing package /packages/All/vulkan-loader-1.3.293.pkg
[133i386-default-job-02] Installing vulkan-loader-1.3.293...
[133i386-default-job-02] `-- Installing libX11-1.8.9,1...
[133i386-default-job-02] |   `-- Installing libxcb-1.17.0...
[133i386-default-job-02] |   | `-- Installing libXau-1.0.11...
[133i386-default-job-02] |   | `-- Extracting libXau-1.0.11: .......... done
[133i386-default-job-02] |   | `-- Installing libXdmcp-1.1.5...
[133i386-default-job-02] |   |   `-- Installing xorgproto-2024.1...
[133i386-default-job-02] |   |   `-- Extracting xorgproto-2024.1: .......... done
[133i386-default-job-02] |   | `-- Extracting libXdmcp-1.1.5: ......... done
[133i386-default-job-02] |   `-- Extracting libxcb-1.17.0: .......... done
[133i386-default-job-02] `-- Extracting libX11-1.8.9,1: .......... done
[133i386-default-job-02] `-- Installing libXrandr-1.5.4...
[133i386-default-job-02] |   `-- Installing libXext-1.3.6,1...
[133i386-default-job-02] |   `-- Extracting libXext-1.3.6,1: .......... done
[133i386-default-job-02] |   `-- Installing libXrender-0.9.11...
[133i386-default-job-02] |   `-- Extracting libXrender-0.9.11: .......... done
[133i386-default-job-02] `-- Extracting libXrandr-1.5.4: .......... done
[133i386-default-job-02] `-- Installing wayland-1.23.0...
[133i386-default-job-02] |   `-- Installing libepoll-shim-0.0.20240608...
[133i386-default-job-02] |   `-- Extracting libepoll-shim-0.0.20240608: .......... done
[133i386-default-job-02] |   `-- Installing libxml2-2.11.8...
[133i386-default-job-02] |   `-- Extracting libxml2-2.11.8: .......... done
[133i386-default-job-02] `-- Extracting wayland-1.23.0: .......... done
[133i386-default-job-02] Extracting vulkan-loader-1.3.293: .......... done
=====
Message from wayland-1.23.0:

--
Wayland requires XDG_RUNTIME_DIR to be defined to a path that will
contain "wayland-%d" unix(4) sockets. This is usually handled by
consolekit2 (via ck-launch-session) or pam_xdg (via login).
===>   llama-cpp-3593 depends on shared library: libvulkan.so - found (/usr/local/lib/libvulkan.so)
===>   Returning to build of llama-cpp-3593
===========================================================================
=======================<phase: configure      >============================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===>  Configuring for llama-cpp-3593
===>  Performing out-of-source build
/bin/mkdir -p /wrkdirs/usr/ports/misc/llama-cpp/work/.build
-- The C compiler identification is Clang 17.0.6
-- The CXX compiler identification is Clang 17.0.6
-- Detecting C compiler ABI info
-- Detecting C compiler ABI info - done
-- Check for working C compiler: /usr/bin/cc - skipped
-- Detecting C compile features
-- Detecting C compile features - done
-- Detecting CXX compiler ABI info
-- Detecting CXX compiler ABI info - done
-- Check for working CXX compiler: /usr/bin/c++ - skipped
-- Detecting CXX compile features
-- Detecting CXX compile features - done
-- Found Git: /wrkdirs/usr/ports/misc/llama-cpp/work/.bin/git
-- Performing Test CMAKE_HAVE_LIBC_PTHREAD
-- Performing Test CMAKE_HAVE_LIBC_PTHREAD - Failed
-- Found Threads: TRUE
-- Found OpenMP_C: -fopenmp=libomp (found version "5.1")
-- Found OpenMP_CXX: -fopenmp=libomp (found version "5.1")
-- Found OpenMP: TRUE (found version "5.1")
-- OpenMP found
-- Using llamafile
-- Found Vulkan: /usr/local/lib/libvulkan.so (found version "1.3.293") found components: glslc missing components: glslangValidator
-- Vulkan found
-- Warning: ccache not found - consider installing it for faster compilation or disable this warning with GGML_CCACHE=OFF
-- CMAKE_SYSTEM_PROCESSOR: i386
-- Unknown architecture
-- Looking for pthread_create in pthreads
-- Looking for pthread_create in pthreads - not found
-- Looking for pthread_create in pthread
-- Looking for pthread_create in pthread - found
CMake Warning at common/CMakeLists.txt:30 (message):
  Git repository not found; to enable automatic generation of build info,
  make sure Git is installed and the project is a Git repository.


-- Configuring done (1.9s)
-- Generating done (0.1s)
CMake Warning:
  Manually-specified variables were not used by the project:

    BOOST_PYTHON_SUFFIX
    CMAKE_COLOR_MAKEFILE
    CMAKE_MODULE_LINKER_FLAGS
    CMAKE_VERBOSE_MAKEFILE
    FETCHCONTENT_FULLY_DISCONNECTED
    Python3_EXECUTABLE
    Python_ADDITIONAL_VERSIONS
    Python_EXECUTABLE


-- Build files have been written to: /wrkdirs/usr/ports/misc/llama-cpp/work/.build
===========================================================================
=======================<phase: build          >============================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===>  Building for llama-cpp-3593
[  0% 3/127] cd /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593 && /usr/local/bin/cmake -DMSVC= -DCMAKE_C_COMPILER_VERSION=17.0.6 -DCMAKE_C_COMPILER_ID=Clang -DCMAKE_VS_PLATFORM_NAME= -DCMAKE_C_COMPILER=/usr/bin/cc -P /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/common/cmake/build-info-gen-cpp.cmake
-- Found Git: /wrkdirs/usr/ports/misc/llama-cpp/work/.bin/git
[  1% 4/127] /usr/bin/c++   -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O2 -pipe -fstack-protector-strong -fno-strict-aliasing   -DNDEBUG -fPIC -MD -MT common/CMakeFiles/build_info.dir/build-info.cpp.o -MF common/CMakeFiles/build_info.dir/build-info.cpp.o.d -o common/CMakeFiles/build_info.dir/build-info.cpp.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/common/build-info.cpp
[  2% 5/127] /usr/bin/cc  -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples/gguf-hash/deps -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing  -DNDEBUG -MD -MT examples/gguf-hash/CMakeFiles/sha1.dir/deps/sha1/sha1.c.o -MF examples/gguf-hash/CMakeFiles/sha1.dir/deps/sha1/sha1.c.o.d -o examples/gguf-hash/CMakeFiles/sha1.dir/deps/sha1/sha1.c.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples/gguf-hash/deps/sha1/sha1.c
[  3% 6/127] /usr/bin/cc  -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples/gguf-hash/deps -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing  -DNDEBUG -MD -MT examples/gguf-hash/CMakeFiles/sha256.dir/deps/sha256/sha256.c.o -MF examples/gguf-hash/CMakeFiles/sha256.dir/deps/sha256/sha256.c.o.d -o examples/gguf-hash/CMakeFiles/sha256.dir/deps/sha256/sha256.c.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples/gguf-hash/deps/sha256/sha256.c
[  3% 6/127] /usr/bin/cc  -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples/gguf-hash/deps -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing  -DNDEBUG -MD -MT examples/gguf-hash/CMakeFiles/xxhash.dir/deps/xxhash/xxhash.c.o -MF examples/gguf-hash/CMakeFiles/xxhash.dir/deps/xxhash/xxhash.c.o.d -o examples/gguf-hash/CMakeFiles/xxhash.dir/deps/xxhash/xxhash.c.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/examples/gguf-hash/deps/xxhash/xxhash.c
[  4% 6/127] /usr/bin/c++ -DGGML_SCHED_MAX_COPIES=4 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP  -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O2 -pipe -fstack-protector-strong -fno-strict-aliasing   -DNDEBUG -std=gnu++11 -pthread -MD -MT ggml/src/vulkan-shaders/CMakeFiles/vulkan-shaders-gen.dir/vulkan-shaders-gen.cpp.o -MF ggml/src/vulkan-shaders/CMakeFiles/vulkan-shaders-gen.dir/vulkan-shaders-gen.cpp.o.d -o ggml/src/vulkan-shaders/CMakeFiles/vulkan-shaders-gen.dir/vulkan-shaders-gen.cpp.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/vulkan-shaders/vulkan-shaders-gen.cpp
[  5% 7/127] : && /usr/bin/c++ -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O2 -pipe -fstack-protector-strong -fno-strict-aliasing   -DNDEBUG -fstack-protector-strong ggml/src/vulkan-shaders/CMakeFiles/vulkan-shaders-gen.dir/vulkan-shaders-gen.cpp.o -o bin/vulkan-shaders-gen  -pthread && :
[  6% 8/127] cd /wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src && /wrkdirs/usr/ports/misc/llama-cpp/work/.build/bin/vulkan-shaders-gen --glslc /usr/local/bin/glslc --input-dir /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/vulkan-shaders --output-dir /wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src/vulkan-shaders.spv --target-hpp /wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src/ggml-vulkan-shaders.hpp --target-cpp /wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src/ggml-vulkan-shaders.cpp --no-clean
ggml_vulkan: Generating and compiling shaders to SPIR-V
[  7% 11/127] /usr/bin/cc -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -DGGML_USE_VULKAN -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_EXPORTS -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/../include -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/. -I/wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src -isystem /usr/local/include -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing  -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -pthread -fopenmp=libomp -MD -MT ggml/src/CMakeFiles/ggml.dir/ggml-alloc.c.o -MF ggml/src/CMakeFiles/ggml.dir/ggml-alloc.c.o.d -o ggml/src/CMakeFiles/ggml.dir/ggml-alloc.c.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-alloc.c
[  7% 12/127] /usr/bin/cc -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -DGGML_USE_VULKAN -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_EXPORTS -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/../include -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/. -I/wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src -isystem /usr/local/include -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing  -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -pthread -fopenmp=libomp -MD -MT ggml/src/CMakeFiles/ggml.dir/ggml-backend.c.o -MF ggml/src/CMakeFiles/ggml.dir/ggml-backend.c.o.d -o ggml/src/CMakeFiles/ggml.dir/ggml-backend.c.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-backend.c
[  8% 13/127] /usr/bin/cc -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -DGGML_USE_VULKAN -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_EXPORTS -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/../include -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/. -I/wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src -isystem /usr/local/include -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing  -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -pthread -fopenmp=libomp -MD -MT ggml/src/CMakeFiles/ggml.dir/ggml-quants.c.o -MF ggml/src/CMakeFiles/ggml.dir/ggml-quants.c.o.d -o ggml/src/CMakeFiles/ggml.dir/ggml-quants.c.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-quants.c
[  9% 14/127] /usr/bin/c++ -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -DGGML_USE_VULKAN -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_EXPORTS -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/../include -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/. -I/wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src -isystem /usr/local/include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O2 -pipe -fstack-protector-strong -fno-strict-aliasing   -DNDEBUG -std=gnu++11 -fPIC -Wmissing-declarations -Wmissing-noreturn -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi -pthread -fopenmp=libomp -MD -MT ggml/src/CMakeFiles/ggml.dir/ggml-vulkan.cpp.o -MF ggml/src/CMakeFiles/ggml.dir/ggml-vulkan.cpp.o.d -o ggml/src/CMakeFiles/ggml.dir/ggml-vulkan.cpp.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-vulkan.cpp
FAILED: ggml/src/CMakeFiles/ggml.dir/ggml-vulkan.cpp.o 
/usr/bin/c++ -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -DGGML_USE_VULKAN -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_EXPORTS -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/../include -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/. -I/wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src -isystem /usr/local/include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O2 -pipe -fstack-protector-strong -fno-strict-aliasing   -DNDEBUG -std=gnu++11 -fPIC -Wmissing-declarations -Wmissing-noreturn -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi -pthread -fopenmp=libomp -MD -MT ggml/src/CMakeFiles/ggml.dir/ggml-vulkan.cpp.o -MF ggml/src/CMakeFiles/ggml.dir/ggml-vulkan.cpp.o.d -o ggml/src/CMakeFiles/ggml.dir/ggml-vulkan.cpp.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-vulkan.cpp
/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-vulkan.cpp:2619:5: error: no matching function for call to 'vkCmdCopyBuffer'
 2619 |     vkCmdCopyBuffer(subctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy);
      |     ^~~~~~~~~~~~~~~
/usr/local/include/vulkan/vulkan_core.h:4735:28: note: candidate function not viable: no known conversion from 'vk::Buffer' to 'VkBuffer' (aka 'unsigned long long') for 2nd argument
 4735 | VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
      |                            ^
 4736 |     VkCommandBuffer                             commandBuffer,
 4737 |     VkBuffer                                    srcBuffer,
      |     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-vulkan.cpp:2694:5: error: no matching function for call to 'vkCmdCopyBuffer'
 2694 |     vkCmdCopyBuffer(subctx->s->buffer, staging_buffer->buffer, dst->buffer, 1, &buf_copy);
      |     ^~~~~~~~~~~~~~~
/usr/local/include/vulkan/vulkan_core.h:4735:28: note: candidate function not viable: no known conversion from 'vk::Buffer' to 'VkBuffer' (aka 'unsigned long long') for 2nd argument
 4735 | VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
      |                            ^
 4736 |     VkCommandBuffer                             commandBuffer,
 4737 |     VkBuffer                                    srcBuffer,
      |     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-vulkan.cpp:2826:5: error: no matching function for call to 'vkCmdCopyBuffer'
 2826 |     vkCmdCopyBuffer(ctx->s->buffer, src->buffer, dst->buffer, 1, &bc);
      |     ^~~~~~~~~~~~~~~
/usr/local/include/vulkan/vulkan_core.h:4735:28: note: candidate function not viable: no known conversion from 'vk::Buffer' to 'VkBuffer' (aka 'unsigned long long') for 2nd argument
 4735 | VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
      |                            ^
 4736 |     VkCommandBuffer                             commandBuffer,
 4737 |     VkBuffer                                    srcBuffer,
      |     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
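
[Analysis: the three errors above share a single cause. vulkan_core.h declares
the plain C entry point

    VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
        VkCommandBuffer     commandBuffer,
        VkBuffer            srcBuffer,
        VkBuffer            dstBuffer,
        uint32_t            regionCount,
        const VkBufferCopy *pRegions);

but ggml-vulkan.cpp passes vulkan-hpp wrapper objects (vk::Buffer). On 64-bit
targets vulkan.hpp normally defines VULKAN_HPP_TYPESAFE_CONVERSION, which makes
vk::Buffer implicitly convertible to the raw VkBuffer handle; on 32-bit targets
such as i386, non-dispatchable handles are plain uint64_t and that conversion
is explicit-only, so these calls no longer compile. Assuming that diagnosis, a
minimal source-level sketch of a fix (not necessarily the upstream patch) is:

    // Hedged sketch: name the conversion explicitly so it also compiles
    // where VULKAN_HPP_TYPESAFE_CONVERSION is not defined (32-bit builds).
    vkCmdCopyBuffer(subctx->s->buffer,
                    static_cast<VkBuffer>(staging->buffer),
                    static_cast<VkBuffer>(dst->buffer),
                    1, &buf_copy);

An equivalent route would be to stay inside the C++ API and call
subctx->s->buffer.copyBuffer(...) instead of the C function.]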
/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml-vulkan.cpp:6673:61: warning: format specifies type 'long' but the argument has type 'size_t' (aka 'unsigned int') [-Wformat]
 6673 |         snprintf(name, sizeof(name), "%s%ld", GGML_VK_NAME, i);
      |                                         ~~~                 ^
      |                                         %zu
1 warning and 3 errors generated.
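
[Analysis: the -Wformat warning is also i386-specific: size_t is a 32-bit
unsigned int there, while %ld expects long. The compiler's own suggested fix,
shown as a sketch:

    // %zu is the correct length modifier for size_t on every architecture
    snprintf(name, sizeof(name), "%s%zu", GGML_VK_NAME, i);

This one is only a warning and does not stop the build.]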
[ 10% 14/127] /usr/bin/c++ -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -DGGML_USE_VULKAN -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_EXPORTS -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/../include -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/. -I/wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src -isystem /usr/local/include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O2 -pipe -fstack-protector-strong -fno-strict-aliasing   -DNDEBUG -std=gnu++11 -fPIC -Wmissing-declarations -Wmissing-noreturn -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi -pthread -fopenmp=libomp -MD -MT ggml/src/CMakeFiles/ggml.dir/ggml-vulkan-shaders.cpp.o -MF ggml/src/CMakeFiles/ggml.dir/ggml-vulkan-shaders.cpp.o.d -o ggml/src/CMakeFiles/ggml.dir/ggml-vulkan-shaders.cpp.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src/ggml-vulkan-shaders.cpp
[ 11% 14/127] /usr/bin/cc -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -DGGML_USE_VULKAN -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_EXPORTS -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/../include -I/wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/. -I/wrkdirs/usr/ports/misc/llama-cpp/work/.build/ggml/src -isystem /usr/local/include -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing -O2 -pipe  -fstack-protector-strong -fno-strict-aliasing  -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -pthread -fopenmp=libomp -MD -MT ggml/src/CMakeFiles/ggml.dir/ggml.c.o -MF ggml/src/CMakeFiles/ggml.dir/ggml.c.o.d -o ggml/src/CMakeFiles/ggml.dir/ggml.c.o -c /wrkdirs/usr/ports/misc/llama-cpp/work/llama.cpp-b3593/ggml/src/ggml.c
ninja: build stopped: subcommand failed.
===> Compilation failed unexpectedly.
Try to set MAKE_JOBS_UNSAFE=yes and rebuild before reporting the failure to
the maintainer.
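
[Analysis: MAKE_JOBS_UNSAFE=yes simply forces a single make job for this port,
which helps when a failure is a parallel-build race. A local retry could look
like:

    make -C /usr/ports/misc/llama-cpp MAKE_JOBS_UNSAFE=yes clean build

Given the hard type errors in ggml-vulkan.cpp above, though, this failure is
deterministic and will not go away with a single-job build.]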
*** Error code 1

Stop.
make: stopped in /usr/ports/misc/llama-cpp