diff --git a/misc/py-llama-cpp-python/Makefile b/misc/py-llama-cpp-python/Makefile
index 23c50dd516d5..4e6f3573f767 100644
--- a/misc/py-llama-cpp-python/Makefile
+++ b/misc/py-llama-cpp-python/Makefile
@@ -1,42 +1,41 @@
 PORTNAME=	llama-cpp-python
 DISTVERSIONPREFIX=	v
-DISTVERSION=	0.3.16
-PORTREVISION=	1
+DISTVERSION=	0.3.20
 CATEGORIES=	misc # machine-learning
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
 
 MAINTAINER=	yuri@FreeBSD.org
 COMMENT=	Python bindings for the llama.cpp library
 WWW=		https://llama-cpp-python.readthedocs.io/en/latest/ \
		https://github.com/abetlen/llama-cpp-python
 
 LICENSE=	MIT
 LICENSE_FILE=	${WRKSRC}/LICENSE.md
 
 BUILD_DEPENDS=	${PYTHON_PKGNAMEPREFIX}scikit-build-core>=0.9.2:devel/py-scikit-build-core@${PY_FLAVOR} \
		cmake:devel/cmake-core
 LIB_DEPENDS=	libvulkan.so:graphics/vulkan-loader
 RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}diskcache>=5.6.1:devel/py-diskcache@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}Jinja2>=2.11.3:devel/py-Jinja2@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}numpy1>=1.16:math/py-numpy1@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}typing-extensions>=4.5.0:devel/py-typing-extensions@${PY_FLAVOR}
 RUN_DEPENDS+=	${PYTHON_PKGNAMEPREFIX}fastapi>=0.100.0:www/py-fastapi@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pydantic-settings>=2.0.1:devel/py-pydantic-settings@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}sse-starlette>=1.6.1:www/py-sse-starlette@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}starlette-context>=0.3.6:www/py-starlette-context@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}uvicorn>=0.22.0:www/py-uvicorn@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pyyaml>=5.1:devel/py-pyyaml@${PY_FLAVOR}
 TEST_DEPENDS=	${PYTHON_PKGNAMEPREFIX}httpx>=0.24.1:www/py-httpx@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}huggingface-hub>=0.23.0:misc/py-huggingface-hub@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}scipy>=1.10:science/py-scipy@${PY_FLAVOR}
 
 USES=		python shebangfix
 USE_PYTHON=	pep517 autoplist pytest
 
 USE_GITHUB=	yes
 GH_ACCOUNT=	abetlen
-GH_TUPLE=	ggerganov:llama.cpp:b6598:cpp/vendor/llama.cpp
+GH_TUPLE=	ggerganov:llama.cpp:f49e917:cpp/vendor/llama.cpp
 
 SHEBANG_GLOB=	*.py
 
 .include <bsd.port.mk>
diff --git a/misc/py-llama-cpp-python/distinfo b/misc/py-llama-cpp-python/distinfo
index 7f4849c553a3..3dc559d95177 100644
--- a/misc/py-llama-cpp-python/distinfo
+++ b/misc/py-llama-cpp-python/distinfo
@@ -1,5 +1,5 @@
-TIMESTAMP = 1758915707
-SHA256 (abetlen-llama-cpp-python-v0.3.16_GH0.tar.gz) = d6ae5a6ac40dda4d14c6bb8f5e9504d28f442cf810263661d457c948c386f2a4
-SIZE (abetlen-llama-cpp-python-v0.3.16_GH0.tar.gz) = 279565
-SHA256 (ggerganov-llama.cpp-b6598_GH0.tar.gz) = cd296792f49695bd44e885a5c1fcefe4ef72f3da7f8933be1378f944116515a3
-SIZE (ggerganov-llama.cpp-b6598_GH0.tar.gz) = 25833773
+TIMESTAMP = 1775551031
+SHA256 (abetlen-llama-cpp-python-v0.3.20_GH0.tar.gz) = 2e4a39603059cca4eb8630136253ab668ca245ed51d486969e778b69f45603ee
+SIZE (abetlen-llama-cpp-python-v0.3.20_GH0.tar.gz) = 286693
+SHA256 (ggerganov-llama.cpp-f49e917_GH0.tar.gz) = 9b683589792de24a79042b5b232451c5697ef64dfe0f8ca0bc77347538223877
+SIZE (ggerganov-llama.cpp-f49e917_GH0.tar.gz) = 29723031
diff --git a/misc/py-llama-cpp-python/files/patch-vendor_llama.cpp_ggml_src_ggml-cpu_CMakeLists.txt b/misc/py-llama-cpp-python/files/patch-vendor_llama.cpp_ggml_src_ggml-cpu_CMakeLists.txt
index e39877694888..15b68b26763e 100644
--- a/misc/py-llama-cpp-python/files/patch-vendor_llama.cpp_ggml_src_ggml-cpu_CMakeLists.txt
+++ b/misc/py-llama-cpp-python/files/patch-vendor_llama.cpp_ggml_src_ggml-cpu_CMakeLists.txt
@@ -1,18 +1,19 @@
---- vendor/llama.cpp/ggml/src/ggml-cpu/CMakeLists.txt.orig	2025-12-30 12:29:09 UTC
+--- vendor/llama.cpp/ggml/src/ggml-cpu/CMakeLists.txt.orig	2026-04-03 02:35:15 UTC
 +++ vendor/llama.cpp/ggml/src/ggml-cpu/CMakeLists.txt
-@@ -387,15 +387,6 @@ function(ggml_add_cpu_backend_variant_impl tag_name)
+@@ -396,16 +396,6 @@ function(ggml_add_cpu_backend_variant_impl tag_name)
+         string(TOUPPER "${POWER10_M}" POWER10_M_UPPER)
          string(REGEX MATCHALL "POWER *([0-9]+)" MATCHED_STRING "${POWER10_M_UPPER}")
          string(REGEX REPLACE "POWER *([0-9]+)" "\\1" EXTRACTED_NUMBER "${MATCHED_STRING}")
- 
+-
 -        if (EXTRACTED_NUMBER GREATER_EQUAL 10)
--            list(APPEND ARCH_FLAGS -mcpu=power10 -mpowerpc64)
+-            list(APPEND ARCH_FLAGS -mcpu=power10)
 -        elseif (EXTRACTED_NUMBER EQUAL 9)
--            list(APPEND ARCH_FLAGS -mcpu=power9 -mpowerpc64)
+-            list(APPEND ARCH_FLAGS -mcpu=power9)
 -        elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "ppc64le")
 -            list(APPEND ARCH_FLAGS -mcpu=powerpc64le -mtune=native)
 -        else()
 -            list(APPEND ARCH_FLAGS -mcpu=native -mtune=native -mpowerpc64)
 -        endif()
      elseif(GGML_CPU_ALL_VARIANTS)
          # Begin with the lowest baseline
          set(ARCH_DEFINITIONS "")