git: 9652f95ce8e4 - main - misc/py-opt-einsum: remove duplicate port

From: Antoine Brodin <antoine@FreeBSD.org>
Date: Mon, 18 Aug 2025 21:03:38 UTC
The branch main has been updated by antoine:

URL: https://cgit.FreeBSD.org/ports/commit/?id=9652f95ce8e452a0929165a7a2ac03a95a0422b2

commit 9652f95ce8e452a0929165a7a2ac03a95a0422b2
Author:     Antoine Brodin <antoine@FreeBSD.org>
AuthorDate: 2025-08-18 21:02:54 +0000
Commit:     Antoine Brodin <antoine@FreeBSD.org>
CommitDate: 2025-08-18 21:02:54 +0000

    misc/py-opt-einsum: remove duplicate port
    
    Revert "misc/py-opt-einsum: New port: Optimized Einsum: A tensor contraction order optimizer"
    
    This reverts commit 8d4f6872f4ab415d384f83a769a1deabdf2a441c.
---
 misc/Makefile                |  1 -
 misc/py-opt-einsum/Makefile  | 26 --------------------------
 misc/py-opt-einsum/distinfo  |  3 ---
 misc/py-opt-einsum/pkg-descr |  9 ---------
 4 files changed, 39 deletions(-)

diff --git a/misc/Makefile b/misc/Makefile
index a2353c07faf5..4d3466e4b671 100644
--- a/misc/Makefile
+++ b/misc/Makefile
@@ -498,7 +498,6 @@
     SUBDIR += py-openpaperwork-core
     SUBDIR += py-openpaperwork-gtk
     SUBDIR += py-openvdb
-    SUBDIR += py-opt-einsum
     SUBDIR += py-optuna
     SUBDIR += py-oterm
     SUBDIR += py-palettable
diff --git a/misc/py-opt-einsum/Makefile b/misc/py-opt-einsum/Makefile
deleted file mode 100644
index afc1a532cd9d..000000000000
--- a/misc/py-opt-einsum/Makefile
+++ /dev/null
@@ -1,26 +0,0 @@
-PORTNAME=	opt-einsum
-DISTVERSION=	3.4.0
-CATEGORIES=	misc python # machine-learning
-MASTER_SITES=	PYPI
-PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
-DISTNAME=	${PORTNAME:S/-/_/}-${PORTVERSION}
-
-MAINTAINER=	yuri@FreeBSD.org
-COMMENT=	Optimized Einsum: A tensor contraction order optimizer
-WWW=		https://github.com/dgasmith/opt_einsum
-
-LICENSE=	MIT
-LICENSE_FILE=	${WRKSRC}/LICENSE
-
-BUILD_DEPENDS=	${PYTHON_PKGNAMEPREFIX}hatch-fancy-pypi-readme>=0:devel/py-hatch-fancy-pypi-readme@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}hatch-vcs>0:devel/py-hatch-vcs@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}hatchling>0:devel/py-hatchling@${PY_FLAVOR}
-
-USES=		python
-USE_PYTHON=	pep517 autoplist pytest
-
-NO_ARCH=	yes
-
-# tests as of 3.4.0: 7736 passed, 155 skipped in 76.00s (0:01:16)
-
-.include <bsd.port.mk>
diff --git a/misc/py-opt-einsum/distinfo b/misc/py-opt-einsum/distinfo
deleted file mode 100644
index 856c1d93e171..000000000000
--- a/misc/py-opt-einsum/distinfo
+++ /dev/null
@@ -1,3 +0,0 @@
-TIMESTAMP = 1755493435
-SHA256 (opt_einsum-3.4.0.tar.gz) = 96ca72f1b886d148241348783498194c577fa30a8faac108586b14f1ba4473ac
-SIZE (opt_einsum-3.4.0.tar.gz) = 63004
diff --git a/misc/py-opt-einsum/pkg-descr b/misc/py-opt-einsum/pkg-descr
deleted file mode 100644
index 7e1e65cc14a1..000000000000
--- a/misc/py-opt-einsum/pkg-descr
+++ /dev/null
@@ -1,9 +0,0 @@
-Optimized einsum can significantly reduce the overall execution time of
-einsum-like expressions (e.g., np.einsum, dask.array.einsum, pytorch.einsum,
-tensorflow.einsum, ) by optimizing the expression's contraction order and
-dispatching many operations to canonical BLAS, cuBLAS, or other specialized
-routines.
-
-Optimized einsum is agnostic to the backend and can handle NumPy, Dask, PyTorch,
-Tensorflow, CuPy, Sparse, Theano, JAX, and Autograd arrays as well as
-potentially any library which conforms to a standard API.
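For readers unfamiliar with the package this duplicate port provided, the removed pkg-descr above describes a contraction-order optimizer for einsum-like expressions. Below is a minimal sketch of how opt_einsum (version 3.4.0, as packaged here) is typically used, based on its documented contract/contract_path API; the array shapes and the 'ij,jk,kl->il' expression are illustrative assumptions, not taken from the port:

    # Minimal usage sketch for opt_einsum; shapes and the expression
    # below are illustrative assumptions.
    import numpy as np
    import opt_einsum as oe

    a = np.random.rand(64, 32)
    b = np.random.rand(32, 128)
    c = np.random.rand(128, 16)

    # contract() evaluates the einsum-like expression, but first picks a
    # pairwise contraction order and dispatches the pairwise products to
    # BLAS (or another backend-appropriate routine) where possible.
    result = oe.contract('ij,jk,kl->il', a, b, c)

    # contract_path() only plans the contraction; the returned info object
    # summarizes the chosen order and the estimated cost versus a naive
    # single-shot einsum.
    path, info = oe.contract_path('ij,jk,kl->il', a, b, c)
    print(info)

The same contract() call accepts Dask, PyTorch, TensorFlow, CuPy, JAX, and other array types, which is the backend-agnostic behavior the pkg-descr refers to.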