From 8f18fcc00aaa6232c103765eb8c55f04d7b79ada Mon Sep 17 00:00:00 2001
From: Louis Maddox
Date: Sat, 17 Feb 2024 11:58:09 +0000
Subject: [PATCH 1/2] fix: URL typo in repo metadata (closes #21)

---
 inference_lib/setup.cfg | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/inference_lib/setup.cfg b/inference_lib/setup.cfg
index 292b8760..8eaa4fff 100644
--- a/inference_lib/setup.cfg
+++ b/inference_lib/setup.cfg
@@ -6,9 +6,9 @@ author_email = vahe527887@yandex.ru
 description = Efficiently run models quantized with AQLM
 long_description = file: README.md
 long_description_content_type = text/markdown
-url = https://github.com/Vage1994/AQLM
+url = https://github.com/Vahe1994/AQLM
 project_urls =
-    Bug Tracker = https://github.com/Vage1994/AQLM/issues
+    Bug Tracker = https://github.com/Vahe1994/AQLM/issues
 classifiers =
     Development Status :: 4 - Beta
     Intended Audience :: Developers

From 522bd99c68f0fe6261c9663985738e15f132f322 Mon Sep 17 00:00:00 2001
From: Louis Maddox
Date: Sat, 17 Feb 2024 11:59:11 +0000
Subject: [PATCH 2/2] fix: relax `transformers` pin to lower bound (closes #22)

---
 inference_lib/setup.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/inference_lib/setup.cfg b/inference_lib/setup.cfg
index 8eaa4fff..293fbd8b 100644
--- a/inference_lib/setup.cfg
+++ b/inference_lib/setup.cfg
@@ -32,7 +32,7 @@ include_package_data = True
 python_requires = >=3.10
 install_requires =
     torch>=2.1.1
-    transformers==4.37.0
+    transformers>=4.37.0
 [options.extras_require]
 gpu =
     triton>=2.1