from setuptools import setup, find_packages

setup(
    name="wersa",
    version="0.1.0",
    author="Vincenzo Dentamaro",
    description="WERSA Attention for Hugging Face Transformers",
    long_description="A package providing the WERSA attention mechanism for building custom Transformer models.",
    packages=find_packages(),
    install_requires=[
        "torch>=1.9",
        "transformers>=4.20.0",
    ],
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.8',
)
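
# Installation sketch (assumes this setup.py sits at the repository root):
#   pip install .       # standard install of the wersa package
#   pip install -e .    # editable install for local development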