forked from zzd1992/FlashWindowAttention
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: setup.py
More file actions
29 lines (28 loc) · 821 Bytes
/
setup.py
File metadata and controls
29 lines (28 loc) · 821 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
"""Packaging script for flash_swin_attn.

Builds/installs the package that speeds up the attention computation of
Swin Transformer. Run e.g. `pip install .` from the repository root.
"""
from pathlib import Path

from setuptools import setup, find_packages


def _read_long_description() -> str:
    """Return the contents of README.md (UTF-8), or "" if it is missing.

    Using pathlib closes the file deterministically and pins the encoding;
    the fallback keeps installation working when README.md is absent.
    """
    readme = Path(__file__).resolve().parent / "README.md"
    try:
        return readme.read_text(encoding="utf-8")
    except FileNotFoundError:
        return ""


setup(
    name='flash_swin_attn',
    version='0.1.0',
    packages=find_packages(),
    install_requires=[
        'torch',
        'einops',
        'triton'
    ],
    entry_points={
        'console_scripts': [
            # Add command line scripts here
        ],
    },
    author='Zhendong Zhang',
    author_email='zhd.zhang.ai@gmail.com',
    description='Speedup the attention computation of Swin Transformer',
    long_description=_read_long_description(),
    long_description_content_type='text/markdown',
    url='https://github.com/zzd1992/FlashSwinAttention',
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
    ],
    python_requires='>=3.6',
)