Bump version to 0.2.1

Tri Dao 2022-11-20 22:35:59 -08:00
parent 0fa5c0d7ef
commit 054816177e
2 changed files with 2 additions and 2 deletions


@@ -14,7 +14,7 @@ from apex.transformer.tensor_parallel.utils import VocabUtility
 # `all_gather_into_tensor` and `reduce_scatter_tensor` are new placeholders for
 # `_all_gather_base` and `_reduce_scatter_base`. They require the most recent
-# version of PyTorch. The following 4 lines are for backward comparability with
+# version of PyTorch. The following 4 lines are for backward compatibility with
 # older PyTorch.
 if "all_gather_into_tensor" not in dir(torch.distributed):
     torch.distributed.all_gather_into_tensor = torch.distributed._all_gather_base
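Note: the hunk shows only the first two of the "4 lines" the comment mentions. Given the symmetric naming in the comment, the full shim presumably pairs the `all_gather` alias with a matching `reduce_scatter` alias; the second half below is inferred from the comment, not visible in this diff:

    import torch
    import torch.distributed

    # On older PyTorch the new collective names do not exist yet, so alias
    # them to the old private implementations; call sites can then use the
    # new names uniformly regardless of the PyTorch version.
    if "all_gather_into_tensor" not in dir(torch.distributed):
        torch.distributed.all_gather_into_tensor = torch.distributed._all_gather_base
    if "reduce_scatter_tensor" not in dir(torch.distributed):
        torch.distributed.reduce_scatter_tensor = torch.distributed._reduce_scatter_base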


@@ -152,7 +152,7 @@ ext_modules.append(
 setup(
     name="flash_attn",
-    version="0.2.0",
+    version="0.2.1",
     packages=find_packages(
         exclude=("build", "csrc", "include", "tests", "dist", "docs", "benchmarks", "flash_attn.egg-info",)
     ),
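As a quick sanity check, the version declared in setup() can be read back at runtime once the package is built and installed; a minimal sketch using the standard library (assumes this 0.2.1 build is what's installed):

    from importlib.metadata import version

    # Should print "0.2.1" for an install built from this commit.
    print(version("flash_attn"))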