Commit 40574c0 (1 parent: 8bf568b)
Re-organize SLL ops, pt 1

Summary:
Pull Request resolved: #3642
X-link: facebookresearch/FBGEMM#718
Differential Revision: D68915217

Showing 6 changed files with 795 additions and 743 deletions.
@@ -0,0 +1,21 @@
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# pyre-strict


from fbgemm_gpu.sll.triton.multi_head_jagged_flash_attention import (  # noqa F401
    multi_head_jagged_flash_attention,
    MultiHeadJaggedFlashAttention,
)

op_registrations = {
    "sll_multi_head_jagged_flash_attention": {
        "CUDA": multi_head_jagged_flash_attention,
        "AutogradCUDA": multi_head_jagged_flash_attention,
    },
}
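
For context, a registration table like this maps an op name to one implementation per dispatch key ("CUDA" for the forward kernel, "AutogradCUDA" for the autograd-aware wrapper). The sketch below shows how such a table could be consumed with torch.library; it is illustrative only, not code from this commit, and the "fbgemm" namespace plus the assumption that the op schemas are already declared elsewhere are mine.

import torch

# Hypothetical consumer of op_registrations: bind each Python
# implementation to its dispatch key on the matching operator.
# Assumes the ops were already declared (lib.define) under the
# "fbgemm" namespace in another module.
lib = torch.library.Library("fbgemm", "FRAGMENT")

for op_name, backends in op_registrations.items():
    for dispatch_key, fn in backends.items():
        lib.impl(op_name, fn, dispatch_key)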
@@ -0,0 +1,16 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# pyre-unsafe

import torch


def expect_contiguous(x: torch.Tensor) -> torch.Tensor:
    if not x.is_contiguous():
        return x.contiguous()
    else:
        return x
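
expect_contiguous is a small guard for kernels that require contiguous memory: it returns the input unchanged when it is already contiguous and makes a contiguous copy otherwise, so the copy cost is paid only when needed. A minimal usage sketch (the tensors here are illustrative, not from this commit):

import torch

x = torch.randn(4, 8).t()   # transposed view: not contiguous
assert not x.is_contiguous()

y = expect_contiguous(x)    # non-contiguous input: a copy is made
assert y.is_contiguous()

z = expect_contiguous(y)    # already contiguous: returned as-is
assert z is y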