We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 3dd3392 · commit df69839 (Copy full SHA for df69839)
examples/modeling/modeling_doge.py
@@ -45,9 +45,9 @@
45
from .configuration_doge import DogeConfig
46
47
try:
48
- from flash_dmattn.integrations.flash_dynamic_mask_attention import flash_dynamic_mask_attention_forward
+ from flash_sparse_attn.integrations.flash_sparse_attention import flash_dynamic_mask_attention_forward
49
except ImportError:
50
- print("Please install flash_dmattn to use this model: pip install flash-dmattn")
+ print("Please install flash_sparse_attn to use this model: pip install flash-sparse-attn")
51
52
if is_torch_flex_attn_available():
53
from torch.nn.attention.flex_attention import BlockMask
0 commit comments