commit a90271b (1 parent: e8d38d0)
examples/modeling/modeling_doge.py
@@ -43,7 +43,14 @@
 from transformers.utils.generic import OutputRecorder, check_model_inputs
 from .configuration_doge import DogeConfig
 
-from flash_dmattn import flash_dmattn_func_auto
+try:
+    from flash_dmattn import flash_dmattn_func_auto
+except ImportError:
+    def flash_dmattn_func_auto(*args, **kwargs):
+        raise ImportError(
+            "flash_dmattn is not installed. Please install it to use flash_dmattn_func_auto. "
+            "You can install it with `pip install flash-dmattn` or consult the documentation."
+        )
 
 if is_torch_flex_attn_available():
     from torch.nn.attention.flex_attention import BlockMask
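
For context, a minimal standalone sketch of the deferred-import pattern this patch introduces: the module still imports cleanly when flash_dmattn is absent, and the ImportError surfaces only at the point where the attention function is actually called. The __main__ demo below is an illustration, not part of the commit.

try:
    from flash_dmattn import flash_dmattn_func_auto
except ImportError:
    # Fallback stub, defined only when the optional dependency is missing,
    # so importing this module never fails at import time.
    def flash_dmattn_func_auto(*args, **kwargs):
        raise ImportError(
            "flash_dmattn is not installed. Please install it to use flash_dmattn_func_auto. "
            "You can install it with `pip install flash-dmattn` or consult the documentation."
        )

if __name__ == "__main__":
    # Demo only: without flash_dmattn installed, the error is raised here,
    # at call time, rather than when the module is imported.
    try:
        flash_dmattn_func_auto()
    except ImportError as err:
        print(f"Deferred import error: {err}")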