blob: f2b2a8469f8f1eb20db27c7ae8dcb8b6ce74ddfe [file] [log] [blame]
import torch
from importlib.util import find_spec
def amp_definitely_not_available():
    """Return True when automatic mixed precision cannot possibly be used.

    AMP requires either a CUDA device or the ``torch_xla`` package; if
    neither is present, callers can skip AMP setup entirely.
    """
    has_cuda = torch.cuda.is_available()
    # find_spec returns a ModuleSpec (truthy) when torch_xla is importable,
    # or None when it is not installed.
    has_xla = find_spec('torch_xla') is not None
    return not has_cuda and not has_xla