@@ -293,8 +293,6 @@ if importlib.util.find_spec('numba') is not None:
     try:
         list(numba.cuda.gpus)
         from numba import cuda
-        from numba.core import config
-        config.CUDA_LOW_OCCUPANCY_WARNINGS = False
     except CudaSupportError:
         log.warn('Cuda unavailable. Falling back to pure Python.')
         cuda = MockCuda()