1 parent 668760c commit f006ab7
src/anomalib/engine/accelerator/xpu.py
@@ -12,7 +12,10 @@
 class XPUAccelerator(Accelerator):
     """Support for a XPU, optimized for large-scale machine learning."""

-    accelerator_name = "xpu"
+    @property
+    def name(self) -> str:
+        """Setting the name of the accelerator which is required for accelerators by pytorch-lightning >= 2.5.6."""
+        return "xpu"

     @staticmethod
     def setup_device(device: torch.device) -> None:
@@ -59,7 +62,7 @@ def teardown(self) -> None:


 AcceleratorRegistry.register(
-    XPUAccelerator.accelerator_name,
+    XPUAccelerator().name,
     XPUAccelerator,
     description="Accelerator supports XPU devices",
 )
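
To illustrate the pattern this commit adopts, here is a minimal, self-contained Python sketch. ToyAccelerator and ToyRegistry are hypothetical stand-ins, not the real pytorch-lightning AcceleratorRegistry or anomalib XPUAccelerator; the sketch only mirrors the registration call shown in the diff, where the name comes from a read-only property on an instance instead of a class attribute.

# Hypothetical stand-ins for illustration only; not the real
# pytorch-lightning AcceleratorRegistry or anomalib XPUAccelerator.


class ToyAccelerator:
    """Mimics the updated XPUAccelerator: the name is a read-only property."""

    @property
    def name(self) -> str:
        """Name under which the accelerator is registered and looked up."""
        return "xpu"


class ToyRegistry(dict):
    """Mimics a name-keyed accelerator registry."""

    def register(self, name: str, accelerator_cls: type, description: str = "") -> None:
        """Store the accelerator class under the given name."""
        self[name] = {"accelerator": accelerator_cls, "description": description}


registry = ToyRegistry()

# Mirrors the updated call in the diff: the registration key is read from an
# instance property (ToyAccelerator().name) rather than from a class attribute.
registry.register(ToyAccelerator().name, ToyAccelerator, description="toy XPU support")

assert "xpu" in registry and registry["xpu"]["accelerator"] is ToyAccelerator

Note that a property cannot be read directly off the class (XPUAccelerator.name would return the property descriptor rather than the string), which is why the registration call in the diff instantiates the class first.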