From 66904a5c84633f9b6b6aca081798e83987c12a4e Mon Sep 17 00:00:00 2001
From: Blaise
Date: Wed, 4 Dec 2024 09:35:59 +0100
Subject: [PATCH] remove mps & xpu acceleration

---
 rvc/configs/config.py | 13 -------------
 1 file changed, 13 deletions(-)

diff --git a/rvc/configs/config.py b/rvc/configs/config.py
index e6490936d..ed5447e1d 100644
--- a/rvc/configs/config.py
+++ b/rvc/configs/config.py
@@ -2,7 +2,6 @@
 import json
 import os
 
-
 version_config_paths = [
     os.path.join("v1", "32000.json"),
     os.path.join("v1", "40000.json"),
@@ -46,14 +45,6 @@ def load_config_json(self) -> dict:
                 configs[config_file] = json.load(f)
         return configs
 
-    def has_mps(self) -> bool:
-        # Check if Metal Performance Shaders are available - for macOS 12.3+.
-        return torch.backends.mps.is_available()
-
-    def has_xpu(self) -> bool:
-        # Check if XPU is available.
-        return hasattr(torch, "xpu") and torch.xpu.is_available()
-
     def set_precision(self, precision):
         if precision not in ["fp32", "fp16"]:
             raise ValueError("Invalid precision type. Must be 'fp32' or 'fp16'.")
@@ -109,10 +100,6 @@ def get_precision(self):
     def device_config(self) -> tuple:
         if self.device.startswith("cuda"):
             self.set_cuda_config()
-        elif self.has_mps():
-            self.device = "mps"
-            self.is_half = False
-            self.set_precision("fp32")
         else:
             self.device = "cpu"
             self.is_half = False
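
Note (not part of the patch): after this change, `device_config()` only distinguishes CUDA from CPU; any device string that does not start with `"cuda"` falls back to CPU with fp32 precision, since the `has_mps()`/`has_xpu()` checks and the MPS branch are removed. The sketch below illustrates that simplified flow. The class and method names mirror `rvc/configs/config.py`, but the method bodies are simplified assumptions for illustration, not the repository's actual implementation.

```python
import torch


class DeviceConfigSketch:
    """Hypothetical, simplified stand-in for the Config class after this patch."""

    def __init__(self, device: str = "cuda:0"):
        self.device = device
        self.is_half = False

    def set_precision(self, precision: str) -> None:
        # Placeholder for the real set_precision(), which rewrites the JSON configs.
        self.is_half = precision == "fp16"

    def set_cuda_config(self) -> None:
        # Assumption for this sketch: any available CUDA GPU may run half precision.
        self.is_half = True

    def device_config(self) -> tuple:
        if self.device.startswith("cuda") and torch.cuda.is_available():
            self.set_cuda_config()
        else:
            # MPS/XPU branches were removed by the patch: everything else is CPU + fp32.
            self.device = "cpu"
            self.is_half = False
            self.set_precision("fp32")
        return self.device, self.is_half


# On a machine without CUDA (including Apple Silicon), this now prints ("cpu", False).
print(DeviceConfigSketch().device_config())
```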