From 40b452fd68bd36beb6bf065c04014de048568e75 Mon Sep 17 00:00:00 2001
From: Vadim
Date: Thu, 26 Dec 2024 01:10:52 +0100
Subject: [PATCH] show gpu name and vram

---
 ReleaseNotes.md                              |  3 ++-
 src/Settings.py                              |  2 +-
 .../DeviceDetection/device_detection.py      | 26 ++++++++++++++++---
 3 files changed, 25 insertions(+), 6 deletions(-)

diff --git a/ReleaseNotes.md b/ReleaseNotes.md
index 8ec5c00..63ffb37 100644
--- a/ReleaseNotes.md
+++ b/ReleaseNotes.md
@@ -1,4 +1,4 @@
-# Version: 0.0.13dev3
+# Version: 0.0.13dev4
 Date: 2024.12.26
 - Changes:
   - Download Cover from MusicBrainz
@@ -8,6 +8,7 @@ Date: 2024.12.26
   - Improved arguments, so you dont need to use true or false
   - Added interactive mode
   - Use user defined ffmpeg path
+  - Show GPU Name and VRAM
 
 # Version: 0.0.12
 Date: 2024.12.19
diff --git a/src/Settings.py b/src/Settings.py
index 2ec467b..fcc52e0 100644
--- a/src/Settings.py
+++ b/src/Settings.py
@@ -11,7 +11,7 @@
 
 @dataclass
 class Settings:
-    APP_VERSION = "0.0.13-dev3"
+    APP_VERSION = "0.0.13-dev4"
 
     CONFIDENCE_THRESHOLD = 0.6
     CONFIDENCE_PROMPT_TIMEOUT = 4
diff --git a/src/modules/DeviceDetection/device_detection.py b/src/modules/DeviceDetection/device_detection.py
index fcfe0d2..2466b6a 100644
--- a/src/modules/DeviceDetection/device_detection.py
+++ b/src/modules/DeviceDetection/device_detection.py
@@ -14,22 +14,40 @@
 def check_gpu_support() -> tuple[bool, bool]:
     print(f"{ULTRASINGER_HEAD} Checking GPU support.")
 
+    pytorch_gpu_supported = __check_pytorch_support()
+    tensorflow_gpu_supported = __check_tensorflow_support()
+
+    return 'cuda' if tensorflow_gpu_supported else 'cpu', 'cuda' if pytorch_gpu_supported else 'cpu'
+
+
+def __check_tensorflow_support():
     tensorflow_gpu_supported = False
     gpus = tf.config.list_physical_devices('GPU')
     if gpus:
         tensorflow_gpu_supported = True
         print(f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - using {red_highlighted('cuda')} gpu.")
     else:
-        print(f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - there are no {red_highlighted('cuda')} devices available -> Using {red_highlighted('cpu')}.")
+        print(
+            f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - there are no {red_highlighted('cuda')} devices available -> Using {red_highlighted('cpu')}.")
         if os.name == 'nt':
-            print(f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - versions above 2.10 dropped GPU support for Windows, refer to the readme for possible solutions.")
+            print(
+                f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - versions above 2.10 dropped GPU support for Windows, refer to the readme for possible solutions.")
+    return tensorflow_gpu_supported
+
 
+def __check_pytorch_support():
     pytorch_gpu_supported = torch.cuda.is_available()
     if not pytorch_gpu_supported:
         print(
             f"{ULTRASINGER_HEAD} {blue_highlighted('pytorch')} - there are no {red_highlighted('cuda')} devices available -> Using {red_highlighted('cpu')}."
         )
     else:
+        gpu_name = torch.cuda.get_device_name(0)
+        gpu_properties = torch.cuda.get_device_properties(0)
+        gpu_vram = round(gpu_properties.total_memory / 1024 ** 3, 2)  # Convert bytes to GB and round to 2 decimal places
+        print(f"{ULTRASINGER_HEAD} Found GPU: {blue_highlighted(gpu_name)} VRAM: {blue_highlighted(gpu_vram)} GB.")
+        if gpu_vram < 6:
+            print(
+                f"{ULTRASINGER_HEAD} {red_highlighted('GPU VRAM is less than 6GB. Program may crash due to insufficient memory.')}")
         print(f"{ULTRASINGER_HEAD} {blue_highlighted('pytorch')} - using {red_highlighted('cuda')} gpu.")
-
-    return 'cuda' if tensorflow_gpu_supported else 'cpu', 'cuda' if pytorch_gpu_supported else 'cpu'
+    return pytorch_gpu_supported