
Commit

Merge pull request #211 from rakuri255/show_gpu
show gpu name and vram
rakuri255 authored Dec 26, 2024
2 parents 46d10ba + 40b452f commit 2451d9c
Showing 3 changed files with 25 additions and 6 deletions.
ReleaseNotes.md (3 changes: 2 additions & 1 deletion)
@@ -1,4 +1,4 @@
-# Version: 0.0.13dev3
+# Version: 0.0.13dev4
 Date: 2024.12.26
 - Changes:
 - Download Cover from MusicBrainz
@@ -8,6 +8,7 @@ Date: 2024.12.26
 - Improved arguments, so you dont need to use true or false
 - Added interactive mode
 - Use user defined ffmpeg path
+- Show GPU Name and VRAM

 # Version: 0.0.12
 Date: 2024.12.19
src/Settings.py (2 changes: 1 addition & 1 deletion)
@@ -11,7 +11,7 @@
 @dataclass
 class Settings:

-    APP_VERSION = "0.0.13-dev3"
+    APP_VERSION = "0.0.13-dev4"
     CONFIDENCE_THRESHOLD = 0.6
     CONFIDENCE_PROMPT_TIMEOUT = 4

src/modules/DeviceDetection/device_detection.py (26 changes: 22 additions & 4 deletions)
@@ -14,22 +14,40 @@ def check_gpu_support() -> tuple[bool, bool]:

     print(f"{ULTRASINGER_HEAD} Checking GPU support.")

+    pytorch_gpu_supported = __check_pytorch_support()
+    tensorflow_gpu_supported = __check_tensorflow_support()
+
+    return 'cuda' if tensorflow_gpu_supported else 'cpu', 'cuda' if pytorch_gpu_supported else 'cpu'
+
+
+def __check_tensorflow_support():
     tensorflow_gpu_supported = False
     gpus = tf.config.list_physical_devices('GPU')
     if gpus:
         tensorflow_gpu_supported = True
         print(f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - using {red_highlighted('cuda')} gpu.")
     else:
-        print(f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - there are no {red_highlighted('cuda')} devices available -> Using {red_highlighted('cpu')}.")
+        print(
+            f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - there are no {red_highlighted('cuda')} devices available -> Using {red_highlighted('cpu')}.")
         if os.name == 'nt':
-            print(f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - versions above 2.10 dropped GPU support for Windows, refer to the readme for possible solutions.")
+            print(
+                f"{ULTRASINGER_HEAD} {blue_highlighted('tensorflow')} - versions above 2.10 dropped GPU support for Windows, refer to the readme for possible solutions.")
+    return tensorflow_gpu_supported
+

+def __check_pytorch_support():
     pytorch_gpu_supported = torch.cuda.is_available()
     if not pytorch_gpu_supported:
         print(
             f"{ULTRASINGER_HEAD} {blue_highlighted('pytorch')} - there are no {red_highlighted('cuda')} devices available -> Using {red_highlighted('cpu')}."
         )
     else:
+        gpu_name = torch.cuda.get_device_name(0)
+        gpu_properties = torch.cuda.get_device_properties(0)
+        gpu_vram = round(gpu_properties.total_memory / 1024 ** 3, 2)  # Convert bytes to GB and round to 2 decimal places
+        print(f"{ULTRASINGER_HEAD} Found GPU: {blue_highlighted(gpu_name)} VRAM: {blue_highlighted(gpu_vram)} GB.")
+        if gpu_vram < 6:
+            print(
+                f"{ULTRASINGER_HEAD} {red_highlighted('GPU VRAM is less than 6GB. Program may crash due to insufficient memory.')}")
         print(f"{ULTRASINGER_HEAD} {blue_highlighted('pytorch')} - using {red_highlighted('cuda')} gpu.")
-
-    return 'cuda' if tensorflow_gpu_supported else 'cpu', 'cuda' if pytorch_gpu_supported else 'cpu'
+    return pytorch_gpu_supported
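For context, the VRAM figure printed by the new __check_pytorch_support helper comes from torch.cuda.get_device_properties(0).total_memory, which reports the device's total memory in bytes; dividing by 1024 ** 3 converts that to gibibytes. Below is a minimal standalone sketch of the same query, assuming only that PyTorch is installed. The report_gpu name, the plain print messages (ULTRASINGER_HEAD and the highlight helpers are omitted), and the returned 'cuda'/'cpu' string are illustrative and not part of the commit; the 6 GB threshold mirrors the warning added here.

import torch


def report_gpu() -> str:
    """Report GPU name and VRAM, mirroring the check this commit adds to UltraSinger."""
    if not torch.cuda.is_available():
        print("pytorch - there are no cuda devices available -> Using cpu.")
        return "cpu"

    gpu_name = torch.cuda.get_device_name(0)
    total_bytes = torch.cuda.get_device_properties(0).total_memory
    gpu_vram = round(total_bytes / 1024 ** 3, 2)  # bytes -> gibibytes, printed as GB like the commit does
    print(f"Found GPU: {gpu_name} VRAM: {gpu_vram} GB.")

    if gpu_vram < 6:  # same threshold the commit warns about
        print("GPU VRAM is less than 6GB. Program may crash due to insufficient memory.")
    return "cuda"


if __name__ == "__main__":
    print(f"Selected device: {report_gpu()}")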
