diff --git a/.vscode/launch.json b/.vscode/launch.json
index 66be8e7..f7701bf 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -22,6 +22,16 @@
             "module": "wyzely_detect",
             // "justMyCode": true
             "justMyCode": false
-        }
+        },
+        {
+            "name": "Debug --help",
+            "type": "python",
+            "request": "launch",
+            "module": "wyzely_detect",
+            "args": [
+                "--help"
+            ],
+            "justMyCode": false
+        },
     ]
 }
\ No newline at end of file
diff --git a/wyzely_detect/__main__.py b/wyzely_detect/__main__.py
index 2dce50b..7f6f1bd 100644
--- a/wyzely_detect/__main__.py
+++ b/wyzely_detect/__main__.py
@@ -1,6 +1,6 @@
 # import face_recognition
 from pathlib import Path
-
+import os
 import cv2
 
 # import hjson as json
@@ -30,12 +30,22 @@ def main():
     # https://github.com/ultralytics/ultralytics/issues/3084#issuecomment-1732433168
     # Currently, I have been unable to set up Poetry to use GPU for Torch
     for i in range(torch.cuda.device_count()):
-        print(torch.cuda.get_device_properties(i).name)
+        print(f'Using {torch.cuda.get_device_properties(i).name} for pytorch')
     if torch.cuda.is_available():
         torch.cuda.set_device(0)
         print("Set CUDA device")
     else:
         print("No CUDA device available, using CPU")
 
+    # It seems deepface (tensorflow) automatically tried to use my GPU on Pop!_OS (I did not set up cudnn or anything)
+    # Not sure of the best way to manage GPU libraries in Poetry, so for now, just use the CPU
+    if args.force_disable_tensorflow_gpu:
+        print("Forcing tensorflow to use CPU")
+        import tensorflow as tf
+        tf.config.set_visible_devices([], 'GPU')
+        if tf.config.experimental.list_logical_devices('GPU'):
+            print('GPU disabled unsuccessfully')
+        else:
+            print("GPU disabled successfully")
 
     model = YOLO("yolov8n.pt")
diff --git a/wyzely_detect/utils/cli_args.py b/wyzely_detect/utils/cli_args.py
index dcf2766..b18d20f 100644
--- a/wyzely_detect/utils/cli_args.py
+++ b/wyzely_detect/utils/cli_args.py
@@ -72,7 +72,15 @@ def set_argparse():
         action="store_true",
         help="Don't display the video feed",
     )
-
+    video_options.add_argument(
+        '-c',
+        '--force-disable-tensorflow-gpu',
+        default=os.environ["FORCE_DISABLE_TENSORFLOW_GPU"]
+        if "FORCE_DISABLE_TENSORFLOW_GPU" in os.environ and os.environ["FORCE_DISABLE_TENSORFLOW_GPU"] != ""
+        else False,
+        action="store_true",
+        help="Force disable tensorflow GPU since sometimes it's not worth it to install cudnn and whatnot",
+    )
 
     notifcation_services = argparser.add_argument_group("Notification Services")
     notifcation_services.add_argument(
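
Usage note (not part of the patch): with the changes above, TensorFlow can be forced onto the CPU either by passing the new flag (python -m wyzely_detect --force-disable-tensorflow-gpu, or -c) or by setting the FORCE_DISABLE_TENSORFLOW_GPU environment variable to a non-empty value. The snippet below is a minimal standalone sketch of the same GPU-hiding technique the patch adds to __main__.py; it assumes only that TensorFlow 2.x is installed and is not part of the wyzely_detect codebase.

    # Minimal sketch of the GPU-hiding technique used in the patch (assumes TensorFlow 2.x).
    import tensorflow as tf

    # Hide all GPUs from TensorFlow; this must run before TensorFlow initializes any GPU,
    # otherwise set_visible_devices raises a RuntimeError.
    tf.config.set_visible_devices([], "GPU")

    # An empty logical-device list confirms TensorFlow will fall back to the CPU.
    if tf.config.list_logical_devices("GPU"):
        print("GPU is still visible to TensorFlow")
    else:
        print("GPU disabled successfully")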