Added flag to disable TensorFlow using GPU
Added since, on Linux, TensorFlow seems to attempt to use the GPU by default
parent de5d6c1ab0
commit 6928fdace5
@@ -22,6 +22,16 @@
             "module": "wyzely_detect",
             // "justMyCode": true
             "justMyCode": false
-        }
+        },
+        {
+            "name": "Debug --help",
+            "type": "python",
+            "request": "launch",
+            "module": "wyzely_detect",
+            "args": [
+                "--help"
+            ],
+            "justMyCode": false
+        },
     ]
 }
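For reference, the new "Debug --help" launch configuration just debugs the package's help output; a rough Python equivalent of what it runs (a sketch, assuming wyzely_detect is executable as a module, as the config implies):

# Sketch (not part of the commit): roughly what the "Debug --help" configuration runs
import runpy
import sys

sys.argv = ["wyzely_detect", "--help"]  # argparse reads the flag from sys.argv
runpy.run_module("wyzely_detect", run_name="__main__")  # prints the help text, then exits via SystemExit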
@@ -1,6 +1,6 @@
 # import face_recognition
 from pathlib import Path
+import os
 import cv2
 
 # import hjson as json
@@ -30,12 +30,22 @@ def main():
     # https://github.com/ultralytics/ultralytics/issues/3084#issuecomment-1732433168
     # Currently, I have been unable to set up Poetry to use GPU for Torch
     for i in range(torch.cuda.device_count()):
-        print(torch.cuda.get_device_properties(i).name)
+        print(f'Using {torch.cuda.get_device_properties(i).name} for pytorch')
     if torch.cuda.is_available():
         torch.cuda.set_device(0)
         print("Set CUDA device")
     else:
         print("No CUDA device available, using CPU")
+    # Seems automatically, deepface (tensorflow) tried to use my GPU on Pop!_OS (I did not set up cudnn or anything)
+    # Not sure the best way, in Poetry, to manage GPU libraries so for now, just use CPU
+    if args.force_disable_tensorflow_gpu:
+        print("Forcing tensorflow to use CPU")
+        import tensorflow as tf
+        tf.config.set_visible_devices([], 'GPU')
+        if tf.config.experimental.list_logical_devices('GPU'):
+            print('GPU disabled unsuccessfully')
+        else:
+            print("GPU disabled successfully")
+
     model = YOLO("yolov8n.pt")
 
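A note on the block above: tf.config.set_visible_devices([], 'GPU') only takes effect if it runs before TensorFlow initializes its GPU devices (it raises a RuntimeError afterwards), which is why the commit hides the GPU right after importing tensorflow and before any model is loaded. A minimal standalone sketch of the same check, assuming TensorFlow 2.x:

# Sketch (not part of the commit): restrict TensorFlow to the CPU and verify
import tensorflow as tf

tf.config.set_visible_devices([], "GPU")  # must run before any op initializes the GPUs
if tf.config.list_logical_devices("GPU"):
    print("GPU is still visible to TensorFlow")
else:
    print("TensorFlow is restricted to the CPU")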
@@ -72,7 +72,15 @@ def set_argparse():
         action="store_true",
         help="Don't display the video feed",
     )
+    video_options.add_argument(
+        '-c',
+        '--force-disable-tensorflow-gpu',
+        default=os.environ["FORCE_DISABLE_TENSORFLOW_GPU"]
+        if "FORCE_DISABLE_TENSORFLOW_GPU" in os.environ and os.environ["FORCE_DISABLE_TENSORFLOW_GPU"] != ""
+        else False,
+        action="store_true",
+        help="Force disable tensorflow GPU through env since sometimes it's not worth it to install cudnn and whatnot",
+    )
+
     notifcation_services = argparser.add_argument_group("Notification Services")
     notifcation_services.add_argument(
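With this hunk, the option can be enabled either as -c / --force-disable-tensorflow-gpu on the command line or by setting the FORCE_DISABLE_TENSORFLOW_GPU environment variable to any non-empty value (the env value becomes the argparse default, so a non-empty string is treated as truthy). A small isolated sketch of that env-driven default pattern, using a hypothetical standalone parser:

# Sketch (not part of the commit): how the env-driven default above resolves
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument(
    "-c",
    "--force-disable-tensorflow-gpu",
    action="store_true",
    default=os.environ["FORCE_DISABLE_TENSORFLOW_GPU"]
    if os.environ.get("FORCE_DISABLE_TENSORFLOW_GPU", "") != ""
    else False,
)
args = parser.parse_args([])  # no CLI flag given: the env-derived default is used
print(bool(args.force_disable_tensorflow_gpu))

So FORCE_DISABLE_TENSORFLOW_GPU=true python -m wyzely_detect and python -m wyzely_detect -c should both end up forcing the CPU path.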