Made GPU capability toggleable

slashtechno 2024-02-11 14:27:05 -06:00
parent 401c5cee16
commit 1a09004e3f
Signed by: slashtechno
GPG Key ID: 8EC1D9D9286C2B17
3 changed files with 9 additions and 6 deletions

View File

@@ -49,6 +49,7 @@ This assumes you have Python 3.10 or 3.11 installed
#### Poetry
1. `poetry install`
+    a. For GPU support, use `poetry install -E cuda --with gpu`
2. `poetry run -- wyzely-detect`
### Configuration
The following are some basic CLI options. Most flags have environment variable equivalents which can be helpful when using Docker.
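A quick way to confirm that the GPU-enabled install took effect is to query both frameworks from inside the Poetry environment (e.g. `poetry run python`). A minimal sketch, assuming `torch` and `tensorflow` import cleanly:

```python
# Sketch: check whether the GPU-capable builds can actually see a GPU.
# Run inside the project's Poetry environment, e.g. `poetry run python`.
import torch
import tensorflow as tf

# False for the CPU-only PyTorch wheel, or when no NVIDIA driver/GPU is present
print("PyTorch CUDA available:", torch.cuda.is_available())

# An empty list means TensorFlow does not see a usable GPU
print("TensorFlow GPUs:", tf.config.list_physical_devices("GPU"))
```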

View File

@@ -22,6 +22,7 @@ hjson = "^3.1.0"
numpy = "^1.23.2"
# https://github.com/python-poetry/poetry/issues/6409#issuecomment-1911735833
+# To install with GPU, use poetry install -E cuda --with gpu
torch = {version = "^2.1.2", source = "pytorch-cpu", markers = "extra!='cuda'" }
# https://stackoverflow.com/a/76477590/18270659
@@ -33,21 +34,22 @@ torch = {version = "^2.1.2", source = "pytorch-cpu", markers = "extra!='cuda'" }
# cuDNN version - 8.8.1
# Installed from Nvidia website - nvidia-cuda-toolkit is not installed, but default PopOS drivers are installed
tensorflow-io-gcs-filesystem = "0.31.0"
-# So this wasn't working on Windows, so unless there's a way to optionally install this, we'll install it without and-cuda
-# tensorflow = {version = "^2.14.0", extras = ["and-cuda"]}
-tensorflow = {version = "^2.14.0"}
+tensorflow = {version = "^2.14.0", markers = "extra!='cuda'"}
deepface = "^0.0.79"
prettytable = "^3.9.0"

-[tool.poetry.group.remote]
+[tool.poetry.group.gpu]
optional = true

-[tool.poetry.group.remote.dependencies]
+[tool.poetry.group.gpu.dependencies]
torch = {version = "^2.1.2", source = "pytorch-cu121", markers = "extra=='cuda'"}
+tensorflow = {version = "^2.14.0", extras = ["and-cuda"], markers = "extra=='cuda'"}

[tool.poetry.extras]
+# Might be better to rename this to nocpu since it's more accurate
cuda = []

[[tool.poetry.source]]
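The intent of the markers above is that a plain `poetry install` resolves the `pytorch-cpu` torch wheel and plain `tensorflow`, while `poetry install -E cuda --with gpu` pulls the `pytorch-cu121` wheel and `tensorflow[and-cuda]`. As a hedged sketch of how to tell which variant actually landed in the environment (the printed values are illustrative):

```python
# Sketch: inspect build metadata to see which wheel variants were installed.
import torch
import tensorflow as tf

# None for the pytorch-cpu wheel; a version string such as "12.1" for the cu121 wheel
print("torch built against CUDA:", torch.version.cuda)

# Reports how this TensorFlow wheel was built; the and-cuda extra additionally
# installs the NVIDIA runtime libraries as pip packages
build_info = tf.sysconfig.get_build_info()
print("tensorflow is_cuda_build:", build_info.get("is_cuda_build"))
print("tensorflow cuda_version:", build_info.get("cuda_version"))
```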

View File

@@ -2,7 +2,7 @@
from pathlib import Path
import cv2
-from PrettyTable import PrettyTable
+from prettytable import PrettyTable
# import hjson as json
import torch
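The last hunk fixes the import to use the real module name, `prettytable` (lowercase); the class it exports is `PrettyTable`. A minimal usage sketch with made-up table contents:

```python
from prettytable import PrettyTable  # module name is lowercase; the class is PrettyTable

# Hypothetical data, only to show the API shape
table = PrettyTable()
table.field_names = ["Camera", "Faces detected"]
table.add_row(["front-door", 2])
print(table)
```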