added dashboard and cli flags

This commit is contained in:
Nickiel12 2023-09-28 18:18:13 -07:00
parent 24c4c31656
commit 0c46b9a190
2 changed files with 101 additions and 8 deletions

View file

@@ -1,6 +1,28 @@
import cv2
import numpy as np
import argparse
import sys
import datetime
def init_argparse() -> argparse.ArgumentParser:
    """Construct the command-line parser for the face-detection script.

    Returns:
        argparse.ArgumentParser: parser exposing -v/--version, -d/--dashboard,
        -o/--output, and -f/--file.
    """
    arg_parser = argparse.ArgumentParser(
        prog="FaceDetection",
        usage="%(prog)s [OPTION]",
        description="Run face localization",
    )
    arg_parser.add_argument(
        "-v",
        "--version",
        action="version",
        version=f"{arg_parser.prog} version 1.0.1",
    )
    arg_parser.add_argument(
        "-d",
        "--dashboard",
        action='store_true',
        help="Flag to enable live dashboard with statistics - requires terminal width of 90 columns or greater",
    )
    arg_parser.add_argument(
        "-o",
        "--output",
        action='store_true',
        help="show the resultant directions",
    )
    arg_parser.add_argument(
        "-f",
        "--file",
        nargs="?",
        help="File to scan instead of using the camera. Useful for generating training data",
    )
    return arg_parser
# Proportional-gain constant — presumably scales the pixel offsets computed by
# get_adjustment_amount (its body is outside this excerpt); TODO confirm.
multiplication_factor = 0.05
@@ -17,17 +39,47 @@ def get_adjustment_amount(imgSize, currentX, currentY, currentW, currentH):
return [horizontal_adjustment, vertical_adjustment]
# Module-level running statistics, displayed by draw_dashboard().
frames_searched = 1  # starts at 1, so draw_dashboard's success-rate division is never 0/0
faces_found = 0      # incremented once per face detected in the main loop
start_time = datetime.datetime.now()  # wall-clock reference for the elapsed-time readout
def draw_dashboard(keep_stat_line = False):
    """Print a one-line statistics dashboard to stdout.

    Reads the module-level counters frames_searched / faces_found and the
    start_time timestamp.

    Args:
        keep_stat_line: when True, terminate the line with a newline so it
            persists in the scrollback (used for the final summary); when
            False, terminate with "\\r" so the next call overwrites it
            in place.
    """
    global frames_searched, faces_found, start_time

    elapsed_time = datetime.datetime.now() - start_time
    hours, remainder = divmod(elapsed_time.total_seconds(), 3600)
    minutes, seconds = divmod(remainder, 60)

    f_found = f"{faces_found} Faces found".ljust(16, ' ')
    f_searched = f"{frames_searched} Frames searched".ljust(21, ' ')
    # frames_searched is initialized to 1, but guard anyway so a future reset
    # to 0 cannot crash the dashboard.
    rate = (faces_found / frames_searched) * 100 if frames_searched else 0.0
    success_rate = f"{round(rate, 1)}% Success rate".ljust(16, ' ')

    # Build the line once; the two branches of the original differed only in
    # the line terminator, so only end= varies here.
    line = (
        f"{f_found} | {f_searched} | {success_rate} | "
        f"{round(hours)}h {round(minutes)}m {round(seconds)}s elapsed"
    )
    print(line, end="\n" if keep_stat_line else "\r", flush=True)
# --- one-time setup --------------------------------------------------------
parser = init_argparse()
args = parser.parse_args()

# Open the default camera (index 0). The original passed cv2.IMREAD_GRAYSCALE
# as the second argument, but VideoCapture's second parameter is an API
# preference, not an imread flag — it only worked because IMREAD_GRAYSCALE and
# CAP_ANY are both 0. Use the correct constant explicitly.
cap = cv2.VideoCapture(0, cv2.CAP_ANY)

# Cascade classifiers used by the fallback chain in the main loop below.
# CHECK THESE PATHS FIRST WHEN TROUBLESHOOTING.
faceCascade = cv2.CascadeClassifier(r"./lbpcascade_frontalface.xml")
faceCascade_default = cv2.CascadeClassifier(r"./haarcascade_frontalface_default.xml")
faceCascade_alt = cv2.CascadeClassifier(r"./haarcascade_frontalface_alt.xml")
faceCascade_alt2 = cv2.CascadeClassifier(r"./haarcascade_frontalface_alt2.xml")
faceCascade_alttree = cv2.CascadeClassifier(r"./haarcascade_frontalface_alt_tree.xml")
profileFaceCascade = cv2.CascadeClassifier(r"./haarcascade_profileface.xml")

# Grab a single frame just to learn the capture dimensions, then discard it.
tmp, frm = cap.read()
height, width, channels = frm.shape
# print(f"{height*.25}, {width}")
del tmp, frm
# Main capture loop: read a frame, convert to grayscale, then run the cascade
# fallback chain below. Exits when 'q' is pressed (cv2.waitKey check further
# down). NOTE(review): leading indentation was lost in this diff extraction;
# restored here to the loop-body level the statements logically occupy.
while(True):
    ret, frame = cap.read()
    frames_searched += 1
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # Detect faces in the image
@@ -38,16 +90,60 @@ while(True):
    # (diff fragment) tail of the primary detectMultiScale call whose opening
    # lines fall outside this excerpt:
        minSize=(30, 30)
    )

    # Fallback chain: each retry runs only while no face has been found yet,
    # using identical parameters with a different pre-trained classifier.
    if len(faces) == 0:
        faces = faceCascade_default.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30,30)
        )
    if len(faces) == 0:
        faces = profileFaceCascade.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30,30)
        )
    if len(faces) == 0:
        faces = faceCascade_alt.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30,30)
        )
    if len(faces) == 0:
        faces = faceCascade_alt2.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30,30)
        )
    if len(faces) == 0:
        faces = faceCascade_alttree.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30,30)
        )

    # Draw a rectangle around the faces
    for (x, y, w, h) in faces:
        faces_found += 1
        adjustment_required = get_adjustment_amount([width, height], x, y, w, h)
        cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255))
        # NOTE(review): the two unconditional prints below appear to be the
        # pre-change variant captured by this diff; the args.output-guarded
        # prints that follow supersede them — confirm against the full file.
        print(f"Adjust right: {adjustment_required[0]}")
        print(f"Adjust up : {adjustment_required[1]}")
        if args.output:
            print(f"Adjust right: {adjustment_required[0]}".ljust(90, ' '), flush=True)
            print(f"Adjust up : {adjustment_required[1]}", flush=True)

    cv2.imshow('frame', frame)

    # Refresh the in-place dashboard line when the -d flag was given.
    if args.dashboard:
        draw_dashboard()

    # 'q' exits the loop.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

# Emit one final, persistent stats line, then release the camera.
draw_dashboard(keep_stat_line=True)
cap.release()

View file

@@ -1,9 +1,6 @@
{ pkgs ? import <nixpkgs> {} }:
let
my-python-packages = ps: with ps; [
numpy
# other python packages
];
in
pkgs.mkShell {
buildInputs = with pkgs.python311Packages; [