removed old file, renamed main file
parent fac9e73506
commit 542abea6c7
2 changed files with 186 additions and 285 deletions
@@ -1,99 +0,0 @@
import cv2
import numpy as np

multiplication_factor = 0.05

def get_adjustment_amount(imgSize, currentX, currentY, currentW, currentH):
    current_top_left = [currentX, currentY]
    current_bottom_right = [currentX + currentW, currentY + currentH]
    current_top_right = [currentX + currentW, currentY]

    # Find the difference between the left gap and the right gap, divide it by two,
    # and multiply it by the speed scale; do the same for the top and bottom gaps.
    horizontal_adjustment = multiplication_factor * (currentX - (imgSize[0] - current_top_right[0])) / 2
    vertical_adjustment = multiplication_factor * (currentY - (imgSize[1] - current_bottom_right[1])) / 2

    return [horizontal_adjustment, vertical_adjustment]

cap = cv2.VideoCapture(0)  # open the default camera (index 0)
faceCascade = cv2.CascadeClassifier(r"./lbpcascade_frontalface.xml")  # CHECK THIS FIRST WHEN TROUBLESHOOTING
faceCascade_default = cv2.CascadeClassifier(r"./haarcascade_frontalface_default.xml")
faceCascade_alt = cv2.CascadeClassifier(r"./haarcascade_frontalface_alt.xml")
faceCascade_alt2 = cv2.CascadeClassifier(r"./haarcascade_frontalface_alt2.xml")
faceCascade_alttree = cv2.CascadeClassifier(r"./haarcascade_frontalface_alt_tree.xml")
profileFaceCascade = cv2.CascadeClassifier(r"./haarcascade_profileface.xml")

# Read one frame to learn the capture resolution
tmp, frm = cap.read()
height, width, channels = frm.shape
print(f"{height*.25}, {width}")
del tmp, frm

# cv2.imread flags: color is 1, grayscale is 0, unchanged is -1
while True:
    ret, frame = cap.read()
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    # Detect faces in the image
    faces = faceCascade.detectMultiScale(
        gray,
        scaleFactor=1.1,
        minNeighbors=5,
        minSize=(30, 30)
    )

    # Fall back to the other cascades if the primary one found nothing
    if len(faces) == 0:
        faces = faceCascade_default.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30, 30)
        )

    if len(faces) == 0:
        faces = profileFaceCascade.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30, 30)
        )

    if len(faces) == 0:
        faces = faceCascade_alt.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30, 30)
        )

    if len(faces) == 0:
        faces = faceCascade_alt2.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30, 30)
        )

    if len(faces) == 0:
        faces = faceCascade_alttree.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30, 30)
        )

    # Draw a rectangle around each detected face and report the required adjustment
    for (x, y, w, h) in faces:
        adjustment_required = get_adjustment_amount([width, height], x, y, w, h)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 255, 255))
        print(f"Adjust right: {adjustment_required[0]}")
        print(f"Adjust up   : {adjustment_required[1]}")

    cv2.imshow('frame', frame)

    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()
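For a quick sanity check of the centering math in get_adjustment_amount, here is a minimal, self-contained sketch; the frame size, box position, and box size below are made-up illustrative values, not anything taken from this repository:

# Illustrative only: the centering math from the deleted file, exercised with
# hypothetical numbers so the sign convention is easy to verify.
multiplication_factor = 0.05

def get_adjustment_amount(imgSize, currentX, currentY, currentW, currentH):
    current_bottom_right = [currentX + currentW, currentY + currentH]
    current_top_right = [currentX + currentW, currentY]
    # half the gap imbalance on each axis, scaled by the speed factor
    horizontal_adjustment = multiplication_factor * (currentX - (imgSize[0] - current_top_right[0])) / 2
    vertical_adjustment = multiplication_factor * (currentY - (imgSize[1] - current_bottom_right[1])) / 2
    return [horizontal_adjustment, vertical_adjustment]

# 640x480 frame, 100x100 box with its top-left corner at (400, 200):
# left gap = 400 vs right gap = 640 - 500 = 140, so the box sits right of centre;
# top gap = 200 vs bottom gap = 480 - 300 = 180, so it sits slightly below centre.
print(get_adjustment_amount([640, 480], 400, 200, 100, 100))  # [6.5, 0.5]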