added some working files

parent a461f8216e
commit 374707764b
11 changed files with 262 additions and 1 deletion

.gitignore (vendored) | 4
@@ -1,3 +1,7 @@
runs/*
*.pt
*.mp4

# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/

@@ -41,6 +41,8 @@ async def handle_client(websocket, path):
            await update_from_packet(command)
    except websockets.exceptions.ConnectionClosed:
        print("Connection closed with: ", websocket.remote_address)
    except Exception as e:
        print(f"There was an error: {e}")
    finally:
        print("closing websocket")
        del socket_connections[websocket]
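
Editor's note: only the lines in the hunk above are part of the commit; the file name for this hunk did not survive the capture. As a hedged sketch of how those except/finally clauses plausibly sit inside the handler, with the receive loop, JSON decoding, and connection registration all assumed rather than taken from the commit:

import json
import websockets

socket_connections = {}

async def update_from_packet(command):
    # Placeholder: the real update_from_packet is defined elsewhere in the file.
    pass

async def handle_client(websocket, path):
    socket_connections[websocket] = path      # assumed: how connections get registered
    try:
        async for message in websocket:       # assumed receive loop
            command = json.loads(message)     # assumed packet decoding
            await update_from_packet(command)
    except websockets.exceptions.ConnectionClosed:
        print("Connection closed with: ", websocket.remote_address)
    except Exception as e:
        print(f"There was an error: {e}")
    finally:
        print("closing websocket")
        del socket_connections[websocket]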

YOLO python/basic-tracking.py (new file) | 14
@@ -0,0 +1,14 @@
from ultralytics import YOLO


# Load an official or custom model
# model = YOLO('yolov8n.pt')       # Load an official Detect model
model = YOLO('yolov8s.pt')         # Load an official Detect model
# model = YOLO('yolov8n-seg.pt')   # Load an official Segment model
# model = YOLO('yolov8n-pose.pt')  # Load an official Pose model
# model = YOLO('path/to/best.pt')  # Load a custom trained model


# Run inference on a test video; stream=True returns a generator, so iterating
# it drives inference frame by frame while show/save handle the output.
for i in model.predict(r'D:\Projects\FaceTrackingCamerav3\YOLO python\kevin-center-camera-position.mp4',
                       show=True, save=True, save_frames=True, save_txt=True, conf=0.5, stream=True):
    continue
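
Since the loop above only drives the generator and discards each result, a hedged variant that actually reads the detections (same model as above, source path shortened for illustration) would look like:

# Hedged variant: consume the streamed Results objects instead of discarding them.
for result in model.predict('kevin-center-camera-position.mp4', conf=0.5, stream=True):
    for box in result.boxes:
        print(int(box.cls[0]), float(box.conf[0]), box.xyxy[0].tolist())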

YOLO python/dataset/detect.yaml (new file) | 6
@@ -0,0 +1,6 @@
train: images/train
val: images/val
test: images/test

names:
  0: Person
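
For context, a dataset config like this is what an Ultralytics training call points at. A minimal hedged sketch; the epoch count and image size are illustrative values, not taken from the commit:

from ultralytics import YOLO

# Fine-tune the small Detect model on the single-class dataset described above.
# epochs and imgsz are illustrative, not from the commit.
model = YOLO('yolov8s.pt')
model.train(data='YOLO python/dataset/detect.yaml', epochs=50, imgsz=640)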

YOLO python/webcam-test.py (new file) | 67
@@ -0,0 +1,67 @@
# source from https://dipankarmedh1.medium.com/real-time-object-detection-with-yolo-and-webcam-enhancing-your-computer-vision-skills-861b97c78993

from ultralytics import YOLO
import cv2
import math

# model
model = YOLO("yolov8x.pt")

# start webcam
cap = cv2.VideoCapture(0)
cap.set(3, 640)  # frame width
cap.set(4, 480)  # frame height

# object classes (COCO names, in model output order)
classNames = ["person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", "boat",
              "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat",
              "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella",
              "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat",
              "baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup",
              "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange", "broccoli",
              "carrot", "hot dog", "pizza", "donut", "cake", "chair", "sofa", "pottedplant", "bed",
              "diningtable", "toilet", "tvmonitor", "laptop", "mouse", "remote", "keyboard", "cell phone",
              "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors",
              "teddy bear", "hair drier", "toothbrush"
              ]


while True:
    success, img = cap.read()
    results = model.track(img, persist=True, stream=True)

    # coordinates
    for r in results:
        boxes = r.boxes

        for box in boxes:
            # bounding box
            x1, y1, x2, y2 = box.xyxy[0]
            x1, y1, x2, y2 = int(x1), int(y1), int(x2), int(y2)  # convert to int values

            # put box in cam
            cv2.rectangle(img, (x1, y1), (x2, y2), (255, 0, 255), 3)

            # confidence
            confidence = math.ceil((box.conf[0] * 100)) / 100
            print("Confidence --->", confidence)

            # class name
            cls = int(box.cls[0])
            print("Class name -->", classNames[cls])

            # object details
            org = [x1, y1]
            font = cv2.FONT_HERSHEY_SIMPLEX
            fontScale = 1
            color = (255, 0, 0)
            thickness = 2

            cv2.putText(img, classNames[cls], org, font, fontScale, color, thickness)

    cv2.imshow('Webcam', img)
    if cv2.waitKey(1) == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()
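
One note on the script above: model.track(persist=True) assigns persistent track IDs, but the loop never reads them. A hedged snippet showing where the IDs live (boxes.id is None until the tracker has assigned IDs); in practice this would be folded into the existing for-loop rather than run separately:

# Hedged addition: read the track IDs maintained by model.track(persist=True).
for r in results:
    if r.boxes.id is not None:
        for box, track_id in zip(r.boxes, r.boxes.id.int().tolist()):
            x1, y1, x2, y2 = map(int, box.xyxy[0])
            print(f"track {track_id}: ({x1}, {y1}) -> ({x2}, {y2})")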

face_tracking_demo/Cargo.lock (generated, new file) | 7
@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "face_tracking_demo"
version = "0.1.0"

face_tracking_demo/Cargo.toml (new file) | 6
@@ -0,0 +1,6 @@
[package]
name = "face_tracking_demo"
version = "0.1.0"
edition = "2021"

[dependencies]

face_tracking_demo/src/main.rs (new file) | 0

face_tracking_demo/src/opencv_main.rs (new file) | 61
@@ -0,0 +1,61 @@
use opencv::{
    core, highgui, prelude::*, video::{self, TrackerDaSiamRPN_Params}, videoio
};

fn main() -> opencv::Result<()> {
    let window = "window";
    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

    // let mut cam = opencv::videoio::VideoCapture::new(0, opencv::videoio::CAP_ANY)?; // 0 is the default camera
    let mut cap = videoio::VideoCapture::from_file(r"C:\Users\nicho\Desktop\tmp\test-vid.mp4", videoio::CAP_ANY)?;
    let opened = opencv::videoio::VideoCapture::is_opened(&cap)?;
    if !opened {
        panic!("Unable to open video source!");
    }

    let mut frame = core::Mat::default();
    // let mut tracker = opencv::video::TrackerMIL::create(opencv::video::TrackerMIL_Params::default().unwrap())?;
    let mut tracker = opencv::video::TrackerDaSiamRPN::create(&opencv::video::TrackerDaSiamRPN_Params::default().unwrap())?;
    // let mut tracker = opencv::video::TrackerGOTURN::create(&opencv::video::TrackerGOTURN_Params::default().unwrap())?;
    // let mut tracker = opencv::video::TrackerNano::create(&opencv::video::TrackerNano_Params::default().unwrap())?;
    // let mut tracker = opencv::tracking::TrackerKCF::create()
    // let mut tracker = opencv::alphamat
    let mut bounding_box = core::Rect::new(0, 0, 0, 0);
    let mut update_tracker = false;
    let mut frame_counter: u64 = 0;

    loop {
        // cam.read(&mut frame)?;
        cap.read(&mut frame)?;

        if !update_tracker {
            bounding_box = highgui::select_roi(window, &frame, false, false, true)?;
            let _ = tracker.init(&frame, bounding_box);
            update_tracker = true;
        }

        let key = highgui::wait_key(10)?;

        if key > 0 {
            if key == 27 {
                break; // 'ESC' to quit
            } else if key == 32 {
                // 'SPACE' to select an object to track
            }
        }

        if update_tracker && frame_counter % 3 == 0 {
            let _ = tracker.update(&frame, &mut bounding_box);
        }
        frame_counter += 1;

        println!("box is: {}, {}", bounding_box.x, bounding_box.y);
        opencv::imgproc::rectangle(&mut frame, bounding_box, core::Scalar::new(255.0, 0.0, 0.0, 0.0), 2, 8, 0)?;

        if frame.size()?.width > 0 {
            highgui::imshow(window, &mut frame)?;
        }
    }

    Ok(())
}

face_tracking_demo/test.py (new file) | 94
@@ -0,0 +1,94 @@

import cv2
import sys

(major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')

if __name__ == '__main__':

    # Set up tracker.
    # Instead of MIL, you can also use any of the other tracker types below.

    tracker_types = ['BOOSTING', 'MIL', 'KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT']
    tracker_type = tracker_types[2]

    if int(minor_ver) < 3:
        tracker = cv2.Tracker_create(tracker_type)
    else:
        if tracker_type == 'BOOSTING':
            tracker = cv2.TrackerBoosting_create()
        if tracker_type == 'MIL':
            tracker = cv2.TrackerMIL_create()
        if tracker_type == 'KCF':
            tracker = cv2.TrackerKCF_create()
        if tracker_type == 'TLD':
            tracker = cv2.TrackerTLD_create()
        if tracker_type == 'MEDIANFLOW':
            tracker = cv2.TrackerMedianFlow_create()
        if tracker_type == 'GOTURN':
            tracker = cv2.TrackerGOTURN_create()
        if tracker_type == 'MOSSE':
            tracker = cv2.TrackerMOSSE_create()
        if tracker_type == "CSRT":
            tracker = cv2.TrackerCSRT_create()

    # Read video
    video = cv2.VideoCapture("videos/chaplin.mp4")

    # Exit if video not opened.
    if not video.isOpened():
        print("Could not open video")
        sys.exit()

    # Read first frame.
    ok, frame = video.read()
    if not ok:
        print('Cannot read video file')
        sys.exit()

    # Define an initial bounding box
    bbox = (287, 23, 86, 320)

    # Select a bounding box interactively (overrides the initial box above)
    bbox = cv2.selectROI(frame, False)

    # Initialize tracker with first frame and bounding box
    ok = tracker.init(frame, bbox)

    while True:
        # Read a new frame
        ok, frame = video.read()
        if not ok:
            break

        # Start timer
        timer = cv2.getTickCount()

        # Update tracker
        ok, bbox = tracker.update(frame)

        # Calculate Frames per second (FPS)
        fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer)

        # Draw bounding box
        if ok:
            # Tracking success
            p1 = (int(bbox[0]), int(bbox[1]))
            p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
            cv2.rectangle(frame, p1, p2, (255, 0, 0), 2, 1)
        else:
            # Tracking failure
            cv2.putText(frame, "Tracking failure detected", (100, 80), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)

        # Display tracker type on frame
        cv2.putText(frame, tracker_type + " Tracker", (100, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)

        # Display FPS on frame
        cv2.putText(frame, "FPS : " + str(int(fps)), (100, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)

        # Display result
        cv2.imshow("Tracking", frame)

        # Exit if ESC pressed
        k = cv2.waitKey(1) & 0xff
        if k == 27: break
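
A compatibility caveat on the constructors above: they match OpenCV 3.x / early 4.x. In OpenCV 4.5 and later, several of them moved to the contrib "legacy" module, so on a current opencv-contrib-python install the equivalents would look roughly like this (hedged; exact availability depends on the installed build):

import cv2

# Hedged compatibility note: with opencv-contrib-python >= 4.5 the retired
# constructors used above live under cv2.legacy rather than cv2 directly.
tracker = cv2.legacy.TrackerBoosting_create()    # was cv2.TrackerBoosting_create()
tracker = cv2.legacy.TrackerTLD_create()         # was cv2.TrackerTLD_create()
tracker = cv2.legacy.TrackerMedianFlow_create()  # was cv2.TrackerMedianFlow_create()
tracker = cv2.legacy.TrackerMOSSE_create()       # was cv2.TrackerMOSSE_create()
# TrackerMIL_create, TrackerKCF_create, TrackerGOTURN_create and
# TrackerCSRT_create remain available as cv2.Tracker*_create.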

(submodule pointer update; file name not captured)
@@ -1 +1 @@
-Subproject commit f809c1dd283cc049d652338fb438de65abe40cfc
+Subproject commit 852e23dc7c92f80bf59f679803fb6344942bc138