working video and coord transmission

This commit is contained in:
Nickiel12 2024-04-27 19:33:18 -07:00
parent 506ebde9f5
commit f706157354
9 changed files with 277 additions and 232 deletions

Cargo.lock (generated, 3 changed lines)
View file

@@ -1359,7 +1359,8 @@ dependencies = [
[[package]]
name = "interprocess"
version = "2.0.0"
source = "git+https://github.com/kotauskas/interprocess.git#5023981cac9cc3dcc184a461b97061ec359f54d4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d5f0e3c218e7a86a6712fd3adc84672304f9e839402b866685b9117a077c37f"
dependencies = [
"futures-core",
"libc",

View file

@@ -13,6 +13,8 @@ futures-core = "0.3.30"
futures-util = { version = "0.3.30", features = ["tokio-io"] }
gilrs = "0.10.6"
gstreamer = { version = "0.22.4", features = ["v1_22"] }
gstreamer-app = { version = "0.22.0", features = ["v1_22"] }
gstreamer-video = { version = "0.22.4", features = ["v1_22"] }
gst-plugin-gtk4 = { version = "0.12.2", features = ["gtk_v4_12"] }
gtk = { version = "0.8.1", package = "gtk4", features = ["v4_12"] }
log = "0.4.21"
@@ -21,8 +23,5 @@ simplelog = "0.12.2"
tokio = { version = "1.37.0", features = ["rt-multi-thread", "time"] }
tokio-tungstenite = "0.21.0"
toml = "0.8.12"
# interprocess = { version = "1.2.1", features = ["tokio_support"] }
interprocess = { version = "2.0.0", git = "https://github.com/kotauskas/interprocess.git", features = ["tokio"] }
gstreamer-app = { version = "0.22.0", features = ["v1_22"] }
gstreamer-video = { version = "0.22.4", features = ["v1_22"] }
interprocess = { version = "2.0.0", features = ["tokio"] }
image = "0.25.1"

View file

@@ -18,8 +18,9 @@ use tokio::net::TcpStream;
use tokio::runtime::Handle;
use tokio_tungstenite::{connect_async, tungstenite::Message, MaybeTlsStream, WebSocketStream};
use crate::remote_sources::TrackerState;
use crate::{gstreamer_pipeline, remote_sources};
use crate::ui::BoxCoords;
use crate::ui::NormalizedBoxCoords;
use crate::{joystick_source::joystick_loop, ui::GuiUpdate};
const PRIORITY_TIMEOUT: Duration = Duration::from_secs(2);
@@ -46,7 +47,7 @@ pub enum ApplicationEvent {
struct CoordState<'a> {
pub sck_outbound: Option<SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, Message>>,
pub sck_alive_server: Arc<AtomicBool>,
pub identity_boxes: Arc<Mutex<Vec<BoxCoords>>>,
pub identity_boxes: Arc<Mutex<Vec<NormalizedBoxCoords>>>,
pub sck_alive_recvr: Arc<AtomicBool>,
pub joystick_loop_alive: Arc<AtomicBool>,
@@ -60,6 +61,8 @@ struct CoordState<'a> {
pub pipeline: gstreamer_pipeline::WebcamPipeline,
pub keep_windows_pipe_alive: Arc<AtomicBool>,
pub tracker_state: Arc<Mutex<TrackerState>>,
}
impl<'a> CoordState<'a> {
@@ -68,7 +71,7 @@ impl<'a> CoordState<'a> {
to_mec: Sender<ApplicationEvent>,
to_gui: Sender<GuiUpdate>,
rt: Handle,
identity_boxes: Arc<Mutex<Vec<BoxCoords>>>,
identity_boxes: Arc<Mutex<Vec<NormalizedBoxCoords>>>,
) -> Self {
let this = CoordState {
sck_outbound: None,
@@ -87,11 +90,19 @@ impl<'a> CoordState<'a> {
pipeline: gstreamer_pipeline::WebcamPipeline::new(),
keep_windows_pipe_alive: Arc::new(AtomicBool::new(true)),
tracker_state: Arc::new(Mutex::new(TrackerState {
tracking_id: 0,
last_detect: Instant::now(),
has_active_connection: false,
}))
};
this.rt.spawn(crate::remote_sources::shared_video_pipe::create_outbound_pipe(
this.pipeline.sink_frame.clone(),
this.to_mec.clone(),
this.keep_windows_pipe_alive.clone(),
this.identity_boxes.clone(),
this.tracker_state.clone(),
));
this
}
@@ -156,7 +167,7 @@ impl<'a> CoordState<'a> {
self.rt.clone(),
self.to_mec.clone(),
self.sck_alive_server.clone(),
self.identity_boxes.clone(),
self.tracker_state.clone(),
));
}
@@ -190,7 +201,7 @@ pub async fn start_coordinator(
to_mec: Sender<ApplicationEvent>,
to_gui: Sender<GuiUpdate>,
runtime: Handle,
identity_boxes: Arc<Mutex<Vec<BoxCoords>>>,
identity_boxes: Arc<Mutex<Vec<NormalizedBoxCoords>>>,
) {
info!("Starting coordinator!");

View file

@@ -22,6 +22,8 @@ impl WebcamPipeline {
pub fn new() -> WebcamPipeline {
let pipeline = Pipeline::with_name("webcam_pipeline");
// All of the following errors are unrecoverable
let source = ElementFactory::make("mfvideosrc")
.build()
.expect("Could not build video source for GStreamer");

View file

@@ -1,147 +0,0 @@
use std::{
cmp::{max, min},
sync::{Arc, Mutex},
time::{Duration, Instant},
};
use async_channel::Sender;
use futures_util::StreamExt;
use log::{error, info};
use tokio::net::TcpStream;
use tokio_tungstenite::{tungstenite::Result, WebSocketStream};
use super::TrackerState;
use crate::{
coordinator::{ApplicationEvent, MoveEvent},
ui::BoxCoords,
};
const HALF_FRAME_WIDTH: f64 = 320.0;
pub async fn handle_connection(
mut ws_stream: WebSocketStream<TcpStream>,
mec: Sender<ApplicationEvent>,
tracker_state: Arc<Mutex<TrackerState>>,
identity_boxes: Arc<Mutex<Vec<BoxCoords>>>,
) -> Result<()> {
if let Ok(mut ts) = tracker_state.lock() {
ts.has_active_connection = true;
}
while let Some(msg) = ws_stream.next().await {
let msg = msg?;
if msg.is_text() {
let (x_off, y_off) = match process_incoming_string(
msg.to_string(),
tracker_state.clone(),
&identity_boxes,
) {
Ok(val) => val,
Err(e) => {
error!("{e}");
(0, 0)
}
};
if let Err(e) = mec
.send(ApplicationEvent::MoveEvent(
MoveEvent { x: x_off, y: y_off },
crate::coordinator::ConnectionType::Automated,
))
.await
{
error!("MEC Unavailable, closing the connection on the socket-server: {e}");
break;
}
}
}
if let Ok(mut ts) = tracker_state.lock() {
ts.has_active_connection = false;
}
Ok(())
}
fn process_incoming_string(
message: String,
tracker_state: Arc<Mutex<TrackerState>>,
identity_boxes: &Arc<Mutex<Vec<BoxCoords>>>,
) -> core::result::Result<(i32, i32), String> {
let mut boxes: Vec<BoxCoords> = Vec::new();
for line in message.lines() {
let parts: Vec<&str> = line.split(' ').collect();
let id = parts[0]
.replace(['[', ']'], "")
.parse()
.map_err(|_| "Invalid ID")?;
if parts.len() != 3 {
return Err("Invalid socket input format: number of parts".to_string());
}
let coords: Vec<&str> = parts[1].split(':').collect();
if coords.len() != 2 {
return Err("Invalid socket input format: coords 1".to_string());
}
let x1: u32 = coords[0].parse().map_err(|_| "Invalid x coordinate")?;
let y1: u32 = coords[1].parse().map_err(|_| "Invalid y coordinate")?;
let coords2: Vec<&str> = parts[2].split(':').collect();
if coords2.len() != 2 {
return Err("Invalid socket input format: coords 2".to_string());
}
let x2: u32 = coords2[0].parse().map_err(|_| "Invalid width")?;
let y2: u32 = coords2[1].parse().map_err(|_| "Invalid width")?;
boxes.push(BoxCoords { id, x1, y1, x2, y2 });
}
let ret: core::result::Result<(i32, i32), String>;
if let Ok(mut ts) = tracker_state.lock() {
if ts.last_detect + Duration::from_secs(2) < Instant::now() && !boxes.is_empty() {
info!("Setting new target: {}", boxes[0].id);
ts.tracking_id = boxes[0].id;
}
if let Some(target_box) = boxes.iter().find(|e| e.id == ts.tracking_id) {
let x_adjust = calc_x_adjust(target_box.x1, target_box.x2);
let y_adjust = calc_y_adjust(target_box.y1);
ts.last_detect = Instant::now();
ret = Ok((x_adjust, y_adjust));
} else {
ret = Err("Couldn't find target in results".to_string());
}
} else {
ret = Err("Couldn't lock tracker state".to_string());
}
if let Ok(mut ib) = identity_boxes.lock() {
// Replace the memory address in the mutex guard with that of the created vec above
*ib = boxes;
}
return ret;
}
fn calc_x_adjust(x1: u32, x2: u32) -> i32 {
let dist_from_center = ((x1 + x2) as f64 / 2.0) - HALF_FRAME_WIDTH;
let mut x_adjust = (dist_from_center / HALF_FRAME_WIDTH * 200.0) as i32;
if x_adjust < 15 && x_adjust > -15 {
x_adjust = 0;
}
min(max(x_adjust, -100), 100)
}
fn calc_y_adjust(y1: u32) -> i32 {
let mut y_adjust = y1 as i32 - 100;
if y_adjust < 0 {
y_adjust -= 20;
} else if y_adjust < 30 {
y_adjust = 0;
} else {
y_adjust = (y_adjust as f32 * 0.75) as i32;
}
min(max(y_adjust, -100), 100)
}

View file

@@ -20,14 +20,11 @@ use tokio_tungstenite::{
tungstenite::{Error, Message, Result},
};
mod automated_source;
mod process_box_string;
mod remote_source;
pub mod shared_video_pipe;
use crate::{
coordinator::{ApplicationEvent, ConnectionType},
ui::BoxCoords,
};
use crate::coordinator::{ApplicationEvent, ConnectionType};
pub struct TrackerState {
pub has_active_connection: bool,
@@ -39,17 +36,12 @@ pub async fn start_socketserver(
rt: Handle,
mec: Sender<ApplicationEvent>,
stay_alive: Arc<AtomicBool>,
identity_boxes: Arc<Mutex<Vec<BoxCoords>>>,
tracker_state: Arc<Mutex<TrackerState>>,
) {
let addr = "127.0.0.1:9002";
let listener = TcpListener::bind(&addr).await.expect("Can't listen");
info!("Listening on: {}", addr);
let tracker_state = Arc::new(Mutex::new(TrackerState {
tracking_id: 0,
last_detect: Instant::now(),
has_active_connection: false,
}));
while let Ok((stream, _)) = listener.accept().await {
let peer = stream
@@ -62,7 +54,6 @@ async fn accept_connection(
stream,
mec.clone(),
tracker_state.clone(),
identity_boxes.clone(),
));
}
@@ -74,10 +65,9 @@ async fn accept_connection(
stream: TcpStream,
mec: Sender<ApplicationEvent>,
tracker_state: Arc<Mutex<TrackerState>>,
identity_boxes: Arc<Mutex<Vec<BoxCoords>>>,
) {
if let Err(e) =
handle_connection(peer, stream, mec.clone(), tracker_state, identity_boxes).await
handle_connection(peer, stream, mec.clone(), tracker_state).await
{
match e {
Error::ConnectionClosed | Error::Protocol(_) | Error::Utf8 => (),
@@ -91,7 +81,6 @@ async fn handle_connection(
stream: TcpStream,
mec: Sender<ApplicationEvent>,
tracker_state: Arc<Mutex<TrackerState>>,
identity_boxes: Arc<Mutex<Vec<BoxCoords>>>,
) -> Result<()> {
let mut ws_stream = accept_async(stream).await.expect("Failed to accept");
info!("New WebSocket connection: {}", peer);
@@ -142,8 +131,7 @@ async fn handle_connection(
if !ws_stream.is_terminated() {
match connection_type.unwrap() {
ConnectionType::Automated => {
automated_source::handle_connection(ws_stream, mec, tracker_state, identity_boxes)
.await?;
todo!();
}
ConnectionType::Remote => {
remote_source::handle_connection().await?;

View file

@@ -0,0 +1,59 @@
use std::sync::{Arc, Mutex};
use crate::ui::NormalizedBoxCoords;
pub fn process_incoming_string(
message: String,
identity_boxes: &Arc<Mutex<Vec<NormalizedBoxCoords>>>, // This goes all the way back to the GUI thread for drawing boxes
) -> core::result::Result<(), String> {
let mut boxes: Vec<NormalizedBoxCoords> = Vec::new();
for line in message.lines() {
let parts: Vec<&str> = line.split(' ').collect();
if parts.len() != 3 {
return Err("Invalid socket input format: number of parts".to_string());
}
let id = parts[0]
.replace(['[', ']'], "")
.parse()
.map_err(|_| "Invalid ID")?;
let coords: Vec<&str> = parts[1].split(':').collect();
if coords.len() != 2 {
return Err("Invalid socket input format: coords 1".to_string());
}
let x1: u32 = coords[0].parse().map_err(|_| "Invalid x coordinate")?;
let y1: u32 = coords[1].parse().map_err(|_| "Invalid y coordinate")?;
let coords2: Vec<&str> = parts[2].split(':').collect();
if coords2.len() != 2 {
return Err("Invalid socket input format: coords 2".to_string());
}
let x2: u32 = coords2[0].parse().map_err(|_| "Invalid x coordinate")?;
let y2: u32 = coords2[1].parse().map_err(|_| "Invalid y coordinate")?;
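// Wire coordinates arrive as integers scaled to 0..=1000; dividing by 1000.0 normalizes them to 0.0..=1.0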
boxes.push(NormalizedBoxCoords {
id,
x1: x1 as f32 / 1000.0,
y1: y1 as f32 / 1000.0,
x2: x2 as f32 / 1000.0,
y2: y2 as f32 / 1000.0,
});
}
// Swap the freshly parsed boxes into the shared vec behind the mutex
if let Ok(mut ib) = identity_boxes.lock() {
*ib = boxes;
}
Ok(())
}
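
For reference, each incoming line has the form "[id] x1:y1 x2:y2", with all four coordinates scaled by 1000. A minimal usage sketch under that assumption (the sample values are illustrative):

use std::sync::{Arc, Mutex};

fn example() {
    let identity_boxes = Arc::new(Mutex::new(Vec::new()));
    // "[7] 250:100 750:900" -> id 7, top-left (0.25, 0.10), bottom-right (0.75, 0.90)
    process_incoming_string("[7] 250:100 750:900".to_string(), &identity_boxes)
        .expect("a well-formed line should parse");
    assert_eq!(identity_boxes.lock().unwrap().len(), 1);
}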

View file

@@ -1,88 +1,148 @@
use std::sync::{atomic::AtomicBool, Arc, Mutex};
use std::{
cmp::{max, min}, sync::{atomic::AtomicBool, Arc, Mutex}, time::{Duration, Instant}
};
use async_channel::Sender;
use gstreamer_app::AppSink;
use gstreamer_video::VideoFrameExt;
use gstreamer_video::{video_frame::Readable, VideoFrame, VideoInfo};
use interprocess::os::windows::named_pipe::{
pipe_mode,
pipe_mode::{self, Bytes},
tokio::{DuplexPipeStream, PipeStream},
};
use log::{error, info};
use tokio::io::AsyncWriteExt;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use crate::coordinator::ApplicationEvent;
use crate::{coordinator::{ApplicationEvent, MoveEvent}, remote_sources::process_box_string, ui::NormalizedBoxCoords};
pub async fn create_outbound_pipe(appsink: Arc<Mutex<AppSink>>, to_mec: Sender<ApplicationEvent>, keep_alive: Arc<AtomicBool>) {
if let Ok(mut pipe) =
use super::TrackerState;
struct LoopState {
pub appsink: Arc<Mutex<AppSink>>,
pub mec: Sender<ApplicationEvent>,
pub identity_boxes: Arc<Mutex<Vec<NormalizedBoxCoords>>>, // This goes all the way back to the GUI thread for drawing boxes
pub tracker_state: Arc<Mutex<TrackerState>>,
pub pipe: PipeStream<Bytes, Bytes>,
pub video_info: VideoInfo,
pub byte_buffer: Vec<u8>,
pub len_buf: [u8; 4],
}
impl LoopState {
fn get_video_frame(&mut self) -> Result<VideoFrame<Readable>, String> {
let sample = self.appsink.lock().map_err(|e| format!("Could not get a lock on the appsink: {e}"))?.pull_sample().map_err(|e| format!("Could not pull appsink sample: {e}"))?;
let buffer = sample.buffer_owned().ok_or_else(|| "Sample contained no buffer".to_string())?;
gstreamer_video::VideoFrame::from_buffer_readable(buffer, &self.video_info).map_err(|_| "Unable to make video frame from buffer!".to_string())
}
async fn read_return_message(&mut self) -> Result<String, String> {
// Read message size from the pipe
if let Err(e) = self.pipe.read_exact(&mut self.len_buf).await {
return Err(format!("Couldn't read message length from the windows pipe: {e}"));
}
let length = u32::from_le_bytes(self.len_buf);
self.byte_buffer.resize(length as usize, 0);
// Read a payload of exactly `length` bytes from the pipe
if let Err(e) = self.pipe.read_exact(&mut self.byte_buffer).await {
return Err(format!("Couldn't read the message from the windows pipe: {e}"));
}
Ok(String::from_utf8_lossy(&self.byte_buffer).to_string())
}
}
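
read_return_message expects a little-endian u32 length prefix followed by exactly that many bytes of UTF-8. The detector process on the other end of the pipe is not part of this commit, but the matching writer side of the framing would look roughly like this (write_framed is a hypothetical helper, shown only to document the protocol):

use tokio::io::{AsyncWrite, AsyncWriteExt};

// Hypothetical counterpart to read_return_message: u32 LE length prefix, then the payload
async fn write_framed<W: AsyncWrite + Unpin>(writer: &mut W, msg: &str) -> std::io::Result<()> {
    let bytes = msg.as_bytes();
    writer.write_all(&(bytes.len() as u32).to_le_bytes()).await?;
    writer.write_all(bytes).await?;
    writer.flush().await
}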
pub async fn create_outbound_pipe(
appsink: Arc<Mutex<AppSink>>,
mec: Sender<ApplicationEvent>,
keep_alive: Arc<AtomicBool>,
identity_boxes: Arc<Mutex<Vec<NormalizedBoxCoords>>>, // This goes all the way back to the GUI thread for drawing boxes
tracker_state: Arc<Mutex<TrackerState>>,
) {
if let Ok(pipe) =
DuplexPipeStream::<pipe_mode::Bytes>::connect_by_path(r"\\.\pipe\example_pipe").await
{
let video_info = gstreamer_video::VideoInfo::builder(gstreamer_video::VideoFormat::Rgb, 640, 480)
.build()
.expect("Couldn't build video info!");
let mut state = LoopState {
appsink,
mec,
identity_boxes,
tracker_state,
pipe,
video_info: gstreamer_video::VideoInfo::builder(gstreamer_video::VideoFormat::Rgb, 640, 480).build().expect("Couldn't build video info!"),
byte_buffer: Vec::new(),
len_buf: [0; 4],
};
let mut prev_tick = Instant::now();
loop {
if !keep_alive.load(std::sync::atomic::Ordering::SeqCst) {
break;
}
let sample = match appsink.lock() {
Ok(sink) => {
match sink.pull_sample() {
let next_tick = prev_tick + Duration::from_millis(30);
if Instant::now() < next_tick {
tokio::time::sleep(next_tick - Instant::now()).await;
}
prev_tick = Instant::now();
state.len_buf = [0; 4];
let video_frame = match state.get_video_frame() {
Ok(e) => e,
Err(e) => {
error!("Could not pull appsink sample: {e}");
break;
}
}
}
Err(e) => {
error!("Could not get a lock on the appsink: {e}");
error!("{}", e);
break;
}
};
let buffer = sample.buffer_owned().unwrap();
let video_frame = match gstreamer_video::VideoFrame::from_buffer_readable(buffer, &video_info) {
Ok(e) => e,
Err(_) => {
error!("Unable to make video frame from buffer!");
break;
}
};
info!("Video frame {}x{} with stride of {}, is this many bytes: {}", video_frame.width(), video_frame.height(), video_frame.plane_stride()[0], video_frame.plane_data(0).unwrap().len());
// info!("Video frame {}x{} with stride of {}, is this many bytes: {}", video_frame.width(), video_frame.height(), video_frame.plane_stride()[0], video_frame.plane_data(0).unwrap().len());
if let Err(e) = send_to_pipe(&mut pipe, video_frame.plane_data(0).unwrap()).await {
// Send video frame to pipe
if let Err(e) = send_to_pipe(&mut state.pipe, video_frame.plane_data(0).unwrap()).await {
error!("Error in sending to the pipe: {e}");
break;
}
let message = match state.read_return_message().await {
Ok(e) => e,
Err(e) => {
error!("{}", e);
break;
}
};
// Load the tracking boxes into identity_boxes, then run the adjustment calculations on the updated tracking info (side effects)
let (x_off, y_off) = process_box_string::process_incoming_string(message, &state.identity_boxes)
.and_then(|_| calculate_tracking(&state.tracker_state, &state.identity_boxes))
.unwrap_or((0, 0));
if let Err(e) = state.mec
.send(ApplicationEvent::MoveEvent(
MoveEvent { x: x_off, y: y_off },
crate::coordinator::ConnectionType::Automated,
))
.await
{
error!("MEC Unavailable, closing the connection on the pipe: {e}");
break;
}
}
if let Err(e) = pipe.shutdown().await {
info!("Windows pipe has been shut down");
keep_alive.store(false, std::sync::atomic::Ordering::SeqCst);
if let Err(e) = state.pipe.shutdown().await {
error!("Couldn't shut down pipe: {e}");
}
// send pipeline images as bytes
// let bus = pipeline.get_bus().expect("Pipeline has no bus");
// let mut frame_count = 0;
// while let Some(msg) = bus.timed_pop(gstreamer::ClockTime::from_seconds(1)) {
// use gstreamer::MessageView;
// match msg.view() {
// MessageView::Element(element) => {
// if let Some(buffer) = element.structure().get::<gstreamer::Buffer>("buffer") {
// frame_count += 1;
// println!("Writing frame {}", frame_count);
// let data = buffer.map_readable().expect("Failed to map buffer");
// // Send the data to the broadcast channel
// tx.send(data.as_slice()).expect("Failed to send data");
// }
// }
// _ => (),
// }
// }
}
}
@@ -95,3 +155,54 @@ async fn send_to_pipe<'a>(
Ok(())
}
fn calculate_tracking(
tracker_state: &Arc<Mutex<TrackerState>>,
identity_boxes: &Arc<Mutex<Vec<NormalizedBoxCoords>>>, // This goes all the way back to the GUI thread for drawing boxes
) -> core::result::Result<(i32, i32), String> {
if let Ok(boxes) = identity_boxes.lock() {
if let Ok(mut ts) = tracker_state.lock() {
if ts.last_detect + Duration::from_secs(2) < Instant::now() && !boxes.is_empty() {
info!("Setting new target: {}", boxes[0].id);
ts.tracking_id = boxes[0].id;
}
if let Some(target_box) = boxes.iter().find(|e| e.id == ts.tracking_id) {
let x_adjust = calc_x_adjust(target_box.x1, target_box.x2);
let y_adjust = calc_y_adjust(target_box.y1);
ts.last_detect = Instant::now();
Ok((x_adjust, y_adjust))
} else {
Err("Couldn't find target in results".to_string())
}
} else {
Err("Couldn't lock tracker state".to_string())
}
} else {
Err("Couldn't lock identity boxes".to_string())
}
}
fn calc_x_adjust(x1: f32, x2: f32) -> i32 {
let dist_from_center = ((x1 + x2) / 2.0) - 0.5;
let mut x_adjust = ((dist_from_center / 0.5 * 2.0) * 100.0) as i32;
if x_adjust < 15 && x_adjust > -15 {
x_adjust = 0;
}
min(max(x_adjust, -100), 100)
}
fn calc_y_adjust(y1: f32) -> i32 {
// y1 is already normalized to 0.0..=1.0 here (the wire sends 0..=1000 and process_box_string divides by 1000); 0.5 == 50% of frame height
let mut y_adjust = ((y1 - 0.1) * 250.0) as i32;
if y_adjust < 0 {
y_adjust -= 20;
} else if y_adjust < 30 {
y_adjust = 0;
} else {
y_adjust = (y_adjust as f32 * 0.75) as i32;
}
min(max(y_adjust, -100), 100)
}
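
Worked through by hand with the normalized inputs these functions now receive: a box spanning x1 = 0.6 to x2 = 0.8 has its center at 0.7, so dist_from_center = 0.2 and x_adjust = (0.2 / 0.5 * 2.0) * 100 = 80; a box top at y1 = 0.5 gives (0.5 - 0.1) * 250 = 100, which the final branch scales to 75. A test-style sketch of those expectations (values chosen for illustration):

#[cfg(test)]
mod adjust_tests {
    use super::*;

    #[test]
    fn sample_adjustments() {
        assert_eq!(calc_x_adjust(0.6, 0.8), 80); // center at 0.7, right of frame center
        assert_eq!(calc_x_adjust(0.49, 0.53), 0); // small offset lands in the +/-15 dead zone
        assert_eq!(calc_y_adjust(0.5), 75); // (0.5 - 0.1) * 250 = 100, then scaled by 0.75
    }
}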

View file

@@ -41,6 +41,26 @@ pub struct BoxCoords {
pub y2: u32,
}
pub struct NormalizedBoxCoords {
pub id: u32,
pub x1: f32,
pub y1: f32,
pub x2: f32,
pub y2: f32,
}
impl NormalizedBoxCoords {
fn into_relative(&self, width: i32, height: i32) -> BoxCoords {
BoxCoords {
id: self.id,
x1: (self.x1 * width as f32) as u32,
y1: (self.y1 * height as f32) as u32,
x2: (self.x2 * width as f32) as u32,
y2: (self.y2 * height as f32) as u32,
}
}
}
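
Because the boxes are stored normalized, into_relative rescales them to the widget's actual size at draw time. For example, on the 640x480 canvas configured below, a normalized box (0.25, 0.25)-(0.75, 0.75) maps back to pixel corners (160, 120) and (480, 360); a quick sketch of that round trip:

let nb = NormalizedBoxCoords { id: 1, x1: 0.25, y1: 0.25, x2: 0.75, y2: 0.75 };
let b = nb.into_relative(640, 480);
assert_eq!((b.x1, b.y1, b.x2, b.y2), (160, 120, 480, 360));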
pub fn build_ui(app: &Application, runtime: Handle) {
let initial_settings = load_config();
let main_box = gtk::Box::new(gtk::Orientation::Horizontal, 0);
@@ -58,7 +78,7 @@ pub fn build_ui(app: &Application, runtime: Handle) {
// Main Event Channel
let (to_mec, mec) = async_channel::unbounded::<ApplicationEvent>();
let (to_gui, gui_recv) = async_channel::bounded::<GuiUpdate>(10);
let identity_boxes: Arc<Mutex<Vec<BoxCoords>>> = Arc::new(Mutex::new(vec![]));
let identity_boxes: Arc<Mutex<Vec<NormalizedBoxCoords>>> = Arc::new(Mutex::new(vec![]));
runtime.spawn(start_coordinator(
mec,
@@ -126,8 +146,8 @@ pub fn build_ui(app: &Application, runtime: Handle) {
.content_height(480)
.content_width(640)
.build();
drawable.set_draw_func(move |_, ctx, _width, _height| {
draw_boxes(&ctx, identity_boxes.clone());
drawable.set_draw_func(move |_, ctx, width, height| {
draw_boxes(width, height, &ctx, identity_boxes.clone());
});
overlay_box.set_child(Some(&webcam_picture));
@@ -213,12 +233,13 @@ pub fn build_ui(app: &Application, runtime: Handle) {
window.present();
}
fn draw_boxes(ctx: &Context, boxes: Arc<Mutex<Vec<BoxCoords>>>) {
fn draw_boxes(width: i32, height: i32, ctx: &Context, boxes: Arc<Mutex<Vec<NormalizedBoxCoords>>>) {
ctx.set_line_width(2.0);
ctx.set_source_rgb(0.0, 1.0, 0.0);
if let Ok(bxs) = boxes.lock() {
for b in bxs.iter() {
for nb in bxs.iter() {
let b = nb.into_relative(width, height);
ctx.rectangle(
b.x1 as f64,
b.y1 as f64,