moved jpeg encoding to gstreamer

This commit is contained in:
Nickiel12 2024-06-22 12:54:32 -07:00
parent a3d8a6fd06
commit 8251af515f
6 changed files with 69 additions and 65 deletions

1
Cargo.lock generated
View file

@ -1163,7 +1163,6 @@ dependencies = [
"gst-plugin-gtk4",
"gstreamer",
"gstreamer-app",
"gstreamer-video",
"gtk4",
"log",
"serde",

View file

@ -16,7 +16,6 @@ futures-util = { version = "0.3.30", features = ["tokio-io"] }
gilrs = "0.10.6"
gstreamer = { version = "0.22.4", features = ["v1_22"] }
gstreamer-app = { version = "0.22.0", features = ["v1_22"] }
gstreamer-video = { version = "0.22.4", features = ["v1_22"] }
gst-plugin-gtk4 = { version = "0.12.2", features = ["gtk_v4_12"] }
gtk = { version = "0.8.1", package = "gtk4", features = ["v4_12"] }
log = "0.4.21"

View file

@ -7,7 +7,6 @@ use std::{
use async_channel::Sender;
use futures_util::{SinkExt, StreamExt, TryStreamExt};
use gstreamer_app::AppSink;
use gstreamer_video::{video_frame::Readable, VideoFrame, VideoInfo};
use tokio::time::{sleep_until, Instant};
use tokio_tungstenite::{connect_async, tungstenite::Message};
use tracing::{error, info, instrument};
@ -33,11 +32,6 @@ pub async fn remote_video_loop(
conn_string
);
let video_info =
gstreamer_video::VideoInfo::builder(gstreamer_video::VideoFormat::Rgb, 640, 480)
.build()
.expect("Could not build video info!");
let mut fail_count = 0;
{
let mut tm = tracker_metrics.lock().await;
@ -65,19 +59,12 @@ pub async fn remote_video_loop(
Ok((connection, _)) => {
let (mut sender, mut recvr) = connection.split();
// runtime.spawn(listen_to_messages(
// recvr,
// to_mec.clone(),
// tracker_state.clone(),
// socket_state.clone(),
// ));
let mut last_iter: Instant;
loop {
last_iter = Instant::now();
// Do this in an enclosing block so we don't keep holding the lock on the appsink
let video_frame = match {
let image_message = match {
let appsnk = match appsink.lock() {
Ok(e) => e,
Err(e) => {
@ -86,7 +73,7 @@ pub async fn remote_video_loop(
}
};
get_video_frame(&appsnk, &video_info)
get_video_frame(&appsnk)
} {
Ok(e) => e,
Err(e) => {
@ -99,14 +86,8 @@ pub async fn remote_video_loop(
}
};
if let Err(e) = sender
.send(Message::binary(
video_frame
.plane_data(0)
.expect("Could not get video frame data"),
))
.await
{
if let Err(e) = sender.send(image_message).await {
error!("There was an error sending the video frame to the server: {e}");
if let Err(e) = sender.close().await {
error!("Could not close socket to remote computer: {e}")
@ -162,7 +143,8 @@ pub async fn remote_video_loop(
}
// rate limit updates
// sleep_until(Instant::now() + Duration::from_millis(100)).await;
// prevent starving the GUI thread's lock on the tracker state
sleep_until(Instant::now() + Duration::from_millis(10)).await;
}
}
}
@ -197,18 +179,15 @@ pub async fn remote_video_loop(
socket_state.is_connected.store(false, Ordering::SeqCst);
}
fn get_video_frame(
appsink: &AppSink,
video_info: &VideoInfo,
) -> Result<VideoFrame<Readable>, String> {
let buffer = appsink
fn get_video_frame(appsink: &AppSink) -> Result<Message, String> {
let sample = appsink
.pull_sample()
.map_err(|e| format!("Could not pull appsink sample: {e}"))?
.buffer_owned()
.ok_or(format!("Could not get owned buffer from appsink"))?;
gstreamer_video::VideoFrame::from_buffer_readable(buffer, video_info)
.map_err(|_| format!("Unable to make video frame from buffer!"))
.map_err(|e| format!("Could not get sample: {e}"))?;
let buffer = sample.buffer().ok_or("Could not get buffer, was None")?;
let map = buffer
.map_readable()
.map_err(|e| format!("Could not get readable map: {e}"))?;
Ok(Message::binary(map.to_vec()))
}
fn calculate_tracking(

View file

@ -6,17 +6,14 @@ use snafu::prelude::*;
use std::str::FromStr;
use std::sync::{Arc, Mutex};
pub const JPEG_QUALITY: i32 = 40;
#[derive(Debug)]
pub struct WebcamPipeline {
pub pipeline: Pipeline,
pub src: Element,
pub converter: Element,
pub tee: Element,
pub queue_app: Element,
pub sink_paintable: Element,
pub resize: Element,
pub sink_frame: Arc<Mutex<AppSink>>,
}
@ -65,20 +62,33 @@ impl WebcamPipeline {
// println!("The queue is full!");
// }));
let appsink_queue = ElementFactory::make("queue")
.property("max-size-time", 1u64)
.property("max-size-buffers", 0u32)
.property("max-size-bytes", 0u32)
.build()
.context(BuildSnafu {
element: "appsink queue",
})?;
let resize = ElementFactory::make("videoscale")
.build()
.context(BuildSnafu {
element: "videoscale",
})?;
let caps_string = "video/x-raw,format=RGB,width=640,height=480";
// let caps_string = String::from("video/x-raw,format=RGB,max-buffers=1,drop=true");
let jpeg_enc = ElementFactory::make("jpegenc")
.property("quality", JPEG_QUALITY)
.build()
.context(BuildSnafu { element: "jpegenc" })?;
let caps_string = "image/jpeg,width=640,height=640";
let appsrc_caps = gstreamer::Caps::from_str(caps_string).context(BuildSnafu {
element: "appsink caps",
})?;
let sink_frame = AppSink::builder()
.name("frame_output")
.name("frame_appsink")
.sync(false)
.max_buffers(3u32)
.drop(true)
@ -95,7 +105,9 @@ impl WebcamPipeline {
&tee,
&queue_app,
&sink_paintable,
&appsink_queue,
&resize,
&jpeg_enc,
&sink_frame.upcast_ref(),
])
.context(LinkSnafu {
@ -103,16 +115,12 @@ impl WebcamPipeline {
to: "pipeline",
})?;
source.link(&convert).context(LinkSnafu {
from: "mfvideosrc",
to: "videoconvert",
})?;
convert.link(&rate).context(LinkSnafu {
from: "videoconvert",
to: "videorate",
Element::link_many(&[&source, &convert, &rate]).context(LinkSnafu {
from: "source et. al.",
to: "rate",
})?;
// -- BEGIN PAINTABLE SINK PIPELINE
let tee_caps =
gstreamer::caps::Caps::from_str("video/x-raw,framerate=15/1").context(BuildSnafu {
element: "tee caps",
@ -134,48 +142,66 @@ impl WebcamPipeline {
.ok_or(PipelineError::PadRequestError {
element: "gtk4 sink".to_string(),
})?;
tee_src_1
.link(&paintable_queue_sinkpad)
.context(PadLinkSnafu {
from: "tee src pad",
to: "gtk4 paintable queue",
})?;
queue_app.link(&sink_paintable).context(LinkSnafu {
from: "gtk4 paintable queue",
to: "gtk4 paintable",
})?;
// -- END PAINTABLE SINK PIPELINE
// -- BEGIN APPSINK PIPELINE
let tee_src_2 = tee
.request_pad_simple("src_%u")
.ok_or(PipelineError::PadRequestError {
element: "tee pad 2".to_string(),
})?;
let appsink_resize_sinkpad =
resize
let appsink_queue_sinkpad =
appsink_queue
.static_pad("sink")
.ok_or(PipelineError::PadRequestError {
element: "appsink queue".to_string(),
})?;
tee_src_2
.link(&appsink_resize_sinkpad)
.link(&appsink_queue_sinkpad)
.context(PadLinkSnafu {
from: "tee src pad 2",
to: "appsink queue sinkpad",
})?;
resize.link(&sink_frame).context(LinkSnafu {
from: "videoscale",
appsink_queue.link(&resize).context(LinkSnafu {
from: "appsink_queue",
to: "resize",
})?;
let resize_caps =
gstreamer::caps::Caps::from_str("video/x-raw,format=RGB,width=640,height=640")
.context(BuildSnafu {
element: "resize_caps",
})?;
resize
.link_filtered(&jpeg_enc, &resize_caps)
.context(LinkSnafu {
from: "jpeg_enc",
to: "resize_caps",
})?;
Element::link_many(&[&jpeg_enc, &sink_frame.upcast_ref()]).context(LinkSnafu {
from: "jpeg_enc",
to: "appsink",
})?;
Ok(WebcamPipeline {
pipeline,
src: source,
converter: convert,
tee,
queue_app,
sink_paintable,
resize,
sink_frame: Arc::new(Mutex::new(sink_frame)),
})
}

View file

@ -144,7 +144,7 @@ pub fn build_ui(app: &Application, config: Arc<RwLock<AppConfig>>, runtime: Hand
app.add_action_entries([connections_activate]);
// Main Event Channel
let (to_mec, mec) = async_channel::unbounded::<ApplicationEvent>();
let (to_mec, mec) = async_channel::bounded::<ApplicationEvent>(10);
let (to_gui, gui_recv) = async_channel::bounded::<GuiUpdate>(10);
let tracker_state = Arc::new(Mutex::new(TrackerState {
tracking_id: 0,

View file

@ -1,9 +1,10 @@
# Functional
- Up-direction maxes at -50 instead of 100
- Tracking lag real issue
## QoL
- Fine-tune the tracking speeds to be non-linear; make sure the Pi doesn't have that speed cap (remember: it could be expecting a 6V max speed).
- left and right need to hit 50 fast
- Up/down have really slow responses from the machine, so they need to be buffed too; they could just go 75 -> 100 as the only speeds.
- During connection waits, add loading dots