diff --git a/README.md b/README.md
index 4b34093..a880c7e 100644
--- a/README.md
+++ b/README.md
@@ -84,6 +84,7 @@ Hopefully shouldn't be too hard to make a function or possibly a lut that takes
 ```mermaid
 packet
+title RGBA
+8: "R"
+8: "G"
+8: "B"
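For reference, a minimal sketch of the kind of per-pixel unpacking the README paragraph above is aiming for, assuming a packed little-endian RGBA8 `u32` (the function name is illustrative, not code from this repo):

```rust
/// Unpack one packed RGBA8 pixel (R in the low byte, matching the
/// packet diagram above) into its four channels.
fn unpack_rgba8(px: u32) -> [u8; 4] {
    [
        (px & 0xff) as u8,         // R
        ((px >> 8) & 0xff) as u8,  // G
        ((px >> 16) & 0xff) as u8, // B
        ((px >> 24) & 0xff) as u8, // A
    ]
}
```

A LUT variant would simply precompute a table per channel; for 8-bit channels a `[u8; 256]` table each is enough.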
diff --git a/crates/iced-video/src/lib.rs b/crates/iced-video/src/lib.rs
index bef9091..b23d22a 100644
--- a/crates/iced-video/src/lib.rs
+++ b/crates/iced-video/src/lib.rs
@@ -17,27 +17,42 @@ pub type Result<T, E = Error> = core::result::Result<T, E>;
 use std::sync::{Arc, Mutex, atomic::AtomicBool};
 
+mod seal {
+    pub trait Sealed {}
+    impl Sealed for super::Unknown {}
+    impl Sealed for super::Ready {}
+}
+
+pub trait State: seal::Sealed {
+    fn is_ready() -> bool {
+        false
+    }
+}
+#[derive(Debug, Clone)]
+pub struct Unknown;
+#[derive(Debug, Clone)]
+pub struct Ready;
+impl State for Unknown {}
+impl State for Ready {
+    fn is_ready() -> bool {
+        true
+    }
+}
+
 /// This is the video handle that is used to control the video playback.
 /// This should be kept in the application state.
 #[derive(Debug, Clone)]
-pub struct VideoHandle<Message> {
+pub struct VideoHandle<Message, S: State = Unknown> {
     id: id::Id,
     pub source: source::VideoSource,
    frame_ready: Arc<AtomicBool>,
     on_new_frame: Option<Message>,
     on_end_of_stream: Option<Message>,
     on_about_to_finish: Option<Message>,
+    __marker: core::marker::PhantomData<S>,
 }
 
-impl<Message> VideoHandle<Message> {
-    pub fn id(&self) -> &id::Id {
-        &self.id
-    }
-
-    pub fn source(&self) -> &source::VideoSource {
-        &self.source
-    }
-
+impl<Message> VideoHandle<Message, Unknown> {
     pub fn new(url: impl AsRef<str>) -> Result<Self> {
         let source = source::VideoSource::new(url)?;
         let frame_ready = Arc::clone(&source.ready);
@@ -48,36 +63,64 @@ impl<Message> VideoHandle<Message> {
             on_end_of_stream: None,
             on_about_to_finish: None,
             frame_ready,
+            __marker: core::marker::PhantomData,
         })
     }
 
-    pub async fn wait(self) -> Result<Self> {
+    /// Creates a new video handle and waits for the metadata to be loaded.
+    pub async fn load(url: impl AsRef<str>) -> Result<VideoHandle<Message, Ready>> {
+        let handle = VideoHandle::new(url)?;
+        handle.wait().await
+    }
+}
+
+impl<Message, S: State> VideoHandle<Message, S> {
+    pub fn id(&self) -> &id::Id {
+        &self.id
+    }
+
+    pub fn source(&self) -> &source::VideoSource {
+        &self.source
+    }
+
+    pub async fn wait(self) -> Result<VideoHandle<Message, Ready>> {
         self.source.wait().await?;
-        Ok(self)
+        Ok(self.state::<Ready>())
     }
 
-    pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
-        let sub = widget::VideoSubscription {
-            id: self.id.clone(),
-            on_end_of_stream: self.on_end_of_stream.clone(),
-            on_new_frame: self.on_new_frame.clone(),
-            on_about_to_finish: self.on_about_to_finish.clone(),
-            bus: self.source.bus.clone(),
-        };
-        iced_futures::subscription::from_recipe(sub)
+    fn state<S2: State>(self) -> VideoHandle<Message, S2> {
+        VideoHandle {
+            id: self.id,
+            source: self.source,
+            on_new_frame: self.on_new_frame,
+            on_end_of_stream: self.on_end_of_stream,
+            on_about_to_finish: self.on_about_to_finish,
+            frame_ready: self.frame_ready,
+            __marker: core::marker::PhantomData,
+        }
     }
 
-    pub fn subscription_with<State>(
-        &self,
-        state: &State,
-        f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
-    ) -> iced_futures::subscription::Subscription<Message>
-    where
-        State: Send + Sync + 'static,
-    {
-        let sub = self.subscription();
-        iced_futures::subscription::Subscription::batch([sub, f(state)])
-    }
+    // pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
+    //     let sub = widget::VideoSubscription {
+    //         id: self.id.clone(),
+    //         on_end_of_stream: self.on_end_of_stream.clone(),
+    //         on_new_frame: self.on_new_frame.clone(),
+    //         on_about_to_finish: self.on_about_to_finish.clone(),
+    //         bus: self.source.bus.clone(),
+    //     };
+    //     iced_futures::subscription::from_recipe(sub)
+    // }
+    //
+    // pub fn subscription_with<State>(
+    //     &self,
+    //     state: &State,
+    //     f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
+    // ) -> iced_futures::subscription::Subscription<Message>
+    // where
+    //     State: Send + Sync + 'static,
+    // {
+    //     let sub = self.subscription();
+    //     iced_futures::subscription::Subscription::batch([sub, f(state)])
+    // }
 
     pub fn on_new_frame(self, message: Message) -> Self {
         Self {
@@ -109,10 +152,13 @@
     pub fn stop(&self) {
         self.source.stop();
     }
+}
 
-    /// Creates a new video handle and waits for the metadata to be loaded.
-    pub async fn load(url: impl AsRef<str>) -> Result<Self> {
-        let handle = Self::new(url)?;
-        handle.wait().await
+impl<Message> VideoHandle<Message, Ready> {
+    pub fn format(&self) -> Result<gst::VideoFormat> {
+        self.source
+            .format()
+            .change_context(Error)
+            .attach("Failed to get video format")
     }
 }
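A usage sketch of the typestate split above (hedged: `Msg` is a stand-in message type, and `Result` is the crate alias from this file): `new` yields an `Unknown` handle, `wait`/`load` move it to `Ready`, and `format` only exists on `Ready`, so querying metadata too early becomes a compile error instead of a runtime failure.

```rust
use iced_video::{Ready, Result, Unknown, VideoHandle};

#[derive(Debug, Clone)]
enum Msg {}

async fn open(url: &str) -> Result<()> {
    // Unknown handle: playback controls are available, `format()` is not.
    let handle: VideoHandle<Msg, Unknown> = VideoHandle::new(url)?;

    // `wait` consumes the handle and re-tags it as Ready.
    let ready: VideoHandle<Msg, Ready> = handle.wait().await?;

    // Only compiles on VideoHandle<_, Ready>.
    let format = ready.format()?;
    tracing::info!(?format, "negotiated video format");
    Ok(())
}
```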
diff --git a/crates/iced-video/src/primitive.rs b/crates/iced-video/src/primitive.rs
index aa88dac..d2ead03 100644
--- a/crates/iced-video/src/primitive.rs
+++ b/crates/iced-video/src/primitive.rs
@@ -66,6 +66,14 @@ pub struct VideoFrame {
     pub size: wgpu::Extent3d,
     pub ready: Arc<AtomicBool>,
     pub frame: Arc<Mutex<gst::Sample>>,
+    pub format: VideoFormat,
+}
+
+#[derive(Debug, Clone, Copy)]
+pub enum ToneMapping {
+    None,
+    InverseOETF,
+    Reinhard,
 }
 
 impl iced_wgpu::Primitive for VideoFrame {
@@ -80,8 +88,13 @@ impl iced_wgpu::Primitive for VideoFrame {
         viewport: &iced_wgpu::graphics::Viewport,
     ) {
         let video = pipeline.videos.entry(self.id.clone()).or_insert_with(|| {
-            let texture =
-                VideoTexture::new("iced-video-texture", self.size, device, pipeline.format);
+            let texture = VideoTexture::new(
+                "iced-video-texture",
+                self.size,
+                device,
+                pipeline.format,
+                self.format,
+            );
             let conversion_matrix = if texture.format().is_wide() {
                 BT2020_TO_RGB
             } else {
@@ -235,12 +248,16 @@
 /// NV12 or P010 are only supported in DX12 and Vulkan backends.
 /// While we can use vulkan with moltenvk on macos, I'd much rather use metal directly
+/// Right now only supports interleaved UV formats.
+/// For planar formats we would need 3 textures.
 #[derive(Debug)]
 pub struct VideoTexture {
     y: wgpu::Texture,
     uv: wgpu::Texture,
     size: wgpu::Extent3d,
-    pixel_format: gst::VideoFormat,
+    video_format: VideoFormat,
+    surface_format: wgpu::TextureFormat,
+    tone_mapping: ToneMapping,
 }
 
 impl VideoTexture {
@@ -252,9 +269,28 @@ impl VideoTexture {
         label: &str,
         size: wgpu::Extent3d,
         device: &wgpu::Device,
-        format: wgpu::TextureFormat,
+        surface_format: wgpu::TextureFormat,
+        video_format: VideoFormat,
     ) -> Self {
+        let surface_hdr = surface_format.is_wide();
+        let video_hdr = matches!(video_format, VideoFormat::P01010le | VideoFormat::P016Le);
+
+        if surface_hdr && !video_hdr {
+            tracing::warn!("Surface texture is HDR but video format is SDR");
+        } else if !surface_hdr && video_hdr {
+            tracing::warn!("Video format is HDR but surface does not support HDR");
+        }
+
+        let tone_mapping = if surface_hdr && video_hdr {
+            ToneMapping::None
+        } else if surface_hdr && !video_hdr {
+            ToneMapping::InverseOETF
+        } else if !surface_hdr && video_hdr {
+            ToneMapping::Reinhard
+        } else {
+            ToneMapping::None
+        };
+
         let y_texture = device.create_texture(&wgpu::TextureDescriptor {
             label: Some(&format!("{}-y", label)),
             size: wgpu::Extent3d {
@@ -265,7 +301,7 @@
             mip_level_count: 1,
             sample_count: 1,
             dimension: wgpu::TextureDimension::D2,
-            format: wgpu::TextureFormat::R8Unorm,
+            format: wgpu::TextureFormat::R16Unorm,
             usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
             view_formats: &[],
         });
@@ -279,7 +315,7 @@
             mip_level_count: 1,
             sample_count: 1,
             dimension: wgpu::TextureDimension::D2,
-            format: wgpu::TextureFormat::Rg8Unorm,
+            format: wgpu::TextureFormat::Rg16Unorm,
             usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
             view_formats: &[],
         });
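For orientation, the byte math behind the R16Unorm/Rg16Unorm switch above, as a sketch assuming interleaved 4:2:0 with 16-bit samples (NV12 promoted to P010-style storage; the helper is illustrative, not repo code):

```rust
/// Plane sizes for a width x height frame with 16-bit samples and 4:2:0
/// chroma: the UV plane stores one interleaved (U, V) pair per 2x2 block,
/// so it is always half the Y plane in bytes.
fn plane_bytes(width: usize, height: usize) -> (usize, usize) {
    let y = width * height * 2; // one u16 per pixel
    let uv = (width / 2) * (height / 2) * 2 * 2; // u16 U + u16 V per block
    debug_assert_eq!(uv, y / 2);
    (y, uv)
}
```

That `uv == y / 2` identity is what `write_texture` further down leans on when it slices the incoming buffer.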
@@ -287,151 +323,81 @@ impl VideoTexture {
             y: y_texture,
             uv: uv_texture,
             size,
-            pixel_format: VideoFormat::Unknown,
+            surface_format,
+            video_format,
+            tone_mapping,
         }
     }
 
+    // This returns the surface texture format, not the video pixel format
     pub fn format(&self) -> wgpu::TextureFormat {
-        match self {
-            VideoTexture::NV12(_) => wgpu::TextureFormat::NV12,
-            VideoTexture::P010(_) => wgpu::TextureFormat::P010,
-            VideoTexture::Composite { y, uv } => {
-                todo!()
-                // if y.format().is_wide() {
-                //     wgpu::TextureFormat::P010
-                // } else {
-                //     wgpu::TextureFormat::NV12
-                // }
-            }
-        }
+        self.surface_format
     }
 
     pub fn y_texture(&self) -> wgpu::TextureView {
-        match self {
-            VideoTexture::NV12(nv12) => nv12.create_view(&wgpu::TextureViewDescriptor {
-                label: Some("iced-video-texture-view-y-nv12"),
-                format: Some(wgpu::TextureFormat::R8Unorm),
-                ..Default::default()
-            }),
-            VideoTexture::P010(p010) => p010.create_view(&wgpu::TextureViewDescriptor {
-                label: Some("iced-video-texture-view-y-p010"),
-                format: Some(wgpu::TextureFormat::R16Unorm),
-                ..Default::default()
-            }),
-            VideoTexture::Composite { y, .. } => {
-                y.create_view(&wgpu::TextureViewDescriptor::default())
-            }
-        }
+        self.y.create_view(&wgpu::TextureViewDescriptor::default())
     }
 
     pub fn uv_texture(&self) -> wgpu::TextureView {
-        match self {
-            VideoTexture::NV12(nv12) => nv12.create_view(&wgpu::TextureViewDescriptor {
-                label: Some("iced-video-texture-view-uv-nv12"),
-                format: Some(wgpu::TextureFormat::Rg8Unorm),
-                ..Default::default()
-            }),
-            VideoTexture::P010(p010) => p010.create_view(&wgpu::TextureViewDescriptor {
-                label: Some("iced-video-texture-view-uv-p010"),
-                format: Some(wgpu::TextureFormat::Rg16Unorm),
-                ..Default::default()
-            }),
-            VideoTexture::Composite { uv, .. } => {
-                uv.create_view(&wgpu::TextureViewDescriptor::default())
-            }
-        }
+        self.uv.create_view(&wgpu::TextureViewDescriptor::default())
    }
 
     pub fn resize(&self, name: &str, new_size: wgpu::Extent3d, device: &wgpu::Device) -> Self {
-        VideoTexture::new(name, new_size, device, self.format())
+        VideoTexture::new(name, new_size, device, self.format(), self.video_format)
+    }
+
+    pub fn pixel_format(&self) -> VideoFormat {
+        self.video_format
+    }
+
+    pub fn set_pixel_format(&mut self, format: VideoFormat) {
+        self.video_format = format;
     }
 
     /// This assumes that the data is laid out correctly for the texture format.
     pub fn write_texture(&self, data: &[u8], queue: &wgpu::Queue) {
-        match self {
-            VideoTexture::NV12(nv12) => {
-                queue.write_texture(
-                    wgpu::TexelCopyTextureInfo {
-                        texture: nv12,
-                        mip_level: 0,
-                        origin: wgpu::Origin3d::ZERO,
-                        aspect: wgpu::TextureAspect::All,
-                    },
-                    data,
-                    wgpu::TexelCopyBufferLayout {
-                        offset: 0,
-                        bytes_per_row: Some(nv12.size().width * 3),
-                        rows_per_image: Some(nv12.size().height),
-                    },
-                    nv12.size(),
-                );
-            }
-            VideoTexture::P010(p010) => {
-                dbg!(&p010.size());
-                dbg!(data.len());
-                queue.write_texture(
-                    wgpu::TexelCopyTextureInfo {
-                        texture: p010,
-                        mip_level: 0,
-                        origin: wgpu::Origin3d::ZERO,
-                        aspect: wgpu::TextureAspect::All,
-                    },
-                    data,
-                    wgpu::TexelCopyBufferLayout {
-                        offset: 0,
-                        bytes_per_row: Some(p010.size().width * 3),
-                        rows_per_image: Some(p010.size().height),
-                    },
-                    p010.size(),
-                );
-            }
-            VideoTexture::Composite { y, uv } => {
-                let y_size = wgpu::Extent3d {
-                    width: y.size().width,
-                    height: y.size().height,
-                    depth_or_array_layers: 1,
-                };
-                let uv_size = wgpu::Extent3d {
-                    width: uv.size().width,
-                    height: uv.size().height,
-                    depth_or_array_layers: 1,
-                };
-                let y_data_size = (y_size.width * y_size.height) as usize;
-                let uv_data_size = (uv_size.width * uv_size.height * 2) as usize; // UV is interleaved
-
-                queue.write_texture(
-                    wgpu::TexelCopyTextureInfo {
-                        texture: y,
-                        mip_level: 0,
-                        origin: wgpu::Origin3d::ZERO,
-                        aspect: wgpu::TextureAspect::All,
-                    },
-                    &data[0..y_data_size],
-                    wgpu::TexelCopyBufferLayout {
-                        offset: 0,
-                        bytes_per_row: Some(y_size.width),
-                        rows_per_image: Some(y_size.height),
-                    },
-                    y_size,
-                );
-
-                queue.write_texture(
-                    wgpu::TexelCopyTextureInfo {
-                        texture: uv,
-                        mip_level: 0,
-                        origin: wgpu::Origin3d::ZERO,
-                        aspect: wgpu::TextureAspect::All,
-                    },
-                    &data[y_data_size..(y_data_size + uv_data_size)],
-                    wgpu::TexelCopyBufferLayout {
-                        offset: 0,
-                        bytes_per_row: Some(uv_size.width * 2),
-                        rows_per_image: Some(uv_size.height),
-                    },
-                    uv_size,
-                );
-            }
-        }
+        // let (y, u, v) = match self.video_format {
+        //     VideoFormat::Nv12 | VideoFormat::P01010le | VideoFormat::P016Le => (4, 1, 1),
+        //     _ => (1, 1),
+        // };
+        let Self { y, uv, .. } = self;
+        let y_size = y.size();
+        let uv_size = uv.size();
+
+        let y_data_size = (y_size.width * y_size.height * 2) as usize;
+        let uv_data_size = (y_data_size / 2) as usize; // UV is interleaved
+
+        queue.write_texture(
+            wgpu::TexelCopyTextureInfo {
+                texture: y,
+                mip_level: 0,
+                origin: wgpu::Origin3d::ZERO,
+                aspect: wgpu::TextureAspect::All,
+            },
+            &data[0..y_data_size],
+            wgpu::TexelCopyBufferLayout {
+                offset: 0,
+                bytes_per_row: Some(y_size.width * 2), // two bytes per R16 sample
+                rows_per_image: Some(y_size.height),
+            },
+            y_size,
+        );
+
+        queue.write_texture(
+            wgpu::TexelCopyTextureInfo {
+                texture: uv,
+                mip_level: 0,
+                origin: wgpu::Origin3d::ZERO,
+                aspect: wgpu::TextureAspect::All,
+            },
+            &data[y_data_size..(y_data_size + uv_data_size)],
+            wgpu::TexelCopyBufferLayout {
+                offset: 0,
+                bytes_per_row: Some(uv_size.width * 4), // four bytes per interleaved Rg16 texel
+                rows_per_image: Some(uv_size.height),
+            },
+            uv_size,
+        );
     }
 }
 
@@ -448,12 +414,6 @@ impl VideoFrameData {
     pub fn is_hdr(&self) -> bool {
         self.texture.format().is_wide()
     }
-    pub fn is_nv12(&self) -> bool {
-        matches!(self.texture.format(), wgpu::TextureFormat::NV12)
-    }
-    pub fn is_p010(&self) -> bool {
-        matches!(self.texture.format(), wgpu::TextureFormat::P010)
-    }
 }
 
 #[derive(Debug)]
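The `ToneMapping` variants above only pick a strategy; the curves themselves would live in the shader. As a rough scalar illustration of the two non-trivial arms (my reading of the intent, not code from this PR): Reinhard squashes HDR luminance into [0, 1) for SDR surfaces, and an inverse OETF linearizes gamma-encoded SDR values before remapping onto an HDR surface.

```rust
/// Reinhard: maps [0, inf) luminance into [0, 1) for SDR output.
fn reinhard(l: f32) -> f32 {
    l / (1.0 + l)
}

/// Inverse of the sRGB OETF: decodes a gamma-encoded SDR value back to
/// linear light (constants are the standard sRGB ones).
fn srgb_inverse_oetf(v: f32) -> f32 {
    if v <= 0.04045 {
        v / 12.92
    } else {
        ((v + 0.055) / 1.055).powf(2.4)
    }
}
```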
diff --git a/crates/iced-video/src/shaders/passthrough.wgsl b/crates/iced-video/src/shaders/passthrough.wgsl
index db509db..59ff28e 100644
--- a/crates/iced-video/src/shaders/passthrough.wgsl
+++ b/crates/iced-video/src/shaders/passthrough.wgsl
@@ -1,13 +1,3 @@
-// struct VertexOutput {
-//     @builtin(position) clip_position: vec4f,
-//     @location(0) coords: vec2f,
-// }
-
-// struct VertexInput {
-//     // @location(0) position: vec3<f32>,
-//     // @location(1) tex_coords: vec2<f32>,
-// }
-
 struct VertexOutput {
     @builtin(position) clip_position: vec4<f32>,
     @location(0) tex_coords: vec2<f32>,
@@ -15,38 +5,30 @@ struct VertexOutput {
 @vertex
 fn vs_main(
-    // model: VertexInput,
+    @builtin(vertex_index) in_vertex_index: u32,
 ) -> VertexOutput {
     var out: VertexOutput;
-    out.tex_coords = vec2(0.0, 0.0);
-    out.clip_position = vec4(0,0,0, 1.0);
+    let uv = vec2(f32((in_vertex_index << 1u) & 2u), f32(in_vertex_index & 2u));
+    out.clip_position = vec4(uv * 2.0 - 1.0, 0.0, 1.0);
+    out.clip_position.y = -out.clip_position.y;
+    out.tex_coords = uv;
     return out;
 }
-
-
-
-
-
-// @vertex
-// fn vs_main(@location(0) input: vec2f) -> VertexOutput {
-//     var out: VertexOutput;
-//     out.clip_position = vec4f(input, 0.0, 1.0);
-//     out.coords = input * 0.5 + vec2f(0.5, 0.5);
-//     return out;
-// }
 
 @group(0) @binding(0) var y_texture: texture_2d<f32>;
 @group(0) @binding(1) var uv_texture: texture_2d<f32>;
 @group(0) @binding(2) var texture_sampler: sampler;
-@group(0) @binding(3) var<uniform> rgb_primaries: mat3x3<f32>;
+@group(0) @binding(3) var<uniform> rgb_primaries: mat4x4<f32>;
 
 @fragment
 fn fs_main(input: VertexOutput) -> @location(0) vec4<f32> {
     let y = textureSample(y_texture, texture_sampler, input.tex_coords).r;
     let uv = textureSample(uv_texture, texture_sampler, input.tex_coords).rg;
-    let yuv = vec3f(y, uv);
+    let yuv = vec4f(y, uv, 0);
     let rgb = rgb_primaries * yuv;
-    return vec4f(rgb, 1.0);
+    return vec4f(rgb.r, rgb.g, rgb.b, 1.0);
+    // let rgb = rgb_primaries * yuv;
+    // return vec4f(rgb, 1.0);
 }
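The new `vs_main` is the usual single-triangle fullscreen trick: vertex indices 0..3 expand to UVs (0,0), (2,0), (0,2), so one oversized triangle covers the whole clip-space square without a vertex buffer. A quick Rust check of the bit-twiddling (illustrative only):

```rust
// Mirrors `vec2(f32((i << 1u) & 2u), f32(i & 2u))` from the shader.
fn fullscreen_uv(i: u32) -> (f32, f32) {
    (((i << 1) & 2) as f32, (i & 2) as f32)
}

fn main() {
    for i in 0..3u32 {
        let (u, v) = fullscreen_uv(i);
        // uv (0,0) -> clip (-1,-1); (2,0) -> (3,-1); (0,2) -> (-1,3)
        println!("vertex {i}: uv=({u},{v}), clip=({}, {})", u * 2.0 - 1.0, v * 2.0 - 1.0);
    }
}
```

One observation on the `mat3x3` to `mat4x4` widening: with `yuv.w` set to 0 the matrix's fourth column is inert, whereas a w of 1 would let the same matrix carry the usual chroma -0.5 (and limited-range luma) offsets as an affine term.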
diff --git a/crates/iced-video/src/source.rs b/crates/iced-video/src/source.rs
index 598a8bb..3884326 100644
--- a/crates/iced-video/src/source.rs
+++ b/crates/iced-video/src/source.rs
@@ -13,7 +13,6 @@ use std::sync::{Arc, Mutex, atomic::AtomicBool};
 #[derive(Debug, Clone)]
 pub struct VideoSource {
     pub(crate) playbin: Playbin3,
-    pub(crate) videoconvert: VideoConvert,
     pub(crate) appsink: AppSink,
     pub(crate) bus: Bus,
     pub(crate) ready: Arc<AtomicBool>,
@@ -27,22 +26,12 @@ impl VideoSource {
     /// now.
     pub fn new(url: impl AsRef<str>) -> Result<Self> {
         Gst::new();
-        let videoconvert = VideoConvert::new("iced-video-convert")
-            // .change_context(Error)?
-            // .with_output_format(gst::plugins::videoconvertscale::VideoFormat::Rgba)
-            .change_context(Error)?;
         let mut appsink = AppSink::new("iced-video-sink").change_context(Error)?;
         appsink
             .drop(true)
             .sync(true)
             // .async_(true)
-            .emit_signals(true)
-            .caps(
-                Caps::builder(CapsType::Video)
-                    .field("format", "RGB10A2_LE") // Forced for now
-                    .build(),
-            );
-        let video_sink = videoconvert.link(&appsink).change_context(Error)?;
+            .emit_signals(true);
 
         let playbin = Playbin3::new("iced-video")
             .change_context(Error)?
             .with_uri(url.as_ref())
@@ -50,13 +39,13 @@ impl VideoSource {
             .with_buffer_size(4096 * 4096 * 4 * 3)
             .with_ring_buffer_max_size(4096 * 4096 * 4 * 3)
             .with_flags(Playbin3::default_flags() | PlayFlags::DOWNLOAD)
-            .with_video_sink(&video_sink);
+            .with_video_sink(&appsink);
 
         let bus = playbin.bus().change_context(Error)?;
         playbin.pause().change_context(Error)?;
         let ready = Arc::new(AtomicBool::new(false));
         let frame = Arc::new(Mutex::new(gst::Sample::new()));
-        appsink.on_new_frame({
+        appsink.on_new_sample({
             let ready = Arc::clone(&ready);
             let frame = Arc::clone(&frame);
             move |appsink| {
@@ -75,7 +64,6 @@ impl VideoSource {
 
         Ok(Self {
             playbin,
-            videoconvert,
             appsink,
             bus,
             ready,
@@ -85,6 +73,26 @@ impl VideoSource {
     }
 
     pub async fn wait(&self) -> Result<()> {
+        use futures_lite::StreamExt;
+        // self.bus_stream()
+        //     .for_each(|msg: gst::Message| {
+        //         use gst::gstreamer::prelude::*;
+        //         match msg.view() {
+        //             MessageView::Eos(_) => {
+        //                 tracing::info!("Video reached end of stream");
+        //             }
+        //             MessageView::Error(err) => {
+        //                 tracing::error!(
+        //                     "Video Error from {:?}: {} ({:?})",
+        //                     err.src().map(|s| s.path_string()),
+        //                     err.error(),
+        //                     err.debug()
+        //                 );
+        //             }
+        //             view => tracing::info!("Video Message: {:#?}", view),
+        //         }
+        //     })
+        //     .await;
         self.playbin
             .wait_for_states(&[gst::State::Paused, gst::State::Playing])
             .await
@@ -93,6 +101,23 @@ impl VideoSource {
         Ok(())
     }
 
+    pub fn format(&self) -> Result<gst::VideoFormat> {
+        let caps = self
+            .appsink
+            .sink("sink")
+            .current_caps()
+            .change_context(Error)?;
+        let format = caps
+            .format()
+            .ok_or(Error)
+            .attach("Failed to get video caps structure")?;
+        Ok(format)
+    }
+
+    pub fn bus_stream(&self) -> impl futures_lite::Stream<Item = gst::Message> {
+        self.bus.stream()
+    }
+
     pub fn is_playing(&self) -> Result<bool> {
         let state = self.playbin.state(None).change_context(Error)?;
         Ok(state == gst::State::Playing)
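`format()` depends on whatever the wrapper's `sink("sink").current_caps()` exposes. For comparison, the equivalent query in raw gstreamer-rs would look roughly like this (hedged sketch; the wrapper API above is this repo's own):

```rust
use gstreamer::prelude::*;

/// Rough raw-gstreamer equivalent of `VideoSource::format()` above.
fn negotiated_format(appsink: &gstreamer_app::AppSink) -> Option<gstreamer_video::VideoFormat> {
    let pad = appsink.static_pad("sink")?;
    let caps = pad.current_caps()?; // None until caps are negotiated
    let info = gstreamer_video::VideoInfo::from_caps(&caps).ok()?;
    Some(info.format())
}
```

This is also why `format()` is only exposed on `Ready` handles: before preroll completes there are no negotiated caps to read.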
diff --git a/crates/iced-video/src/widget.rs b/crates/iced-video/src/widget.rs
index c91630c..cf5a613 100644
--- a/crates/iced-video/src/widget.rs
+++ b/crates/iced-video/src/widget.rs
@@ -10,7 +10,8 @@
 where
     Renderer: PrimitiveRenderer,
 {
     id: id::Id,
-    handle: &'a VideoHandle<Message>,
+    handle: &'a VideoHandle<Message, Ready>,
+    video_format: gst::VideoFormat,
     content_fit: iced::ContentFit,
     width: iced::Length,
     height: iced::Length,
@@ -21,12 +22,15 @@
 impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
 where
     Renderer: PrimitiveRenderer,
-    Message: Clone,
+    Message: Clone + Send + Sync,
 {
-    pub fn new(handle: &'a VideoHandle<Message>) -> Self {
+    pub fn new(handle: &'a VideoHandle<Message, Ready>) -> Self {
         Self {
             id: handle.id.clone(),
             handle: &handle,
+            video_format: handle
+                .format()
+                .expect("Failed to get video format during widget creation"),
             content_fit: iced::ContentFit::Contain,
             width: Length::Shrink,
             height: Length::Shrink,
@@ -74,7 +78,7 @@
 impl<Message, Theme, Renderer> iced::Widget<Message, Theme, Renderer> for Video<'_, Message, Theme, Renderer>
 where
-    Message: Clone,
+    Message: Clone + Send + Sync,
     Renderer: PrimitiveRenderer,
 {
     fn size(&self) -> iced::Size<iced::Length> {
@@ -141,6 +145,7 @@
                 },
                 ready: Arc::clone(&self.handle.frame_ready),
                 frame: Arc::clone(&self.handle.source.frame),
+                format: self.video_format,
             },
         );
     };
@@ -184,7 +189,7 @@
 impl<'a, Message, Theme, Renderer> From<Video<'a, Message, Theme, Renderer>>
     for iced::Element<'a, Message, Theme, Renderer>
 where
-    Message: 'a + Clone,
+    Message: Send + Sync + 'a + Clone,
     Theme: 'a,
     Renderer: 'a + iced_wgpu::primitive::Renderer,
 {
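Since `Video::new` now takes `&VideoHandle<Message, Ready>`, a view can only embed video once the handle has been driven through `wait`/`load`, which is what keeps the `expect` on `format()` from firing in practice. A minimal hedged sketch of a call site (mirroring `player` in ui-iced below; the empty `Message` enum is a placeholder):

```rust
use iced_video::{Ready, Video, VideoHandle};

#[derive(Debug, Clone)]
enum Message {}

// The Ready bound means a half-initialized handle cannot reach this view.
fn view(video: &VideoHandle<Message, Ready>) -> iced::Element<'_, Message> {
    Video::new(video).width(iced::Length::Fill).into()
}
```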
diff --git a/gst/src/lib.rs b/gst/src/lib.rs
index fb3f313..83df5cc 100644
--- a/gst/src/lib.rs
+++ b/gst/src/lib.rs
@@ -17,6 +17,7 @@ pub use element::*;
 pub use gstreamer;
 #[doc(inline)]
 pub use gstreamer::{Message, MessageType, MessageView, State};
+pub use gstreamer_video::VideoFormat;
 pub use pad::*;
 pub use pipeline::*;
 pub use plugins::*;
diff --git a/gst/src/plugins/app/appsink.rs b/gst/src/plugins/app/appsink.rs
index a61e219..0e0d93e 100644
--- a/gst/src/plugins/app/appsink.rs
+++ b/gst/src/plugins/app/appsink.rs
@@ -6,9 +6,71 @@ pub use gstreamer_app::AppSinkCallbacks;
 wrap_gst!(AppSink, gstreamer::Element);
 parent_child!(Element, AppSink);
 
+pub struct AppSinkBuilder {
+    inner: AppSink,
+    callbacks: Option<gstreamer_app::app_sink::AppSinkCallbacksBuilder>,
+}
+
+impl AppSinkBuilder {
+    pub fn on_new_sample<F>(mut self, mut f: F) -> Self
+    where
+        F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
+    {
+        let mut callbacks_builder = self
+            .callbacks
+            .take()
+            .unwrap_or_else(gstreamer_app::app_sink::AppSinkCallbacks::builder);
+        callbacks_builder = callbacks_builder.new_sample(move |appsink| {
+            use glib::object::Cast;
+            let element = appsink.upcast_ref::<gstreamer::Element>();
+            let appsink = AppSink::from_gst_ref(element);
+            std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(appsink)))
+                .unwrap_or(Err(gstreamer::FlowError::Error))
+                .map(|_| gstreamer::FlowSuccess::Ok)
+        });
+        self.callbacks = Some(callbacks_builder);
+        self
+    }
+
+    pub fn on_new_preroll<F>(mut self, mut f: F) -> Self
+    where
+        F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
+    {
+        let mut callbacks_builder = self
+            .callbacks
+            .take()
+            .unwrap_or_else(gstreamer_app::app_sink::AppSinkCallbacks::builder);
+        callbacks_builder = callbacks_builder.new_preroll(move |appsink| {
+            use glib::object::Cast;
+            let element = appsink.upcast_ref::<gstreamer::Element>();
+            let appsink = AppSink::from_gst_ref(element);
+            std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(appsink)))
+                .unwrap_or(Err(gstreamer::FlowError::Error))
+                .map(|_| gstreamer::FlowSuccess::Ok)
+        });
+        self.callbacks = Some(callbacks_builder);
+        self
+    }
+
+    pub fn build(self) -> AppSink {
+        let AppSinkBuilder { inner, callbacks } = self;
+        if let Some(callbacks) = callbacks {
+            inner.appsink().set_callbacks(callbacks.build());
+        }
+        inner
+    }
+}
+
 impl Sink for AppSink {}
 
 impl AppSink {
+    pub fn builder(name: impl AsRef<str>) -> AppSinkBuilder {
+        let inner = AppSink::new(name).expect("Failed to create AppSink");
+        AppSinkBuilder {
+            inner,
+            callbacks: None,
+        }
+    }
+
     fn appsink(&self) -> &gstreamer_app::AppSink {
         self.inner
             .downcast_ref::<gstreamer_app::AppSink>()
@@ -54,7 +116,7 @@ impl AppSink {
         self
     }
 
-    pub fn on_new_frame<F>(&mut self, mut f: F) -> &mut Self
+    pub fn on_new_sample<F>(&mut self, mut f: F) -> &mut Self
     where
         F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
     {
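A quick sketch of driving the new builder (assuming the semantics above: callbacks accumulate into a single `AppSinkCallbacks`, `build` installs them, and a panic or `Err` inside a callback surfaces as `FlowError::Error`):

```rust
fn make_sink() -> AppSink {
    AppSink::builder("iced-video-sink")
        .on_new_sample(|_sink| {
            // pull and stash the sample here
            Ok(())
        })
        .on_new_preroll(|_sink| Ok(()))
        .build()
}
```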
diff --git a/ui-iced/src/lib.rs b/ui-iced/src/lib.rs
index e202f73..5ed259d 100644
--- a/ui-iced/src/lib.rs
+++ b/ui-iced/src/lib.rs
@@ -2,7 +2,7 @@ mod settings;
 mod video;
 mod shared_string;
 
-use iced_video::{Video, VideoHandle};
+use iced_video::{Ready, Video, VideoHandle};
 use shared_string::SharedString;
 use std::sync::Arc;
@@ -140,7 +140,7 @@ struct State {
     screen: Screen,
     settings: settings::SettingsState,
     is_authenticated: bool,
-    video: Option<Arc<VideoHandle<Message>>>,
+    video: Option<Arc<VideoHandle<Message, Ready>>>,
 }
 
 impl State {
diff --git a/ui-iced/src/video.rs b/ui-iced/src/video.rs
index b7e1f28..999429e 100644
--- a/ui-iced/src/video.rs
+++ b/ui-iced/src/video.rs
@@ -3,7 +3,7 @@ use super::*;
 pub enum VideoMessage {
     EndOfStream,
     Open(url::Url),
-    Loaded(VideoHandle<Message>),
+    Loaded(VideoHandle<Message, Ready>),
     Pause,
     Play,
     Seek(f64),
@@ -55,16 +55,14 @@ pub fn update(state: &mut State, message: VideoMessage) -> Task<Message> {
             Task::none()
         }
         VideoMessage::Test => {
-            let url = url::Url::parse(
-                "https://gstreamer.freedesktop.org/data/media/sintel_trailer-480p.webm",
-            )
+            let url = url::Url::parse("https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c")
                 .expect("Impossible: Failed to parse hardcoded URL");
             Task::done(VideoMessage::Open(url)).map(Message::Video)
         }
     }
 }
 
-pub fn player(video: &VideoHandle<Message>) -> Element<'_, Message> {
+pub fn player(video: &VideoHandle<Message, Ready>) -> Element<'_, Message> {
     container(
         Video::new(video)
             .width(Length::Fill)