feat(iced-video): added video format to the video frame
2026-01-14 09:51:56 +05:30
parent 97a7a632d4
commit 9dac0b6c78
10 changed files with 310 additions and 230 deletions


@@ -84,6 +84,7 @@ Hopefully shouldn't be too hard to make a function or possibly a lut that takes
 ```mermaid
 packet
+title RGBA
 +8: "R"
 +8: "G"
 +8: "B"

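The packet diagram above documents the byte order a conversion helper (or LUT) would have to honor. As a sanity check, here is a hypothetical helper in Rust mirroring that layout; it is illustrative only and not part of this commit:

```rust
// Hypothetical helper matching the RGBA packet diagram: one byte per
// channel, stored in R, G, B, A order.
fn pack_rgba8(r: u8, g: u8, b: u8, a: u8) -> [u8; 4] {
    [r, g, b, a]
}

fn main() {
    // An opaque red pixel.
    assert_eq!(pack_rgba8(255, 0, 0, 255), [255, 0, 0, 255]);
}
```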

@@ -17,27 +17,42 @@ pub type Result<T, E = Report<Error>> = core::result::Result<T, E>;
 use std::sync::{Arc, Mutex, atomic::AtomicBool};

+mod seal {
+    pub trait Sealed {}
+    impl Sealed for super::Unknown {}
+    impl Sealed for super::Ready {}
+}
+
+pub trait State: seal::Sealed {
+    fn is_ready() -> bool {
+        false
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct Unknown;
+
+#[derive(Debug, Clone)]
+pub struct Ready;
+
+impl State for Unknown {}
+
+impl State for Ready {
+    fn is_ready() -> bool {
+        true
+    }
+}
+
 /// This is the video handle that is used to control the video playback.
 /// This should be keps in the application state.
 #[derive(Debug, Clone)]
-pub struct VideoHandle<Message> {
+pub struct VideoHandle<Message, S: State = Unknown> {
     id: id::Id,
     pub source: source::VideoSource,
     frame_ready: Arc<AtomicBool>,
     on_new_frame: Option<Box<Message>>,
     on_end_of_stream: Option<Box<Message>>,
     on_about_to_finish: Option<Box<Message>>,
+    __marker: core::marker::PhantomData<S>,
 }

-impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
-    pub fn id(&self) -> &id::Id {
-        &self.id
-    }
-
-    pub fn source(&self) -> &source::VideoSource {
-        &self.source
-    }
-
+impl<Message: Send + Sync + Clone> VideoHandle<Message, Unknown> {
     pub fn new(url: impl AsRef<str>) -> Result<Self> {
         let source = source::VideoSource::new(url)?;
         let frame_ready = Arc::clone(&source.ready);
@@ -48,36 +63,64 @@ impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
             on_end_of_stream: None,
             on_about_to_finish: None,
             frame_ready,
+            __marker: core::marker::PhantomData,
         })
     }

-    pub async fn wait(self) -> Result<Self> {
+    /// Creates a new video handle and waits for the metadata to be loaded.
+    pub async fn load(url: impl AsRef<str>) -> Result<VideoHandle<Message, Ready>> {
+        let handle = VideoHandle::new(url)?;
+        handle.wait().await
+    }
+}
+
+impl<Message: Send + Sync + Clone, S: State> VideoHandle<Message, S> {
+    pub fn id(&self) -> &id::Id {
+        &self.id
+    }
+
+    pub fn source(&self) -> &source::VideoSource {
+        &self.source
+    }
+
+    pub async fn wait(self) -> Result<VideoHandle<Message, Ready>> {
         self.source.wait().await?;
-        Ok(self)
+        Ok(self.state::<Ready>())
     }

-    pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
-        let sub = widget::VideoSubscription {
-            id: self.id.clone(),
-            on_end_of_stream: self.on_end_of_stream.clone(),
-            on_new_frame: self.on_new_frame.clone(),
-            on_about_to_finish: self.on_about_to_finish.clone(),
-            bus: self.source.bus.clone(),
-        };
-        iced_futures::subscription::from_recipe(sub)
-    }
-
-    pub fn subscription_with<State>(
-        &self,
-        state: &State,
-        f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
-    ) -> iced_futures::subscription::Subscription<Message>
-    where
-        State: Send + Sync + 'static,
-    {
-        let sub = self.subscription();
-        iced_futures::subscription::Subscription::batch([sub, f(state)])
-    }
+    fn state<S2: State>(self) -> VideoHandle<Message, S2> {
+        VideoHandle {
+            id: self.id,
+            source: self.source,
+            on_new_frame: self.on_new_frame,
+            on_end_of_stream: self.on_end_of_stream,
+            on_about_to_finish: self.on_about_to_finish,
+            frame_ready: self.frame_ready,
+            __marker: core::marker::PhantomData,
+        }
+    }
+
+    // pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
+    //     let sub = widget::VideoSubscription {
+    //         id: self.id.clone(),
+    //         on_end_of_stream: self.on_end_of_stream.clone(),
+    //         on_new_frame: self.on_new_frame.clone(),
+    //         on_about_to_finish: self.on_about_to_finish.clone(),
+    //         bus: self.source.bus.clone(),
+    //     };
+    //     iced_futures::subscription::from_recipe(sub)
+    // }
+    //
+    // pub fn subscription_with<State>(
+    //     &self,
+    //     state: &State,
+    //     f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
+    // ) -> iced_futures::subscription::Subscription<Message>
+    // where
+    //     State: Send + Sync + 'static,
+    // {
+    //     let sub = self.subscription();
+    //     iced_futures::subscription::Subscription::batch([sub, f(state)])
    // }

     pub fn on_new_frame(self, message: Message) -> Self {
         Self {
@@ -109,10 +152,13 @@ impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
     pub fn stop(&self) {
         self.source.stop();
     }
+}

-    /// Creates a new video handle and waits for the metadata to be loaded.
-    pub async fn load(url: impl AsRef<str>) -> Result<Self> {
-        let handle = Self::new(url)?;
-        handle.wait().await
+impl<Message: Send + Sync + Clone> VideoHandle<Message, Ready> {
+    pub fn format(&self) -> Result<gst::VideoFormat> {
+        self.source
+            .format()
+            .change_context(Error)
+            .attach("Failed to get video format")
     }
 }

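The hunk above introduces a sealed-trait typestate: a `VideoHandle<Message, Unknown>` can only become a `VideoHandle<Message, Ready>` by awaiting `wait()`, and `format()` exists only on the `Ready` impl, so calling it before preroll is a compile error rather than a runtime failure. A minimal self-contained sketch of the same pattern (simplified names, not the crate's actual types):

```rust
use core::marker::PhantomData;

// The sealed module keeps downstream crates from adding their own states.
mod seal {
    pub trait Sealed {}
    impl Sealed for super::Unknown {}
    impl Sealed for super::Ready {}
}

pub trait State: seal::Sealed {}
pub struct Unknown;
pub struct Ready;
impl State for Unknown {}
impl State for Ready {}

pub struct Handle<S: State = Unknown> {
    url: String,
    _marker: PhantomData<S>,
}

impl Handle<Unknown> {
    pub fn new(url: &str) -> Self {
        Self { url: url.to_owned(), _marker: PhantomData }
    }

    // Consuming `self` re-brands the handle; the fields are simply moved,
    // so the promotion is free at runtime (cf. `state::<S2>()` above).
    pub fn wait(self) -> Handle<Ready> {
        Handle { url: self.url, _marker: PhantomData }
    }
}

impl Handle<Ready> {
    // Only callable once the handle has been promoted to `Ready`.
    pub fn format(&self) -> &str {
        "NV12"
    }
}

fn main() {
    let handle = Handle::new("file:///video.mkv").wait();
    println!("{}: {}", handle.url, handle.format());
    // Handle::new("x").format(); // compile error: `format` needs `Ready`
}
```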

@@ -66,6 +66,14 @@ pub struct VideoFrame {
     pub size: wgpu::Extent3d,
     pub ready: Arc<AtomicBool>,
     pub frame: Arc<Mutex<gst::Sample>>,
+    pub format: VideoFormat,
+}
+
+#[derive(Debug, Clone, Copy)]
+pub enum ToneMapping {
+    None,
+    InverseOETF,
+    Reinhard,
 }

 impl iced_wgpu::Primitive for VideoFrame {
@@ -80,8 +88,13 @@ impl iced_wgpu::Primitive for VideoFrame {
         viewport: &iced_wgpu::graphics::Viewport,
     ) {
         let video = pipeline.videos.entry(self.id.clone()).or_insert_with(|| {
-            let texture =
-                VideoTexture::new("iced-video-texture", self.size, device, pipeline.format);
+            let texture = VideoTexture::new(
+                "iced-video-texture",
+                self.size,
+                device,
+                pipeline.format,
+                self.format,
+            );
             let conversion_matrix = if texture.format().is_wide() {
                 BT2020_TO_RGB
             } else {
@@ -235,12 +248,16 @@ impl iced_wgpu::Primitive for VideoFrame {
 /// NV12 or P010 are only supported in DX12 and Vulkan backends.
 /// While we can use vulkan with moltenvk on macos, I'd much rather use metal directly
+/// Right now only supports interleaved UV formats.
+/// For planar formats we would need 3 textures.
 #[derive(Debug)]
 pub struct VideoTexture {
     y: wgpu::Texture,
     uv: wgpu::Texture,
     size: wgpu::Extent3d,
-    pixel_format: gst::VideoFormat,
+    video_format: VideoFormat,
+    surface_format: wgpu::TextureFormat,
+    tone_mapping: ToneMapping,
 }

 impl VideoTexture {
@@ -252,9 +269,28 @@ impl VideoTexture {
         label: &str,
         size: wgpu::Extent3d,
         device: &wgpu::Device,
-        format: wgpu::TextureFormat,
+        surface_format: wgpu::TextureFormat,
+        video_format: VideoFormat,
     ) -> Self {
+        let surface_hdr = surface_format.is_wide();
+        let video_hdr = matches!(video_format, VideoFormat::P01010le | VideoFormat::P016Le);
+        if surface_hdr && !video_hdr {
+            tracing::warn!("Surface texture is HDR but video format is SDR");
+        } else if !surface_hdr && video_hdr {
+            tracing::warn!("Video format is HDR but surface does not support HDR");
+        }
+
+        let tone_mapping = if surface_hdr && video_hdr {
+            ToneMapping::None
+        } else if surface_hdr && !video_hdr {
+            ToneMapping::InverseOETF
+        } else if !surface_hdr && video_hdr {
+            ToneMapping::Reinhard
+        } else {
+            ToneMapping::None
+        };
+
         let y_texture = device.create_texture(&wgpu::TextureDescriptor {
             label: Some(&format!("{}-y", label)),
             size: wgpu::Extent3d {
@@ -265,7 +301,7 @@ impl VideoTexture {
             mip_level_count: 1,
             sample_count: 1,
             dimension: wgpu::TextureDimension::D2,
-            format: wgpu::TextureFormat::R8Unorm,
+            format: wgpu::TextureFormat::R16Unorm,
             usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
             view_formats: &[],
         });
@@ -279,7 +315,7 @@ impl VideoTexture {
             mip_level_count: 1,
             sample_count: 1,
             dimension: wgpu::TextureDimension::D2,
-            format: wgpu::TextureFormat::Rg8Unorm,
+            format: wgpu::TextureFormat::Rg16Unorm,
             usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
             view_formats: &[],
         });
@@ -287,117 +323,49 @@ impl VideoTexture {
             y: y_texture,
             uv: uv_texture,
             size,
-            pixel_format: VideoFormat::Unknown,
+            surface_format,
+            video_format,
+            tone_mapping,
         }
     }

+    // This return the surface texture format, not the video pixel format
     pub fn format(&self) -> wgpu::TextureFormat {
-        match self {
-            VideoTexture::NV12(_) => wgpu::TextureFormat::NV12,
-            VideoTexture::P010(_) => wgpu::TextureFormat::P010,
-            VideoTexture::Composite { y, uv } => {
-                todo!()
-                // if y.format().is_wide() {
-                //     wgpu::TextureFormat::P010
-                // } else {
-                //     wgpu::TextureFormat::NV12
-                // }
-            }
-        }
+        self.surface_format
     }

     pub fn y_texture(&self) -> wgpu::TextureView {
-        match self {
-            VideoTexture::NV12(nv12) => nv12.create_view(&wgpu::TextureViewDescriptor {
-                label: Some("iced-video-texture-view-y-nv12"),
-                format: Some(wgpu::TextureFormat::R8Unorm),
-                ..Default::default()
-            }),
-            VideoTexture::P010(p010) => p010.create_view(&wgpu::TextureViewDescriptor {
-                label: Some("iced-video-texture-view-y-p010"),
-                format: Some(wgpu::TextureFormat::R16Unorm),
-                ..Default::default()
-            }),
-            VideoTexture::Composite { y, .. } => {
-                y.create_view(&wgpu::TextureViewDescriptor::default())
-            }
-        }
+        self.y.create_view(&wgpu::TextureViewDescriptor::default())
     }

     pub fn uv_texture(&self) -> wgpu::TextureView {
-        match self {
-            VideoTexture::NV12(nv12) => nv12.create_view(&wgpu::TextureViewDescriptor {
-                label: Some("iced-video-texture-view-uv-nv12"),
-                format: Some(wgpu::TextureFormat::Rg8Unorm),
-                ..Default::default()
-            }),
-            VideoTexture::P010(p010) => p010.create_view(&wgpu::TextureViewDescriptor {
-                label: Some("iced-video-texture-view-uv-p010"),
-                format: Some(wgpu::TextureFormat::Rg16Unorm),
-                ..Default::default()
-            }),
-            VideoTexture::Composite { uv, .. } => {
-                uv.create_view(&wgpu::TextureViewDescriptor::default())
-            }
-        }
+        self.uv.create_view(&wgpu::TextureViewDescriptor::default())
     }

     pub fn resize(&self, name: &str, new_size: wgpu::Extent3d, device: &wgpu::Device) -> Self {
-        VideoTexture::new(name, new_size, device, self.format())
+        VideoTexture::new(name, new_size, device, self.format(), self.video_format)
+    }
+
+    pub fn pixel_format(&self) -> VideoFormat {
+        self.video_format
+    }
+
+    pub fn set_pixel_format(&mut self, format: VideoFormat) {
+        self.video_format = format;
     }

     /// This assumes that the data is laid out correctly for the texture format.
     pub fn write_texture(&self, data: &[u8], queue: &wgpu::Queue) {
-        match self {
-            VideoTexture::NV12(nv12) => {
-                queue.write_texture(
-                    wgpu::TexelCopyTextureInfo {
-                        texture: nv12,
-                        mip_level: 0,
-                        origin: wgpu::Origin3d::ZERO,
-                        aspect: wgpu::TextureAspect::All,
-                    },
-                    data,
-                    wgpu::TexelCopyBufferLayout {
-                        offset: 0,
-                        bytes_per_row: Some(nv12.size().width * 3),
-                        rows_per_image: Some(nv12.size().height),
-                    },
-                    nv12.size(),
-                );
-            }
-            VideoTexture::P010(p010) => {
-                dbg!(&p010.size());
-                dbg!(data.len());
-                queue.write_texture(
-                    wgpu::TexelCopyTextureInfo {
-                        texture: p010,
-                        mip_level: 0,
-                        origin: wgpu::Origin3d::ZERO,
-                        aspect: wgpu::TextureAspect::All,
-                    },
-                    data,
-                    wgpu::TexelCopyBufferLayout {
-                        offset: 0,
-                        bytes_per_row: Some(p010.size().width * 3),
-                        rows_per_image: Some(p010.size().height),
-                    },
-                    p010.size(),
-                );
-            }
-            VideoTexture::Composite { y, uv } => {
-                let y_size = wgpu::Extent3d {
-                    width: y.size().width,
-                    height: y.size().height,
-                    depth_or_array_layers: 1,
-                };
-                let uv_size = wgpu::Extent3d {
-                    width: uv.size().width,
-                    height: uv.size().height,
-                    depth_or_array_layers: 1,
-                };
-                let y_data_size = (y_size.width * y_size.height) as usize;
-                let uv_data_size = (uv_size.width * uv_size.height * 2) as usize; // UV is interleaved
+        // let (y, u, v) = match self.video_format {
+        //     VideoFormat::Nv12 | VideoFormat::P01010le | VideoFormat::P016Le => (4, 1, 1),
+        //     _ => (1, 1),
+        // };
+        let Self { y, uv, .. } = self;
+        let y_size = y.size();
+        let uv_size = uv.size();
+
+        let y_data_size = (y_size.width * y_size.height * 2) as usize;
+        let uv_data_size = (y_data_size / 2) as usize; // UV is interleaved

         queue.write_texture(
             wgpu::TexelCopyTextureInfo {
@@ -425,15 +393,13 @@ impl VideoTexture {
             &data[y_data_size..(y_data_size + uv_data_size)],
             wgpu::TexelCopyBufferLayout {
                 offset: 0,
-                bytes_per_row: Some(uv_size.width * 2),
+                bytes_per_row: Some(uv_size.width),
                 rows_per_image: Some(uv_size.height),
             },
             uv_size,
         );
-            }
-        }
     }
 }

 #[derive(Debug)]
 pub struct VideoFrameData {
@@ -448,12 +414,6 @@ impl VideoFrameData {
     pub fn is_hdr(&self) -> bool {
         self.texture.format().is_wide()
     }
-
-    pub fn is_nv12(&self) -> bool {
-        matches!(self.texture.format(), wgpu::TextureFormat::NV12)
-    }
-
-    pub fn is_p010(&self) -> bool {
-        matches!(self.texture.format(), wgpu::TextureFormat::P010)
-    }
 }

 #[derive(Debug)]

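The new `ToneMapping` variants encode the two mismatch directions that `VideoTexture::new` warns about: SDR content on an HDR surface and HDR content on an SDR surface. The shader-side curves are not part of this diff, so the following is only a sketch of the standard operators those names usually refer to, not the project's final math:

```rust
// SDR video on an HDR surface: undo the BT.709-style OETF to recover
// linear light before compositing into a wide-gamut target.
fn inverse_oetf_bt709(v: f32) -> f32 {
    if v < 0.081 {
        v / 4.5
    } else {
        ((v + 0.099) / 1.099).powf(1.0 / 0.45)
    }
}

// HDR video on an SDR surface: Reinhard compresses [0, inf) into [0, 1).
fn reinhard(l: f32) -> f32 {
    l / (1.0 + l)
}

fn main() {
    for v in [0.0_f32, 0.25, 0.5, 1.0, 4.0] {
        println!(
            "oetf^-1({v:.2}) = {:.4}, reinhard({v:.2}) = {:.4}",
            inverse_oetf_bt709(v.min(1.0)),
            reinhard(v)
        );
    }
}
```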

@@ -1,13 +1,3 @@
-// struct VertexOutput {
-//     @builtin(position) clip_position: vec4f,
-//     @location(0) coords: vec2f,
-// }
-
-// struct VertexInput {
-//     // @location(0) position: vec3<f32>,
-//     // @location(1) tex_coords: vec2<f32>,
-// }
-
 struct VertexOutput {
     @builtin(position) clip_position: vec4<f32>,
     @location(0) tex_coords: vec2<f32>,
@@ -15,38 +5,30 @@ struct VertexOutput {
 @vertex
 fn vs_main(
-    // model: VertexInput,
+    @builtin(vertex_index) in_vertex_index: u32,
 ) -> VertexOutput {
     var out: VertexOutput;
-    out.tex_coords = vec2<f32>(0.0, 0.0);
-    out.clip_position = vec4<f32>(0,0,0, 1.0);
+    let uv = vec2<f32>(f32((in_vertex_index << 1u) & 2u), f32(in_vertex_index & 2u));
+    out.clip_position = vec4<f32>(uv * 2.0 - 1.0, 0.0, 1.0);
+    out.clip_position.y = -out.clip_position.y;
+    out.tex_coords = uv;
     return out;
 }

-// @vertex
-// fn vs_main(@location(0) input: vec2f) -> VertexOutput {
-//     var out: VertexOutput;
-//     out.clip_position = vec4f(input, 0.0, 1.0);
-//     out.coords = input * 0.5 + vec2f(0.5, 0.5);
-//     return out;
-// }
-
 @group(0) @binding(0) var y_texture: texture_2d<f32>;
 @group(0) @binding(1) var uv_texture: texture_2d<f32>;
 @group(0) @binding(2) var texture_sampler: sampler;
-@group(0) @binding(3) var<uniform> rgb_primaries: mat3x3<f32>;
+@group(0) @binding(3) var<uniform> rgb_primaries: mat4x4<f32>;

 @fragment
 fn fs_main(input: VertexOutput) -> @location(0) vec4<f32> {
     let y = textureSample(y_texture, texture_sampler, input.tex_coords).r;
     let uv = textureSample(uv_texture, texture_sampler, input.tex_coords).rg;
-    let yuv = vec3f(y, uv);
+    let yuv = vec4f(y, uv, 0);
     let rgb = rgb_primaries * yuv;
-    return vec4f(rgb, 1.0);
-    // let rgb = rgb_primaries * yuv;
-    // return vec4f(rgb, 1.0);
+    return vec4f(rgb.r, rgb.g, rgb.b, 1.0);
 }

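The rewritten `vs_main` is the classic bufferless fullscreen-triangle trick: UVs are derived from `vertex_index` alone, so the pipeline needs no vertex buffer. A quick Rust check of the bit math for the three indices a `draw(0..3, 0..1)` call produces:

```rust
// Verifies the fullscreen-triangle UV derivation from the shader above:
// ((i << 1) & 2, i & 2) yields (0,0), (2,0), (0,2), and uv * 2 - 1 maps
// those to a triangle that covers all of clip space.
fn main() {
    for i in 0u32..3 {
        let uv = (((i << 1) & 2) as f32, (i & 2) as f32);
        // y is flipped exactly as in the shader's `clip_position.y = -...`.
        let clip = (uv.0 * 2.0 - 1.0, -(uv.1 * 2.0 - 1.0));
        println!("vertex {i}: uv = {uv:?}, clip = {clip:?}");
    }
}
```

Switching `rgb_primaries` from `mat3x3<f32>` to `mat4x4<f32>` (with the YUV promoted to a `vec4f`) likely also sidesteps WGSL uniform-layout pitfalls: each `mat3x3` column is padded to 16 bytes, which is easy to get wrong when uploading the matrix from the CPU side.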

@@ -13,7 +13,6 @@ use std::sync::{Arc, Mutex, atomic::AtomicBool};
 #[derive(Debug, Clone)]
 pub struct VideoSource {
     pub(crate) playbin: Playbin3,
-    pub(crate) videoconvert: VideoConvert,
     pub(crate) appsink: AppSink,
     pub(crate) bus: Bus,
     pub(crate) ready: Arc<AtomicBool>,
@@ -27,22 +26,12 @@ impl VideoSource {
     /// now.
     pub fn new(url: impl AsRef<str>) -> Result<Self> {
         Gst::new();
-        let videoconvert = VideoConvert::new("iced-video-convert")
-            // .change_context(Error)?
-            // .with_output_format(gst::plugins::videoconvertscale::VideoFormat::Rgba)
-            .change_context(Error)?;
         let mut appsink = AppSink::new("iced-video-sink").change_context(Error)?;
         appsink
             .drop(true)
             .sync(true)
             // .async_(true)
-            .emit_signals(true)
-            .caps(
-                Caps::builder(CapsType::Video)
-                    .field("format", "RGB10A2_LE") // Forced for now
-                    .build(),
-            );
-        let video_sink = videoconvert.link(&appsink).change_context(Error)?;
+            .emit_signals(true);

         let playbin = Playbin3::new("iced-video")
             .change_context(Error)?
             .with_uri(url.as_ref())
@@ -50,13 +39,13 @@ impl VideoSource {
             .with_buffer_size(4096 * 4096 * 4 * 3)
             .with_ring_buffer_max_size(4096 * 4096 * 4 * 3)
             .with_flags(Playbin3::default_flags() | PlayFlags::DOWNLOAD)
-            .with_video_sink(&video_sink);
+            .with_video_sink(&appsink);

         let bus = playbin.bus().change_context(Error)?;
         playbin.pause().change_context(Error)?;
         let ready = Arc::new(AtomicBool::new(false));
         let frame = Arc::new(Mutex::new(gst::Sample::new()));
-        appsink.on_new_frame({
+        appsink.on_new_sample({
             let ready = Arc::clone(&ready);
             let frame = Arc::clone(&frame);
             move |appsink| {
@@ -75,7 +64,6 @@ impl VideoSource {
         Ok(Self {
             playbin,
-            videoconvert,
             appsink,
             bus,
             ready,
@@ -85,6 +73,26 @@ impl VideoSource {
     }

     pub async fn wait(&self) -> Result<()> {
+        use futures_lite::StreamExt;
+        // self.bus_stream()
+        //     .for_each(|msg: gst::Message| {
+        //         use gst::gstreamer::prelude::*;
+        //         match msg.view() {
+        //             MessageView::Eos(_) => {
+        //                 tracing::info!("Video reached end of stream");
+        //             }
+        //             MessageView::Error(err) => {
+        //                 tracing::error!(
+        //                     "Video Error from {:?}: {} ({:?})",
+        //                     err.src().map(|s| s.path_string()),
+        //                     err.error(),
+        //                     err.debug()
+        //                 );
+        //             }
+        //             view => tracing::info!("Video Message: {:#?}", view),
+        //         }
+        //     })
+        //     .await;
         self.playbin
             .wait_for_states(&[gst::State::Paused, gst::State::Playing])
             .await
@@ -93,6 +101,23 @@ impl VideoSource {
         Ok(())
     }

+    pub fn format(&self) -> Result<gst::VideoFormat> {
+        let caps = self
+            .appsink
+            .sink("sink")
+            .current_caps()
+            .change_context(Error)?;
+        let format = caps
+            .format()
+            .ok_or(Error)
+            .attach("Failed to get video caps structure")?;
+        Ok(format)
+    }
+
+    pub fn bus_stream(&self) -> impl futures_lite::Stream<Item = gst::Message> {
+        self.bus.stream()
+    }
+
     pub fn is_playing(&self) -> Result<bool> {
         let state = self.playbin.state(None).change_context(Error)?;
         Ok(state == gst::State::Playing)

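The new `VideoSource::format` reads the negotiated caps off the appsink's sink pad through this crate's wrapper types. For reference, the equivalent lookup with plain gstreamer-rs looks roughly like this (a sketch against the raw `gstreamer_app::AppSink`, not the wrapper above):

```rust
use gstreamer::prelude::*;

// Returns the negotiated pixel format, or None before caps negotiation.
fn negotiated_format(appsink: &gstreamer_app::AppSink) -> Option<gstreamer_video::VideoFormat> {
    let pad = appsink.static_pad("sink")?;
    // `current_caps` is only Some after negotiation, i.e. once the pipeline
    // has prerolled; this is why the handle must reach `Ready` first.
    let caps = pad.current_caps()?;
    let s = caps.structure(0)?;
    let format = s.get::<&str>("format").ok()?;
    Some(gstreamer_video::VideoFormat::from_string(format))
}
```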

@@ -10,7 +10,8 @@ where
     Renderer: PrimitiveRenderer,
 {
     id: id::Id,
-    handle: &'a VideoHandle<Message>,
+    handle: &'a VideoHandle<Message, Ready>,
+    video_format: gst::VideoFormat,
     content_fit: iced::ContentFit,
     width: iced::Length,
     height: iced::Length,
@@ -21,12 +22,15 @@
 impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
 where
     Renderer: PrimitiveRenderer,
-    Message: Clone,
+    Message: Clone + Send + Sync,
 {
-    pub fn new(handle: &'a VideoHandle<Message>) -> Self {
+    pub fn new(handle: &'a VideoHandle<Message, Ready>) -> Self {
         Self {
             id: handle.id.clone(),
             handle: &handle,
+            video_format: handle
+                .format()
+                .expect("Failed to get video format during widget creation"),
             content_fit: iced::ContentFit::Contain,
             width: Length::Shrink,
             height: Length::Shrink,
@@ -74,7 +78,7 @@
 impl<Message, Theme, Renderer> iced::Widget<Message, Theme, Renderer>
     for Video<'_, Message, Theme, Renderer>
 where
-    Message: Clone,
+    Message: Clone + Send + Sync,
     Renderer: PrimitiveRenderer,
 {
     fn size(&self) -> iced::Size<Length> {
@@ -141,6 +145,7 @@
                 },
                 ready: Arc::clone(&self.handle.frame_ready),
                 frame: Arc::clone(&self.handle.source.frame),
+                format: self.video_format,
             },
         );
     };
@@ -184,7 +189,7 @@
 impl<'a, Message, Theme, Renderer> From<Video<'a, Message, Theme, Renderer>>
     for iced::Element<'a, Message, Theme, Renderer>
 where
-    Message: 'a + Clone,
+    Message: Send + Sync + 'a + Clone,
     Theme: 'a,
     Renderer: 'a + iced_wgpu::primitive::Renderer,
 {


@@ -17,6 +17,7 @@ pub use element::*;
 pub use gstreamer;
 #[doc(inline)]
 pub use gstreamer::{Message, MessageType, MessageView, State};
+pub use gstreamer_video::VideoFormat;
 pub use pad::*;
 pub use pipeline::*;
 pub use plugins::*;


@@ -6,9 +6,71 @@ pub use gstreamer_app::AppSinkCallbacks;
 wrap_gst!(AppSink, gstreamer::Element);
 parent_child!(Element, AppSink);

+pub struct AppSinkBuilder {
+    inner: AppSink,
+    callbacks: Option<gstreamer_app::app_sink::AppSinkCallbacksBuilder>,
+}
+
+impl AppSinkBuilder {
+    pub fn on_new_sample<F>(mut self, mut f: F) -> Self
+    where
+        F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
+    {
+        let mut callbacks_builder = self
+            .callbacks
+            .take()
+            .unwrap_or_else(gstreamer_app::app_sink::AppSinkCallbacks::builder);
+        callbacks_builder = callbacks_builder.new_sample(move |appsink| {
+            use glib::object::Cast;
+            let element = appsink.upcast_ref::<gstreamer::Element>();
+            let appsink = AppSink::from_gst_ref(element);
+            std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(appsink)))
+                .unwrap_or(Err(gstreamer::FlowError::Error))
+                .map(|_| gstreamer::FlowSuccess::Ok)
+        });
+        self.callbacks = Some(callbacks_builder);
+        self
+    }
+
+    pub fn on_new_preroll<F>(mut self, mut f: F) -> Self
+    where
+        F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
+    {
+        let mut callbacks_builder = self
+            .callbacks
+            .take()
+            .unwrap_or_else(gstreamer_app::app_sink::AppSinkCallbacks::builder);
+        callbacks_builder = callbacks_builder.new_preroll(move |appsink| {
+            use glib::object::Cast;
+            let element = appsink.upcast_ref::<gstreamer::Element>();
+            let appsink = AppSink::from_gst_ref(element);
+            std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(appsink)))
+                .unwrap_or(Err(gstreamer::FlowError::Error))
+                .map(|_| gstreamer::FlowSuccess::Ok)
+        });
+        self.callbacks = Some(callbacks_builder);
+        self
+    }
+
+    pub fn build(self) -> AppSink {
+        let AppSinkBuilder { inner, callbacks } = self;
+        if let Some(callbacks) = callbacks {
+            inner.appsink().set_callbacks(callbacks.build());
+        }
+        inner
+    }
+}
+
 impl Sink for AppSink {}

 impl AppSink {
+    pub fn builder(name: impl AsRef<str>) -> AppSinkBuilder {
+        let inner = AppSink::new(name).expect("Failed to create AppSink");
+        AppSinkBuilder {
+            inner,
+            callbacks: None,
+        }
+    }
+
     fn appsink(&self) -> &gstreamer_app::AppSink {
         self.inner
             .downcast_ref::<gstreamer_app::AppSink>()
@@ -54,7 +116,7 @@ impl AppSink {
         self
     }

-    pub fn on_new_frame<F>(&mut self, mut f: F) -> &mut Self
+    pub fn on_new_sample<F>(&mut self, mut f: F) -> &mut Self
     where
         F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
     {

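A usage sketch for the new builder API (the closure body is illustrative; the real callback in `VideoSource::new` pulls the sample and stores it behind an `Arc<Mutex<gst::Sample>>`):

```rust
// Build an AppSink with a new-sample callback. The callback runs on a
// GStreamer streaming thread, hence the `Send + 'static` bound; the builder
// wraps it in catch_unwind so a panic surfaces as FlowError::Error instead
// of unwinding across the C boundary.
let appsink = AppSink::builder("iced-video-sink")
    .on_new_sample(|_sink| {
        // A real callback would pull the sample here and hand it to the
        // renderer, then flag `ready` so the widget redraws.
        Ok(())
    })
    .build();
```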

@@ -2,7 +2,7 @@ mod settings;
 mod video;
 mod shared_string;

-use iced_video::{Video, VideoHandle};
+use iced_video::{Ready, Video, VideoHandle};
 use shared_string::SharedString;
 use std::sync::Arc;
@@ -140,7 +140,7 @@ struct State {
     screen: Screen,
     settings: settings::SettingsState,
     is_authenticated: bool,
-    video: Option<Arc<VideoHandle<Message>>>,
+    video: Option<Arc<VideoHandle<Message, Ready>>>,
 }

 impl State {


@@ -3,7 +3,7 @@ use super::*;
 pub enum VideoMessage {
     EndOfStream,
     Open(url::Url),
-    Loaded(VideoHandle<Message>),
+    Loaded(VideoHandle<Message, Ready>),
     Pause,
     Play,
     Seek(f64),
@@ -55,16 +55,14 @@ pub fn update(state: &mut State, message: VideoMessage) -> Task<Message> {
             Task::none()
         }
         VideoMessage::Test => {
-            let url = url::Url::parse(
-                "https://gstreamer.freedesktop.org/data/media/sintel_trailer-480p.webm",
-            )
+            let url = url::Url::parse("https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c")
             .expect("Impossible: Failed to parse hardcoded URL");
             Task::done(VideoMessage::Open(url)).map(Message::Video)
         }
     }
 }

-pub fn player(video: &VideoHandle<Message>) -> Element<'_, Message> {
+pub fn player(video: &VideoHandle<Message, Ready>) -> Element<'_, Message> {
     container(
         Video::new(video)
             .width(Length::Fill)