feat(iced-video): added video format to the video frame

This commit is contained in:
2026-01-14 09:51:56 +05:30
parent 97a7a632d4
commit 9dac0b6c78
10 changed files with 310 additions and 230 deletions


@@ -17,27 +17,42 @@ pub type Result<T, E = Report<Error>> = core::result::Result<T, E>;
use std::sync::{Arc, Mutex, atomic::AtomicBool};
mod seal {
pub trait Sealed {}
impl Sealed for super::Unknown {}
impl Sealed for super::Ready {}
}
pub trait State: seal::Sealed {
fn is_ready() -> bool {
false
}
}
#[derive(Debug, Clone)]
pub struct Unknown;
#[derive(Debug, Clone)]
pub struct Ready;
impl State for Unknown {}
impl State for Ready {
fn is_ready() -> bool {
true
}
}
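For readers new to the pattern, here is a minimal, self-contained sketch of the sealed-typestate idea used above; the `Handle` type and its methods are illustrative stand-ins, not this crate's API.

mod sketch {
    mod seal {
        pub trait Sealed {}
        impl Sealed for super::Unknown {}
        impl Sealed for super::Ready {}
    }
    pub trait State: seal::Sealed {}
    pub struct Unknown;
    pub struct Ready;
    impl State for Unknown {}
    impl State for Ready {}

    pub struct Handle<S: State = Unknown> {
        _marker: core::marker::PhantomData<S>,
    }

    impl Handle<Unknown> {
        pub fn new() -> Self {
            Self { _marker: core::marker::PhantomData }
        }
        // Stand-in for `wait`: consume the Unknown handle, hand back Ready.
        pub fn ready(self) -> Handle<Ready> {
            Handle { _marker: core::marker::PhantomData }
        }
    }

    impl Handle<Ready> {
        // Only exists once the handle has been upgraded.
        pub fn format(&self) -> &'static str { "NV12" }
    }
}

Because `seal` is private, downstream crates cannot add new states, and `Handle::<Unknown>` has no `format` method at all, so misuse is a compile error rather than a runtime check.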
/// This is the video handle used to control video playback.
/// It should be kept in the application state.
#[derive(Debug, Clone)]
pub struct VideoHandle<Message> {
pub struct VideoHandle<Message, S: State = Unknown> {
id: id::Id,
pub source: source::VideoSource,
frame_ready: Arc<AtomicBool>,
on_new_frame: Option<Box<Message>>,
on_end_of_stream: Option<Box<Message>>,
on_about_to_finish: Option<Box<Message>>,
__marker: core::marker::PhantomData<S>,
}
impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
pub fn id(&self) -> &id::Id {
&self.id
}
pub fn source(&self) -> &source::VideoSource {
&self.source
}
impl<Message: Send + Sync + Clone> VideoHandle<Message, Unknown> {
pub fn new(url: impl AsRef<str>) -> Result<Self> {
let source = source::VideoSource::new(url)?;
let frame_ready = Arc::clone(&source.ready);
@@ -48,36 +63,64 @@ impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
on_end_of_stream: None,
on_about_to_finish: None,
frame_ready,
__marker: core::marker::PhantomData,
})
}
pub async fn wait(self) -> Result<Self> {
/// Creates a new video handle and waits for the metadata to be loaded.
pub async fn load(url: impl AsRef<str>) -> Result<VideoHandle<Message, Ready>> {
let handle = VideoHandle::new(url)?;
handle.wait().await
}
}
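A hedged usage sketch of the convenience path; `AppMessage` is an illustrative message type, not part of this crate:

// Hedged sketch: `load` fuses new() + wait() and hands back a Ready handle.
async fn open(url: &str) -> Result<VideoHandle<AppMessage, Ready>> {
    VideoHandle::<AppMessage>::load(url).await
}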
impl<Message: Send + Sync + Clone, S: State> VideoHandle<Message, S> {
pub fn id(&self) -> &id::Id {
&self.id
}
pub fn source(&self) -> &source::VideoSource {
&self.source
}
pub async fn wait(self) -> Result<VideoHandle<Message, Ready>> {
self.source.wait().await?;
Ok(self)
Ok(self.state::<Ready>())
}
pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
let sub = widget::VideoSubscription {
id: self.id.clone(),
on_end_of_stream: self.on_end_of_stream.clone(),
on_new_frame: self.on_new_frame.clone(),
on_about_to_finish: self.on_about_to_finish.clone(),
bus: self.source.bus.clone(),
};
iced_futures::subscription::from_recipe(sub)
fn state<S2: State>(self) -> VideoHandle<Message, S2> {
VideoHandle {
id: self.id,
source: self.source,
on_new_frame: self.on_new_frame,
on_end_of_stream: self.on_end_of_stream,
on_about_to_finish: self.on_about_to_finish,
frame_ready: self.frame_ready,
__marker: core::marker::PhantomData,
}
}
pub fn subscription_with<State>(
&self,
state: &State,
f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
) -> iced_futures::subscription::Subscription<Message>
where
State: Send + Sync + 'static,
{
let sub = self.subscription();
iced_futures::subscription::Subscription::batch([sub, f(state)])
}
// pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
// let sub = widget::VideoSubscription {
// id: self.id.clone(),
// on_end_of_stream: self.on_end_of_stream.clone(),
// on_new_frame: self.on_new_frame.clone(),
// on_about_to_finish: self.on_about_to_finish.clone(),
// bus: self.source.bus.clone(),
// };
// iced_futures::subscription::from_recipe(sub)
// }
//
// pub fn subscription_with<State>(
// &self,
// state: &State,
// f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
// ) -> iced_futures::subscription::Subscription<Message>
// where
// State: Send + Sync + 'static,
// {
// let sub = self.subscription();
// iced_futures::subscription::Subscription::batch([sub, f(state)])
// }
pub fn on_new_frame(self, message: Message) -> Self {
Self {
@@ -109,10 +152,13 @@ impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
pub fn stop(&self) {
self.source.stop();
}
}
/// Creates a new video handle and waits for the metadata to be loaded.
pub async fn load(url: impl AsRef<str>) -> Result<Self> {
let handle = Self::new(url)?;
handle.wait().await
impl<Message: Send + Sync + Clone> VideoHandle<Message, Ready> {
pub fn format(&self) -> Result<gst::VideoFormat> {
self.source
.format()
.change_context(Error)
.attach("Failed to get video format")
}
}
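A hedged sketch of what the typestate buys here: `format` is simply absent before the handle is upgraded (`AppMessage` is again an illustrative message type):

// Hedged sketch: format() is unreachable until wait() upgrades the state.
async fn probe(url: &str) -> Result<gst::VideoFormat> {
    let handle = VideoHandle::<AppMessage>::new(url)?; // VideoHandle<_, Unknown>
    // handle.format(); // would not compile: format() needs the Ready state
    let ready = handle.wait().await?; // VideoHandle<_, Ready>
    ready.format()
}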


@@ -66,6 +66,14 @@ pub struct VideoFrame {
pub size: wgpu::Extent3d,
pub ready: Arc<AtomicBool>,
pub frame: Arc<Mutex<gst::Sample>>,
pub format: VideoFormat,
}
#[derive(Debug, Clone, Copy)]
pub enum ToneMapping {
None,
InverseOETF,
Reinhard,
}
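For reference, hedged scalar versions of the three strategies; the real mapping runs in the shader, and gamma 2.2 is only a stand-in for the exact inverse OETF:

// Illustrative reference formulas for the ToneMapping variants.
fn tone_map(mapping: ToneMapping, x: f32) -> f32 {
    match mapping {
        // HDR video on an HDR surface: pass through unchanged.
        ToneMapping::None => x,
        // SDR video on an HDR surface: linearize (gamma 2.2 stand-in).
        ToneMapping::InverseOETF => x.powf(2.2),
        // HDR video on an SDR surface: compress highlights into [0, 1).
        ToneMapping::Reinhard => x / (1.0 + x),
    }
}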
impl iced_wgpu::Primitive for VideoFrame {
@@ -80,8 +88,13 @@ impl iced_wgpu::Primitive for VideoFrame {
viewport: &iced_wgpu::graphics::Viewport,
) {
let video = pipeline.videos.entry(self.id.clone()).or_insert_with(|| {
let texture =
VideoTexture::new("iced-video-texture", self.size, device, pipeline.format);
let texture = VideoTexture::new(
"iced-video-texture",
self.size,
device,
pipeline.format,
self.format,
);
let conversion_matrix = if texture.format().is_wide() {
BT2020_TO_RGB
} else {
@@ -235,12 +248,16 @@ impl iced_wgpu::Primitive for VideoFrame {
/// NV12 and P010 are only supported by the DX12 and Vulkan backends.
/// While we could use Vulkan via MoltenVK on macOS, I'd much rather use Metal directly.
/// Right now this only supports interleaved UV formats;
/// planar formats would need three separate textures.
#[derive(Debug)]
pub struct VideoTexture {
y: wgpu::Texture,
uv: wgpu::Texture,
size: wgpu::Extent3d,
pixel_format: gst::VideoFormat,
video_format: VideoFormat,
surface_format: wgpu::TextureFormat,
tone_mapping: ToneMapping,
}
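As a layout reference, a hedged sketch of the per-plane byte sizes for interleaved-UV 4:2:0 formats (bytes_per_sample is 1 for NV12, 2 for P010/P016):

// Hedged sketch: Y is full resolution; UV is half height with Cb/Cr
// interleaved, so it holds width * height / 2 samples.
fn plane_sizes(width: usize, height: usize, bytes_per_sample: usize) -> (usize, usize) {
    let y = width * height * bytes_per_sample;
    let uv = width * (height / 2) * bytes_per_sample;
    (y, uv)
}

For a 1920x1080 P010 frame that is 4_147_200 Y bytes plus 2_073_600 UV bytes, which matches the y_data_size / uv_data_size arithmetic in write_texture below.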
impl VideoTexture {
@@ -252,9 +269,28 @@ impl VideoTexture {
label: &str,
size: wgpu::Extent3d,
device: &wgpu::Device,
format: wgpu::TextureFormat,
surface_format: wgpu::TextureFormat,
video_format: VideoFormat,
) -> Self {
let surface_hdr = surface_format.is_wide();
let video_hdr = matches!(video_format, VideoFormat::P01010le | VideoFormat::P016Le);
if surface_hdr && !video_hdr {
tracing::warn!("Surface texture is HDR but video format is SDR");
} else if !surface_hdr && video_hdr {
tracing::warn!("Video format is HDR but surface does not support HDR");
}
let tone_mapping = if surface_hdr && video_hdr {
ToneMapping::None
} else if surface_hdr && !video_hdr {
ToneMapping::InverseOETF
} else if !surface_hdr && video_hdr {
ToneMapping::Reinhard
} else {
ToneMapping::None
};
let y_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some(&format!("{}-y", label)),
size: wgpu::Extent3d {
@@ -265,7 +301,7 @@ impl VideoTexture {
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::R8Unorm,
format: wgpu::TextureFormat::R16Unorm,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
@@ -279,7 +315,7 @@ impl VideoTexture {
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rg8Unorm,
format: wgpu::TextureFormat::Rg16Unorm,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
@@ -287,151 +323,81 @@ impl VideoTexture {
y: y_texture,
uv: uv_texture,
size,
pixel_format: VideoFormat::Unknown,
surface_format,
video_format,
tone_mapping,
}
}
// This returns the surface texture format, not the video pixel format.
pub fn format(&self) -> wgpu::TextureFormat {
match self {
VideoTexture::NV12(_) => wgpu::TextureFormat::NV12,
VideoTexture::P010(_) => wgpu::TextureFormat::P010,
VideoTexture::Composite { y, uv } => {
todo!()
// if y.format().is_wide() {
// wgpu::TextureFormat::P010
// } else {
// wgpu::TextureFormat::NV12
// }
}
}
self.surface_format
}
pub fn y_texture(&self) -> wgpu::TextureView {
match self {
VideoTexture::NV12(nv12) => nv12.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced-video-texture-view-y-nv12"),
format: Some(wgpu::TextureFormat::R8Unorm),
..Default::default()
}),
VideoTexture::P010(p010) => p010.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced-video-texture-view-y-p010"),
format: Some(wgpu::TextureFormat::R16Unorm),
..Default::default()
}),
VideoTexture::Composite { y, .. } => {
y.create_view(&wgpu::TextureViewDescriptor::default())
}
}
self.y.create_view(&wgpu::TextureViewDescriptor::default())
}
pub fn uv_texture(&self) -> wgpu::TextureView {
match self {
VideoTexture::NV12(nv12) => nv12.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced-video-texture-view-uv-nv12"),
format: Some(wgpu::TextureFormat::Rg8Unorm),
..Default::default()
}),
VideoTexture::P010(p010) => p010.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced-video-texture-view-uv-p010"),
format: Some(wgpu::TextureFormat::Rg16Unorm),
..Default::default()
}),
VideoTexture::Composite { uv, .. } => {
uv.create_view(&wgpu::TextureViewDescriptor::default())
}
}
self.uv.create_view(&wgpu::TextureViewDescriptor::default())
}
pub fn resize(&self, name: &str, new_size: wgpu::Extent3d, device: &wgpu::Device) -> Self {
VideoTexture::new(name, new_size, device, self.format())
VideoTexture::new(name, new_size, device, self.format(), self.video_format)
}
pub fn pixel_format(&self) -> VideoFormat {
self.video_format
}
pub fn set_pixel_format(&mut self, format: VideoFormat) {
self.video_format = format;
}
/// This assumes that the data is laid out correctly for the texture format.
pub fn write_texture(&self, data: &[u8], queue: &wgpu::Queue) {
match self {
VideoTexture::NV12(nv12) => {
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: nv12,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
data,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(nv12.size().width * 3),
rows_per_image: Some(nv12.size().height),
},
nv12.size(),
);
}
VideoTexture::P010(p010) => {
dbg!(&p010.size());
dbg!(data.len());
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: p010,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
data,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(p010.size().width * 3),
rows_per_image: Some(p010.size().height),
},
p010.size(),
);
}
VideoTexture::Composite { y, uv } => {
let y_size = wgpu::Extent3d {
width: y.size().width,
height: y.size().height,
depth_or_array_layers: 1,
};
let uv_size = wgpu::Extent3d {
width: uv.size().width,
height: uv.size().height,
depth_or_array_layers: 1,
};
let y_data_size = (y_size.width * y_size.height) as usize;
let uv_data_size = (uv_size.width * uv_size.height * 2) as usize; // UV is interleaved
// let (y, u, v) = match self.video_format {
// VideoFormat::Nv12 | VideoFormat::P01010le | VideoFormat::P016Le => (4, 1, 1),
// _ => (1, 1),
// };
let Self { y, uv, .. } = self;
let y_size = y.size();
let uv_size = uv.size();
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: y,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&data[0..y_data_size],
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(y_size.width),
rows_per_image: Some(y_size.height),
},
y_size,
);
let y_data_size = (y_size.width * y_size.height * 2) as usize;
let uv_data_size = (y_data_size / 2) as usize; // UV is interleaved
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: uv,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&data[y_data_size..(y_data_size + uv_data_size)],
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(uv_size.width * 2),
rows_per_image: Some(uv_size.height),
},
uv_size,
);
}
}
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: y,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&data[0..y_data_size],
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(y_size.width * 2), // R16Unorm: 2 bytes per texel
rows_per_image: Some(y_size.height),
},
y_size,
);
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: uv,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&data[y_data_size..(y_data_size + uv_data_size)],
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(uv_size.width * 4), // Rg16Unorm: 4 bytes per texel
rows_per_image: Some(uv_size.height),
},
uv_size,
);
}
}
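A hedged usage sketch of the upload path, assuming tightly packed 16-bit planes in the order write_texture expects; the helper and its debug check are illustrative:

// Hedged sketch: total bytes for 16-bit 4:2:0 = Y (w*h*2) + UV (w*h).
fn upload(texture: &VideoTexture, size: wgpu::Extent3d, data: &[u8], queue: &wgpu::Queue) {
    debug_assert_eq!(data.len(), (size.width * size.height * 3) as usize);
    texture.write_texture(data, queue);
}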
@@ -448,12 +414,6 @@ impl VideoFrameData {
pub fn is_hdr(&self) -> bool {
self.texture.format().is_wide()
}
pub fn is_nv12(&self) -> bool {
matches!(self.texture.format(), wgpu::TextureFormat::NV12)
}
pub fn is_p010(&self) -> bool {
matches!(self.texture.format(), wgpu::TextureFormat::P010)
}
}
#[derive(Debug)]


@@ -1,13 +1,3 @@
// struct VertexOutput {
// @builtin(position) clip_position: vec4f,
// @location(0) coords: vec2f,
// }
// struct VertexInput {
// // @location(0) position: vec3<f32>,
// // @location(1) tex_coords: vec2<f32>,
// }
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
@location(0) tex_coords: vec2<f32>,
@@ -15,38 +5,30 @@ struct VertexOutput {
@vertex
fn vs_main(
// model: VertexInput,
@builtin(vertex_index) in_vertex_index: u32,
) -> VertexOutput {
var out: VertexOutput;
out.tex_coords = vec2<f32>(0.0, 0.0);
out.clip_position = vec4<f32>(0,0,0, 1.0);
let uv = vec2<f32>(f32((in_vertex_index << 1u) & 2u), f32(in_vertex_index & 2u));
out.clip_position = vec4<f32>(uv * 2.0 - 1.0, 0.0, 1.0);
out.clip_position.y = -out.clip_position.y;
out.tex_coords = uv;
return out;
}
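// What vs_main emits for the three vertex indices (no vertex buffer needed):
//   index 0 -> uv (0,0), index 1 -> uv (2,0), index 2 -> uv (0,2).
// After `uv * 2.0 - 1.0` these become clip positions (-1,-1), (3,-1), (-1,3):
// a single oversized triangle covering the whole viewport, with the
// off-screen parts clipped away and y flipped to match wgpu's convention.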
// @vertex
// fn vs_main(@location(0) input: vec2f) -> VertexOutput {
// var out: VertexOutput;
// out.clip_position = vec4f(input, 0.0, 1.0);
// out.coords = input * 0.5 + vec2f(0.5, 0.5);
// return out;
// }
@group(0) @binding(0) var y_texture: texture_2d<f32>;
@group(0) @binding(1) var uv_texture: texture_2d<f32>;
@group(0) @binding(2) var texture_sampler: sampler;
@group(0) @binding(3) var<uniform> rgb_primaries: mat3x3<f32>;
@group(0) @binding(3) var<uniform> rgb_primaries: mat4x4<f32>;
@fragment
fn fs_main(input: VertexOutput) -> @location(0) vec4<f32> {
let y = textureSample(y_texture, texture_sampler, input.tex_coords).r;
let uv = textureSample(uv_texture, texture_sampler, input.tex_coords).rg;
let yuv = vec3f(y, uv);
let yuv = vec4f(y, uv, 0);
let rgb = rgb_primaries * yuv;
return vec4f(rgb, 1.0);
return vec4f(rgb.r, rgb.g, rgb.b, 1.0);
// let rgb = rgb_primaries * yuv;
// return vec4f(rgb, 1.0);
}
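For reference, a hedged sketch of what the rgb_primaries uniform could hold, using textbook BT.709 coefficients; the crate's actual BT2020_TO_RGB constant is defined elsewhere in this diff. Note that with yuv.w = 0 the fourth column is inert, so passing w = 1 would be needed for the matrix to also fold in the usual -0.5 chroma offsets.

// Hedged sketch, column-major to match WGSL's mat4x4<f32> layout.
const BT709_TO_RGB: [[f32; 4]; 4] = [
    [1.0,     1.0,     1.0,    0.0], // Y  -> (R, G, B)
    [0.0,    -0.1873,  1.8556, 0.0], // Cb -> (R, G, B)
    [1.5748, -0.4681,  0.0,    0.0], // Cr -> (R, G, B)
    [0.0,     0.0,     0.0,    1.0], // offset column (unused while w = 0)
];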


@@ -13,7 +13,6 @@ use std::sync::{Arc, Mutex, atomic::AtomicBool};
#[derive(Debug, Clone)]
pub struct VideoSource {
pub(crate) playbin: Playbin3,
pub(crate) videoconvert: VideoConvert,
pub(crate) appsink: AppSink,
pub(crate) bus: Bus,
pub(crate) ready: Arc<AtomicBool>,
@@ -27,22 +26,12 @@ impl VideoSource {
/// now.
pub fn new(url: impl AsRef<str>) -> Result<Self> {
Gst::new();
let videoconvert = VideoConvert::new("iced-video-convert")
// .change_context(Error)?
// .with_output_format(gst::plugins::videoconvertscale::VideoFormat::Rgba)
.change_context(Error)?;
let mut appsink = AppSink::new("iced-video-sink").change_context(Error)?;
appsink
.drop(true)
.sync(true)
// .async_(true)
.emit_signals(true)
.caps(
Caps::builder(CapsType::Video)
.field("format", "RGB10A2_LE") // Forced for now
.build(),
);
let video_sink = videoconvert.link(&appsink).change_context(Error)?;
.emit_signals(true);
let playbin = Playbin3::new("iced-video")
.change_context(Error)?
.with_uri(url.as_ref())
@@ -50,13 +39,13 @@ impl VideoSource {
.with_buffer_size(4096 * 4096 * 4 * 3)
.with_ring_buffer_max_size(4096 * 4096 * 4 * 3)
.with_flags(Playbin3::default_flags() | PlayFlags::DOWNLOAD)
.with_video_sink(&video_sink);
.with_video_sink(&appsink);
let bus = playbin.bus().change_context(Error)?;
playbin.pause().change_context(Error)?;
let ready = Arc::new(AtomicBool::new(false));
let frame = Arc::new(Mutex::new(gst::Sample::new()));
appsink.on_new_frame({
appsink.on_new_sample({
let ready = Arc::clone(&ready);
let frame = Arc::clone(&frame);
move |appsink| {
@@ -75,7 +64,6 @@ impl VideoSource {
Ok(Self {
playbin,
videoconvert,
appsink,
bus,
ready,
@@ -85,6 +73,26 @@ impl VideoSource {
}
pub async fn wait(&self) -> Result<()> {
use futures_lite::StreamExt;
// self.bus_stream()
// .for_each(|msg: gst::Message| {
// use gst::gstreamer::prelude::*;
// match msg.view() {
// MessageView::Eos(_) => {
// tracing::info!("Video reached end of stream");
// }
// MessageView::Error(err) => {
// tracing::error!(
// "Video Error from {:?}: {} ({:?})",
// err.src().map(|s| s.path_string()),
// err.error(),
// err.debug()
// );
// }
// view => tracing::info!("Video Message: {:#?}", view),
// }
// })
// .await;
self.playbin
.wait_for_states(&[gst::State::Paused, gst::State::Playing])
.await
@@ -93,6 +101,23 @@ impl VideoSource {
Ok(())
}
pub fn format(&self) -> Result<gst::VideoFormat> {
let caps = self
.appsink
.sink("sink")
.current_caps()
.change_context(Error)?;
let format = caps
.format()
.ok_or(Error)
.attach("Failed to get video caps structure")?;
Ok(format)
}
pub fn bus_stream(&self) -> impl futures_lite::Stream<Item = gst::Message> {
self.bus.stream()
}
pub fn is_playing(&self) -> Result<bool> {
let state = self.playbin.state(None).change_context(Error)?;
Ok(state == gst::State::Playing)


@@ -10,7 +10,8 @@ where
Renderer: PrimitiveRenderer,
{
id: id::Id,
handle: &'a VideoHandle<Message>,
handle: &'a VideoHandle<Message, Ready>,
video_format: gst::VideoFormat,
content_fit: iced::ContentFit,
width: iced::Length,
height: iced::Length,
@@ -21,12 +22,15 @@ where
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
Message: Clone,
Message: Clone + Send + Sync,
{
pub fn new(handle: &'a VideoHandle<Message>) -> Self {
pub fn new(handle: &'a VideoHandle<Message, Ready>) -> Self {
Self {
id: handle.id.clone(),
handle: &handle,
video_format: handle
.format()
.expect("Failed to get video format during widget creation"),
content_fit: iced::ContentFit::Contain,
width: Length::Shrink,
height: Length::Shrink,
@@ -74,7 +78,7 @@ where
impl<Message, Theme, Renderer> iced::Widget<Message, Theme, Renderer>
for Video<'_, Message, Theme, Renderer>
where
Message: Clone,
Message: Clone + Send + Sync,
Renderer: PrimitiveRenderer,
{
fn size(&self) -> iced::Size<Length> {
@@ -141,6 +145,7 @@ where
},
ready: Arc::clone(&self.handle.frame_ready),
frame: Arc::clone(&self.handle.source.frame),
format: self.video_format,
},
);
};
@@ -184,7 +189,7 @@ where
impl<'a, Message, Theme, Renderer> From<Video<'a, Message, Theme, Renderer>>
for iced::Element<'a, Message, Theme, Renderer>
where
Message: 'a + Clone,
Message: Send + Sync + 'a + Clone,
Theme: 'a,
Renderer: 'a + iced_wgpu::primitive::Renderer,
{