feat: Get iced-video working

This commit is contained in:
uttarayan21
2025-12-25 02:14:56 +05:30
parent 3382aebb1f
commit ebe2312272
18 changed files with 714 additions and 189 deletions

View File

@@ -7,6 +7,12 @@ edition = "2024"
error-stack = "0.6.0"
gst.workspace = true
iced_core = "0.14.0"
iced_futures = "0.14.0"
iced_renderer = { version = "0.14.0", features = ["iced_wgpu"] }
iced_wgpu = "0.14.0"
thiserror = "2.0.17"
tracing = "0.1.43"
[dev-dependencies]
iced.workspace = true
tracing-subscriber = { version = "0.3.22", features = ["env-filter"] }

View File

@@ -1,9 +1,12 @@
pub mod id;
pub mod primitive;
pub mod source;
use iced_core as iced;
use iced_renderer::Renderer as RendererWithFallback;
use iced_wgpu::primitive::Renderer as PrimitiveRenderer;
use error_stack::{Report, ResultExt};
use gst::*;
use iced_core::Length;
use iced::Length;
use std::marker::PhantomData;
use gst::plugins::app::AppSink;
@@ -16,16 +19,21 @@ pub struct Error;
pub type Result<T, E = Report<Error>> = core::result::Result<T, E>;
use std::sync::{Arc, Mutex, atomic::AtomicBool};
pub struct Video {
id: iced_core::Id,
/// This is the video handle that is used to control the video playback.
/// This should be kept in the application state.
#[derive(Debug, Clone)]
pub struct VideoHandle {
id: id::Id,
source: source::VideoSource,
is_metadata_loaded: Arc<AtomicBool>,
is_playing: Arc<AtomicBool>,
is_eos: Arc<AtomicBool>,
texture: Mutex<Option<iced_wgpu::wgpu::TextureView>>,
frame_ready: Arc<AtomicBool>,
}
impl Video {
pub fn id(&self) -> &iced_core::Id {
impl VideoHandle {
pub fn id(&self) -> &id::Id {
&self.id
}
@@ -33,91 +41,212 @@ impl Video {
&self.source
}
pub async fn new(url: impl AsRef<str>) -> Result<Self> {
pub fn new(url: impl AsRef<str>) -> Result<Self> {
let source = source::VideoSource::new(url)?;
let frame_ready = Arc::clone(&source.ready);
Ok(Self {
id: iced_core::Id::unique(),
source: source::VideoSource::new(url)?,
id: id::Id::unique(),
source: source,
is_metadata_loaded: Arc::new(AtomicBool::new(false)),
is_playing: Arc::new(AtomicBool::new(false)),
is_eos: Arc::new(AtomicBool::new(false)),
texture: Mutex::new(None),
frame_ready,
})
}
}
pub struct VideoPlayer<'a, Message, Theme = iced_core::Theme, Renderer = iced_wgpu::Renderer>
/// This is the Video widget that displays a video.
/// This should be used in the view function.
pub struct Video<'a, Message, Theme = iced::Theme, Renderer = iced_wgpu::Renderer>
where
Renderer: PrimitiveRenderer,
{
videos: &'a Video,
content_fit: iced_core::ContentFit,
width: iced_core::Length,
height: iced_core::Length,
id: id::Id,
handle: &'a VideoHandle,
content_fit: iced::ContentFit,
width: iced::Length,
height: iced::Length,
on_end_of_stream: Option<Message>,
on_new_frame: Option<Message>,
looping: bool,
// on_subtitle_text: Option<Box<dyn Fn(Option<String>) -> Message + 'a>>,
// on_error: Option<Box<dyn Fn(&glib::Error) -> Message + 'a>>,
theme: Theme,
__marker: PhantomData<Renderer>,
// theme: Theme,
__marker: PhantomData<(Renderer, Theme)>,
}
impl<Message, Theme, Renderer> VideoPlayer<Message, Theme, Renderer>
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
{
pub fn new(source: source::VideoSource) -> Self {
pub fn new(handle: &'a VideoHandle) -> Self {
Self {
videos: Video {
id: iced_core::Id::unique(),
source,
is_playing: Arc::new(AtomicBool::new(false)),
is_eos: Arc::new(AtomicBool::new(false)),
texture: Mutex::new(None),
},
content_fit: iced_core::ContentFit::Contain,
id: handle.id.clone(),
handle: &handle,
content_fit: iced::ContentFit::Contain,
width: Length::Shrink,
height: Length::Shrink,
on_end_of_stream: None,
on_new_frame: None,
looping: false,
theme: Theme::default(),
// theme: Theme::default(),
__marker: PhantomData,
}
}
}
impl<Message, Theme, Renderer> iced_core::Widget<Message, Theme, Renderer>
for VideoPlayer<'_, Message, Theme, Renderer>
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
{
pub fn width(mut self, width: Length) -> Self {
self.width = width;
self
}
pub fn height(mut self, height: Length) -> Self {
self.height = height;
self
}
pub fn content_fit(mut self, fit: iced::ContentFit) -> Self {
self.content_fit = fit;
self
}
pub fn on_end_of_stream(mut self, message: Message) -> Self {
self.on_end_of_stream = Some(message);
self
}
pub fn on_new_frame(mut self, message: Message) -> Self {
self.on_new_frame = Some(message);
self
}
pub fn looping(mut self, looping: bool) -> Self {
self.looping = looping;
self
}
}
impl<Message, Theme, Renderer> iced::Widget<Message, Theme, Renderer>
for Video<'_, Message, Theme, Renderer>
where
Message: Clone,
Renderer: PrimitiveRenderer,
{
fn size(&self) -> iced_core::Size<Length> {
iced_core::Size {
fn size(&self) -> iced::Size<Length> {
iced::Size {
width: self.width,
height: self.height,
}
}
// The video player should take max space by default
fn layout(
&mut self,
iced_core::widget::tree: &mut iced_core::widget::Tree,
iced_core::renderer: &Renderer,
limits: &iced_core::layout::Limits,
) -> iced_core::layout::Node {
todo!()
_tree: &mut iced::widget::Tree,
_renderer: &Renderer,
limits: &iced::layout::Limits,
) -> iced::layout::Node {
iced::layout::Node::new(limits.max())
}
fn draw(
&self,
iced_core::widget::tree: &iced_core::widget::Tree,
iced_core::renderer: &mut Renderer,
tree: &iced::widget::Tree,
renderer: &mut Renderer,
theme: &Theme,
style: &iced_core::renderer::Style,
iced_core::layout: iced_core::Layout<'_>,
cursor: iced_core::mouse::Cursor,
viewport: &iced_core::Rectangle,
style: &iced::renderer::Style,
layout: iced::Layout<'_>,
cursor: iced::mouse::Cursor,
viewport: &iced::Rectangle,
) {
todo!()
if let Ok((width, height)) = self.handle.source.size() {
let video_size = iced::Size {
width: width as f32,
height: height as f32,
};
let bounds = layout.bounds();
let adjusted_fit = self.content_fit.fit(video_size, bounds.size());
let scale = iced::Vector::new(
adjusted_fit.width / video_size.width,
adjusted_fit.height / video_size.height,
);
let final_size = video_size * scale;
let position = match self.content_fit {
iced::ContentFit::None => iced::Point::new(
bounds.x + (video_size.width - adjusted_fit.width) / 2.0,
bounds.y + (video_size.height - adjusted_fit.height) / 2.0,
),
_ => iced::Point::new(
bounds.center_x() - final_size.width / 2.0,
bounds.center_y() - final_size.height / 2.0,
),
};
let drawing_bounds = iced::Rectangle::new(position, final_size);
let render = |renderer: &mut Renderer| {
renderer.draw_primitive(
drawing_bounds,
primitive::VideoFrame {
id: self.id.clone(),
size: iced_wgpu::wgpu::Extent3d {
width: width as u32,
height: height as u32,
depth_or_array_layers: 1,
},
ready: Arc::clone(&self.handle.frame_ready),
frame: Arc::clone(&self.handle.source.frame),
},
);
};
if adjusted_fit.width > bounds.width || adjusted_fit.height > bounds.height {
renderer.with_layer(bounds, render);
} else {
render(renderer);
}
}
}
fn update(
&mut self,
_tree: &mut iced_core::widget::Tree,
event: &iced::Event,
_layout: iced_core::Layout<'_>,
_cursor: iced_core::mouse::Cursor,
_renderer: &Renderer,
_clipboard: &mut dyn iced_core::Clipboard,
shell: &mut iced_core::Shell<'_, Message>,
_viewport: &iced::Rectangle,
) {
if let iced::Event::Window(iced::window::Event::RedrawRequested(_)) = event {
if self
.handle
.frame_ready
.load(std::sync::atomic::Ordering::SeqCst)
{
shell.request_redraw();
} else {
shell.request_redraw_at(iced::window::RedrawRequest::At(
iced_core::time::Instant::now() + core::time::Duration::from_millis(32),
));
}
}
}
}
impl<'a, Message, Theme, Renderer> From<Video<'a, Message, Theme, Renderer>>
for iced::Element<'a, Message, Theme, Renderer>
where
Message: 'a + Clone,
Theme: 'a,
Renderer: 'a + iced_wgpu::primitive::Renderer,
{
fn from(video: Video<'a, Message, Theme, Renderer>) -> Self {
Self::new(video)
}
}

View File

@@ -1,14 +1,18 @@
use crate::id;
use iced_wgpu::primitive::Pipeline;
use iced_wgpu::wgpu;
use std::collections::BTreeMap;
use std::sync::{Arc, atomic::AtomicBool};
use std::sync::{Arc, Mutex, atomic::AtomicBool};
#[derive(Debug)]
pub struct VideoPrimitive {
texture: wgpu::TextureView,
ready: Arc<AtomicBool>,
pub struct VideoFrame {
pub id: id::Id,
pub size: wgpu::Extent3d,
pub ready: Arc<AtomicBool>,
pub frame: Arc<Mutex<Vec<u8>>>,
}
impl iced_wgpu::Primitive for VideoPrimitive {
impl iced_wgpu::Primitive for VideoFrame {
type Pipeline = VideoPipeline;
fn prepare(
@@ -19,11 +23,102 @@ impl iced_wgpu::Primitive for VideoPrimitive {
bounds: &iced_wgpu::core::Rectangle,
viewport: &iced_wgpu::graphics::Viewport,
) {
todo!()
}
fn draw(&self, _pipeline: &Self::Pipeline, _render_pass: &mut wgpu::RenderPass<'_>) -> bool {
false
let video = pipeline.videos.entry(self.id.clone()).or_insert_with(|| {
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced-video-texture"),
size: self.size,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: pipeline.format,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("iced-video-texture-bind-group"),
layout: &pipeline.bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(
&texture.create_view(&wgpu::TextureViewDescriptor::default()),
),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&pipeline.sampler),
},
],
});
VideoTextures {
id: self.id.clone(),
texture,
bind_group,
ready: Arc::clone(&self.ready),
}
});
// dbg!(&self.size, video.texture.size());
if self.size != video.texture.size() {
// Resize the texture if the size has changed.
let new_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced-video-texture-resized"),
size: self.size,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: pipeline.format,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let new_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("iced-video-texture-bind-group-resized"),
layout: &pipeline.bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(
&new_texture.create_view(&wgpu::TextureViewDescriptor::default()),
),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&pipeline.sampler),
},
],
});
video.texture = new_texture;
video.bind_group = new_bind_group;
}
// BUG: This causes a panic because the texture size is not correct for some reason.
if video.ready.load(std::sync::atomic::Ordering::SeqCst) {
let frame = self.frame.lock().expect("BUG: Mutex poisoned");
if frame.len() != (4 * self.size.width * self.size.height) as usize {
tracing::warn!(
"Frame size mismatch: expected {}, got {}",
4 * self.size.width * self.size.height,
frame.len()
);
return;
}
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: &video.texture,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&frame,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(4 * video.texture.size().width),
rows_per_image: Some(video.texture.size().height),
},
self.size,
);
video
.ready
.store(false, std::sync::atomic::Ordering::SeqCst);
}
}
fn render(
@@ -33,35 +128,41 @@ impl iced_wgpu::Primitive for VideoPrimitive {
target: &wgpu::TextureView,
clip_bounds: &iced_wgpu::core::Rectangle<u32>,
) {
if self.ready.load(std::sync::atomic::Ordering::SeqCst) {
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("iced-video-render-pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: target,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Load,
store: wgpu::StoreOp::Store,
},
depth_slice: None,
})],
depth_stencil_attachment: None,
timestamp_writes: None,
occlusion_query_set: None,
});
let Some(video) = pipeline.videos.get(&self.id) else {
return;
};
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("iced-video-render-pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: target,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color {
r: 0.1,
g: 0.2,
b: 0.3,
a: 1.0,
}),
store: wgpu::StoreOp::Store,
},
depth_slice: None,
})],
depth_stencil_attachment: None,
timestamp_writes: None,
occlusion_query_set: None,
});
render_pass.set_pipeline(&pipeline.pipeline);
render_pass.set_bind_group(0, &self.bind_group, &[]);
render_pass.draw(0..3, 0..1);
self.ready
.store(false, std::sync::atomic::Ordering::Relaxed);
}
render_pass.set_pipeline(&pipeline.pipeline);
render_pass.set_bind_group(0, &video.bind_group, &[]);
render_pass.draw(0..3, 0..1);
// self.ready
// .store(false, std::sync::atomic::Ordering::Relaxed);
}
}
#[derive(Debug)]
pub struct VideoTextures {
id: u64,
id: id::Id,
texture: wgpu::Texture,
bind_group: wgpu::BindGroup,
ready: Arc<AtomicBool>,
@@ -72,7 +173,8 @@ pub struct VideoPipeline {
pipeline: wgpu::RenderPipeline,
bind_group_layout: wgpu::BindGroupLayout,
sampler: wgpu::Sampler,
videos: BTreeMap<u64, VideoTextures>,
videos: BTreeMap<id::Id, VideoTextures>,
format: wgpu::TextureFormat,
}
pub trait HdrTextureFormatExt {
@@ -171,6 +273,7 @@ impl Pipeline for VideoPipeline {
pipeline,
bind_group_layout,
sampler,
format,
videos: BTreeMap::new(),
}
}

View File

@@ -1,44 +1,107 @@
use crate::{Error, Result, ResultExt};
use gst::{
Bus, Gst, MessageType, MessageView, Sink, Source,
app::AppSink,
caps::{Caps, CapsType},
element::ElementExt,
pipeline::PipelineExt,
playback::Playbin3,
videoconvertscale::VideoConvert,
};
use std::sync::{Arc, Mutex, atomic::AtomicBool};
#[derive(Debug, Clone)]
pub struct VideoSource {
playbin: Playbin3,
videoconvert: VideoConvert,
appsink: AppSink,
bus: Bus,
pub(crate) playbin: Playbin3,
pub(crate) videoconvert: VideoConvert,
pub(crate) appsink: AppSink,
pub(crate) bus: Bus,
pub(crate) ready: Arc<AtomicBool>,
pub(crate) frame: Arc<Mutex<Vec<u8>>>,
}
impl VideoSource {
/// Creates a new video source from the given URL.
/// Since this doesn't have to parse the pipeline manually, we aren't sanitizing the URL for
/// now.
pub async fn new(url: impl AsRef<str>) -> Result<Self> {
pub fn new(url: impl AsRef<str>) -> Result<Self> {
Gst::new();
let videoconvert = VideoConvert::new("iced-video-convert").change_context(Error)?;
let appsink = AppSink::new("iced-video-sink").change_context(Error)?;
let videoconvert = VideoConvert::new("iced-video-convert")
// .change_context(Error)?
// .with_output_format(gst::plugins::videoconvertscale::VideoFormat::Rgba)
.change_context(Error)?;
let appsink = AppSink::new("iced-video-sink")
.change_context(Error)?
.with_caps(
Caps::builder(CapsType::Video)
.field("format", "RGBA")
.build(),
);
let video_sink = videoconvert.link(&appsink).change_context(Error)?;
let playbin = gst::plugins::playback::Playbin3::new("iced-video")
.change_context(Error)?
.with_uri(url.as_ref())
.with_video_sink(&video_sink);
let bus = playbin.bus().change_context(Error)?;
playbin.wait_ready()?;
// let bus_stream = bus.stream();
// bus_stream.find(|message| {
// let view = message.view();
// if let gst::MessageView::StateChanged(change) = view {
// change.current() == gst::State::Ready
// } else {
// false
// }
// });
playbin.pause().change_context(Error)?;
let ready = Arc::new(AtomicBool::new(false));
let frame = Arc::new(Mutex::new(Vec::new()));
let appsink = appsink.on_new_frame({
let ready = Arc::clone(&ready);
let frame = Arc::clone(&frame);
move |appsink| {
let Ok(sample) = appsink.pull_sample() else {
return Ok(());
};
let caps = sample.caps().ok_or(gst::gstreamer::FlowError::Error)?;
let structure_0 = caps.structure(0).ok_or(gst::gstreamer::FlowError::Error)?;
let width = structure_0
.get::<i32>("width")
.map_err(|_| gst::gstreamer::FlowError::Error)?;
let height = structure_0
.get::<i32>("height")
.map_err(|_| gst::gstreamer::FlowError::Error)?;
let buffer = sample.buffer().and_then(|b| b.map_readable().ok());
if let Some(buffer) = buffer {
{
let mut frame = frame.lock().expect("BUG: Mutex poisoned");
debug_assert_eq!(buffer.size(), (width * height * 4) as usize);
if frame.len() != buffer.size() {
frame.resize(buffer.size(), 0);
}
frame.copy_from_slice(buffer.as_slice());
ready.store(true, std::sync::atomic::Ordering::Relaxed);
}
// if written.is_err() {
// tracing::error!("Failed to write video frame to buffer");
// } else {
// ready.store(true, std::sync::atomic::Ordering::Relaxed);
// }
}
Ok(())
}
});
Ok(Self {
playbin,
videoconvert,
appsink,
bus,
ready,
frame,
})
}
pub async fn wait(self) -> Result<()> {
self.playbin
.wait_for_states(&[gst::State::Paused, gst::State::Playing])
.await
.change_context(Error)
.attach("Failed to wait for video initialisation")
}
pub fn play(&self) -> Result<()> {
self.playbin
.play()
@@ -53,12 +116,15 @@ impl VideoSource {
.attach("Failed to pause video")
}
pub fn bus(&self) -> &Bus {}
// pub fn copy_frame_to_texture(&self, texture: wgpu::TextureView) -> Result<()> {
// let frame = self
// .appsink
// .try_pull_sample(core::time::Duration::from_millis(1))?
// .ok_or(Error)
// .attach("No video frame available")?;
// }
pub fn size(&self) -> Result<(i32, i32)> {
let caps = self
.appsink
.sink("sink")
.current_caps()
.change_context(Error)?;
caps.width()
.and_then(|width| caps.height().map(|height| (width, height)))
.ok_or(Error)
.attach("Failed to get width, height")
}
}