feat: Added iced_video_player

uttarayan21
2025-11-20 21:59:47 +05:30
parent f41625e0ed
commit b1cfc19b96
24 changed files with 8109 additions and 15 deletions


@@ -0,0 +1,76 @@
//! # Iced Video Player
//!
//! A convenient video player widget for Iced.
//!
//! To get started, load a video from a URI (e.g., a file path prefixed with `file:///`) using [`Video::new`](crate::Video::new),
//! then use it like any other Iced widget in your `view` function by creating a [`VideoPlayer`].
//!
//! Example:
//! ```rust
//! use iced_video_player::{Video, VideoPlayer};
//!
//! fn main() -> iced::Result {
//! iced::run("Video Player", (), App::view)
//! }
//!
//! struct App {
//! video: Video,
//! }
//!
//! impl Default for App {
//! fn default() -> Self {
//! App {
//! video: Video::new(&url::Url::parse("file:///C:/my_video.mp4").unwrap()).unwrap(),
//! }
//! }
//! }
//!
//! impl App {
//! fn view(&self) -> iced::Element<()> {
//! VideoPlayer::new(&self.video).into()
//! }
//! }
//! ```
//!
//! You can programmatically control the video (e.g., seek, pause, loop, grab thumbnails) by accessing various methods on [`Video`].
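//!
//! For example, pausing, looping, and seeking (a minimal sketch; assumes `video` is a mutable [`Video`] stored in your application state):
//! ```rust,no_run
//! # use iced_video_player::Video;
//! # use std::time::Duration;
//! # fn control(video: &mut Video) -> Result<(), iced_video_player::Error> {
//! video.set_paused(true);                      // pause playback
//! video.set_looping(true);                     // restart automatically at end of stream
//! video.seek(Duration::from_secs(10), false)?; // fast (inaccurate) seek to 10 seconds
//! # Ok(())
//! # }
//! ```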
mod pipeline;
mod video;
mod video_player;
use gstreamer as gst;
use thiserror::Error;
pub use video::Position;
pub use video::Video;
pub use video_player::VideoPlayer;
#[derive(Debug, Error)]
pub enum Error {
#[error("{0}")]
Glib(#[from] glib::Error),
#[error("{0}")]
Bool(#[from] glib::BoolError),
#[error("failed to get the gstreamer bus")]
Bus,
#[error("failed to get AppSink element with name='{0}' from gstreamer pipeline")]
AppSink(String),
#[error("{0}")]
StateChange(#[from] gst::StateChangeError),
#[error("failed to cast gstreamer element")]
Cast,
#[error("{0}")]
Io(#[from] std::io::Error),
#[error("invalid URI")]
Uri,
#[error("failed to get media capabilities")]
Caps,
#[error("failed to query media duration or position")]
Duration,
#[error("failed to sync with playback")]
Sync,
#[error("failed to lock internal sync primitive")]
Lock,
#[error("invalid framerate: {0}")]
Framerate(f64),
}


@@ -0,0 +1,469 @@
use crate::video::Frame;
use iced_wgpu::primitive::Primitive;
use iced_wgpu::wgpu;
use std::{
collections::{btree_map::Entry, BTreeMap},
num::NonZero,
sync::{
atomic::{AtomicBool, AtomicUsize, Ordering},
Arc, Mutex,
},
};
#[repr(C)]
struct Uniforms {
rect: [f32; 4],
// padding so the struct is 256 bytes (16-byte rect + 240 bytes), matching wgpu's default min_uniform_buffer_offset_alignment
_pad: [u8; 240],
}
struct VideoEntry {
texture_y: wgpu::Texture,
texture_uv: wgpu::Texture,
instances: wgpu::Buffer,
bg0: wgpu::BindGroup,
alive: Arc<AtomicBool>,
prepare_index: AtomicUsize,
render_index: AtomicUsize,
}
pub(crate) struct VideoPipeline {
pipeline: wgpu::RenderPipeline,
bg0_layout: wgpu::BindGroupLayout,
sampler: wgpu::Sampler,
videos: BTreeMap<u64, VideoEntry>,
}
impl VideoPipeline {
fn new(device: &wgpu::Device, format: wgpu::TextureFormat) -> Self {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: Some("iced_video_player shader"),
source: wgpu::ShaderSource::Wgsl(include_str!("shader.wgsl").into()),
});
let bg0_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("iced_video_player bind group 0 layout"),
entries: &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
sample_type: wgpu::TextureSampleType::Float { filterable: true },
view_dimension: wgpu::TextureViewDimension::D2,
multisampled: false,
},
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
sample_type: wgpu::TextureSampleType::Float { filterable: true },
view_dimension: wgpu::TextureViewDimension::D2,
multisampled: false,
},
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 2,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 3,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: true,
min_binding_size: None,
},
count: None,
},
],
});
let layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("iced_video_player pipeline layout"),
bind_group_layouts: &[&bg0_layout],
push_constant_ranges: &[],
});
let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("iced_video_player pipeline"),
layout: Some(&layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: Some("vs_main"),
buffers: &[],
compilation_options: wgpu::PipelineCompilationOptions::default(),
},
primitive: wgpu::PrimitiveState::default(),
depth_stencil: None,
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false,
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: Some("fs_main"),
targets: &[Some(wgpu::ColorTargetState {
format,
blend: None,
write_mask: wgpu::ColorWrites::ALL,
})],
compilation_options: wgpu::PipelineCompilationOptions::default(),
}),
multiview: None,
cache: None,
});
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
label: Some("iced_video_player sampler"),
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Linear,
min_filter: wgpu::FilterMode::Linear,
mipmap_filter: wgpu::FilterMode::Nearest,
lod_min_clamp: 0.0,
lod_max_clamp: 1.0,
compare: None,
anisotropy_clamp: 1,
border_color: None,
});
VideoPipeline {
pipeline,
bg0_layout,
sampler,
videos: BTreeMap::new(),
}
}
fn upload(
&mut self,
device: &wgpu::Device,
queue: &wgpu::Queue,
video_id: u64,
alive: &Arc<AtomicBool>,
(width, height): (u32, u32),
frame: &[u8],
) {
if let Entry::Vacant(entry) = self.videos.entry(video_id) {
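// First frame for this video: allocate the NV12 planes as two textures — a full-resolution R8 texture for Y and a half-resolution Rg8 texture for the interleaved UV plane.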
let texture_y = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced_video_player texture"),
size: wgpu::Extent3d {
width,
height,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::R8Unorm,
usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
view_formats: &[],
});
let texture_uv = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced_video_player texture"),
size: wgpu::Extent3d {
width: width / 2,
height: height / 2,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rg8Unorm,
usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
view_formats: &[],
});
let view_y = texture_y.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced_video_player texture view"),
format: None,
dimension: None,
aspect: wgpu::TextureAspect::All,
base_mip_level: 0,
mip_level_count: None,
base_array_layer: 0,
array_layer_count: None,
usage: Some(wgpu::TextureUsages::empty()),
});
let view_uv = texture_uv.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced_video_player texture view"),
format: None,
dimension: None,
aspect: wgpu::TextureAspect::All,
base_mip_level: 0,
mip_level_count: None,
base_array_layer: 0,
array_layer_count: None,
usage: Some(wgpu::TextureUsages::empty()),
});
let instances = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("iced_video_player uniform buffer"),
size: 256 * std::mem::size_of::<Uniforms>() as u64, // max 256 video players per frame
usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::UNIFORM,
mapped_at_creation: false,
});
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("iced_video_player bind group"),
layout: &self.bg0_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(&view_y),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureView(&view_uv),
},
wgpu::BindGroupEntry {
binding: 2,
resource: wgpu::BindingResource::Sampler(&self.sampler),
},
wgpu::BindGroupEntry {
binding: 3,
resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding {
buffer: &instances,
offset: 0,
size: Some(NonZero::new(std::mem::size_of::<Uniforms>() as _).unwrap()),
}),
},
],
});
entry.insert(VideoEntry {
texture_y,
texture_uv,
instances,
bg0: bind_group,
alive: Arc::clone(alive),
prepare_index: AtomicUsize::new(0),
render_index: AtomicUsize::new(0),
});
}
let VideoEntry {
texture_y,
texture_uv,
..
} = self.videos.get(&video_id).unwrap();
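// The NV12 frame buffer is the Y plane (width * height bytes) followed by the interleaved UV plane at half horizontal and vertical resolution.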
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: texture_y,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&frame[..(width * height) as usize],
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(width),
rows_per_image: Some(height),
},
wgpu::Extent3d {
width,
height,
depth_or_array_layers: 1,
},
);
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: texture_uv,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&frame[(width * height) as usize..],
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(width),
rows_per_image: Some(height / 2),
},
wgpu::Extent3d {
width: width / 2,
height: height / 2,
depth_or_array_layers: 1,
},
);
}
fn cleanup(&mut self) {
let ids: Vec<_> = self
.videos
.iter()
.filter_map(|(id, entry)| (!entry.alive.load(Ordering::SeqCst)).then_some(*id))
.collect();
for id in ids {
if let Some(video) = self.videos.remove(&id) {
video.texture_y.destroy();
video.texture_uv.destroy();
video.instances.destroy();
}
}
}
fn prepare(&mut self, queue: &wgpu::Queue, video_id: u64, bounds: &iced::Rectangle) {
if let Some(video) = self.videos.get_mut(&video_id) {
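// Each widget instance drawing this video in the current frame writes its bounds into its own 256-byte slot of the uniform buffer, addressed later in `draw` via a dynamic offset.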
let uniforms = Uniforms {
rect: [
bounds.x,
bounds.y,
bounds.x + bounds.width,
bounds.y + bounds.height,
],
_pad: [0; 240],
};
queue.write_buffer(
&video.instances,
(video.prepare_index.load(Ordering::Relaxed) * std::mem::size_of::<Uniforms>())
as u64,
unsafe {
std::slice::from_raw_parts(
&uniforms as *const _ as *const u8,
std::mem::size_of::<Uniforms>(),
)
},
);
video.prepare_index.fetch_add(1, Ordering::Relaxed);
video.render_index.store(0, Ordering::Relaxed);
}
self.cleanup();
}
fn draw(
&self,
target: &wgpu::TextureView,
encoder: &mut wgpu::CommandEncoder,
clip: &iced::Rectangle<u32>,
video_id: u64,
) {
if let Some(video) = self.videos.get(&video_id) {
let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("iced_video_player render pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: target,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Load,
store: wgpu::StoreOp::Store,
},
depth_slice: None,
})],
depth_stencil_attachment: None,
timestamp_writes: None,
occlusion_query_set: None,
});
pass.set_pipeline(&self.pipeline);
pass.set_bind_group(
0,
&video.bg0,
&[
(video.render_index.load(Ordering::Relaxed) * std::mem::size_of::<Uniforms>())
as u32,
],
);
pass.set_scissor_rect(clip.x as _, clip.y as _, clip.width as _, clip.height as _);
pass.draw(0..6, 0..1);
video.prepare_index.store(0, Ordering::Relaxed);
video.render_index.fetch_add(1, Ordering::Relaxed);
}
}
}
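/// Custom shader primitive handed to iced for each video widget: a shared handle to the
/// latest decoded frame plus a flag indicating whether that frame still needs to be
/// uploaded to the GPU.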
#[derive(Debug, Clone)]
pub(crate) struct VideoPrimitive {
video_id: u64,
alive: Arc<AtomicBool>,
frame: Arc<Mutex<Frame>>,
size: (u32, u32),
upload_frame: bool,
}
impl VideoPrimitive {
pub fn new(
video_id: u64,
alive: Arc<AtomicBool>,
frame: Arc<Mutex<Frame>>,
size: (u32, u32),
upload_frame: bool,
) -> Self {
VideoPrimitive {
video_id,
alive,
frame,
size,
upload_frame,
}
}
}
impl Primitive for VideoPrimitive {
type Renderer = VideoPipeline;
fn initialize(
&self,
device: &wgpu::Device,
_queue: &wgpu::Queue,
format: wgpu::TextureFormat,
) -> Self::Renderer {
VideoPipeline::new(device, format)
}
fn prepare(
&self,
renderer: &mut Self::Renderer,
device: &wgpu::Device,
queue: &wgpu::Queue,
bounds: &iced::Rectangle,
viewport: &iced_wgpu::graphics::Viewport,
) {
if self.upload_frame {
if let Some(readable) = self.frame.lock().expect("lock frame mutex").readable() {
renderer.upload(
device,
queue,
self.video_id,
&self.alive,
self.size,
readable.as_slice(),
);
}
}
renderer.prepare(
queue,
self.video_id,
&(*bounds
* iced::Transformation::orthographic(
viewport.logical_size().width as _,
viewport.logical_size().height as _,
)),
);
}
fn render(
&self,
renderer: &Self::Renderer,
encoder: &mut wgpu::CommandEncoder,
target: &wgpu::TextureView,
clip_bounds: &iced::Rectangle<u32>,
) {
renderer.draw(target, encoder, clip_bounds, self.video_id);
}
}


@@ -0,0 +1,61 @@
struct VertexOutput {
@builtin(position) position: vec4<f32>,
@location(0) uv: vec2<f32>,
}
struct Uniforms {
rect: vec4<f32>,
}
@group(0) @binding(0)
var tex_y: texture_2d<f32>;
@group(0) @binding(1)
var tex_uv: texture_2d<f32>;
@group(0) @binding(2)
var s: sampler;
@group(0) @binding(3)
var<uniform> uniforms: Uniforms;
@vertex
fn vs_main(@builtin(vertex_index) in_vertex_index: u32) -> VertexOutput {
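// uniforms.rect holds the widget rectangle already transformed to clip space on the CPU
// (see VideoPrimitive::prepare); emit two triangles covering it, carrying UVs in zw.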
var quad = array<vec4<f32>, 6>(
vec4<f32>(uniforms.rect.xy, 0.0, 0.0),
vec4<f32>(uniforms.rect.zy, 1.0, 0.0),
vec4<f32>(uniforms.rect.xw, 0.0, 1.0),
vec4<f32>(uniforms.rect.zy, 1.0, 0.0),
vec4<f32>(uniforms.rect.zw, 1.0, 1.0),
vec4<f32>(uniforms.rect.xw, 0.0, 1.0),
);
var out: VertexOutput;
out.uv = quad[in_vertex_index].zw;
out.position = vec4<f32>(quad[in_vertex_index].xy, 1.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
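// Sample the Y and interleaved UV planes, convert BT.601 limited-range YUV to RGB,
// then apply the sRGB-to-linear transfer function.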
let yuv2r = vec3<f32>(1.164, 0.0, 1.596);
let yuv2g = vec3<f32>(1.164, -0.391, -0.813);
let yuv2b = vec3<f32>(1.164, 2.018, 0.0);
var yuv = vec3<f32>(0.0);
yuv.x = textureSample(tex_y, s, in.uv).r - 0.0625;
yuv.y = textureSample(tex_uv, s, in.uv).r - 0.5;
yuv.z = textureSample(tex_uv, s, in.uv).g - 0.5;
var rgb = vec3<f32>(0.0);
rgb.x = dot(yuv, yuv2r);
rgb.y = dot(yuv, yuv2g);
rgb.z = dot(yuv, yuv2b);
let threshold = rgb <= vec3<f32>(0.04045);
let hi = pow((rgb + vec3<f32>(0.055)) / vec3<f32>(1.055), vec3<f32>(2.4));
let lo = rgb * vec3<f32>(1.0 / 12.92);
rgb = select(hi, lo, threshold);
return vec4<f32>(rgb, 1.0);
}


@@ -0,0 +1,660 @@
use crate::Error;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gstreamer_app::prelude::*;
use iced::widget::image as img;
use std::num::NonZeroU8;
use std::ops::{Deref, DerefMut};
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{Arc, Mutex, RwLock};
use std::time::{Duration, Instant};
/// Position in the media.
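///
/// Both `std::time::Duration` and `u64` convert into a `Position`, so [`Video::seek`]
/// can take either directly (a minimal sketch):
///
/// ```rust,no_run
/// # use iced_video_player::Video;
/// # use std::time::Duration;
/// # fn seek_both_ways(video: &mut Video) -> Result<(), iced_video_player::Error> {
/// video.seek(Duration::from_secs(30), false)?; // Position::Time
/// video.seek(1200u64, true)?;                  // Position::Frame, accurate
/// # Ok(())
/// # }
/// ```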
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Position {
/// Position based on time.
///
/// Not the most accurate format for videos.
Time(Duration),
/// Position based on nth frame.
Frame(u64),
}
impl From<Position> for gst::GenericFormattedValue {
fn from(pos: Position) -> Self {
match pos {
Position::Time(t) => gst::ClockTime::from_nseconds(t.as_nanos() as _).into(),
Position::Frame(f) => gst::format::Default::from_u64(f).into(),
}
}
}
impl From<Duration> for Position {
fn from(t: Duration) -> Self {
Position::Time(t)
}
}
impl From<u64> for Position {
fn from(f: u64) -> Self {
Position::Frame(f)
}
}
#[derive(Debug)]
pub(crate) struct Frame(gst::Sample);
impl Frame {
pub fn empty() -> Self {
Self(gst::Sample::builder().build())
}
pub fn readable(&self) -> Option<gst::BufferMap<'_, gst::buffer::Readable>> {
self.0.buffer().and_then(|x| x.map_readable().ok())
}
}
#[derive(Debug)]
pub(crate) struct Internal {
pub(crate) id: u64,
pub(crate) bus: gst::Bus,
pub(crate) source: gst::Pipeline,
pub(crate) alive: Arc<AtomicBool>,
pub(crate) worker: Option<std::thread::JoinHandle<()>>,
pub(crate) width: i32,
pub(crate) height: i32,
pub(crate) framerate: f64,
pub(crate) duration: Duration,
pub(crate) speed: f64,
pub(crate) sync_av: bool,
pub(crate) frame: Arc<Mutex<Frame>>,
pub(crate) upload_frame: Arc<AtomicBool>,
pub(crate) last_frame_time: Arc<Mutex<Instant>>,
pub(crate) looping: bool,
pub(crate) is_eos: bool,
pub(crate) restart_stream: bool,
pub(crate) sync_av_avg: u64,
pub(crate) sync_av_counter: u64,
pub(crate) subtitle_text: Arc<Mutex<Option<String>>>,
pub(crate) upload_text: Arc<AtomicBool>,
}
impl Internal {
pub(crate) fn seek(&self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
let position = position.into();
// gstreamer complains if the start & end value types aren't the same
match &position {
Position::Time(_) => self.source.seek(
self.speed,
gst::SeekFlags::FLUSH
| if accurate {
gst::SeekFlags::ACCURATE
} else {
gst::SeekFlags::empty()
},
gst::SeekType::Set,
gst::GenericFormattedValue::from(position),
gst::SeekType::Set,
gst::ClockTime::NONE,
)?,
Position::Frame(_) => self.source.seek(
self.speed,
gst::SeekFlags::FLUSH
| if accurate {
gst::SeekFlags::ACCURATE
} else {
gst::SeekFlags::empty()
},
gst::SeekType::Set,
gst::GenericFormattedValue::from(position),
gst::SeekType::Set,
gst::format::Default::NONE,
)?,
};
*self.subtitle_text.lock().expect("lock subtitle_text") = None;
self.upload_text.store(true, Ordering::SeqCst);
Ok(())
}
pub(crate) fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
let Some(position) = self.source.query_position::<gst::ClockTime>() else {
return Err(Error::Caps);
};
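// Changing the rate requires a flushing seek: a positive rate plays from the current position to the end, a negative rate plays from the start back to the current position.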
if speed > 0.0 {
self.source.seek(
speed,
gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
gst::SeekType::Set,
position,
gst::SeekType::End,
gst::ClockTime::from_seconds(0),
)?;
} else {
self.source.seek(
speed,
gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
gst::SeekType::Set,
gst::ClockTime::from_seconds(0),
gst::SeekType::Set,
position,
)?;
}
self.speed = speed;
Ok(())
}
pub(crate) fn restart_stream(&mut self) -> Result<(), Error> {
self.is_eos = false;
self.set_paused(false);
self.seek(0, false)?;
Ok(())
}
pub(crate) fn set_paused(&mut self, paused: bool) {
self.source
.set_state(if paused {
gst::State::Paused
} else {
gst::State::Playing
})
.unwrap(/* state was changed in ctor; state errors caught there */);
// Set the restart_stream flag so the stream is restarted on the next redraw
if self.is_eos && !paused {
self.restart_stream = true;
}
}
pub(crate) fn paused(&self) -> bool {
self.source.state(gst::ClockTime::ZERO).1 == gst::State::Paused
}
/// Syncs audio with video to compensate for the (inevitably nonzero) latency in presenting each frame.
pub(crate) fn set_av_offset(&mut self, offset: Duration) {
if self.sync_av {
self.sync_av_counter += 1;
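// Cumulative moving average of the measured offset, in nanoseconds; applied (negated) to the pipeline's `av-offset` property every 128 frames.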
self.sync_av_avg = self.sync_av_avg * (self.sync_av_counter - 1) / self.sync_av_counter
+ offset.as_nanos() as u64 / self.sync_av_counter;
if self.sync_av_counter % 128 == 0 {
self.source
.set_property("av-offset", -(self.sync_av_avg as i64));
}
}
}
}
/// A multimedia video loaded from a URI (e.g., a local file path or HTTP stream).
#[derive(Debug)]
pub struct Video(pub(crate) RwLock<Internal>);
impl Drop for Video {
fn drop(&mut self) {
let inner = self.0.get_mut().expect("failed to lock");
inner
.source
.set_state(gst::State::Null)
.expect("failed to set state");
inner.alive.store(false, Ordering::SeqCst);
if let Some(worker) = inner.worker.take() {
if let Err(err) = worker.join() {
match err.downcast_ref::<String>() {
Some(e) => log::error!("Video thread panicked: {e}"),
None => log::error!("Video thread panicked with unknown reason"),
}
}
}
}
}
impl Video {
/// Create a new video that loads its media from the given `uri`.
/// Note that live sources will report the duration to be zero.
pub fn new(uri: &url::Url) -> Result<Self, Error> {
gst::init()?;
let pipeline = format!("playbin uri=\"{}\" text-sink=\"appsink name=iced_text sync=true drop=true\" video-sink=\"videoscale ! videoconvert ! appsink name=iced_video drop=true caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1\"", uri.as_str());
let pipeline = gst::parse::launch(pipeline.as_ref())?
.downcast::<gst::Pipeline>()
.map_err(|_| Error::Cast)?;
let video_sink: gst::Element = pipeline.property("video-sink");
let pad = video_sink.pads().first().cloned().unwrap();
let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
let bin = pad
.parent_element()
.unwrap()
.downcast::<gst::Bin>()
.unwrap();
let video_sink = bin.by_name("iced_video").unwrap();
let video_sink = video_sink.downcast::<gst_app::AppSink>().unwrap();
let text_sink: gst::Element = pipeline.property("text-sink");
let text_sink = text_sink.downcast::<gst_app::AppSink>().unwrap();
Self::from_gst_pipeline(pipeline, video_sink, Some(text_sink))
}
/// Creates a new video based on an existing GStreamer pipeline and appsink.
/// Expects an `appsink` plugin with `caps=video/x-raw,format=NV12`.
///
/// An optional `text_sink` can be provided, which enables subtitle messages
/// to be emitted.
///
/// **Note:** Many functions of [`Video`] assume a `playbin` pipeline.
/// Non-`playbin` pipelines given here may not have full functionality.
pub fn from_gst_pipeline(
pipeline: gst::Pipeline,
video_sink: gst_app::AppSink,
text_sink: Option<gst_app::AppSink>,
) -> Result<Self, Error> {
gst::init()?;
static NEXT_ID: AtomicU64 = AtomicU64::new(0);
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
// We need to ensure we stop the pipeline if we hit an error,
// or else there may be audio left playing in the background.
macro_rules! cleanup {
($expr:expr) => {
$expr.map_err(|e| {
let _ = pipeline.set_state(gst::State::Null);
e
})
};
}
let pad = video_sink.pads().first().cloned().unwrap();
cleanup!(pipeline.set_state(gst::State::Playing))?;
// wait for up to 5 seconds until the decoder gets the source capabilities
cleanup!(pipeline.state(gst::ClockTime::from_seconds(5)).0)?;
// extract resolution and framerate
// TODO(jazzfool): maybe we want to extract some other information too?
let caps = cleanup!(pad.current_caps().ok_or(Error::Caps))?;
let s = cleanup!(caps.structure(0).ok_or(Error::Caps))?;
let width = cleanup!(s.get::<i32>("width").map_err(|_| Error::Caps))?;
let height = cleanup!(s.get::<i32>("height").map_err(|_| Error::Caps))?;
// resolution should be mod4
let width = ((width + 4 - 1) / 4) * 4;
let framerate = cleanup!(s.get::<gst::Fraction>("framerate").map_err(|_| Error::Caps))?;
let framerate = framerate.numer() as f64 / framerate.denom() as f64;
if framerate.is_nan()
|| framerate.is_infinite()
|| framerate < 0.0
|| framerate.abs() < f64::EPSILON
{
let _ = pipeline.set_state(gst::State::Null);
return Err(Error::Framerate(framerate));
}
let duration = Duration::from_nanos(
pipeline
.query_duration::<gst::ClockTime>()
.map(|duration| duration.nseconds())
.unwrap_or(0),
);
let sync_av = pipeline.has_property("av-offset", None);
// frames arrive from the appsink as NV12 (12 bits per pixel)
let frame = Arc::new(Mutex::new(Frame::empty()));
let upload_frame = Arc::new(AtomicBool::new(false));
let alive = Arc::new(AtomicBool::new(true));
let last_frame_time = Arc::new(Mutex::new(Instant::now()));
let frame_ref = Arc::clone(&frame);
let upload_frame_ref = Arc::clone(&upload_frame);
let alive_ref = Arc::clone(&alive);
let last_frame_time_ref = Arc::clone(&last_frame_time);
let subtitle_text = Arc::new(Mutex::new(None));
let upload_text = Arc::new(AtomicBool::new(false));
let subtitle_text_ref = Arc::clone(&subtitle_text);
let upload_text_ref = Arc::clone(&upload_text);
let pipeline_ref = pipeline.clone();
let worker = std::thread::spawn(move || {
let mut clear_subtitles_at = None;
while alive_ref.load(Ordering::Acquire) {
if let Err(gst::FlowError::Error) = (|| -> Result<(), gst::FlowError> {
let sample =
if pipeline_ref.state(gst::ClockTime::ZERO).1 != gst::State::Playing {
video_sink
.try_pull_preroll(gst::ClockTime::from_mseconds(16))
.ok_or(gst::FlowError::Eos)?
} else {
video_sink
.try_pull_sample(gst::ClockTime::from_mseconds(16))
.ok_or(gst::FlowError::Eos)?
};
*last_frame_time_ref
.lock()
.map_err(|_| gst::FlowError::Error)? = Instant::now();
let frame_segment = sample.segment().cloned().ok_or(gst::FlowError::Error)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let frame_pts = buffer.pts().ok_or(gst::FlowError::Error)?;
let frame_duration = buffer.duration().ok_or(gst::FlowError::Error)?;
{
let mut frame_guard =
frame_ref.lock().map_err(|_| gst::FlowError::Error)?;
*frame_guard = Frame(sample);
}
upload_frame_ref.swap(true, Ordering::SeqCst);
if let Some(at) = clear_subtitles_at {
if frame_pts >= at {
*subtitle_text_ref
.lock()
.map_err(|_| gst::FlowError::Error)? = None;
upload_text_ref.store(true, Ordering::SeqCst);
clear_subtitles_at = None;
}
}
let text = text_sink
.as_ref()
.and_then(|sink| sink.try_pull_sample(gst::ClockTime::from_seconds(0)));
if let Some(text) = text {
let text_segment = text.segment().ok_or(gst::FlowError::Error)?;
let text = text.buffer().ok_or(gst::FlowError::Error)?;
let text_pts = text.pts().ok_or(gst::FlowError::Error)?;
let text_duration = text.duration().ok_or(gst::FlowError::Error)?;
let frame_running_time = frame_segment.to_running_time(frame_pts).value();
let frame_running_time_end = frame_segment
.to_running_time(frame_pts + frame_duration)
.value();
let text_running_time = text_segment.to_running_time(text_pts).value();
let text_running_time_end = text_segment
.to_running_time(text_pts + text_duration)
.value();
// see gst-plugins-base/ext/pango/gstbasetextoverlay.c (gst_base_text_overlay_video_chain)
// as an example of how to correctly synchronize the text+video segments
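// Show the subtitle only if its running-time interval overlaps the current frame's interval.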
if text_running_time_end > frame_running_time
&& frame_running_time_end > text_running_time
{
let duration = text.duration().unwrap_or(gst::ClockTime::ZERO);
let map = text.map_readable().map_err(|_| gst::FlowError::Error)?;
let text = std::str::from_utf8(map.as_slice())
.map_err(|_| gst::FlowError::Error)?
.to_string();
*subtitle_text_ref
.lock()
.map_err(|_| gst::FlowError::Error)? = Some(text);
upload_text_ref.store(true, Ordering::SeqCst);
clear_subtitles_at = Some(text_pts + duration);
}
}
Ok(())
})() {
log::error!("error pulling frame");
}
}
});
Ok(Video(RwLock::new(Internal {
id,
bus: pipeline.bus().unwrap(),
source: pipeline,
alive,
worker: Some(worker),
width,
height,
framerate,
duration,
speed: 1.0,
sync_av,
frame,
upload_frame,
last_frame_time,
looping: false,
is_eos: false,
restart_stream: false,
sync_av_avg: 0,
sync_av_counter: 0,
subtitle_text,
upload_text,
})))
}
pub(crate) fn read(&self) -> impl Deref<Target = Internal> + '_ {
self.0.read().expect("lock")
}
pub(crate) fn write(&self) -> impl DerefMut<Target = Internal> + '_ {
self.0.write().expect("lock")
}
pub(crate) fn get_mut(&mut self) -> impl DerefMut<Target = Internal> + '_ {
self.0.get_mut().expect("lock")
}
/// Get the size/resolution of the video as `(width, height)`.
pub fn size(&self) -> (i32, i32) {
(self.read().width, self.read().height)
}
/// Get the framerate of the video as frames per second.
pub fn framerate(&self) -> f64 {
self.read().framerate
}
/// Set the volume multiplier of the audio.
/// `0.0` = 0% volume, `1.0` = 100% volume.
///
/// This uses a linear scale, for example `0.5` is perceived as half as loud.
pub fn set_volume(&mut self, volume: f64) {
self.get_mut().source.set_property("volume", volume);
self.set_muted(self.muted()); // for some reason gstreamer unmutes when changing volume?
}
/// Get the volume multiplier of the audio.
pub fn volume(&self) -> f64 {
self.read().source.property("volume")
}
/// Set if the audio is muted or not, without changing the volume.
pub fn set_muted(&mut self, muted: bool) {
self.get_mut().source.set_property("mute", muted);
}
/// Get if the audio is muted or not.
pub fn muted(&self) -> bool {
self.read().source.property("mute")
}
/// Get if the stream ended or not.
pub fn eos(&self) -> bool {
self.read().is_eos
}
/// Get if the media will loop or not.
pub fn looping(&self) -> bool {
self.read().looping
}
/// Set if the media will loop or not.
pub fn set_looping(&mut self, looping: bool) {
self.get_mut().looping = looping;
}
/// Set if the media is paused or not.
pub fn set_paused(&mut self, paused: bool) {
self.get_mut().set_paused(paused)
}
/// Get if the media is paused or not.
pub fn paused(&self) -> bool {
self.read().paused()
}
/// Jumps to a specific position in the media.
/// Passing `true` for `accurate` gives more precise seeking but is slower.
/// For most seeks (e.g., scrubbing) accurate seeking is not needed.
pub fn seek(&mut self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
self.get_mut().seek(position, accurate)
}
/// Set the playback speed of the media.
/// The default speed is `1.0`.
pub fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
self.get_mut().set_speed(speed)
}
/// Get the current playback speed.
pub fn speed(&self) -> f64 {
self.read().speed
}
/// Get the current playback position in time.
pub fn position(&self) -> Duration {
Duration::from_nanos(
self.read()
.source
.query_position::<gst::ClockTime>()
.map_or(0, |pos| pos.nseconds()),
)
}
/// Get the media duration.
pub fn duration(&self) -> Duration {
self.read().duration
}
/// Restarts the stream: seeks to the first frame, unpauses, and clears the `eos` flag.
pub fn restart_stream(&mut self) -> Result<(), Error> {
self.get_mut().restart_stream()
}
/// Set the subtitle URL to display.
pub fn set_subtitle_url(&mut self, url: &url::Url) -> Result<(), Error> {
let paused = self.paused();
let mut inner = self.get_mut();
inner.source.set_state(gst::State::Ready)?;
inner.source.set_property("suburi", url.as_str());
inner.set_paused(paused);
Ok(())
}
/// Get the current subtitle URL.
pub fn subtitle_url(&self) -> Option<url::Url> {
url::Url::parse(
&self
.read()
.source
.property::<Option<String>>("current-suburi")?,
)
.ok()
}
/// Get the underlying GStreamer pipeline.
pub fn pipeline(&self) -> gst::Pipeline {
self.read().source.clone()
}
/// Generates a list of thumbnails based on a set of positions in the media, downscaled by a given factor.
///
/// This is slow, so it should only be called once per instance.
/// It's best to call this at the very start of playback; otherwise the playback position may shift.
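///
/// A minimal sketch (the positions and downscale factor are illustrative):
///
/// ```rust,no_run
/// # use iced_video_player::{Position, Video};
/// # use std::{num::NonZeroU8, time::Duration};
/// # fn grab(video: &mut Video) -> Result<(), iced_video_player::Error> {
/// let _thumbnails = video.thumbnails(
///     (0..5u64).map(|i| Position::Time(Duration::from_secs(i * 60))),
///     NonZeroU8::new(4).unwrap(), // downscale by 4x in each dimension
/// )?;
/// # Ok(())
/// # }
/// ```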
pub fn thumbnails<I>(
&mut self,
positions: I,
downscale: NonZeroU8,
) -> Result<Vec<img::Handle>, Error>
where
I: IntoIterator<Item = Position>,
{
let downscale = u8::from(downscale) as u32;
let paused = self.paused();
let muted = self.muted();
let pos = self.position();
self.set_paused(false);
self.set_muted(true);
let out = {
let inner = self.read();
let width = inner.width;
let height = inner.height;
positions
.into_iter()
.map(|pos| {
inner.seek(pos, true)?;
inner.upload_frame.store(false, Ordering::SeqCst);
while !inner.upload_frame.load(Ordering::SeqCst) {
std::hint::spin_loop();
}
let frame_guard = inner.frame.lock().map_err(|_| Error::Lock)?;
let frame = frame_guard.readable().ok_or(Error::Lock)?;
Ok(img::Handle::from_rgba(
inner.width as u32 / downscale,
inner.height as u32 / downscale,
yuv_to_rgba(frame.as_slice(), width as _, height as _, downscale),
))
})
.collect()
};
self.set_paused(paused);
self.set_muted(muted);
self.seek(pos, true)?;
out
}
}
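/// Converts an NV12 frame (full-resolution Y plane followed by an interleaved half-resolution
/// UV plane) to RGBA using BT.601 limited-range coefficients, sampling every `downscale`-th pixel.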
fn yuv_to_rgba(yuv: &[u8], width: u32, height: u32, downscale: u32) -> Vec<u8> {
let uv_start = width * height;
let mut rgba = vec![];
for y in 0..height / downscale {
for x in 0..width / downscale {
let x_src = x * downscale;
let y_src = y * downscale;
let uv_i = uv_start + width * (y_src / 2) + x_src / 2 * 2;
let y = yuv[(y_src * width + x_src) as usize] as f32;
let u = yuv[uv_i as usize] as f32;
let v = yuv[(uv_i + 1) as usize] as f32;
let r = 1.164 * (y - 16.0) + 1.596 * (v - 128.0);
let g = 1.164 * (y - 16.0) - 0.813 * (v - 128.0) - 0.391 * (u - 128.0);
let b = 1.164 * (y - 16.0) + 2.018 * (u - 128.0);
rgba.push(r as u8);
rgba.push(g as u8);
rgba.push(b as u8);
rgba.push(0xFF);
}
}
rgba
}


@@ -0,0 +1,305 @@
use crate::{pipeline::VideoPrimitive, video::Video};
use gstreamer as gst;
use iced::{
advanced::{self, layout, widget, Widget},
Element,
};
use iced_wgpu::primitive::Renderer as PrimitiveRenderer;
use log::error;
use std::{marker::PhantomData, sync::atomic::Ordering};
use std::{sync::Arc, time::Instant};
/// Video player widget which displays the current frame of a [`Video`](crate::Video).
pub struct VideoPlayer<'a, Message, Theme = iced::Theme, Renderer = iced::Renderer>
where
Renderer: PrimitiveRenderer,
{
video: &'a Video,
content_fit: iced::ContentFit,
width: iced::Length,
height: iced::Length,
on_end_of_stream: Option<Message>,
on_new_frame: Option<Message>,
on_subtitle_text: Option<Box<dyn Fn(Option<String>) -> Message + 'a>>,
on_error: Option<Box<dyn Fn(&glib::Error) -> Message + 'a>>,
_phantom: PhantomData<(Theme, Renderer)>,
}
impl<'a, Message, Theme, Renderer> VideoPlayer<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
{
/// Creates a new video player widget for a given video.
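///
/// The builder methods below can be chained in your `view` (a sketch; `Message::EndOfStream`
/// and `Message::NewFrame` are assumed variants of your application's message type):
///
/// ```rust,ignore
/// VideoPlayer::new(&self.video)
///     .width(iced::Length::Fill)
///     .content_fit(iced::ContentFit::Contain)
///     .on_end_of_stream(Message::EndOfStream)
///     .on_new_frame(Message::NewFrame)
/// ```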
pub fn new(video: &'a Video) -> Self {
VideoPlayer {
video,
content_fit: iced::ContentFit::default(),
width: iced::Length::Shrink,
height: iced::Length::Shrink,
on_end_of_stream: None,
on_new_frame: None,
on_subtitle_text: None,
on_error: None,
_phantom: Default::default(),
}
}
/// Sets the width of the `VideoPlayer` boundaries.
pub fn width(self, width: impl Into<iced::Length>) -> Self {
VideoPlayer {
width: width.into(),
..self
}
}
/// Sets the height of the `VideoPlayer` boundaries.
pub fn height(self, height: impl Into<iced::Length>) -> Self {
VideoPlayer {
height: height.into(),
..self
}
}
/// Sets the `ContentFit` of the `VideoPlayer`.
pub fn content_fit(self, content_fit: iced::ContentFit) -> Self {
VideoPlayer {
content_fit,
..self
}
}
/// Message to send when the video reaches the end of stream (i.e., the video ends).
pub fn on_end_of_stream(self, on_end_of_stream: Message) -> Self {
VideoPlayer {
on_end_of_stream: Some(on_end_of_stream),
..self
}
}
/// Message to send when the video receives a new frame.
pub fn on_new_frame(self, on_new_frame: Message) -> Self {
VideoPlayer {
on_new_frame: Some(on_new_frame),
..self
}
}
/// Message to send when the subtitle text changes (or is cleared).
pub fn on_subtitle_text<F>(self, on_subtitle_text: F) -> Self
where
F: 'a + Fn(Option<String>) -> Message,
{
VideoPlayer {
on_subtitle_text: Some(Box::new(on_subtitle_text)),
..self
}
}
/// Message to send when the video playback encounters an error.
pub fn on_error<F>(self, on_error: F) -> Self
where
F: 'a + Fn(&glib::Error) -> Message,
{
VideoPlayer {
on_error: Some(Box::new(on_error)),
..self
}
}
}
impl<Message, Theme, Renderer> Widget<Message, Theme, Renderer>
for VideoPlayer<'_, Message, Theme, Renderer>
where
Message: Clone,
Renderer: PrimitiveRenderer,
{
fn size(&self) -> iced::Size<iced::Length> {
iced::Size {
width: iced::Length::Shrink,
height: iced::Length::Shrink,
}
}
fn layout(
&mut self,
_tree: &mut widget::Tree,
_renderer: &Renderer,
limits: &layout::Limits,
) -> layout::Node {
let (video_width, video_height) = self.video.size();
// based on `Image::layout`
let image_size = iced::Size::new(video_width as f32, video_height as f32);
let raw_size = limits.resolve(self.width, self.height, image_size);
let full_size = self.content_fit.fit(image_size, raw_size);
let final_size = iced::Size {
width: match self.width {
iced::Length::Shrink => f32::min(raw_size.width, full_size.width),
_ => raw_size.width,
},
height: match self.height {
iced::Length::Shrink => f32::min(raw_size.height, full_size.height),
_ => raw_size.height,
},
};
layout::Node::new(final_size)
}
fn draw(
&self,
_tree: &widget::Tree,
renderer: &mut Renderer,
_theme: &Theme,
_style: &advanced::renderer::Style,
layout: advanced::Layout<'_>,
_cursor: advanced::mouse::Cursor,
_viewport: &iced::Rectangle,
) {
let mut inner = self.video.write();
// bounds based on `Image::draw`
let image_size = iced::Size::new(inner.width as f32, inner.height as f32);
let bounds = layout.bounds();
let adjusted_fit = self.content_fit.fit(image_size, bounds.size());
let scale = iced::Vector::new(
adjusted_fit.width / image_size.width,
adjusted_fit.height / image_size.height,
);
let final_size = image_size * scale;
let position = match self.content_fit {
iced::ContentFit::None => iced::Point::new(
bounds.x + (image_size.width - adjusted_fit.width) / 2.0,
bounds.y + (image_size.height - adjusted_fit.height) / 2.0,
),
_ => iced::Point::new(
bounds.center_x() - final_size.width / 2.0,
bounds.center_y() - final_size.height / 2.0,
),
};
let drawing_bounds = iced::Rectangle::new(position, final_size);
let upload_frame = inner.upload_frame.swap(false, Ordering::SeqCst);
if upload_frame {
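// Measure how long ago the worker thread produced this frame; the delay feeds the audio/video offset correction in `set_av_offset`.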
let last_frame_time = inner
.last_frame_time
.lock()
.map(|time| *time)
.unwrap_or_else(|_| Instant::now());
inner.set_av_offset(Instant::now() - last_frame_time);
}
let render = |renderer: &mut Renderer| {
renderer.draw_primitive(
drawing_bounds,
VideoPrimitive::new(
inner.id,
Arc::clone(&inner.alive),
Arc::clone(&inner.frame),
(inner.width as _, inner.height as _),
upload_frame,
),
);
};
if adjusted_fit.width > bounds.width || adjusted_fit.height > bounds.height {
renderer.with_layer(bounds, render);
} else {
render(renderer);
}
}
fn update(
&mut self,
_state: &mut widget::Tree,
event: &iced::Event,
_layout: advanced::Layout<'_>,
_cursor: advanced::mouse::Cursor,
_renderer: &Renderer,
_clipboard: &mut dyn advanced::Clipboard,
shell: &mut advanced::Shell<'_, Message>,
_viewport: &iced::Rectangle,
) {
let mut inner = self.video.write();
if let iced::Event::Window(iced::window::Event::RedrawRequested(_)) = event {
if inner.restart_stream || (!inner.is_eos && !inner.paused()) {
let mut restart_stream = false;
if inner.restart_stream {
restart_stream = true;
// Set flag to false to avoid potentially multiple seeks
inner.restart_stream = false;
}
let mut eos_pause = false;
while let Some(msg) = inner
.bus
.pop_filtered(&[gst::MessageType::Error, gst::MessageType::Eos])
{
match msg.view() {
gst::MessageView::Error(err) => {
error!("bus returned an error: {err}");
if let Some(ref on_error) = self.on_error {
shell.publish(on_error(&err.error()))
};
}
gst::MessageView::Eos(_eos) => {
if let Some(on_end_of_stream) = self.on_end_of_stream.clone() {
shell.publish(on_end_of_stream);
}
if inner.looping {
restart_stream = true;
} else {
eos_pause = true;
}
}
_ => {}
}
}
// Don't run eos_pause if restart_stream is true; fixes "pausing" after restarting a stream
if restart_stream {
if let Err(err) = inner.restart_stream() {
error!("cannot restart stream (can't seek): {err:#?}");
}
} else if eos_pause {
inner.is_eos = true;
inner.set_paused(true);
}
if inner.upload_frame.load(Ordering::SeqCst) {
if let Some(on_new_frame) = self.on_new_frame.clone() {
shell.publish(on_new_frame);
}
}
if let Some(on_subtitle_text) = &self.on_subtitle_text {
if inner.upload_text.swap(false, Ordering::SeqCst) {
if let Ok(text) = inner.subtitle_text.try_lock() {
shell.publish(on_subtitle_text(text.clone()));
}
}
}
shell.request_redraw();
} else {
shell.request_redraw();
}
}
}
}
impl<'a, Message, Theme, Renderer> From<VideoPlayer<'a, Message, Theme, Renderer>>
for Element<'a, Message, Theme, Renderer>
where
Message: 'a + Clone,
Theme: 'a,
Renderer: 'a + PrimitiveRenderer,
{
fn from(video_player: VideoPlayer<'a, Message, Theme, Renderer>) -> Self {
Self::new(video_player)
}
}