Compare commits
2 Commits
3382aebb1f...5d0b795ba5

| Author | SHA1 | Date |
|---|---|---|
| | 5d0b795ba5 | |
| | ebe2312272 | |
9 Cargo.lock generated
@@ -3051,14 +3051,15 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"error-stack",
|
||||
"futures",
|
||||
"futures-lite 2.6.1",
|
||||
"glib 0.21.5",
|
||||
"gstreamer 0.24.4",
|
||||
"gstreamer-app 0.24.4",
|
||||
"gstreamer-video 0.24.4",
|
||||
"smol",
|
||||
"thiserror 2.0.17",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"wgpu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3614,10 +3615,14 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"error-stack",
|
||||
"gst",
|
||||
"iced",
|
||||
"iced_core",
|
||||
"iced_futures",
|
||||
"iced_renderer",
|
||||
"iced_wgpu",
|
||||
"thiserror 2.0.17",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -8085,6 +8090,8 @@ dependencies = [
|
||||
"gpui_util",
|
||||
"iced",
|
||||
"iced_video_player",
|
||||
"iced_wgpu",
|
||||
"iced_winit",
|
||||
"reqwest",
|
||||
"tap",
|
||||
"toml 0.9.8",
|
||||
|
||||
65 README.md Normal file
@@ -0,0 +1,65 @@
# Jello

A WIP video client for Jellyfin.

(Planned) Features

1. Integrate with Jellyfin
2. HDR video playback
3. Audio track selection
4. Chapter selection

Libraries and frameworks used for this

1. iced -> primary GUI toolkit
2. gstreamer -> primary video + audio decoding library
3. wgpu -> rendering the video from gstreamer in iced

### HDR

I'll try to document all my findings about HDR here.
I'm making this project mainly to learn about video, color spaces and GPU programming, so I'm bound to make mistakes in either the code or in my fundamental understanding of a concept. Please don't take anything in this text as absolute.

```rust
let window = ... // use winit to get a window handle, check the example in this repo
let instance = wgpu::Instance::default();
let surface = instance.create_surface(window).unwrap();
let adapter = instance
    .request_adapter(&wgpu::RequestAdapterOptions {
        power_preference: wgpu::PowerPreference::default(),
        compatible_surface: Some(&surface),
        force_fallback_adapter: false,
    })
    .await
    .context("Failed to request wgpu adapter")?;
// `get_capabilities` needs the adapter the surface will be presented with
let caps = surface.get_capabilities(&adapter);
println!("{:#?}", caps.formats);
```

This should print out all the texture formats that can be used by your current hardware.
Among these, the formats that support HDR (AFAIK) are:

```
wgpu::TextureFormat::Rgba16Float
wgpu::TextureFormat::Rgba32Float
wgpu::TextureFormat::Rgb10a2Unorm
wgpu::TextureFormat::Rgb10a2Uint // (unsure)
```

My display supports Rgb10a2Unorm, so I'll be going forward with that texture format.

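As a rough sketch of what selecting that format looks like in practice (not code from this repo — `device`, `width` and `height` are assumed to come from the usual wgpu setup, and the exact `SurfaceConfiguration` fields vary a little between wgpu versions):

```rust
// Configure the surface with the HDR-capable format instead of the usual Rgba8 variant.
let config = wgpu::SurfaceConfiguration {
    usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
    format: wgpu::TextureFormat::Rgb10a2Unorm,
    width,
    height,
    present_mode: wgpu::PresentMode::AutoVsync,
    alpha_mode: wgpu::CompositeAlphaMode::Auto,
    view_formats: vec![],
    desired_maximum_frame_latency: 2,
};
surface.configure(&device, &config);
```
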
A `Rgb10a2Unorm` pixel is still the same size as a `Rgba8Unorm` pixel, but the data is laid out differently in each of them.

`Rgb10a2Unorm`:
R, G, B => 10 bits each (2^10 = 1024, [0..=1023])
A => 2 bits (2^2 = 4, [0..=3])

Whereas in a normal pixel,
`Rgba8Unorm`:
R, G, B, A => 8 bits each (2^8 = 256, [0..=255])

For displaying video the alpha component is not really needed (I don't know of any use for it), so we can re-allocate the 6 "spare" bits from the alpha channel and give them to the R, G and B components (2 extra bits each).
In the shader the components get uniformly normalized from the [0..=1023] integers to [0..=1] floats so we can compute with them properly.

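A small illustrative sketch of that 10/10/10/2 split (not code from this repo, and the actual channel order within the 32-bit word depends on the backend's native format — this only shows the bit budget and the UNORM normalization):

```rust
// Pack three 10-bit channels and a 2-bit alpha into one 32-bit Rgb10a2-style pixel.
fn pack_rgb10a2(r: u16, g: u16, b: u16, a: u8) -> u32 {
    // r, g, b in 0..=1023 (10 bits each), a in 0..=3 (2 bits).
    ((a as u32 & 0x3) << 30)
        | ((b as u32 & 0x3FF) << 20)
        | ((g as u32 & 0x3FF) << 10)
        | (r as u32 & 0x3FF)
}

// What sampling a UNORM format does conceptually: integer [0, 1023] -> float [0.0, 1.0].
fn unorm10_to_f32(channel: u32) -> f32 {
    (channel & 0x3FF) as f32 / 1023.0
}
```
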
Videos, however, are generally not stored in this format (or any RGB format, for that matter) because it is not as efficient for (lossy) compression as the YUV formats.

Right now I don't want to deal with YUV formats, so I'll use GStreamer caps to convert the video into the `Rgb10a2` format.
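A minimal sketch of such a caps filter, using the upstream `gstreamer` crate directly rather than this repo's `gst` wrapper (GStreamer's name for the 10-bit format is `RGB10A2_LE`; the code in this diff currently asks for plain `RGBA`):

```rust
use gstreamer as gst;

fn rgb10a2_caps() -> gst::Caps {
    // Caps that ask videoconvert/appsink to hand over 10-bit RGB frames.
    gst::Caps::builder("video/x-raw")
        .field("format", "RGB10A2_LE")
        .build()
}

fn main() {
    gst::init().expect("failed to initialise GStreamer");
    println!("{}", rgb10a2_caps());
}
```
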
@@ -7,6 +7,12 @@ edition = "2024"
|
||||
error-stack = "0.6.0"
|
||||
gst.workspace = true
|
||||
iced_core = "0.14.0"
|
||||
iced_futures = "0.14.0"
|
||||
iced_renderer = { version = "0.14.0", features = ["iced_wgpu"] }
|
||||
iced_wgpu = "0.14.0"
|
||||
thiserror = "2.0.17"
|
||||
tracing = "0.1.43"
|
||||
|
||||
[dev-dependencies]
|
||||
iced.workspace = true
|
||||
tracing-subscriber = { version = "0.3.22", features = ["env-filter"] }
|
||||
|
||||
77 crates/iced-video/examples/minimal.rs Normal file
@@ -0,0 +1,77 @@
|
||||
use iced_video::{Video, VideoHandle};
|
||||
|
||||
pub fn main() -> iced::Result {
|
||||
use tracing_subscriber::prelude::*;
|
||||
tracing_subscriber::registry()
|
||||
.with(
|
||||
tracing_subscriber::fmt::layer()
|
||||
.with_thread_ids(true)
|
||||
.with_file(true),
|
||||
)
|
||||
.with(tracing_subscriber::EnvFilter::from_default_env())
|
||||
.init();
|
||||
iced::application(State::new, update, view).run()
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct State {
|
||||
video: VideoHandle,
|
||||
}
|
||||
|
||||
impl State {
|
||||
pub fn new() -> Self {
|
||||
let video = VideoHandle::new("https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c")
|
||||
.expect("Failed to create video handle");
|
||||
Self { video }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum Message {
|
||||
Play,
|
||||
Pause,
|
||||
Loaded,
|
||||
Load,
|
||||
}
|
||||
|
||||
pub fn update(state: &mut State, message: Message) -> iced::Task<Message> {
|
||||
match message {
|
||||
Message::Load => {
|
||||
// does stuff
|
||||
let src = state.video.source().clone();
|
||||
iced::Task::perform(src.wait(), |_| Message::Loaded)
|
||||
}
|
||||
Message::Play => {
|
||||
state.video.source().play().expect("Failed to play video");
|
||||
iced::Task::none()
|
||||
}
|
||||
Message::Pause => {
|
||||
state.video.source().pause().expect("Failed to pause video");
|
||||
iced::Task::none()
|
||||
}
|
||||
Message::Loaded => {
|
||||
// Video loaded
|
||||
iced::Task::none()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn view<'a>(state: &'a State) -> iced::Element<'a, Message> {
|
||||
let video_widget = Video::new(&state.video)
|
||||
.width(iced::Length::Fill)
|
||||
.height(iced::Length::Fill)
|
||||
.content_fit(iced::ContentFit::Contain);
|
||||
|
||||
iced::widget::Column::new()
|
||||
.push(video_widget)
|
||||
.push(
|
||||
iced::widget::Row::new()
|
||||
.push(iced::widget::Button::new("Play").on_press(Message::Play))
|
||||
.push(iced::widget::Button::new("Pause").on_press(Message::Pause))
|
||||
.spacing(5)
|
||||
.padding(10)
|
||||
.align_y(iced::Alignment::Center),
|
||||
)
|
||||
.align_x(iced::Alignment::Center)
|
||||
.into()
|
||||
}
|
||||
55 crates/iced-video/src/id.rs Normal file
@@ -0,0 +1,55 @@
|
||||
use std::borrow;
|
||||
use std::sync::atomic::{self, AtomicUsize};
|
||||
|
||||
static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
|
||||
|
||||
/// The identifier of a generic widget.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct Id(Internal);
|
||||
|
||||
impl Id {
|
||||
/// Creates a new [`Id`] from a static `str`.
|
||||
pub const fn new(id: &'static str) -> Self {
|
||||
Self(Internal::Custom(borrow::Cow::Borrowed(id)))
|
||||
}
|
||||
|
||||
/// Creates a unique [`Id`].
|
||||
///
|
||||
/// This function produces a different [`Id`] every time it is called.
|
||||
pub fn unique() -> Self {
|
||||
let id = NEXT_ID.fetch_add(1, atomic::Ordering::Relaxed);
|
||||
|
||||
Self(Internal::Unique(id))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&'static str> for Id {
|
||||
fn from(value: &'static str) -> Self {
|
||||
Self::new(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for Id {
|
||||
fn from(value: String) -> Self {
|
||||
Self(Internal::Custom(borrow::Cow::Owned(value)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
enum Internal {
|
||||
Unique(usize),
|
||||
Custom(borrow::Cow<'static, str>),
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::Id;
|
||||
|
||||
#[test]
|
||||
fn unique_generates_different_ids() {
|
||||
let a = Id::unique();
|
||||
let b = Id::unique();
|
||||
|
||||
assert_ne!(a, b);
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,12 @@
|
||||
pub mod id;
|
||||
pub mod primitive;
|
||||
pub mod source;
|
||||
use iced_core as iced;
|
||||
use iced_renderer::Renderer as RendererWithFallback;
|
||||
use iced_wgpu::primitive::Renderer as PrimitiveRenderer;
|
||||
|
||||
use error_stack::{Report, ResultExt};
|
||||
use gst::*;
|
||||
use iced_core::Length;
|
||||
use iced::Length;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use gst::plugins::app::AppSink;
|
||||
@@ -16,16 +19,21 @@ pub struct Error;
|
||||
pub type Result<T, E = Report<Error>> = core::result::Result<T, E>;
|
||||
|
||||
use std::sync::{Arc, Mutex, atomic::AtomicBool};
|
||||
pub struct Video {
|
||||
id: iced_core::Id,
|
||||
|
||||
/// This is the video handle that is used to control the video playback.
|
||||
/// This should be kept in the application state.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct VideoHandle {
|
||||
id: id::Id,
|
||||
source: source::VideoSource,
|
||||
is_metadata_loaded: Arc<AtomicBool>,
|
||||
is_playing: Arc<AtomicBool>,
|
||||
is_eos: Arc<AtomicBool>,
|
||||
texture: Mutex<Option<iced_wgpu::wgpu::TextureView>>,
|
||||
frame_ready: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl Video {
|
||||
pub fn id(&self) -> &iced_core::Id {
|
||||
impl VideoHandle {
|
||||
pub fn id(&self) -> &id::Id {
|
||||
&self.id
|
||||
}
|
||||
|
||||
@@ -33,91 +41,212 @@ impl Video {
|
||||
&self.source
|
||||
}
|
||||
|
||||
pub async fn new(url: impl AsRef<str>) -> Result<Self> {
|
||||
pub fn new(url: impl AsRef<str>) -> Result<Self> {
|
||||
let source = source::VideoSource::new(url)?;
|
||||
let frame_ready = Arc::clone(&source.ready);
|
||||
Ok(Self {
|
||||
id: iced_core::Id::unique(),
|
||||
source: source::VideoSource::new(url)?,
|
||||
id: id::Id::unique(),
|
||||
source: source,
|
||||
is_metadata_loaded: Arc::new(AtomicBool::new(false)),
|
||||
is_playing: Arc::new(AtomicBool::new(false)),
|
||||
is_eos: Arc::new(AtomicBool::new(false)),
|
||||
texture: Mutex::new(None),
|
||||
frame_ready,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VideoPlayer<'a, Message, Theme = iced_core::Theme, Renderer = iced_wgpu::Renderer>
|
||||
/// This is the Video widget that displays a video.
|
||||
/// This should be used in the view function.
|
||||
pub struct Video<'a, Message, Theme = iced::Theme, Renderer = iced_wgpu::Renderer>
|
||||
where
|
||||
Renderer: PrimitiveRenderer,
|
||||
{
|
||||
videos: &'a Video,
|
||||
content_fit: iced_core::ContentFit,
|
||||
width: iced_core::Length,
|
||||
height: iced_core::Length,
|
||||
id: id::Id,
|
||||
handle: &'a VideoHandle,
|
||||
content_fit: iced::ContentFit,
|
||||
width: iced::Length,
|
||||
height: iced::Length,
|
||||
on_end_of_stream: Option<Message>,
|
||||
on_new_frame: Option<Message>,
|
||||
looping: bool,
|
||||
// on_subtitle_text: Option<Box<dyn Fn(Option<String>) -> Message + 'a>>,
|
||||
// on_error: Option<Box<dyn Fn(&glib::Error) -> Message + 'a>>,
|
||||
theme: Theme,
|
||||
__marker: PhantomData<Renderer>,
|
||||
// theme: Theme,
|
||||
__marker: PhantomData<(Renderer, Theme)>,
|
||||
}
|
||||
|
||||
impl<Message, Theme, Renderer> VideoPlayer<Message, Theme, Renderer>
|
||||
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
|
||||
where
|
||||
Renderer: PrimitiveRenderer,
|
||||
{
|
||||
pub fn new(source: source::VideoSource) -> Self {
|
||||
pub fn new(handle: &'a VideoHandle) -> Self {
|
||||
Self {
|
||||
videos: Video {
|
||||
id: iced_core::Id::unique(),
|
||||
source,
|
||||
is_playing: Arc::new(AtomicBool::new(false)),
|
||||
is_eos: Arc::new(AtomicBool::new(false)),
|
||||
texture: Mutex::new(None),
|
||||
},
|
||||
content_fit: iced_core::ContentFit::Contain,
|
||||
id: handle.id.clone(),
|
||||
handle: &handle,
|
||||
content_fit: iced::ContentFit::Contain,
|
||||
width: Length::Shrink,
|
||||
height: Length::Shrink,
|
||||
on_end_of_stream: None,
|
||||
on_new_frame: None,
|
||||
looping: false,
|
||||
theme: Theme::default(),
|
||||
// theme: Theme::default(),
|
||||
__marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Message, Theme, Renderer> iced_core::Widget<Message, Theme, Renderer>
|
||||
for VideoPlayer<'_, Message, Theme, Renderer>
|
||||
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
|
||||
where
|
||||
Renderer: PrimitiveRenderer,
|
||||
{
|
||||
pub fn width(mut self, width: Length) -> Self {
|
||||
self.width = width;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn height(mut self, height: Length) -> Self {
|
||||
self.height = height;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn content_fit(mut self, fit: iced::ContentFit) -> Self {
|
||||
self.content_fit = fit;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_end_of_stream(mut self, message: Message) -> Self {
|
||||
self.on_end_of_stream = Some(message);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_new_frame(mut self, message: Message) -> Self {
|
||||
self.on_new_frame = Some(message);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn looping(mut self, looping: bool) -> Self {
|
||||
self.looping = looping;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<Message, Theme, Renderer> iced::Widget<Message, Theme, Renderer>
|
||||
for Video<'_, Message, Theme, Renderer>
|
||||
where
|
||||
Message: Clone,
|
||||
Renderer: PrimitiveRenderer,
|
||||
{
|
||||
fn size(&self) -> iced_core::Size<Length> {
|
||||
iced_core::Size {
|
||||
fn size(&self) -> iced::Size<Length> {
|
||||
iced::Size {
|
||||
width: self.width,
|
||||
height: self.height,
|
||||
}
|
||||
}
|
||||
|
||||
// The video player should take max space by default
|
||||
fn layout(
|
||||
&mut self,
|
||||
iced_core::widget::tree: &mut iced_core::widget::Tree,
|
||||
iced_core::renderer: &Renderer,
|
||||
limits: &iced_core::layout::Limits,
|
||||
) -> iced_core::layout::Node {
|
||||
todo!()
|
||||
_tree: &mut iced::widget::Tree,
|
||||
_renderer: &Renderer,
|
||||
limits: &iced::layout::Limits,
|
||||
) -> iced::layout::Node {
|
||||
iced::layout::Node::new(limits.max())
|
||||
}
|
||||
|
||||
fn draw(
|
||||
&self,
|
||||
iced_core::widget::tree: &iced_core::widget::Tree,
|
||||
iced_core::renderer: &mut Renderer,
|
||||
tree: &iced::widget::Tree,
|
||||
renderer: &mut Renderer,
|
||||
theme: &Theme,
|
||||
style: &iced_core::renderer::Style,
|
||||
iced_core::layout: iced_core::Layout<'_>,
|
||||
cursor: iced_core::mouse::Cursor,
|
||||
viewport: &iced_core::Rectangle,
|
||||
style: &iced::renderer::Style,
|
||||
layout: iced::Layout<'_>,
|
||||
cursor: iced::mouse::Cursor,
|
||||
viewport: &iced::Rectangle,
|
||||
) {
|
||||
todo!()
|
||||
if let Ok((width, height)) = self.handle.source.size() {
|
||||
let video_size = iced::Size {
|
||||
width: width as f32,
|
||||
height: height as f32,
|
||||
};
|
||||
let bounds = layout.bounds();
|
||||
let adjusted_fit = self.content_fit.fit(video_size, bounds.size());
|
||||
let scale = iced::Vector::new(
|
||||
adjusted_fit.width / video_size.width,
|
||||
adjusted_fit.height / video_size.height,
|
||||
);
|
||||
let final_size = video_size * scale;
|
||||
let position = match self.content_fit {
|
||||
iced::ContentFit::None => iced::Point::new(
|
||||
bounds.x + (video_size.width - adjusted_fit.width) / 2.0,
|
||||
bounds.y + (video_size.height - adjusted_fit.height) / 2.0,
|
||||
),
|
||||
_ => iced::Point::new(
|
||||
bounds.center_x() - final_size.width / 2.0,
|
||||
bounds.center_y() - final_size.height / 2.0,
|
||||
),
|
||||
};
|
||||
|
||||
let drawing_bounds = iced::Rectangle::new(position, final_size);
|
||||
|
||||
let render = |renderer: &mut Renderer| {
|
||||
renderer.draw_primitive(
|
||||
drawing_bounds,
|
||||
primitive::VideoFrame {
|
||||
id: self.id.clone(),
|
||||
size: iced_wgpu::wgpu::Extent3d {
|
||||
width: width as u32,
|
||||
height: height as u32,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
ready: Arc::clone(&self.handle.frame_ready),
|
||||
frame: Arc::clone(&self.handle.source.frame),
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
if adjusted_fit.width > bounds.width || adjusted_fit.height > bounds.height {
|
||||
renderer.with_layer(bounds, render);
|
||||
} else {
|
||||
render(renderer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn update(
|
||||
&mut self,
|
||||
_tree: &mut iced_core::widget::Tree,
|
||||
event: &iced::Event,
|
||||
_layout: iced_core::Layout<'_>,
|
||||
_cursor: iced_core::mouse::Cursor,
|
||||
_renderer: &Renderer,
|
||||
_clipboard: &mut dyn iced_core::Clipboard,
|
||||
shell: &mut iced_core::Shell<'_, Message>,
|
||||
_viewport: &iced::Rectangle,
|
||||
) {
|
||||
if let iced::Event::Window(iced::window::Event::RedrawRequested(_)) = event {
|
||||
if self
|
||||
.handle
|
||||
.frame_ready
|
||||
.load(std::sync::atomic::Ordering::SeqCst)
|
||||
{
|
||||
shell.request_redraw();
|
||||
} else {
|
||||
shell.request_redraw_at(iced::window::RedrawRequest::At(
|
||||
iced_core::time::Instant::now() + core::time::Duration::from_millis(32),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, Message, Theme, Renderer> From<Video<'a, Message, Theme, Renderer>>
|
||||
for iced::Element<'a, Message, Theme, Renderer>
|
||||
where
|
||||
Message: 'a + Clone,
|
||||
Theme: 'a,
|
||||
Renderer: 'a + iced_wgpu::primitive::Renderer,
|
||||
{
|
||||
fn from(video: Video<'a, Message, Theme, Renderer>) -> Self {
|
||||
Self::new(video)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
use crate::id;
|
||||
use iced_wgpu::primitive::Pipeline;
|
||||
use iced_wgpu::wgpu;
|
||||
use std::collections::BTreeMap;
|
||||
use std::sync::{Arc, atomic::AtomicBool};
|
||||
use std::sync::{Arc, Mutex, atomic::AtomicBool};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct VideoPrimitive {
|
||||
texture: wgpu::TextureView,
|
||||
ready: Arc<AtomicBool>,
|
||||
pub struct VideoFrame {
|
||||
pub id: id::Id,
|
||||
pub size: wgpu::Extent3d,
|
||||
pub ready: Arc<AtomicBool>,
|
||||
pub frame: Arc<Mutex<Vec<u8>>>,
|
||||
}
|
||||
impl iced_wgpu::Primitive for VideoPrimitive {
|
||||
|
||||
impl iced_wgpu::Primitive for VideoFrame {
|
||||
type Pipeline = VideoPipeline;
|
||||
|
||||
fn prepare(
|
||||
@@ -19,11 +23,102 @@ impl iced_wgpu::Primitive for VideoPrimitive {
|
||||
bounds: &iced_wgpu::core::Rectangle,
|
||||
viewport: &iced_wgpu::graphics::Viewport,
|
||||
) {
|
||||
todo!()
|
||||
let video = pipeline.videos.entry(self.id.clone()).or_insert_with(|| {
|
||||
let texture = device.create_texture(&wgpu::TextureDescriptor {
|
||||
label: Some("iced-video-texture"),
|
||||
size: self.size,
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: wgpu::TextureDimension::D2,
|
||||
format: pipeline.format,
|
||||
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
|
||||
view_formats: &[],
|
||||
});
|
||||
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
|
||||
label: Some("iced-video-texture-bind-group"),
|
||||
layout: &pipeline.bind_group_layout,
|
||||
entries: &[
|
||||
wgpu::BindGroupEntry {
|
||||
binding: 0,
|
||||
resource: wgpu::BindingResource::TextureView(
|
||||
&texture.create_view(&wgpu::TextureViewDescriptor::default()),
|
||||
),
|
||||
},
|
||||
wgpu::BindGroupEntry {
|
||||
binding: 1,
|
||||
resource: wgpu::BindingResource::Sampler(&pipeline.sampler),
|
||||
},
|
||||
],
|
||||
});
|
||||
VideoTextures {
|
||||
id: self.id.clone(),
|
||||
texture,
|
||||
bind_group,
|
||||
ready: Arc::clone(&self.ready),
|
||||
}
|
||||
});
|
||||
// dbg!(&self.size, video.texture.size());
|
||||
if self.size != video.texture.size() {
|
||||
// Resize the texture if the size has changed.
|
||||
let new_texture = device.create_texture(&wgpu::TextureDescriptor {
|
||||
label: Some("iced-video-texture-resized"),
|
||||
size: self.size,
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: wgpu::TextureDimension::D2,
|
||||
format: pipeline.format,
|
||||
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
|
||||
view_formats: &[],
|
||||
});
|
||||
let new_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
|
||||
label: Some("iced-video-texture-bind-group-resized"),
|
||||
layout: &pipeline.bind_group_layout,
|
||||
entries: &[
|
||||
wgpu::BindGroupEntry {
|
||||
binding: 0,
|
||||
resource: wgpu::BindingResource::TextureView(
|
||||
&new_texture.create_view(&wgpu::TextureViewDescriptor::default()),
|
||||
),
|
||||
},
|
||||
wgpu::BindGroupEntry {
|
||||
binding: 1,
|
||||
resource: wgpu::BindingResource::Sampler(&pipeline.sampler),
|
||||
},
|
||||
],
|
||||
});
|
||||
video.texture = new_texture;
|
||||
video.bind_group = new_bind_group;
|
||||
}
|
||||
// BUG: This causes a panic because the texture size is not correct for some reason.
|
||||
if video.ready.load(std::sync::atomic::Ordering::SeqCst) {
|
||||
let frame = self.frame.lock().expect("BUG: Mutex poisoned");
|
||||
if frame.len() != (4 * self.size.width * self.size.height) as usize {
|
||||
tracing::warn!(
|
||||
"Frame size mismatch: expected {}, got {}",
|
||||
4 * self.size.width * self.size.height,
|
||||
frame.len()
|
||||
);
|
||||
return;
|
||||
}
|
||||
queue.write_texture(
|
||||
wgpu::TexelCopyTextureInfo {
|
||||
texture: &video.texture,
|
||||
mip_level: 0,
|
||||
origin: wgpu::Origin3d::ZERO,
|
||||
aspect: wgpu::TextureAspect::All,
|
||||
},
|
||||
&frame,
|
||||
wgpu::TexelCopyBufferLayout {
|
||||
offset: 0,
|
||||
bytes_per_row: Some(4 * video.texture.size().width),
|
||||
rows_per_image: Some(video.texture.size().height),
|
||||
},
|
||||
self.size,
|
||||
);
|
||||
video
|
||||
.ready
|
||||
.store(false, std::sync::atomic::Ordering::SeqCst);
|
||||
}
|
||||
|
||||
fn draw(&self, _pipeline: &Self::Pipeline, _render_pass: &mut wgpu::RenderPass<'_>) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn render(
|
||||
@@ -33,14 +128,21 @@ impl iced_wgpu::Primitive for VideoPrimitive {
|
||||
target: &wgpu::TextureView,
|
||||
clip_bounds: &iced_wgpu::core::Rectangle<u32>,
|
||||
) {
|
||||
if self.ready.load(std::sync::atomic::Ordering::SeqCst) {
|
||||
let Some(video) = pipeline.videos.get(&self.id) else {
|
||||
return;
|
||||
};
|
||||
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
|
||||
label: Some("iced-video-render-pass"),
|
||||
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
|
||||
view: target,
|
||||
resolve_target: None,
|
||||
ops: wgpu::Operations {
|
||||
load: wgpu::LoadOp::Load,
|
||||
load: wgpu::LoadOp::Clear(wgpu::Color {
|
||||
r: 0.1,
|
||||
g: 0.2,
|
||||
b: 0.3,
|
||||
a: 1.0,
|
||||
}),
|
||||
store: wgpu::StoreOp::Store,
|
||||
},
|
||||
depth_slice: None,
|
||||
@@ -51,17 +153,16 @@ impl iced_wgpu::Primitive for VideoPrimitive {
|
||||
});
|
||||
|
||||
render_pass.set_pipeline(&pipeline.pipeline);
|
||||
render_pass.set_bind_group(0, &self.bind_group, &[]);
|
||||
render_pass.set_bind_group(0, &video.bind_group, &[]);
|
||||
render_pass.draw(0..3, 0..1);
|
||||
self.ready
|
||||
.store(false, std::sync::atomic::Ordering::Relaxed);
|
||||
}
|
||||
// self.ready
|
||||
// .store(false, std::sync::atomic::Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct VideoTextures {
|
||||
id: u64,
|
||||
id: id::Id,
|
||||
texture: wgpu::Texture,
|
||||
bind_group: wgpu::BindGroup,
|
||||
ready: Arc<AtomicBool>,
|
||||
@@ -72,7 +173,8 @@ pub struct VideoPipeline {
|
||||
pipeline: wgpu::RenderPipeline,
|
||||
bind_group_layout: wgpu::BindGroupLayout,
|
||||
sampler: wgpu::Sampler,
|
||||
videos: BTreeMap<u64, VideoTextures>,
|
||||
videos: BTreeMap<id::Id, VideoTextures>,
|
||||
format: wgpu::TextureFormat,
|
||||
}
|
||||
|
||||
pub trait HdrTextureFormatExt {
|
||||
@@ -171,6 +273,7 @@ impl Pipeline for VideoPipeline {
|
||||
pipeline,
|
||||
bind_group_layout,
|
||||
sampler,
|
||||
format,
|
||||
videos: BTreeMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,44 +1,107 @@
|
||||
use crate::{Error, Result, ResultExt};
|
||||
use gst::{
|
||||
Bus, Gst, MessageType, MessageView, Sink, Source,
|
||||
app::AppSink,
|
||||
caps::{Caps, CapsType},
|
||||
element::ElementExt,
|
||||
pipeline::PipelineExt,
|
||||
playback::Playbin3,
|
||||
videoconvertscale::VideoConvert,
|
||||
};
|
||||
use std::sync::{Arc, Mutex, atomic::AtomicBool};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct VideoSource {
|
||||
playbin: Playbin3,
|
||||
videoconvert: VideoConvert,
|
||||
appsink: AppSink,
|
||||
bus: Bus,
|
||||
pub(crate) playbin: Playbin3,
|
||||
pub(crate) videoconvert: VideoConvert,
|
||||
pub(crate) appsink: AppSink,
|
||||
pub(crate) bus: Bus,
|
||||
pub(crate) ready: Arc<AtomicBool>,
|
||||
pub(crate) frame: Arc<Mutex<Vec<u8>>>,
|
||||
}
|
||||
|
||||
impl VideoSource {
|
||||
/// Creates a new video source from the given URL.
|
||||
/// Since this doesn't have to parse the pipeline manually, we aren't sanitizing the URL for
|
||||
/// now.
|
||||
pub async fn new(url: impl AsRef<str>) -> Result<Self> {
|
||||
pub fn new(url: impl AsRef<str>) -> Result<Self> {
|
||||
Gst::new();
|
||||
let videoconvert = VideoConvert::new("iced-video-convert").change_context(Error)?;
|
||||
let appsink = AppSink::new("iced-video-sink").change_context(Error)?;
|
||||
let videoconvert = VideoConvert::new("iced-video-convert")
|
||||
// .change_context(Error)?
|
||||
// .with_output_format(gst::plugins::videoconvertscale::VideoFormat::Rgba)
|
||||
.change_context(Error)?;
|
||||
let appsink = AppSink::new("iced-video-sink")
|
||||
.change_context(Error)?
|
||||
.with_caps(
|
||||
Caps::builder(CapsType::Video)
|
||||
.field("format", "RGBA")
|
||||
.build(),
|
||||
);
|
||||
let video_sink = videoconvert.link(&appsink).change_context(Error)?;
|
||||
let playbin = gst::plugins::playback::Playbin3::new("iced-video")
|
||||
.change_context(Error)?
|
||||
.with_uri(url.as_ref())
|
||||
.with_video_sink(&video_sink);
|
||||
let bus = playbin.bus().change_context(Error)?;
|
||||
playbin.wait_ready()?;
|
||||
// let bus_stream = bus.stream();
|
||||
// bus_stream.find(|message| {
|
||||
// let view = message.view();
|
||||
// if let gst::MessageView::StateChanged(change) = view {
|
||||
// change.current() == gst::State::Ready
|
||||
playbin.pause().change_context(Error)?;
|
||||
let ready = Arc::new(AtomicBool::new(false));
|
||||
let frame = Arc::new(Mutex::new(Vec::new()));
|
||||
|
||||
let appsink = appsink.on_new_frame({
|
||||
let ready = Arc::clone(&ready);
|
||||
let frame = Arc::clone(&frame);
|
||||
move |appsink| {
|
||||
let Ok(sample) = appsink.pull_sample() else {
|
||||
return Ok(());
|
||||
};
|
||||
let caps = sample.caps().ok_or(gst::gstreamer::FlowError::Error)?;
|
||||
let structure_0 = caps.structure(0).ok_or(gst::gstreamer::FlowError::Error)?;
|
||||
let width = structure_0
|
||||
.get::<i32>("width")
|
||||
.map_err(|_| gst::gstreamer::FlowError::Error)?;
|
||||
let height = structure_0
|
||||
.get::<i32>("height")
|
||||
.map_err(|_| gst::gstreamer::FlowError::Error)?;
|
||||
|
||||
let buffer = sample.buffer().and_then(|b| b.map_readable().ok());
|
||||
if let Some(buffer) = buffer {
|
||||
{
|
||||
let mut frame = frame.lock().expect("BUG: Mutex poisoned");
|
||||
debug_assert_eq!(buffer.size(), (width * height * 4) as usize);
|
||||
if frame.len() != buffer.size() {
|
||||
frame.resize(buffer.size(), 0);
|
||||
}
|
||||
frame.copy_from_slice(buffer.as_slice());
|
||||
ready.store(true, std::sync::atomic::Ordering::Relaxed);
|
||||
}
|
||||
// if written.is_err() {
|
||||
// tracing::error!("Failed to write video frame to buffer");
|
||||
// } else {
|
||||
// false
|
||||
// ready.store(true, std::sync::atomic::Ordering::Relaxed);
|
||||
// }
|
||||
// });
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Self {
|
||||
playbin,
|
||||
videoconvert,
|
||||
appsink,
|
||||
bus,
|
||||
ready,
|
||||
frame,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn wait(self) -> Result<()> {
|
||||
self.playbin
|
||||
.wait_for_states(&[gst::State::Paused, gst::State::Playing])
|
||||
.await
|
||||
.change_context(Error)
|
||||
.attach("Failed to wait for video initialisation")
|
||||
}
|
||||
|
||||
pub fn play(&self) -> Result<()> {
|
||||
self.playbin
|
||||
.play()
|
||||
@@ -53,12 +116,15 @@ impl VideoSource {
|
||||
.attach("Failed to pause video")
|
||||
}
|
||||
|
||||
pub fn bus(&self) -> &Bus {}
|
||||
// pub fn copy_frame_to_texture(&self, texture: wgpu::TextureView) -> Result<()> {
|
||||
// let frame = self
|
||||
// .appsink
|
||||
// .try_pull_sample(core::time::Duration::from_millis(1))?
|
||||
// .ok_or(Error)
|
||||
// .attach("No video frame available")?;
|
||||
// }
|
||||
pub fn size(&self) -> Result<(i32, i32)> {
|
||||
let caps = self
|
||||
.appsink
|
||||
.sink("sink")
|
||||
.current_caps()
|
||||
.change_context(Error)?;
|
||||
caps.width()
|
||||
.and_then(|width| caps.height().map(|height| (width, height)))
|
||||
.ok_or(Error)
|
||||
.attach("Failed to get width, height")
|
||||
}
|
||||
}
|
||||
|
||||
24 flake.lock generated
@@ -3,11 +3,11 @@
|
||||
"advisory-db": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1765811277,
|
||||
"narHash": "sha256-QF/aUvQwJG/ndoRZCjb+d7xASs0ELCmpqpK8u6Se2f4=",
|
||||
"lastModified": 1766435619,
|
||||
"narHash": "sha256-3A5Z5K28YB45REOHMWtyQ24cEUXW76MOtbT6abPrARE=",
|
||||
"owner": "rustsec",
|
||||
"repo": "advisory-db",
|
||||
"rev": "2d254c1fad2260522209e9bce2fdc93012b0627f",
|
||||
"rev": "a98dbc80b16730a64c612c6ab5d5fecb4ebb79ba",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -18,11 +18,11 @@
|
||||
},
|
||||
"crane": {
|
||||
"locked": {
|
||||
"lastModified": 1765739568,
|
||||
"narHash": "sha256-gQYx35Of4UDKUjAYvmxjUEh/DdszYeTtT6MDin4loGE=",
|
||||
"lastModified": 1766194365,
|
||||
"narHash": "sha256-4AFsUZ0kl6MXSm4BaQgItD0VGlEKR3iq7gIaL7TjBvc=",
|
||||
"owner": "ipetkov",
|
||||
"repo": "crane",
|
||||
"rev": "67d2baff0f9f677af35db61b32b5df6863bcc075",
|
||||
"rev": "7d8ec2c71771937ab99790b45e6d9b93d15d9379",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -106,11 +106,11 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1765779637,
|
||||
"narHash": "sha256-KJ2wa/BLSrTqDjbfyNx70ov/HdgNBCBBSQP3BIzKnv4=",
|
||||
"lastModified": 1766309749,
|
||||
"narHash": "sha256-3xY8CZ4rSnQ0NqGhMKAy5vgC+2IVK0NoVEzDoOh4DA4=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "1306659b587dc277866c7b69eb97e5f07864d8c4",
|
||||
"rev": "a6531044f6d0bef691ea18d4d4ce44d0daa6e816",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -138,11 +138,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1765852971,
|
||||
"narHash": "sha256-rQdOMqfQNhcfqvh1dFIVWh09mrIWwerUJqqBdhIsf8g=",
|
||||
"lastModified": 1766371695,
|
||||
"narHash": "sha256-W7CX9vy7H2Jj3E8NI4djHyF8iHSxKpb2c/7uNQ/vGFU=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "5f98ccecc9f1bc1c19c0a350a659af1a04b3b319",
|
||||
"rev": "d81285ba8199b00dc31847258cae3c655b605e8c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
||||
@@ -195,12 +195,13 @@
|
||||
cargo-outdated
|
||||
lld
|
||||
lldb
|
||||
cargo-flamegraph
|
||||
cargo-audit
|
||||
]
|
||||
++ (lib.optionals pkgs.stdenv.isDarwin [
|
||||
apple-sdk_26
|
||||
])
|
||||
++ (lib.optionals pkgs.stdenv.isLinux [
|
||||
cargo-flamegraph
|
||||
perf
|
||||
mold
|
||||
]);
|
||||
|
||||
@@ -8,13 +8,14 @@ edition = "2024"
|
||||
[dependencies]
|
||||
error-stack = "0.6"
|
||||
futures = "0.3.31"
|
||||
futures-lite = "2.6.1"
|
||||
glib = "0.21.5"
|
||||
gstreamer = { version = "0.24.4", features = ["v1_18"] }
|
||||
gstreamer-app = { version = "0.24.4", features = ["v1_18"] }
|
||||
gstreamer-video = { version = "0.24.4", features = ["v1_18"] }
|
||||
thiserror = "2.0"
|
||||
tracing = { version = "0.1", features = ["log"] }
|
||||
wgpu = { version = "27.0.1", default-features = false }
|
||||
|
||||
[dev-dependencies]
|
||||
smol = "2.0.2"
|
||||
tracing-subscriber = "0.3.22"
|
||||
|
||||
@@ -17,4 +17,11 @@ impl Bus {
|
||||
pub fn stream(&self) -> gstreamer::bus::BusStream {
|
||||
self.inner.stream()
|
||||
}
|
||||
|
||||
pub fn filtered_stream<'a>(
|
||||
&self,
|
||||
msg_types: &'a [gstreamer::MessageType],
|
||||
) -> impl futures::stream::FusedStream<Item = gstreamer::Message> + Unpin + Send + 'a {
|
||||
self.inner.stream_filtered(msg_types)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
use crate::*;
|
||||
use gstreamer::Fraction;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[repr(transparent)]
|
||||
pub struct Caps {
|
||||
pub(crate) inner: gstreamer::caps::Caps,
|
||||
@@ -16,7 +18,6 @@ pub struct CapsBuilder {
|
||||
|
||||
impl CapsBuilder {
|
||||
pub fn field<V: Into<glib::Value> + Send>(mut self, name: impl AsRef<str>, value: V) -> Self {
|
||||
use gstreamer::prelude::*;
|
||||
self.inner = self.inner.field(name.as_ref(), value);
|
||||
self
|
||||
}
|
||||
@@ -53,10 +54,25 @@ impl CapsBuilder {
|
||||
}
|
||||
|
||||
impl Caps {
|
||||
pub fn format(&self) -> Option<&str> {
|
||||
use gstreamer::prelude::*;
|
||||
pub fn format(&self) -> Option<gstreamer_video::VideoFormat> {
|
||||
self.inner
|
||||
.structure(0)
|
||||
.and_then(|s| s.get::<&str>("format").ok())
|
||||
.map(|s| gstreamer_video::VideoFormat::from_string(s))
|
||||
}
|
||||
pub fn width(&self) -> Option<i32> {
|
||||
self.inner
|
||||
.structure(0)
|
||||
.and_then(|s| s.get::<i32>("width").ok())
|
||||
}
|
||||
pub fn height(&self) -> Option<i32> {
|
||||
self.inner
|
||||
.structure(0)
|
||||
.and_then(|s| s.get::<i32>("height").ok())
|
||||
}
|
||||
pub fn framerate(&self) -> Option<gstreamer::Fraction> {
|
||||
self.inner
|
||||
.structure(0)
|
||||
.and_then(|s| s.get::<Fraction>("framerate").ok())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,6 +27,15 @@ impl Element {
|
||||
use gstreamer::prelude::*;
|
||||
self.inner.static_pad(name.as_ref()).map(Pad::from)
|
||||
}
|
||||
|
||||
pub fn bus(&self) -> Result<Bus> {
|
||||
use gstreamer::prelude::*;
|
||||
self.inner
|
||||
.bus()
|
||||
.map(Bus::from)
|
||||
.ok_or(Error)
|
||||
.attach_with(|| format!("Failed to get bus from Element: {}", self.inner.name()))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Sink: ChildOf<Element> {
|
||||
@@ -108,3 +117,17 @@ pub trait Source: ChildOf<Element> {
|
||||
// Ok(())
|
||||
// }
|
||||
}
|
||||
|
||||
pub trait ElementExt: ChildOf<Element> + Sync {
|
||||
#[track_caller]
|
||||
fn bus(&self) -> Result<Bus> {
|
||||
self.upcast_ref().bus()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn pad(&self, name: impl AsRef<str>) -> Option<Pad> {
|
||||
self.upcast_ref().pad(name)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ChildOf<Element> + Sync> ElementExt for T {}
|
||||
|
||||
@@ -13,6 +13,9 @@ pub use bin::*;
|
||||
pub use bus::*;
|
||||
pub use caps::*;
|
||||
pub use element::*;
|
||||
pub use gstreamer;
|
||||
#[doc(inline)]
|
||||
pub use gstreamer::{Message, MessageType, MessageView, State};
|
||||
pub use pad::*;
|
||||
pub use pipeline::*;
|
||||
pub use plugins::*;
|
||||
@@ -21,6 +24,7 @@ pub(crate) mod priv_prelude {
|
||||
pub use crate::errors::*;
|
||||
pub use crate::wrapper::*;
|
||||
pub use crate::*;
|
||||
pub use gstreamer::prelude::ElementExt as _;
|
||||
pub use gstreamer::prelude::*;
|
||||
#[track_caller]
|
||||
pub fn duration_to_clocktime(
|
||||
@@ -33,15 +37,12 @@ pub(crate) mod priv_prelude {
|
||||
.attach("Failed to convert duration to ClockTime")?;
|
||||
Ok(Some(clocktime))
|
||||
}
|
||||
None => Ok(None),
|
||||
None => Ok(gstreamer::ClockTime::NONE),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
use errors::*;
|
||||
use gstreamer::prelude::*;
|
||||
use std::sync::Arc;
|
||||
|
||||
static GST: std::sync::LazyLock<std::sync::Arc<Gst>> = std::sync::LazyLock::new(|| {
|
||||
gstreamer::init().expect("Failed to initialize GStreamer");
|
||||
std::sync::Arc::new(Gst {
|
||||
@@ -49,7 +50,6 @@ static GST: std::sync::LazyLock<std::sync::Arc<Gst>> = std::sync::LazyLock::new(
|
||||
})
|
||||
});
|
||||
|
||||
/// This should be a global singleton
|
||||
pub struct Gst {
|
||||
__private: core::marker::PhantomData<()>,
|
||||
}
|
||||
@@ -58,14 +58,4 @@ impl Gst {
|
||||
pub fn new() -> Arc<Self> {
|
||||
Arc::clone(&GST)
|
||||
}
|
||||
|
||||
// pub fn pipeline_from_str(&self, s: &str) -> Result<Pipeline> {
|
||||
// let pipeline = gstreamer::parse::launch(s).change_context(Error)?;
|
||||
// let pipeline = pipeline.downcast::<gstreamer::Pipeline>();
|
||||
// let pipeline = match pipeline {
|
||||
// Err(_e) => return Err(Error).attach("Failed to downcast to Pipeline"),
|
||||
// Ok(p) => p,
|
||||
// };
|
||||
// Ok(Pipeline { inner: pipeline })
|
||||
// }
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
use crate::priv_prelude::*;
|
||||
/// Pads are link points between elements
|
||||
|
||||
wrap_gst!(Pad, gstreamer::Pad);
|
||||
|
||||
impl Pad {
|
||||
#[track_caller]
|
||||
pub fn ghost(target: &Pad) -> Result<Pad> {
|
||||
let ghost_pad = gstreamer::GhostPad::with_target(&target.inner)
|
||||
.change_context(Error)
|
||||
@@ -12,6 +13,7 @@ impl Pad {
|
||||
})
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn link(&self, peer: &Pad) -> Result<()> {
|
||||
use gstreamer::prelude::*;
|
||||
self.inner
|
||||
@@ -21,6 +23,7 @@ impl Pad {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn current_caps(&self) -> Result<Caps> {
|
||||
let caps = self
|
||||
.inner
|
||||
@@ -30,6 +33,7 @@ impl Pad {
|
||||
Ok(Caps { inner: caps })
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn activate(&self, activate: bool) -> Result<()> {
|
||||
use gstreamer::prelude::*;
|
||||
self.inner
|
||||
|
||||
@@ -12,6 +12,7 @@ impl Drop for Pipeline {
|
||||
}
|
||||
|
||||
impl Pipeline {
|
||||
#[track_caller]
|
||||
pub fn bus(&self) -> Result<Bus> {
|
||||
let bus = self
|
||||
.inner
|
||||
@@ -22,15 +23,17 @@ impl Pipeline {
|
||||
}
|
||||
|
||||
/// Get the state
|
||||
#[track_caller]
|
||||
pub fn state(
|
||||
&self,
|
||||
timeout: impl Into<Option<core::time::Duration>>,
|
||||
) -> Result<gstreamer::State> {
|
||||
let (result, current, pending) = self.inner.state(duration_to_clocktime(timeout)?);
|
||||
let (result, current, _pending) = self.inner.state(duration_to_clocktime(timeout)?);
|
||||
result.change_context(Error).attach("Failed to get state")?;
|
||||
Ok(current)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn play(&self) -> Result<()> {
|
||||
self.inner
|
||||
.set_state(gstreamer::State::Playing)
|
||||
@@ -39,6 +42,7 @@ impl Pipeline {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn pause(&self) -> Result<()> {
|
||||
self.inner
|
||||
.set_state(gstreamer::State::Paused)
|
||||
@@ -47,6 +51,7 @@ impl Pipeline {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn ready(&self) -> Result<()> {
|
||||
self.inner
|
||||
.set_state(gstreamer::State::Ready)
|
||||
@@ -55,6 +60,7 @@ impl Pipeline {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn set_state(&self, state: gstreamer::State) -> Result<gstreamer::StateChangeSuccess> {
|
||||
let result = self
|
||||
.inner
|
||||
@@ -63,37 +69,135 @@ impl Pipeline {
|
||||
.attach("Failed to set pipeline state")?;
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PipelineExt {
|
||||
fn bus(&self) -> Result<Bus>;
|
||||
fn play(&self) -> Result<()>;
|
||||
fn pause(&self) -> Result<()>;
|
||||
fn ready(&self) -> Result<()>;
|
||||
fn set_state(&self, state: gstreamer::State) -> Result<gstreamer::StateChangeSuccess>;
|
||||
fn state(&self, timeout: impl Into<Option<core::time::Duration>>) -> Result<gstreamer::State>;
|
||||
}
|
||||
|
||||
impl<T> PipelineExt for T
|
||||
where
|
||||
T: ChildOf<Pipeline>,
|
||||
{
|
||||
fn bus(&self) -> Result<Bus> {
|
||||
self.upcast_ref().bus()
|
||||
pub async fn wait_for(&self, state: gstreamer::State) -> Result<()> {
|
||||
let current_state = self.state(core::time::Duration::ZERO)?;
|
||||
if current_state == state {
|
||||
Ok(())
|
||||
} else {
|
||||
// use futures::stream::StreamExt;
|
||||
use futures_lite::stream::StreamExt as _;
|
||||
self.bus()?
|
||||
.filtered_stream(&[MessageType::StateChanged])
|
||||
.find(|message: &gstreamer::Message| {
|
||||
let view = message.view();
|
||||
if let gstreamer::MessageView::StateChanged(changed) = view {
|
||||
changed.current() == state
|
||||
&& changed.src().is_some_and(|s| s == &self.inner)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
.await;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn wait_for_states(&self, states: impl AsRef<[gstreamer::State]>) -> Result<()> {
|
||||
let current_state = self.state(core::time::Duration::ZERO)?;
|
||||
let states = states.as_ref();
|
||||
if states.contains(&current_state) {
|
||||
Ok(())
|
||||
} else {
|
||||
use futures_lite::stream::StreamExt as _;
|
||||
self.bus()?
|
||||
.filtered_stream(&[MessageType::StateChanged])
|
||||
.find(|message: &gstreamer::Message| {
|
||||
let view = message.view();
|
||||
if let gstreamer::MessageView::StateChanged(changed) = view {
|
||||
states.contains(&changed.current())
|
||||
&& changed.src().is_some_and(|s| s == &self.inner)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
.await;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn wait_for_message<'a, F2>(
|
||||
&self,
|
||||
filter: Option<&'a [gstreamer::MessageType]>,
|
||||
filter_fn: F2,
|
||||
) -> Result<gstreamer::Message>
|
||||
where
|
||||
F2: Fn(&gstreamer::Message) -> bool + Send + 'a,
|
||||
{
|
||||
use futures_lite::stream::StreamExt as _;
|
||||
match filter {
|
||||
Some(filter) => {
|
||||
let message = self.bus()?.filtered_stream(filter).find(filter_fn).await;
|
||||
match message {
|
||||
Some(msg) => Ok(msg),
|
||||
None => {
|
||||
Err(Error).attach("Failed to find message matching the provided filter")
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let message = self.bus()?.stream().find(filter_fn).await;
|
||||
match message {
|
||||
Some(msg) => Ok(msg),
|
||||
None => {
|
||||
Err(Error).attach("Failed to find message matching the provided filter")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PipelineExt: ChildOf<Pipeline> + Sync {
|
||||
// #[track_caller]
|
||||
// fn bus(&self) -> Result<Bus> {
|
||||
// self.upcast_ref().bus()
|
||||
// }
|
||||
#[track_caller]
|
||||
fn play(&self) -> Result<()> {
|
||||
self.upcast_ref().play()
|
||||
}
|
||||
#[track_caller]
|
||||
fn pause(&self) -> Result<()> {
|
||||
self.upcast_ref().pause()
|
||||
}
|
||||
#[track_caller]
|
||||
fn ready(&self) -> Result<()> {
|
||||
self.upcast_ref().ready()
|
||||
}
|
||||
#[track_caller]
|
||||
fn set_state(&self, state: gstreamer::State) -> Result<gstreamer::StateChangeSuccess> {
|
||||
self.upcast_ref().set_state(state)
|
||||
}
|
||||
#[track_caller]
|
||||
fn state(&self, timeout: impl Into<Option<core::time::Duration>>) -> Result<gstreamer::State> {
|
||||
self.upcast_ref().state(timeout)
|
||||
}
|
||||
|
||||
fn wait_for(
|
||||
&self,
|
||||
state: gstreamer::State,
|
||||
) -> impl std::future::Future<Output = Result<()>> + Send {
|
||||
self.upcast_ref().wait_for(state)
|
||||
}
|
||||
|
||||
fn wait_for_states(
|
||||
&self,
|
||||
states: impl AsRef<[gstreamer::State]> + Send,
|
||||
) -> impl std::future::Future<Output = Result<()>> + Send {
|
||||
self.upcast_ref().wait_for_states(states)
|
||||
}
|
||||
|
||||
fn wait_for_message<'a, F2>(
|
||||
&self,
|
||||
filter: Option<&'a [gstreamer::MessageType]>,
|
||||
filter_fn: F2,
|
||||
) -> impl std::future::Future<Output = Result<gstreamer::Message>> + Send
|
||||
where
|
||||
F2: Fn(&gstreamer::Message) -> bool + Send + 'a,
|
||||
{
|
||||
self.upcast_ref().wait_for_message(filter, filter_fn)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ChildOf<Pipeline> + Sync> PipelineExt for T {}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
use crate::priv_prelude::*;
|
||||
|
||||
#[doc(inline)]
|
||||
pub use gstreamer_app::AppSinkCallbacks;
|
||||
|
||||
wrap_gst!(AppSink, gstreamer::Element);
|
||||
parent_child!(Pipeline, AppSink, downcast); // since AppSink is an Element internaly
|
||||
parent_child!(Element, AppSink);
|
||||
|
||||
impl Sink for AppSink {}
|
||||
@@ -12,6 +14,7 @@ impl AppSink {
|
||||
.downcast_ref::<gstreamer_app::AppSink>()
|
||||
.expect("Failed to downcast to AppSink")
|
||||
}
|
||||
|
||||
pub fn new(name: impl AsRef<str>) -> Result<Self> {
|
||||
use gstreamer::prelude::*;
|
||||
let inner = gstreamer::ElementFactory::make("appsink")
|
||||
@@ -22,14 +25,47 @@ impl AppSink {
|
||||
Ok(AppSink { inner })
|
||||
}
|
||||
|
||||
pub fn with_caps(mut self, caps: Caps) -> Self {
|
||||
pub fn with_emit_signals(self, emit: bool) -> Self {
|
||||
self.inner.set_property("emit-signals", emit);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_async(self, async_: bool) -> Self {
|
||||
self.inner.set_property("async", async_);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_sync(self, sync: bool) -> Self {
|
||||
self.inner.set_property("sync", sync);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_caps(self, caps: Caps) -> Self {
|
||||
self.inner.set_property("caps", caps.inner);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_callbacks(&self, callbacks: gstreamer_app::AppSinkCallbacks) -> Result<()> {
|
||||
pub fn with_callbacks(self, callbacks: gstreamer_app::AppSinkCallbacks) -> Self {
|
||||
self.appsink().set_callbacks(callbacks);
|
||||
Ok(())
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_new_frame<F>(self, mut f: F) -> Self
|
||||
where
|
||||
F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
|
||||
{
|
||||
self.with_emit_signals(true).with_callbacks(
|
||||
AppSinkCallbacks::builder()
|
||||
.new_sample(move |appsink| {
|
||||
use glib::object::Cast;
|
||||
let element = appsink.upcast_ref::<gstreamer::Element>();
|
||||
let appsink = AppSink::from_gst_ref(element);
|
||||
std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(appsink)))
|
||||
.unwrap_or(Err(gstreamer::FlowError::Error))
|
||||
.map(|_| gstreamer::FlowSuccess::Ok)
|
||||
})
|
||||
.build(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn pull_sample(&self) -> Result<Sample> {
|
||||
@@ -151,3 +187,49 @@ fn test_appsink() {
|
||||
}
|
||||
// std::thread::sleep(std::time::Duration::from_secs(5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_appsink_metadata() {
|
||||
use tracing_subscriber::prelude::*;
|
||||
tracing_subscriber::registry()
|
||||
.with(
|
||||
tracing_subscriber::fmt::layer()
|
||||
.with_thread_ids(true)
|
||||
.with_file(true),
|
||||
)
|
||||
.init();
|
||||
|
||||
crate::Gst::new();
|
||||
|
||||
let url = "https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c";
|
||||
|
||||
let videoconvert = crate::plugins::videoconvertscale::VideoConvert::new("iced-video-convert")
|
||||
// .unwrap();
|
||||
// .with_output_format(gst::plugins::videoconvertscale::VideoFormat::Rgba)
|
||||
.unwrap();
|
||||
let appsink = crate::plugins::app::AppSink::new("iced-video-sink")
|
||||
.unwrap()
|
||||
.with_async(true)
|
||||
.with_sync(true);
|
||||
|
||||
let video_sink = videoconvert.link(&appsink).unwrap();
|
||||
let playbin = crate::plugins::playback::Playbin3::new("iced-video")
|
||||
.unwrap()
|
||||
.with_uri(url)
|
||||
.with_video_sink(&video_sink);
|
||||
|
||||
playbin.pause().unwrap();
|
||||
|
||||
smol::block_on(async {
|
||||
playbin.wait_for(gstreamer::State::Paused).await.unwrap();
|
||||
});
|
||||
// std::thread::sleep(core::time::Duration::from_secs(1));
|
||||
let pad = appsink.pad("sink").unwrap();
|
||||
let caps = pad.current_caps().unwrap();
|
||||
let format = caps.format();
|
||||
let height = caps.height();
|
||||
let width = caps.width();
|
||||
let framerate = caps.framerate();
|
||||
dbg!(&format, height, width, framerate);
|
||||
dbg!(&caps);
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ pub trait GstWrapper {
|
||||
fn from_gst(gst: Self::GstType) -> Self;
|
||||
// fn into_gst(self) -> Self::GstType;
|
||||
fn as_gst_ref(&self) -> &Self::GstType;
|
||||
fn from_gst_ref(gst: &Self::GstType) -> &Self;
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
@@ -51,6 +52,10 @@ macro_rules! wrap_gst {
|
||||
fn as_gst_ref(&self) -> &Self::GstType {
|
||||
&self.inner
|
||||
}
|
||||
|
||||
fn from_gst_ref(gst: &Self::GstType) -> &Self {
|
||||
unsafe { &*(gst as *const Self::GstType as *const Self) }
|
||||
}
|
||||
}
|
||||
|
||||
impl ChildOf<$name> for $name {
|
||||
@@ -97,7 +102,14 @@ macro_rules! parent_child {
|
||||
let downcasted = self
|
||||
.inner
|
||||
.downcast_ref::<<$parent as GstWrapper>::GstType>()
|
||||
.expect("BUG: Failed to downcast GStreamer type from child to parent");
|
||||
.expect(
|
||||
format!(
|
||||
"BUG: Failed to downcast GStreamer type from child {} to parent {}",
|
||||
stringify!($child),
|
||||
stringify!($parent)
|
||||
)
|
||||
.as_str(),
|
||||
);
|
||||
unsafe {
|
||||
&*(downcasted as *const <$parent as GstWrapper>::GstType as *const $parent)
|
||||
}
|
||||
|
||||
28 src/main.rs
@@ -14,31 +14,3 @@ fn main() -> Result<()> {
|
||||
ui_iced::ui().change_context(Error)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// #[tokio::main]
|
||||
// pub async fn main() -> Result<()> {
|
||||
// dotenvy::dotenv()
|
||||
// .change_context(Error)
|
||||
// .inspect_err(|err| {
|
||||
// eprintln!("Failed to load .env file: {}", err);
|
||||
// })
|
||||
// .ok();
|
||||
// let config = JellyfinConfig::new(
|
||||
// std::env::var("JELLYFIN_USERNAME").change_context(Error)?,
|
||||
// std::env::var("JELLYFIN_PASSWORD").change_context(Error)?,
|
||||
// std::env::var("JELLYFIN_SERVER_URL").change_context(Error)?,
|
||||
// "jello".to_string(),
|
||||
// );
|
||||
// let mut jellyfin = api::JellyfinClient::new(config);
|
||||
// jellyfin
|
||||
// .authenticate_with_cached_token(".session")
|
||||
// .await
|
||||
// .change_context(Error)?;
|
||||
//
|
||||
// #[cfg(feature = "iced")]
|
||||
// ui_iced::ui(jellyfin);
|
||||
// #[cfg(feature = "gpui")]
|
||||
// ui_gpui::ui(jellyfin);
|
||||
//
|
||||
// Ok(())
|
||||
// }
|
||||
|
||||
@@ -20,6 +20,8 @@ iced = { workspace = true, default-features = true, features = [
|
||||
|
||||
|
||||
iced_video_player = { workspace = true }
|
||||
iced_wgpu = "0.14.0"
|
||||
iced_winit = "0.14.0"
|
||||
reqwest = "0.12.24"
|
||||
tap = "1.0.1"
|
||||
toml = "0.9.8"
|
||||
|
||||