Compare commits

...

16 Commits

Author SHA1 Message Date
d509fb7813 feat: Update cargo.lock 2026-01-29 00:45:46 +05:30
dcbb5a127b feat: Move settings to a tab based ui 2026-01-29 00:40:12 +05:30
e66c457b57 feat: Added BACKGROUND_COLOR to settings popup 2026-01-28 02:06:57 +05:30
76fc14c73b feat: Use a floating box for settings 2026-01-28 02:00:45 +05:30
5b4fbd5df6 feat(store): add SecretStore, ApiKey, remove Store trait 2026-01-26 21:00:56 +05:30
e7fd01c0af chore: Update cargo.lock 2026-01-20 21:52:46 +05:30
a040478069 chore: Update flake.lock 2026-01-20 21:52:46 +05:30
e5ef173473 fix(iced-video): Update the color matrices and subtract .5 from uv samples to
Some checks failed
build / checks-matrix (push) Has been cancelled
build / checks-build (push) Has been cancelled
build / codecov (push) Has been cancelled
docs / docs (push) Has been cancelled
2026-01-15 17:25:09 +05:30
429371002b fix(iced-video): Write the conversion matrix buffer so the video actually shows up
Some checks failed
build / checks-matrix (push) Has been cancelled
build / checks-build (push) Has been cancelled
build / codecov (push) Has been cancelled
docs / docs (push) Has been cancelled
2026-01-15 17:01:41 +05:30
335e8fdbef feat: move cuda to linux
Some checks failed
build / checks-matrix (push) Has been cancelled
build / checks-build (push) Has been cancelled
build / codecov (push) Has been cancelled
docs / docs (push) Has been cancelled
2026-01-14 15:55:49 +05:30
9dac0b6c78 feat(iced-video): added video format to the video frame
Some checks failed
build / checks-matrix (push) Has been cancelled
build / checks-build (push) Has been cancelled
build / codecov (push) Has been cancelled
docs / docs (push) Has been cancelled
2026-01-14 09:51:56 +05:30
uttarayan21
97a7a632d4 feat(iced-video): implement planar YUV texture support with HDR conversion matrices and update dependencies
Some checks failed
build / checks-matrix (push) Has been cancelled
build / codecov (push) Has been cancelled
docs / docs (push) Has been cancelled
build / checks-build (push) Has been cancelled
2026-01-04 23:02:47 +05:30
uttarayan21
29390140cd feat(settings): simplify form updates and temporarily disable server toggler 2025-12-27 00:13:54 +05:30
uttarayan21
97c2b3f14c feat(settings): implement user and server form handling with update functions and UI views 2025-12-27 00:04:42 +05:30
uttarayan21
2b2e8060e7 feat(ui-iced): implement settings screen with navigation and basic UI elements
Some checks failed
build / checks-matrix (push) Has been cancelled
build / codecov (push) Has been cancelled
docs / docs (push) Has been cancelled
build / checks-build (push) Has been cancelled
2025-12-26 21:21:58 +05:30
uttarayan21
584495453f feat: Many more improvements to video player now with a subscription 2025-12-26 19:06:40 +05:30
24 changed files with 2451 additions and 1455 deletions

1558
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -13,14 +13,15 @@ members = [
]
[workspace.dependencies]
iced = { version = "0.14.0" }
iced_video_player = "0.6"
gst = { version = "0.1.0", path = "gst" }
iced_wgpu = { version = "0.14.0" }
iced-video = { version = "0.1.0", path = "crates/iced-video" }
[patch.crates-io]
iced_wgpu = { git = "https://github.com/uttarayan21/iced", branch = "0.14" }
iced_core = { git = "https://github.com/uttarayan21/iced", branch = "0.14" }
iced_renderer = { git = "https://github.com/uttarayan21/iced", branch = "0.14" }
iced_futures = { git = "https://github.com/uttarayan21/iced", branch = "0.14" }
iced = { git = "https://github.com/uttarayan21/iced", branch = "0.14" }
[package]
@@ -31,6 +32,7 @@ license = "MIT"
[dependencies]
api = { version = "0.1.0", path = "api" }
bytemuck = { version = "1.24.0", features = ["derive"] }
clap = { version = "4.5", features = ["derive"] }
clap-verbosity-flag = { version = "3.0.4", features = ["tracing"] }
clap_complete = "4.5"

View File

@@ -63,3 +63,50 @@ In the shader the components get uniformly normalized from [0..=1023] integer to
Videos however are generally not stored in this format or any rgb format in general because it is not as efficient for (lossy) compression as YUV formats.
Right now I don't want to deal with yuv formats so I'll use gstreamer caps to convert the video into `Rgba10a2` format
## Pixel formats and Planes
Dated: Sun Jan 4 09:09:16 AM IST 2026
| value | count | quantile | percentage | frequency |
| --- | --- | --- | --- | --- |
| yuv420p | 1815 | 0.5067001675041876 | 50.67% | ************************************************** |
| yuv420p10le | 1572 | 0.4388609715242881 | 43.89% | ******************************************* |
| yuvj420p | 171 | 0.04773869346733668 | 4.77% | **** |
| rgba | 14 | 0.003908431044109436 | 0.39% | |
| yuvj444p | 10 | 0.0027917364600781687 | 0.28% | |
For all of my media collection these are the pixel formats for all the videos
### RGBA
Pretty self evident
8 bits for each of R, G, B and A
Hopefully shouldn't be too hard to make a function or possibly a lut that takes data from rgba and maps it to Rgb10a2Unorm
```mermaid
packet
title RGBA
+8: "R"
+8: "G"
+8: "B"
+8: "A"
```
### YUV
[All YUV formats](https://learn.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#surface-definitions)
[10 and 16 bit yuv formats](https://learn.microsoft.com/en-us/windows/win32/medfound/10-bit-and-16-bit-yuv-video-formats)
Y -> Luminance
U,V -> Chrominance
p -> Planar
sp -> semi planar
j -> full range
planar formats have each of the channels in a contiguous array one after another
in semi-planar formats the y channel is separate and the uv channels are interleaved
## Chroma Subsampling

View File

@@ -4,7 +4,9 @@ version = "0.1.0"
edition = "2024"
[dependencies]
bytemuck = "1.24.0"
error-stack = "0.6.0"
futures-lite = "2.6.1"
gst.workspace = true
iced_core = "0.14.0"
iced_futures = "0.14.0"
@@ -12,6 +14,7 @@ iced_renderer = { version = "0.14.0", features = ["iced_wgpu"] }
iced_wgpu = { version = "0.14.0" }
thiserror = "2.0.17"
tracing = "0.1.43"
wgpu = { version = "27.0.1", features = ["vulkan"] }
[dev-dependencies]
iced.workspace = true

View File

@@ -11,87 +11,154 @@ pub fn main() -> iced::Result {
.with(tracing_subscriber::EnvFilter::from_default_env())
.init();
iced::application(State::new, update, view)
.subscription(keyboard_event)
.subscription(|state| {
// Foo
match &state.video {
Some(video) => video.subscription_with(state, keyboard_event),
None => keyboard_event(state),
}
})
.run()
}
fn keyboard_event(state: &State) -> iced::Subscription<Message> {
fn keyboard_event(_state: &State) -> iced::Subscription<Message> {
use iced::keyboard::{Key, key::Named};
iced::keyboard::listen().map(move |event| match event {
iced::keyboard::Event::KeyPressed { key, .. } => {
let key = key.as_ref();
match key {
Key::Named(Named::Escape) | Key::Character("q") => Message::Quit,
Key::Character("f") => Message::Fullscreen,
Key::Named(Named::Space) => Message::Toggle,
_ => Message::Load,
_ => Message::Noop,
}
// if key == &space {
// // Toggle play/pause
// let is_playing = state
// .video
// .source()
// .is_playing()
// .expect("Failed to get playing state");
// if is_playing {
// Message::Pause
// } else {
// Message::Play
// }
// }
}
_ => Message::Load,
_ => Message::Noop,
})
}
#[derive(Debug, Clone)]
pub struct State {
video: VideoHandle,
video: Option<VideoHandle<Message>>,
fullscreen: bool,
}
impl State {
pub fn new() -> Self {
let video = VideoHandle::new("https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c")
.expect("Failed to create video handle");
Self { video }
pub fn new() -> (Self, iced::Task<Message>) {
(
Self {
video: None,
fullscreen: false,
},
iced::Task::done(Message::Load),
)
}
}
#[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone)]
pub enum Message {
Play,
Pause,
Toggle,
Noop,
Load,
Fullscreen,
OnLoad(VideoHandle<Message>),
OnError(String),
NewFrame,
Eos,
Quit,
}
pub fn update(state: &mut State, message: Message) -> iced::Task<Message> {
match message {
Message::Load => {
// does stuff
Message::NewFrame => {
iced::Task::none()
}
Message::Eos => {
iced::Task::done(Message::Pause)
}
Message::Load => {
iced::Task::perform(
VideoHandle::load(
"https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c",
),
|result| match result {
Ok(video) => Message::OnLoad(video),
Err(err) => Message::OnError(format!("Error loading video: {:?}", err)),
},
).chain(iced::Task::done(Message::Play))
}
Message::OnError(err) => {
eprintln!("Error: {}", err);
iced::Task::none()
}
Message::OnLoad(video) => {
state.video = Some(video.on_new_frame(Message::NewFrame).on_end_of_stream(Message::Eos));
iced::Task::none()
}
Message::Fullscreen => {
state.fullscreen = !state.fullscreen;
let fullscreen = state.fullscreen;
let mode = if fullscreen {
iced::window::Mode::Fullscreen
} else {
iced::window::Mode::Windowed
};
iced::window::oldest().and_then(move |id| iced::window::set_mode::<Message>(id, mode))
}
Message::Play => {
state.video.source().play().expect("Failed to play video");
state
.video
.as_ref()
.unwrap()
.source()
.play()
.expect("Failed to play video");
iced::Task::none()
}
Message::Pause => {
state.video.source().pause().expect("Failed to pause video");
state
.video
.as_ref()
.unwrap()
.source()
.pause()
.expect("Failed to pause video");
iced::Task::none()
}
Message::Toggle => {
state.video.source().toggle().expect("Failed to stop video");
state
.video
.as_ref()
.unwrap()
.source()
.toggle()
.expect("Failed to stop video");
iced::Task::none()
}
Message::Quit => {
state.video.source().stop().expect("Failed to stop video");
state
.video
.as_ref()
.unwrap()
.source()
.stop()
.expect("Failed to stop video");
std::process::exit(0);
}
Message::Noop => iced::Task::none(),
}
}
pub fn view<'a>(state: &'a State) -> iced::Element<'a, Message> {
let video_widget = Video::new(&state.video)
if let None = &state.video {
return iced::widget::Column::new()
.push(iced::widget::Text::new("Press any key to load video"))
.align_x(iced::Alignment::Center)
.into();
}
let video_widget = Video::new(&state.video.as_ref().unwrap())
.width(iced::Length::Fill)
.height(iced::Length::Fill)
.content_fit(iced::ContentFit::Contain);

View File

@@ -0,0 +1,8 @@
info:
RUST_LOG=info,wgpu_core=warn,wgpu_hal=warn cargo run --release --example minimal
# GST_DEBUG=5 RUST_LOG="" cargo run --release --example minimal
flame:
cargo flamegraph run --release --example minimal
heaptrack:
cargo build --release --example minimal
RUST_LOG="info,wgpu_hal=info" heaptrack $CARGO_TARGET_DIR/release/examples/minimal

View File

@@ -1,13 +1,10 @@
pub mod id;
pub mod primitive;
pub mod source;
use iced_core as iced;
use iced_renderer::Renderer as RendererWithFallback;
use iced_wgpu::primitive::Renderer as PrimitiveRenderer;
pub mod widget;
pub use widget::Video;
use error_stack::{Report, ResultExt};
use iced::Length;
use std::marker::PhantomData;
use gst::plugins::app::AppSink;
use gst::plugins::playback::Playbin3;
@@ -20,19 +17,63 @@ pub type Result<T, E = Report<Error>> = core::result::Result<T, E>;
use std::sync::{Arc, Mutex, atomic::AtomicBool};
mod seal {
pub trait Sealed {}
impl Sealed for super::Unknown {}
impl Sealed for super::Ready {}
}
pub trait State: seal::Sealed {
fn is_ready() -> bool {
false
}
}
#[derive(Debug, Clone)]
pub struct Unknown;
#[derive(Debug, Clone)]
pub struct Ready;
impl State for Unknown {}
impl State for Ready {
fn is_ready() -> bool {
true
}
}
/// This is the video handle that is used to control the video playback.
/// This should be keps in the application state.
#[derive(Debug, Clone)]
pub struct VideoHandle {
pub struct VideoHandle<Message, S: State = Unknown> {
id: id::Id,
source: source::VideoSource,
is_metadata_loaded: Arc<AtomicBool>,
is_playing: Arc<AtomicBool>,
is_eos: Arc<AtomicBool>,
pub source: source::VideoSource,
frame_ready: Arc<AtomicBool>,
on_new_frame: Option<Box<Message>>,
on_end_of_stream: Option<Box<Message>>,
on_about_to_finish: Option<Box<Message>>,
__marker: core::marker::PhantomData<S>,
}
impl VideoHandle {
impl<Message: Send + Sync + Clone> VideoHandle<Message, Unknown> {
pub fn new(url: impl AsRef<str>) -> Result<Self> {
let source = source::VideoSource::new(url)?;
let frame_ready = Arc::clone(&source.ready);
Ok(Self {
id: id::Id::unique(),
source: source,
on_new_frame: None,
on_end_of_stream: None,
on_about_to_finish: None,
frame_ready,
__marker: core::marker::PhantomData,
})
}
/// Creates a new video handle and waits for the metadata to be loaded.
pub async fn load(url: impl AsRef<str>) -> Result<VideoHandle<Message, Ready>> {
let handle = VideoHandle::new(url)?;
handle.wait().await
}
}
impl<Message: Send + Sync + Clone, S: State> VideoHandle<Message, S> {
pub fn id(&self) -> &id::Id {
&self.id
}
@@ -41,212 +82,83 @@ impl VideoHandle {
&self.source
}
pub fn new(url: impl AsRef<str>) -> Result<Self> {
let source = source::VideoSource::new(url)?;
let frame_ready = Arc::clone(&source.ready);
Ok(Self {
id: id::Id::unique(),
source: source,
is_metadata_loaded: Arc::new(AtomicBool::new(false)),
is_playing: Arc::new(AtomicBool::new(false)),
is_eos: Arc::new(AtomicBool::new(false)),
frame_ready,
})
pub async fn wait(self) -> Result<VideoHandle<Message, Ready>> {
self.source.wait().await?;
Ok(self.state::<Ready>())
}
}
/// This is the Video widget that displays a video.
/// This should be used in the view function.
pub struct Video<'a, Message, Theme = iced::Theme, Renderer = iced_wgpu::Renderer>
where
Renderer: PrimitiveRenderer,
{
id: id::Id,
handle: &'a VideoHandle,
content_fit: iced::ContentFit,
width: iced::Length,
height: iced::Length,
on_end_of_stream: Option<Message>,
on_new_frame: Option<Message>,
looping: bool,
// on_subtitle_text: Option<Box<dyn Fn(Option<String>) -> Message + 'a>>,
// on_error: Option<Box<dyn Fn(&glib::Error) -> Message + 'a>>,
// theme: Theme,
__marker: PhantomData<(Renderer, Theme)>,
}
fn state<S2: State>(self) -> VideoHandle<Message, S2> {
VideoHandle {
id: self.id,
source: self.source,
on_new_frame: self.on_new_frame,
on_end_of_stream: self.on_end_of_stream,
on_about_to_finish: self.on_about_to_finish,
frame_ready: self.frame_ready,
__marker: core::marker::PhantomData,
}
}
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
{
pub fn new(handle: &'a VideoHandle) -> Self {
// pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
// let sub = widget::VideoSubscription {
// id: self.id.clone(),
// on_end_of_stream: self.on_end_of_stream.clone(),
// on_new_frame: self.on_new_frame.clone(),
// on_about_to_finish: self.on_about_to_finish.clone(),
// bus: self.source.bus.clone(),
// };
// iced_futures::subscription::from_recipe(sub)
// }
//
// pub fn subscription_with<State>(
// &self,
// state: &State,
// f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
// ) -> iced_futures::subscription::Subscription<Message>
// where
// State: Send + Sync + 'static,
// {
// let sub = self.subscription();
// iced_futures::subscription::Subscription::batch([sub, f(state)])
// }
pub fn on_new_frame(self, message: Message) -> Self {
Self {
id: handle.id.clone(),
handle: &handle,
content_fit: iced::ContentFit::Contain,
width: Length::Shrink,
height: Length::Shrink,
on_end_of_stream: None,
on_new_frame: None,
looping: false,
// theme: Theme::default(),
__marker: PhantomData,
on_new_frame: Some(Box::new(message)),
..self
}
}
pub fn on_end_of_stream(self, message: Message) -> Self {
Self {
on_end_of_stream: Some(Box::new(message)),
..self
}
}
pub fn on_about_to_finish(self, message: Message) -> Self {
Self {
on_about_to_finish: Some(Box::new(message)),
..self
}
}
pub fn play(&self) {
self.source.play();
}
pub fn pause(&self) {
self.source.pause();
}
pub fn stop(&self) {
self.source.stop();
}
}
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
{
pub fn width(mut self, width: Length) -> Self {
self.width = width;
self
}
pub fn height(mut self, height: Length) -> Self {
self.height = height;
self
}
pub fn content_fit(mut self, fit: iced::ContentFit) -> Self {
self.content_fit = fit;
self
}
pub fn on_end_of_stream(mut self, message: Message) -> Self {
self.on_end_of_stream = Some(message);
self
}
pub fn on_new_frame(mut self, message: Message) -> Self {
self.on_new_frame = Some(message);
self
}
pub fn looping(mut self, looping: bool) -> Self {
self.looping = looping;
self
}
}
impl<Message, Theme, Renderer> iced::Widget<Message, Theme, Renderer>
for Video<'_, Message, Theme, Renderer>
where
Message: Clone,
Renderer: PrimitiveRenderer,
{
fn size(&self) -> iced::Size<Length> {
iced::Size {
width: self.width,
height: self.height,
}
}
// The video player should take max space by default
fn layout(
&mut self,
_tree: &mut iced::widget::Tree,
_renderer: &Renderer,
limits: &iced::layout::Limits,
) -> iced::layout::Node {
iced::layout::Node::new(limits.max())
}
fn draw(
&self,
tree: &iced::widget::Tree,
renderer: &mut Renderer,
theme: &Theme,
style: &iced::renderer::Style,
layout: iced::Layout<'_>,
cursor: iced::mouse::Cursor,
viewport: &iced::Rectangle,
) {
if let Ok((width, height)) = self.handle.source.size() {
let video_size = iced::Size {
width: width as f32,
height: height as f32,
};
let bounds = layout.bounds();
let adjusted_fit = self.content_fit.fit(video_size, bounds.size());
let scale = iced::Vector::new(
adjusted_fit.width / video_size.width,
adjusted_fit.height / video_size.height,
);
let final_size = video_size * scale;
let position = match self.content_fit {
iced::ContentFit::None => iced::Point::new(
bounds.x + (video_size.width - adjusted_fit.width) / 2.0,
bounds.y + (video_size.height - adjusted_fit.height) / 2.0,
),
_ => iced::Point::new(
bounds.center_x() - final_size.width / 2.0,
bounds.center_y() - final_size.height / 2.0,
),
};
let drawing_bounds = iced::Rectangle::new(position, final_size);
let render = |renderer: &mut Renderer| {
renderer.draw_primitive(
drawing_bounds,
primitive::VideoFrame {
id: self.id.clone(),
size: iced_wgpu::wgpu::Extent3d {
width: width as u32,
height: height as u32,
depth_or_array_layers: 1,
},
ready: Arc::clone(&self.handle.frame_ready),
frame: Arc::clone(&self.handle.source.frame),
},
);
};
if adjusted_fit.width > bounds.width || adjusted_fit.height > bounds.height {
renderer.with_layer(bounds, render);
} else {
render(renderer);
}
}
}
fn update(
&mut self,
_tree: &mut iced_core::widget::Tree,
event: &iced::Event,
_layout: iced_core::Layout<'_>,
_cursor: iced_core::mouse::Cursor,
_renderer: &Renderer,
_clipboard: &mut dyn iced_core::Clipboard,
shell: &mut iced_core::Shell<'_, Message>,
_viewport: &iced::Rectangle,
) {
if let iced::Event::Window(iced::window::Event::RedrawRequested(_)) = event {
if self
.handle
.frame_ready
.load(std::sync::atomic::Ordering::SeqCst)
{
shell.request_redraw();
} else {
shell.request_redraw_at(iced::window::RedrawRequest::At(
iced_core::time::Instant::now() + core::time::Duration::from_millis(32),
));
}
}
}
}
impl<'a, Message, Theme, Renderer> From<Video<'a, Message, Theme, Renderer>>
for iced::Element<'a, Message, Theme, Renderer>
where
Message: 'a + Clone,
Theme: 'a,
Renderer: 'a + iced_wgpu::primitive::Renderer,
{
fn from(video: Video<'a, Message, Theme, Renderer>) -> Self {
Self::new(video)
impl<Message: Send + Sync + Clone> VideoHandle<Message, Ready> {
pub fn format(&self) -> Result<gst::VideoFormat> {
self.source
.format()
.change_context(Error)
.attach("Failed to get video format")
}
}

View File

@@ -1,15 +1,101 @@
use crate::id;
use gst::videoconvertscale::VideoFormat;
use iced_wgpu::primitive::Pipeline;
use iced_wgpu::wgpu;
use std::collections::BTreeMap;
use std::sync::{Arc, Mutex, atomic::AtomicBool};
#[derive(Clone, Copy, Debug, bytemuck::Zeroable, bytemuck::Pod)]
#[repr(transparent)]
pub struct ConversionMatrix {
matrix: [Vec3f; 3],
}
#[derive(Clone, Copy, Debug, bytemuck::Zeroable, bytemuck::Pod)]
#[repr(C, align(16))]
pub struct Vec3f {
data: [f32; 3],
__padding: u32,
}
impl From<[f32; 3]> for Vec3f {
fn from(value: [f32; 3]) -> Self {
Vec3f {
data: [value[0], value[1], value[2]],
__padding: 0,
}
}
}
impl Vec3f {
pub fn new(x: f32, y: f32, z: f32) -> Self {
Vec3f {
data: [x, y, z],
__padding: 0,
}
}
pub const fn from(data: [f32; 3]) -> Self {
Vec3f {
data: [data[0], data[1], data[2]],
__padding: 0,
}
}
}
// impl ConversionMatrix {
// pub fn desc() -> wgpu::VertexBufferLayout<'static> {
// wgpu::VertexBufferLayout {
// array_stride: core::mem::size_of::<ConversionMatrix>() as wgpu::BufferAddress,
// step_mode: wgpu::VertexStepMode::Vertex,
// attributes: &[
// wgpu::VertexAttribute {
// offset: 0,
// shader_location: 0,
// format: wgpu::VertexFormat::Float32x4,
// },
// wgpu::VertexAttribute {
// offset: 16,
// shader_location: 1,
// format: wgpu::VertexFormat::Float32x4,
// },
// wgpu::VertexAttribute {
// offset: 32,
// shader_location: 2,
// format: wgpu::VertexFormat::Float32x4,
// },
// wgpu::VertexAttribute {
// offset: 48,
// shader_location: 3,
// format: wgpu::VertexFormat::Float32x4,
// },
// ],
// }
// }
// }
pub const BT2020_TO_RGB: ConversionMatrix = ConversionMatrix {
matrix: [
Vec3f::from([1.0, 0.0, 1.4746]),
Vec3f::from([1.0, -0.16455, -0.5714]),
Vec3f::from([1.0, 1.8814, 0.0]),
],
};
pub const BT709_TO_RGB: ConversionMatrix = ConversionMatrix {
matrix: [
Vec3f::from([1.0, 0.0, 1.5748]),
Vec3f::from([1.0, -0.1873, -0.4681]),
Vec3f::from([1.0, 1.8556, 0.0]),
],
};
#[derive(Debug)]
pub struct VideoFrame {
pub id: id::Id,
pub size: wgpu::Extent3d,
pub ready: Arc<AtomicBool>,
pub frame: Arc<Mutex<gst::Sample>>,
pub format: VideoFormat,
}
impl iced_wgpu::Primitive for VideoFrame {
@@ -24,73 +110,89 @@ impl iced_wgpu::Primitive for VideoFrame {
viewport: &iced_wgpu::graphics::Viewport,
) {
let video = pipeline.videos.entry(self.id.clone()).or_insert_with(|| {
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced-video-texture"),
size: self.size,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: pipeline.format,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("iced-video-buffer"),
size: (self.size.width * self.size.height * 4) as u64,
usage: wgpu::BufferUsages::COPY_SRC | wgpu::BufferUsages::COPY_DST,
mapped_at_creation: false,
});
let texture = VideoTexture::new(
"iced-video-texture",
self.size,
device,
pipeline.format,
self.format,
);
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("iced-video-texture-bind-group"),
layout: &pipeline.bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(
&texture.create_view(&wgpu::TextureViewDescriptor::default()),
),
resource: wgpu::BindingResource::TextureView(&texture.y_texture()),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureView(&texture.uv_texture()),
},
wgpu::BindGroupEntry {
binding: 2,
resource: wgpu::BindingResource::Sampler(&pipeline.sampler),
},
wgpu::BindGroupEntry {
binding: 3,
resource: wgpu::BindingResource::Buffer(
texture
.conversion_matrix_buffer()
.as_entire_buffer_binding(),
),
},
],
});
VideoTextures {
let matrix = if matches!(self.format, VideoFormat::P01010le | VideoFormat::P016Le) {
BT2020_TO_RGB
} else {
BT709_TO_RGB
};
texture.write_conversion_matrix(&matrix, queue);
VideoFrameData {
id: self.id.clone(),
texture,
buffer,
bind_group,
conversion_matrix: matrix,
ready: Arc::clone(&self.ready),
}
});
// dbg!(&self.size, video.texture.size());
if self.size != video.texture.size() {
// Resize the texture if the size has changed.
let new_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced-video-texture-resized"),
size: self.size,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: pipeline.format,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let new_texture = video
.texture
.resize("iced-video-texture-resized", self.size, device);
new_texture.write_conversion_matrix(&video.conversion_matrix, queue);
let new_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("iced-video-texture-bind-group-resized"),
label: Some("iced-video-texture-bind-group"),
layout: &pipeline.bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(
&new_texture.create_view(&wgpu::TextureViewDescriptor::default()),
),
resource: wgpu::BindingResource::TextureView(&new_texture.y_texture()),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureView(&new_texture.uv_texture()),
},
wgpu::BindGroupEntry {
binding: 2,
resource: wgpu::BindingResource::Sampler(&pipeline.sampler),
},
wgpu::BindGroupEntry {
binding: 3,
resource: wgpu::BindingResource::Buffer(
video
.texture
.conversion_matrix_buffer()
.as_entire_buffer_binding(),
),
},
],
});
video.texture = new_texture;
@@ -105,7 +207,9 @@ impl iced_wgpu::Primitive for VideoFrame {
let data = buffer
.map_readable()
.expect("BUG: Failed to map gst::Buffer readable");
queue.write_buffer(&video.buffer, 0, &data);
video.texture.write_texture(&data, queue);
drop(data);
video
.ready
@@ -118,29 +222,12 @@ impl iced_wgpu::Primitive for VideoFrame {
pipeline: &Self::Pipeline,
encoder: &mut wgpu::CommandEncoder,
target: &wgpu::TextureView,
_clip_bounds: &iced_wgpu::core::Rectangle<u32>,
bounds: &iced_wgpu::core::Rectangle<u32>,
) {
let Some(video) = pipeline.videos.get(&self.id) else {
return;
};
encoder.copy_buffer_to_texture(
wgpu::TexelCopyBufferInfo {
buffer: &video.buffer,
layout: wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(4 * self.size.width),
rows_per_image: Some(self.size.height),
},
},
wgpu::TexelCopyTextureInfo {
texture: &video.texture,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
self.size,
);
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("iced-video-render-pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
@@ -159,42 +246,219 @@ impl iced_wgpu::Primitive for VideoFrame {
render_pass.set_pipeline(&pipeline.pipeline);
render_pass.set_bind_group(0, &video.bind_group, &[]);
render_pass.set_scissor_rect(
bounds.x as _,
bounds.y as _,
bounds.width as _,
bounds.height as _,
);
render_pass.draw(0..3, 0..1);
// self.ready
// .store(false, std::sync::atomic::Ordering::Relaxed);
}
}
/// NV12 or P010 are only supported in DX12 and Vulkan backends.
/// While we can use vulkan with moltenvk on macos, I'd much rather use metal directly
/// Right now only supports interleaved UV formats.
/// For planar formats we would need 3 textures.
/// Also NV12 and P010 textures are not COPY_DST capable
/// This assumes 4:2:0 chroma subsampling (for now).
/// So for 4 Y samples there is 1 U and 1 V sample.
/// This means that the UV texture is half the width and half the height of the Y texture.
#[derive(Debug)]
pub struct VideoTextures {
pub struct VideoTexture {
y: wgpu::Texture,
uv: wgpu::Texture,
size: wgpu::Extent3d,
video_format: VideoFormat,
surface_format: wgpu::TextureFormat,
conversion_matrix_buffer: wgpu::Buffer,
}
impl VideoTexture {
pub fn size(&self) -> wgpu::Extent3d {
self.size
}
pub fn new(
label: &str,
size: wgpu::Extent3d,
device: &wgpu::Device,
surface_format: wgpu::TextureFormat,
video_format: VideoFormat,
) -> Self {
let surface_hdr = surface_format.is_wide();
let video_hdr = matches!(video_format, VideoFormat::P01010le | VideoFormat::P016Le);
if surface_hdr && !video_hdr {
tracing::warn!("Surface texture is HDR but video format is SDR");
} else if !surface_hdr && video_hdr {
tracing::warn!("Video format is HDR but surface does not support HDR");
}
let y_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some(&format!("{}-y", label)),
size: wgpu::Extent3d {
width: size.width,
height: size.height,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::R16Unorm,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let uv_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some(&format!("{}-uv", label)),
size: wgpu::Extent3d {
width: size.width / 2,
height: size.height / 2,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rg16Unorm,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("iced-video-conversion-matrix-buffer"),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
size: core::mem::size_of::<ConversionMatrix>() as wgpu::BufferAddress,
mapped_at_creation: false,
});
VideoTexture {
y: y_texture,
uv: uv_texture,
size,
surface_format,
video_format,
conversion_matrix_buffer: buffer,
}
}
// This return the surface texture format, not the video pixel format
pub fn format(&self) -> wgpu::TextureFormat {
self.surface_format
}
pub fn y_texture(&self) -> wgpu::TextureView {
self.y.create_view(&wgpu::TextureViewDescriptor::default())
}
pub fn uv_texture(&self) -> wgpu::TextureView {
self.uv.create_view(&wgpu::TextureViewDescriptor::default())
}
pub fn resize(&self, name: &str, new_size: wgpu::Extent3d, device: &wgpu::Device) -> Self {
VideoTexture::new(name, new_size, device, self.format(), self.pixel_format())
}
pub fn pixel_format(&self) -> VideoFormat {
self.video_format
}
/// This assumes that the data is laid out correctly for the texture format.
pub fn write_texture(&self, data: &[u8], queue: &wgpu::Queue) {
let Self { y, uv, .. } = self;
let y_size = y.size();
let uv_size = uv.size();
let y_data_size = (y_size.width * y_size.height * 2) as usize;
let uv_data_size = (y_data_size / 2) as usize; // UV is interleaved
let y_data = &data[0..y_data_size];
let uv_data = &data[y_data_size..y_data_size + uv_data_size];
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: y,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
y_data,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(y_size.width * 2),
rows_per_image: None,
},
y_size,
);
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: uv,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
uv_data,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(uv_size.width * 4),
rows_per_image: None,
},
uv_size,
);
}
pub fn write_conversion_matrix(&self, matrix: &ConversionMatrix, queue: &wgpu::Queue) {
queue.write_buffer(
&self.conversion_matrix_buffer,
0,
bytemuck::bytes_of(matrix),
);
}
pub fn conversion_matrix_buffer(&self) -> &wgpu::Buffer {
&self.conversion_matrix_buffer
}
}
#[derive(Debug)]
pub struct VideoFrameData {
id: id::Id,
texture: wgpu::Texture,
buffer: wgpu::Buffer,
texture: VideoTexture,
bind_group: wgpu::BindGroup,
conversion_matrix: ConversionMatrix,
ready: Arc<AtomicBool>,
}
impl VideoFrameData {
pub fn is_hdr(&self) -> bool {
self.texture.format().is_wide()
}
}
#[derive(Debug)]
pub struct VideoPipeline {
pipeline: wgpu::RenderPipeline,
bind_group_layout: wgpu::BindGroupLayout,
sampler: wgpu::Sampler,
videos: BTreeMap<id::Id, VideoTextures>,
format: wgpu::TextureFormat,
videos: BTreeMap<id::Id, VideoFrameData>,
}
pub trait HdrTextureFormatExt {
fn is_hdr(&self) -> bool;
pub trait WideTextureFormatExt {
fn is_wide(&self) -> bool;
}
impl HdrTextureFormatExt for wgpu::TextureFormat {
fn is_hdr(&self) -> bool {
impl WideTextureFormatExt for wgpu::TextureFormat {
fn is_wide(&self) -> bool {
matches!(
self,
wgpu::TextureFormat::Rgba16Float
| wgpu::TextureFormat::Rgba32Float
| wgpu::TextureFormat::Rgb10a2Unorm
| wgpu::TextureFormat::Rgb10a2Uint
| wgpu::TextureFormat::P010
)
}
}
@@ -204,15 +468,14 @@ impl Pipeline for VideoPipeline {
where
Self: Sized,
{
if format.is_hdr() {
if format.is_wide() {
tracing::info!("HDR texture format detected: {:?}", format);
}
let shader_passthrough =
device.create_shader_module(wgpu::include_wgsl!("shaders/passthrough.wgsl"));
let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("iced-video-texture-bind-group-layout"),
entries: &[
// y
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
@@ -223,15 +486,40 @@ impl Pipeline for VideoPipeline {
},
count: None,
},
// uv
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
multisampled: false,
view_dimension: wgpu::TextureViewDimension::D2,
sample_type: wgpu::TextureSampleType::Float { filterable: true },
},
count: None,
},
// sampler
wgpu::BindGroupLayoutEntry {
binding: 2,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
// conversion matrix
wgpu::BindGroupLayoutEntry {
binding: 3,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
},
],
});
let shader_passthrough =
device.create_shader_module(wgpu::include_wgsl!("shaders/passthrough.wgsl"));
let render_pipeline_layout =
device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("iced-video-render-pipeline-layout"),
@@ -252,7 +540,7 @@ impl Pipeline for VideoPipeline {
entry_point: Some("fs_main"),
targets: &[Some(wgpu::ColorTargetState {
format,
blend: Some(wgpu::BlendState::ALPHA_BLENDING),
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
compilation_options: wgpu::PipelineCompilationOptions::default(),

View File

@@ -1,9 +1,7 @@
// Vertex shader
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
@location(0) tex_coords: vec2<f32>,
};
}
@vertex
fn vs_main(
@@ -17,15 +15,16 @@ fn vs_main(
return out;
}
// Fragment shader
@group(0) @binding(0)
var t_diffuse: texture_2d<f32>;
@group(0) @binding(1)
var s_diffuse: sampler;
@group(0) @binding(0) var y_texture: texture_2d<f32>;
@group(0) @binding(1) var uv_texture: texture_2d<f32>;
@group(0) @binding(2) var texture_sampler: sampler;
@group(0) @binding(3) var<uniform> rgb_primaries: mat3x3<f32>;
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
return textureSample(t_diffuse, s_diffuse, in.tex_coords);
fn fs_main(input: VertexOutput) -> @location(0) vec4<f32> {
let y = textureSample(y_texture, texture_sampler, input.tex_coords).r;
let uv = textureSample(uv_texture, texture_sampler, input.tex_coords).rg;
let yuv = vec3f(y, uv.x - 0.5, uv.y - 0.5);
return vec4f(yuv * rgb_primaries, 1.0);
}

View File

@@ -13,11 +13,11 @@ use std::sync::{Arc, Mutex, atomic::AtomicBool};
#[derive(Debug, Clone)]
pub struct VideoSource {
pub(crate) playbin: Playbin3,
pub(crate) videoconvert: VideoConvert,
pub(crate) appsink: AppSink,
pub(crate) bus: Bus,
pub(crate) ready: Arc<AtomicBool>,
pub(crate) frame: Arc<Mutex<gst::Sample>>,
pub(crate) size: std::sync::OnceLock<(i32, i32)>,
}
impl VideoSource {
@@ -26,22 +26,12 @@ impl VideoSource {
/// now.
pub fn new(url: impl AsRef<str>) -> Result<Self> {
Gst::new();
let videoconvert = VideoConvert::new("iced-video-convert")
// .change_context(Error)?
// .with_output_format(gst::plugins::videoconvertscale::VideoFormat::Rgba)
.change_context(Error)?;
let mut appsink = AppSink::new("iced-video-sink").change_context(Error)?;
appsink
.drop(true)
.sync(true)
// .async_(true)
.emit_signals(true)
.caps(
Caps::builder(CapsType::Video)
.field("format", "RGB10A2_LE") // Forced for now
.build(),
);
let video_sink = videoconvert.link(&appsink).change_context(Error)?;
.emit_signals(true);
let playbin = Playbin3::new("iced-video")
.change_context(Error)?
.with_uri(url.as_ref())
@@ -49,13 +39,13 @@ impl VideoSource {
.with_buffer_size(4096 * 4096 * 4 * 3)
.with_ring_buffer_max_size(4096 * 4096 * 4 * 3)
.with_flags(Playbin3::default_flags() | PlayFlags::DOWNLOAD)
.with_video_sink(&video_sink);
.with_video_sink(&appsink);
let bus = playbin.bus().change_context(Error)?;
playbin.pause().change_context(Error)?;
let ready = Arc::new(AtomicBool::new(false));
let frame = Arc::new(Mutex::new(gst::Sample::new()));
appsink.on_new_frame({
appsink.on_new_sample({
let ready = Arc::clone(&ready);
let frame = Arc::clone(&frame);
move |appsink| {
@@ -74,20 +64,58 @@ impl VideoSource {
Ok(Self {
playbin,
videoconvert,
appsink,
bus,
ready,
frame,
size: std::sync::OnceLock::new(),
})
}
pub async fn wait(self) -> Result<()> {
pub async fn wait(&self) -> Result<()> {
use futures_lite::StreamExt;
// self.bus_stream()
// .for_each(|msg: gst::Message| {
// use gst::gstreamer::prelude::*;
// match msg.view() {
// MessageView::Eos(_) => {
// tracing::info!("Video reached end of stream");
// }
// MessageView::Error(err) => {
// tracing::error!(
// "Video Error from {:?}: {} ({:?})",
// err.src().map(|s| s.path_string()),
// err.error(),
// err.debug()
// );
// }
// view => tracing::info!("Video Message: {:#?}", view),
// }
// })
// .await;
self.playbin
.wait_for_states(&[gst::State::Paused, gst::State::Playing])
.await
.change_context(Error)
.attach("Failed to wait for video initialisation")
.attach("Failed to wait for video initialisation")?;
Ok(())
}
/// Query the negotiated video format from the appsink's sink-pad caps.
///
/// # Errors
/// Fails if the pad has no current caps yet (caps negotiation has not
/// finished) or if the caps carry no recognizable format field.
pub fn format(&self) -> Result<gst::VideoFormat> {
    let caps = self
        .appsink
        .sink("sink")
        .current_caps()
        .change_context(Error)?;
    caps.format()
        .ok_or(Error)
        .attach("Failed to get video caps structure")
}
pub fn bus_stream(&self) -> impl futures_lite::Stream<Item = gst::Message> {
self.bus.stream()
}
pub fn is_playing(&self) -> Result<bool> {
@@ -126,14 +154,20 @@ impl VideoSource {
}
pub fn size(&self) -> Result<(i32, i32)> {
if let Some(size) = self.size.get() {
return Ok(*size);
}
let caps = self
.appsink
.sink("sink")
.current_caps()
.change_context(Error)?;
caps.width()
let out = caps
.width()
.and_then(|width| caps.height().map(|height| (width, height)))
.ok_or(Error)
.attach("Failed to get width, height")
.attach("Failed to get width, height")?;
self.size.set(out);
Ok(out)
}
}

View File

@@ -0,0 +1,258 @@
use super::*;
use iced::Length;
use iced_core as iced;
use iced_wgpu::primitive::Renderer as PrimitiveRenderer;
use std::marker::PhantomData;
/// This is the Video widget that displays a video.
/// This should be used in the view function.
pub struct Video<'a, Message, Theme = iced::Theme, Renderer = iced_wgpu::Renderer>
where
Renderer: PrimitiveRenderer,
{
// Identifier shared with the handle; used by the render pipeline to look
// up per-video GPU state, and hashed for subscriptions.
id: id::Id,
// Borrowed handle to an initialised (`Ready`) video; the widget never owns
// the playback state.
handle: &'a VideoHandle<Message, Ready>,
// Negotiated format, captured once at construction time.
video_format: gst::VideoFormat,
// How the video is scaled into the widget bounds (default: Contain).
content_fit: iced::ContentFit,
width: iced::Length,
height: iced::Length,
// NOTE(review): `looping` is settable via the builder but not read in the
// visible Widget impl — confirm it is consumed elsewhere.
looping: bool,
// Carries the Theme/Renderer type parameters without storing values.
__marker: PhantomData<(Renderer, Theme)>,
}
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
where
    Renderer: PrimitiveRenderer,
    Message: Clone + Send + Sync,
{
    /// Build a video widget over an already-initialised handle.
    ///
    /// Defaults: `ContentFit::Contain`, `Length::Shrink` for both axes,
    /// looping disabled.
    ///
    /// # Panics
    /// Panics if the negotiated video format cannot be queried; the `Ready`
    /// state parameter is expected to guarantee negotiation has finished.
    pub fn new(handle: &'a VideoHandle<Message, Ready>) -> Self {
        Self {
            id: handle.id.clone(),
            // `handle` is already a reference; the original `&handle` was a
            // needless extra borrow (clippy::needless_borrow) that only
            // compiled via deref coercion.
            handle,
            video_format: handle
                .format()
                .expect("Failed to get video format during widget creation"),
            content_fit: iced::ContentFit::Contain,
            width: Length::Shrink,
            height: Length::Shrink,
            looping: false,
            __marker: PhantomData,
        }
    }
}
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
where
    Renderer: PrimitiveRenderer,
{
    /// Set the widget's requested width.
    pub fn width(self, width: Length) -> Self {
        Self { width, ..self }
    }

    /// Set the widget's requested height.
    pub fn height(self, height: Length) -> Self {
        Self { height, ..self }
    }

    /// Choose how the video is scaled into the widget bounds.
    pub fn content_fit(self, fit: iced::ContentFit) -> Self {
        Self {
            content_fit: fit,
            ..self
        }
    }

    /// Enable or disable looping playback.
    pub fn looping(self, looping: bool) -> Self {
        Self { looping, ..self }
    }
}
impl<Message, Theme, Renderer> iced::Widget<Message, Theme, Renderer>
for Video<'_, Message, Theme, Renderer>
where
Message: Clone + Send + Sync,
Renderer: PrimitiveRenderer,
{
fn size(&self) -> iced::Size<Length> {
iced::Size {
width: self.width,
height: self.height,
}
}
// The video player should take max space by default
fn layout(
&mut self,
_tree: &mut iced::widget::Tree,
_renderer: &Renderer,
limits: &iced::layout::Limits,
) -> iced::layout::Node {
iced::layout::Node::new(limits.max())
}
// Draws the current video frame as a custom wgpu primitive, scaled and
// positioned inside the layout bounds according to `content_fit`.
fn draw(
&self,
tree: &iced::widget::Tree,
renderer: &mut Renderer,
theme: &Theme,
style: &iced::renderer::Style,
layout: iced::Layout<'_>,
cursor: iced::mouse::Cursor,
viewport: &iced::Rectangle,
) {
// If the source size is not yet known (caps not negotiated), draw nothing.
if let Ok((width, height)) = self.handle.source.size() {
let video_size = iced::Size {
width: width as f32,
height: height as f32,
};
let bounds = layout.bounds();
// Resolve the fitted size, then the per-axis scale relative to the
// native video size.
let adjusted_fit = self.content_fit.fit(video_size, bounds.size());
let scale = iced::Vector::new(
adjusted_fit.width / video_size.width,
adjusted_fit.height / video_size.height,
);
let final_size = video_size * scale;
let position = match self.content_fit {
// NOTE(review): for ContentFit::None this offsets by
// (video - fit)/2 from the bounds origin rather than centering
// `final_size` — presumably mirroring iced's image widget;
// confirm against that implementation.
iced::ContentFit::None => iced::Point::new(
bounds.x + (video_size.width - adjusted_fit.width) / 2.0,
bounds.y + (video_size.height - adjusted_fit.height) / 2.0,
),
// All other fits center the scaled video inside the bounds.
_ => iced::Point::new(
bounds.center_x() - final_size.width / 2.0,
bounds.center_y() - final_size.height / 2.0,
),
};
let drawing_bounds = iced::Rectangle::new(position, final_size);
// Deferred so we can optionally wrap the draw in a clipping layer.
let render = |renderer: &mut Renderer| {
renderer.draw_primitive(
drawing_bounds,
primitive::VideoFrame {
id: self.id.clone(),
size: iced_wgpu::wgpu::Extent3d {
width: width as u32,
height: height as u32,
depth_or_array_layers: 1,
},
ready: Arc::clone(&self.handle.frame_ready),
frame: Arc::clone(&self.handle.source.frame),
format: self.video_format,
},
);
};
// Clip via a layer only when the fitted video overflows the bounds
// (e.g. ContentFit::Cover); otherwise draw directly.
if adjusted_fit.width > bounds.width || adjusted_fit.height > bounds.height {
renderer.with_layer(bounds, render);
} else {
render(renderer);
}
}
}
// Drives redraws: repaint immediately when a new frame is ready, otherwise
// schedule the next check roughly one 60 Hz frame (16 ms) ahead.
fn update(
&mut self,
_tree: &mut iced_core::widget::Tree,
event: &iced::Event,
_layout: iced_core::Layout<'_>,
_cursor: iced_core::mouse::Cursor,
_renderer: &Renderer,
_clipboard: &mut dyn iced_core::Clipboard,
shell: &mut iced_core::Shell<'_, Message>,
_viewport: &iced::Rectangle,
) {
if let iced::Event::Window(iced::window::Event::RedrawRequested(when)) = event {
if self
.handle
.frame_ready
.load(std::sync::atomic::Ordering::SeqCst)
{
shell.request_redraw();
} else {
// NOTE(review): `now() + 16ms - when.elapsed()` — `Instant`
// arithmetic panics on underflow; if the redraw event is more
// than 16 ms old this subtraction can panic. Consider
// checked_sub / saturating behavior.
shell.request_redraw_at(iced::window::RedrawRequest::At(
iced_core::time::Instant::now() + core::time::Duration::from_millis(16)
- when.elapsed(),
));
}
}
}
}
// Allow `Video` to be used directly where an `Element` is expected,
// e.g. returned from a `view` function without an explicit `.into()` chain.
impl<'a, Message, Theme, Renderer> From<Video<'a, Message, Theme, Renderer>>
for iced::Element<'a, Message, Theme, Renderer>
where
Message: Send + Sync + 'a + Clone,
Theme: 'a,
Renderer: 'a + iced_wgpu::primitive::Renderer,
{
fn from(video: Video<'a, Message, Theme, Renderer>) -> Self {
Self::new(video)
}
}
/// Recipe state for subscribing to playback events of a single video.
///
/// Each optional message is emitted when the corresponding GStreamer bus
/// message is observed; `None` means the caller is not interested.
#[derive(Debug, Clone)]
pub struct VideoSubscription<Message> {
// Identifies the video; also the sole input to the subscription hash.
pub(crate) id: id::Id,
pub(crate) on_end_of_stream: Option<Box<Message>>,
pub(crate) on_new_frame: Option<Box<Message>>,
// NOTE(review): set but not consumed in the visible `stream` impl —
// confirm about-to-finish handling is implemented elsewhere.
pub(crate) on_about_to_finish: Option<Box<Message>>,
// on_subtitle_text: Option<Box<dyn Fn(Option<String>) -> Message>>,
// on_error: Option<Box<dyn Fn(&glib::Error) -> Message>>,
pub(crate) bus: gst::Bus,
}
// Placeholder impl; currently declares no items.
impl<Message> VideoSubscription<Message> where Message: Clone {}
impl<Message> iced_futures::subscription::Recipe for VideoSubscription<Message>
where
Message: Clone + Send + Sync + 'static,
{
type Output = Message;
// Two subscriptions for the same video id hash identically, so iced
// deduplicates them across updates.
fn hash(&self, state: &mut iced_futures::subscription::Hasher) {
use std::hash::Hash;
self.id.hash(state);
}
// Maps GStreamer bus messages to user messages: Eos -> on_end_of_stream,
// the custom "GstVideoFrameReady" element message -> on_new_frame.
fn stream(
self: Box<Self>,
_input: core::pin::Pin<
Box<dyn iced_futures::futures::Stream<Item = iced_futures::subscription::Event> + Send>,
>,
) -> core::pin::Pin<Box<dyn iced_futures::futures::Stream<Item = Self::Output> + Send>> {
// use iced_futures::futures::StreamExt;
use futures_lite::stream::StreamExt;
Box::pin(
// Only Eos and Element messages are pulled off the bus; everything
// else is filtered at the source.
self.bus
.filtered_stream(&[gst::MessageType::Eos, gst::MessageType::Element])
.filter_map({
let eos = self.on_end_of_stream.clone();
let frame = self.on_new_frame.clone();
move |message: gst::Message| match message.view() {
gst::MessageView::Eos(_) => eos.clone().map(|m| *m),
gst::MessageView::Element(element_msg) => {
let structure = element_msg.structure();
if let Some(structure) = structure {
// Custom message name posted by the sink when a
// new frame has been decoded.
if structure.name() == "GstVideoFrameReady" {
frame.clone().map(|m| *m)
} else {
None
}
} else {
None
}
}
_ => None,
}
}),
)
}
}

View File

@@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
# gst = { workspace = true }
wgpu = "*"
wgpu = "27"
gstreamer = { version = "0.24.4", features = ["v1_26"] }
gstreamer-app = { version = "0.24.4", features = ["v1_26"] }
gstreamer-base = { version = "0.24.4", features = ["v1_26"] }

35
flake.lock generated
View File

@@ -3,11 +3,11 @@
"advisory-db": {
"flake": false,
"locked": {
"lastModified": 1766435619,
"narHash": "sha256-3A5Z5K28YB45REOHMWtyQ24cEUXW76MOtbT6abPrARE=",
"lastModified": 1768679419,
"narHash": "sha256-l9rM4lXBeS2mIAJsJjVfl0UABx3S3zg5tul7bv+bn50=",
"owner": "rustsec",
"repo": "advisory-db",
"rev": "a98dbc80b16730a64c612c6ab5d5fecb4ebb79ba",
"rev": "c700e1cd023ca87343cbd9217d50d47023e9adc7",
"type": "github"
},
"original": {
@@ -18,11 +18,11 @@
},
"crane": {
"locked": {
"lastModified": 1766194365,
"narHash": "sha256-4AFsUZ0kl6MXSm4BaQgItD0VGlEKR3iq7gIaL7TjBvc=",
"lastModified": 1768873933,
"narHash": "sha256-CfyzdaeLNGkyAHp3kT5vjvXhA1pVVK7nyDziYxCPsNk=",
"owner": "ipetkov",
"repo": "crane",
"rev": "7d8ec2c71771937ab99790b45e6d9b93d15d9379",
"rev": "0bda7e7d005ccb5522a76d11ccfbf562b71953ca",
"type": "github"
},
"original": {
@@ -34,10 +34,10 @@
"crates-io-index": {
"flake": false,
"locked": {
"lastModified": 1763363725,
"narHash": "sha256-cxr5xIKZFP45yV1ZHFTB1sHo5YGiR3FA8D9vAfDizMo=",
"lastModified": 1769614137,
"narHash": "sha256-3Td8fiv6iFVxeS0hYq3xdd10ZvUkC9INMAiQx/mECas=",
"ref": "refs/heads/master",
"rev": "0382002e816a4cbd17d8d5b172f08b848aa22ff6",
"rev": "c7e7d6394bc95555d6acd5c6783855f47d64c90d",
"shallow": true,
"type": "git",
"url": "https://github.com/rust-lang/crates.io-index"
@@ -50,7 +50,9 @@
},
"crates-nix": {
"inputs": {
"crates-io-index": "crates-io-index"
"crates-io-index": [
"crates-io-index"
]
},
"locked": {
"lastModified": 1763364255,
@@ -106,11 +108,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1766309749,
"narHash": "sha256-3xY8CZ4rSnQ0NqGhMKAy5vgC+2IVK0NoVEzDoOh4DA4=",
"lastModified": 1768564909,
"narHash": "sha256-Kell/SpJYVkHWMvnhqJz/8DqQg2b6PguxVWOuadbHCc=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a6531044f6d0bef691ea18d4d4ce44d0daa6e816",
"rev": "e4bae1bd10c9c57b2cf517953ab70060a828ee6f",
"type": "github"
},
"original": {
@@ -124,6 +126,7 @@
"inputs": {
"advisory-db": "advisory-db",
"crane": "crane",
"crates-io-index": "crates-io-index",
"crates-nix": "crates-nix",
"flake-utils": "flake-utils",
"nix-github-actions": "nix-github-actions",
@@ -138,11 +141,11 @@
]
},
"locked": {
"lastModified": 1766371695,
"narHash": "sha256-W7CX9vy7H2Jj3E8NI4djHyF8iHSxKpb2c/7uNQ/vGFU=",
"lastModified": 1768877311,
"narHash": "sha256-abSDl0cNr0B+YCsIDpO1SjXD9JMxE4s8EFnhLEFVovI=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "d81285ba8199b00dc31847258cae3c655b605e8c",
"rev": "59e4ab96304585fde3890025fd59bd2717985cc1",
"type": "github"
},
"original": {

View File

@@ -9,7 +9,14 @@
url = "github:nix-community/nix-github-actions";
inputs.nixpkgs.follows = "nixpkgs";
};
crates-nix.url = "github:uttarayan21/crates.nix";
crates-io-index = {
url = "git+https://github.com/rust-lang/crates.io-index?shallow=1";
flake = false;
};
crates-nix = {
url = "github:uttarayan21/crates.nix";
inputs.crates-io-index.follows = "crates-io-index";
};
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
@@ -88,7 +95,6 @@
glib
glib-networking
cudatoolkit
wrapGAppsHook4
# bzip2_1_1
@@ -104,6 +110,7 @@
++ (lib.optionals pkgs.stdenv.isLinux [
gst_all_1.gstreamermm
gst_all_1.gst-vaapi
cudatoolkit
# util-linux
# libselinux
@@ -179,38 +186,49 @@
devShells = rec {
rust-shell =
pkgs.mkShell.override {
stdenv =
if pkgs.stdenv.isLinux
then (pkgs.stdenvAdapters.useMoldLinker pkgs.clangStdenv)
else pkgs.clangStdenv;
} (commonArgs
stdenv = pkgs.clangStdenv;
# if pkgs.stdenv.isLinux
# then (pkgs.stdenvAdapters.useMoldLinker pkgs.clangStdenv)
# else pkgs.clangStdenv;
}
(commonArgs
// {
# GST_PLUGIN_PATH = "/run/current-system/sw/lib/gstreamer-1.0/";
GIO_EXTRA_MODULES = "${pkgs.glib-networking}/lib/gio/modules";
packages = with pkgs;
[
toolchainWithRustAnalyzer
cargo-nextest
bacon
cargo-audit
cargo-deny
cargo-expand
bacon
cargo-make
cargo-hack
cargo-make
cargo-nextest
cargo-outdated
lld
lldb
cargo-audit
(crates.buildCrate "cargo-with" {doCheck = false;})
(crates.buildCrate "dioxus-cli" {
nativeBuildInputs = with pkgs; [pkg-config];
buildInputs = [openssl];
doCheck = false;
})
(crates.buildCrate "cargo-hot" {
nativeBuildInputs = with pkgs; [pkg-config];
buildInputs = [openssl];
})
]
++ (lib.optionals pkgs.stdenv.isDarwin [
apple-sdk_26
])
++ (lib.optionals pkgs.stdenv.isLinux [
ffmpeg
heaptrack
samply
cargo-flamegraph
perf
mold
# mold
]);
});
default = rust-shell;

View File

@@ -17,6 +17,7 @@ pub use element::*;
pub use gstreamer;
#[doc(inline)]
pub use gstreamer::{Message, MessageType, MessageView, State};
pub use gstreamer_video::VideoFormat;
pub use pad::*;
pub use pipeline::*;
pub use plugins::*;

View File

@@ -6,9 +6,71 @@ pub use gstreamer_app::AppSinkCallbacks;
wrap_gst!(AppSink, gstreamer::Element);
parent_child!(Element, AppSink);
/// Builder for an [`AppSink`] that accumulates GStreamer callbacks
/// before installing them all at once in [`AppSinkBuilder::build`].
pub struct AppSinkBuilder {
// The element being configured; returned as-is by `build`.
inner: AppSink,
// Lazily-created callbacks builder; `None` until the first `on_*` call.
callbacks: Option<gstreamer_app::app_sink::AppSinkCallbacksBuilder>,
}
impl AppSinkBuilder {
    /// Adapt a user callback taking our [`AppSink`] wrapper into the closure
    /// shape expected by `gstreamer_app`'s callbacks builder. Panics inside
    /// the callback are caught and converted into `FlowError::Error` instead
    /// of unwinding across the FFI boundary.
    ///
    /// This factors out the adaptor previously duplicated verbatim between
    /// `on_new_sample` and `on_new_preroll`.
    fn adapt<F>(
        mut f: F,
    ) -> impl FnMut(&gstreamer_app::AppSink) -> Result<gstreamer::FlowSuccess, gstreamer::FlowError>
    + Send
    + 'static
    where
        F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
    {
        move |appsink| {
            use glib::object::Cast;
            let element = appsink.upcast_ref::<gstreamer::Element>();
            let appsink = AppSink::from_gst_ref(element);
            std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(appsink)))
                .unwrap_or(Err(gstreamer::FlowError::Error))
                .map(|_| gstreamer::FlowSuccess::Ok)
        }
    }

    /// Take the pending callbacks builder, creating a fresh one on first use,
    /// so successive `on_*` calls accumulate onto the same builder.
    fn take_callbacks_builder(&mut self) -> gstreamer_app::app_sink::AppSinkCallbacksBuilder {
        self.callbacks
            .take()
            .unwrap_or_else(gstreamer_app::app_sink::AppSinkCallbacks::builder)
    }

    /// Register a callback invoked for every new sample.
    pub fn on_new_sample<F>(mut self, f: F) -> Self
    where
        F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
    {
        let callbacks_builder = self.take_callbacks_builder().new_sample(Self::adapt(f));
        self.callbacks = Some(callbacks_builder);
        self
    }

    /// Register a callback invoked for every preroll sample.
    pub fn on_new_preroll<F>(mut self, f: F) -> Self
    where
        F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
    {
        let callbacks_builder = self.take_callbacks_builder().new_preroll(Self::adapt(f));
        self.callbacks = Some(callbacks_builder);
        self
    }

    /// Finalise the builder, installing any accumulated callbacks on the sink.
    pub fn build(self) -> AppSink {
        let AppSinkBuilder { inner, callbacks } = self;
        if let Some(callbacks) = callbacks {
            inner.appsink().set_callbacks(callbacks.build());
        }
        inner
    }
}
impl Sink for AppSink {}
impl AppSink {
/// Start building an `AppSink` element with the given element name.
///
/// Panics if the underlying GStreamer element cannot be created
/// (e.g. the appsink plugin is unavailable).
pub fn builder(name: impl AsRef<str>) -> AppSinkBuilder {
    AppSinkBuilder {
        inner: AppSink::new(name).expect("Failed to create AppSink"),
        callbacks: None,
    }
}
fn appsink(&self) -> &gstreamer_app::AppSink {
self.inner
.downcast_ref::<gstreamer_app::AppSink>()
@@ -54,7 +116,7 @@ impl AppSink {
self
}
pub fn on_new_frame<F>(&mut self, mut f: F) -> &mut Self
pub fn on_new_sample<F>(&mut self, mut f: F) -> &mut Self
where
F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
{

View File

@@ -1,5 +1,7 @@
iced-video:
cd crates/iced-video && cargo run --release --example minimal
jello:
cargo r -r -- -vv
# iced-video:
# cd crates/iced-video && cargo run --release --example minimal
typegen:
@echo "Generating jellyfin type definitions..."
cd typegen && cargo run
@@ -10,6 +12,7 @@ hdrtest:
GST_DEBUG=3 gst-launch-1.0 playbin3 uri=https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c video-sink="videoconvert ! video/x-raw,format=(string)RGB10A2_LE ! fakesink"
codec:
GST_DEBUG=3 gst-discoverer-1.0 -v https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c
GST_DEBUG=3 gst-discoverer-1.0 https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c
ffprobe:
ffprobe -v error -show_format -show_streams "https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c" | grep pix_fmt

View File

@@ -8,6 +8,7 @@ bson = { version = "3.1.0", features = ["serde"] }
futures = "0.3.31"
parking_lot = "0.12.5"
redb = { version = "3.1.0", features = ["uuid"] }
secrecy = "0.10.3"
serde = "1.0.228"
tokio = { version = "1.48.0", features = ["rt"] }
uuid = "1.18.1"
uuid = { version = "1.18.1", features = ["v4"] }

View File

@@ -1,10 +1,10 @@
pub mod redb;
pub mod sqlite;
pub mod toml;
use std::collections::BTreeMap;
pub trait Store {
fn image(&self, id: &str) -> Option<Vec<u8>>;
fn save_image(&mut self, id: &str, data: &[u8]);
use uuid::Uuid;
pub struct ApiKey {
inner: secrecy::SecretBox<String>,
}
pub struct SecretStore {
api_keys: BTreeMap<Uuid, ApiKey>,
}
pub struct Settings {}

View File

@@ -1,225 +1,225 @@
use std::{
borrow::Borrow,
collections::VecDeque,
marker::PhantomData,
path::Path,
sync::{Arc, RwLock, atomic::AtomicBool},
};
use futures::task::AtomicWaker;
use redb::{Error, Key, ReadableDatabase, TableDefinition, Value};
use serde::{Serialize, de::DeserializeOwned};
const USERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("users");
const SERVERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("servers");
const SETTINGS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("settings");
/// Shared access to the backing database for one logical table.
///
/// Wraps the database handle in an `Arc` so each table view can be cloned
/// cheaply; manual `Clone` avoids requiring `T: Clone`.
#[derive(Debug)]
pub struct TableInner<T> {
db: Arc<T>,
}
impl<T> Clone for TableInner<T> {
fn clone(&self) -> Self {
Self {
db: Arc::clone(&self.db),
}
}
}
impl<T> TableInner<T> {
// Internal constructor; callers share the same handle via `Arc`.
fn new(db: Arc<T>) -> Self {
Self { db }
}
}
impl TableInner<DatabaseHandle> {
// Read one value from `table`, BSON-deserialising it into `V`.
// Returns Ok(None) when the key is absent; deserialisation failures are
// surfaced as `redb::Error::Io`.
async fn get<'a, K: Key, V: Serialize + DeserializeOwned>(
&self,
table: TableDefinition<'static, K, Vec<u8>>,
key: impl Borrow<K::SelfType<'a>>,
) -> Result<Option<V>> {
let db: &redb::Database = &self.db.as_ref().database;
let db_reader = db.begin_read()?;
let table = db_reader.open_table(table)?;
table
.get(key)?
.map(|value| bson::deserialize_from_slice(&value.value()))
.transpose()
.map_err(|e| redb::Error::Io(std::io::Error::other(e)))
}
// Insert a BSON-serialised value, returning the previous value for the
// key (deserialised) if one existed. The write transaction runs on the
// blocking thread pool so it does not stall the async executor.
//
// NOTE(review): `tokio::task::spawn_blocking` requires a `'static`
// closure, but this one captures `db`, `key`, and `value` by reference /
// non-'static lifetime — confirm this actually compiles as written, or
// whether the work-in-progress comments above indicate a pending
// restructuring (e.g. moving owned data into the closure).
async fn insert<
'a,
'b,
K: Key + Send + Sync,
V: Serialize + DeserializeOwned + Send + Sync + 'a,
>(
&'b self,
table: TableDefinition<'static, K, Vec<u8>>,
key: impl Borrow<K::SelfType<'a>> + Send + 'b,
value: V,
) -> Result<Option<V>> {
let db: &redb::Database = &self.db.as_ref().database;
// self.db
// .writing
// .store(true, std::sync::atomic::Ordering::SeqCst);
// let out = tokio::task::spawn_blocking(move || -> Result<Option<V>>
let out = tokio::task::spawn_blocking(|| -> Result<Option<V>> {
let db_writer = db.begin_write()?;
let out = {
let mut table = db_writer.open_table(table)?;
let serialized_value = bson::serialize_to_vec(&value)
.map_err(|e| redb::Error::Io(std::io::Error::other(e)))?;
let previous = table.insert(key, &serialized_value)?;
let out = previous
.map(|value| bson::deserialize_from_slice(&value.value()))
.transpose()
.map_err(|e| redb::Error::Io(std::io::Error::other(e)));
out
};
// Commit only after the open-table scope has dropped its borrow.
db_writer.commit()?;
out
})
.await
.expect("Task panicked");
out
}
}
// impl<K: Key, V: Serialize + DeserializeOwned> Table<K, V> for TableInner {
// async fn get(&self, key: K) -> Result<Option<Value>> {}
// async fn insert(&self, key: K, value: V) -> Result<Option<Value>> {}
// async fn modify(&self, key: K, v: FnOnce(V) -> V) -> Result<bool> {}
// async fn remove(&self, key: K) -> Result<Option<Value>> {}
// }
// Typed views over the shared database, one per redb table. The `Clone`
// impls are written by hand (rather than derived) — presumably to avoid the
// derive's implicit `T: Clone` bound, since only the inner `Arc` is cloned.
#[derive(Debug)]
pub struct Users<T>(TableInner<T>);
impl<T> Clone for Users<T> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<T> Users<T> {
// Table definition this view operates on.
const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = USERS;
}
#[derive(Debug)]
pub struct Servers<T>(TableInner<T>);
impl<T> Clone for Servers<T> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<T> Servers<T> {
const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SERVERS;
}
#[derive(Debug)]
pub struct Settings<T>(TableInner<T>);
impl<T> Clone for Settings<T> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<T> Settings<T> {
const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SETTINGS;
}
/// Application database: typed table views plus the shared handle they all
/// point at. Cloning is cheap — every field is (or wraps) an `Arc`.
#[derive(Debug, Clone)]
pub struct Database {
users: Users<DatabaseHandle>,
servers: Servers<DatabaseHandle>,
settings: Settings<DatabaseHandle>,
handle: Arc<DatabaseHandle>,
}
/// Shared state behind the `Arc`: the redb database plus write-coordination
/// scaffolding.
#[derive(Debug)]
pub struct DatabaseHandle {
database: redb::Database,
// NOTE(review): `writing` and `wakers` are initialised but the code that
// uses them (a writer-guard Drop impl) is commented out elsewhere —
// confirm whether this coordination is still planned.
writing: AtomicBool,
wakers: RwLock<VecDeque<AtomicWaker>>,
}
/// RAII-style guard intended to mark an in-progress write; its Drop impl is
/// currently commented out, so it presently does nothing on drop.
#[derive(Debug)]
pub struct DatabaseWriterGuard<'a> {
handle: &'a DatabaseHandle,
dropper: Arc<AtomicBool>,
}
// impl Drop for DatabaseWriterGuard<'_> {
// fn drop(&mut self) {
// self.handle
// .writing
// .store(false, std::sync::atomic::Ordering::SeqCst);
// let is_panicking = std::thread::panicking();
// let Ok(writer) = self.handle.wakers.write() else {
// if is_panicking {
// return;
// } else {
// panic!("Wakers lock poisoned");
// }
// }
// if let Some(waker) = (self.handle.wakers.write()).pop() {
// waker.wake();
// use std::{
// borrow::Borrow,
// collections::VecDeque,
// marker::PhantomData,
// path::Path,
// sync::{Arc, RwLock, atomic::AtomicBool},
// };
// // let mut wakers = self.handle.wakers.write().expect();
// // if let Some(waker) = self.handle.wakers.write().expect("Wakers lock poisoned").pop_front() {
// // waker.wake();
// // }
// // while let Some(waker) = wakers.pop_front() {
// // waker.wake();
// // }
//
// use futures::task::AtomicWaker;
// use redb::{Error, Key, ReadableDatabase, TableDefinition, Value};
// use serde::{Serialize, de::DeserializeOwned};
//
// const USERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("users");
// const SERVERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("servers");
// const SETTINGS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("settings");
//
// #[derive(Debug)]
// pub struct TableInner<T> {
// db: Arc<T>,
// }
//
// impl<T> Clone for TableInner<T> {
// fn clone(&self) -> Self {
// Self {
// db: Arc::clone(&self.db),
// }
// }
// }
//
// impl<T> TableInner<T> {
// fn new(db: Arc<T>) -> Self {
// Self { db }
// }
// }
//
// impl TableInner<DatabaseHandle> {
// async fn get<'a, K: Key, V: Serialize + DeserializeOwned>(
// &self,
// table: TableDefinition<'static, K, Vec<u8>>,
// key: impl Borrow<K::SelfType<'a>>,
// ) -> Result<Option<V>> {
// let db: &redb::Database = &self.db.as_ref().database;
// let db_reader = db.begin_read()?;
// let table = db_reader.open_table(table)?;
// table
// .get(key)?
// .map(|value| bson::deserialize_from_slice(&value.value()))
// .transpose()
// .map_err(|e| redb::Error::Io(std::io::Error::other(e)))
// }
//
// async fn insert<
// 'a,
// 'b,
// K: Key + Send + Sync,
// V: Serialize + DeserializeOwned + Send + Sync + 'a,
// >(
// &'b self,
// table: TableDefinition<'static, K, Vec<u8>>,
// key: impl Borrow<K::SelfType<'a>> + Send + 'b,
// value: V,
// ) -> Result<Option<V>> {
// let db: &redb::Database = &self.db.as_ref().database;
// // self.db
// // .writing
// // .store(true, std::sync::atomic::Ordering::SeqCst);
//
// // let out = tokio::task::spawn_blocking(move || -> Result<Option<V>>
//
// let out = tokio::task::spawn_blocking(|| -> Result<Option<V>> {
// let db_writer = db.begin_write()?;
// let out = {
// let mut table = db_writer.open_table(table)?;
// let serialized_value = bson::serialize_to_vec(&value)
// .map_err(|e| redb::Error::Io(std::io::Error::other(e)))?;
// let previous = table.insert(key, &serialized_value)?;
// let out = previous
// .map(|value| bson::deserialize_from_slice(&value.value()))
// .transpose()
// .map_err(|e| redb::Error::Io(std::io::Error::other(e)));
// out
// };
// db_writer.commit()?;
// out
// })
// .await
// .expect("Task panicked");
//
// out
// }
// }
//
// // impl<K: Key, V: Serialize + DeserializeOwned> Table<K, V> for TableInner {
// // async fn get(&self, key: K) -> Result<Option<Value>> {}
// // async fn insert(&self, key: K, value: V) -> Result<Option<Value>> {}
// // async fn modify(&self, key: K, v: FnOnce(V) -> V) -> Result<bool> {}
// // async fn remove(&self, key: K) -> Result<Option<Value>> {}
// // }
//
// #[derive(Debug)]
// pub struct Users<T>(TableInner<T>);
//
// impl<T> Clone for Users<T> {
// fn clone(&self) -> Self {
// Self(self.0.clone())
// }
// }
// impl<T> Users<T> {
// const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = USERS;
// }
//
// #[derive(Debug)]
// pub struct Servers<T>(TableInner<T>);
// impl<T> Clone for Servers<T> {
// fn clone(&self) -> Self {
// Self(self.0.clone())
// }
// }
// impl<T> Servers<T> {
// const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SERVERS;
// }
//
// #[derive(Debug)]
// pub struct Settings<T>(TableInner<T>);
// impl<T> Clone for Settings<T> {
// fn clone(&self) -> Self {
// Self(self.0.clone())
// }
// }
// impl<T> Settings<T> {
// const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SETTINGS;
// }
//
// #[derive(Debug, Clone)]
// pub struct Database {
// users: Users<DatabaseHandle>,
// servers: Servers<DatabaseHandle>,
// settings: Settings<DatabaseHandle>,
// handle: Arc<DatabaseHandle>,
// }
//
// #[derive(Debug)]
// pub struct DatabaseHandle {
// database: redb::Database,
// writing: AtomicBool,
// wakers: RwLock<VecDeque<AtomicWaker>>,
// }
//
// #[derive(Debug)]
// pub struct DatabaseWriterGuard<'a> {
// handle: &'a DatabaseHandle,
// dropper: Arc<AtomicBool>,
// }
//
// // impl Drop for DatabaseWriterGuard<'_> {
// // fn drop(&mut self) {
// // self.handle
// // .writing
// // .store(false, std::sync::atomic::Ordering::SeqCst);
// // let is_panicking = std::thread::panicking();
// // let Ok(writer) = self.handle.wakers.write() else {
// // if is_panicking {
// // return;
// // } else {
// // panic!("Wakers lock poisoned");
// // }
// // }
// // if let Some(waker) = (self.handle.wakers.write()).pop() {
// // waker.wake();
// // };
// // // let mut wakers = self.handle.wakers.write().expect();
// // // if let Some(waker) = self.handle.wakers.write().expect("Wakers lock poisoned").pop_front() {
// // // waker.wake();
// // // }
// // // while let Some(waker) = wakers.pop_front() {
// // // waker.wake();
// // // }
// // }
// // }
//
// type Result<O, E = redb::Error> = core::result::Result<O, E>;
//
// pub trait Table<K: Key> {
// fn insert<V: Serialize + DeserializeOwned>(
// &self,
// key: K,
// value: V,
// ) -> impl Future<Output = Result<Option<V>>> + Send;
// fn modify<V: Serialize + DeserializeOwned, O: Serialize + DeserializeOwned>(
// &self,
// key: K,
// v: impl FnOnce(V) -> O,
// ) -> impl Future<Output = Result<bool>> + Send;
// fn remove<V: Serialize + DeserializeOwned>(
// &self,
// key: K,
// ) -> impl Future<Output = Result<Option<V>>> + Send;
// fn get<V: Serialize + DeserializeOwned>(
// &self,
// key: K,
// ) -> impl Future<Output = Result<Option<V>>> + Send;
// }
//
// impl Database {
// pub fn create(path: impl AsRef<Path>) -> Result<Self, Error> {
// let writing = AtomicBool::new(false);
// let wakers = RwLock::new(VecDeque::new());
// let db = redb::Database::create(path)?;
// let db = Arc::new(DatabaseHandle {
// database: db,
// writing,
// wakers,
// });
// let table_inner = TableInner::new(Arc::clone(&db));
// let users = Users(table_inner.clone());
// let servers = Servers(table_inner.clone());
// let settings = Settings(table_inner.clone());
// Ok(Self {
// servers,
// users,
// settings,
// handle: db,
// })
// }
// }
/// Crate-local result alias; the error type defaults to [`redb::Error`].
type Result<O, E = redb::Error> = core::result::Result<O, E>;
/// Asynchronous key/value table backed by the database.
///
/// Values are serde-(de)serializable, so any `Serialize + DeserializeOwned`
/// type can be stored under a key of type `K`. All operations return `Send`
/// futures so they can be awaited from multi-threaded executors.
pub trait Table<K: Key> {
    /// Insert `value` under `key`; resolves to the previously stored value,
    /// if any.
    fn insert<V: Serialize + DeserializeOwned>(
        &self,
        key: K,
        value: V,
    ) -> impl Future<Output = Result<Option<V>>> + Send;
    /// Apply `v` to the value stored under `key`.
    /// NOTE(review): the `bool` result presumably reports whether an entry
    /// existed and was modified — confirm against the implementation.
    fn modify<V: Serialize + DeserializeOwned, O: Serialize + DeserializeOwned>(
        &self,
        key: K,
        v: impl FnOnce(V) -> O,
    ) -> impl Future<Output = Result<bool>> + Send;
    /// Remove the entry under `key`; resolves to the removed value, if any.
    fn remove<V: Serialize + DeserializeOwned>(
        &self,
        key: K,
    ) -> impl Future<Output = Result<Option<V>>> + Send;
    /// Look up the value under `key`; resolves to `None` when absent.
    fn get<V: Serialize + DeserializeOwned>(
        &self,
        key: K,
    ) -> impl Future<Output = Result<Option<V>>> + Send;
}
impl Database {
    /// Open (or create) the redb database at `path` and wire up the
    /// per-table handles that share a single [`DatabaseHandle`].
    ///
    /// # Errors
    /// Propagates any [`redb::Error`] raised while creating the database
    /// file.
    pub fn create(path: impl AsRef<Path>) -> Result<Self, Error> {
        // One shared handle owns the database plus the writer flag and the
        // queue of wakers for tasks waiting on write access.
        let handle = Arc::new(DatabaseHandle {
            database: redb::Database::create(path)?,
            writing: AtomicBool::new(false),
            wakers: RwLock::new(VecDeque::new()),
        });
        // Each table wrapper gets its own clone of the shared inner state.
        let inner = TableInner::new(Arc::clone(&handle));
        Ok(Self {
            users: Users(inner.clone()),
            servers: Servers(inner.clone()),
            settings: Settings(inner),
            handle,
        })
    }
}

View File

@@ -9,20 +9,22 @@ api = { version = "0.1.0", path = "../api" }
blurhash = "0.2.3"
bytes = "1.11.0"
gpui_util = "0.2.2"
iced = { workspace = true, default-features = true, features = [
iced = { workspace = true, features = [
"advanced",
"canvas",
"image",
"sipper",
"tokio",
"debug",
] }
"hot",
], default-features = true }
iced_video_player = { workspace = true }
iced-video = { workspace = true }
iced_aw = "0.13.0"
iced_wgpu = "0.14.0"
iced_winit = "0.14.0"
reqwest = "0.12.24"
reqwest = "0.13"
tap = "1.0.1"
toml = "0.9.8"
tracing = "0.1.41"

View File

@@ -2,8 +2,9 @@ mod settings;
mod video;
mod shared_string;
use iced_video_player::{Video, VideoPlayer};
use iced_video::{Ready, Video, VideoHandle};
use shared_string::SharedString;
use tap::Pipe as _;
use std::sync::Arc;
@@ -25,6 +26,8 @@ pub struct ItemCache {
pub tree: BTreeMap<Option<uuid::Uuid>, BTreeSet<uuid::Uuid>>,
}
const BACKGROUND_COLOR: iced::Color = iced::Color::from_rgba8(30, 30, 30, 0.7);
impl ItemCache {
pub fn insert(&mut self, parent: impl Into<Option<uuid::Uuid>>, item: Item) {
let parent = parent.into();
@@ -140,7 +143,7 @@ struct State {
screen: Screen,
settings: settings::SettingsState,
is_authenticated: bool,
video: Option<Arc<Video>>,
video: Option<Arc<VideoHandle<Message, Ready>>>,
}
impl State {
@@ -155,8 +158,6 @@ impl State {
query: None,
screen: Screen::Home,
settings: settings::SettingsState::default(),
// username_input: String::new(),
// password_input: String::new(),
is_authenticated: false,
video: None,
}
@@ -172,24 +173,14 @@ pub enum Message {
OpenItem(Option<uuid::Uuid>),
LoadedItem(Option<uuid::Uuid>, Vec<Item>),
Error(String),
SetToken(String),
Back,
Home,
// Login {
// username: String,
// password: String,
// config: api::JellyfinConfig,
// },
// LoginSuccess(String),
// LoadedClient(api::JellyfinClient, bool),
// Logout,
Video(video::VideoMessage),
}
fn update(state: &mut State, message: Message) -> Task<Message> {
// if let Some(client) = state.jellyfin_client.clone() {
match message {
Message::Settings(msg) => settings::update(&mut state.settings, msg),
Message::Settings(msg) => settings::update(state, msg),
Message::OpenItem(id) => {
if let Some(client) = state.jellyfin_client.clone() {
use api::jellyfin::BaseItemKind::*;
@@ -250,15 +241,6 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
state.messages.push(err);
Task::none()
}
Message::SetToken(token) => {
tracing::info!("Authenticated with token: {}", token);
state
.jellyfin_client
.as_mut()
.map(|mut client| client.set_token(token));
state.is_authenticated = true;
Task::none()
}
Message::Back => {
state.current = state.history.pop().unwrap_or(None);
Task::none()
@@ -269,7 +251,6 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
}
Message::SearchQueryChanged(query) => {
state.query = Some(query);
// Handle search query change
Task::none()
}
Message::Search => {
@@ -294,9 +275,29 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
}
fn view(state: &State) -> Element<'_, Message> {
let content = home(state);
match state.screen {
Screen::Settings => settings::settings(state),
Screen::Home | _ => home(state),
Screen::Settings => {
let settings = settings::settings(state);
let settings = container(settings)
.width(Length::FillPortion(4))
.height(Length::FillPortion(4))
.style(container::rounded_box)
.pipe(mouse_area)
.on_press(Message::Refresh)
.pipe(|c| iced::widget::column![space::vertical(), c, space::vertical()])
.pipe(container)
.width(Length::Fill)
.width(Length::Fill)
.align_y(Alignment::Center)
.align_x(Alignment::Center)
.style(|_| container::background(BACKGROUND_COLOR))
.padding(50)
.pipe(mouse_area)
.on_press(Message::Settings(settings::SettingsMessage::Close));
stack![content, settings].into()
}
Screen::Home | _ => content,
}
}
@@ -311,18 +312,16 @@ fn body(state: &State) -> Element<'_, Message> {
if let Some(ref video) = state.video {
video::player(video)
} else {
scrollable(
container(
Grid::with_children(state.cache.items_of(state.current).into_iter().map(card))
.fluid(400)
.spacing(50),
)
.spacing(50)
.pipe(container)
.padding(50)
.align_x(Alignment::Center)
// .align_y(Alignment::Center)
.height(Length::Fill)
.width(Length::Fill),
)
.width(Length::Fill)
.pipe(scrollable)
.height(Length::Fill)
.into()
}
@@ -330,19 +329,17 @@ fn body(state: &State) -> Element<'_, Message> {
fn header(state: &State) -> Element<'_, Message> {
row([
container(
Button::new(
Text::new(
text(
state
.jellyfin_client
.as_ref()
.map(|c| c.config.server_url.as_str())
.unwrap_or("No Server"),
)
.align_x(Alignment::Start),
)
.on_press(Message::Home),
)
.align_x(Alignment::Start)
.pipe(button)
.on_press(Message::Home)
.pipe(container)
.padding(10)
.width(Length::Fill)
.height(Length::Fill)
@@ -351,7 +348,6 @@ fn header(state: &State) -> Element<'_, Message> {
.style(container::rounded_box)
.into(),
search(state),
container(
row([
button("Refresh").on_press(Message::Refresh).into(),
button("Settings")
@@ -361,8 +357,8 @@ fn header(state: &State) -> Element<'_, Message> {
.on_press(Message::Video(video::VideoMessage::Test))
.into(),
])
.spacing(10),
)
.spacing(10)
.pipe(container)
.padding(10)
.width(Length::Fill)
.height(Length::Fill)
@@ -378,14 +374,13 @@ fn header(state: &State) -> Element<'_, Message> {
}
fn search(state: &State) -> Element<'_, Message> {
container(
TextInput::new("Search...", state.query.as_deref().unwrap_or_default())
.padding(10)
.size(16)
.width(Length::Fill)
.on_input(Message::SearchQueryChanged)
.on_submit(Message::Search),
)
.on_submit(Message::Search)
.pipe(container)
.padding(10)
.width(Length::Fill)
.height(Length::Shrink)

View File

@@ -2,16 +2,26 @@ use crate::*;
use iced::Element;
pub fn settings(state: &State) -> Element<'_, Message> {
empty()
screens::settings(state)
}
pub fn update(_state: &mut SettingsState, message: SettingsMessage) -> Task<Message> {
pub fn update(state: &mut State, message: SettingsMessage) -> Task<Message> {
match message {
SettingsMessage::Open => {}
SettingsMessage::Close => {}
SettingsMessage::Open => {
tracing::trace!("Opening settings");
state.screen = Screen::Settings;
}
SettingsMessage::Close => {
tracing::trace!("Closing settings");
state.screen = Screen::Home;
}
SettingsMessage::Select(screen) => {
tracing::trace!("Switching settings screen to {:?}", screen);
state.settings.screen = screen;
}
SettingsMessage::User(user) => state.settings.login_form.update(user),
SettingsMessage::Server(server) => state.settings.server_form.update(server),
}
Task::none()
}
@@ -32,9 +42,31 @@ pub enum SettingsMessage {
Open,
Close,
Select(SettingsScreen),
User(UserMessage),
Server(ServerMessage),
}
#[derive(Debug, Clone, Default)]
#[derive(Debug, Clone)]
pub enum UserMessage {
Add,
UsernameChanged(String),
PasswordChanged(String),
// Edit(uuid::Uuid),
// Delete(uuid::Uuid),
Clear,
}
#[derive(Debug, Clone)]
pub enum ServerMessage {
Add,
NameChanged(String),
UrlChanged(String),
// Edit(uuid::Uuid),
// Delete(uuid::Uuid),
Clear,
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum SettingsScreen {
#[default]
Main,
@@ -58,25 +90,207 @@ pub struct UserItem {
#[derive(Debug, Clone, Default)]
pub struct LoginForm {
username: Option<String>,
password: Option<String>,
username: String,
password: String,
}
impl LoginForm {
    /// Apply a [`UserMessage`] to the form's state.
    pub fn update(&mut self, message: UserMessage) {
        match message {
            UserMessage::UsernameChanged(data) => {
                self.username = data;
            }
            UserMessage::PasswordChanged(data) => {
                self.password = data;
            }
            UserMessage::Add => {
                // Handle adding user
                // NOTE(review): persisting the new user is not implemented yet.
            }
            UserMessage::Clear => {
                // Reset both fields without reallocating.
                self.username.clear();
                self.password.clear();
            }
        }
    }
    /// Render the login form: username/password inputs plus Add/Cancel
    /// buttons. "Add User" is only pressable when [`Self::validate`]
    /// yields a message (both fields non-empty).
    pub fn view(&self) -> Element<'_, Message> {
        iced::widget::column![
            text("Login Form"),
            text_input("Enter Username", &self.username).on_input(|data| {
                Message::Settings(SettingsMessage::User(UserMessage::UsernameChanged(data)))
            }),
            // Mask the password as it is typed.
            text_input("Enter Password", &self.password)
                .secure(true)
                .on_input(|data| {
                    Message::Settings(SettingsMessage::User(UserMessage::PasswordChanged(data)))
                }),
            row![
                button(text("Add User")).on_press_maybe(self.validate()),
                button(text("Cancel"))
                    .on_press(Message::Settings(SettingsMessage::User(UserMessage::Clear))),
            ]
            .spacing(10),
        ]
        .spacing(10)
        .padding([10, 0])
        .into()
    }
    /// Produce the `Add` message only when both fields are non-empty;
    /// feeding this into `on_press_maybe` disables the button otherwise.
    pub fn validate(&self) -> Option<Message> {
        (!self.username.is_empty() && !self.password.is_empty())
            .then(|| Message::Settings(SettingsMessage::User(UserMessage::Add)))
    }
}
#[derive(Debug, Clone, Default)]
pub struct ServerForm {
name: Option<String>,
url: Option<String>,
name: String,
url: String,
}
impl ServerForm {
    /// Apply a [`ServerMessage`] to the form's state.
    pub fn update(&mut self, message: ServerMessage) {
        // Match exhaustively: the previous `_ => {}` catch-all arm was
        // unreachable (all variants are covered) and would have silently
        // swallowed any variant added later instead of forcing an update
        // here at compile time.
        match message {
            ServerMessage::NameChanged(data) => {
                self.name = data;
            }
            ServerMessage::UrlChanged(data) => {
                self.url = data;
            }
            ServerMessage::Add => {
                // NOTE(review): persisting the new server is not implemented yet.
            }
            ServerMessage::Clear => {
                // Reset both fields without reallocating.
                self.name.clear();
                self.url.clear();
            }
        }
    }
    /// Render the "add server" form: name/URL inputs plus Add/Cancel
    /// buttons. "Add Server" is only pressable when [`Self::validate`]
    /// yields a message (both fields non-empty).
    pub fn view(&self) -> Element<'_, Message> {
        iced::widget::column![
            text("Add New Server"),
            text_input("Enter server name", &self.name).on_input(|data| {
                Message::Settings(SettingsMessage::Server(ServerMessage::NameChanged(data)))
            }),
            text_input("Enter server URL", &self.url).on_input(|data| {
                Message::Settings(SettingsMessage::Server(ServerMessage::UrlChanged(data)))
            }),
            row![
                button(text("Add Server")).on_press_maybe(self.validate()),
                button(text("Cancel")).on_press(Message::Settings(SettingsMessage::Server(
                    ServerMessage::Clear
                ))),
            ]
            .spacing(10),
        ]
        .spacing(10)
        .padding([10, 0])
        .into()
    }
    /// Produce the `Add` message only when both fields are non-empty;
    /// feeding this into `on_press_maybe` disables the button otherwise.
    pub fn validate(&self) -> Option<Message> {
        (!self.name.is_empty() && !self.url.is_empty())
            .then(|| Message::Settings(SettingsMessage::Server(ServerMessage::Add)))
    }
}
mod screens {
use iced_aw::Tabs;
use super::*;
/// Top-level settings view: a tab bar switching between the General,
/// Servers and Users screens. The active tab mirrors
/// `state.settings.screen`, and selecting a tab emits
/// `SettingsMessage::Select` with that screen.
pub fn settings(state: &State) -> Element<'_, Message> {
    Tabs::new(|f| Message::Settings(SettingsMessage::Select(f)))
        .push(
            SettingsScreen::Main,
            iced_aw::TabLabel::Text("General".into()),
            main(state),
        )
        .push(
            SettingsScreen::Servers,
            iced_aw::TabLabel::Text("Servers".into()),
            server(state),
        )
        .push(
            SettingsScreen::Users,
            iced_aw::TabLabel::Text("Users".into()),
            user(state),
        )
        .set_active_tab(&state.settings.screen)
        .into()
}
pub fn settings_screen(state: &State) -> Element<'_, Message> {
container(match state.settings.screen {
SettingsScreen::Main => main(state),
SettingsScreen::Servers => server(state),
SettingsScreen::Users => user(state),
})
.width(Length::FillPortion(10))
.height(Length::Fill)
.style(|theme| container::background(theme.extended_palette().background.base.color))
.pipe(container)
.padding(10)
.style(|theme| container::background(theme.extended_palette().secondary.base.color))
.width(Length::FillPortion(10))
.into()
}
/// Sidebar-style list of settings screens, one full-width button per
/// screen, wrapped in a scrollable.
/// NOTE(review): `state` is currently unused — presumably kept for
/// signature parity with the other screen functions; confirm.
pub fn settings_list(state: &State) -> Element<'_, Message> {
    column(
        [
            button(center_text("General")).on_press(Message::Settings(
                SettingsMessage::Select(SettingsScreen::Main),
            )),
            button(center_text("Servers")).on_press(Message::Settings(
                SettingsMessage::Select(SettingsScreen::Servers),
            )),
            button(center_text("Users")).on_press(Message::Settings(SettingsMessage::Select(
                SettingsScreen::Users,
            ))),
        ]
        // Uniform button shape: clip overflowing labels, fill the column width.
        .map(|p| p.clip(true).width(Length::Fill).into()),
    )
    .width(Length::FillPortion(2))
    .spacing(10)
    .padding(10)
    .pipe(scrollable)
    .into()
}
pub fn main(state: &State) -> Element<'_, Message> {
empty()
Column::new()
.push(text("Main Settings"))
.push(toggler(true).label("HDR"))
.spacing(20)
.padding(20)
.pipe(container)
.into()
}
pub fn server(state: &State) -> Element<'_, Message> {
empty()
Column::new()
.push(text("Server Settings"))
.push(state.settings.server_form.view())
.spacing(20)
.padding(20)
.pipe(container)
.into()
}
pub fn user(state: &State) -> Element<'_, Message> {
empty()
Column::new()
.push(text("User Settings"))
.push(state.settings.login_form.view())
.spacing(20)
.padding(20)
.pipe(container)
.into()
}
}
pub fn center_text(content: &str) -> Element<'_, Message> {
text(content)
.align_x(Alignment::Center)
.width(Length::Fill)
.into()
}

View File

@@ -3,6 +3,7 @@ use super::*;
pub enum VideoMessage {
EndOfStream,
Open(url::Url),
Loaded(VideoHandle<Message, Ready>),
Pause,
Play,
Seek(f64),
@@ -17,34 +18,26 @@ pub fn update(state: &mut State, message: VideoMessage) -> Task<Message> {
Task::none()
}
VideoMessage::Open(url) => {
match Video::new(&url)
.inspect_err(|err| {
tracing::error!("Failed to play video at {}: {:?}", url, err);
Task::perform(VideoHandle::load(url.clone()), move |result| match result {
Ok(video) => Message::Video(VideoMessage::Loaded(video)),
Err(err) => Message::Error(format!("Error opening video at {}: {:?}", url, err)),
})
.inspect(|video| {
tracing::error!("Framerate is {}", video.framerate());
})
.map(Arc::new)
{
Ok(video) => {
state.video = Some(video);
Task::none()
}
Err(err) => Task::done(Message::Error(format!(
"Error opening video at {}: {:?}",
url, err
))),
}
VideoMessage::Loaded(video) => {
state.video = Some(Arc::new(
video.on_end_of_stream(Message::Video(VideoMessage::EndOfStream)),
));
Task::done(VideoMessage::Play).map(Message::Video)
}
VideoMessage::Pause => {
if let Some(video) = state.video.as_mut().and_then(Arc::get_mut) {
video.set_paused(true);
if let Some(ref video) = state.video {
video.pause();
}
Task::none()
}
VideoMessage::Play => {
if let Some(video) = state.video.as_mut().and_then(Arc::get_mut) {
video.set_paused(false);
if let Some(ref video) = state.video {
video.play();
}
Task::none()
}
@@ -55,28 +48,26 @@ pub fn update(state: &mut State, message: VideoMessage) -> Task<Message> {
Task::none()
}
VideoMessage::Stop => {
state.video.as_ref().map(|video| {
video.stop();
});
state.video = None;
Task::none()
}
VideoMessage::Test => {
let url = url::Url::parse(
// "file:///home/servius/Projects/jello/crates/iced_video_player/.media/test.mp4",
"https://gstreamer.freedesktop.org/data/media/sintel_trailer-480p.webm",
// "https://www.youtube.com/watch?v=QbUUaXGA3C4",
)
let url = url::Url::parse("https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c")
.expect("Impossible: Failed to parse hardcoded URL");
Task::done(Message::Video(VideoMessage::Open(url)))
Task::done(VideoMessage::Open(url)).map(Message::Video)
}
}
}
pub fn player(video: &Video) -> Element<'_, Message> {
pub fn player(video: &VideoHandle<Message, Ready>) -> Element<'_, Message> {
container(
VideoPlayer::new(video)
Video::new(video)
.width(Length::Fill)
.height(Length::Fill)
.content_fit(iced::ContentFit::Contain)
.on_end_of_stream(Message::Video(VideoMessage::EndOfStream)),
.content_fit(iced::ContentFit::Contain),
)
.style(|_| container::background(iced::Color::BLACK))
.width(Length::Fill)