Compare commits


14 Commits

Author SHA1 Message Date
d509fb7813 feat: Update cargo.lock 2026-01-29 00:45:46 +05:30
dcbb5a127b feat: Move settings to a tab based ui 2026-01-29 00:40:12 +05:30
e66c457b57 feat: Added BACKGROUND_COLOR to settings popup 2026-01-28 02:06:57 +05:30
76fc14c73b feat: Use a floating box for settings 2026-01-28 02:00:45 +05:30
5b4fbd5df6 feat(store): add SecretStore, ApiKey, remove Store trait 2026-01-26 21:00:56 +05:30
e7fd01c0af chore: Update cargo.lock 2026-01-20 21:52:46 +05:30
a040478069 chore: Update flake.lock 2026-01-20 21:52:46 +05:30
e5ef173473 fix(iced-video): Update the color matrices and subtract .5 from uv samples to (CI checks cancelled) 2026-01-15 17:25:09 +05:30
429371002b fix(iced-video): Write the conversion matrix buffer so the video actually shows up (CI checks cancelled) 2026-01-15 17:01:41 +05:30
335e8fdbef feat: move cuda to linux (CI checks cancelled) 2026-01-14 15:55:49 +05:30
9dac0b6c78 feat(iced-video): added video format to the video frame (CI checks cancelled) 2026-01-14 09:51:56 +05:30
uttarayan21 97a7a632d4 feat(iced-video): implement planar YUV texture support with HDR conversion matrices and update dependencies (CI checks cancelled) 2026-01-04 23:02:47 +05:30
uttarayan21 29390140cd feat(settings): simplify form updates and temporarily disable server toggler 2025-12-27 00:13:54 +05:30
uttarayan21 97c2b3f14c feat(settings): implement user and server form handling with update functions and UI views 2025-12-27 00:04:42 +05:30
21 changed files with 1725 additions and 889 deletions

916
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -32,6 +32,7 @@ license = "MIT"
[dependencies]
api = { version = "0.1.0", path = "api" }
bytemuck = { version = "1.24.0", features = ["derive"] }
clap = { version = "4.5", features = ["derive"] }
clap-verbosity-flag = { version = "3.0.4", features = ["tracing"] }
clap_complete = "4.5"

View File

@@ -63,3 +63,50 @@ In the shader the components get uniformly normalized from [0..=1023] integer to
Videos, however, are generally not stored in this format, or any RGB format for that matter, because RGB is not as efficient for (lossy) compression as YUV.
Right now I don't want to deal with YUV formats, so I'll use gstreamer caps to convert the video into the `Rgba10a2` format.
## Pixel formats and Planes
Dated: Sun Jan 4 09:09:16 AM IST 2026
| value | count | quantile | percentage | frequency |
| --- | --- | --- | --- | --- |
| yuv420p | 1815 | 0.5067001675041876 | 50.67% | ************************************************** |
| yuv420p10le | 1572 | 0.4388609715242881 | 43.89% | ******************************************* |
| yuvj420p | 171 | 0.04773869346733668 | 4.77% | **** |
| rgba | 14 | 0.003908431044109436 | 0.39% | |
| yuvj444p | 10 | 0.0027917364600781687 | 0.28% | |
These are the pixel formats across all the videos in my media collection.
### RGBA
Pretty self-evident: 8 bits for each of R, G, B and A.
Hopefully it shouldn't be too hard to write a function (or possibly a LUT) that takes RGBA data and maps it to Rgb10a2Unorm; a rough sketch follows the diagram below.
```mermaid
packet
title RGBA
+8: "R"
+8: "G"
+8: "B"
+8: "A"
```
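A minimal sketch of such a mapping, assuming the 10-bit channels sit in the low bits in R, G, B order with the 2-bit alpha on top (which is how wgpu lays out `Rgb10a2Unorm`); in practice the conversion is left to gstreamer as noted above:
```rust
/// Hypothetical sketch: pack one RGBA8 pixel into a single Rgb10a2Unorm word.
fn rgba8_to_rgb10a2(r: u8, g: u8, b: u8, a: u8) -> u32 {
    // Widen 8 -> 10 bits by bit replication, so 0xFF maps to 0x3FF.
    let widen = |c: u8| ((c as u32) << 2) | ((c as u32) >> 6);
    // Quantize 8 -> 2 bits for alpha.
    let a2 = (a as u32) >> 6;
    widen(r) | (widen(g) << 10) | (widen(b) << 20) | (a2 << 30)
}

fn main() {
    // Opaque white saturates every channel.
    assert_eq!(rgba8_to_rgb10a2(255, 255, 255, 255), 0xFFFF_FFFF);
}
```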
### YUV
[All YUV formats](https://learn.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#surface-definitions)
[10 and 16 bit yuv formats](https://learn.microsoft.com/en-us/windows/win32/medfound/10-bit-and-16-bit-yuv-video-formats)
- Y -> luminance
- U, V -> chrominance
- p -> planar
- sp -> semi-planar
- j -> full range

Planar formats store each channel in its own contiguous array, one after another.
In semi-planar formats the Y plane is separate and the U and V samples are interleaved in a single plane (a small layout sketch follows below).
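A rough sketch of the difference, assuming tightly packed 8-bit 4:2:0 data with no row padding (real frames usually carry per-row strides):
```rust
/// Byte offset of each plane for planar (yuv420p) vs semi-planar (NV12) 4:2:0 frames.
fn plane_offsets_420(width: usize, height: usize, semi_planar: bool) -> Vec<(&'static str, usize)> {
    let y_size = width * height; // full-resolution luma plane
    let chroma_size = (width / 2) * (height / 2); // each chroma plane is quarter size
    if semi_planar {
        // NV12: Y plane followed by one interleaved UVUV... plane.
        vec![("Y", 0), ("UV", y_size)]
    } else {
        // yuv420p: Y plane, then U plane, then V plane.
        vec![("Y", 0), ("U", y_size), ("V", y_size + chroma_size)]
    }
}

fn main() {
    println!("{:?}", plane_offsets_420(1920, 1080, false)); // [("Y", 0), ("U", 2073600), ("V", 2592000)]
    println!("{:?}", plane_offsets_420(1920, 1080, true));  // [("Y", 0), ("UV", 2073600)]
}
```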
## Chroma Subsampling

View File

@@ -4,6 +4,7 @@ version = "0.1.0"
edition = "2024"
[dependencies]
bytemuck = "1.24.0"
error-stack = "0.6.0"
futures-lite = "2.6.1"
gst.workspace = true
@@ -13,6 +14,7 @@ iced_renderer = { version = "0.14.0", features = ["iced_wgpu"] }
iced_wgpu = { version = "0.14.0" }
thiserror = "2.0.17"
tracing = "0.1.43"
wgpu = { version = "27.0.1", features = ["vulkan"] }
[dev-dependencies]
iced.workspace = true

View File

@@ -17,27 +17,42 @@ pub type Result<T, E = Report<Error>> = core::result::Result<T, E>;
use std::sync::{Arc, Mutex, atomic::AtomicBool};
mod seal {
pub trait Sealed {}
impl Sealed for super::Unknown {}
impl Sealed for super::Ready {}
}
pub trait State: seal::Sealed {
fn is_ready() -> bool {
false
}
}
#[derive(Debug, Clone)]
pub struct Unknown;
#[derive(Debug, Clone)]
pub struct Ready;
impl State for Unknown {}
impl State for Ready {
fn is_ready() -> bool {
true
}
}
/// This is the video handle that is used to control the video playback.
/// This should be kept in the application state.
#[derive(Debug, Clone)]
pub struct VideoHandle<Message> {
pub struct VideoHandle<Message, S: State = Unknown> {
id: id::Id,
pub source: source::VideoSource,
frame_ready: Arc<AtomicBool>,
on_new_frame: Option<Box<Message>>,
on_end_of_stream: Option<Box<Message>>,
on_about_to_finish: Option<Box<Message>>,
__marker: core::marker::PhantomData<S>,
}
impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
pub fn id(&self) -> &id::Id {
&self.id
}
pub fn source(&self) -> &source::VideoSource {
&self.source
}
impl<Message: Send + Sync + Clone> VideoHandle<Message, Unknown> {
pub fn new(url: impl AsRef<str>) -> Result<Self> {
let source = source::VideoSource::new(url)?;
let frame_ready = Arc::clone(&source.ready);
@@ -48,36 +63,64 @@ impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
on_end_of_stream: None,
on_about_to_finish: None,
frame_ready,
__marker: core::marker::PhantomData,
})
}
pub async fn wait(self) -> Result<Self> {
/// Creates a new video handle and waits for the metadata to be loaded.
pub async fn load(url: impl AsRef<str>) -> Result<VideoHandle<Message, Ready>> {
let handle = VideoHandle::new(url)?;
handle.wait().await
}
}
impl<Message: Send + Sync + Clone, S: State> VideoHandle<Message, S> {
pub fn id(&self) -> &id::Id {
&self.id
}
pub fn source(&self) -> &source::VideoSource {
&self.source
}
pub async fn wait(self) -> Result<VideoHandle<Message, Ready>> {
self.source.wait().await?;
Ok(self)
Ok(self.state::<Ready>())
}
pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
let sub = widget::VideoSubscription {
id: self.id.clone(),
on_end_of_stream: self.on_end_of_stream.clone(),
on_new_frame: self.on_new_frame.clone(),
on_about_to_finish: self.on_about_to_finish.clone(),
bus: self.source.bus.clone(),
};
iced_futures::subscription::from_recipe(sub)
fn state<S2: State>(self) -> VideoHandle<Message, S2> {
VideoHandle {
id: self.id,
source: self.source,
on_new_frame: self.on_new_frame,
on_end_of_stream: self.on_end_of_stream,
on_about_to_finish: self.on_about_to_finish,
frame_ready: self.frame_ready,
__marker: core::marker::PhantomData,
}
}
pub fn subscription_with<State>(
&self,
state: &State,
f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
) -> iced_futures::subscription::Subscription<Message>
where
State: Send + Sync + 'static,
{
let sub = self.subscription();
iced_futures::subscription::Subscription::batch([sub, f(state)])
}
// pub fn subscription(&self) -> iced_futures::subscription::Subscription<Message> {
// let sub = widget::VideoSubscription {
// id: self.id.clone(),
// on_end_of_stream: self.on_end_of_stream.clone(),
// on_new_frame: self.on_new_frame.clone(),
// on_about_to_finish: self.on_about_to_finish.clone(),
// bus: self.source.bus.clone(),
// };
// iced_futures::subscription::from_recipe(sub)
// }
//
// pub fn subscription_with<State>(
// &self,
// state: &State,
// f: impl FnOnce(&State) -> iced_futures::subscription::Subscription<Message> + 'static,
// ) -> iced_futures::subscription::Subscription<Message>
// where
// State: Send + Sync + 'static,
// {
// let sub = self.subscription();
// iced_futures::subscription::Subscription::batch([sub, f(state)])
// }
pub fn on_new_frame(self, message: Message) -> Self {
Self {
@@ -109,10 +152,13 @@ impl<Message: Send + Sync + Clone + 'static> VideoHandle<Message> {
pub fn stop(&self) {
self.source.stop();
}
}
/// Creates a new video handle and waits for the metadata to be loaded.
pub async fn load(url: impl AsRef<str>) -> Result<Self> {
let handle = Self::new(url)?;
handle.wait().await
impl<Message: Send + Sync + Clone> VideoHandle<Message, Ready> {
pub fn format(&self) -> Result<gst::VideoFormat> {
self.source
.format()
.change_context(Error)
.attach("Failed to get video format")
}
}
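As a usage sketch of the new Unknown -> Ready typestate split (the message type and URL here are placeholders, not part of the diff; everything else follows the API above):
```rust
// Sketch: how the typestate handle is meant to flow through an application.
async fn open_video<AppMessage: Send + Sync + Clone>() -> Result<VideoHandle<AppMessage, Ready>> {
    // `load` is `new` followed by `wait`, so the returned handle is already Ready.
    let handle = VideoHandle::<AppMessage>::load("https://example.com/video.mkv").await?;
    // `format()` only exists on VideoHandle<_, Ready>, so this call type-checks here
    // but would not compile on a freshly created (Unknown) handle.
    tracing::info!("negotiated format: {:?}", handle.format()?);
    Ok(handle)
}
```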

View File

@@ -1,15 +1,101 @@
use crate::id;
use gst::videoconvertscale::VideoFormat;
use iced_wgpu::primitive::Pipeline;
use iced_wgpu::wgpu;
use std::collections::BTreeMap;
use std::sync::{Arc, Mutex, atomic::AtomicBool};
#[derive(Clone, Copy, Debug, bytemuck::Zeroable, bytemuck::Pod)]
#[repr(transparent)]
pub struct ConversionMatrix {
matrix: [Vec3f; 3],
}
#[derive(Clone, Copy, Debug, bytemuck::Zeroable, bytemuck::Pod)]
#[repr(C, align(16))]
pub struct Vec3f {
data: [f32; 3],
__padding: u32,
}
impl From<[f32; 3]> for Vec3f {
fn from(value: [f32; 3]) -> Self {
Vec3f {
data: [value[0], value[1], value[2]],
__padding: 0,
}
}
}
impl Vec3f {
pub fn new(x: f32, y: f32, z: f32) -> Self {
Vec3f {
data: [x, y, z],
__padding: 0,
}
}
pub const fn from(data: [f32; 3]) -> Self {
Vec3f {
data: [data[0], data[1], data[2]],
__padding: 0,
}
}
}
// impl ConversionMatrix {
// pub fn desc() -> wgpu::VertexBufferLayout<'static> {
// wgpu::VertexBufferLayout {
// array_stride: core::mem::size_of::<ConversionMatrix>() as wgpu::BufferAddress,
// step_mode: wgpu::VertexStepMode::Vertex,
// attributes: &[
// wgpu::VertexAttribute {
// offset: 0,
// shader_location: 0,
// format: wgpu::VertexFormat::Float32x4,
// },
// wgpu::VertexAttribute {
// offset: 16,
// shader_location: 1,
// format: wgpu::VertexFormat::Float32x4,
// },
// wgpu::VertexAttribute {
// offset: 32,
// shader_location: 2,
// format: wgpu::VertexFormat::Float32x4,
// },
// wgpu::VertexAttribute {
// offset: 48,
// shader_location: 3,
// format: wgpu::VertexFormat::Float32x4,
// },
// ],
// }
// }
// }
pub const BT2020_TO_RGB: ConversionMatrix = ConversionMatrix {
matrix: [
Vec3f::from([1.0, 0.0, 1.4746]),
Vec3f::from([1.0, -0.16455, -0.5714]),
Vec3f::from([1.0, 1.8814, 0.0]),
],
};
pub const BT709_TO_RGB: ConversionMatrix = ConversionMatrix {
matrix: [
Vec3f::from([1.0, 0.0, 1.5748]),
Vec3f::from([1.0, -0.1873, -0.4681]),
Vec3f::from([1.0, 1.8556, 0.0]),
],
};
#[derive(Debug)]
pub struct VideoFrame {
pub id: id::Id,
pub size: wgpu::Extent3d,
pub ready: Arc<AtomicBool>,
pub frame: Arc<Mutex<gst::Sample>>,
pub format: VideoFormat,
}
impl iced_wgpu::Primitive for VideoFrame {
@@ -24,73 +110,89 @@ impl iced_wgpu::Primitive for VideoFrame {
viewport: &iced_wgpu::graphics::Viewport,
) {
let video = pipeline.videos.entry(self.id.clone()).or_insert_with(|| {
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced-video-texture"),
size: self.size,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: pipeline.format,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("iced-video-buffer"),
size: (self.size.width * self.size.height * 4) as u64,
usage: wgpu::BufferUsages::COPY_SRC | wgpu::BufferUsages::COPY_DST,
mapped_at_creation: false,
});
let texture = VideoTexture::new(
"iced-video-texture",
self.size,
device,
pipeline.format,
self.format,
);
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("iced-video-texture-bind-group"),
layout: &pipeline.bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(
&texture.create_view(&wgpu::TextureViewDescriptor::default()),
),
resource: wgpu::BindingResource::TextureView(&texture.y_texture()),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureView(&texture.uv_texture()),
},
wgpu::BindGroupEntry {
binding: 2,
resource: wgpu::BindingResource::Sampler(&pipeline.sampler),
},
wgpu::BindGroupEntry {
binding: 3,
resource: wgpu::BindingResource::Buffer(
texture
.conversion_matrix_buffer()
.as_entire_buffer_binding(),
),
},
],
});
VideoTextures {
let matrix = if matches!(self.format, VideoFormat::P01010le | VideoFormat::P016Le) {
BT2020_TO_RGB
} else {
BT709_TO_RGB
};
texture.write_conversion_matrix(&matrix, queue);
VideoFrameData {
id: self.id.clone(),
texture,
buffer,
bind_group,
conversion_matrix: matrix,
ready: Arc::clone(&self.ready),
}
});
// dbg!(&self.size, video.texture.size());
if self.size != video.texture.size() {
// Resize the texture if the size has changed.
let new_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced-video-texture-resized"),
size: self.size,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: pipeline.format,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let new_texture = video
.texture
.resize("iced-video-texture-resized", self.size, device);
new_texture.write_conversion_matrix(&video.conversion_matrix, queue);
let new_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("iced-video-texture-bind-group-resized"),
label: Some("iced-video-texture-bind-group"),
layout: &pipeline.bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(
&new_texture.create_view(&wgpu::TextureViewDescriptor::default()),
),
resource: wgpu::BindingResource::TextureView(&new_texture.y_texture()),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureView(&new_texture.uv_texture()),
},
wgpu::BindGroupEntry {
binding: 2,
resource: wgpu::BindingResource::Sampler(&pipeline.sampler),
},
wgpu::BindGroupEntry {
binding: 3,
resource: wgpu::BindingResource::Buffer(
video
.texture
.conversion_matrix_buffer()
.as_entire_buffer_binding(),
),
},
],
});
video.texture = new_texture;
@@ -105,22 +207,9 @@ impl iced_wgpu::Primitive for VideoFrame {
let data = buffer
.map_readable()
.expect("BUG: Failed to map gst::Buffer readable");
// queue.write_buffer(&video.buffer, 0, &data);
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: &video.texture,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&data,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(4 * self.size.width),
rows_per_image: Some(self.size.height),
},
self.size,
);
video.texture.write_texture(&data, queue);
drop(data);
video
.ready
@@ -139,23 +228,6 @@ impl iced_wgpu::Primitive for VideoFrame {
return;
};
// encoder.copy_buffer_to_texture(
// wgpu::TexelCopyBufferInfo {
// buffer: &video.buffer,
// layout: wgpu::TexelCopyBufferLayout {
// offset: 0,
// bytes_per_row: Some(4 * self.size.width),
// rows_per_image: Some(self.size.height),
// },
// },
// wgpu::TexelCopyTextureInfo {
// texture: &video.texture,
// mip_level: 0,
// origin: wgpu::Origin3d::ZERO,
// aspect: wgpu::TextureAspect::All,
// },
// self.size,
// );
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("iced-video-render-pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
@@ -186,36 +258,207 @@ impl iced_wgpu::Primitive for VideoFrame {
}
}
/// NV12 and P010 are only supported on the DX12 and Vulkan backends.
/// While we could use Vulkan via MoltenVK on macOS, I'd much rather use Metal directly.
/// Right now this only supports interleaved UV formats.
/// For planar formats we would need 3 textures.
/// Also, NV12 and P010 textures are not COPY_DST capable.
/// This assumes 4:2:0 chroma subsampling (for now).
/// So for 4 Y samples there is 1 U and 1 V sample.
/// This means that the UV texture is half the width and half the height of the Y texture.
#[derive(Debug)]
pub struct VideoTextures {
pub struct VideoTexture {
y: wgpu::Texture,
uv: wgpu::Texture,
size: wgpu::Extent3d,
video_format: VideoFormat,
surface_format: wgpu::TextureFormat,
conversion_matrix_buffer: wgpu::Buffer,
}
impl VideoTexture {
pub fn size(&self) -> wgpu::Extent3d {
self.size
}
pub fn new(
label: &str,
size: wgpu::Extent3d,
device: &wgpu::Device,
surface_format: wgpu::TextureFormat,
video_format: VideoFormat,
) -> Self {
let surface_hdr = surface_format.is_wide();
let video_hdr = matches!(video_format, VideoFormat::P01010le | VideoFormat::P016Le);
if surface_hdr && !video_hdr {
tracing::warn!("Surface texture is HDR but video format is SDR");
} else if !surface_hdr && video_hdr {
tracing::warn!("Video format is HDR but surface does not support HDR");
}
let y_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some(&format!("{}-y", label)),
size: wgpu::Extent3d {
width: size.width,
height: size.height,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::R16Unorm,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let uv_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some(&format!("{}-uv", label)),
size: wgpu::Extent3d {
width: size.width / 2,
height: size.height / 2,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rg16Unorm,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
let buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("iced-video-conversion-matrix-buffer"),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
size: core::mem::size_of::<ConversionMatrix>() as wgpu::BufferAddress,
mapped_at_creation: false,
});
VideoTexture {
y: y_texture,
uv: uv_texture,
size,
surface_format,
video_format,
conversion_matrix_buffer: buffer,
}
}
// This returns the surface texture format, not the video pixel format.
pub fn format(&self) -> wgpu::TextureFormat {
self.surface_format
}
pub fn y_texture(&self) -> wgpu::TextureView {
self.y.create_view(&wgpu::TextureViewDescriptor::default())
}
pub fn uv_texture(&self) -> wgpu::TextureView {
self.uv.create_view(&wgpu::TextureViewDescriptor::default())
}
pub fn resize(&self, name: &str, new_size: wgpu::Extent3d, device: &wgpu::Device) -> Self {
VideoTexture::new(name, new_size, device, self.format(), self.pixel_format())
}
pub fn pixel_format(&self) -> VideoFormat {
self.video_format
}
/// This assumes that the data is laid out correctly for the texture format.
pub fn write_texture(&self, data: &[u8], queue: &wgpu::Queue) {
let Self { y, uv, .. } = self;
let y_size = y.size();
let uv_size = uv.size();
let y_data_size = (y_size.width * y_size.height * 2) as usize;
let uv_data_size = (y_data_size / 2) as usize; // UV is interleaved
let y_data = &data[0..y_data_size];
let uv_data = &data[y_data_size..y_data_size + uv_data_size];
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: y,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
y_data,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(y_size.width * 2),
rows_per_image: None,
},
y_size,
);
queue.write_texture(
wgpu::TexelCopyTextureInfo {
texture: uv,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
uv_data,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(uv_size.width * 4),
rows_per_image: None,
},
uv_size,
);
}
pub fn write_conversion_matrix(&self, matrix: &ConversionMatrix, queue: &wgpu::Queue) {
queue.write_buffer(
&self.conversion_matrix_buffer,
0,
bytemuck::bytes_of(matrix),
);
}
pub fn conversion_matrix_buffer(&self) -> &wgpu::Buffer {
&self.conversion_matrix_buffer
}
}
#[derive(Debug)]
pub struct VideoFrameData {
id: id::Id,
texture: wgpu::Texture,
buffer: wgpu::Buffer,
texture: VideoTexture,
bind_group: wgpu::BindGroup,
conversion_matrix: ConversionMatrix,
ready: Arc<AtomicBool>,
}
impl VideoFrameData {
pub fn is_hdr(&self) -> bool {
self.texture.format().is_wide()
}
}
#[derive(Debug)]
pub struct VideoPipeline {
pipeline: wgpu::RenderPipeline,
bind_group_layout: wgpu::BindGroupLayout,
sampler: wgpu::Sampler,
videos: BTreeMap<id::Id, VideoTextures>,
format: wgpu::TextureFormat,
videos: BTreeMap<id::Id, VideoFrameData>,
}
pub trait HdrTextureFormatExt {
fn is_hdr(&self) -> bool;
pub trait WideTextureFormatExt {
fn is_wide(&self) -> bool;
}
impl HdrTextureFormatExt for wgpu::TextureFormat {
fn is_hdr(&self) -> bool {
impl WideTextureFormatExt for wgpu::TextureFormat {
fn is_wide(&self) -> bool {
matches!(
self,
wgpu::TextureFormat::Rgba16Float
| wgpu::TextureFormat::Rgba32Float
| wgpu::TextureFormat::Rgb10a2Unorm
| wgpu::TextureFormat::Rgb10a2Uint
| wgpu::TextureFormat::P010
)
}
}
@@ -225,15 +468,14 @@ impl Pipeline for VideoPipeline {
where
Self: Sized,
{
if format.is_hdr() {
if format.is_wide() {
tracing::info!("HDR texture format detected: {:?}", format);
}
let shader_passthrough =
device.create_shader_module(wgpu::include_wgsl!("shaders/passthrough.wgsl"));
let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("iced-video-texture-bind-group-layout"),
entries: &[
// y
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
@@ -244,15 +486,40 @@ impl Pipeline for VideoPipeline {
},
count: None,
},
// uv
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
multisampled: false,
view_dimension: wgpu::TextureViewDimension::D2,
sample_type: wgpu::TextureSampleType::Float { filterable: true },
},
count: None,
},
// sampler
wgpu::BindGroupLayoutEntry {
binding: 2,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
// conversion matrix
wgpu::BindGroupLayoutEntry {
binding: 3,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
},
],
});
let shader_passthrough =
device.create_shader_module(wgpu::include_wgsl!("shaders/passthrough.wgsl"));
let render_pipeline_layout =
device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("iced-video-render-pipeline-layout"),
@@ -273,7 +540,7 @@ impl Pipeline for VideoPipeline {
entry_point: Some("fs_main"),
targets: &[Some(wgpu::ColorTargetState {
format,
blend: Some(wgpu::BlendState::ALPHA_BLENDING),
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
compilation_options: wgpu::PipelineCompilationOptions::default(),

View File

@@ -1,14 +1,12 @@
// Vertex shader
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
@location(0) tex_coords: vec2<f32>,
};
}
@vertex
fn vs_main(
@builtin(vertex_index) in_vertex_index: u32,
) -> VertexOutput {
) -> VertexOutput {
var out: VertexOutput;
let uv = vec2<f32>(f32((in_vertex_index << 1u) & 2u), f32(in_vertex_index & 2u));
out.clip_position = vec4<f32>(uv * 2.0 - 1.0, 0.0, 1.0);
@@ -17,15 +15,16 @@ fn vs_main(
return out;
}
// Fragment shader
@group(0) @binding(0)
var t_diffuse: texture_2d<f32>;
@group(0) @binding(1)
var s_diffuse: sampler;
@group(0) @binding(0) var y_texture: texture_2d<f32>;
@group(0) @binding(1) var uv_texture: texture_2d<f32>;
@group(0) @binding(2) var texture_sampler: sampler;
@group(0) @binding(3) var<uniform> rgb_primaries: mat3x3<f32>;
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
return textureSample(t_diffuse, s_diffuse, in.tex_coords);
fn fs_main(input: VertexOutput) -> @location(0) vec4<f32> {
let y = textureSample(y_texture, texture_sampler, input.tex_coords).r;
let uv = textureSample(uv_texture, texture_sampler, input.tex_coords).rg;
let yuv = vec3f(y, uv.x - 0.5, uv.y - 0.5);
return vec4f(yuv * rgb_primaries, 1.0);
}
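A CPU-side sketch of the same conversion the fragment shader performs, assuming full-range samples normalized to 0.0..=1.0 and treating each row of `BT709_TO_RGB` as one output channel; handy for sanity-checking the matrix and the 0.5 chroma offset:
```rust
/// BT.709 YUV -> RGB rows, mirroring the BT709_TO_RGB constant above.
const BT709: [[f32; 3]; 3] = [
    [1.0, 0.0, 1.5748],      // R = Y + 1.5748 * V
    [1.0, -0.1873, -0.4681], // G = Y - 0.1873 * U - 0.4681 * V
    [1.0, 1.8556, 0.0],      // B = Y + 1.8556 * U
];

fn yuv_to_rgb(y: f32, u: f32, v: f32) -> [f32; 3] {
    // Chroma is stored centered around 0.5, hence the subtraction (same as in the shader).
    let yuv = [y, u - 0.5, v - 0.5];
    let mut rgb = [0.0f32; 3];
    for (out, row) in rgb.iter_mut().zip(BT709.iter()) {
        *out = row.iter().zip(yuv.iter()).map(|(a, b)| a * b).sum();
    }
    rgb
}

fn main() {
    // Mid-grey with neutral chroma stays grey: roughly [0.5, 0.5, 0.5].
    println!("{:?}", yuv_to_rgb(0.5, 0.5, 0.5));
}
```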

View File

@@ -13,7 +13,6 @@ use std::sync::{Arc, Mutex, atomic::AtomicBool};
#[derive(Debug, Clone)]
pub struct VideoSource {
pub(crate) playbin: Playbin3,
pub(crate) videoconvert: VideoConvert,
pub(crate) appsink: AppSink,
pub(crate) bus: Bus,
pub(crate) ready: Arc<AtomicBool>,
@@ -27,22 +26,12 @@ impl VideoSource {
/// now.
pub fn new(url: impl AsRef<str>) -> Result<Self> {
Gst::new();
let videoconvert = VideoConvert::new("iced-video-convert")
// .change_context(Error)?
// .with_output_format(gst::plugins::videoconvertscale::VideoFormat::Rgba)
.change_context(Error)?;
let mut appsink = AppSink::new("iced-video-sink").change_context(Error)?;
appsink
.drop(true)
.sync(true)
// .async_(true)
.emit_signals(true)
.caps(
Caps::builder(CapsType::Video)
.field("format", "RGB10A2_LE") // Forced for now
.build(),
);
let video_sink = videoconvert.link(&appsink).change_context(Error)?;
.emit_signals(true);
let playbin = Playbin3::new("iced-video")
.change_context(Error)?
.with_uri(url.as_ref())
@@ -50,13 +39,13 @@ impl VideoSource {
.with_buffer_size(4096 * 4096 * 4 * 3)
.with_ring_buffer_max_size(4096 * 4096 * 4 * 3)
.with_flags(Playbin3::default_flags() | PlayFlags::DOWNLOAD)
.with_video_sink(&video_sink);
.with_video_sink(&appsink);
let bus = playbin.bus().change_context(Error)?;
playbin.pause().change_context(Error)?;
let ready = Arc::new(AtomicBool::new(false));
let frame = Arc::new(Mutex::new(gst::Sample::new()));
appsink.on_new_frame({
appsink.on_new_sample({
let ready = Arc::clone(&ready);
let frame = Arc::clone(&frame);
move |appsink| {
@@ -75,7 +64,6 @@ impl VideoSource {
Ok(Self {
playbin,
videoconvert,
appsink,
bus,
ready,
@@ -85,6 +73,26 @@ impl VideoSource {
}
pub async fn wait(&self) -> Result<()> {
use futures_lite::StreamExt;
// self.bus_stream()
// .for_each(|msg: gst::Message| {
// use gst::gstreamer::prelude::*;
// match msg.view() {
// MessageView::Eos(_) => {
// tracing::info!("Video reached end of stream");
// }
// MessageView::Error(err) => {
// tracing::error!(
// "Video Error from {:?}: {} ({:?})",
// err.src().map(|s| s.path_string()),
// err.error(),
// err.debug()
// );
// }
// view => tracing::info!("Video Message: {:#?}", view),
// }
// })
// .await;
self.playbin
.wait_for_states(&[gst::State::Paused, gst::State::Playing])
.await
@@ -93,6 +101,23 @@ impl VideoSource {
Ok(())
}
pub fn format(&self) -> Result<gst::VideoFormat> {
let caps = self
.appsink
.sink("sink")
.current_caps()
.change_context(Error)?;
let format = caps
.format()
.ok_or(Error)
.attach("Failed to get video caps structure")?;
Ok(format)
}
pub fn bus_stream(&self) -> impl futures_lite::Stream<Item = gst::Message> {
self.bus.stream()
}
pub fn is_playing(&self) -> Result<bool> {
let state = self.playbin.state(None).change_context(Error)?;
Ok(state == gst::State::Playing)

View File

@@ -10,7 +10,8 @@ where
Renderer: PrimitiveRenderer,
{
id: id::Id,
handle: &'a VideoHandle<Message>,
handle: &'a VideoHandle<Message, Ready>,
video_format: gst::VideoFormat,
content_fit: iced::ContentFit,
width: iced::Length,
height: iced::Length,
@@ -21,12 +22,15 @@ where
impl<'a, Message, Theme, Renderer> Video<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
Message: Clone,
Message: Clone + Send + Sync,
{
pub fn new(handle: &'a VideoHandle<Message>) -> Self {
pub fn new(handle: &'a VideoHandle<Message, Ready>) -> Self {
Self {
id: handle.id.clone(),
handle: &handle,
video_format: handle
.format()
.expect("Failed to get video format during widget creation"),
content_fit: iced::ContentFit::Contain,
width: Length::Shrink,
height: Length::Shrink,
@@ -74,7 +78,7 @@ where
impl<Message, Theme, Renderer> iced::Widget<Message, Theme, Renderer>
for Video<'_, Message, Theme, Renderer>
where
Message: Clone,
Message: Clone + Send + Sync,
Renderer: PrimitiveRenderer,
{
fn size(&self) -> iced::Size<Length> {
@@ -141,6 +145,7 @@ where
},
ready: Arc::clone(&self.handle.frame_ready),
frame: Arc::clone(&self.handle.source.frame),
format: self.video_format,
},
);
};
@@ -184,7 +189,7 @@ where
impl<'a, Message, Theme, Renderer> From<Video<'a, Message, Theme, Renderer>>
for iced::Element<'a, Message, Theme, Renderer>
where
Message: 'a + Clone,
Message: Send + Sync + 'a + Clone,
Theme: 'a,
Renderer: 'a + iced_wgpu::primitive::Renderer,
{

35
flake.lock generated
View File

@@ -3,11 +3,11 @@
"advisory-db": {
"flake": false,
"locked": {
"lastModified": 1766435619,
"narHash": "sha256-3A5Z5K28YB45REOHMWtyQ24cEUXW76MOtbT6abPrARE=",
"lastModified": 1768679419,
"narHash": "sha256-l9rM4lXBeS2mIAJsJjVfl0UABx3S3zg5tul7bv+bn50=",
"owner": "rustsec",
"repo": "advisory-db",
"rev": "a98dbc80b16730a64c612c6ab5d5fecb4ebb79ba",
"rev": "c700e1cd023ca87343cbd9217d50d47023e9adc7",
"type": "github"
},
"original": {
@@ -18,11 +18,11 @@
},
"crane": {
"locked": {
"lastModified": 1766194365,
"narHash": "sha256-4AFsUZ0kl6MXSm4BaQgItD0VGlEKR3iq7gIaL7TjBvc=",
"lastModified": 1768873933,
"narHash": "sha256-CfyzdaeLNGkyAHp3kT5vjvXhA1pVVK7nyDziYxCPsNk=",
"owner": "ipetkov",
"repo": "crane",
"rev": "7d8ec2c71771937ab99790b45e6d9b93d15d9379",
"rev": "0bda7e7d005ccb5522a76d11ccfbf562b71953ca",
"type": "github"
},
"original": {
@@ -34,10 +34,10 @@
"crates-io-index": {
"flake": false,
"locked": {
"lastModified": 1763363725,
"narHash": "sha256-cxr5xIKZFP45yV1ZHFTB1sHo5YGiR3FA8D9vAfDizMo=",
"lastModified": 1769614137,
"narHash": "sha256-3Td8fiv6iFVxeS0hYq3xdd10ZvUkC9INMAiQx/mECas=",
"ref": "refs/heads/master",
"rev": "0382002e816a4cbd17d8d5b172f08b848aa22ff6",
"rev": "c7e7d6394bc95555d6acd5c6783855f47d64c90d",
"shallow": true,
"type": "git",
"url": "https://github.com/rust-lang/crates.io-index"
@@ -50,7 +50,9 @@
},
"crates-nix": {
"inputs": {
"crates-io-index": "crates-io-index"
"crates-io-index": [
"crates-io-index"
]
},
"locked": {
"lastModified": 1763364255,
@@ -106,11 +108,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1766309749,
"narHash": "sha256-3xY8CZ4rSnQ0NqGhMKAy5vgC+2IVK0NoVEzDoOh4DA4=",
"lastModified": 1768564909,
"narHash": "sha256-Kell/SpJYVkHWMvnhqJz/8DqQg2b6PguxVWOuadbHCc=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a6531044f6d0bef691ea18d4d4ce44d0daa6e816",
"rev": "e4bae1bd10c9c57b2cf517953ab70060a828ee6f",
"type": "github"
},
"original": {
@@ -124,6 +126,7 @@
"inputs": {
"advisory-db": "advisory-db",
"crane": "crane",
"crates-io-index": "crates-io-index",
"crates-nix": "crates-nix",
"flake-utils": "flake-utils",
"nix-github-actions": "nix-github-actions",
@@ -138,11 +141,11 @@
]
},
"locked": {
"lastModified": 1766371695,
"narHash": "sha256-W7CX9vy7H2Jj3E8NI4djHyF8iHSxKpb2c/7uNQ/vGFU=",
"lastModified": 1768877311,
"narHash": "sha256-abSDl0cNr0B+YCsIDpO1SjXD9JMxE4s8EFnhLEFVovI=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "d81285ba8199b00dc31847258cae3c655b605e8c",
"rev": "59e4ab96304585fde3890025fd59bd2717985cc1",
"type": "github"
},
"original": {

View File

@@ -9,7 +9,14 @@
url = "github:nix-community/nix-github-actions";
inputs.nixpkgs.follows = "nixpkgs";
};
crates-nix.url = "github:uttarayan21/crates.nix";
crates-io-index = {
url = "git+https://github.com/rust-lang/crates.io-index?shallow=1";
flake = false;
};
crates-nix = {
url = "github:uttarayan21/crates.nix";
inputs.crates-io-index.follows = "crates-io-index";
};
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
@@ -88,7 +95,6 @@
glib
glib-networking
cudatoolkit
wrapGAppsHook4
# bzip2_1_1
@@ -104,6 +110,7 @@
++ (lib.optionals pkgs.stdenv.isLinux [
gst_all_1.gstreamermm
gst_all_1.gst-vaapi
cudatoolkit
# util-linux
# libselinux
@@ -179,38 +186,49 @@
devShells = rec {
rust-shell =
pkgs.mkShell.override {
stdenv =
if pkgs.stdenv.isLinux
then (pkgs.stdenvAdapters.useMoldLinker pkgs.clangStdenv)
else pkgs.clangStdenv;
} (commonArgs
stdenv = pkgs.clangStdenv;
# if pkgs.stdenv.isLinux
# then (pkgs.stdenvAdapters.useMoldLinker pkgs.clangStdenv)
# else pkgs.clangStdenv;
}
(commonArgs
// {
# GST_PLUGIN_PATH = "/run/current-system/sw/lib/gstreamer-1.0/";
GIO_EXTRA_MODULES = "${pkgs.glib-networking}/lib/gio/modules";
packages = with pkgs;
[
toolchainWithRustAnalyzer
cargo-nextest
bacon
cargo-audit
cargo-deny
cargo-expand
bacon
cargo-make
cargo-hack
cargo-make
cargo-nextest
cargo-outdated
lld
lldb
cargo-audit
(crates.buildCrate "cargo-with" {doCheck = false;})
(crates.buildCrate "dioxus-cli" {
nativeBuildInputs = with pkgs; [pkg-config];
buildInputs = [openssl];
doCheck = false;
})
(crates.buildCrate "cargo-hot" {
nativeBuildInputs = with pkgs; [pkg-config];
buildInputs = [openssl];
})
]
++ (lib.optionals pkgs.stdenv.isDarwin [
apple-sdk_26
])
++ (lib.optionals pkgs.stdenv.isLinux [
ffmpeg
heaptrack
samply
cargo-flamegraph
perf
mold
# mold
]);
});
default = rust-shell;

View File

@@ -17,6 +17,7 @@ pub use element::*;
pub use gstreamer;
#[doc(inline)]
pub use gstreamer::{Message, MessageType, MessageView, State};
pub use gstreamer_video::VideoFormat;
pub use pad::*;
pub use pipeline::*;
pub use plugins::*;

View File

@@ -6,9 +6,71 @@ pub use gstreamer_app::AppSinkCallbacks;
wrap_gst!(AppSink, gstreamer::Element);
parent_child!(Element, AppSink);
pub struct AppSinkBuilder {
inner: AppSink,
callbacks: Option<gstreamer_app::app_sink::AppSinkCallbacksBuilder>,
}
impl AppSinkBuilder {
pub fn on_new_sample<F>(mut self, mut f: F) -> Self
where
F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
{
let mut callbacks_builder = self
.callbacks
.take()
.unwrap_or_else(gstreamer_app::app_sink::AppSinkCallbacks::builder);
callbacks_builder = callbacks_builder.new_sample(move |appsink| {
use glib::object::Cast;
let element = appsink.upcast_ref::<gstreamer::Element>();
let appsink = AppSink::from_gst_ref(element);
std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(appsink)))
.unwrap_or(Err(gstreamer::FlowError::Error))
.map(|_| gstreamer::FlowSuccess::Ok)
});
self.callbacks = Some(callbacks_builder);
self
}
pub fn on_new_preroll<F>(mut self, mut f: F) -> Self
where
F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
{
let mut callbacks_builder = self
.callbacks
.take()
.unwrap_or_else(gstreamer_app::app_sink::AppSinkCallbacks::builder);
callbacks_builder = callbacks_builder.new_preroll(move |appsink| {
use glib::object::Cast;
let element = appsink.upcast_ref::<gstreamer::Element>();
let appsink = AppSink::from_gst_ref(element);
std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(appsink)))
.unwrap_or(Err(gstreamer::FlowError::Error))
.map(|_| gstreamer::FlowSuccess::Ok)
});
self.callbacks = Some(callbacks_builder);
self
}
pub fn build(self) -> AppSink {
let AppSinkBuilder { inner, callbacks } = self;
if let Some(callbacks) = callbacks {
inner.appsink().set_callbacks(callbacks.build());
}
inner
}
}
impl Sink for AppSink {}
impl AppSink {
pub fn builder(name: impl AsRef<str>) -> AppSinkBuilder {
let inner = AppSink::new(name).expect("Failed to create AppSink");
AppSinkBuilder {
inner,
callbacks: None,
}
}
fn appsink(&self) -> &gstreamer_app::AppSink {
self.inner
.downcast_ref::<gstreamer_app::AppSink>()
@@ -54,7 +116,7 @@ impl AppSink {
self
}
pub fn on_new_frame<F>(&mut self, mut f: F) -> &mut Self
pub fn on_new_sample<F>(&mut self, mut f: F) -> &mut Self
where
F: FnMut(&AppSink) -> Result<(), gstreamer::FlowError> + Send + 'static,
{

View File

@@ -1,5 +1,7 @@
iced-video:
cd crates/iced-video && cargo run --release --example minimal
jello:
cargo r -r -- -vv
# iced-video:
# cd crates/iced-video && cargo run --release --example minimal
typegen:
@echo "Generating jellyfin type definitions..."
cd typegen && cargo run
@@ -10,6 +12,7 @@ hdrtest:
GST_DEBUG=3 gst-launch-1.0 playbin3 uri=https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c video-sink="videoconvert ! video/x-raw,format=(string)RGB10A2_LE ! fakesink"
codec:
GST_DEBUG=3 gst-discoverer-1.0 -v https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c
GST_DEBUG=3 gst-discoverer-1.0 https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c
ffprobe:
ffprobe -v error -show_format -show_streams "https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c" | grep pix_fmt

View File

@@ -8,6 +8,7 @@ bson = { version = "3.1.0", features = ["serde"] }
futures = "0.3.31"
parking_lot = "0.12.5"
redb = { version = "3.1.0", features = ["uuid"] }
secrecy = "0.10.3"
serde = "1.0.228"
tokio = { version = "1.48.0", features = ["rt"] }
uuid = "1.18.1"
uuid = { version = "1.18.1", features = ["v4"] }

View File

@@ -1,10 +1,10 @@
pub mod redb;
pub mod sqlite;
pub mod toml;
use std::collections::BTreeMap;
pub trait Store {
fn image(&self, id: &str) -> Option<Vec<u8>>;
fn save_image(&mut self, id: &str, data: &[u8]);
use uuid::Uuid;
pub struct ApiKey {
inner: secrecy::SecretBox<String>,
}
pub struct SecretStore {
api_keys: BTreeMap<Uuid, ApiKey>,
}
pub struct Settings {}

View File

@@ -1,225 +1,225 @@
use std::{
borrow::Borrow,
collections::VecDeque,
marker::PhantomData,
path::Path,
sync::{Arc, RwLock, atomic::AtomicBool},
};
use futures::task::AtomicWaker;
use redb::{Error, Key, ReadableDatabase, TableDefinition, Value};
use serde::{Serialize, de::DeserializeOwned};
const USERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("users");
const SERVERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("servers");
const SETTINGS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("settings");
#[derive(Debug)]
pub struct TableInner<T> {
db: Arc<T>,
}
impl<T> Clone for TableInner<T> {
fn clone(&self) -> Self {
Self {
db: Arc::clone(&self.db),
}
}
}
impl<T> TableInner<T> {
fn new(db: Arc<T>) -> Self {
Self { db }
}
}
impl TableInner<DatabaseHandle> {
async fn get<'a, K: Key, V: Serialize + DeserializeOwned>(
&self,
table: TableDefinition<'static, K, Vec<u8>>,
key: impl Borrow<K::SelfType<'a>>,
) -> Result<Option<V>> {
let db: &redb::Database = &self.db.as_ref().database;
let db_reader = db.begin_read()?;
let table = db_reader.open_table(table)?;
table
.get(key)?
.map(|value| bson::deserialize_from_slice(&value.value()))
.transpose()
.map_err(|e| redb::Error::Io(std::io::Error::other(e)))
}
async fn insert<
'a,
'b,
K: Key + Send + Sync,
V: Serialize + DeserializeOwned + Send + Sync + 'a,
>(
&'b self,
table: TableDefinition<'static, K, Vec<u8>>,
key: impl Borrow<K::SelfType<'a>> + Send + 'b,
value: V,
) -> Result<Option<V>> {
let db: &redb::Database = &self.db.as_ref().database;
// self.db
// .writing
// .store(true, std::sync::atomic::Ordering::SeqCst);
// let out = tokio::task::spawn_blocking(move || -> Result<Option<V>>
let out = tokio::task::spawn_blocking(|| -> Result<Option<V>> {
let db_writer = db.begin_write()?;
let out = {
let mut table = db_writer.open_table(table)?;
let serialized_value = bson::serialize_to_vec(&value)
.map_err(|e| redb::Error::Io(std::io::Error::other(e)))?;
let previous = table.insert(key, &serialized_value)?;
let out = previous
.map(|value| bson::deserialize_from_slice(&value.value()))
.transpose()
.map_err(|e| redb::Error::Io(std::io::Error::other(e)));
out
};
db_writer.commit()?;
out
})
.await
.expect("Task panicked");
out
}
}
// impl<K: Key, V: Serialize + DeserializeOwned> Table<K, V> for TableInner {
// async fn get(&self, key: K) -> Result<Option<Value>> {}
// async fn insert(&self, key: K, value: V) -> Result<Option<Value>> {}
// async fn modify(&self, key: K, v: FnOnce(V) -> V) -> Result<bool> {}
// async fn remove(&self, key: K) -> Result<Option<Value>> {}
// use std::{
// borrow::Borrow,
// collections::VecDeque,
// marker::PhantomData,
// path::Path,
// sync::{Arc, RwLock, atomic::AtomicBool},
// };
//
// use futures::task::AtomicWaker;
// use redb::{Error, Key, ReadableDatabase, TableDefinition, Value};
// use serde::{Serialize, de::DeserializeOwned};
//
// const USERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("users");
// const SERVERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("servers");
// const SETTINGS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("settings");
//
// #[derive(Debug)]
// pub struct TableInner<T> {
// db: Arc<T>,
// }
#[derive(Debug)]
pub struct Users<T>(TableInner<T>);
impl<T> Clone for Users<T> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<T> Users<T> {
const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = USERS;
}
#[derive(Debug)]
pub struct Servers<T>(TableInner<T>);
impl<T> Clone for Servers<T> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<T> Servers<T> {
const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SERVERS;
}
#[derive(Debug)]
pub struct Settings<T>(TableInner<T>);
impl<T> Clone for Settings<T> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<T> Settings<T> {
const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SETTINGS;
}
#[derive(Debug, Clone)]
pub struct Database {
users: Users<DatabaseHandle>,
servers: Servers<DatabaseHandle>,
settings: Settings<DatabaseHandle>,
handle: Arc<DatabaseHandle>,
}
#[derive(Debug)]
pub struct DatabaseHandle {
database: redb::Database,
writing: AtomicBool,
wakers: RwLock<VecDeque<AtomicWaker>>,
}
#[derive(Debug)]
pub struct DatabaseWriterGuard<'a> {
handle: &'a DatabaseHandle,
dropper: Arc<AtomicBool>,
}
// impl Drop for DatabaseWriterGuard<'_> {
// fn drop(&mut self) {
// self.handle
// .writing
// .store(false, std::sync::atomic::Ordering::SeqCst);
// let is_panicking = std::thread::panicking();
// let Ok(writer) = self.handle.wakers.write() else {
// if is_panicking {
// return;
// } else {
// panic!("Wakers lock poisoned");
// }
//
// impl<T> Clone for TableInner<T> {
// fn clone(&self) -> Self {
// Self {
// db: Arc::clone(&self.db),
// }
// if let Some(waker) = (self.handle.wakers.write()).pop() {
// waker.wake();
// };
// // let mut wakers = self.handle.wakers.write().expect();
// // if let Some(waker) = self.handle.wakers.write().expect("Wakers lock poisoned").pop_front() {
// // waker.wake();
// // }
// // while let Some(waker) = wakers.pop_front() {
// // waker.wake();
// // }
// }
// }
type Result<O, E = redb::Error> = core::result::Result<O, E>;
pub trait Table<K: Key> {
fn insert<V: Serialize + DeserializeOwned>(
&self,
key: K,
value: V,
) -> impl Future<Output = Result<Option<V>>> + Send;
fn modify<V: Serialize + DeserializeOwned, O: Serialize + DeserializeOwned>(
&self,
key: K,
v: impl FnOnce(V) -> O,
) -> impl Future<Output = Result<bool>> + Send;
fn remove<V: Serialize + DeserializeOwned>(
&self,
key: K,
) -> impl Future<Output = Result<Option<V>>> + Send;
fn get<V: Serialize + DeserializeOwned>(
&self,
key: K,
) -> impl Future<Output = Result<Option<V>>> + Send;
}
impl Database {
pub fn create(path: impl AsRef<Path>) -> Result<Self, Error> {
let writing = AtomicBool::new(false);
let wakers = RwLock::new(VecDeque::new());
let db = redb::Database::create(path)?;
let db = Arc::new(DatabaseHandle {
database: db,
writing,
wakers,
});
let table_inner = TableInner::new(Arc::clone(&db));
let users = Users(table_inner.clone());
let servers = Servers(table_inner.clone());
let settings = Settings(table_inner.clone());
Ok(Self {
servers,
users,
settings,
handle: db,
})
}
}
//
// impl<T> TableInner<T> {
// fn new(db: Arc<T>) -> Self {
// Self { db }
// }
// }
//
// impl TableInner<DatabaseHandle> {
// async fn get<'a, K: Key, V: Serialize + DeserializeOwned>(
// &self,
// table: TableDefinition<'static, K, Vec<u8>>,
// key: impl Borrow<K::SelfType<'a>>,
// ) -> Result<Option<V>> {
// let db: &redb::Database = &self.db.as_ref().database;
// let db_reader = db.begin_read()?;
// let table = db_reader.open_table(table)?;
// table
// .get(key)?
// .map(|value| bson::deserialize_from_slice(&value.value()))
// .transpose()
// .map_err(|e| redb::Error::Io(std::io::Error::other(e)))
// }
//
// async fn insert<
// 'a,
// 'b,
// K: Key + Send + Sync,
// V: Serialize + DeserializeOwned + Send + Sync + 'a,
// >(
// &'b self,
// table: TableDefinition<'static, K, Vec<u8>>,
// key: impl Borrow<K::SelfType<'a>> + Send + 'b,
// value: V,
// ) -> Result<Option<V>> {
// let db: &redb::Database = &self.db.as_ref().database;
// // self.db
// // .writing
// // .store(true, std::sync::atomic::Ordering::SeqCst);
//
// // let out = tokio::task::spawn_blocking(move || -> Result<Option<V>>
//
// let out = tokio::task::spawn_blocking(|| -> Result<Option<V>> {
// let db_writer = db.begin_write()?;
// let out = {
// let mut table = db_writer.open_table(table)?;
// let serialized_value = bson::serialize_to_vec(&value)
// .map_err(|e| redb::Error::Io(std::io::Error::other(e)))?;
// let previous = table.insert(key, &serialized_value)?;
// let out = previous
// .map(|value| bson::deserialize_from_slice(&value.value()))
// .transpose()
// .map_err(|e| redb::Error::Io(std::io::Error::other(e)));
// out
// };
// db_writer.commit()?;
// out
// })
// .await
// .expect("Task panicked");
//
// out
// }
// }
//
// // impl<K: Key, V: Serialize + DeserializeOwned> Table<K, V> for TableInner {
// // async fn get(&self, key: K) -> Result<Option<Value>> {}
// // async fn insert(&self, key: K, value: V) -> Result<Option<Value>> {}
// // async fn modify(&self, key: K, v: FnOnce(V) -> V) -> Result<bool> {}
// // async fn remove(&self, key: K) -> Result<Option<Value>> {}
// // }
//
// #[derive(Debug)]
// pub struct Users<T>(TableInner<T>);
//
// impl<T> Clone for Users<T> {
// fn clone(&self) -> Self {
// Self(self.0.clone())
// }
// }
// impl<T> Users<T> {
// const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = USERS;
// }
//
// #[derive(Debug)]
// pub struct Servers<T>(TableInner<T>);
// impl<T> Clone for Servers<T> {
// fn clone(&self) -> Self {
// Self(self.0.clone())
// }
// }
// impl<T> Servers<T> {
// const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SERVERS;
// }
//
// #[derive(Debug)]
// pub struct Settings<T>(TableInner<T>);
// impl<T> Clone for Settings<T> {
// fn clone(&self) -> Self {
// Self(self.0.clone())
// }
// }
// impl<T> Settings<T> {
// const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SETTINGS;
// }
//
// #[derive(Debug, Clone)]
// pub struct Database {
// users: Users<DatabaseHandle>,
// servers: Servers<DatabaseHandle>,
// settings: Settings<DatabaseHandle>,
// handle: Arc<DatabaseHandle>,
// }
//
// #[derive(Debug)]
// pub struct DatabaseHandle {
// database: redb::Database,
// writing: AtomicBool,
// wakers: RwLock<VecDeque<AtomicWaker>>,
// }
//
// #[derive(Debug)]
// pub struct DatabaseWriterGuard<'a> {
// handle: &'a DatabaseHandle,
// dropper: Arc<AtomicBool>,
// }
//
// // impl Drop for DatabaseWriterGuard<'_> {
// // fn drop(&mut self) {
// // self.handle
// // .writing
// // .store(false, std::sync::atomic::Ordering::SeqCst);
// // let is_panicking = std::thread::panicking();
// // let Ok(writer) = self.handle.wakers.write() else {
// // if is_panicking {
// // return;
// // } else {
// // panic!("Wakers lock poisoned");
// // }
// // }
// // if let Some(waker) = (self.handle.wakers.write()).pop() {
// // waker.wake();
// // };
// // // let mut wakers = self.handle.wakers.write().expect();
// // // if let Some(waker) = self.handle.wakers.write().expect("Wakers lock poisoned").pop_front() {
// // // waker.wake();
// // // }
// // // while let Some(waker) = wakers.pop_front() {
// // // waker.wake();
// // // }
// // }
// // }
//
// type Result<O, E = redb::Error> = core::result::Result<O, E>;
//
// pub trait Table<K: Key> {
// fn insert<V: Serialize + DeserializeOwned>(
// &self,
// key: K,
// value: V,
// ) -> impl Future<Output = Result<Option<V>>> + Send;
// fn modify<V: Serialize + DeserializeOwned, O: Serialize + DeserializeOwned>(
// &self,
// key: K,
// v: impl FnOnce(V) -> O,
// ) -> impl Future<Output = Result<bool>> + Send;
// fn remove<V: Serialize + DeserializeOwned>(
// &self,
// key: K,
// ) -> impl Future<Output = Result<Option<V>>> + Send;
// fn get<V: Serialize + DeserializeOwned>(
// &self,
// key: K,
// ) -> impl Future<Output = Result<Option<V>>> + Send;
// }
//
// impl Database {
// pub fn create(path: impl AsRef<Path>) -> Result<Self, Error> {
// let writing = AtomicBool::new(false);
// let wakers = RwLock::new(VecDeque::new());
// let db = redb::Database::create(path)?;
// let db = Arc::new(DatabaseHandle {
// database: db,
// writing,
// wakers,
// });
// let table_inner = TableInner::new(Arc::clone(&db));
// let users = Users(table_inner.clone());
// let servers = Servers(table_inner.clone());
// let settings = Settings(table_inner.clone());
// Ok(Self {
// servers,
// users,
// settings,
// handle: db,
// })
// }
// }

View File

@@ -21,9 +21,10 @@ iced = { workspace = true, features = [
iced-video = { workspace = true }
iced_aw = "0.13.0"
iced_wgpu = "0.14.0"
iced_winit = "0.14.0"
reqwest = "0.12.24"
reqwest = "0.13"
tap = "1.0.1"
toml = "0.9.8"
tracing = "0.1.41"

View File

@@ -2,8 +2,9 @@ mod settings;
mod video;
mod shared_string;
use iced_video::{Video, VideoHandle};
use iced_video::{Ready, Video, VideoHandle};
use shared_string::SharedString;
use tap::Pipe as _;
use std::sync::Arc;
@@ -25,6 +26,8 @@ pub struct ItemCache {
pub tree: BTreeMap<Option<uuid::Uuid>, BTreeSet<uuid::Uuid>>,
}
const BACKGROUND_COLOR: iced::Color = iced::Color::from_rgba8(30, 30, 30, 0.7);
impl ItemCache {
pub fn insert(&mut self, parent: impl Into<Option<uuid::Uuid>>, item: Item) {
let parent = parent.into();
@@ -140,7 +143,7 @@ struct State {
screen: Screen,
settings: settings::SettingsState,
is_authenticated: bool,
video: Option<Arc<VideoHandle<Message>>>,
video: Option<Arc<VideoHandle<Message, Ready>>>,
}
impl State {
@@ -155,8 +158,6 @@ impl State {
query: None,
screen: Screen::Home,
settings: settings::SettingsState::default(),
// username_input: String::new(),
// password_input: String::new(),
is_authenticated: false,
video: None,
}
@@ -172,17 +173,8 @@ pub enum Message {
OpenItem(Option<uuid::Uuid>),
LoadedItem(Option<uuid::Uuid>, Vec<Item>),
Error(String),
SetToken(String),
Back,
Home,
// Login {
// username: String,
// password: String,
// config: api::JellyfinConfig,
// },
// LoginSuccess(String),
// LoadedClient(api::JellyfinClient, bool),
// Logout,
Video(video::VideoMessage),
}
@@ -249,15 +241,6 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
state.messages.push(err);
Task::none()
}
Message::SetToken(token) => {
tracing::info!("Authenticated with token: {}", token);
state
.jellyfin_client
.as_mut()
.map(|mut client| client.set_token(token));
state.is_authenticated = true;
Task::none()
}
Message::Back => {
state.current = state.history.pop().unwrap_or(None);
Task::none()
@@ -268,7 +251,6 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
}
Message::SearchQueryChanged(query) => {
state.query = Some(query);
// Handle search query change
Task::none()
}
Message::Search => {
@@ -293,9 +275,29 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
}
fn view(state: &State) -> Element<'_, Message> {
let content = home(state);
match state.screen {
Screen::Settings => settings::settings(state),
Screen::Home | _ => home(state),
Screen::Settings => {
let settings = settings::settings(state);
let settings = container(settings)
.width(Length::FillPortion(4))
.height(Length::FillPortion(4))
.style(container::rounded_box)
.pipe(mouse_area)
.on_press(Message::Refresh)
.pipe(|c| iced::widget::column![space::vertical(), c, space::vertical()])
.pipe(container)
.width(Length::Fill)
.width(Length::Fill)
.align_y(Alignment::Center)
.align_x(Alignment::Center)
.style(|_| container::background(BACKGROUND_COLOR))
.padding(50)
.pipe(mouse_area)
.on_press(Message::Settings(settings::SettingsMessage::Close));
stack![content, settings].into()
}
Screen::Home | _ => content,
}
}
@@ -310,38 +312,34 @@ fn body(state: &State) -> Element<'_, Message> {
if let Some(ref video) = state.video {
video::player(video)
} else {
scrollable(
container(
Grid::with_children(state.cache.items_of(state.current).into_iter().map(card))
.fluid(400)
.spacing(50),
)
Grid::with_children(state.cache.items_of(state.current).into_iter().map(card))
.fluid(400)
.spacing(50)
.pipe(container)
.padding(50)
.align_x(Alignment::Center)
// .align_y(Alignment::Center)
.height(Length::Fill)
.width(Length::Fill),
)
.height(Length::Fill)
.into()
.width(Length::Fill)
.pipe(scrollable)
.height(Length::Fill)
.into()
}
}
fn header(state: &State) -> Element<'_, Message> {
row([
container(
Button::new(
Text::new(
state
.jellyfin_client
.as_ref()
.map(|c| c.config.server_url.as_str())
.unwrap_or("No Server"),
)
.align_x(Alignment::Start),
)
.on_press(Message::Home),
text(
state
.jellyfin_client
.as_ref()
.map(|c| c.config.server_url.as_str())
.unwrap_or("No Server"),
)
.align_x(Alignment::Start)
.pipe(button)
.on_press(Message::Home)
.pipe(container)
.padding(10)
.width(Length::Fill)
.height(Length::Fill)
@@ -350,18 +348,17 @@ fn header(state: &State) -> Element<'_, Message> {
.style(container::rounded_box)
.into(),
search(state),
container(
row([
button("Refresh").on_press(Message::Refresh).into(),
button("Settings")
.on_press(Message::Settings(settings::SettingsMessage::Open))
.into(),
button("TestVideo")
.on_press(Message::Video(video::VideoMessage::Test))
.into(),
])
.spacing(10),
)
row([
button("Refresh").on_press(Message::Refresh).into(),
button("Settings")
.on_press(Message::Settings(settings::SettingsMessage::Open))
.into(),
button("TestVideo")
.on_press(Message::Video(video::VideoMessage::Test))
.into(),
])
.spacing(10)
.pipe(container)
.padding(10)
.width(Length::Fill)
.height(Length::Fill)
@@ -377,19 +374,18 @@ fn header(state: &State) -> Element<'_, Message> {
}
fn search(state: &State) -> Element<'_, Message> {
container(
TextInput::new("Search...", state.query.as_deref().unwrap_or_default())
.padding(10)
.size(16)
.width(Length::Fill)
.on_input(Message::SearchQueryChanged)
.on_submit(Message::Search),
)
.padding(10)
.width(Length::Fill)
.height(Length::Shrink)
.style(container::rounded_box)
.into()
TextInput::new("Search...", state.query.as_deref().unwrap_or_default())
.padding(10)
.size(16)
.width(Length::Fill)
.on_input(Message::SearchQueryChanged)
.on_submit(Message::Search)
.pipe(container)
.padding(10)
.width(Length::Fill)
.height(Length::Shrink)
.style(container::rounded_box)
.into()
}
fn footer(state: &State) -> Element<'_, Message> {

View File

@@ -1,6 +1,5 @@
use crate::*;
use iced::Element;
// mod widget;
pub fn settings(state: &State) -> Element<'_, Message> {
screens::settings(state)
@@ -20,6 +19,9 @@ pub fn update(state: &mut State, message: SettingsMessage) -> Task<Message> {
tracing::trace!("Switching settings screen to {:?}", screen);
state.settings.screen = screen;
}
SettingsMessage::User(user) => state.settings.login_form.update(user),
SettingsMessage::Server(server) => state.settings.server_form.update(server),
}
Task::none()
}
@@ -40,9 +42,31 @@ pub enum SettingsMessage {
Open,
Close,
Select(SettingsScreen),
User(UserMessage),
Server(ServerMessage),
}
#[derive(Debug, Clone, Default)]
#[derive(Debug, Clone)]
pub enum UserMessage {
Add,
UsernameChanged(String),
PasswordChanged(String),
// Edit(uuid::Uuid),
// Delete(uuid::Uuid),
Clear,
}
#[derive(Debug, Clone)]
pub enum ServerMessage {
Add,
NameChanged(String),
UrlChanged(String),
// Edit(uuid::Uuid),
// Delete(uuid::Uuid),
Clear,
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum SettingsScreen {
#[default]
Main,
@@ -66,20 +90,133 @@ pub struct UserItem {
#[derive(Debug, Clone, Default)]
pub struct LoginForm {
username: Option<String>,
password: Option<String>,
username: String,
password: String,
}
impl LoginForm {
pub fn update(&mut self, message: UserMessage) {
match message {
UserMessage::UsernameChanged(data) => {
self.username = data;
}
UserMessage::PasswordChanged(data) => {
self.password = data;
}
UserMessage::Add => {
// Handle adding user
}
UserMessage::Clear => {
self.username.clear();
self.password.clear();
}
}
}
pub fn view(&self) -> Element<'_, Message> {
iced::widget::column![
text("Login Form"),
text_input("Enter Username", &self.username).on_input(|data| {
Message::Settings(SettingsMessage::User(UserMessage::UsernameChanged(data)))
}),
text_input("Enter Password", &self.password)
.secure(true)
.on_input(|data| {
Message::Settings(SettingsMessage::User(UserMessage::PasswordChanged(data)))
}),
row![
button(text("Add User")).on_press_maybe(self.validate()),
button(text("Cancel"))
.on_press(Message::Settings(SettingsMessage::User(UserMessage::Clear))),
]
.spacing(10),
]
.spacing(10)
.padding([10, 0])
.into()
}
pub fn validate(&self) -> Option<Message> {
(!self.username.is_empty() && !self.password.is_empty())
.then(|| Message::Settings(SettingsMessage::User(UserMessage::Add)))
}
}
#[derive(Debug, Clone, Default)]
pub struct ServerForm {
name: Option<String>,
url: Option<String>,
name: String,
url: String,
}
impl ServerForm {
pub fn update(&mut self, message: ServerMessage) {
match message {
ServerMessage::NameChanged(data) => {
self.name = data;
}
ServerMessage::UrlChanged(data) => {
self.url = data;
}
ServerMessage::Add => {
// Handle adding server
}
ServerMessage::Clear => {
self.name.clear();
self.url.clear();
}
_ => {}
}
}
pub fn view(&self) -> Element<'_, Message> {
iced::widget::column![
text("Add New Server"),
text_input("Enter server name", &self.name).on_input(|data| {
Message::Settings(SettingsMessage::Server(ServerMessage::NameChanged(data)))
}),
text_input("Enter server URL", &self.url).on_input(|data| {
Message::Settings(SettingsMessage::Server(ServerMessage::UrlChanged(data)))
}),
row![
button(text("Add Server")).on_press_maybe(self.validate()),
button(text("Cancel")).on_press(Message::Settings(SettingsMessage::Server(
ServerMessage::Clear
))),
]
.spacing(10),
]
.spacing(10)
.padding([10, 0])
.into()
}
pub fn validate(&self) -> Option<Message> {
(!self.name.is_empty() && !self.url.is_empty())
.then(|| Message::Settings(SettingsMessage::Server(ServerMessage::Add)))
}
}
mod screens {
use iced_aw::Tabs;
use super::*;
pub fn settings(state: &State) -> Element<'_, Message> {
row([settings_list(state), settings_screen(state)]).into()
Tabs::new(|f| Message::Settings(SettingsMessage::Select(f)))
.push(
SettingsScreen::Main,
iced_aw::TabLabel::Text("General".into()),
main(state),
)
.push(
SettingsScreen::Servers,
iced_aw::TabLabel::Text("Servers".into()),
server(state),
)
.push(
SettingsScreen::Users,
iced_aw::TabLabel::Text("Users".into()),
user(state),
)
.set_active_tab(&state.settings.screen)
.into()
}
pub fn settings_screen(state: &State) -> Element<'_, Message> {
@@ -89,63 +226,65 @@ mod screens {
SettingsScreen::Users => user(state),
})
.width(Length::FillPortion(10))
.height(Length::Fill)
.style(|theme| container::background(theme.extended_palette().background.base.color))
.pipe(container)
.padding(10)
.style(|theme| container::background(theme.extended_palette().secondary.base.color))
.width(Length::FillPortion(10))
.into()
}
pub fn settings_list(state: &State) -> Element<'_, Message> {
scrollable(
column(
[
button(center_text("Main")).on_press(Message::Settings(
SettingsMessage::Select(SettingsScreen::Main),
)),
button(center_text("Servers")).on_press(Message::Settings(
SettingsMessage::Select(SettingsScreen::Servers),
)),
button(center_text("Users")).on_press(Message::Settings(
SettingsMessage::Select(SettingsScreen::Users),
)),
]
.map(|p| p.clip(true).width(Length::Fill).into()),
)
.width(Length::FillPortion(2))
// .max_width(Length::FillPortion(3))
.spacing(10)
.padding(10),
column(
[
button(center_text("General")).on_press(Message::Settings(
SettingsMessage::Select(SettingsScreen::Main),
)),
button(center_text("Servers")).on_press(Message::Settings(
SettingsMessage::Select(SettingsScreen::Servers),
)),
button(center_text("Users")).on_press(Message::Settings(SettingsMessage::Select(
SettingsScreen::Users,
))),
]
.map(|p| p.clip(true).width(Length::Fill).into()),
)
.width(Length::FillPortion(2))
.spacing(10)
.padding(10)
.pipe(scrollable)
.into()
}
pub fn main(state: &State) -> Element<'_, Message> {
// placeholder for now
container(
Column::new()
.push(text("Main Settings"))
.push(toggler(true).label("Foobar"))
.spacing(20)
.padding(20),
)
.into()
Column::new()
.push(text("Main Settings"))
.push(toggler(true).label("HDR"))
.spacing(20)
.padding(20)
.pipe(container)
.into()
}
pub fn server(state: &State) -> Element<'_, Message> {
container(
Column::new()
.push(text("Server Settings"))
.push(toggler(false).label("Enable Server"))
.spacing(20)
.padding(20),
)
.into()
Column::new()
.push(text("Server Settings"))
.push(state.settings.server_form.view())
.spacing(20)
.padding(20)
.pipe(container)
.into()
}
pub fn user(state: &State) -> Element<'_, Message> {
container(
Column::new()
.push(text("User Settings"))
.push(toggler(true).label("Enable User"))
.spacing(20)
.padding(20),
)
.into()
Column::new()
.push(text("User Settings"))
.push(state.settings.login_form.view())
.spacing(20)
.padding(20)
.pipe(container)
.into()
}
}

View File

@@ -3,7 +3,7 @@ use super::*;
pub enum VideoMessage {
EndOfStream,
Open(url::Url),
Loaded(VideoHandle<Message>),
Loaded(VideoHandle<Message, Ready>),
Pause,
Play,
Seek(f64),
@@ -55,16 +55,14 @@ pub fn update(state: &mut State, message: VideoMessage) -> Task<Message> {
Task::none()
}
VideoMessage::Test => {
let url = url::Url::parse(
"https://gstreamer.freedesktop.org/data/media/sintel_trailer-480p.webm",
)
let url = url::Url::parse("https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c")
.expect("Impossible: Failed to parse hardcoded URL");
Task::done(VideoMessage::Open(url)).map(Message::Video)
}
}
}
pub fn player(video: &VideoHandle<Message>) -> Element<'_, Message> {
pub fn player(video: &VideoHandle<Message, Ready>) -> Element<'_, Message> {
container(
Video::new(video)
.width(Length::Fill)