Compare commits

...

11 Commits

Author SHA1 Message Date
uttarayan21
c2fdedf05a feat(examples): update package name and add perf ignore rules
Some checks failed
build / checks-matrix (push) Has been cancelled
build / checks-build (push) Has been cancelled
build / codecov (push) Has been cancelled
docs / docs (push) Has been cancelled
2025-12-16 02:26:54 +05:30
uttarayan21
7003002b69 chore: remove unused rust-analyzer target files and fix compilation errors in jello-test example 2025-12-16 02:25:36 +05:30
uttarayan21
c675c29be3 chore(gst): Remove flake files and configurations from gst 2025-12-16 02:25:09 +05:30
uttarayan21
7f9152e8fd feat(gst): Added gst a high level wrapper over gstreamer
chore(example): Added hdr-gstreamer-wgpu example
chore(license): Added MIT license to all crates
2025-12-16 02:23:30 +05:30
uttarayan21
6cc83ba655 chore: remove iced_video_player crate and its dependencies 2025-12-15 17:59:40 +05:30
uttarayan21
253d27c176 feat: Update iced_video_player to master 2025-12-13 03:40:12 +05:30
uttarayan21
c7afcd3f0d fix: remove debug statements from video playback initialization 2025-12-09 23:56:20 +05:30
uttarayan21
d75a2fb7e4 feat(ui): comment out gpui ui code and improve iced ui logic 2025-12-09 23:46:00 +05:30
uttarayan21
73fcf9bad1 feat: add jello-types crate and update dependencies with backtrace support 2025-12-09 23:28:51 +05:30
uttarayan21
05ae9ff570 feat(store): add database storage with redb and bson support
This commit introduces a new `store` crate that provides database functionality using redb for storage and bson for serialization. It includes tables for users, servers, and settings, along with async operations for getting, inserting, modifying, and removing data. The store supports UUID keys and integrates with the existing Jellyfin client authentication flow.

The changes also include:
- Adding new dependencies to Cargo.lock for bitvec, bson, deranged, funty, num-conv, powerfmt, radium, serde_bytes, simdutf8, time, and wyz
- Updating Cargo.toml to include the new store crate in workspace members
- Modifying ui-iced to use the new database initialization flow with config loading from TOML
- Adding a settings module to ui-iced with UI components for managing server and user configuration
- Implementing secret string handling for sensitive data like passwords
- Updating API client to support pre-authenticated clients with cached tokens
2025-11-26 16:15:41 +05:30
uttarayan21
ca1fd2e977 feat: Update the api crate 2025-11-25 18:48:13 +05:30
51 changed files with 3598 additions and 8505 deletions

775
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -5,10 +5,13 @@ members = [
"typegen", "typegen",
"ui-gpui", "ui-gpui",
"ui-iced", "ui-iced",
"crates/iced_video_player", "store",
"jello-types",
"gst",
"examples/hdr-gstreamer-wgpu",
] ]
[workspace.dependencies] [workspace.dependencies]
iced = { git = "https://github.com/iced-rs/iced", features = [ iced = { version = "0.14.0", features = [
"advanced", "advanced",
"canvas", "canvas",
"image", "image",
@@ -16,8 +19,10 @@ iced = { git = "https://github.com/iced-rs/iced", features = [
"tokio", "tokio",
"debug", "debug",
] } ] }
iced_wgpu = { git = "https://github.com/iced-rs/iced" } iced_video_player = "0.6"
iced_video_player = { path = "crates/iced_video_player" } gst = { version = "0.1.0", path = "gst" }
# iced_video_player = { git = "https://github.com/jazzfool/iced_video_player" }
# iced_video_player = { path = "crates/iced_video_player" }
[package] [package]
name = "jello" name = "jello"
@@ -28,7 +33,9 @@ license = "MIT"
[dependencies] [dependencies]
api = { version = "0.1.0", path = "api" } api = { version = "0.1.0", path = "api" }
clap = { version = "4.5", features = ["derive"] } clap = { version = "4.5", features = ["derive"] }
clap-verbosity-flag = { version = "3.0.4", features = ["tracing"] }
clap_complete = "4.5" clap_complete = "4.5"
color-backtrace = "0.7.2"
dotenvy = "0.15.7" dotenvy = "0.15.7"
error-stack = "0.6" error-stack = "0.6"
thiserror = "2.0" thiserror = "2.0"

View File

@@ -2,6 +2,7 @@
name = "api" name = "api"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
license = "MIT"
[dependencies] [dependencies]
bytes = "1.11.0" bytes = "1.11.0"

View File

@@ -4,7 +4,7 @@ pub async fn main() {
let config = std::fs::read_to_string("config.toml").expect("Config.toml"); let config = std::fs::read_to_string("config.toml").expect("Config.toml");
let config: JellyfinConfig = toml::from_str(&config).expect("Failed to parse config.toml"); let config: JellyfinConfig = toml::from_str(&config).expect("Failed to parse config.toml");
let mut jellyfin = JellyfinClient::new(config); let mut jellyfin = JellyfinClient::new_with_config(config);
jellyfin jellyfin
.authenticate_with_cached_token(".session") .authenticate_with_cached_token(".session")
.await .await

View File

@@ -3,7 +3,7 @@ pub mod jellyfin;
use std::sync::Arc; use std::sync::Arc;
use ::tap::*; use ::tap::*;
use reqwest::Method; use reqwest::{Method, header::InvalidHeaderValue};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
@@ -15,6 +15,8 @@ pub enum JellyfinApiError {
#[error("IO error: {0}")] #[error("IO error: {0}")]
IoError(#[from] std::io::Error), IoError(#[from] std::io::Error),
#[error("Unknown Jellyfin API error")] #[error("Unknown Jellyfin API error")]
InvalidHeader(#[from] InvalidHeaderValue),
#[error("Unknown Jellyfin API error")]
Unknown, Unknown,
} }
@@ -28,7 +30,49 @@ pub struct JellyfinClient {
} }
impl JellyfinClient { impl JellyfinClient {
pub fn new(config: JellyfinConfig) -> Self { pub async fn authenticate(
username: impl AsRef<str>,
password: impl AsRef<str>,
config: JellyfinConfig,
) -> Result<Self> {
let url = format!("{}/Users/AuthenticateByName", config.server_url);
let client = reqwest::Client::new();
let token = client
.post(url)
.json(&jellyfin::AuthenticateUserByName {
username: Some(username.as_ref().to_string()),
pw: Some(password.as_ref().to_string()),
})
.send()
.await?
.error_for_status()?
.json::<jellyfin::AuthenticationResult>()
.await?
.access_token
.ok_or_else(|| std::io::Error::other("No field access_token in auth response"))?;
Self::pre_authenticated(token, config)
}
pub fn pre_authenticated(token: impl AsRef<str>, config: JellyfinConfig) -> Result<Self> {
let auth_header = core::iter::once((
reqwest::header::HeaderName::from_static("x-emby-authorization"),
reqwest::header::HeaderValue::from_str(&format!(
"MediaBrowser Client=\"{}\", Device=\"{}\", DeviceId=\"{}\", Version=\"{}\"",
config.client_name, config.device_name, config.device_id, config.version
))?,
))
.collect();
let client = reqwest::Client::builder()
.default_headers(auth_header)
.build()?;
Ok(Self {
client,
access_token: Some(token.as_ref().to_string().into()),
config: Arc::new(config),
})
}
pub fn new_with_config(config: JellyfinConfig) -> Self {
JellyfinClient { JellyfinClient {
client: reqwest::Client::new(), client: reqwest::Client::new(),
access_token: None, access_token: None,
@@ -119,45 +163,6 @@ impl JellyfinClient {
Ok(out) Ok(out)
} }
pub async fn authenticate(&mut self) -> Result<jellyfin::AuthenticationResult> {
let auth_result: jellyfin::AuthenticationResult = self
.post(
"Users/AuthenticateByName",
&jellyfin::AuthenticateUserByName {
username: Some(self.config.username.clone()),
pw: Some(self.config.password.clone()),
},
)
.await?;
self.access_token = auth_result.access_token.clone().map(Into::into);
Ok(auth_result)
}
pub async fn authenticate_with_cached_token(
&mut self,
path: impl AsRef<std::path::Path>,
) -> Result<String> {
let path = path.as_ref();
if let Ok(token) = self
.load_token(path)
.await
.inspect_err(|err| tracing::warn!("Failed to load cached token: {}", err))
{
tracing::info!("Authenticating with cached token from {:?}", path);
self.access_token = Some(token.clone().into());
Ok(token)
} else {
tracing::info!("No cached token found at {:?}, authenticating...", path);
let token = self
.authenticate()
.await?
.access_token
.ok_or_else(|| JellyfinApiError::Unknown)?;
self.save_token(path).await?;
Ok(token)
}
}
pub async fn raw_items(&self) -> Result<jellyfin::BaseItemDtoQueryResult> { pub async fn raw_items(&self) -> Result<jellyfin::BaseItemDtoQueryResult> {
let text = &self let text = &self
.request_builder(Method::GET, "Items") .request_builder(Method::GET, "Items")
@@ -250,53 +255,16 @@ impl JellyfinClient {
"{}/Videos/{}/stream?static=true", "{}/Videos/{}/stream?static=true",
self.config.server_url.as_str(), self.config.server_url.as_str(),
item, item,
// item,
); );
Ok(url::Url::parse(&stream_url).expect("Failed to parse stream URL")) Ok(url::Url::parse(&stream_url).expect("Failed to parse stream URL"))
} }
} }
// pub trait Item {
// fn id(&self) -> &str;
// fn name(&self) -> &str;
// fn type_(&self) -> jellyfin::BaseItemKind;
// fn media_type(&self) -> &str;
// }
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JellyfinConfig { pub struct JellyfinConfig {
pub username: String,
pub password: String,
pub server_url: iref::IriBuf, pub server_url: iref::IriBuf,
pub device_id: String, pub device_id: String,
} pub device_name: String,
pub client_name: String,
impl JellyfinConfig { pub version: String,
pub fn new(
username: String,
password: String,
server_url: impl AsRef<str>,
device_id: String,
) -> Self {
JellyfinConfig {
username,
password,
server_url: iref::IriBuf::new(server_url.as_ref().into())
.expect("Failed to parse server URL"),
device_id,
}
}
}
#[test]
fn test_client_authenticate() {
let config = JellyfinConfig {
username: "servius".to_string(),
password: "nfz6yqr_NZD1nxk!faj".to_string(),
server_url: iref::IriBuf::new("https://jellyfin.tsuba.darksailor.dev".into()).unwrap(),
device_id: "testdeviceid".to_string(),
};
let mut client = JellyfinClient::new(config);
let auth_result = tokio_test::block_on(client.authenticate());
assert!(auth_result.is_ok());
} }

File diff suppressed because it is too large Load Diff

View File

@@ -1,63 +0,0 @@
[package]
name = "iced_video_player"
description = "A convenient video player widget for Iced"
homepage = "https://github.com/jazzfool/iced_video_player"
repository = "https://github.com/jazzfool/iced_video_player"
readme = "README.md"
keywords = ["gui", "iced", "video"]
categories = ["gui", "multimedia"]
version = "0.6.0"
authors = ["jazzfool"]
edition = "2021"
resolver = "2"
license = "MIT OR Apache-2.0"
exclude = [".media/test.mp4"]
[dependencies]
iced = { git = "https://github.com/iced-rs/iced", features = [
"image",
"advanced",
"wgpu",
] }
iced_wgpu = { git = "https://github.com/iced-rs/iced" }
gstreamer = "0.23"
gstreamer-app = "0.23" # appsink
gstreamer-base = "0.23" # basesrc
glib = "0.20" # gobject traits and error type
log = "0.4"
thiserror = "1"
url = "2" # media uri
[package.metadata.nix]
systems = ["x86_64-linux"]
app = true
build = true
runtimeLibs = [
"vulkan-loader",
"wayland",
"wayland-protocols",
"libxkbcommon",
"xorg.libX11",
"xorg.libXrandr",
"xorg.libXi",
"gst_all_1.gstreamer",
"gst_all_1.gstreamermm",
"gst_all_1.gst-plugins-bad",
"gst_all_1.gst-plugins-ugly",
"gst_all_1.gst-plugins-good",
"gst_all_1.gst-plugins-base",
]
buildInputs = [
"libxkbcommon",
"gst_all_1.gstreamer",
"gst_all_1.gstreamermm",
"gst_all_1.gst-plugins-bad",
"gst_all_1.gst-plugins-ugly",
"gst_all_1.gst-plugins-good",
"gst_all_1.gst-plugins-base",
]
[package.metadata.docs.rs]
rustc-args = ["--cfg", "docsrs"]
rustdoc-args = ["--cfg", "docsrs"]
targets = ["wasm32-unknown-unknown"]

View File

@@ -1,176 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

View File

@@ -1,23 +0,0 @@
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@@ -1,64 +0,0 @@
# Iced Video Player Widget
Composable component to play videos in any Iced application built on the excellent GStreamer library.
<img src=".media/screenshot.png" width="50%" />
## Overview
In general, this supports anything that [`gstreamer/playbin`](https://gstreamer.freedesktop.org/documentation/playback/playbin.html?gi-language=c) supports.
Features:
- Load video files from any file path **or URL** (support for streaming over network).
- Video buffering when streaming on a network.
- Audio support.
- Programmatic control.
- Can capture thumbnails from a set of timestamps.
- Good performance (i.e., comparable to other video players). GStreamer (with the right plugins) will perform hardware-accelerated decoding, and the color space (YUV to RGB) is converted on the GPU whilst rendering the frame.
Limitations (hopefully to be fixed):
- GStreamer is a bit annoying to set up on Windows.
The player **does not** come with any surrounding GUI controls, but they should be quite easy to implement should you need them.
See the "minimal" example for a demonstration on how you could implement pausing, looping, and seeking.
## Example Usage
```rust
use iced_video_player::{Video, VideoPlayer};
fn main() -> iced::Result {
iced::run("Video Player", (), App::view)
}
struct App {
video: Video,
}
impl Default for App {
fn default() -> Self {
App {
video: Video::new(&url::Url::parse("file:///C:/my_video.mp4").unwrap()).unwrap(),
}
}
}
impl App {
fn view(&self) -> iced::Element<()> {
VideoPlayer::new(&self.video).into()
}
}
```
## Building
Follow the [GStreamer build instructions](https://github.com/sdroege/gstreamer-rs#installation). This should be able to compile on MSVC, MinGW, Linux, and MacOS.
## License
Licensed under either
- [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0)
- [MIT](http://opensource.org/licenses/MIT)
at your option.

View File

@@ -1,139 +0,0 @@
use iced::{
widget::{Button, Column, Container, Row, Slider, Text},
Element,
};
use iced_video_player::{Video, VideoPlayer};
use std::time::Duration;
fn main() -> iced::Result {
iced::run(App::update, App::view)
}
#[derive(Clone, Debug)]
enum Message {
TogglePause,
ToggleLoop,
Seek(f64),
SeekRelease,
EndOfStream,
NewFrame,
}
struct App {
video: Video,
position: f64,
dragging: bool,
}
impl Default for App {
fn default() -> Self {
App {
video: Video::new(
&url::Url::parse("https://jellyfin.tsuba.darksailor.dev/Videos/1d7e2012-e17d-edbb-25c3-2dbcc803d6b6/stream?static=true")
.expect("Failed to parse URL"),
)
.expect("Failed to create video"),
position: 0.0,
dragging: false,
}
}
}
impl App {
fn update(&mut self, message: Message) {
match message {
Message::TogglePause => {
self.video.set_paused(!self.video.paused());
}
Message::ToggleLoop => {
self.video.set_looping(!self.video.looping());
}
Message::Seek(secs) => {
self.dragging = true;
self.video.set_paused(true);
self.position = secs;
}
Message::SeekRelease => {
self.dragging = false;
self.video
.seek(Duration::from_secs_f64(self.position), false)
.expect("seek");
self.video.set_paused(false);
}
Message::EndOfStream => {
println!("end of stream");
}
Message::NewFrame => {
if !self.dragging {
self.position = self.video.position().as_secs_f64();
}
}
}
}
fn view(&self) -> Element<Message> {
Column::new()
.push(
Container::new(
VideoPlayer::new(&self.video)
.width(iced::Length::Fill)
.height(iced::Length::Fill)
.content_fit(iced::ContentFit::Contain)
.on_end_of_stream(Message::EndOfStream)
.on_new_frame(Message::NewFrame),
)
.align_x(iced::Alignment::Center)
.align_y(iced::Alignment::Center)
.width(iced::Length::Fill)
.height(iced::Length::Fill),
)
.push(
Container::new(
Slider::new(
0.0..=self.video.duration().as_secs_f64(),
self.position,
Message::Seek,
)
.step(0.1)
.on_release(Message::SeekRelease),
)
.padding(iced::Padding::new(5.0).left(10.0).right(10.0)),
)
.push(
Row::new()
.spacing(5)
.align_y(iced::alignment::Vertical::Center)
.padding(iced::Padding::new(10.0).top(0.0))
.push(
Button::new(Text::new(if self.video.paused() {
"Play"
} else {
"Pause"
}))
.width(80.0)
.on_press(Message::TogglePause),
)
.push(
Button::new(Text::new(if self.video.looping() {
"Disable Loop"
} else {
"Enable Loop"
}))
.width(120.0)
.on_press(Message::ToggleLoop),
)
.push(
Text::new(format!(
"{}:{:02}s / {}:{:02}s",
self.position as u64 / 60,
self.position as u64 % 60,
self.video.duration().as_secs() / 60,
self.video.duration().as_secs() % 60,
))
.width(iced::Length::Fill)
.align_x(iced::alignment::Horizontal::Right),
),
)
.into()
}
}

View File

@@ -1,275 +0,0 @@
{
"nodes": {
"crane": {
"flake": false,
"locked": {
"lastModified": 1758758545,
"narHash": "sha256-NU5WaEdfwF6i8faJ2Yh+jcK9vVFrofLcwlD/mP65JrI=",
"owner": "ipetkov",
"repo": "crane",
"rev": "95d528a5f54eaba0d12102249ce42f4d01f4e364",
"type": "github"
},
"original": {
"owner": "ipetkov",
"ref": "v0.21.1",
"repo": "crane",
"type": "github"
}
},
"dream2nix": {
"inputs": {
"nixpkgs": [
"nixCargoIntegration",
"nixpkgs"
],
"purescript-overlay": "purescript-overlay",
"pyproject-nix": "pyproject-nix"
},
"locked": {
"lastModified": 1763413832,
"narHash": "sha256-dkqBwDXiv8MPoFyIvOuC4bVubAP+TlVZUkVMB78TTSg=",
"owner": "nix-community",
"repo": "dream2nix",
"rev": "5658fba3a0b6b7d5cb0460b949651f64f644a743",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "dream2nix",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flakeCompat": {
"flake": false,
"locked": {
"lastModified": 1761588595,
"narHash": "sha256-XKUZz9zewJNUj46b4AJdiRZJAvSZ0Dqj2BNfXvFlJC4=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"mk-naked-shell": {
"flake": false,
"locked": {
"lastModified": 1681286841,
"narHash": "sha256-3XlJrwlR0nBiREnuogoa5i1b4+w/XPe0z8bbrJASw0g=",
"owner": "90-008",
"repo": "mk-naked-shell",
"rev": "7612f828dd6f22b7fb332cc69440e839d7ffe6bd",
"type": "github"
},
"original": {
"owner": "90-008",
"repo": "mk-naked-shell",
"type": "github"
}
},
"nixCargoIntegration": {
"inputs": {
"crane": "crane",
"dream2nix": "dream2nix",
"mk-naked-shell": "mk-naked-shell",
"nixpkgs": [
"nixpkgs"
],
"parts": "parts",
"rust-overlay": "rust-overlay",
"treefmt": "treefmt"
},
"locked": {
"lastModified": 1763619566,
"narHash": "sha256-92rSHIwh5qTXjcktVEWyKu5EPB3/7UdgjgjtWZ5ET6w=",
"owner": "yusdacra",
"repo": "nix-cargo-integration",
"rev": "ac45d8c0d6876e6547d62bc729654c7b9a79c760",
"type": "github"
},
"original": {
"owner": "yusdacra",
"repo": "nix-cargo-integration",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1763421233,
"narHash": "sha256-Stk9ZYRkGrnnpyJ4eqt9eQtdFWRRIvMxpNRf4sIegnw=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "89c2b2330e733d6cdb5eae7b899326930c2c0648",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"parts": {
"inputs": {
"nixpkgs-lib": [
"nixCargoIntegration",
"nixpkgs"
]
},
"locked": {
"lastModified": 1762980239,
"narHash": "sha256-8oNVE8TrD19ulHinjaqONf9QWCKK+w4url56cdStMpM=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "52a2caecc898d0b46b2b905f058ccc5081f842da",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"purescript-overlay": {
"inputs": {
"flake-compat": "flake-compat",
"nixpkgs": [
"nixCargoIntegration",
"dream2nix",
"nixpkgs"
],
"slimlock": "slimlock"
},
"locked": {
"lastModified": 1728546539,
"narHash": "sha256-Sws7w0tlnjD+Bjck1nv29NjC5DbL6nH5auL9Ex9Iz2A=",
"owner": "thomashoneyman",
"repo": "purescript-overlay",
"rev": "4ad4c15d07bd899d7346b331f377606631eb0ee4",
"type": "github"
},
"original": {
"owner": "thomashoneyman",
"repo": "purescript-overlay",
"type": "github"
}
},
"pyproject-nix": {
"inputs": {
"nixpkgs": [
"nixCargoIntegration",
"dream2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1752481895,
"narHash": "sha256-luVj97hIMpCbwhx3hWiRwjP2YvljWy8FM+4W9njDhLA=",
"owner": "pyproject-nix",
"repo": "pyproject.nix",
"rev": "16ee295c25107a94e59a7fc7f2e5322851781162",
"type": "github"
},
"original": {
"owner": "pyproject-nix",
"repo": "pyproject.nix",
"type": "github"
}
},
"root": {
"inputs": {
"flakeCompat": "flakeCompat",
"nixCargoIntegration": "nixCargoIntegration",
"nixpkgs": "nixpkgs"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixCargoIntegration",
"nixpkgs"
]
},
"locked": {
"lastModified": 1763606317,
"narHash": "sha256-lsq4Urmb9Iyg2zyg2yG6oMQk9yuaoIgy+jgvYM4guxA=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "a5615abaf30cfaef2e32f1ff9bd5ca94e2911371",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"slimlock": {
"inputs": {
"nixpkgs": [
"nixCargoIntegration",
"dream2nix",
"purescript-overlay",
"nixpkgs"
]
},
"locked": {
"lastModified": 1688756706,
"narHash": "sha256-xzkkMv3neJJJ89zo3o2ojp7nFeaZc2G0fYwNXNJRFlo=",
"owner": "thomashoneyman",
"repo": "slimlock",
"rev": "cf72723f59e2340d24881fd7bf61cb113b4c407c",
"type": "github"
},
"original": {
"owner": "thomashoneyman",
"repo": "slimlock",
"type": "github"
}
},
"treefmt": {
"inputs": {
"nixpkgs": [
"nixCargoIntegration",
"nixpkgs"
]
},
"locked": {
"lastModified": 1762938485,
"narHash": "sha256-AlEObg0syDl+Spi4LsZIBrjw+snSVU4T8MOeuZJUJjM=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "5b4ee75aeefd1e2d5a1cc43cf6ba65eba75e83e4",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "treefmt-nix",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View File

@@ -1,38 +0,0 @@
# Dev-shell flake: provides the GStreamer plugin stack, Wayland libraries
# and rustup for building this workspace.
{
  inputs = {
    # Consumed by shell.nix so non-flake-enabled nix can reuse this flake.
    flakeCompat = {
      url = "github:edolstra/flake-compat";
      flake = false;
    };
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    nixCargoIntegration = {
      url = "github:yusdacra/nix-cargo-integration";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };
  outputs = inputs: let
    pkgs = import inputs.nixpkgs {
      # NOTE(review): hard-coded to x86_64-linux; other systems get no shell.
      system = "x86_64-linux";
    };
  in {
    devShells."x86_64-linux".default = pkgs.mkShell {
      # "GST_PLUGIN_PATH" = "${pkgs.gst_all_1.gstreamer}:${pkgs.gst_all_1.gst-plugins-bad}:${pkgs.gst_all_1.gst-plugins-ugly}:${pkgs.gst_all_1.gst-plugins-good}:${pkgs.gst_all_1.gst-plugins-base}";
      buildInputs = with pkgs; [
        # GStreamer core plus the plugin sets used for decoding/playback.
        gst_all_1.gstreamer
        gst_all_1.gst-plugins-bad
        gst_all_1.gst-plugins-ugly
        gst_all_1.gst-plugins-good
        gst_all_1.gst-plugins-base
        # Windowing deps for the iced/wgpu frontend.
        libxkbcommon
        wayland
        rustup
      ];
      nativeBuildInputs = with pkgs; [
        pkg-config
        wayland
      ];
      packages = with pkgs; [wayland];
    };
  };
}

View File

@@ -1,12 +0,0 @@
# Flake's devShell for non-flake-enabled nix instances
# Fetches flake-compat at the exact rev/hash pinned in flake.lock, then
# evaluates this directory's flake through it and exposes the default shell.
(import
  (
    let lock = builtins.fromJSON (builtins.readFile ./flake.lock);
    in
    fetchTarball {
      url =
        "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flakeCompat.locked.rev}.tar.gz";
      sha256 = lock.nodes.flakeCompat.locked.narHash;
    }
  )
  { src = ./.; }).shellNix.default

View File

@@ -1,76 +0,0 @@
//! # Iced Video Player
//!
//! A convenient video player widget for Iced.
//!
//! To get started, load a video from a URI (e.g., a file path prefixed with `file:///`) using [`Video::new`](crate::Video::new),
//! then use it like any other Iced widget in your `view` function by creating a [`VideoPlayer`].
//!
//! Example:
//! ```rust
//! use iced_video_player::{Video, VideoPlayer};
//!
//! fn main() -> iced::Result {
//! iced::run("Video Player", (), App::view)
//! }
//!
//! struct App {
//! video: Video,
//! }
//!
//! impl Default for App {
//! fn default() -> Self {
//! App {
//! video: Video::new(&url::Url::parse("file:///C:/my_video.mp4").unwrap()).unwrap(),
//! }
//! }
//! }
//!
//! impl App {
//! fn view(&self) -> iced::Element<()> {
//! VideoPlayer::new(&self.video).into()
//! }
//! }
//! ```
//!
//! You can programmatically control the video (e.g., seek, pause, loop, grab thumbnails) by accessing various methods on [`Video`].
mod pipeline;
mod video;
mod video_player;
use gstreamer as gst;
use thiserror::Error;
pub use video::Position;
pub use video::Video;
pub use video_player::VideoPlayer;
/// Errors that can occur while loading or controlling a video.
#[derive(Debug, Error)]
pub enum Error {
    /// A GLib-level error reported by GStreamer.
    #[error("{0}")]
    Glib(#[from] glib::Error),
    /// A fallible GLib call failed without a detailed error value.
    #[error("{0}")]
    Bool(#[from] glib::BoolError),
    #[error("failed to get the gstreamer bus")]
    Bus,
    #[error("failed to get AppSink element with name='{0}' from gstreamer pipeline")]
    AppSink(String),
    /// The pipeline refused a state transition (e.g. to `Playing`).
    #[error("{0}")]
    StateChange(#[from] gst::StateChangeError),
    #[error("failed to cast gstreamer element")]
    Cast,
    #[error("{0}")]
    Io(#[from] std::io::Error),
    #[error("invalid URI")]
    Uri,
    #[error("failed to get media capabilities")]
    Caps,
    #[error("failed to query media duration or position")]
    Duration,
    #[error("failed to sync with playback")]
    Sync,
    #[error("failed to lock internal sync primitive")]
    Lock,
    /// The reported framerate was NaN, infinite, zero, or negative.
    #[error("invalid framerate: {0}")]
    Framerate(f64),
}

View File

@@ -1,469 +0,0 @@
use crate::video::Frame;
use iced_wgpu::primitive::Primitive;
use iced_wgpu::wgpu;
use std::{
collections::{btree_map::Entry, BTreeMap},
num::NonZero,
sync::{
atomic::{AtomicBool, AtomicUsize, Ordering},
Arc, Mutex,
},
};
/// Per-draw uniform data: the destination rectangle in clip space.
#[repr(C)]
struct Uniforms {
    rect: [f32; 4],
    // Padded to 256 bytes so each dynamic-offset slot into the uniform
    // buffer satisfies wgpu's min_uniform_buffer_offset_alignment.
    _pad: [u8; 240],
}

/// GPU resources held for one live video instance.
struct VideoEntry {
    texture_y: wgpu::Texture,  // luma (Y) plane, R8Unorm, full resolution
    texture_uv: wgpu::Texture, // interleaved chroma (UV) plane, Rg8Unorm, half resolution
    instances: wgpu::Buffer,   // uniform buffer of up to 256 `Uniforms` slots
    bg0: wgpu::BindGroup,
    // Cleared by the owning `Video` on drop; lets `cleanup` reclaim resources.
    alive: Arc<AtomicBool>,
    prepare_index: AtomicUsize, // next uniform slot written by `prepare`
    render_index: AtomicUsize,  // next uniform slot consumed by `draw`
}

/// Renderer-side state shared by every `VideoPrimitive`.
pub(crate) struct VideoPipeline {
    pipeline: wgpu::RenderPipeline,
    bg0_layout: wgpu::BindGroupLayout,
    sampler: wgpu::Sampler,
    videos: BTreeMap<u64, VideoEntry>, // keyed by `Video` id
}
impl VideoPipeline {
    /// Compiles the NV12 shader and builds the render pipeline, bind-group
    /// layout and shared sampler used by every video.
    fn new(device: &wgpu::Device, format: wgpu::TextureFormat) -> Self {
        let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
            label: Some("iced_video_player shader"),
            source: wgpu::ShaderSource::Wgsl(include_str!("shader.wgsl").into()),
        });

        // Bind group 0: Y texture, UV texture, sampler, and a dynamic-offset
        // uniform buffer exposing one `Uniforms` slot per draw.
        let bg0_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
            label: Some("iced_video_player bind group 0 layout"),
            entries: &[
                wgpu::BindGroupLayoutEntry {
                    binding: 0,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Texture {
                        sample_type: wgpu::TextureSampleType::Float { filterable: true },
                        view_dimension: wgpu::TextureViewDimension::D2,
                        multisampled: false,
                    },
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 1,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Texture {
                        sample_type: wgpu::TextureSampleType::Float { filterable: true },
                        view_dimension: wgpu::TextureViewDimension::D2,
                        multisampled: false,
                    },
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 2,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 3,
                    visibility: wgpu::ShaderStages::VERTEX,
                    ty: wgpu::BindingType::Buffer {
                        ty: wgpu::BufferBindingType::Uniform,
                        has_dynamic_offset: true,
                        min_binding_size: None,
                    },
                    count: None,
                },
            ],
        });

        let layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
            label: Some("iced_video_player pipeline layout"),
            bind_group_layouts: &[&bg0_layout],
            push_constant_ranges: &[],
        });

        let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
            label: Some("iced_video_player pipeline"),
            layout: Some(&layout),
            vertex: wgpu::VertexState {
                module: &shader,
                entry_point: Some("vs_main"),
                // No vertex buffers: the quad is generated in the shader.
                buffers: &[],
                compilation_options: wgpu::PipelineCompilationOptions::default(),
            },
            primitive: wgpu::PrimitiveState::default(),
            depth_stencil: None,
            multisample: wgpu::MultisampleState {
                count: 1,
                mask: !0,
                alpha_to_coverage_enabled: false,
            },
            fragment: Some(wgpu::FragmentState {
                module: &shader,
                entry_point: Some("fs_main"),
                targets: &[Some(wgpu::ColorTargetState {
                    format,
                    // Video quads are opaque; no blending needed.
                    blend: None,
                    write_mask: wgpu::ColorWrites::ALL,
                })],
                compilation_options: wgpu::PipelineCompilationOptions::default(),
            }),
            multiview: None,
            cache: None,
        });

        // One linear-filtering, clamp-to-edge sampler shared by both planes.
        let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
            label: Some("iced_video_player sampler"),
            address_mode_u: wgpu::AddressMode::ClampToEdge,
            address_mode_v: wgpu::AddressMode::ClampToEdge,
            address_mode_w: wgpu::AddressMode::ClampToEdge,
            mag_filter: wgpu::FilterMode::Linear,
            min_filter: wgpu::FilterMode::Linear,
            mipmap_filter: wgpu::FilterMode::Nearest,
            lod_min_clamp: 0.0,
            lod_max_clamp: 1.0,
            compare: None,
            anisotropy_clamp: 1,
            border_color: None,
        });

        VideoPipeline {
            pipeline,
            bg0_layout,
            sampler,
            videos: BTreeMap::new(),
        }
    }
    /// Uploads one NV12 frame for `video_id`, creating the video's GPU
    /// resources (planar textures, uniform buffer, bind group) on first use.
    ///
    /// `frame` must contain the Y plane (`width * height` bytes) followed by
    /// the interleaved half-resolution UV plane.
    fn upload(
        &mut self,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
        video_id: u64,
        alive: &Arc<AtomicBool>,
        (width, height): (u32, u32),
        frame: &[u8],
    ) {
        if let Entry::Vacant(entry) = self.videos.entry(video_id) {
            // Full-resolution luma plane.
            let texture_y = device.create_texture(&wgpu::TextureDescriptor {
                label: Some("iced_video_player texture"),
                size: wgpu::Extent3d {
                    width,
                    height,
                    depth_or_array_layers: 1,
                },
                mip_level_count: 1,
                sample_count: 1,
                dimension: wgpu::TextureDimension::D2,
                format: wgpu::TextureFormat::R8Unorm,
                usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
                view_formats: &[],
            });

            // Half-resolution interleaved chroma plane (4:2:0 subsampling).
            let texture_uv = device.create_texture(&wgpu::TextureDescriptor {
                label: Some("iced_video_player texture"),
                size: wgpu::Extent3d {
                    width: width / 2,
                    height: height / 2,
                    depth_or_array_layers: 1,
                },
                mip_level_count: 1,
                sample_count: 1,
                dimension: wgpu::TextureDimension::D2,
                format: wgpu::TextureFormat::Rg8Unorm,
                usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
                view_formats: &[],
            });

            let view_y = texture_y.create_view(&wgpu::TextureViewDescriptor {
                label: Some("iced_video_player texture view"),
                format: None,
                dimension: None,
                aspect: wgpu::TextureAspect::All,
                base_mip_level: 0,
                mip_level_count: None,
                base_array_layer: 0,
                array_layer_count: None,
                usage: Some(wgpu::TextureUsages::empty()),
            });

            let view_uv = texture_uv.create_view(&wgpu::TextureViewDescriptor {
                label: Some("iced_video_player texture view"),
                format: None,
                dimension: None,
                aspect: wgpu::TextureAspect::All,
                base_mip_level: 0,
                mip_level_count: None,
                base_array_layer: 0,
                array_layer_count: None,
                usage: Some(wgpu::TextureUsages::empty()),
            });

            let instances = device.create_buffer(&wgpu::BufferDescriptor {
                label: Some("iced_video_player uniform buffer"),
                size: 256 * std::mem::size_of::<Uniforms>() as u64, // max 256 video players per frame
                usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::UNIFORM,
                mapped_at_creation: false,
            });

            let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
                label: Some("iced_video_player bind group"),
                layout: &self.bg0_layout,
                entries: &[
                    wgpu::BindGroupEntry {
                        binding: 0,
                        resource: wgpu::BindingResource::TextureView(&view_y),
                    },
                    wgpu::BindGroupEntry {
                        binding: 1,
                        resource: wgpu::BindingResource::TextureView(&view_uv),
                    },
                    wgpu::BindGroupEntry {
                        binding: 2,
                        resource: wgpu::BindingResource::Sampler(&self.sampler),
                    },
                    wgpu::BindGroupEntry {
                        binding: 3,
                        resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding {
                            buffer: &instances,
                            offset: 0,
                            // Each dynamic-offset slot exposes exactly one `Uniforms`.
                            size: Some(NonZero::new(std::mem::size_of::<Uniforms>() as _).unwrap()),
                        }),
                    },
                ],
            });

            entry.insert(VideoEntry {
                texture_y,
                texture_uv,
                instances,
                bg0: bind_group,
                alive: Arc::clone(alive),
                prepare_index: AtomicUsize::new(0),
                render_index: AtomicUsize::new(0),
            });
        }

        let VideoEntry {
            texture_y,
            texture_uv,
            ..
        } = self.videos.get(&video_id).unwrap();

        // Copy the Y plane, then the UV plane, out of the packed NV12 buffer.
        queue.write_texture(
            wgpu::TexelCopyTextureInfo {
                texture: texture_y,
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            &frame[..(width * height) as usize],
            wgpu::TexelCopyBufferLayout {
                offset: 0,
                bytes_per_row: Some(width),
                rows_per_image: Some(height),
            },
            wgpu::Extent3d {
                width,
                height,
                depth_or_array_layers: 1,
            },
        );
        queue.write_texture(
            wgpu::TexelCopyTextureInfo {
                texture: texture_uv,
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            &frame[(width * height) as usize..],
            wgpu::TexelCopyBufferLayout {
                offset: 0,
                // Two bytes per texel at half width still equals `width` bytes/row.
                bytes_per_row: Some(width),
                rows_per_image: Some(height / 2),
            },
            wgpu::Extent3d {
                width: width / 2,
                height: height / 2,
                depth_or_array_layers: 1,
            },
        );
    }
    /// Frees GPU resources for videos whose owner has been dropped
    /// (their shared `alive` flag is now false).
    fn cleanup(&mut self) {
        let ids: Vec<_> = self
            .videos
            .iter()
            .filter_map(|(id, entry)| (!entry.alive.load(Ordering::SeqCst)).then_some(*id))
            .collect();
        for id in ids {
            if let Some(video) = self.videos.remove(&id) {
                // Destroy explicitly instead of waiting for Drop so the GPU
                // memory is reclaimed immediately.
                video.texture_y.destroy();
                video.texture_uv.destroy();
                video.instances.destroy();
            }
        }
    }
    /// Writes this draw's bounds into the next uniform slot for `video_id`
    /// and garbage-collects entries for dropped videos.
    fn prepare(&mut self, queue: &wgpu::Queue, video_id: u64, bounds: &iced::Rectangle) {
        if let Some(video) = self.videos.get_mut(&video_id) {
            // rect = (x0, y0, x1, y1) of the destination rectangle.
            let uniforms = Uniforms {
                rect: [
                    bounds.x,
                    bounds.y,
                    bounds.x + bounds.width,
                    bounds.y + bounds.height,
                ],
                _pad: [0; 240],
            };
            queue.write_buffer(
                &video.instances,
                (video.prepare_index.load(Ordering::Relaxed) * std::mem::size_of::<Uniforms>())
                    as u64,
                // SAFETY: `Uniforms` is #[repr(C)] plain-old-data ([f32; 4]
                // followed by explicit pad bytes), so reading it as a byte
                // slice of size_of::<Uniforms>() is sound.
                unsafe {
                    std::slice::from_raw_parts(
                        &uniforms as *const _ as *const u8,
                        std::mem::size_of::<Uniforms>(),
                    )
                },
            );
            // Advance the write cursor; reset the read cursor for `draw`.
            video.prepare_index.fetch_add(1, Ordering::Relaxed);
            video.render_index.store(0, Ordering::Relaxed);
        }

        self.cleanup();
    }
    /// Records a render pass drawing `video_id`'s quad into `target`,
    /// scissored to `clip`.
    fn draw(
        &self,
        target: &wgpu::TextureView,
        encoder: &mut wgpu::CommandEncoder,
        clip: &iced::Rectangle<u32>,
        video_id: u64,
    ) {
        if let Some(video) = self.videos.get(&video_id) {
            let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                label: Some("iced_video_player render pass"),
                color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                    view: target,
                    resolve_target: None,
                    ops: wgpu::Operations {
                        // Load existing contents: the video composites over
                        // whatever iced has already drawn this frame.
                        load: wgpu::LoadOp::Load,
                        store: wgpu::StoreOp::Store,
                    },
                    depth_slice: None,
                })],
                depth_stencil_attachment: None,
                timestamp_writes: None,
                occlusion_query_set: None,
            });

            pass.set_pipeline(&self.pipeline);
            // Select this draw's uniform slot via the dynamic offset.
            pass.set_bind_group(
                0,
                &video.bg0,
                &[
                    (video.render_index.load(Ordering::Relaxed) * std::mem::size_of::<Uniforms>())
                        as u32,
                ],
            );
            pass.set_scissor_rect(clip.x as _, clip.y as _, clip.width as _, clip.height as _);
            // Six vertices: the two triangles generated in the vertex shader.
            pass.draw(0..6, 0..1);

            video.prepare_index.store(0, Ordering::Relaxed);
            video.render_index.fetch_add(1, Ordering::Relaxed);
        }
    }
}
/// Primitive handed to iced's wgpu backend; identifies a video and carries
/// the shared frame data to (optionally) upload this draw.
#[derive(Debug, Clone)]
pub(crate) struct VideoPrimitive {
    video_id: u64,
    alive: Arc<AtomicBool>,   // shared liveness flag; drives GPU cleanup
    frame: Arc<Mutex<Frame>>, // latest decoded NV12 sample
    size: (u32, u32),         // video resolution (width, height)
    upload_frame: bool,       // true when a new frame must be uploaded
}
impl VideoPrimitive {
pub fn new(
video_id: u64,
alive: Arc<AtomicBool>,
frame: Arc<Mutex<Frame>>,
size: (u32, u32),
upload_frame: bool,
) -> Self {
VideoPrimitive {
video_id,
alive,
frame,
size,
upload_frame,
}
}
}
impl Primitive for VideoPrimitive {
    type Renderer = VideoPipeline;

    /// Builds the shared renderer state the first time a primitive of this
    /// type is drawn with a given device/surface format.
    fn initialize(
        &self,
        device: &wgpu::Device,
        _queue: &wgpu::Queue,
        format: wgpu::TextureFormat,
    ) -> Self::Renderer {
        VideoPipeline::new(device, format)
    }

    /// Uploads the pending frame (when flagged) and stages this widget's
    /// bounds, converted to clip space via an orthographic projection of
    /// the viewport.
    fn prepare(
        &self,
        renderer: &mut Self::Renderer,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
        bounds: &iced::Rectangle,
        viewport: &iced_wgpu::graphics::Viewport,
    ) {
        if self.upload_frame {
            // Skip upload when the sample has no mappable buffer
            // (e.g. the placeholder `Frame::empty`).
            if let Some(readable) = self.frame.lock().expect("lock frame mutex").readable() {
                renderer.upload(
                    device,
                    queue,
                    self.video_id,
                    &self.alive,
                    self.size,
                    readable.as_slice(),
                );
            }
        }

        renderer.prepare(
            queue,
            self.video_id,
            &(*bounds
                * iced::Transformation::orthographic(
                    viewport.logical_size().width as _,
                    viewport.logical_size().height as _,
                )),
        );
    }

    /// Records the draw call for this video into the frame's encoder.
    fn render(
        &self,
        renderer: &Self::Renderer,
        encoder: &mut wgpu::CommandEncoder,
        target: &wgpu::TextureView,
        clip_bounds: &iced::Rectangle<u32>,
    ) {
        renderer.draw(target, encoder, clip_bounds, self.video_id);
    }
}

View File

@@ -1,61 +0,0 @@
// Renders one video quad: the vertex stage expands the uniform rect into
// two triangles; the fragment stage converts NV12 samples to linear RGB.

struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) uv: vec2<f32>,
}

struct Uniforms {
    // Destination rectangle as (x0, y0, x1, y1) in clip space.
    rect: vec4<f32>,
}

@group(0) @binding(0)
var tex_y: texture_2d<f32>;  // luma plane (full resolution)
@group(0) @binding(1)
var tex_uv: texture_2d<f32>; // interleaved chroma plane (half resolution)
@group(0) @binding(2)
var s: sampler;
@group(0) @binding(3)
var<uniform> uniforms: Uniforms;

@vertex
fn vs_main(@builtin(vertex_index) in_vertex_index: u32) -> VertexOutput {
    // Each entry is (clip x, clip y, u, v); six vertices = two triangles.
    var quad = array<vec4<f32>, 6>(
        vec4<f32>(uniforms.rect.xy, 0.0, 0.0),
        vec4<f32>(uniforms.rect.zy, 1.0, 0.0),
        vec4<f32>(uniforms.rect.xw, 0.0, 1.0),
        vec4<f32>(uniforms.rect.zy, 1.0, 0.0),
        vec4<f32>(uniforms.rect.zw, 1.0, 1.0),
        vec4<f32>(uniforms.rect.xw, 0.0, 1.0),
    );

    var out: VertexOutput;
    out.uv = quad[in_vertex_index].zw;
    out.position = vec4<f32>(quad[in_vertex_index].xy, 1.0, 1.0);
    return out;
}

@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    // BT.601 limited-range YUV -> RGB coefficient rows.
    let yuv2r = vec3<f32>(1.164, 0.0, 1.596);
    let yuv2g = vec3<f32>(1.164, -0.391, -0.813);
    let yuv2b = vec3<f32>(1.164, 2.018, 0.0);

    var yuv = vec3<f32>(0.0);
    // Remove the limited-range offsets (16/256 for luma, 128/256 for chroma).
    yuv.x = textureSample(tex_y, s, in.uv).r - 0.0625;
    yuv.y = textureSample(tex_uv, s, in.uv).r - 0.5;
    yuv.z = textureSample(tex_uv, s, in.uv).g - 0.5;

    var rgb = vec3<f32>(0.0);
    rgb.x = dot(yuv, yuv2r);
    rgb.y = dot(yuv, yuv2g);
    rgb.z = dot(yuv, yuv2b);

    // Piecewise sRGB-to-linear conversion; `select(hi, lo, threshold)`
    // picks `lo` for components in the linear segment.
    let threshold = rgb <= vec3<f32>(0.04045);
    let hi = pow((rgb + vec3<f32>(0.055)) / vec3<f32>(1.055), vec3<f32>(2.4));
    let lo = rgb * vec3<f32>(1.0 / 12.92);
    rgb = select(hi, lo, threshold);

    return vec4<f32>(rgb, 1.0);
}

View File

@@ -1,662 +0,0 @@
use crate::Error;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gstreamer_app::prelude::*;
use iced::widget::image as img;
use std::num::NonZeroU8;
use std::ops::{Deref, DerefMut};
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{Arc, Mutex, RwLock};
use std::time::{Duration, Instant};
/// Position in the media.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Position {
    /// Position based on time.
    ///
    /// Not the most accurate format for videos.
    Time(Duration),
    /// Position based on nth frame.
    Frame(u64),
}

impl From<Position> for gst::GenericFormattedValue {
    /// Maps to GStreamer's formatted values: clock time for `Time`,
    /// the default (frame-count) format for `Frame`.
    fn from(pos: Position) -> Self {
        match pos {
            Position::Time(t) => gst::ClockTime::from_nseconds(t.as_nanos() as _).into(),
            Position::Frame(f) => gst::format::Default::from_u64(f).into(),
        }
    }
}

impl From<Duration> for Position {
    fn from(t: Duration) -> Self {
        Position::Time(t)
    }
}

impl From<u64> for Position {
    fn from(f: u64) -> Self {
        Position::Frame(f)
    }
}

/// A decoded sample pulled from the appsink (buffer plus caps/segment).
#[derive(Debug)]
pub(crate) struct Frame(gst::Sample);

impl Frame {
    /// Placeholder frame with no buffer; `readable` returns `None` for it.
    pub fn empty() -> Self {
        Self(gst::Sample::builder().build())
    }

    /// Maps the underlying buffer for CPU reads, if the sample has one.
    pub fn readable(&self) -> Option<gst::BufferMap<'_, gst::buffer::Readable>> {
        self.0.buffer().and_then(|x| x.map_readable().ok())
    }
}

/// Shared mutable state behind [`Video`]'s `RwLock`.
#[derive(Debug)]
pub(crate) struct Internal {
    pub(crate) id: u64, // unique id keying per-video GPU resources
    pub(crate) bus: gst::Bus,
    pub(crate) source: gst::Pipeline,
    pub(crate) alive: Arc<AtomicBool>, // cleared on drop to stop the worker
    pub(crate) worker: Option<std::thread::JoinHandle<()>>, // frame-pulling thread
    pub(crate) width: i32,
    pub(crate) height: i32,
    pub(crate) framerate: f64, // frames per second
    pub(crate) duration: Duration,
    pub(crate) speed: f64,    // playback rate multiplier (negative = reverse)
    pub(crate) sync_av: bool, // whether the pipeline exposes "av-offset"
    pub(crate) frame: Arc<Mutex<Frame>>, // latest decoded frame
    pub(crate) upload_frame: Arc<AtomicBool>, // set when `frame` changed
    pub(crate) last_frame_time: Arc<Mutex<Instant>>,
    pub(crate) looping: bool,
    pub(crate) is_eos: bool,
    pub(crate) restart_stream: bool,
    pub(crate) sync_av_avg: u64, // running average of presentation latency (ns)
    pub(crate) sync_av_counter: u64,
    pub(crate) subtitle_text: Arc<Mutex<Option<String>>>,
    pub(crate) upload_text: Arc<AtomicBool>, // set when subtitle text changed
}
impl Internal {
    /// Seeks the pipeline to `position`.
    ///
    /// `accurate` trades seek speed for frame accuracy; most seeks (e.g.
    /// scrubbing) do not need it. Any pending subtitle text is cleared
    /// because it may no longer match the new position.
    pub(crate) fn seek(&self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
        let position = position.into();

        // gstreamer complains if the start & end value types aren't the same
        match &position {
            Position::Time(_) => self.source.seek(
                self.speed,
                gst::SeekFlags::FLUSH
                    | if accurate {
                        gst::SeekFlags::ACCURATE
                    } else {
                        gst::SeekFlags::empty()
                    },
                gst::SeekType::Set,
                gst::GenericFormattedValue::from(position),
                gst::SeekType::Set,
                gst::ClockTime::NONE,
            )?,
            Position::Frame(_) => self.source.seek(
                self.speed,
                gst::SeekFlags::FLUSH
                    | if accurate {
                        gst::SeekFlags::ACCURATE
                    } else {
                        gst::SeekFlags::empty()
                    },
                gst::SeekType::Set,
                gst::GenericFormattedValue::from(position),
                gst::SeekType::Set,
                gst::format::Default::NONE,
            )?,
        };

        *self.subtitle_text.lock().expect("lock subtitle_text") = None;
        self.upload_text.store(true, Ordering::SeqCst);

        Ok(())
    }

    /// Changes the playback rate while keeping the current position.
    ///
    /// Positive speeds play from the current position to the end; negative
    /// speeds play from the start up to the current position (reverse).
    pub(crate) fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
        let Some(position) = self.source.query_position::<gst::ClockTime>() else {
            return Err(Error::Caps);
        };
        if speed > 0.0 {
            self.source.seek(
                speed,
                gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
                gst::SeekType::Set,
                position,
                gst::SeekType::End,
                gst::ClockTime::from_seconds(0),
            )?;
        } else {
            self.source.seek(
                speed,
                gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
                gst::SeekType::Set,
                gst::ClockTime::from_seconds(0),
                gst::SeekType::Set,
                position,
            )?;
        }
        // Remember the rate so subsequent seeks keep using it.
        self.speed = speed;
        Ok(())
    }

    /// Rewinds to the first frame and resumes playback, clearing EOS.
    pub(crate) fn restart_stream(&mut self) -> Result<(), Error> {
        self.is_eos = false;
        self.set_paused(false);
        self.seek(0, false)?;
        Ok(())
    }

    /// Pauses or resumes the pipeline.
    pub(crate) fn set_paused(&mut self, paused: bool) {
        self.source
            .set_state(if paused {
                gst::State::Paused
            } else {
                gst::State::Playing
            })
            .unwrap(/* state was changed in ctor; state errors caught there */);

        // Set restart_stream flag to make the stream restart on the next Message::NextFrame
        if self.is_eos && !paused {
            self.restart_stream = true;
        }
    }

    /// Whether the pipeline currently reports the `Paused` state.
    pub(crate) fn paused(&self) -> bool {
        self.source.state(gst::ClockTime::ZERO).1 == gst::State::Paused
    }

    /// Syncs audio with video when there is (inevitably) latency presenting the frame.
    pub(crate) fn set_av_offset(&mut self, offset: Duration) {
        if self.sync_av {
            // Incremental running average of the observed offset, in ns.
            self.sync_av_counter += 1;
            self.sync_av_avg = self.sync_av_avg * (self.sync_av_counter - 1) / self.sync_av_counter
                + offset.as_nanos() as u64 / self.sync_av_counter;
            // Push the property only every 128 samples to limit churn.
            if self.sync_av_counter % 128 == 0 {
                self.source
                    .set_property("av-offset", -(self.sync_av_avg as i64));
            }
        }
    }
}
/// A multimedia video loaded from a URI (e.g., a local file path or HTTP stream).
#[derive(Debug)]
pub struct Video(pub(crate) RwLock<Internal>);

impl Drop for Video {
    /// Stops the pipeline, signals the worker thread to exit, and joins it.
    fn drop(&mut self) {
        let inner = self.0.get_mut().expect("failed to lock");

        inner
            .source
            .set_state(gst::State::Null)
            .expect("failed to set state");

        // Tell the worker loop to stop pulling samples, then wait for it.
        inner.alive.store(false, Ordering::SeqCst);
        if let Some(worker) = inner.worker.take() {
            if let Err(err) = worker.join() {
                // A panic payload is typically a String; log what we can.
                match err.downcast_ref::<String>() {
                    Some(e) => log::error!("Video thread panicked: {e}"),
                    None => log::error!("Video thread panicked with unknown reason"),
                }
            }
        }
    }
}
impl Video {
/// Create a new video player from a given video which loads from `uri`.
/// Note that live sources will report the duration to be zero.
pub fn new(uri: &url::Url) -> Result<Self, Error> {
gst::init()?;
let pipeline = format!("playbin uri=\"{}\" text-sink=\"appsink name=iced_text sync=true drop=true\" video-sink=\"videoscale ! videoconvert ! appsink name=iced_video drop=true caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1\"", uri.as_str());
let pipeline = gst::parse::launch(pipeline.as_ref())?
.downcast::<gst::Pipeline>()
.map_err(|_| Error::Cast)?;
let video_sink: gst::Element = pipeline.property("video-sink");
let pad = video_sink.pads().first().cloned().unwrap();
let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
let bin = pad
.parent_element()
.unwrap()
.downcast::<gst::Bin>()
.unwrap();
let video_sink = bin.by_name("iced_video").unwrap();
let video_sink = video_sink.downcast::<gst_app::AppSink>().unwrap();
let text_sink: gst::Element = pipeline.property("text-sink");
let text_sink = text_sink.downcast::<gst_app::AppSink>().unwrap();
Self::from_gst_pipeline(pipeline, video_sink, Some(text_sink))
}
/// Creates a new video based on an existing GStreamer pipeline and appsink.
/// Expects an `appsink` plugin with `caps=video/x-raw,format=NV12`.
///
/// An optional `text_sink` can be provided, which enables subtitle messages
/// to be emitted.
///
/// **Note:** Many functions of [`Video`] assume a `playbin` pipeline.
/// Non-`playbin` pipelines given here may not have full functionality.
pub fn from_gst_pipeline(
pipeline: gst::Pipeline,
video_sink: gst_app::AppSink,
text_sink: Option<gst_app::AppSink>,
) -> Result<Self, Error> {
gst::init()?;
static NEXT_ID: AtomicU64 = AtomicU64::new(0);
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
// We need to ensure we stop the pipeline if we hit an error,
// or else there may be audio left playing in the background.
macro_rules! cleanup {
($expr:expr) => {
$expr.map_err(|e| {
let _ = pipeline.set_state(gst::State::Null);
e
})
};
}
let pad = video_sink.pads().first().cloned().unwrap();
dbg!(&pad);
dbg!(&pipeline);
cleanup!(pipeline.set_state(gst::State::Playing))?;
// wait for up to 5 seconds until the decoder gets the source capabilities
cleanup!(pipeline.state(gst::ClockTime::from_seconds(5)).0)?;
// extract resolution and framerate
// TODO(jazzfool): maybe we want to extract some other information too?
let caps = cleanup!(pad.current_caps().ok_or(Error::Caps))?;
let s = cleanup!(caps.structure(0).ok_or(Error::Caps))?;
let width = cleanup!(s.get::<i32>("width").map_err(|_| Error::Caps))?;
let height = cleanup!(s.get::<i32>("height").map_err(|_| Error::Caps))?;
// resolution should be mod4
let width = ((width + 4 - 1) / 4) * 4;
let framerate = cleanup!(s.get::<gst::Fraction>("framerate").map_err(|_| Error::Caps))?;
let framerate = framerate.numer() as f64 / framerate.denom() as f64;
if framerate.is_nan()
|| framerate.is_infinite()
|| framerate < 0.0
|| framerate.abs() < f64::EPSILON
{
let _ = pipeline.set_state(gst::State::Null);
return Err(Error::Framerate(framerate));
}
let duration = Duration::from_nanos(
pipeline
.query_duration::<gst::ClockTime>()
.map(|duration| duration.nseconds())
.unwrap_or(0),
);
let sync_av = pipeline.has_property("av-offset", None);
// NV12 = 12bpp
let frame = Arc::new(Mutex::new(Frame::empty()));
let upload_frame = Arc::new(AtomicBool::new(false));
let alive = Arc::new(AtomicBool::new(true));
let last_frame_time = Arc::new(Mutex::new(Instant::now()));
let frame_ref = Arc::clone(&frame);
let upload_frame_ref = Arc::clone(&upload_frame);
let alive_ref = Arc::clone(&alive);
let last_frame_time_ref = Arc::clone(&last_frame_time);
let subtitle_text = Arc::new(Mutex::new(None));
let upload_text = Arc::new(AtomicBool::new(false));
let subtitle_text_ref = Arc::clone(&subtitle_text);
let upload_text_ref = Arc::clone(&upload_text);
let pipeline_ref = pipeline.clone();
let worker = std::thread::spawn(move || {
let mut clear_subtitles_at = None;
while alive_ref.load(Ordering::Acquire) {
if let Err(gst::FlowError::Error) = (|| -> Result<(), gst::FlowError> {
let sample =
if pipeline_ref.state(gst::ClockTime::ZERO).1 != gst::State::Playing {
video_sink
.try_pull_preroll(gst::ClockTime::from_mseconds(16))
.ok_or(gst::FlowError::Eos)?
} else {
video_sink
.try_pull_sample(gst::ClockTime::from_mseconds(16))
.ok_or(gst::FlowError::Eos)?
};
*last_frame_time_ref
.lock()
.map_err(|_| gst::FlowError::Error)? = Instant::now();
let frame_segment = sample.segment().cloned().ok_or(gst::FlowError::Error)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let frame_pts = buffer.pts().ok_or(gst::FlowError::Error)?;
let frame_duration = buffer.duration().ok_or(gst::FlowError::Error)?;
{
let mut frame_guard =
frame_ref.lock().map_err(|_| gst::FlowError::Error)?;
*frame_guard = Frame(sample);
}
upload_frame_ref.swap(true, Ordering::SeqCst);
if let Some(at) = clear_subtitles_at {
if frame_pts >= at {
*subtitle_text_ref
.lock()
.map_err(|_| gst::FlowError::Error)? = None;
upload_text_ref.store(true, Ordering::SeqCst);
clear_subtitles_at = None;
}
}
let text = text_sink
.as_ref()
.and_then(|sink| sink.try_pull_sample(gst::ClockTime::from_seconds(0)));
if let Some(text) = text {
let text_segment = text.segment().ok_or(gst::FlowError::Error)?;
let text = text.buffer().ok_or(gst::FlowError::Error)?;
let text_pts = text.pts().ok_or(gst::FlowError::Error)?;
let text_duration = text.duration().ok_or(gst::FlowError::Error)?;
let frame_running_time = frame_segment.to_running_time(frame_pts).value();
let frame_running_time_end = frame_segment
.to_running_time(frame_pts + frame_duration)
.value();
let text_running_time = text_segment.to_running_time(text_pts).value();
let text_running_time_end = text_segment
.to_running_time(text_pts + text_duration)
.value();
// see gst-plugins-base/ext/pango/gstbasetextoverlay.c (gst_base_text_overlay_video_chain)
// as an example of how to correctly synchronize the text+video segments
if text_running_time_end > frame_running_time
&& frame_running_time_end > text_running_time
{
let duration = text.duration().unwrap_or(gst::ClockTime::ZERO);
let map = text.map_readable().map_err(|_| gst::FlowError::Error)?;
let text = std::str::from_utf8(map.as_slice())
.map_err(|_| gst::FlowError::Error)?
.to_string();
*subtitle_text_ref
.lock()
.map_err(|_| gst::FlowError::Error)? = Some(text);
upload_text_ref.store(true, Ordering::SeqCst);
clear_subtitles_at = Some(text_pts + duration);
}
}
Ok(())
})() {
log::error!("error pulling frame");
}
}
});
Ok(Video(RwLock::new(Internal {
id,
bus: pipeline.bus().unwrap(),
source: pipeline,
alive,
worker: Some(worker),
width,
height,
framerate,
duration,
speed: 1.0,
sync_av,
frame,
upload_frame,
last_frame_time,
looping: false,
is_eos: false,
restart_stream: false,
sync_av_avg: 0,
sync_av_counter: 0,
subtitle_text,
upload_text,
})))
}
    /// Acquires a shared read guard on the internal state.
    pub(crate) fn read(&self) -> impl Deref<Target = Internal> + '_ {
        self.0.read().expect("lock")
    }

    /// Acquires an exclusive write guard on the internal state.
    pub(crate) fn write(&self) -> impl DerefMut<Target = Internal> + '_ {
        self.0.write().expect("lock")
    }

    /// Direct access via `&mut self`; no lock contention is possible here.
    pub(crate) fn get_mut(&mut self) -> impl DerefMut<Target = Internal> + '_ {
        self.0.get_mut().expect("lock")
    }
/// Get the size/resolution of the video as `(width, height)`.
pub fn size(&self) -> (i32, i32) {
(self.read().width, self.read().height)
}
    /// Get the framerate of the video as frames per second.
    pub fn framerate(&self) -> f64 {
        self.read().framerate
    }

    /// Set the volume multiplier of the audio.
    /// `0.0` = 0% volume, `1.0` = 100% volume.
    ///
    /// This uses a linear scale, for example `0.5` is perceived as half as loud.
    pub fn set_volume(&mut self, volume: f64) {
        self.get_mut().source.set_property("volume", volume);
        self.set_muted(self.muted()); // for some reason gstreamer unmutes when changing volume?
    }

    /// Get the volume multiplier of the audio.
    pub fn volume(&self) -> f64 {
        self.read().source.property("volume")
    }

    /// Set if the audio is muted or not, without changing the volume.
    pub fn set_muted(&mut self, muted: bool) {
        self.get_mut().source.set_property("mute", muted);
    }

    /// Get if the audio is muted or not.
    pub fn muted(&self) -> bool {
        self.read().source.property("mute")
    }

    /// Get if the stream ended or not.
    pub fn eos(&self) -> bool {
        self.read().is_eos
    }

    /// Get if the media will loop or not.
    pub fn looping(&self) -> bool {
        self.read().looping
    }

    /// Set if the media will loop or not.
    pub fn set_looping(&mut self, looping: bool) {
        self.get_mut().looping = looping;
    }

    /// Set if the media is paused or not.
    pub fn set_paused(&mut self, paused: bool) {
        self.get_mut().set_paused(paused)
    }

    /// Get if the media is paused or not.
    pub fn paused(&self) -> bool {
        self.read().paused()
    }

    /// Jumps to a specific position in the media.
    /// Passing `true` to the `accurate` parameter will result in more accurate seeking,
    /// however, it is also slower. For most seeks (e.g., scrubbing) this is not needed.
    pub fn seek(&mut self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
        self.get_mut().seek(position, accurate)
    }

    /// Set the playback speed of the media.
    /// The default speed is `1.0`.
    pub fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
        self.get_mut().set_speed(speed)
    }

    /// Get the current playback speed.
    pub fn speed(&self) -> f64 {
        self.read().speed
    }

    /// Get the current playback position in time.
    pub fn position(&self) -> Duration {
        Duration::from_nanos(
            self.read()
                .source
                .query_position::<gst::ClockTime>()
                .map_or(0, |pos| pos.nseconds()),
        )
    }

    /// Get the media duration.
    pub fn duration(&self) -> Duration {
        self.read().duration
    }

    /// Restarts a stream; seeks to the first frame and unpauses, sets the `eos` flag to false.
    pub fn restart_stream(&mut self) -> Result<(), Error> {
        self.get_mut().restart_stream()
    }

    /// Set the subtitle URL to display.
    pub fn set_subtitle_url(&mut self, url: &url::Url) -> Result<(), Error> {
        let paused = self.paused();
        let mut inner = self.get_mut();
        // NOTE(review): the pipeline is dropped to Ready before setting
        // "suburi" — presumably playbin only picks it up from that state;
        // confirm against playbin docs.
        inner.source.set_state(gst::State::Ready)?;
        inner.source.set_property("suburi", url.as_str());
        inner.set_paused(paused);
        Ok(())
    }

    /// Get the current subtitle URL.
    pub fn subtitle_url(&self) -> Option<url::Url> {
        url::Url::parse(
            &self
                .read()
                .source
                .property::<Option<String>>("current-suburi")?,
        )
        .ok()
    }

    /// Get the underlying GStreamer pipeline.
    pub fn pipeline(&self) -> gst::Pipeline {
        self.read().source.clone()
    }
    /// Generates a list of thumbnails based on a set of positions in the media, downscaled by a given factor.
    ///
    /// Slow; only needs to be called once for each instance.
    /// It's best to call this at the very start of playback, otherwise the position may shift.
    pub fn thumbnails<I>(
        &mut self,
        positions: I,
        downscale: NonZeroU8,
    ) -> Result<Vec<img::Handle>, Error>
    where
        I: IntoIterator<Item = Position>,
    {
        let downscale = u8::from(downscale) as u32;

        // Remember playback state so it can be restored afterwards; playback
        // must be running (muted) for the worker to publish sought frames.
        let paused = self.paused();
        let muted = self.muted();
        let pos = self.position();
        self.set_paused(false);
        self.set_muted(true);

        let out = {
            let inner = self.read();
            let width = inner.width;
            let height = inner.height;
            positions
                .into_iter()
                .map(|pos| {
                    inner.seek(pos, true)?;
                    // NOTE(review): busy-waits until the worker thread flips
                    // `upload_frame` after the seek; if the pipeline stalls,
                    // this spins forever — TODO consider a timeout.
                    inner.upload_frame.store(false, Ordering::SeqCst);
                    while !inner.upload_frame.load(Ordering::SeqCst) {
                        std::hint::spin_loop();
                    }
                    let frame_guard = inner.frame.lock().map_err(|_| Error::Lock)?;
                    let frame = frame_guard.readable().ok_or(Error::Lock)?;
                    Ok(img::Handle::from_rgba(
                        inner.width as u32 / downscale,
                        inner.height as u32 / downscale,
                        yuv_to_rgba(frame.as_slice(), width as _, height as _, downscale),
                    ))
                })
                .collect()
        };

        // Restore the original playback state and position.
        self.set_paused(paused);
        self.set_muted(muted);
        self.seek(pos, true)?;

        out
    }
}
/// Converts an NV12 (BT.601 limited-range, 4:2:0) frame to packed RGBA8,
/// optionally downscaling by an integer factor via nearest-neighbor sampling.
///
/// `yuv` holds the full-resolution Y plane (`width * height` bytes) followed
/// by the interleaved UV plane. The output holds
/// `(width / downscale) * (height / downscale)` pixels, 4 bytes each,
/// alpha fixed at `0xFF`.
fn yuv_to_rgba(yuv: &[u8], width: u32, height: u32, downscale: u32) -> Vec<u8> {
    let uv_start = width * height;
    let out_w = width / downscale;
    let out_h = height / downscale;
    // Preallocate the exact output size; the previous version grew the
    // vector by repeated reallocation.
    let mut rgba = Vec::with_capacity((out_w * out_h * 4) as usize);
    for y in 0..out_h {
        for x in 0..out_w {
            let x_src = x * downscale;
            let y_src = y * downscale;
            // UV plane is subsampled 2x2; each texel is a (U, V) byte pair.
            let uv_i = uv_start + width * (y_src / 2) + x_src / 2 * 2;
            let y = yuv[(y_src * width + x_src) as usize] as f32;
            let u = yuv[uv_i as usize] as f32;
            let v = yuv[(uv_i + 1) as usize] as f32;
            // BT.601 limited-range YUV -> RGB; `as u8` saturates to 0..=255.
            let r = 1.164 * (y - 16.0) + 1.596 * (v - 128.0);
            let g = 1.164 * (y - 16.0) - 0.813 * (v - 128.0) - 0.391 * (u - 128.0);
            let b = 1.164 * (y - 16.0) + 2.018 * (u - 128.0);
            rgba.push(r as u8);
            rgba.push(g as u8);
            rgba.push(b as u8);
            rgba.push(0xFF);
        }
    }
    rgba
}

View File

@@ -1,305 +0,0 @@
use crate::{pipeline::VideoPrimitive, video::Video};
use gstreamer as gst;
use iced::{
advanced::{self, layout, widget, Widget},
Element,
};
use iced_wgpu::primitive::Renderer as PrimitiveRenderer;
use log::error;
use std::{marker::PhantomData, sync::atomic::Ordering};
use std::{sync::Arc, time::Instant};
/// Video player widget which displays the current frame of a [`Video`](crate::Video).
pub struct VideoPlayer<'a, Message, Theme = iced::Theme, Renderer = iced::Renderer>
where
    Renderer: PrimitiveRenderer,
{
    // Borrowed handle to the video being rendered; the widget never owns it.
    video: &'a Video,
    // How the video frame is scaled/cropped into the widget bounds.
    content_fit: iced::ContentFit,
    width: iced::Length,
    height: iced::Length,
    // Optional messages/callbacks published from `update` when events fire.
    on_end_of_stream: Option<Message>,
    on_new_frame: Option<Message>,
    on_subtitle_text: Option<Box<dyn Fn(Option<String>) -> Message + 'a>>,
    on_error: Option<Box<dyn Fn(&glib::Error) -> Message + 'a>>,
    // Carries the otherwise-unused Theme/Renderer type parameters.
    _phantom: PhantomData<(Theme, Renderer)>,
}
impl<'a, Message, Theme, Renderer> VideoPlayer<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
{
/// Creates a new video player widget for a given video.
pub fn new(video: &'a Video) -> Self {
VideoPlayer {
video,
content_fit: iced::ContentFit::default(),
width: iced::Length::Shrink,
height: iced::Length::Shrink,
on_end_of_stream: None,
on_new_frame: None,
on_subtitle_text: None,
on_error: None,
_phantom: Default::default(),
}
}
/// Sets the width of the `VideoPlayer` boundaries.
pub fn width(self, width: impl Into<iced::Length>) -> Self {
VideoPlayer {
width: width.into(),
..self
}
}
/// Sets the height of the `VideoPlayer` boundaries.
pub fn height(self, height: impl Into<iced::Length>) -> Self {
VideoPlayer {
height: height.into(),
..self
}
}
/// Sets the `ContentFit` of the `VideoPlayer`.
pub fn content_fit(self, content_fit: iced::ContentFit) -> Self {
VideoPlayer {
content_fit,
..self
}
}
/// Message to send when the video reaches the end of stream (i.e., the video ends).
pub fn on_end_of_stream(self, on_end_of_stream: Message) -> Self {
VideoPlayer {
on_end_of_stream: Some(on_end_of_stream),
..self
}
}
/// Message to send when the video receives a new frame.
pub fn on_new_frame(self, on_new_frame: Message) -> Self {
VideoPlayer {
on_new_frame: Some(on_new_frame),
..self
}
}
/// Message to send when the video receives a new frame.
pub fn on_subtitle_text<F>(self, on_subtitle_text: F) -> Self
where
F: 'a + Fn(Option<String>) -> Message,
{
VideoPlayer {
on_subtitle_text: Some(Box::new(on_subtitle_text)),
..self
}
}
/// Message to send when the video playback encounters an error.
pub fn on_error<F>(self, on_error: F) -> Self
where
F: 'a + Fn(&glib::Error) -> Message,
{
VideoPlayer {
on_error: Some(Box::new(on_error)),
..self
}
}
}
impl<Message, Theme, Renderer> Widget<Message, Theme, Renderer>
for VideoPlayer<'_, Message, Theme, Renderer>
where
Message: Clone,
Renderer: PrimitiveRenderer,
{
fn size(&self) -> iced::Size<iced::Length> {
    // Report Shrink on both axes; the real sizing is computed in `layout`.
    iced::Size::new(iced::Length::Shrink, iced::Length::Shrink)
}
fn layout(
    &mut self,
    _tree: &mut widget::Tree,
    _renderer: &Renderer,
    limits: &layout::Limits,
) -> layout::Node {
    let (video_width, video_height) = self.video.size();
    // based on `Image::layout`
    let image_size = iced::Size::new(video_width as f32, video_height as f32);
    // Resolve the configured Length against the limits, then let ContentFit
    // decide how the video rectangle maps into that space.
    let raw_size = limits.resolve(self.width, self.height, image_size);
    let full_size = self.content_fit.fit(image_size, raw_size);
    let final_size = iced::Size {
        // Shrink axes collapse to the fitted size; fixed/fill axes keep the
        // resolved size unchanged.
        width: match self.width {
            iced::Length::Shrink => f32::min(raw_size.width, full_size.width),
            _ => raw_size.width,
        },
        height: match self.height {
            iced::Length::Shrink => f32::min(raw_size.height, full_size.height),
            _ => raw_size.height,
        },
    };
    layout::Node::new(final_size)
}
fn draw(
    &self,
    _tree: &widget::Tree,
    renderer: &mut Renderer,
    _theme: &Theme,
    _style: &advanced::renderer::Style,
    layout: advanced::Layout<'_>,
    _cursor: advanced::mouse::Cursor,
    _viewport: &iced::Rectangle,
) {
    let mut inner = self.video.write();
    // bounds based on `Image::draw`
    let image_size = iced::Size::new(inner.width as f32, inner.height as f32);
    let bounds = layout.bounds();
    let adjusted_fit = self.content_fit.fit(image_size, bounds.size());
    // Scale factors from the native video size to the fitted size.
    let scale = iced::Vector::new(
        adjusted_fit.width / image_size.width,
        adjusted_fit.height / image_size.height,
    );
    let final_size = image_size * scale;
    // `ContentFit::None` keeps the native size; other fits center the fitted
    // rectangle inside the widget bounds.
    let position = match self.content_fit {
        iced::ContentFit::None => iced::Point::new(
            bounds.x + (image_size.width - adjusted_fit.width) / 2.0,
            bounds.y + (image_size.height - adjusted_fit.height) / 2.0,
        ),
        _ => iced::Point::new(
            bounds.center_x() - final_size.width / 2.0,
            bounds.center_y() - final_size.height / 2.0,
        ),
    };
    let drawing_bounds = iced::Rectangle::new(position, final_size);
    // Consume the "new frame available" flag; the primitive only re-uploads
    // texture data when this is true.
    let upload_frame = inner.upload_frame.swap(false, Ordering::SeqCst);
    if upload_frame {
        // Record how far behind the last frame's arrival we are presenting;
        // a poisoned lock falls back to "no delay".
        let last_frame_time = inner
            .last_frame_time
            .lock()
            .map(|time| *time)
            .unwrap_or_else(|_| Instant::now());
        inner.set_av_offset(Instant::now() - last_frame_time);
    }
    let render = |renderer: &mut Renderer| {
        renderer.draw_primitive(
            drawing_bounds,
            VideoPrimitive::new(
                inner.id,
                Arc::clone(&inner.alive),
                Arc::clone(&inner.frame),
                (inner.width as _, inner.height as _),
                upload_frame,
            ),
        );
    };
    // Clip through a layer only when the fitted video overflows the bounds.
    if adjusted_fit.width > bounds.width || adjusted_fit.height > bounds.height {
        renderer.with_layer(bounds, render);
    } else {
        render(renderer);
    }
}
fn update(
    &mut self,
    _state: &mut widget::Tree,
    event: &iced::Event,
    _layout: advanced::Layout<'_>,
    _cursor: advanced::mouse::Cursor,
    _renderer: &Renderer,
    _clipboard: &mut dyn advanced::Clipboard,
    shell: &mut advanced::Shell<'_, Message>,
    _viewport: &iced::Rectangle,
) {
    let mut inner = self.video.write();
    // All bus handling happens on redraw ticks; the widget keeps requesting
    // redraws so playback continues to be polled.
    if let iced::Event::Window(iced::window::Event::RedrawRequested(_)) = event {
        if inner.restart_stream || (!inner.is_eos && !inner.paused()) {
            let mut restart_stream = false;
            if inner.restart_stream {
                restart_stream = true;
                // Set flag to false to avoid potentially multiple seeks
                inner.restart_stream = false;
            }
            let mut eos_pause = false;
            // Drain every pending error/EOS message from the pipeline bus.
            while let Some(msg) = inner
                .bus
                .pop_filtered(&[gst::MessageType::Error, gst::MessageType::Eos])
            {
                match msg.view() {
                    gst::MessageView::Error(err) => {
                        error!("bus returned an error: {err}");
                        if let Some(ref on_error) = self.on_error {
                            shell.publish(on_error(&err.error()))
                        };
                    }
                    gst::MessageView::Eos(_eos) => {
                        if let Some(on_end_of_stream) = self.on_end_of_stream.clone() {
                            shell.publish(on_end_of_stream);
                        }
                        // Looping videos restart; otherwise pause at EOS.
                        if inner.looping {
                            restart_stream = true;
                        } else {
                            eos_pause = true;
                        }
                    }
                    _ => {}
                }
            }
            // Don't run eos_pause if restart_stream is true; fixes "pausing" after restarting a stream
            if restart_stream {
                if let Err(err) = inner.restart_stream() {
                    error!("cannot restart stream (can't seek): {err:#?}");
                }
            } else if eos_pause {
                inner.is_eos = true;
                inner.set_paused(true);
            }
            // A frame is pending upload: notify the frame listener.
            if inner.upload_frame.load(Ordering::SeqCst) {
                if let Some(on_new_frame) = self.on_new_frame.clone() {
                    shell.publish(on_new_frame);
                }
            }
            // Fresh subtitle text arrived: consume the flag and publish it.
            if let Some(on_subtitle_text) = &self.on_subtitle_text {
                if inner.upload_text.swap(false, Ordering::SeqCst) {
                    if let Ok(text) = inner.subtitle_text.try_lock() {
                        shell.publish(on_subtitle_text(text.clone()));
                    }
                }
            }
            shell.request_redraw();
        } else {
            // Paused or at EOS: keep the redraw loop alive so state changes
            // (e.g. unpause, restart) are picked up on the next tick.
            shell.request_redraw();
        }
    }
}
}
}
impl<'a, Message, Theme, Renderer> From<VideoPlayer<'a, Message, Theme, Renderer>>
    for Element<'a, Message, Theme, Renderer>
where
    Message: 'a + Clone,
    Theme: 'a,
    Renderer: 'a + PrimitiveRenderer,
{
    /// Wraps the player widget into a generic iced `Element` so it can be
    /// returned directly from a `view` function.
    fn from(video_player: VideoPlayer<'a, Message, Theme, Renderer>) -> Self {
        Self::new(video_player)
    }
}

View File

@@ -92,7 +92,15 @@ allow = [
"MIT", "MIT",
"Apache-2.0", "Apache-2.0",
"Unicode-3.0", "Unicode-3.0",
#"Apache-2.0 WITH LLVM-exception", "BSD-2-Clause",
"BSD-3-Clause",
"Apache-2.0 WITH LLVM-exception",
"Zlib",
"ISC",
"NCSA",
"CC0-1.0",
"BSL-1.0",
# "LGPL",
] ]
# The confidence threshold for detecting a license from license text. # The confidence threshold for detecting a license from license text.
# The higher the value, the more closely the license text must be to the # The higher the value, the more closely the license text must be to the

View File

@@ -0,0 +1 @@
perf*

View File

@@ -0,0 +1,20 @@
[package]
name = "hdr-gstreamer-wgpu"
version = "0.1.0"
edition = "2024"
[dependencies]
# gst = { workspace = true }
wgpu = "*"
gstreamer = "*"
gstreamer-video = "*"
gstreamer-app = "*"
gstreamer-base = "*"
winit = { version = "*", features = ["wayland"] }
anyhow = "*"
pollster = "0.4.0"
tracing = { version = "0.1.43", features = ["log"] }
tracing-subscriber = "0.3.22"
[profile.release]
debug = true

View File

View File

@@ -0,0 +1,466 @@
use std::sync::Arc;
use gstreamer as gst;
use gstreamer_app as gst_app;
use anyhow::{Context, Result};
use winit::{
application::ApplicationHandler,
event::*,
event_loop::{ActiveEventLoop, EventLoop},
keyboard::*,
window::Window,
};
/// Top-level winit application.
pub struct App {
    // `None` until the first `resumed` callback creates the window and
    // all GPU/GStreamer resources.
    state: Option<State>,
}
impl App {
pub fn new() -> Self {
Self { state: None }
}
}
/// Per-window render state: GPU objects plus the GStreamer video source.
pub struct State {
    window: Arc<Window>,
    // Video source; frames are pulled from its appsink on each render.
    gst: Video,
    surface: wgpu::Surface<'static>,
    // Destination texture video frames are uploaded into — despite the name,
    // this is NOT the swapchain texture (that is acquired per frame).
    surface_texture: wgpu::Texture,
    device: wgpu::Device,
    queue: wgpu::Queue,
    config: wgpu::SurfaceConfiguration,
    pipeline: wgpu::RenderPipeline,
    // Binds the video texture + sampler to the fragment shader.
    bind_group: wgpu::BindGroup,
    // Guards against rendering before the surface has been configured.
    is_surface_initialized: bool,
}
impl State {
/// Creates the window-bound render state: wgpu surface/device/queue, a
/// fullscreen textured pipeline, the video upload texture, and the
/// GStreamer video source.
///
/// # Errors
/// Returns an error when any wgpu object or the GStreamer pipeline cannot
/// be created, or when the surface reports no supported formats.
async fn new(window: Arc<Window>) -> Result<State> {
    let instance = wgpu::Instance::default();
    let surface = instance
        .create_surface(window.clone())
        .context("Failed to create wgpu surface")?;
    let adapter = instance
        .request_adapter(&wgpu::RequestAdapterOptions {
            power_preference: wgpu::PowerPreference::LowPower,
            compatible_surface: Some(&surface),
            force_fallback_adapter: false,
        })
        .await
        .context("Failed to request wgpu adapter")?;
    let (device, queue) = adapter
        .request_device(&wgpu::DeviceDescriptor {
            label: None,
            required_features: wgpu::Features::empty(),
            required_limits: wgpu::Limits::default(),
            memory_hints: wgpu::MemoryHints::default(),
            ..Default::default()
        })
        .await
        .context("Failed to request wgpu device")?;
    let surface_caps = surface.get_capabilities(&adapter);
    // Picks the LAST reported format (presumably to favor a wide/HDR-capable
    // one — TODO confirm; `formats[0]` is the conventional "preferred" entry).
    // Was `.last().unwrap().clone()`: now fails gracefully instead of
    // panicking, and copies the `Copy` format instead of cloning it.
    let surface_format = *surface_caps
        .formats
        .last()
        .context("Surface reports no supported formats")?;
    let size = window.inner_size();
    let config = wgpu::SurfaceConfiguration {
        usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
        format: surface_format,
        width: size.width,
        height: size.height,
        present_mode: surface_caps.present_modes[0],
        alpha_mode: surface_caps.alpha_modes[0],
        view_formats: vec![],
        desired_maximum_frame_latency: 3, // allow up to 3 frames in flight
    };
    surface.configure(&device, &config);
    let shader = device.create_shader_module(wgpu::include_wgsl!("shader.wgsl"));
    // One sampled 2D texture + one filtering sampler for the fragment stage.
    let texture_bind_group_layout =
        device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
            label: Some("texture_bind_group_layout"),
            entries: &[
                wgpu::BindGroupLayoutEntry {
                    binding: 0,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Texture {
                        multisampled: false,
                        view_dimension: wgpu::TextureViewDimension::D2,
                        sample_type: wgpu::TextureSampleType::Float { filterable: true },
                    },
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 1,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
                    count: None,
                },
            ],
        });
    let render_pipeline_layout =
        device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
            label: Some("Jello Render Pipeline Layout"),
            bind_group_layouts: &[&texture_bind_group_layout],
            push_constant_ranges: &[],
        });
    let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
        label: Some("Jello Render Pipeline"),
        layout: Some(&render_pipeline_layout),
        vertex: wgpu::VertexState {
            module: &shader,
            entry_point: Some("vs_main"),
            // The fullscreen triangle is generated from the vertex index in
            // the shader; no vertex buffers are needed.
            buffers: &[],
            compilation_options: wgpu::PipelineCompilationOptions::default(),
        },
        fragment: Some(wgpu::FragmentState {
            module: &shader,
            entry_point: Some("fs_main"),
            compilation_options: wgpu::PipelineCompilationOptions::default(),
            targets: &[Some(wgpu::ColorTargetState {
                format: surface_format,
                blend: Some(wgpu::BlendState::REPLACE),
                write_mask: wgpu::ColorWrites::ALL,
            })],
        }),
        primitive: wgpu::PrimitiveState::default(),
        depth_stencil: None,
        multisample: wgpu::MultisampleState {
            count: 1,
            mask: !0,
            alpha_to_coverage_enabled: false,
        },
        multiview: None,
        cache: None,
    });
    // NOTE(review): the video texture is sized from the WINDOW, not the video
    // stream — confirm decoded frames match these dimensions (see
    // `copy_next_frame_to_texture`).
    let texture_size = wgpu::Extent3d {
        width: size.width,
        height: size.height,
        depth_or_array_layers: 1,
    };
    let video_texture = device.create_texture(&wgpu::TextureDescriptor {
        size: texture_size,
        mip_level_count: 1,
        sample_count: 1,
        dimension: wgpu::TextureDimension::D2,
        // 10-bit-per-channel format matching the pipeline's RGB10A2_LE caps.
        format: wgpu::TextureFormat::Rgb10a2Unorm,
        usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
        label: Some("Jello Video Texture"),
        view_formats: &[],
    });
    // TODO: use a better sampler. (Was a stray `///` doc comment on a
    // statement, which triggers an `unused_doc_comments` warning.)
    let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
        label: Some("texture_sampler"),
        address_mode_u: wgpu::AddressMode::ClampToEdge,
        address_mode_v: wgpu::AddressMode::ClampToEdge,
        address_mode_w: wgpu::AddressMode::ClampToEdge,
        mag_filter: wgpu::FilterMode::Linear,
        min_filter: wgpu::FilterMode::Linear,
        mipmap_filter: wgpu::FilterMode::Nearest,
        ..Default::default()
    });
    let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
        layout: &texture_bind_group_layout,
        entries: &[
            wgpu::BindGroupEntry {
                binding: 0,
                resource: wgpu::BindingResource::TextureView(
                    &video_texture.create_view(&wgpu::TextureViewDescriptor::default()),
                ),
            },
            wgpu::BindGroupEntry {
                binding: 1,
                resource: wgpu::BindingResource::Sampler(&sampler),
            },
        ],
        label: Some("Jello Texture Bind Group"),
    });
    let gst = Video::new().context("Failed to create Video")?;
    Ok(Self {
        window,
        gst,
        surface,
        surface_texture: video_texture,
        device,
        queue,
        config,
        is_surface_initialized: true,
        bind_group,
        pipeline: render_pipeline,
    })
}
/// Reconfigures the surface for a new window size; degenerate (zero-area)
/// sizes, e.g. while minimized, are ignored.
fn resize(&mut self, width: u32, height: u32) {
    if width == 0 || height == 0 {
        return;
    }
    self.config.width = width;
    self.config.height = height;
    self.surface.configure(&self.device, &self.config);
    self.is_surface_initialized = true;
}
/// Uploads the next video frame and draws it to the window surface.
fn render(&mut self) -> Result<(), wgpu::SurfaceError> {
    // Nothing to do until the surface has been configured at least once.
    if !self.is_surface_initialized {
        return Ok(());
    }
    // Upload the next decoded frame. Any failure is mapped to `Lost` so the
    // caller's surface-recovery path (reconfigure + retry) handles it.
    self.copy_next_frame_to_texture(&self.surface_texture)
        .inspect_err(|e| {
            tracing::error!("Failed to copy video frame to texture: {e:?}");
        })
        .map_err(|_| wgpu::SurfaceError::Lost)?;
    let output = self.surface.get_current_texture()?;
    let view = output
        .texture
        .create_view(&wgpu::TextureViewDescriptor::default());
    let mut encoder = self
        .device
        .create_command_encoder(&wgpu::CommandEncoderDescriptor {
            label: Some("Jello Render Encoder"),
        });
    let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
        label: Some("Jello Render Pass"),
        color_attachments: &[Some(wgpu::RenderPassColorAttachment {
            view: &view,
            resolve_target: None,
            ops: wgpu::Operations {
                load: wgpu::LoadOp::Clear(wgpu::Color {
                    r: 0.1,
                    g: 0.2,
                    b: 0.3,
                    a: 1.0,
                }),
                store: wgpu::StoreOp::Store,
            },
            depth_slice: None,
        })],
        depth_stencil_attachment: None,
        occlusion_query_set: None,
        timestamp_writes: None,
    });
    render_pass.set_pipeline(&self.pipeline);
    render_pass.set_bind_group(0, &self.bind_group, &[]);
    // Fullscreen triangle generated in the vertex shader: 3 vertices, no buffers.
    render_pass.draw(0..3, 0..1);
    drop(render_pass);
    self.queue.submit(std::iter::once(encoder.finish()));
    output.present();
    // Immediately schedule the next frame: rendering is driven continuously.
    self.window.request_redraw();
    Ok(())
}
/// Pulls the next sample from the appsink and uploads it into `texture`.
///
/// NOTE(review): `try_pull_sample(gst::ClockTime::NONE)` carries no timeout,
/// so this can block the render thread until a sample arrives — confirm this
/// is acceptable or pass a bounded timeout.
///
/// # Errors
/// Fails when no sample/caps/buffer can be obtained or the buffer cannot be
/// mapped readable.
pub fn copy_next_frame_to_texture(&self, texture: &wgpu::Texture) -> Result<()> {
    let frame = self
        .gst
        .appsink
        .try_pull_sample(gst::ClockTime::NONE)
        .context("Failed to pull sample from appsink")?;
    // Frame dimensions come from the caps negotiated for this sample.
    let caps = frame.caps().context("Failed to get caps from sample")?;
    let size = caps
        .structure(0)
        .context("Failed to get structure from caps")?;
    let width = size
        .get::<i32>("width")
        .context("Failed to get width from caps")?;
    let height = size
        .get::<i32>("height")
        .context("Failed to get height from caps")?;
    let buffer = frame.buffer().context("Failed to get buffer from sample")?;
    let map = buffer
        .map_readable()
        .context("Failed to map buffer readable")?;
    // RGB10A2 packs 4 bytes per pixel, hence `4 * width` bytes per row.
    // NOTE(review): the copy extent is `texture.size()` (window-sized) while
    // the row layout uses the frame's caps — these must agree or the upload
    // is wrong; verify the stream matches the texture dimensions.
    self.queue.write_texture(
        wgpu::TexelCopyTextureInfo {
            texture, // was `&texture`: needless extra borrow of a reference
            mip_level: 0,
            origin: wgpu::Origin3d::ZERO,
            aspect: wgpu::TextureAspect::All,
        },
        &map,
        wgpu::TexelCopyBufferLayout {
            offset: 0,
            bytes_per_row: Some(4 * width as u32),
            rows_per_image: Some(height as u32),
        },
        texture.size(),
    );
    Ok(())
}
}
/// Winit event-loop callbacks: state is created lazily on `resumed` and
/// rendering is driven by `RedrawRequested`.
impl ApplicationHandler<State> for App {
    fn resumed(&mut self, event_loop: &ActiveEventLoop) {
        #[allow(unused_mut)]
        let mut window_attributes = Window::default_attributes();
        let window = Arc::new(event_loop.create_window(window_attributes).unwrap());
        // Blocking here is fine: no events that depend on `state` have been
        // delivered yet.
        self.state = Some(pollster::block_on(State::new(window)).expect("Failed to block"));
    }

    /// Accepts a fully-built `State` injected through the event-loop proxy.
    fn user_event(&mut self, _event_loop: &ActiveEventLoop, event: State) {
        // Fix: `event` was bound `mut` but never mutated (`unused_mut`).
        self.state = Some(event);
    }

    fn about_to_wait(&mut self, _event_loop: &ActiveEventLoop) {
        let state = match &mut self.state {
            Some(canvas) => canvas,
            None => return,
        };
        // Keep requesting redraws so new video frames get presented.
        state.window.request_redraw();
    }

    fn window_event(
        &mut self,
        event_loop: &ActiveEventLoop,
        _window_id: winit::window::WindowId,
        event: WindowEvent,
    ) {
        let state = match &mut self.state {
            Some(canvas) => canvas,
            None => return,
        };
        match event {
            WindowEvent::CloseRequested => event_loop.exit(),
            WindowEvent::Resized(size) => state.resize(size.width, size.height),
            WindowEvent::RedrawRequested => {
                match state.render() {
                    Ok(_) => {}
                    // Reconfigure the surface if lost
                    Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => {
                        let size = state.window.inner_size();
                        tracing::info!("Reconfiguring surface to {size:?}");
                        state.resize(size.width, size.height);
                    }
                    // The system is out of memory, we should probably quit
                    Err(wgpu::SurfaceError::OutOfMemory) => event_loop.exit(),
                    // All other errors (e.g. Timeout) should be resolved by the next frame
                    Err(e) => {
                        tracing::error!("Failed to render frame: {e:?}");
                    }
                }
            }
            WindowEvent::KeyboardInput {
                event:
                    KeyEvent {
                        physical_key: PhysicalKey::Code(code),
                        state,
                        ..
                    },
                ..
            } => match (code, state.is_pressed()) {
                // Quit on Escape or Q press.
                (KeyCode::Escape, true) => event_loop.exit(),
                (KeyCode::KeyQ, true) => event_loop.exit(),
                _ => {}
            },
            _ => {}
        }
    }
}
/// Entry point: installs tracing, then hands control to the winit event loop.
pub fn main() -> anyhow::Result<()> {
    tracing_subscriber::fmt::init();
    // A user-event loop so a fully-built `State` can be injected via a proxy
    // (see `ApplicationHandler::user_event`).
    let event_loop = EventLoop::with_user_event().build()?;
    let mut app = App::new();
    event_loop.run_app(&mut app)?;
    Ok(())
}
/// Wraps a playing GStreamer `playbin3` pipeline whose video output lands
/// in an `appsink` for manual frame pulling.
pub struct Video {
    pipeline: gst::Pipeline,
    // Message bus for EOS/error notifications (see `poll`).
    bus: gst::Bus,
    // Raw-frame sink; samples are pulled in `copy_next_frame_to_texture`.
    appsink: gst_app::AppSink,
}
impl Video {
    /// Builds and starts a `playbin3` pipeline decoding a remote stream into
    /// an `appsink` producing raw `RGB10A2_LE` frames.
    ///
    /// NOTE(review): the URI and `api_key` are hardcoded credentials in
    /// source — move them to configuration before shipping.
    pub fn new() -> Result<Self> {
        gst::init()?;
        use gst::prelude::*;
        let pipeline = gst::parse::launch(
            r##"playbin3 uri=https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c video-sink="videoconvert ! video/x-raw,format=RGB10A2_LE ! appsink name=appsink""##,
        ).context("Failed to parse gst pipeline")?;
        let pipeline = pipeline
            .downcast::<gst::Pipeline>()
            .map_err(|_| anyhow::anyhow!("Failed to downcast gst element to Pipeline"))?;
        // The appsink lives inside the custom `video-sink` bin declared above.
        let video_sink = pipeline.property::<gst::Bin>("video-sink");
        let appsink = video_sink
            .by_name("appsink")
            .context("Failed to get appsink from video-sink")?
            .downcast::<gst_app::AppSink>()
            .map_err(|_| {
                anyhow::anyhow!("Failed to downcast video-sink appsink to gst_app::AppSink")
            })?;
        // No-op callback: samples are pulled on demand by the renderer
        // rather than pushed from this callback.
        appsink.set_callbacks(
            gst_app::AppSinkCallbacks::builder()
                .new_sample(|_appsink| Ok(gst::FlowSuccess::Ok))
                .build(),
        );
        let bus = pipeline.bus().context("Failed to get gst pipeline bus")?;
        pipeline.set_state(gst::State::Playing)?;
        // Wait up to 5 seconds for the async transition to PLAYING to settle.
        pipeline
            .state(gst::ClockTime::from_seconds(5))
            .0
            .context("Failed to wait for pipeline")?;
        Ok(Self {
            pipeline,
            bus,
            appsink,
        })
    }

    /// Drains the pipeline bus, returning `true` once playback is finished
    /// (EOS) or a fatal error occurred; the pipeline is reset to `Null` in
    /// both cases.
    ///
    /// NOTE(review): `iter_timed(gst::ClockTime::NONE)` waits indefinitely
    /// per message, so this blocks until EOS/error — its only caller is
    /// currently commented out; confirm intent before re-enabling.
    pub fn poll(&mut self) -> bool {
        use gst::prelude::*;
        for msg in self.bus.iter_timed(gst::ClockTime::NONE) {
            use gst::MessageView;
            match msg.view() {
                MessageView::Eos(..) => {
                    tracing::info!("End of stream");
                    self.pipeline.set_state(gst::State::Null).ok();
                    return true;
                }
                MessageView::Error(err) => {
                    tracing::error!(
                        "Error from {:?}: {} ({:?})",
                        err.src().map(|s| s.path_string()),
                        err.error(),
                        err.debug()
                    );
                    self.pipeline.set_state(gst::State::Null).ok();
                    return true;
                }
                _ => {}
            }
        }
        false
    }
}

View File

@@ -0,0 +1,31 @@
// Vertex shader

// Data interpolated from the vertex to the fragment stage.
struct VertexOutput {
    // Clip-space position of the generated fullscreen-triangle vertex.
    @builtin(position) clip_position: vec4<f32>,
    // UV coordinates used to sample the video texture.
    @location(0) tex_coords: vec2<f32>,
};
@vertex
fn vs_main(
    @builtin(vertex_index) in_vertex_index: u32,
) -> VertexOutput {
    var out: VertexOutput;
    // Bufferless fullscreen triangle: indices 0,1,2 yield uv (0,0), (2,0),
    // (0,2), whose triangle covers the whole viewport.
    let uv = vec2<f32>(f32((in_vertex_index << 1u) & 2u), f32(in_vertex_index & 2u));
    out.clip_position = vec4<f32>(uv * 2.0 - 1.0, 0.0, 1.0);
    // Flip Y: clip space is y-up while texture coordinates are y-down.
    out.clip_position.y = -out.clip_position.y;
    out.tex_coords = uv;
    return out;
}
// Fragment shader

// Video texture and its sampler, bound at group 0 by the host.
@group(0) @binding(0)
var t_diffuse: texture_2d<f32>;
@group(0) @binding(1)
var s_diffuse: sampler;

// Samples the video texture directly; no tone mapping applied here.
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    return textureSample(t_diffuse, s_diffuse, in.tex_coords);
}

24
flake.lock generated
View File

@@ -3,11 +3,11 @@
"advisory-db": { "advisory-db": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1763456551, "lastModified": 1765465865,
"narHash": "sha256-z5NogiOp+1r7Fd39jVFN0kT3aXUef8sYkuBsrAUNB5g=", "narHash": "sha256-jAyDD6FKEWZafIKN4KjzdQywcS/gR9sHz4zzjxefXcA=",
"owner": "rustsec", "owner": "rustsec",
"repo": "advisory-db", "repo": "advisory-db",
"rev": "6799e5dea99315eb8de85c6084fd99892b4a25d0", "rev": "d0bdb37b2b1dc8a81f47e2042d59227b1f06473f",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -18,11 +18,11 @@
}, },
"crane": { "crane": {
"locked": { "locked": {
"lastModified": 1762538466, "lastModified": 1765739568,
"narHash": "sha256-8zrIPl6J+wLm9MH5ksHcW7BUHo7jSNOu0/hA0ohOOaM=", "narHash": "sha256-gQYx35Of4UDKUjAYvmxjUEh/DdszYeTtT6MDin4loGE=",
"owner": "ipetkov", "owner": "ipetkov",
"repo": "crane", "repo": "crane",
"rev": "0cea393fffb39575c46b7a0318386467272182fe", "rev": "67d2baff0f9f677af35db61b32b5df6863bcc075",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -106,11 +106,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1763283776, "lastModified": 1765472234,
"narHash": "sha256-Y7TDFPK4GlqrKrivOcsHG8xSGqQx3A6c+i7novT85Uk=", "narHash": "sha256-9VvC20PJPsleGMewwcWYKGzDIyjckEz8uWmT0vCDYK0=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "50a96edd8d0db6cc8db57dab6bb6d6ee1f3dc49a", "rev": "2fbfb1d73d239d2402a8fe03963e37aab15abe8b",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -138,11 +138,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1763433504, "lastModified": 1765766816,
"narHash": "sha256-cVid5UNpk88sPYHkLAA5aZEHOFQXSB/2L1vl18Aq7IM=", "narHash": "sha256-m2au5a2x9L3ikyBi0g3/NRJSjmHVDvT42mn+O6FlyPs=",
"owner": "oxalica", "owner": "oxalica",
"repo": "rust-overlay", "repo": "rust-overlay",
"rev": "42ce16c6d8318a654d53f047c9400b7d902d6e61", "rev": "4f53a635709d82652567f51ef7af4365fbc0c88b",
"type": "github" "type": "github"
}, },
"original": { "original": {

View File

@@ -56,7 +56,7 @@
src = let src = let
filterBySuffix = path: exts: lib.any (ext: lib.hasSuffix ext path) exts; filterBySuffix = path: exts: lib.any (ext: lib.hasSuffix ext path) exts;
sourceFilters = path: type: (craneLib.filterCargoSources path type) || filterBySuffix path [".c" ".h" ".hpp" ".cpp" ".cc"]; sourceFilters = path: type: (craneLib.filterCargoSources path type) || filterBySuffix path [".c" ".h" ".hpp" ".cpp" ".cc" "wgsl"];
in in
lib.cleanSourceWith { lib.cleanSourceWith {
filter = sourceFilters; filter = sourceFilters;
@@ -70,36 +70,49 @@
nativeBuildInputs = with pkgs; [ nativeBuildInputs = with pkgs; [
pkg-config pkg-config
]; ];
# LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [pkgs.wayland];
LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath buildInputs; LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath buildInputs;
# SYSTEM_DEPS_LINK = "static";
# PKG_CONFIG_ALL_STATIC = "1";
buildInputs = with pkgs; buildInputs = with pkgs;
[ [
gst_all_1.gst-editing-services
gst_all_1.gst-libav gst_all_1.gst-libav
gst_all_1.gst-plugins-bad
gst_all_1.gst-plugins-base gst_all_1.gst-plugins-base
gst_all_1.gst-plugins-good gst_all_1.gst-plugins-good
gst_all_1.gst-plugins-rs gst_all_1.gst-plugins-bad
gst_all_1.gst-plugins-ugly gst_all_1.gst-plugins-ugly
gst_all_1.gst-rtsp-server gst_all_1.gst-plugins-rs
gst_all_1.gstreamer gst_all_1.gstreamer
glib
glib-networking
# bzip2_1_1
# libsysprof-capture
# pcre2
# libunwind
# elfutils
# zstd
openssl openssl
vulkan-loader vulkan-loader
glib
] ]
++ (lib.optionals pkgs.stdenv.isLinux [ ++ (lib.optionals pkgs.stdenv.isLinux [
gst_all_1.gstreamermm gst_all_1.gstreamermm
gst_all_1.gst-vaapi gst_all_1.gst-vaapi
# util-linux
# libselinux
# libsepol
alsa-lib-with-plugins alsa-lib-with-plugins
libxkbcommon libxkbcommon
udev udev
wayland wayland
wayland-protocols wayland-protocols
xorg.libX11 # xorg.libX11
xorg.libXi # xorg.libXi
xorg.libXrandr # xorg.libXrandr
]) ])
++ (lib.optionals pkgs.stdenv.isDarwin [ ++ (lib.optionals pkgs.stdenv.isDarwin [
libiconv libiconv
@@ -159,8 +172,8 @@
default = pkg; default = pkg;
}; };
devShells = { devShells = rec {
default = rust-shell =
pkgs.mkShell.override { pkgs.mkShell.override {
stdenv = stdenv =
if pkgs.stdenv.isLinux if pkgs.stdenv.isLinux
@@ -168,6 +181,8 @@
else pkgs.clangStdenv; else pkgs.clangStdenv;
} (commonArgs } (commonArgs
// { // {
GST_PLUGIN_PATH = "/run/current-system/sw/lib/gstreamer-1.0/";
GIO_EXTRA_MODULES = "${pkgs.glib-networking}/lib/gio/modules";
packages = with pkgs; packages = with pkgs;
[ [
toolchainWithRustAnalyzer toolchainWithRustAnalyzer
@@ -180,6 +195,7 @@
cargo-outdated cargo-outdated
lld lld
lldb lldb
perf
] ]
++ (lib.optionals pkgs.stdenv.isDarwin [ ++ (lib.optionals pkgs.stdenv.isDarwin [
apple-sdk_26 apple-sdk_26
@@ -188,6 +204,7 @@
mold mold
]); ]);
}); });
default = rust-shell;
}; };
} }
) )

62
gst/.github/workflows/build.yaml vendored Normal file
View File

@@ -0,0 +1,62 @@
name: build
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
env:
CARGO_TERM_COLOR: always
jobs:
checks-matrix:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- id: set-matrix
name: Generate Nix Matrix
run: |
set -Eeu
matrix="$(nix eval --json '.#githubActions.matrix')"
echo "matrix=$matrix" >> "$GITHUB_OUTPUT"
checks-build:
needs: checks-matrix
runs-on: ${{ matrix.os }}
strategy:
matrix: ${{fromJSON(needs.checks-matrix.outputs.matrix)}}
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: nix build -L '.#${{ matrix.attr }}'
codecov:
runs-on: ubuntu-latest
permissions:
id-token: "write"
contents: "read"
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- name: Run codecov
run: nix build .#checks.x86_64-linux.hello-llvm-cov
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v4.0.1
with:
flags: unittests
name: codecov-hello
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
files: ./result
verbose: true

38
gst/.github/workflows/docs.yaml vendored Normal file
View File

@@ -0,0 +1,38 @@
name: docs
on:
push:
branches: [ master ]
env:
CARGO_TERM_COLOR: always
jobs:
docs:
runs-on: ubuntu-latest
permissions:
id-token: "write"
contents: "read"
pages: "write"
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- uses: DeterminateSystems/flake-checker-action@main
- name: Generate docs
run: nix build .#checks.x86_64-linux.hello-docs
- name: Setup Pages
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: result/share/doc
- name: Deploy to gh-pages
id: deployment
uses: actions/deploy-pages@v4

View File

@@ -1,3 +1,3 @@
/result
/target /target
.direnv .direnv
.media

1040
gst/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

14
gst/Cargo.toml Normal file
View File

@@ -0,0 +1,14 @@
[package]
name = "gst"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
error-stack = "0.6"
gstreamer = "0.24.4"
gstreamer-app = "0.24.4"
thiserror = "2.0"
tracing = "0.1"
wgpu = { version = "27.0.1", default-features = false }

7
gst/src/errors.rs Normal file
View File

@@ -0,0 +1,7 @@
pub use error_stack::{Report, ResultExt};

/// Opaque, context-free error marker used as the `error-stack` context type;
/// detail is supplied via attachments on the surrounding `Report`.
#[derive(Debug, thiserror::Error)]
#[error("An error occurred")]
pub struct Error;

/// Crate-wide result alias; the error defaults to `Report<Error>`.
pub type Result<T, E = error_stack::Report<Error>> = core::result::Result<T, E>;

245
gst/src/lib.rs Normal file
View File

@@ -0,0 +1,245 @@
pub mod errors;
use errors::*;
use gstreamer::prelude::*;
use std::sync::Arc;
// Process-wide singleton: `gstreamer::init()` must run before any other
// GStreamer call, so initialization is tied to the first access of `GST`.
static GST: std::sync::LazyLock<std::sync::Arc<Gst>> = std::sync::LazyLock::new(|| {
    gstreamer::init().expect("Failed to initialize GStreamer");
    std::sync::Arc::new(Gst {
        __private: core::marker::PhantomData,
    })
});
/// Global singleton handle proving GStreamer has been initialized;
/// obtain it via [`Gst::new`]. The private field prevents construction
/// outside this module.
pub struct Gst {
    __private: core::marker::PhantomData<()>,
}
impl Gst {
    /// Returns the shared, already-initialized GStreamer handle.
    pub fn new() -> Arc<Self> {
        Arc::clone(&GST)
    }

    /// Builds a [`Pipeline`] from a `gst-launch`-style description string.
    ///
    /// Fails when the description does not parse, or when the parsed
    /// top-level element is not a `gstreamer::Pipeline`.
    pub fn pipeline_from_str(&self, s: &str) -> Result<Pipeline> {
        let pipeline = gstreamer::parse::launch(s).change_context(Error)?;
        let pipeline = pipeline.downcast::<gstreamer::Pipeline>();
        let pipeline = match pipeline {
            Err(_e) => return Err(Error).attach("Failed to downcast to Pipeline"),
            Ok(p) => p,
        };
        Ok(Pipeline { pipeline })
    }
}
// NOTE(review): placeholder wrapper — `element` is currently never read.
pub struct Sink {
    element: gstreamer::Element,
}

/// Owning wrapper around a `gstreamer::Pipeline`; the pipeline is reset to
/// `Null` when dropped.
pub struct Pipeline {
    pipeline: gstreamer::Pipeline,
}
impl core::fmt::Debug for Pipeline {
    // Manual impl that only shows the wrapped element. A `state` field was
    // previously included but removed — presumably because querying state
    // here can wait on the pipeline; confirm before re-adding.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("Pipeline")
            .field("pipeline", &self.pipeline)
            .finish()
    }
}
impl Drop for Pipeline {
    fn drop(&mut self) {
        // Return the pipeline to Null so GStreamer tears down its resources;
        // the result is ignored because drop cannot propagate failure.
        let _ = self.pipeline.set_state(gstreamer::State::Null);
    }
}
impl Pipeline {
    /// Returns the pipeline's message [`Bus`].
    ///
    /// Errors when the underlying element reports no bus.
    pub fn bus(&self) -> Result<Bus> {
        let bus = self
            .pipeline
            .bus()
            .ok_or(Error)
            .attach("Failed to get bus from pipeline")?;
        Ok(Bus { bus })
    }

    /// Requests a state transition (e.g. to `Playing` or `Null`),
    /// converting a GStreamer state-change failure into a crate error.
    pub fn set_state(&self, state: gstreamer::State) -> Result<gstreamer::StateChangeSuccess> {
        let result = self
            .pipeline
            .set_state(state)
            .change_context(Error)
            .attach("Failed to set pipeline state")?;
        Ok(result)
    }
}
/// Thin wrapper around a pipeline's message bus.
pub struct Bus {
    bus: gstreamer::Bus,
}

impl Bus {
    /// Iterates over bus messages, waiting up to `timeout` for each one
    /// (a `None` timeout waits indefinitely, per GStreamer bus semantics).
    pub fn iter_timed(
        &self,
        timeout: impl Into<Option<gstreamer::ClockTime>>,
    ) -> gstreamer::bus::Iter<'_> {
        self.bus.iter_timed(timeout)
    }
}
/// Pads are link points between elements
// NOTE(review): wrapper is currently unused; `pad` is never read.
pub struct Pad {
    pad: gstreamer::Pad,
}

// NOTE(review): wrapper is currently unused; `element` is never read.
pub struct Element {
    element: gstreamer::Element,
}
// Smoke test: a valid launch string parses into a Pipeline.
#[test]
fn gst_parse_pipeline() {
    let gst = Gst::new();
    let pipeline = gst
        .pipeline_from_str("videotestsrc ! autovideosink")
        .expect("Failed to create pipeline");
    println!("{:?}", pipeline);
}
// A nonsense element name must be rejected at parse time.
#[test]
fn gst_parse_invalid_pipeline() {
    let gst = Gst::new();
    let result = gst.pipeline_from_str("invalidpipeline");
    assert!(result.is_err());
}
// End-to-end: build a test-source pipeline, play it, pump the bus until
// EOS or error, then shut the pipeline down.
#[test]
fn gst_play_pipeline() {
    let gst = Gst::new();
    let pipeline = gst
        .pipeline_from_str("videotestsrc ! autovideosink")
        .expect("Failed to create pipeline");
    let bus = pipeline.bus().expect("Failed to get bus from pipeline");
    pipeline
        .set_state(gstreamer::State::Playing)
        .expect("Unable to set the pipeline to the `Playing` state");
    // Block on the bus until the stream ends or errors out.
    for msg in bus.iter_timed(gstreamer::ClockTime::NONE) {
        use gstreamer::MessageView;
        match msg.view() {
            MessageView::Eos(..) => break,
            MessageView::Error(err) => {
                eprintln!(
                    "Error from {:?}: {} ({:?})",
                    err.src().map(|s| s.path_string()),
                    err.error(),
                    err.debug()
                );
                break;
            }
            _ => (),
        }
    }
    pipeline
        .set_state(gstreamer::State::Null)
        .expect("Unable to set the pipeline to the `Null` state");
}
// Reference test exercising the raw gstreamer API without the wrapper types;
// ignored by default because it streams a remote trailer and needs a sink.
#[test]
#[ignore]
fn gstreamer_unwrapped() {
    // BUG FIX: the Result of gstreamer::init() was silently discarded
    // (it is #[must_use]); if initialization fails every later call
    // misbehaves, so fail fast with a clear message instead.
    gstreamer::init().expect("Failed to initialize GStreamer");
    let uri = "https://gstreamer.freedesktop.org/data/media/sintel_trailer-480p.webm";
    let pipeline = gstreamer::parse::launch(&format!("playbin uri={}", uri)).unwrap();
    use gstreamer::prelude::*;
    pipeline.set_state(gstreamer::State::Playing).unwrap();
    let bus = pipeline.bus().unwrap();
    // Drain bus messages until the stream ends or errors.
    for msg in bus.iter_timed(gstreamer::ClockTime::NONE) {
        use gstreamer::MessageView;
        match msg.view() {
            MessageView::Eos(..) => break,
            MessageView::Error(err) => {
                eprintln!(
                    "Error from {:?}: {} ({:?})",
                    err.src().map(|s| s.path_string()),
                    err.error(),
                    err.debug()
                );
                break;
            }
            _ => (),
        }
    }
    pipeline.set_state(gstreamer::State::Null).unwrap();
}
// End-to-end appsink test: force RGBA output through a named capsfilter and
// receive samples via AppSink callbacks.
#[test]
fn test_appsink() {
    let gst = Gst::new();
    // BUG FIX: the separator between `videoconvert` and `capsfilter` was `|`;
    // gst-launch syntax links elements with `!`, so parsing always failed.
    // `num-buffers=30` bounds the stream so the bus loop sees EOS and the
    // test terminates instead of running forever.
    let pipeline = gst
        .pipeline_from_str(
            "videotestsrc num-buffers=30 ! videoconvert ! capsfilter name=video-filter ! appsink name=video-sink",
        )
        .expect("Failed to create pipeline");
    let bus = pipeline.bus().expect("Failed to get bus from pipeline");
    // Look up the named elements declared in the launch string.
    let sink = pipeline
        .pipeline
        .by_name("video-sink")
        .expect("Sink not found")
        .downcast::<gstreamer_app::AppSink>()
        .expect("Failed to downcast to AppSink");
    let capsfilter = pipeline
        .pipeline
        .by_name("video-filter")
        .expect("Capsfilter not found");
    // Constrain negotiation to raw RGBA video.
    let caps = gstreamer::Caps::builder("video/x-raw")
        .field("format", "RGBA")
        .build();
    capsfilter.set_property("caps", &caps);
    sink.set_callbacks(
        gstreamer_app::AppSinkCallbacks::builder()
            .new_sample(|sink| {
                // BUG FIX: the callback must pull the queued sample; an
                // appsink whose callback never pulls fills its internal
                // queue and stalls the upstream pipeline.
                let _sample = sink.pull_sample().map_err(|_| gstreamer::FlowError::Eos)?;
                Ok(gstreamer::FlowSuccess::Ok)
            })
            .build(),
    );
    pipeline
        .set_state(gstreamer::State::Playing)
        .expect("Unable to set the pipeline to the `Playing` state");
    // Block on the bus until EOS or the first error.
    for msg in bus.iter_timed(gstreamer::ClockTime::NONE) {
        use gstreamer::MessageView;
        match msg.view() {
            MessageView::Eos(..) => break,
            MessageView::Error(err) => {
                eprintln!(
                    "Error from {:?}: {} ({:?})",
                    err.src().map(|s| s.path_string()),
                    err.error(),
                    err.debug()
                );
                break;
            }
            _ => (),
        }
    }
    // Shut down explicitly (Drop would also do this) for symmetry with the
    // other playback tests.
    pipeline
        .set_state(gstreamer::State::Null)
        .expect("Unable to set the pipeline to the `Null` state");
}

0
gst/src/wgpu.rs Normal file
View File

8
jello-types/Cargo.toml Normal file
View File

@@ -0,0 +1,8 @@
[package]
name = "jello-types"
version = "0.1.0"
edition = "2024"
[dependencies]
serde = { version = "1.0.228", features = ["derive"] }
uuid = { version = "1.18.1", features = ["serde"] }

6
jello-types/src/lib.rs Normal file
View File

@@ -0,0 +1,6 @@
/// Minimal user record shared between crates.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct User {
    // Stable server-side identifier.
    id: uuid::Uuid,
    // Display name; None when the server omits it.
    name: Option<String>,
    // Tag of the user's primary image — presumably used to build avatar
    // URLs against the Jellyfin API; TODO confirm against callers.
    primary_image_tag: Option<String>,
}

View File

@@ -4,3 +4,9 @@ typegen:
cp typegen/jellyfin.rs api/src/jellyfin.rs cp typegen/jellyfin.rs api/src/jellyfin.rs
rm typegen/jellyfin.rs rm typegen/jellyfin.rs
hdrtest:
GST_DEBUG=3 gst-launch-1.0 playbin3 uri=https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c video-sink="videoconvert ! video/x-raw,format=(string)RGB10A2_LE ! fakesink"
codec:
GST_DEBUG=3 gst-discoverer-1.0 -v https://jellyfin.tsuba.darksailor.dev/Items/6010382cf25273e624d305907010d773/Download?api_key=036c140222464878862231ef66a2bc9c

View File

@@ -1,36 +1,38 @@
#[derive(Debug, clap::Parser)] #[derive(Debug, clap::Parser)]
pub struct Cli { pub struct Cli {
#[clap(subcommand)] // #[clap(subcommand)]
pub cmd: SubCommand, // pub cmd: SubCommand,
#[command(flatten)]
pub verbosity: clap_verbosity_flag::Verbosity,
} }
#[derive(Debug, clap::Subcommand)] // #[derive(Debug, clap::Subcommand)]
pub enum SubCommand { // pub enum SubCommand {
#[clap(name = "add")] // #[clap(name = "add")]
Add(Add), // Add(Add),
#[clap(name = "list")] // #[clap(name = "list")]
List(List), // List(List),
#[clap(name = "completions")] // #[clap(name = "completions")]
Completions { shell: clap_complete::Shell }, // Completions { shell: clap_complete::Shell },
} // }
//
#[derive(Debug, clap::Args)] // #[derive(Debug, clap::Args)]
pub struct Add { // pub struct Add {
#[clap(short, long)] // #[clap(short, long)]
pub name: String, // pub name: String,
} // }
//
#[derive(Debug, clap::Args)] // #[derive(Debug, clap::Args)]
pub struct List {} // pub struct List {}
//
impl Cli { // impl Cli {
pub fn completions(shell: clap_complete::Shell) { // pub fn completions(shell: clap_complete::Shell) {
let mut command = <Cli as clap::CommandFactory>::command(); // let mut command = <Cli as clap::CommandFactory>::command();
clap_complete::generate( // clap_complete::generate(
shell, // shell,
&mut command, // &mut command,
env!("CARGO_BIN_NAME"), // env!("CARGO_BIN_NAME"),
&mut std::io::stdout(), // &mut std::io::stdout(),
); // );
} // }
} // }

View File

@@ -1,25 +1,17 @@
mod cli;
mod errors; mod errors;
use api::JellyfinConfig; use api::JellyfinConfig;
use errors::*; use errors::*;
fn jellyfin_config_try() -> Result<JellyfinConfig> {
let file = std::fs::read("config.toml").change_context(Error)?;
let config: JellyfinConfig = toml::from_slice(&file)
.change_context(Error)
.attach("Failed to parse Jellyfin Config")?;
Ok(config)
}
fn jellyfin_config() -> JellyfinConfig {
jellyfin_config_try().unwrap_or_else(|err| {
eprintln!("Error loading Jellyfin configuration: {:?}", err);
std::process::exit(1);
})
}
fn main() -> Result<()> { fn main() -> Result<()> {
tracing_subscriber::fmt::init(); color_backtrace::install();
ui_iced::ui(jellyfin_config).change_context(Error)?; let args = <cli::Cli as clap::Parser>::parse();
tracing_subscriber::fmt()
.with_max_level(args.verbosity)
.with_file(true)
.with_line_number(true)
.init();
ui_iced::ui().change_context(Error)?;
Ok(()) Ok(())
} }

13
store/Cargo.toml Normal file
View File

@@ -0,0 +1,13 @@
[package]
name = "store"
version = "0.1.0"
edition = "2024"
[dependencies]
bson = { version = "3.1.0", features = ["serde"] }
futures = "0.3.31"
parking_lot = "0.12.5"
redb = { version = "3.1.0", features = ["uuid"] }
serde = "1.0.228"
tokio = { version = "1.48.0", features = ["rt"] }
uuid = "1.18.1"

10
store/src/lib.rs Normal file
View File

@@ -0,0 +1,10 @@
pub mod redb;
pub mod sqlite;
pub mod toml;
/// Abstraction over image caching backends.
pub trait Store {
    /// Returns the cached image bytes for `id`, or `None` on a cache miss.
    fn image(&self, id: &str) -> Option<Vec<u8>>;
    /// Stores `data` under `id` — presumably replacing any previous entry;
    /// TODO confirm replace-vs-ignore semantics per backend.
    fn save_image(&mut self, id: &str, data: &[u8]);
}
/// Application settings container (no fields defined yet).
pub struct Settings {}

225
store/src/redb.rs Normal file
View File

@@ -0,0 +1,225 @@
use std::{
borrow::Borrow,
collections::VecDeque,
marker::PhantomData,
path::Path,
sync::{Arc, RwLock, atomic::AtomicBool},
};
use futures::task::AtomicWaker;
use redb::{Error, Key, ReadableDatabase, TableDefinition, Value};
use serde::{Serialize, de::DeserializeOwned};
// Each table maps a UUID key to a BSON-serialized value blob.
const USERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("users");
const SERVERS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("servers");
const SETTINGS: TableDefinition<uuid::Uuid, Vec<u8>> = TableDefinition::new("settings");
/// Shared core of every typed table handle: an `Arc` to the database wrapper.
#[derive(Debug)]
pub struct TableInner<T> {
    db: Arc<T>,
}
// Manual Clone: a derive would add an unnecessary `T: Clone` bound,
// whereas cloning only needs to bump the Arc refcount.
impl<T> Clone for TableInner<T> {
    fn clone(&self) -> Self {
        Self {
            db: Arc::clone(&self.db),
        }
    }
}
impl<T> TableInner<T> {
    /// Wraps a shared database handle.
    fn new(db: Arc<T>) -> Self {
        Self { db }
    }
}
impl TableInner<DatabaseHandle> {
    /// Reads and BSON-decodes the value stored under `key` in `table`.
    ///
    /// Returns `Ok(None)` when the key is absent. Deserialization failures
    /// are folded into `redb::Error::Io`.
    ///
    /// NOTE(review): despite being `async`, this runs blocking redb I/O
    /// inline on the executor thread — confirm whether it should use
    /// `spawn_blocking` like `insert` attempts to.
    async fn get<'a, K: Key, V: Serialize + DeserializeOwned>(
        &self,
        table: TableDefinition<'static, K, Vec<u8>>,
        key: impl Borrow<K::SelfType<'a>>,
    ) -> Result<Option<V>> {
        let db: &redb::Database = &self.db.as_ref().database;
        // Read transactions never block writers in redb.
        let db_reader = db.begin_read()?;
        let table = db_reader.open_table(table)?;
        table
            .get(key)?
            .map(|value| bson::deserialize_from_slice(&value.value()))
            .transpose()
            .map_err(|e| redb::Error::Io(std::io::Error::other(e)))
    }
    /// Serializes `value` with BSON and inserts it under `key`, returning
    /// the previously stored value (decoded) if one existed.
    ///
    /// NOTE(review): the `spawn_blocking` closure borrows `db`, `key` and
    /// `value`, but `tokio::task::spawn_blocking` requires a `'static`
    /// closure — this likely does not compile as written (the commented
    /// `move` variant hints at the struggle); confirm and either move owned
    /// clones into the closure or drop the spawn.
    async fn insert<
        'a,
        'b,
        K: Key + Send + Sync,
        V: Serialize + DeserializeOwned + Send + Sync + 'a,
    >(
        &'b self,
        table: TableDefinition<'static, K, Vec<u8>>,
        key: impl Borrow<K::SelfType<'a>> + Send + 'b,
        value: V,
    ) -> Result<Option<V>> {
        let db: &redb::Database = &self.db.as_ref().database;
        // self.db
        //     .writing
        //     .store(true, std::sync::atomic::Ordering::SeqCst);
        // let out = tokio::task::spawn_blocking(move || -> Result<Option<V>>
        let out = tokio::task::spawn_blocking(|| -> Result<Option<V>> {
            let db_writer = db.begin_write()?;
            let out = {
                let mut table = db_writer.open_table(table)?;
                let serialized_value = bson::serialize_to_vec(&value)
                    .map_err(|e| redb::Error::Io(std::io::Error::other(e)))?;
                // `insert` hands back the previous value for the key, if any.
                let previous = table.insert(key, &serialized_value)?;
                let out = previous
                    .map(|value| bson::deserialize_from_slice(&value.value()))
                    .transpose()
                    .map_err(|e| redb::Error::Io(std::io::Error::other(e)));
                out
            };
            // Commit only after the table borrow is released by the block above.
            db_writer.commit()?;
            out
        })
        .await
        .expect("Task panicked");
        out
    }
}
// impl<K: Key, V: Serialize + DeserializeOwned> Table<K, V> for TableInner {
// async fn get(&self, key: K) -> Result<Option<Value>> {}
// async fn insert(&self, key: K, value: V) -> Result<Option<Value>> {}
// async fn modify(&self, key: K, v: FnOnce(V) -> V) -> Result<bool> {}
// async fn remove(&self, key: K) -> Result<Option<Value>> {}
// }
/// Typed handle over the `users` table.
#[derive(Debug)]
pub struct Users<T>(TableInner<T>);
// Manual Clone to avoid a spurious `T: Clone` bound (Arc bump only).
impl<T> Clone for Users<T> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}
impl<T> Users<T> {
    // Table definition backing this handle.
    const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = USERS;
}
/// Typed handle over the `servers` table.
#[derive(Debug)]
pub struct Servers<T>(TableInner<T>);
// Manual Clone to avoid a spurious `T: Clone` bound (Arc bump only).
impl<T> Clone for Servers<T> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}
impl<T> Servers<T> {
    // Table definition backing this handle.
    const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SERVERS;
}
/// Typed handle over the `settings` table.
#[derive(Debug)]
pub struct Settings<T>(TableInner<T>);
// Manual Clone to avoid a spurious `T: Clone` bound (Arc bump only).
impl<T> Clone for Settings<T> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}
impl<T> Settings<T> {
    // Table definition backing this handle.
    const TABLE: TableDefinition<'static, uuid::Uuid, Vec<u8>> = SETTINGS;
}
/// Top-level database facade: typed table handles plus the shared handle
/// they all point at.
#[derive(Debug, Clone)]
pub struct Database {
    users: Users<DatabaseHandle>,
    servers: Servers<DatabaseHandle>,
    settings: Settings<DatabaseHandle>,
    // Shared with every table handle via `Arc`.
    handle: Arc<DatabaseHandle>,
}
/// Shared state behind all table handles.
#[derive(Debug)]
pub struct DatabaseHandle {
    database: redb::Database,
    // Intended single-writer flag — currently never set; the guard Drop impl
    // that would clear it is commented out below. TODO confirm the plan.
    writing: AtomicBool,
    // Wakers for tasks waiting on write access — presently unused.
    wakers: RwLock<VecDeque<AtomicWaker>>,
}
/// RAII guard meant to release write access on drop.
/// NOTE(review): the corresponding `Drop` impl is commented out below, so
/// this guard currently does nothing — confirm before relying on it.
#[derive(Debug)]
pub struct DatabaseWriterGuard<'a> {
    handle: &'a DatabaseHandle,
    dropper: Arc<AtomicBool>,
}
// impl Drop for DatabaseWriterGuard<'_> {
// fn drop(&mut self) {
// self.handle
// .writing
// .store(false, std::sync::atomic::Ordering::SeqCst);
// let is_panicking = std::thread::panicking();
// let Ok(writer) = self.handle.wakers.write() else {
// if is_panicking {
// return;
// } else {
// panic!("Wakers lock poisoned");
// }
// }
// if let Some(waker) = (self.handle.wakers.write()).pop() {
// waker.wake();
// };
// // let mut wakers = self.handle.wakers.write().expect();
// // if let Some(waker) = self.handle.wakers.write().expect("Wakers lock poisoned").pop_front() {
// // waker.wake();
// // }
// // while let Some(waker) = wakers.pop_front() {
// // waker.wake();
// // }
// }
// }
// Crate-local Result defaulting the error to `redb::Error`.
type Result<O, E = redb::Error> = core::result::Result<O, E>;
/// Async key/value operations over one logical table; values are
/// (de)serialized with serde.
/// NOTE(review): no implementation is visible here — confirm intended
/// `modify` semantics (e.g. whether the produced `O` is written back).
pub trait Table<K: Key> {
    /// Inserts `value` under `key`, returning the previous value if any.
    fn insert<V: Serialize + DeserializeOwned>(
        &self,
        key: K,
        value: V,
    ) -> impl Future<Output = Result<Option<V>>> + Send;
    /// Transforms the value stored at `key`; returns whether one existed.
    fn modify<V: Serialize + DeserializeOwned, O: Serialize + DeserializeOwned>(
        &self,
        key: K,
        v: impl FnOnce(V) -> O,
    ) -> impl Future<Output = Result<bool>> + Send;
    /// Removes and returns the value stored at `key`, if any.
    fn remove<V: Serialize + DeserializeOwned>(
        &self,
        key: K,
    ) -> impl Future<Output = Result<Option<V>>> + Send;
    /// Fetches the value stored at `key`, if any.
    fn get<V: Serialize + DeserializeOwned>(
        &self,
        key: K,
    ) -> impl Future<Output = Result<Option<V>>> + Send;
}
impl Database {
    /// Opens (creating if necessary) the redb database at `path` and wires
    /// up the typed table handles, which all share one `DatabaseHandle`.
    pub fn create(path: impl AsRef<Path>) -> Result<Self, Error> {
        let database = redb::Database::create(path)?;
        let handle = Arc::new(DatabaseHandle {
            database,
            writing: AtomicBool::new(false),
            wakers: RwLock::new(VecDeque::new()),
        });
        let inner = TableInner::new(Arc::clone(&handle));
        Ok(Self {
            users: Users(inner.clone()),
            servers: Servers(inner.clone()),
            settings: Settings(inner),
            handle,
        })
    }
}

1
store/src/sqlite.rs Normal file
View File

@@ -0,0 +1 @@

1
store/src/toml.rs Normal file
View File

@@ -0,0 +1 @@

View File

@@ -1,262 +1,262 @@
use ::tap::*; // use ::tap::*;
//
use std::{collections::BTreeMap, sync::Arc}; // use std::{collections::BTreeMap, sync::Arc};
//
use gpui::{ // use gpui::{
App, Application, Bounds, ClickEvent, Context, ImageId, ImageSource, RenderImage, Resource, // App, Application, Bounds, ClickEvent, Context, ImageId, ImageSource, RenderImage, Resource,
SharedString, Window, WindowBounds, WindowOptions, actions, div, prelude::*, px, rgb, size, // SharedString, Window, WindowBounds, WindowOptions, actions, div, prelude::*, px, rgb, size,
}; // };
//
#[derive(Clone, Debug)] // #[derive(Clone, Debug)]
pub struct AppState { // pub struct AppState {
pub title: SharedString, // pub title: SharedString,
pub items: BTreeMap<SharedString, Item>, // pub items: BTreeMap<SharedString, Item>,
pub item_ids: BTreeMap<usize, SharedString>, // pub item_ids: BTreeMap<usize, SharedString>,
pub current_item: Option<SharedString>, // pub current_item: Option<SharedString>,
pub errors: Vec<String>, // pub errors: Vec<String>,
pub jellyfin_client: api::JellyfinClient, // pub jellyfin_client: api::JellyfinClient,
} // }
//
#[derive(Clone, Debug)] // #[derive(Clone, Debug)]
pub struct Item { // pub struct Item {
pub id: SharedString, // pub id: SharedString,
pub name: SharedString, // pub name: SharedString,
pub item_type: SharedString, // pub item_type: SharedString,
pub media_type: SharedString, // pub media_type: SharedString,
} // }
//
impl Render for AppState { // impl Render for AppState {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement { // fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
div() // div()
.flex() // .flex()
.flex_col() // .flex_col()
.size_full() // .size_full()
.justify_center() // .justify_center()
.text_color(rgb(0xffffff)) // .text_color(rgb(0xffffff))
.child(Self::header()) // .child(Self::header())
.child(Self::body(self, window, cx)) // .child(Self::body(self, window, cx))
.child(Self::footer()) // .child(Self::footer())
} // }
} // }
//
actions!(jello_actions, [OpenItem, OnLoadItem, MouseDownEvent]); // actions!(jello_actions, [OpenItem, OnLoadItem, MouseDownEvent]);
//
impl AppState { // impl AppState {
fn new(title: impl AsRef<str>, jellyfin_client: api::JellyfinClient) -> Self { // fn new(title: impl AsRef<str>, jellyfin_client: api::JellyfinClient) -> Self {
AppState { // AppState {
title: SharedString::new(title.as_ref()), // title: SharedString::new(title.as_ref()),
items: BTreeMap::new(), // items: BTreeMap::new(),
item_ids: BTreeMap::new(), // item_ids: BTreeMap::new(),
current_item: None, // current_item: None,
errors: Vec::new(), // errors: Vec::new(),
jellyfin_client, // jellyfin_client,
} // }
} // }
//
// fn on_mouse_down( // // fn on_mouse_down(
// &mut self, // // &mut self,
// event: &MouseDownEvent, // // event: &MouseDownEvent,
// window: &mut Window, // // window: &mut Window,
// cx: &mut Context<Self>, // // cx: &mut Context<Self>,
// ) { // // ) {
// // Handle mouse down event // // // Handle mouse down event
// } // // }
//
fn load_item(id: usize) -> impl Fn(&mut Self, &ClickEvent, &mut Window, &mut Context<Self>) { // fn load_item(id: usize) -> impl Fn(&mut Self, &ClickEvent, &mut Window, &mut Context<Self>) {
move |state: &mut Self, event: &ClickEvent, window: &mut Window, cx: &mut Context<Self>| { // move |state: &mut Self, event: &ClickEvent, window: &mut Window, cx: &mut Context<Self>| {
let item_id = id; // let item_id = id;
cx.spawn(async move |entity, app| { // cx.spawn(async move |entity, app| {
tracing::info!("Loading item with id: {}", item_id); // tracing::info!("Loading item with id: {}", item_id);
}); // });
} // }
} // }
//
fn hover_item(id: usize) -> impl Fn(&mut Self, &bool, &mut Window, &mut Context<Self>) { // fn hover_item(id: usize) -> impl Fn(&mut Self, &bool, &mut Window, &mut Context<Self>) {
move |state: &mut Self, item: &bool, window: &mut Window, cx: &mut Context<Self>| { // move |state: &mut Self, item: &bool, window: &mut Window, cx: &mut Context<Self>| {
dbg!("Hovering over item: {:?}", id); // dbg!("Hovering over item: {:?}", id);
} // }
} // }
//
fn header() -> impl IntoElement { // fn header() -> impl IntoElement {
div() // div()
.flex() // .flex()
.flex_row() // .flex_row()
.w_full() // .w_full()
.justify_end() // .justify_end()
.h_20() // .h_20()
.border_10() // .border_10()
.bg(rgb(0x333333)) // .bg(rgb(0x333333))
.child(Self::button("Refresh")) // .child(Self::button("Refresh"))
} // }
//
fn footer() -> impl IntoElement { // fn footer() -> impl IntoElement {
div().flex().flex_row().w_full().h_20().bg(rgb(0x333333)) // div().flex().flex_row().w_full().h_20().bg(rgb(0x333333))
} // }
//
fn body(&mut self, window: &mut Window, cx: &mut Context<AppState>) -> impl IntoElement { // fn body(&mut self, window: &mut Window, cx: &mut Context<AppState>) -> impl IntoElement {
div() // div()
.flex() // .flex()
.flex_row() // .flex_row()
.size_full() // .size_full()
.child(Self::content(self, window, cx)) // .child(Self::content(self, window, cx))
.child(Self::sidebar(self, window, cx)) // .child(Self::sidebar(self, window, cx))
} // }
//
fn button(label: &str) -> impl IntoElement { // fn button(label: &str) -> impl IntoElement {
div() // div()
.flex() // .flex()
.justify_center() // .justify_center()
.items_center() // .items_center()
.bg(rgb(0xff00ff)) // .bg(rgb(0xff00ff))
.text_color(rgb(0xffffff)) // .text_color(rgb(0xffffff))
.border_5() // .border_5()
.rounded_lg() // .rounded_lg()
.child(label.to_string()) // .child(label.to_string())
} // }
//
fn content(&mut self, window: &mut Window, cx: &mut Context<AppState>) -> impl IntoElement { // fn content(&mut self, window: &mut Window, cx: &mut Context<AppState>) -> impl IntoElement {
div() // div()
.debug_below() // .debug_below()
.w_3_4() // .w_3_4()
// .flex() // // .flex()
// .flex_wrap() // // .flex_wrap()
.bg(rgb(0x111111)) // .bg(rgb(0x111111))
.justify_start() // .justify_start()
.items_start() // .items_start()
.overflow_hidden() // .overflow_hidden()
.child( // .child(
div() // div()
.size_full() // .size_full()
.flex() // .flex()
.flex_wrap() // .flex_wrap()
.justify_start() // .justify_start()
.items_start() // .items_start()
.content_start() // .content_start()
.gap_y_10() // .gap_y_10()
.gap_x_10() // .gap_x_10()
.border_t_10() // .border_t_10()
.p_5() // .p_5()
.child(Self::card(cx, 1)) // .child(Self::card(cx, 1))
.child(Self::card(cx, 2)) // .child(Self::card(cx, 2))
.child(Self::card(cx, 3)) // .child(Self::card(cx, 3))
.child(Self::card(cx, 4)) // .child(Self::card(cx, 4))
.child(Self::card(cx, 5)) // .child(Self::card(cx, 5))
.child(Self::card(cx, 6)) // .child(Self::card(cx, 6))
.child(Self::card(cx, 7)) // .child(Self::card(cx, 7))
.child(Self::card(cx, 8)) // .child(Self::card(cx, 8))
.child(Self::card(cx, 9)), // .child(Self::card(cx, 9)),
) // )
} // }
//
fn sidebar(&mut self, window: &mut Window, cx: &mut Context<AppState>) -> impl IntoElement { // fn sidebar(&mut self, window: &mut Window, cx: &mut Context<AppState>) -> impl IntoElement {
div() // div()
.flex() // .flex()
.flex_col() // .flex_col()
.w_1_4() // .w_1_4()
.min_w_1_6() // .min_w_1_6()
.bg(rgb(0x222222)) // .bg(rgb(0x222222))
.child(div().size_full().bg(gpui::yellow())) // .child(div().size_full().bg(gpui::yellow()))
} // }
//
fn card(cx: &mut Context<AppState>, number: usize) -> impl IntoElement { // fn card(cx: &mut Context<AppState>, number: usize) -> impl IntoElement {
div() // div()
.id(number) // .id(number)
.on_click(cx.listener(Self::load_item(number))) // .on_click(cx.listener(Self::load_item(number)))
.on_hover(cx.listener(Self::hover_item(number))) // .on_hover(cx.listener(Self::hover_item(number)))
.flex() // .flex()
.flex_col() // .flex_col()
.w_48() // .w_48()
.h_64() // .h_64()
.p_10() // .p_10()
.bg(rgb(0xff00ff)) // .bg(rgb(0xff00ff))
.rounded_lg() // .rounded_lg()
} // }
} // }
//
pub fn ui(jellyfin_client: api::JellyfinClient) { // pub fn ui(jellyfin_client: api::JellyfinClient) {
Application::new().run(|cx: &mut App| { // Application::new().run(|cx: &mut App| {
let bounds = Bounds::centered(None, size(px(500.0), px(500.0)), cx); // let bounds = Bounds::centered(None, size(px(500.0), px(500.0)), cx);
cx.open_window( // cx.open_window(
WindowOptions { // WindowOptions {
window_bounds: Some(WindowBounds::Windowed(bounds)), // window_bounds: Some(WindowBounds::Windowed(bounds)),
..Default::default() // ..Default::default()
}, // },
|_, cx| cx.new(|_| AppState::new("Jello Media Browser", jellyfin_client)), // |_, cx| cx.new(|_| AppState::new("Jello Media Browser", jellyfin_client)),
) // )
.expect("Failed to open window"); // .expect("Failed to open window");
}) // })
} // }
//
#[derive(Clone, Debug)] // #[derive(Clone, Debug)]
pub struct Card { // pub struct Card {
pub id: usize, // pub id: usize,
pub title: SharedString, // pub title: SharedString,
pub description: SharedString, // pub description: SharedString,
pub image: SharedString, // pub image: SharedString,
pub image_blurhash: BlurHash, // pub image_blurhash: BlurHash,
pub media_type: SharedString, // pub media_type: SharedString,
pub loading: bool, // pub loading: bool,
} // }
//
impl Render for Card { // impl Render for Card {
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement { // fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
div() // div()
.id(self.id) // .id(self.id)
.flex() // .flex()
.flex_col() // .flex_col()
.w_48() // .w_48()
.h_64() // .h_64()
.p_10() // .p_10()
.bg(rgb(0xff00ff)) // .bg(rgb(0xff00ff))
.rounded_lg() // .rounded_lg()
.pipe(|card| { // .pipe(|card| {
if self.loading { // if self.loading {
card.child(self.image_blurhash.clone()) // card.child(self.image_blurhash.clone())
} else { // } else {
card.child(gpui::img(self.image.clone())) // card.child(gpui::img(self.image.clone()))
} // }
}) // })
} // }
} // }
//
#[derive(Clone, Debug)] // #[derive(Clone, Debug)]
pub struct BlurHash { // pub struct BlurHash {
pub id: ImageId, // pub id: ImageId,
pub data: Arc<RenderImage>, // pub data: Arc<RenderImage>,
} // }
//
impl BlurHash { // impl BlurHash {
pub fn new( // pub fn new(
data: impl AsRef<str>, // data: impl AsRef<str>,
width: u32, // width: u32,
height: u32, // height: u32,
punch: f32, // punch: f32,
) -> Result<Self, error_stack::Report<crate::Error>> { // ) -> Result<Self, error_stack::Report<crate::Error>> {
use error_stack::ResultExt; // use error_stack::ResultExt;
let decoded = // let decoded =
blurhash::decode(data.as_ref(), width, height, punch).change_context(crate::Error)?; // blurhash::decode(data.as_ref(), width, height, punch).change_context(crate::Error)?;
let buffer = image::RgbaImage::from_raw(width, height, decoded) // let buffer = image::RgbaImage::from_raw(width, height, decoded)
.ok_or(crate::Error) // .ok_or(crate::Error)
.attach("Failed to convert")?; // .attach("Failed to convert")?;
let frame = image::Frame::new(buffer); // let frame = image::Frame::new(buffer);
let render_image = RenderImage::new([frame]); // let render_image = RenderImage::new([frame]);
Ok(Self { // Ok(Self {
id: render_image.id, // id: render_image.id,
data: Arc::from(render_image), // data: Arc::from(render_image),
}) // })
} // }
} // }
//
impl Render for BlurHash { // impl Render for BlurHash {
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement { // fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
gpui::img(ImageSource::Render(self.data.clone())) // gpui::img(ImageSource::Render(self.data.clone()))
} // }
} // }
//
impl IntoElement for BlurHash { // impl IntoElement for BlurHash {
type Element = gpui::Img; // type Element = gpui::Img;
//
fn into_element(self) -> Self::Element { // fn into_element(self) -> Self::Element {
gpui::img(ImageSource::Render(self.data.clone())) // gpui::img(ImageSource::Render(self.data.clone()))
} // }
} // }

View File

@@ -2,6 +2,7 @@
name = "ui-iced" name = "ui-iced"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
license = "MIT"
[dependencies] [dependencies]
api = { version = "0.1.0", path = "../api" } api = { version = "0.1.0", path = "../api" }
@@ -12,6 +13,7 @@ iced = { workspace = true }
iced_video_player = { workspace = true } iced_video_player = { workspace = true }
reqwest = "0.12.24" reqwest = "0.12.24"
tap = "1.0.1" tap = "1.0.1"
toml = "0.9.8"
tracing = "0.1.41" tracing = "0.1.41"
url = "2.5.7" url = "2.5.7"
uuid = "1.18.1" uuid = "1.18.1"

View File

@@ -1,15 +1,19 @@
mod settings;
mod video;
mod shared_string; mod shared_string;
use iced_video_player::{Video, VideoPlayer}; use iced_video_player::{Video, VideoPlayer};
use shared_string::SharedString; use shared_string::SharedString;
use std::sync::Arc; use std::sync::Arc;
mod blur_hash; mod blur_hash;
use blur_hash::BlurHash; use blur_hash::BlurHash;
mod preview; mod preview;
use preview::Preview; // use preview::Preview;
use iced::{Alignment, Element, Length, Shadow, Task, widget::*}; use iced::{Alignment, Element, Length, Task, widget::*};
use std::collections::{BTreeMap, BTreeSet}; use std::collections::{BTreeMap, BTreeSet};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -102,37 +106,57 @@ pub enum Screen {
User, User,
Video, Video,
} }
#[derive(Debug, Clone)]
pub struct Config {
pub server_url: Option<String>,
pub device_id: Option<String>,
pub device_name: Option<String>,
pub client_name: Option<String>,
pub version: Option<String>,
}
impl Default for Config {
fn default() -> Self {
Config {
server_url: Some("http://localhost:8096".to_string()),
device_id: Some("jello-iced".to_string()),
device_name: Some("Jello Iced".to_string()),
client_name: Some("Jello".to_string()),
version: Some("0.1.0".to_string()),
}
}
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct State { struct State {
loading: Option<Loading>, loading: Option<Loading>,
current: Option<uuid::Uuid>, current: Option<uuid::Uuid>,
cache: ItemCache, cache: ItemCache,
jellyfin_client: api::JellyfinClient, jellyfin_client: Option<api::JellyfinClient>,
messages: Vec<String>, messages: Vec<String>,
history: Vec<Option<uuid::Uuid>>, history: Vec<Option<uuid::Uuid>>,
query: Option<String>, query: Option<String>,
screen: Screen, screen: Screen,
// Login form state settings: settings::SettingsState,
username_input: String,
password_input: String,
is_authenticated: bool, is_authenticated: bool,
// Video
video: Option<Arc<Video>>, video: Option<Arc<Video>>,
} }
impl State { impl State {
pub fn new(jellyfin_client: api::JellyfinClient) -> Self { pub fn new() -> Self {
State { State {
loading: None, loading: None,
current: None, current: None,
cache: ItemCache::default(), cache: ItemCache::default(),
jellyfin_client, jellyfin_client: None,
messages: Vec::new(), messages: Vec::new(),
history: Vec::new(), history: Vec::new(),
query: None, query: None,
screen: Screen::Home, screen: Screen::Home,
username_input: String::new(), settings: settings::SettingsState::default(),
password_input: String::new(), // username_input: String::new(),
// password_input: String::new(),
is_authenticated: false, is_authenticated: false,
video: None, video: None,
} }
@@ -141,8 +165,7 @@ impl State {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Message { pub enum Message {
OpenSettings, Settings(settings::SettingsMessage),
CloseSettings,
Refresh, Refresh,
Search, Search,
SearchQueryChanged(String), SearchQueryChanged(String),
@@ -152,98 +175,23 @@ pub enum Message {
SetToken(String), SetToken(String),
Back, Back,
Home, Home,
// Login-related messages // Login {
UsernameChanged(String), // username: String,
PasswordChanged(String), // password: String,
Login, // config: api::JellyfinConfig,
LoginSuccess(String), // },
Logout, // LoginSuccess(String),
Video(VideoMessage), // LoadedClient(api::JellyfinClient, bool),
} // Logout,
Video(video::VideoMessage),
#[derive(Debug, Clone)]
pub enum VideoMessage {
EndOfStream,
Open(url::Url),
Pause,
Play,
Seek(f64),
Stop,
Test,
} }
fn update(state: &mut State, message: Message) -> Task<Message> { fn update(state: &mut State, message: Message) -> Task<Message> {
// if let Some(client) = state.jellyfin_client.clone() {
match message { match message {
Message::OpenSettings => { Message::Settings(msg) => settings::update(&mut state.settings, msg),
state.screen = Screen::Settings;
Task::none()
}
Message::CloseSettings => {
state.screen = Screen::Home;
Task::none()
}
Message::UsernameChanged(username) => {
state.username_input = username;
Task::none()
}
Message::PasswordChanged(password) => {
state.password_input = password;
Task::none()
}
Message::Login => {
let username = state.username_input.clone();
let password = state.password_input.clone();
// Update the client config with the new credentials
let mut config = (*state.jellyfin_client.config).clone();
config.username = username;
config.password = password;
Task::perform(
async move {
let mut client = api::JellyfinClient::new(config);
client.authenticate().await
},
|result| match result {
Ok(auth_result) => {
if let Some(token) = auth_result.access_token {
Message::LoginSuccess(token)
} else {
Message::Error("Authentication failed: No token received".to_string())
}
}
Err(e) => Message::Error(format!("Login failed: {}", e)),
},
)
}
Message::LoginSuccess(token) => {
state.jellyfin_client.set_token(token.clone());
state.is_authenticated = true;
state.password_input.clear();
state.messages.push("Login successful!".to_string());
state.screen = Screen::Home;
// Save token and refresh items
let client = state.jellyfin_client.clone();
Task::perform(
async move {
let _ = client.save_token(".session").await;
},
|_| Message::Refresh,
)
}
Message::Logout => {
state.is_authenticated = false;
state.jellyfin_client.set_token("");
state.cache = ItemCache::default();
state.current = None;
state.username_input.clear();
state.password_input.clear();
state.messages.push("Logged out successfully".to_string());
Task::none()
}
Message::OpenItem(id) => { Message::OpenItem(id) => {
let client = state.jellyfin_client.clone(); if let Some(client) = state.jellyfin_client.clone() {
use api::jellyfin::BaseItemKind::*; use api::jellyfin::BaseItemKind::*;
if let Some(cached) = id.as_ref().and_then(|id| state.cache.get(id)) if let Some(cached) = id.as_ref().and_then(|id| state.cache.get(id))
&& matches!(cached._type, Video | Movie | Episode) && matches!(cached._type, Video | Movie | Episode)
@@ -251,7 +199,7 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
let url = client let url = client
.stream_url(id.expect("ID exists")) .stream_url(id.expect("ID exists"))
.expect("Failed to get stream URL"); .expect("Failed to get stream URL");
Task::done(Message::Video(VideoMessage::Open(url))) Task::done(Message::Video(video::VideoMessage::Open(url)))
} else { } else {
Task::perform( Task::perform(
async move { async move {
@@ -267,6 +215,9 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
}, },
) )
} }
} else {
Task::none()
}
} }
Message::LoadedItem(id, items) => { Message::LoadedItem(id, items) => {
state.cache.extend(id, items); state.cache.extend(id, items);
@@ -275,8 +226,7 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
Task::none() Task::none()
} }
Message::Refresh => { Message::Refresh => {
// Handle refresh logic if let Some(client) = state.jellyfin_client.clone() {
let client = state.jellyfin_client.clone();
let current = state.current; let current = state.current;
Task::perform( Task::perform(
async move { async move {
@@ -291,6 +241,9 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
Ok(items) => Message::LoadedItem(msg, items), Ok(items) => Message::LoadedItem(msg, items),
}, },
) )
} else {
Task::none()
}
} }
Message::Error(err) => { Message::Error(err) => {
tracing::error!("Error: {}", err); tracing::error!("Error: {}", err);
@@ -299,7 +252,10 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
} }
Message::SetToken(token) => { Message::SetToken(token) => {
tracing::info!("Authenticated with token: {}", token); tracing::info!("Authenticated with token: {}", token);
state.jellyfin_client.set_token(token); state
.jellyfin_client
.as_mut()
.map(|mut client| client.set_token(token));
state.is_authenticated = true; state.is_authenticated = true;
Task::none() Task::none()
} }
@@ -318,7 +274,8 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
} }
Message::Search => { Message::Search => {
// Handle search action // Handle search action
let client = state.jellyfin_client.clone(); // let client = state.jellyfin_client.clone();
if let Some(client) = state.jellyfin_client.clone() {
let query = state.query.clone().unwrap_or_default(); let query = state.query.clone().unwrap_or_default();
Task::perform(async move { client.search(query).await }, |r| match r { Task::perform(async move { client.search(query).await }, |r| match r {
Err(e) => Message::Error(format!("Search failed: {}", e)), Err(e) => Message::Error(format!("Search failed: {}", e)),
@@ -327,64 +284,18 @@ fn update(state: &mut State, message: Message) -> Task<Message> {
Message::LoadedItem(None, items) Message::LoadedItem(None, items)
} }
}) })
} } else {
Message::Video(msg) => match msg {
VideoMessage::EndOfStream => {
state.video = None;
Task::none() Task::none()
} }
VideoMessage::Open(url) => {
state.video = Video::new(&url)
.inspect_err(|err| {
tracing::error!("Failed to play video at {}: {:?}", url, err);
})
.ok()
.map(Arc::new);
Task::none()
} }
VideoMessage::Pause => { Message::Video(msg) => video::update(state, msg),
if let Some(video) = state.video.as_mut().and_then(Arc::get_mut) { _ => todo!(),
video.set_paused(true);
}
Task::none()
}
VideoMessage::Play => {
if let Some(video) = state.video.as_mut().and_then(Arc::get_mut) {
video.set_paused(false);
}
Task::none()
}
VideoMessage::Seek(position) => {
// if let Some(ref video) = state.video {
// // video.seek(position, true);
// }
Task::none()
}
VideoMessage::Stop => {
state.video = None;
Task::none()
}
VideoMessage::Test => {
let url = url::Url::parse(
// "file:///home/servius/Projects/jello/crates/iced_video_player/.media/test.mp4",
"https://gstreamer.freedesktop.org/data/media/sintel_trailer-480p.webm",
)
.unwrap();
state.video = Video::new(&url)
.inspect_err(|err| {
dbg!(err);
})
.ok()
.map(Arc::new);
Task::none()
}
},
} }
} }
fn view(state: &State) -> Element<'_, Message> { fn view(state: &State) -> Element<'_, Message> {
match state.screen { match state.screen {
Screen::Settings => settings(state), Screen::Settings => settings::settings(state),
Screen::Home | _ => home(state), Screen::Home | _ => home(state),
} }
} }
@@ -396,25 +307,9 @@ fn home(state: &State) -> Element<'_, Message> {
.into() .into()
} }
fn player(video: &Video) -> Element<'_, Message> {
container(
VideoPlayer::new(video)
.width(Length::Fill)
.height(Length::Fill)
.content_fit(iced::ContentFit::Contain)
.on_end_of_stream(Message::Video(VideoMessage::EndOfStream)),
)
.style(|_| container::background(iced::Color::BLACK))
.width(Length::Fill)
.height(Length::Fill)
.align_x(Alignment::Center)
.align_y(Alignment::Center)
.into()
}
fn body(state: &State) -> Element<'_, Message> { fn body(state: &State) -> Element<'_, Message> {
if let Some(ref video) = state.video { if let Some(ref video) = state.video {
player(video) video::player(video)
} else { } else {
scrollable( scrollable(
container( container(
@@ -437,7 +332,13 @@ fn header(state: &State) -> Element<'_, Message> {
row([ row([
container( container(
Button::new( Button::new(
Text::new(state.jellyfin_client.config.server_url.as_str()) Text::new(
state
.jellyfin_client
.as_ref()
.map(|c| c.config.server_url.as_str())
.unwrap_or("No Server"),
)
.align_x(Alignment::Start), .align_x(Alignment::Start),
) )
.on_press(Message::Home), .on_press(Message::Home),
@@ -453,9 +354,11 @@ fn header(state: &State) -> Element<'_, Message> {
container( container(
row([ row([
button("Refresh").on_press(Message::Refresh).into(), button("Refresh").on_press(Message::Refresh).into(),
button("Settings").on_press(Message::OpenSettings).into(), button("Settings")
.on_press(Message::Settings(settings::SettingsMessage::Open))
.into(),
button("TestVideo") button("TestVideo")
.on_press(Message::Video(VideoMessage::Test)) .on_press(Message::Video(video::VideoMessage::Test))
.into(), .into(),
]) ])
.spacing(10), .spacing(10),
@@ -508,123 +411,6 @@ fn footer(state: &State) -> Element<'_, Message> {
.into() .into()
} }
fn settings(state: &State) -> Element<'_, Message> {
let content = if state.is_authenticated {
// Authenticated view - show user info and logout
column([
Text::new("Settings").size(32).into(),
container(
column([
Text::new("Account").size(24).into(),
Text::new("Server URL").size(14).into(),
Text::new(state.jellyfin_client.config.server_url.as_str())
.size(12)
.into(),
container(Text::new("Status: Logged In").size(14))
.padding(10)
.width(Length::Fill)
.into(),
container(
row([
Button::new(Text::new("Logout"))
.padding(10)
.on_press(Message::Logout)
.into(),
Button::new(Text::new("Close"))
.padding(10)
.on_press(Message::CloseSettings)
.into(),
])
.spacing(10),
)
.padding(10)
.width(Length::Fill)
.into(),
])
.spacing(10)
.max_width(400)
.align_x(Alignment::Center),
)
.padding(20)
.width(Length::Fill)
.align_x(Alignment::Center)
.style(container::rounded_box)
.into(),
])
.spacing(20)
.padding(50)
.align_x(Alignment::Center)
} else {
// Not authenticated view - show login form
column([
Text::new("Settings").size(32).into(),
container(
column([
Text::new("Login to Jellyfin").size(24).into(),
Text::new("Server URL").size(14).into(),
Text::new(state.jellyfin_client.config.server_url.as_str())
.size(12)
.into(),
container(
TextInput::new("Username", &state.username_input)
.padding(10)
.size(16)
.on_input(Message::UsernameChanged),
)
.padding(10)
.width(Length::Fill)
.into(),
container(
TextInput::new("Password", &state.password_input)
.padding(10)
.size(16)
.secure(true)
.on_input(Message::PasswordChanged)
.on_submit(Message::Login),
)
.padding(10)
.width(Length::Fill)
.into(),
container(
row([
Button::new(Text::new("Login"))
.padding(10)
.on_press(Message::Login)
.into(),
Button::new(Text::new("Cancel"))
.padding(10)
.on_press(Message::CloseSettings)
.into(),
])
.spacing(10),
)
.padding(10)
.width(Length::Fill)
.into(),
])
.spacing(10)
.max_width(400)
.align_x(Alignment::Center),
)
.padding(20)
.width(Length::Fill)
.align_x(Alignment::Center)
.style(container::rounded_box)
.into(),
])
.spacing(20)
.padding(50)
.align_x(Alignment::Center)
};
container(content)
.width(Length::Fill)
.height(Length::Fill)
.align_x(Alignment::Center)
.align_y(Alignment::Center)
.into()
}
fn card(item: &Item) -> Element<'_, Message> { fn card(item: &Item) -> Element<'_, Message> {
let name = item let name = item
.name .name
@@ -660,25 +446,54 @@ fn card(item: &Item) -> Element<'_, Message> {
.into() .into()
} }
// fn video(url: &str fn init() -> (State, Task<Message>) {
// Create a default config for initial state
// let default_config = api::JellyfinConfig {
// server_url: "http://localhost:8096".parse().expect("Valid URL"),
// device_id: "jello-iced".to_string(),
// device_name: "Jello Iced".to_string(),
// client_name: "Jello".to_string(),
// version: "0.1.0".to_string(),
// };
// let default_client = api::JellyfinClient::new_with_config(default_config);
fn init(config: impl Fn() -> api::JellyfinConfig + 'static) -> impl Fn() -> (State, Task<Message>) {
move || {
let mut jellyfin = api::JellyfinClient::new(config());
( (
State::new(jellyfin.clone()), State::new(),
Task::perform( Task::perform(
async move { jellyfin.authenticate_with_cached_token(".session").await }, async move {
|token| match token { let config_str = std::fs::read_to_string("config.toml")
Ok(token) => Message::SetToken(token), .map_err(|e| api::JellyfinApiError::IoError(e))?;
Err(e) => Message::Error(format!("Authentication failed: {}", e)), let config: api::JellyfinConfig = toml::from_str(&config_str).map_err(|e| {
api::JellyfinApiError::IoError(std::io::Error::new(
std::io::ErrorKind::InvalidData,
e,
))
})?;
// Try to load cached token and authenticate
match std::fs::read_to_string(".session") {
Ok(token) => {
let client = api::JellyfinClient::pre_authenticated(token.trim(), config)?;
Ok((client, true))
}
Err(_) => {
// No cached token, create unauthenticated client
let client = api::JellyfinClient::new_with_config(config);
Ok((client, false))
}
}
},
|result: Result<_, api::JellyfinApiError>| match result {
// Ok((client, is_authenticated)) => Message::LoadedClient(client, is_authenticated),
Err(e) => Message::Error(format!("Initialization failed: {}", e)),
_ => Message::Error("Login Unimplemented".to_string()),
}, },
) )
.chain(Task::done(Message::Refresh)), .chain(Task::done(Message::Refresh)),
) )
}
} }
pub fn ui(config: impl Fn() -> api::JellyfinConfig + 'static) -> iced::Result { pub fn ui() -> iced::Result {
iced::application(init(config), update, view).run() iced::application(init, update, view).run()
} }

82
ui-iced/src/settings.rs Normal file
View File

@@ -0,0 +1,82 @@
use crate::*;
use iced::Element;
/// Renders the settings screen.
///
/// Currently a stub: the settings UI is not implemented yet, so an empty
/// element is returned regardless of application state. The parameter is
/// underscored to silence the unused-variable warning until the real UI
/// (see `SettingsState` / `screens`) is wired up.
pub fn settings(_state: &State) -> Element<'_, Message> {
    empty()
}
/// Handles a settings-related message.
///
/// `Open` and `Close` are currently no-ops (navigation is handled by the
/// parent `update`); `Select` only traces the requested screen switch.
/// No async work is spawned yet, so this always returns `Task::none()`.
pub fn update(_state: &mut SettingsState, message: SettingsMessage) -> Task<Message> {
    match message {
        // Not implemented yet: opening/closing performs no state change.
        SettingsMessage::Open | SettingsMessage::Close => {}
        SettingsMessage::Select(screen) => {
            tracing::trace!("Switching settings screen to {:?}", screen);
        }
    }
    Task::none()
}
/// Returns a zero-content placeholder element (an empty column), used by
/// screens whose UI has not been implemented yet.
pub fn empty() -> Element<'static, Message> {
    column([]).into()
}
/// State backing the settings screen and its sub-screens.
#[derive(Debug, Clone, Default)]
pub struct SettingsState {
    // In-progress credentials typed into the login form.
    login_form: LoginForm,
    // In-progress name/URL typed into the add-server form.
    server_form: ServerForm,
    // Which settings sub-screen is currently displayed.
    screen: SettingsScreen,
}
/// Messages produced by the settings UI.
#[derive(Debug, Clone)]
pub enum SettingsMessage {
    // Open the settings screen.
    Open,
    // Close the settings screen and return to the previous view.
    Close,
    // Switch to a specific settings sub-screen.
    Select(SettingsScreen),
}
/// The sub-screens available inside settings.
#[derive(Debug, Clone, Default)]
pub enum SettingsScreen {
    // Landing page of the settings view (the default).
    #[default]
    Main,
    // User account management.
    Users,
    // Server connection management.
    Servers,
}
/// A saved Jellyfin server entry as shown in the settings list.
#[derive(Debug, Clone)]
pub struct ServerItem {
    // Stable identifier for this server entry.
    pub id: uuid::Uuid,
    // Human-readable display name.
    pub name: SharedString,
    // Base URL of the server (stored as a string, not a parsed URL).
    pub url: SharedString,
    // IDs of the user accounts associated with this server.
    pub users: Vec<uuid::Uuid>,
}
/// A saved user account entry as shown in the settings list.
#[derive(Debug, Clone)]
pub struct UserItem {
    // Stable identifier for this user entry.
    pub id: uuid::Uuid,
    // Display name of the user.
    pub name: SharedString,
}
/// Input state of the login form; `None` means the field is untouched.
#[derive(Debug, Clone, Default)]
pub struct LoginForm {
    username: Option<String>,
    // NOTE(review): stored as a plain String — consider SecretSharedString
    // so the password cannot leak through Debug output.
    password: Option<String>,
}
/// Input state of the add-server form; `None` means the field is untouched.
#[derive(Debug, Clone, Default)]
pub struct ServerForm {
    name: Option<String>,
    url: Option<String>,
}
/// View functions for the individual settings sub-screens.
///
/// All of these are stubs that render nothing; the parameters are
/// underscored to silence unused-variable warnings until each screen
/// is implemented.
mod screens {
    use super::*;

    /// Main settings landing screen (stub).
    pub fn main(_state: &State) -> Element<'_, Message> {
        empty()
    }

    /// Server management screen (stub).
    pub fn server(_state: &State) -> Element<'_, Message> {
        empty()
    }

    /// User management screen (stub).
    pub fn user(_state: &State) -> Element<'_, Message> {
        empty()
    }
}

View File

@@ -49,6 +49,21 @@ impl std::ops::Deref for SharedString {
} }
} }
/// A shared string whose contents are redacted from `Debug` output.
///
/// Intended for secrets such as passwords and API tokens, so they can be
/// carried through message/state types that derive `Debug` without ever
/// appearing in logs. Deliberately does NOT implement `Display` or `Deref`.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SecretSharedString(ArcCow<'static, str>);

// Use `std::fmt` paths consistently (the original mixed `core::fmt::Debug`
// with `std::fmt::Formatter`; they alias the same items, but the file's
// convention is `std`).
impl std::fmt::Debug for SecretSharedString {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Always print a fixed placeholder — never the secret itself.
        f.write_str("(..secret..)")
    }
}

impl From<String> for SecretSharedString {
    fn from(s: String) -> Self {
        // Move the String into a reference-counted slice; clones are O(1).
        Self(ArcCow::Owned(Arc::from(s)))
    }
}
#[derive(Debug, PartialEq, Eq, Hash)] #[derive(Debug, PartialEq, Eq, Hash)]
pub enum ArcCow<'a, T: ?Sized> { pub enum ArcCow<'a, T: ?Sized> {
Borrowed(&'a T), Borrowed(&'a T),
@@ -66,3 +81,9 @@ where
} }
} }
} }
impl<'a, T> From<&'a T> for ArcCow<'a, T> {
fn from(value: &'a T) -> Self {
ArcCow::Borrowed(value)
}
}

87
ui-iced/src/video.rs Normal file
View File

@@ -0,0 +1,87 @@
use super::*;
/// Messages controlling video playback.
#[derive(Debug, Clone)]
pub enum VideoMessage {
    // The stream played to its end; playback should stop.
    EndOfStream,
    // Open and start playing the media at the given URL.
    Open(url::Url),
    // Pause the current video, if any.
    Pause,
    // Resume the current video, if any.
    Play,
    // Seek to a position (seconds, presumably — seeking not implemented yet).
    Seek(f64),
    // Stop playback and discard the player.
    Stop,
    // Play a hardcoded sample stream, for manual testing.
    Test,
}
/// Handles a video playback message, mutating the player stored in `state`.
///
/// Fixes over the previous revision:
/// - `Open` reported a failure twice (an `inspect_err` log AND a
///   `Message::Error`); it now reports once through the error message path.
/// - The framerate was logged at `error!` level — a leftover debug
///   statement — now `debug!`.
/// - The unused `position` binding in `Seek` is underscored.
pub fn update(state: &mut State, message: VideoMessage) -> Task<Message> {
    match message {
        VideoMessage::EndOfStream => {
            // Playback finished: drop the player and fall back to the browse UI.
            state.video = None;
            Task::none()
        }
        VideoMessage::Open(url) => match Video::new(&url).map(Arc::new) {
            Ok(video) => {
                tracing::debug!("Opened video at {} (framerate {})", url, video.framerate());
                state.video = Some(video);
                Task::none()
            }
            // Route the failure through the app's single error path.
            Err(err) => Task::done(Message::Error(format!(
                "Error opening video at {}: {:?}",
                url, err
            ))),
        },
        VideoMessage::Pause => {
            // `Arc::get_mut` only yields a handle while the player is not
            // shared elsewhere; if it is, the toggle is silently skipped.
            if let Some(video) = state.video.as_mut().and_then(Arc::get_mut) {
                video.set_paused(true);
            }
            Task::none()
        }
        VideoMessage::Play => {
            if let Some(video) = state.video.as_mut().and_then(Arc::get_mut) {
                video.set_paused(false);
            }
            Task::none()
        }
        VideoMessage::Seek(_position) => {
            // TODO: seeking is not implemented yet; the position is ignored.
            Task::none()
        }
        VideoMessage::Stop => {
            state.video = None;
            Task::none()
        }
        VideoMessage::Test => {
            // Hardcoded public sample stream used for quick manual testing.
            let url = url::Url::parse(
                "https://gstreamer.freedesktop.org/data/media/sintel_trailer-480p.webm",
            )
            .expect("Impossible: Failed to parse hardcoded URL");
            Task::done(Message::Video(VideoMessage::Open(url)))
        }
    }
}
/// Wraps the video widget in a full-window, centered, black-backed container.
pub fn player(video: &Video) -> Element<'_, Message> {
    // Build the player widget first, then frame it.
    let widget = VideoPlayer::new(video)
        .width(Length::Fill)
        .height(Length::Fill)
        .content_fit(iced::ContentFit::Contain)
        .on_end_of_stream(Message::Video(VideoMessage::EndOfStream));

    container(widget)
        .width(Length::Fill)
        .height(Length::Fill)
        .align_x(Alignment::Center)
        .align_y(Alignment::Center)
        // Letterbox bars render as plain black.
        .style(|_| container::background(iced::Color::BLACK))
        .into()
}