feat: Initial commit

This commit is contained in:
uttarayan21
2025-10-31 20:54:28 +05:30
commit 07027d6121
24 changed files with 68758 additions and 0 deletions

1
.envrc Normal file
View File

@@ -0,0 +1 @@
use flake

62
.github/workflows/build.yaml vendored Normal file
View File

@@ -0,0 +1,62 @@
name: build
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
env:
CARGO_TERM_COLOR: always
jobs:
checks-matrix:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- id: set-matrix
name: Generate Nix Matrix
run: |
set -Eeu
matrix="$(nix eval --json '.#githubActions.matrix')"
echo "matrix=$matrix" >> "$GITHUB_OUTPUT"
checks-build:
needs: checks-matrix
runs-on: ${{ matrix.os }}
strategy:
matrix: ${{fromJSON(needs.checks-matrix.outputs.matrix)}}
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: nix build -L '.#${{ matrix.attr }}'
codecov:
runs-on: ubuntu-latest
permissions:
id-token: "write"
contents: "read"
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- name: Run codecov
run: nix build .#checks.x86_64-linux.jello-llvm-cov
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v4.0.1
with:
flags: unittests
name: codecov-jello
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
files: ./result
verbose: true

38
.github/workflows/docs.yaml vendored Normal file
View File

@@ -0,0 +1,38 @@
name: docs
on:
push:
branches: [ master ]
env:
CARGO_TERM_COLOR: always
jobs:
docs:
runs-on: ubuntu-latest
permissions:
id-token: "write"
contents: "read"
pages: "write"
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- uses: DeterminateSystems/flake-checker-action@main
- name: Generate docs
run: nix build .#checks.x86_64-linux.jello-docs
- name: Setup Pages
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: result/share/doc
- name: Deploy to gh-pages
id: deployment
uses: actions/deploy-pages@v4

3
.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/result
/target
.direnv

7312
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

18
Cargo.toml Normal file
View File

@@ -0,0 +1,18 @@
[workspace]
members = [".", "api", "typegen"]
[package]
name = "jello"
version = "0.1.0"
edition = "2024"
license = "MIT"
[dependencies]
clap = { version = "4.5", features = ["derive"] }
clap_complete = "4.5"
error-stack = "0.6"
gpui = { version = "0.2.2", default-features = false, features = ["wayland"] }
thiserror = "2.0"
tokio = "1.43.1"
tracing = "0.1"
tracing-subscriber = "0.3"

1
api/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

11
api/Cargo.toml Normal file
View File

@@ -0,0 +1,11 @@
[package]
name = "api"
version = "0.1.0"
edition = "2024"
[dependencies]
iref = { version = "3.2.2", features = ["serde"] }
reqwest = "0.12.24"
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.145"
thiserror = "2.0.17"

132
api/src/lib.rs Normal file
View File

@@ -0,0 +1,132 @@
use serde::{Deserialize, Serialize};
/// Errors produced by the Jellyfin API client.
#[derive(thiserror::Error, Debug)]
pub enum JellyfinApiError {
    /// Transport/HTTP failure from the underlying `reqwest` client.
    #[error("Jellyfin API error: {0}")]
    ReqwestError(#[from] reqwest::Error),
}
/// Crate-local result alias defaulting the error type to [`JellyfinApiError`].
type Result<T, E = JellyfinApiError> = std::result::Result<T, E>;
/// HTTP client bound to a single Jellyfin server.
#[derive(Debug, Clone)]
pub struct JellyfinClient {
    // Shared reqwest client; cloning reuses the same connection pool.
    client: reqwest::Client,
    config: JellyfinConfig,
}
impl JellyfinClient {
    /// Creates a client for the given server configuration with a fresh
    /// `reqwest` connection pool.
    pub fn new(config: JellyfinConfig) -> Self {
        JellyfinClient {
            client: reqwest::Client::new(),
            config,
        }
    }

    /// Builds a POST request for `uri` relative to the configured server,
    /// with the Emby/Jellyfin authorization header and a JSON content type
    /// attached.
    ///
    /// Trailing slashes on the server URL and leading slashes on `uri` are
    /// stripped so the joined URL never contains a double slash (IRIs such
    /// as `https://host/` would otherwise yield `https://host//path`).
    pub fn post(&self, uri: impl AsRef<str>) -> reqwest::RequestBuilder {
        let base = self.config.server_url.as_str().trim_end_matches('/');
        let path = uri.as_ref().trim_start_matches('/');
        let url = format!("{}/{}", base, path);
        self.client
            .post(&url)
            .header(
                "X-Emby-Authorization",
                format!("MediaBrowser Client=\"Jello\", Device=\"Jello\", DeviceId=\"{}\", Version=\"1.0.0\"", self.config.device_id),
            )
            .header("Content-Type", "application/json")
    }

    // TODO: finish once AuthenticationResult parsing is wired up.
    // pub async fn authenticate(&mut self) -> Result<()> {
    //     self.post("Users/AuthenticateByName")
    //         .json(AuthenticateUserByName {
    //             username: self.config.username.clone(),
    //             pw: self.config.password.clone(),
    //         })
    //         .send()
    //         .await
    // }
}
/// Connection settings for a Jellyfin server.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JellyfinConfig {
    pub username: String,
    // NOTE(review): stored/serialized in plain text — consider a secret
    // store before release.
    pub password: String,
    /// Base IRI of the server, e.g. `https://jellyfin.example.com`.
    pub server_url: iref::IriBuf,
    /// Stable per-device identifier sent in the X-Emby-Authorization header.
    pub device_id: String,
}
/// Response body of `Users/AuthenticateByName`.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
pub struct AuthenticationResult {
    user: UserDto,
    session_info: Option<SessionInfoDto>,
    /// Bearer token for subsequent authenticated requests.
    access_token: Option<String>,
    server_id: Option<String>,
}
/// User record returned with authentication — fields not yet mapped.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
pub struct UserDto {}
/// Session information returned with authentication — fields not yet mapped.
///
/// Renamed from the misspelled `SerssionInfoDto`: `AuthenticationResult`
/// references `SessionInfoDto`, which previously did not exist, so the
/// crate failed to compile.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
pub struct SessionInfoDto {}
/// Request body for `Users/AuthenticateByName`.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
pub struct AuthenticateUserByName {
    username: String,
    // Serialized as "Pw" — Jellyfin's field name for the plain password.
    pw: String,
}
/// Request body for the Quick Connect flow (serialized as `{"Secret": …}`).
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
pub struct QuickConnectDto {
    secret: String,
}
/// Minimal user projection used internally (not exported).
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
struct User {
    id: String,
    configuration: Configuration,
}
/// Per-user playback preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
struct Configuration {
    audio_language_preference: Option<String>,
    play_default_audio_track: bool,
    // NOTE(review): non-optional while the audio preference is Option —
    // verify the server always sends SubtitleLanguagePreference.
    subtitle_language_preference: String,
}
/// Envelope for item list responses (`{"Items": [...]}`).
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
struct Items {
    items: Vec<MediaItem>,
}
/// A media library item as returned by the Jellyfin item endpoints.
///
/// `rename_all = "PascalCase"` handles most fields; fields whose JSON name
/// is not a plain PascalCase conversion of the Rust name carry explicit
/// `#[serde(rename = …)]` attributes — without them `year`, `imdb_rating`
/// and `runtime_ticks` would silently deserialize to `None`.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "PascalCase")]
pub struct MediaItem {
    pub id: String,
    pub name: String,
    /// Item kind, e.g. "Movie", "Series", "Episode" (JSON field "Type").
    #[serde(rename = "Type")]
    pub type_: String,
    pub path: Option<String>,
    pub collection_type: Option<String>,
    /// JSON field is "ProductionYear", not "Year".
    #[serde(rename = "ProductionYear")]
    pub year: Option<i32>,
    pub overview: Option<String>,
    /// JSON field is "CommunityRating", not "ImdbRating".
    #[serde(rename = "CommunityRating")]
    pub imdb_rating: Option<f32>,
    pub critic_rating: Option<i32>,
    /// JSON field is "RunTimeTicks" — capital "T" that PascalCase
    /// conversion of `runtime_ticks` would not produce.
    #[serde(rename = "RunTimeTicks")]
    pub runtime_ticks: Option<i64>,
    pub series_id: Option<String>,
    pub series_name: Option<String>,
    pub parent_index_number: Option<i64>,
    pub index_number: Option<i64>,
}

0
api/types.rs Normal file
View File

236
deny.toml Normal file
View File

@@ -0,0 +1,236 @@
# This template contains all of the possible sections and their default values
# Note that all fields that take a lint level have these possible values:
# * deny - An error will be produced and the check will fail
# * warn - A warning will be produced, but the check will not fail
# * allow - No warning or error will be produced, though in some cases a note
# will be
# The values provided in this template are the default values that will be used
# when any section or field is not specified in your own configuration
# Root options
# The graph table configures how the dependency graph is constructed and thus
# which crates the checks are performed against
[graph]
# If 1 or more target triples (and optionally, target_features) are specified,
# only the specified targets will be checked when running `cargo deny check`.
# This means, if a particular package is only ever used as a target specific
# dependency, such as, for example, the `nix` crate only being used via the
# `target_family = "unix"` configuration, that only having windows targets in
# this list would mean the nix crate, as well as any of its exclusive
# dependencies not shared by any other crates, would be ignored, as the target
# list here is effectively saying which targets you are building for.
targets = [
# The triple can be any string, but only the target triples built in to
# rustc (as of 1.40) can be checked against actual config expressions
#"x86_64-unknown-linux-musl",
# You can also specify which target_features you promise are enabled for a
# particular target. target_features are currently not validated against
# the actual valid features supported by the target architecture.
#{ triple = "wasm32-unknown-unknown", features = ["atomics"] },
]
# When creating the dependency graph used as the source of truth when checks are
# executed, this field can be used to prune crates from the graph, removing them
# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate
# is pruned from the graph, all of its dependencies will also be pruned unless
# they are connected to another crate in the graph that hasn't been pruned,
# so it should be used with care. The identifiers are [Package ID Specifications]
# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html)
#exclude = []
# If true, metadata will be collected with `--all-features`. Note that this can't
# be toggled off if true, if you want to conditionally enable `--all-features` it
# is recommended to pass `--all-features` on the cmd line instead
all-features = false
# If true, metadata will be collected with `--no-default-features`. The same
# caveat with `all-features` applies
no-default-features = false
# If set, these feature will be enabled when collecting metadata. If `--features`
# is specified on the cmd line they will take precedence over this option.
#features = []
# The output table provides options for how/if diagnostics are outputted
[output]
# When outputting inclusion graphs in diagnostics that include features, this
# option can be used to specify the depth at which feature edges will be added.
# This option is included since the graphs can be quite large and the addition
# of features from the crate(s) to all of the graph roots can be far too verbose.
# This option can be overridden via `--feature-depth` on the cmd line
feature-depth = 1
# This section is considered when running `cargo deny check advisories`
# More documentation for the advisories section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
[advisories]
# The path where the advisory databases are cloned/fetched into
#db-path = "$CARGO_HOME/advisory-dbs"
# The url(s) of the advisory databases to use
#db-urls = ["https://github.com/rustsec/advisory-db"]
# A list of advisory IDs to ignore. Note that ignored advisories will still
# output a note when they are encountered.
ignore = [
#"RUSTSEC-0000-0000",
#{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" },
#"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish
#{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" },
]
# If this is true, then cargo deny will use the git executable to fetch advisory database.
# If this is false, then it uses a built-in git library.
# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support.
# See Git Authentication for more information about setting up git authentication.
#git-fetch-with-cli = true
# This section is considered when running `cargo deny check licenses`
# More documentation for the licenses section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
[licenses]
# List of explicitly allowed licenses
# See https://spdx.org/licenses/ for list of possible licenses
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
allow = [
"MIT",
"Apache-2.0",
"Unicode-3.0",
#"Apache-2.0 WITH LLVM-exception",
]
# The confidence threshold for detecting a license from license text.
# The higher the value, the more closely the license text must be to the
# canonical license text of a valid SPDX license file.
# [possible values: any between 0.0 and 1.0].
confidence-threshold = 0.8
# Allow 1 or more licenses on a per-crate basis, so that particular licenses
# aren't accepted for every possible crate as with the normal allow list
exceptions = [
# Each entry is the crate and version constraint, and its specific allow
# list
#{ allow = ["Zlib"], crate = "adler32" },
]
# Some crates don't have (easily) machine readable licensing information,
# adding a clarification entry for it allows you to manually specify the
# licensing information
#[[licenses.clarify]]
# The package spec the clarification applies to
#crate = "ring"
# The SPDX expression for the license requirements of the crate
#expression = "MIT AND ISC AND OpenSSL"
# One or more files in the crate's source used as the "source of truth" for
# the license expression. If the contents match, the clarification will be used
# when running the license check, otherwise the clarification will be ignored
# and the crate will be checked normally, which may produce warnings or errors
# depending on the rest of your configuration
#license-files = [
# Each entry is a crate relative path, and the (opaque) hash of its contents
#{ path = "LICENSE", hash = 0xbd0eed23 }
#]
[licenses.private]
# If true, ignores workspace crates that aren't published, or are only
# published to private registries.
# To see how to mark a crate as unpublished (to the official registry),
# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field.
ignore = false
# One or more private registries that you might publish crates to, if a crate
# is only published to private registries, and ignore is true, the crate will
# not have its license(s) checked
registries = [
#"https://sekretz.com/registry
]
# This section is considered when running `cargo deny check bans`.
# More documentation about the 'bans' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
[bans]
# Lint level for when multiple versions of the same crate are detected
multiple-versions = "warn"
# Lint level for when a crate version requirement is `*`
wildcards = "allow"
# The graph highlighting used when creating dotgraphs for crates
# with multiple versions
# * lowest-version - The path to the lowest versioned duplicate is highlighted
# * simplest-path - The path to the version with the fewest edges is highlighted
# * all - Both lowest-version and simplest-path are used
highlight = "all"
# The default lint level for `default` features for crates that are members of
# the workspace that is being checked. This can be overridden by allowing/denying
# `default` on a crate-by-crate basis if desired.
workspace-default-features = "allow"
# The default lint level for `default` features for external crates that are not
# members of the workspace. This can be overridden by allowing/denying `default`
# on a crate-by-crate basis if desired.
external-default-features = "allow"
# List of crates that are allowed. Use with care!
allow = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" },
]
# List of crates to deny
deny = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" },
# Wrapper crates can optionally be specified to allow the crate when it
# is a direct dependency of the otherwise banned crate
#{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] },
]
# List of features to allow/deny
# Each entry the name of a crate and a version range. If version is
# not specified, all versions will be matched.
#[[bans.features]]
#crate = "reqwest"
# Features to not allow
#deny = ["json"]
# Features to allow
#allow = [
# "rustls",
# "__rustls",
# "__tls",
# "hyper-rustls",
# "rustls",
# "rustls-pemfile",
# "rustls-tls-webpki-roots",
# "tokio-rustls",
# "webpki-roots",
#]
# If true, the allowed features must exactly match the enabled feature set. If
# this is set there is no point setting `deny`
#exact = true
# Certain crates/versions that will be skipped when doing duplicate detection.
skip = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" },
]
# Similarly to `skip` allows you to skip certain crates during duplicate
# detection. Unlike skip, it also includes the entire tree of transitive
# dependencies starting at the specified crate, up to a certain depth, which is
# by default infinite.
skip-tree = [
#"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies
#{ crate = "ansi_term@0.11.0", depth = 20 },
]
# This section is considered when running `cargo deny check sources`.
# More documentation about the 'sources' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
[sources]
# Lint level for what to happen when a crate from a crate registry that is not
# in the allow list is encountered
unknown-registry = "warn"
# Lint level for what to happen when a crate from a git repository that is not
# in the allow list is encountered
unknown-git = "warn"
# List of URLs for allowed crate registries. Defaults to the crates.io index
# if not specified. If it is specified but empty, no registries are allowed.
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
# List of URLs for allowed Git repositories
allow-git = []
[sources.allow-org]
# github.com organizations to allow git sources for
github = []
# gitlab.com organizations to allow git sources for
gitlab = []
# bitbucket.org organizations to allow git sources for
bitbucket = []

172
flake.lock generated Normal file
View File

@@ -0,0 +1,172 @@
{
"nodes": {
"advisory-db": {
"flake": false,
"locked": {
"lastModified": 1761631338,
"narHash": "sha256-F6dlUrDiShwhMfPR+WoVmaQguGdEwjW9SI4nKlkay7c=",
"owner": "rustsec",
"repo": "advisory-db",
"rev": "2e45336771e36acf5bcefe7c99280ab214719707",
"type": "github"
},
"original": {
"owner": "rustsec",
"repo": "advisory-db",
"type": "github"
}
},
"crane": {
"locked": {
"lastModified": 1760924934,
"narHash": "sha256-tuuqY5aU7cUkR71sO2TraVKK2boYrdW3gCSXUkF4i44=",
"owner": "ipetkov",
"repo": "crane",
"rev": "c6b4d5308293d0d04fcfeee92705017537cad02f",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"crates-io-index": {
"flake": false,
"locked": {
"lastModified": 1759304910,
"narHash": "sha256-nf7smRifEW3u/d+dATK06T0RtKV8xURkug/8MI4AXlQ=",
"ref": "refs/heads/master",
"rev": "5f523323e8e03c9b94f2f9ed09a291e50dec2f40",
"shallow": true,
"type": "git",
"url": "https://github.com/rust-lang/crates.io-index"
},
"original": {
"shallow": true,
"type": "git",
"url": "https://github.com/rust-lang/crates.io-index"
}
},
"crates-nix": {
"inputs": {
"crates-io-index": "crates-io-index"
},
"locked": {
"lastModified": 1759305425,
"narHash": "sha256-Za25XBr6UjZyguuXbctheUNT/6wPhbLs8YSXNTqyXaw=",
"owner": "uttarayan21",
"repo": "crates.nix",
"rev": "8879158206af5508a2c740d732bca92ae799c2b5",
"type": "github"
},
"original": {
"owner": "uttarayan21",
"repo": "crates.nix",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nix-github-actions": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1737420293,
"narHash": "sha256-F1G5ifvqTpJq7fdkT34e/Jy9VCyzd5XfJ9TO8fHhJWE=",
"owner": "nix-community",
"repo": "nix-github-actions",
"rev": "f4158fa080ef4503c8f4c820967d946c2af31ec9",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nix-github-actions",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1761373498,
"narHash": "sha256-Q/uhWNvd7V7k1H1ZPMy/vkx3F8C13ZcdrKjO7Jv7v0c=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "6a08e6bb4e46ff7fcbb53d409b253f6bad8a28ce",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"advisory-db": "advisory-db",
"crane": "crane",
"crates-nix": "crates-nix",
"flake-utils": "flake-utils",
"nix-github-actions": "nix-github-actions",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1761705569,
"narHash": "sha256-dqljv29XldlKvdTwFw8GkxOQHrz3/13yxdwHW8+nzBI=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "bca7909cb02f5139e0a490b0ff4bae775ea3ebf6",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

181
flake.nix Normal file
View File

@@ -0,0 +1,181 @@
{
description = "A Nix flake for building, testing, and developing jello, a gpui-based Jellyfin client, on Linux and macOS.";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
crane.url = "github:ipetkov/crane";
nix-github-actions = {
url = "github:nix-community/nix-github-actions";
inputs.nixpkgs.follows = "nixpkgs";
};
crates-nix.url = "github:uttarayan21/crates.nix";
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
};
advisory-db = {
url = "github:rustsec/advisory-db";
flake = false;
};
};
outputs = {
self,
crane,
flake-utils,
nixpkgs,
rust-overlay,
advisory-db,
nix-github-actions,
crates-nix,
...
}:
flake-utils.lib.eachDefaultSystem (
system: let
pkgs = import nixpkgs {
inherit system;
overlays = [
rust-overlay.overlays.default
];
};
inherit (pkgs) lib;
cargoToml = builtins.fromTOML (builtins.readFile ./Cargo.toml);
name = cargoToml.package.name;
toolchain = pkgs.rust-bin.nightly.latest.default;
toolchainWithLLvmTools = toolchain.override {
extensions = ["rust-src" "llvm-tools"];
};
toolchainWithRustAnalyzer = toolchain.override {
extensions = ["rust-src" "rust-analyzer"];
};
crates = crates-nix.mkLib {inherit pkgs;};
craneLib = (crane.mkLib pkgs).overrideToolchain toolchain;
craneLibLLvmTools = (crane.mkLib pkgs).overrideToolchain toolchainWithLLvmTools;
src = let
filterBySuffix = path: exts: lib.any (ext: lib.hasSuffix ext path) exts;
sourceFilters = path: type: (craneLib.filterCargoSources path type) || filterBySuffix path [".c" ".h" ".hpp" ".cpp" ".cc"];
in
lib.cleanSourceWith {
filter = sourceFilters;
src = ./.;
};
commonArgs = rec {
inherit src;
pname = name;
stdenv = p: p.clangStdenv;
doCheck = false;
nativeBuildInputs = with pkgs; [
pkg-config
];
LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath buildInputs;
buildInputs = with pkgs;
[
vulkan-loader
openssl
]
++ (lib.optionals pkgs.stdenv.isLinux [
alsa-lib-with-plugins
libxkbcommon
udev
wayland
])
++ (lib.optionals pkgs.stdenv.isDarwin [
libiconv
apple-sdk_13
]);
};
cargoArtifacts = craneLib.buildPackage commonArgs;
in {
checks =
{
"${name}-clippy" = craneLib.cargoClippy (commonArgs
// {
inherit cargoArtifacts;
cargoClippyExtraArgs = "--all-targets -- --deny warnings";
});
"${name}-docs" = craneLib.cargoDoc (commonArgs // {inherit cargoArtifacts;});
"${name}-fmt" = craneLib.cargoFmt {inherit src;};
"${name}-toml-fmt" = craneLib.taploFmt {
src = pkgs.lib.sources.sourceFilesBySuffices src [".toml"];
};
# Audit dependencies
"${name}-audit" = craneLib.cargoAudit {
inherit src advisory-db;
};
# Audit licenses
"${name}-deny" = craneLib.cargoDeny {
inherit src;
};
"${name}-nextest" = craneLib.cargoNextest (commonArgs
// {
inherit cargoArtifacts;
partitions = 1;
partitionType = "count";
});
}
// lib.optionalAttrs (!pkgs.stdenv.isDarwin) {
"${name}-llvm-cov" = craneLibLLvmTools.cargoLlvmCov (commonArgs // {inherit cargoArtifacts;});
};
packages = let
pkg = craneLib.buildPackage (commonArgs
// {inherit cargoArtifacts;}
// {
postInstall = ''
mkdir -p $out/bin
mkdir -p $out/share/bash-completions
mkdir -p $out/share/fish/vendor_completions.d
mkdir -p $out/share/zsh/site-functions
$out/bin/${name} completions bash > $out/share/bash-completions/${name}.bash
$out/bin/${name} completions fish > $out/share/fish/vendor_completions.d/${name}.fish
$out/bin/${name} completions zsh > $out/share/zsh/site-functions/_${name}
'';
});
in {
"${name}" = pkg;
default = pkg;
};
devShells = {
default =
pkgs.mkShell.override {
stdenv = pkgs.clangStdenv;
# stdenv =
# if pkgs.stdenv.isLinux
# then (pkgs.stdenvAdapters.useMoldLinker pkgs.clangStdenv)
# else pkgs.clangStdenv;
} (commonArgs
// {
packages = with pkgs;
[
toolchainWithRustAnalyzer
cargo-nextest
cargo-deny
cargo-expand
bacon
cargo-make
cargo-hack
cargo-outdated
lld
]
++ (lib.optionals pkgs.stdenv.isDarwin [
apple-sdk_13
])
++ (lib.optionals pkgs.stdenv.isLinux [
mold
]);
});
};
}
)
// {
githubActions = nix-github-actions.lib.mkGithubMatrix {
checks = nixpkgs.lib.getAttrs ["x86_64-linux"] self.checks;
};
};
}

60448
jellyfin.json Normal file

File diff suppressed because it is too large Load Diff

36
src/cli.rs Normal file
View File

@@ -0,0 +1,36 @@
/// Top-level command-line interface for jello.
#[derive(Debug, clap::Parser)]
pub struct Cli {
    /// The selected subcommand.
    #[clap(subcommand)]
    pub cmd: SubCommand,
}
/// Available jello subcommands.
#[derive(Debug, clap::Subcommand)]
pub enum SubCommand {
    /// Add a named entry (see [`Add`]).
    #[clap(name = "add")]
    Add(Add),
    /// List entries (see [`List`]).
    #[clap(name = "list")]
    List(List),
    /// Emit a shell completion script to stdout.
    #[clap(name = "completions")]
    Completions { shell: clap_complete::Shell },
}
/// Arguments for the `add` subcommand.
#[derive(Debug, clap::Args)]
pub struct Add {
    /// Name of the entry to add.
    #[clap(short, long)]
    pub name: String,
}
/// Arguments for the `list` subcommand (none yet).
#[derive(Debug, clap::Args)]
pub struct List {}
impl Cli {
    /// Writes a completion script for `shell` to stdout.
    ///
    /// Uses the compile-time binary name (`CARGO_BIN_NAME`) so the emitted
    /// completions match the installed binary.
    pub fn completions(shell: clap_complete::Shell) {
        let mut cmd = <Self as clap::CommandFactory>::command();
        let bin_name = env!("CARGO_BIN_NAME");
        let mut out = std::io::stdout();
        clap_complete::generate(shell, &mut cmd, bin_name, &mut out);
    }
}

6
src/errors.rs Normal file
View File

@@ -0,0 +1,6 @@
// Re-export error-stack's core types so callers can use them via this module.
pub use error_stack::{Report, ResultExt};
/// Catch-all application error; context is attached via `error_stack` reports.
#[derive(Debug, thiserror::Error)]
#[error("An error occurred")]
pub struct Error;
/// Crate-wide result alias carrying an `error_stack` report by default.
pub type Result<T, E = error_stack::Report<Error>> = core::result::Result<T, E>;

9
src/main.rs Normal file
View File

@@ -0,0 +1,9 @@
mod errors;
// NOTE(review): these gpui imports are currently unused — main is a stub.
// Presumably kept for the upcoming window bootstrap; remove if plans change.
use gpui::{
    div, prelude::*, px, rgb, size, App, Application, Bounds, Context, SharedString, Window,
    WindowBounds, WindowOptions,
};
/// Application entry point — currently a stub; no UI is launched yet.
pub fn main() {
}

0
src/ui.rs Normal file
View File

0
src/ui/movies.rs Normal file
View File

0
src/ui/player.rs Normal file
View File

0
src/ui/series.rs Normal file
View File

1
typegen/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

11
typegen/Cargo.toml Normal file
View File

@@ -0,0 +1,11 @@
[package]
name = "typegen"
version = "0.1.0"
edition = "2024"
[dependencies]
indexmap = { version = "2.12.0", features = ["serde"] }
quote = "1.0.41"
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.145"
syn = { version = "2.0.108", features = ["full", "parsing"] }

80
typegen/src/main.rs Normal file
View File

@@ -0,0 +1,80 @@
use indexmap::IndexMap;
use syn::{FieldsNamed, parse_quote, token::Enum};
/// Minimal projection of the Jellyfin OpenAPI document — only the parts the
/// generator consumes.
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)]
pub struct JellyfinOpenapi {
    components: Components,
}
/// The `components` section of the spec: schema name → schema.
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)]
pub struct Components {
    // IndexMap preserves the spec's declaration order for stable codegen.
    schemas: indexmap::IndexMap<String, Schema>,
}
/// A single component schema from the spec.
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)]
pub struct Schema {
    // NOTE(review): `type` is required here; if any schema in jellyfin.json
    // (e.g. a pure `oneOf` union) omits "type", deserializing the whole
    // document fails. Consider `Option<Types>` — verify against the spec.
    #[serde(rename = "type")]
    _type: Types,
    /// Present for object schemas: property name → property schema.
    properties: Option<indexmap::IndexMap<String, Property>>,
    /// Present for union schemas: list of `$ref` variants.
    #[serde(rename = "oneOf")]
    one_of: Option<Vec<EnumVariant>>,
    description: Option<String>,
}
/// One `$ref` variant inside a `oneOf` union schema.
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)]
pub struct EnumVariant {
    #[serde(rename = "$ref")]
    _ref: String,
}
/// A single property of an object schema.
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)]
pub struct Property {
    // Absent when the property is described by `$ref`/`allOf` instead.
    #[serde(rename = "type")]
    _type: Option<Types>,
    nullable: Option<bool>,
}
/// JSON Schema primitive type names (serialized lowercase, e.g. "object").
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)]
#[serde(rename_all = "lowercase")]
pub enum Types {
    Object,
    String,
    Boolean,
    Array,
    Integer,
    Number,
}
fn main() {
let json = include_str!("../../jellyfin.json");
let jellyfin_openapi: JellyfinOpenapi = serde_json::from_str(json).unwrap();
let structs: IndexMap<String, Schema> = jellyfin_openapi
.components
.schemas
.iter()
.filter(|(_k, v)| v.properties.is_some())
.map(|(k, v)| (k.clone(), v.clone()))
.collect();
let enums: IndexMap<String, Schema> = jellyfin_openapi
.components
.schemas
.iter()
.filter(|(k, v)| v.one_of.is_some())
.map(|(k, v)| (k.clone(), v.clone()))
.collect();
let syn_structs: Vec<syn::ItemStruct> = structs
.iter()
.map(|(key, value)| {
let fields = value
.properties
.unwrap()
.iter()
.map(|(name, _type)| format!("{}:{}", name, _type.is_));
parse_quote! {
pub struct #key {
}
}
})
.collect();
}