feat: Initial commit
This commit is contained in:
62
.github/workflows/build.yaml
vendored
Normal file
62
.github/workflows/build.yaml
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
name: build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
branches: [ master ]
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
checks-matrix:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: DeterminateSystems/nix-installer-action@main
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- id: set-matrix
|
||||
name: Generate Nix Matrix
|
||||
run: |
|
||||
set -Eeu
|
||||
matrix="$(nix eval --json '.#githubActions.matrix')"
|
||||
echo "matrix=$matrix" >> "$GITHUB_OUTPUT"
|
||||
|
||||
checks-build:
|
||||
needs: checks-matrix
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix: ${{fromJSON(needs.checks-matrix.outputs.matrix)}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: DeterminateSystems/nix-installer-action@main
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: nix build -L '.#${{ matrix.attr }}'
|
||||
|
||||
codecov:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: "write"
|
||||
contents: "read"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: DeterminateSystems/nix-installer-action@main
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
|
||||
- name: Run codecov
|
||||
run: nix build .#checks.x86_64-linux.hello-llvm-cov
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
uses: codecov/codecov-action@v4.0.1
|
||||
with:
|
||||
flags: unittests
|
||||
name: codecov-hello
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./result
|
||||
verbose: true
|
||||
|
||||
38
.github/workflows/docs.yaml
vendored
Normal file
38
.github/workflows/docs.yaml
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
name: docs
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
docs:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: "write"
|
||||
contents: "read"
|
||||
pages: "write"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: DeterminateSystems/nix-installer-action@main
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- uses: DeterminateSystems/flake-checker-action@main
|
||||
|
||||
- name: Generate docs
|
||||
run: nix build .#checks.x86_64-linux.hello-docs
|
||||
|
||||
- name: Setup Pages
|
||||
uses: actions/configure-pages@v5
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-pages-artifact@v3
|
||||
with:
|
||||
path: result/share/doc
|
||||
|
||||
- name: Deploy to gh-pages
|
||||
id: deployment
|
||||
uses: actions/deploy-pages@v4
|
||||
|
||||
3
.gitignore
vendored
Normal file
3
.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/result
|
||||
/target
|
||||
.direnv
|
||||
2370
Cargo.lock
generated
Normal file
2370
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
34
Cargo.toml
Normal file
34
Cargo.toml
Normal file
@@ -0,0 +1,34 @@
|
||||
[package]
|
||||
name = "yarr"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
|
||||
[[bin]]
|
||||
name = "yarr"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
clap = { version = "4.5", features = ["derive"] }
|
||||
clap_complete = "4.5"
|
||||
error-stack = "0.5"
|
||||
thiserror = "2.0"
|
||||
tokio = { version = "1.43.1", features = ["full"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = "0.3"
|
||||
|
||||
# TUI dependencies
|
||||
ratatui = { version = "0.28", features = ["crossterm"] }
|
||||
crossterm = "0.28"
|
||||
|
||||
# HTTP client and serialization
|
||||
reqwest = { version = "0.12", features = ["json"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
||||
# Date/time handling
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
|
||||
# Async utilities
|
||||
futures = "0.3"
|
||||
urlencoding = "2.1.3"
|
||||
236
deny.toml
Normal file
236
deny.toml
Normal file
@@ -0,0 +1,236 @@
|
||||
# This template contains all of the possible sections and their default values
|
||||
|
||||
# Note that all fields that take a lint level have these possible values:
|
||||
# * deny - An error will be produced and the check will fail
|
||||
# * warn - A warning will be produced, but the check will not fail
|
||||
# * allow - No warning or error will be produced, though in some cases a note
|
||||
# will be
|
||||
|
||||
# The values provided in this template are the default values that will be used
|
||||
# when any section or field is not specified in your own configuration
|
||||
|
||||
# Root options
|
||||
|
||||
# The graph table configures how the dependency graph is constructed and thus
|
||||
# which crates the checks are performed against
|
||||
[graph]
|
||||
# If 1 or more target triples (and optionally, target_features) are specified,
|
||||
# only the specified targets will be checked when running `cargo deny check`.
|
||||
# This means, if a particular package is only ever used as a target specific
|
||||
# dependency, such as, for example, the `nix` crate only being used via the
|
||||
# `target_family = "unix"` configuration, that only having windows targets in
|
||||
# this list would mean the nix crate, as well as any of its exclusive
|
||||
# dependencies not shared by any other crates, would be ignored, as the target
|
||||
# list here is effectively saying which targets you are building for.
|
||||
targets = [
|
||||
# The triple can be any string, but only the target triples built in to
|
||||
# rustc (as of 1.40) can be checked against actual config expressions
|
||||
#"x86_64-unknown-linux-musl",
|
||||
# You can also specify which target_features you promise are enabled for a
|
||||
# particular target. target_features are currently not validated against
|
||||
# the actual valid features supported by the target architecture.
|
||||
#{ triple = "wasm32-unknown-unknown", features = ["atomics"] },
|
||||
]
|
||||
# When creating the dependency graph used as the source of truth when checks are
|
||||
# executed, this field can be used to prune crates from the graph, removing them
|
||||
# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate
|
||||
# is pruned from the graph, all of its dependencies will also be pruned unless
|
||||
# they are connected to another crate in the graph that hasn't been pruned,
|
||||
# so it should be used with care. The identifiers are [Package ID Specifications]
|
||||
# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html)
|
||||
#exclude = []
|
||||
# If true, metadata will be collected with `--all-features`. Note that this can't
|
||||
# be toggled off if true, if you want to conditionally enable `--all-features` it
|
||||
# is recommended to pass `--all-features` on the cmd line instead
|
||||
all-features = false
|
||||
# If true, metadata will be collected with `--no-default-features`. The same
|
||||
# caveat with `all-features` applies
|
||||
no-default-features = false
|
||||
# If set, these feature will be enabled when collecting metadata. If `--features`
|
||||
# is specified on the cmd line they will take precedence over this option.
|
||||
#features = []
|
||||
|
||||
# The output table provides options for how/if diagnostics are outputted
|
||||
[output]
|
||||
# When outputting inclusion graphs in diagnostics that include features, this
|
||||
# option can be used to specify the depth at which feature edges will be added.
|
||||
# This option is included since the graphs can be quite large and the addition
|
||||
# of features from the crate(s) to all of the graph roots can be far too verbose.
|
||||
# This option can be overridden via `--feature-depth` on the cmd line
|
||||
feature-depth = 1
|
||||
|
||||
# This section is considered when running `cargo deny check advisories`
|
||||
# More documentation for the advisories section can be found here:
|
||||
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
|
||||
[advisories]
|
||||
# The path where the advisory databases are cloned/fetched into
|
||||
#db-path = "$CARGO_HOME/advisory-dbs"
|
||||
# The url(s) of the advisory databases to use
|
||||
#db-urls = ["https://github.com/rustsec/advisory-db"]
|
||||
# A list of advisory IDs to ignore. Note that ignored advisories will still
|
||||
# output a note when they are encountered.
|
||||
ignore = [
|
||||
#"RUSTSEC-0000-0000",
|
||||
#{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" },
|
||||
#"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish
|
||||
#{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" },
|
||||
]
|
||||
# If this is true, then cargo deny will use the git executable to fetch advisory database.
|
||||
# If this is false, then it uses a built-in git library.
|
||||
# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support.
|
||||
# See Git Authentication for more information about setting up git authentication.
|
||||
#git-fetch-with-cli = true
|
||||
|
||||
# This section is considered when running `cargo deny check licenses`
|
||||
# More documentation for the licenses section can be found here:
|
||||
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
|
||||
[licenses]
|
||||
# List of explicitly allowed licenses
|
||||
# See https://spdx.org/licenses/ for list of possible licenses
|
||||
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
|
||||
allow = [
|
||||
"MIT",
|
||||
"Apache-2.0",
|
||||
"Unicode-3.0",
|
||||
#"Apache-2.0 WITH LLVM-exception",
|
||||
]
|
||||
# The confidence threshold for detecting a license from license text.
|
||||
# The higher the value, the more closely the license text must be to the
|
||||
# canonical license text of a valid SPDX license file.
|
||||
# [possible values: any between 0.0 and 1.0].
|
||||
confidence-threshold = 0.8
|
||||
# Allow 1 or more licenses on a per-crate basis, so that particular licenses
|
||||
# aren't accepted for every possible crate as with the normal allow list
|
||||
exceptions = [
|
||||
# Each entry is the crate and version constraint, and its specific allow
|
||||
# list
|
||||
#{ allow = ["Zlib"], crate = "adler32" },
|
||||
]
|
||||
|
||||
# Some crates don't have (easily) machine readable licensing information,
|
||||
# adding a clarification entry for it allows you to manually specify the
|
||||
# licensing information
|
||||
#[[licenses.clarify]]
|
||||
# The package spec the clarification applies to
|
||||
#crate = "ring"
|
||||
# The SPDX expression for the license requirements of the crate
|
||||
#expression = "MIT AND ISC AND OpenSSL"
|
||||
# One or more files in the crate's source used as the "source of truth" for
|
||||
# the license expression. If the contents match, the clarification will be used
|
||||
# when running the license check, otherwise the clarification will be ignored
|
||||
# and the crate will be checked normally, which may produce warnings or errors
|
||||
# depending on the rest of your configuration
|
||||
#license-files = [
|
||||
# Each entry is a crate relative path, and the (opaque) hash of its contents
|
||||
#{ path = "LICENSE", hash = 0xbd0eed23 }
|
||||
#]
|
||||
|
||||
[licenses.private]
|
||||
# If true, ignores workspace crates that aren't published, or are only
|
||||
# published to private registries.
|
||||
# To see how to mark a crate as unpublished (to the official registry),
|
||||
# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field.
|
||||
ignore = false
|
||||
# One or more private registries that you might publish crates to, if a crate
|
||||
# is only published to private registries, and ignore is true, the crate will
|
||||
# not have its license(s) checked
|
||||
registries = [
|
||||
#"https://sekretz.com/registry
|
||||
]
|
||||
|
||||
# This section is considered when running `cargo deny check bans`.
|
||||
# More documentation about the 'bans' section can be found here:
|
||||
# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
|
||||
[bans]
|
||||
# Lint level for when multiple versions of the same crate are detected
|
||||
multiple-versions = "warn"
|
||||
# Lint level for when a crate version requirement is `*`
|
||||
wildcards = "allow"
|
||||
# The graph highlighting used when creating dotgraphs for crates
|
||||
# with multiple versions
|
||||
# * lowest-version - The path to the lowest versioned duplicate is highlighted
|
||||
# * simplest-path - The path to the version with the fewest edges is highlighted
|
||||
# * all - Both lowest-version and simplest-path are used
|
||||
highlight = "all"
|
||||
# The default lint level for `default` features for crates that are members of
|
||||
# the workspace that is being checked. This can be overridden by allowing/denying
|
||||
# `default` on a crate-by-crate basis if desired.
|
||||
workspace-default-features = "allow"
|
||||
# The default lint level for `default` features for external crates that are not
|
||||
# members of the workspace. This can be overridden by allowing/denying `default`
|
||||
# on a crate-by-crate basis if desired.
|
||||
external-default-features = "allow"
|
||||
# List of crates that are allowed. Use with care!
|
||||
allow = [
|
||||
#"ansi_term@0.11.0",
|
||||
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" },
|
||||
]
|
||||
# List of crates to deny
|
||||
deny = [
|
||||
#"ansi_term@0.11.0",
|
||||
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" },
|
||||
# Wrapper crates can optionally be specified to allow the crate when it
|
||||
# is a direct dependency of the otherwise banned crate
|
||||
#{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] },
|
||||
]
|
||||
|
||||
# List of features to allow/deny
|
||||
# Each entry the name of a crate and a version range. If version is
|
||||
# not specified, all versions will be matched.
|
||||
#[[bans.features]]
|
||||
#crate = "reqwest"
|
||||
# Features to not allow
|
||||
#deny = ["json"]
|
||||
# Features to allow
|
||||
#allow = [
|
||||
# "rustls",
|
||||
# "__rustls",
|
||||
# "__tls",
|
||||
# "hyper-rustls",
|
||||
# "rustls",
|
||||
# "rustls-pemfile",
|
||||
# "rustls-tls-webpki-roots",
|
||||
# "tokio-rustls",
|
||||
# "webpki-roots",
|
||||
#]
|
||||
# If true, the allowed features must exactly match the enabled feature set. If
|
||||
# this is set there is no point setting `deny`
|
||||
#exact = true
|
||||
|
||||
# Certain crates/versions that will be skipped when doing duplicate detection.
|
||||
skip = [
|
||||
#"ansi_term@0.11.0",
|
||||
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" },
|
||||
]
|
||||
# Similarly to `skip` allows you to skip certain crates during duplicate
|
||||
# detection. Unlike skip, it also includes the entire tree of transitive
|
||||
# dependencies starting at the specified crate, up to a certain depth, which is
|
||||
# by default infinite.
|
||||
skip-tree = [
|
||||
#"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies
|
||||
#{ crate = "ansi_term@0.11.0", depth = 20 },
|
||||
]
|
||||
|
||||
# This section is considered when running `cargo deny check sources`.
|
||||
# More documentation about the 'sources' section can be found here:
|
||||
# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
|
||||
[sources]
|
||||
# Lint level for what to happen when a crate from a crate registry that is not
|
||||
# in the allow list is encountered
|
||||
unknown-registry = "warn"
|
||||
# Lint level for what to happen when a crate from a git repository that is not
|
||||
# in the allow list is encountered
|
||||
unknown-git = "warn"
|
||||
# List of URLs for allowed crate registries. Defaults to the crates.io index
|
||||
# if not specified. If it is specified but empty, no registries are allowed.
|
||||
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
|
||||
# List of URLs for allowed Git repositories
|
||||
allow-git = []
|
||||
|
||||
[sources.allow-org]
|
||||
# github.com organizations to allow git sources for
|
||||
github = []
|
||||
# gitlab.com organizations to allow git sources for
|
||||
gitlab = []
|
||||
# bitbucket.org organizations to allow git sources for
|
||||
bitbucket = []
|
||||
136
flake.lock
generated
Normal file
136
flake.lock
generated
Normal file
@@ -0,0 +1,136 @@
|
||||
{
|
||||
"nodes": {
|
||||
"advisory-db": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1750151065,
|
||||
"narHash": "sha256-il+CAqChFIB82xP6bO43dWlUVs+NlG7a4g8liIP5HcI=",
|
||||
"owner": "rustsec",
|
||||
"repo": "advisory-db",
|
||||
"rev": "7573f55ba337263f61167dbb0ea926cdc7c8eb5d",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "rustsec",
|
||||
"repo": "advisory-db",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"crane": {
|
||||
"locked": {
|
||||
"lastModified": 1750266157,
|
||||
"narHash": "sha256-tL42YoNg9y30u7zAqtoGDNdTyXTi8EALDeCB13FtbQA=",
|
||||
"owner": "ipetkov",
|
||||
"repo": "crane",
|
||||
"rev": "e37c943371b73ed87faf33f7583860f81f1d5a48",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "ipetkov",
|
||||
"repo": "crane",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nix-github-actions": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1737420293,
|
||||
"narHash": "sha256-F1G5ifvqTpJq7fdkT34e/Jy9VCyzd5XfJ9TO8fHhJWE=",
|
||||
"owner": "nix-community",
|
||||
"repo": "nix-github-actions",
|
||||
"rev": "f4158fa080ef4503c8f4c820967d946c2af31ec9",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"repo": "nix-github-actions",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1750506804,
|
||||
"narHash": "sha256-VLFNc4egNjovYVxDGyBYTrvVCgDYgENp5bVi9fPTDYc=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "4206c4cb56751df534751b058295ea61357bbbaa",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nixos",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"advisory-db": "advisory-db",
|
||||
"crane": "crane",
|
||||
"flake-utils": "flake-utils",
|
||||
"nix-github-actions": "nix-github-actions",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"rust-overlay": "rust-overlay"
|
||||
}
|
||||
},
|
||||
"rust-overlay": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1750732748,
|
||||
"narHash": "sha256-HR2b3RHsPeJm+Fb+1ui8nXibgniVj7hBNvUbXEyz0DU=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "4b4494b2ba7e8a8041b2e28320b2ee02c115c75f",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
||||
160
flake.nix
Normal file
160
flake.nix
Normal file
@@ -0,0 +1,160 @@
|
||||
{
|
||||
description = "A simple rust flake using rust-overlay and craneLib";
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
crane.url = "github:ipetkov/crane";
|
||||
nix-github-actions = {
|
||||
url = "github:nix-community/nix-github-actions";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
rust-overlay = {
|
||||
url = "github:oxalica/rust-overlay";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
advisory-db = {
|
||||
url = "github:rustsec/advisory-db";
|
||||
flake = false;
|
||||
};
|
||||
};
|
||||
|
||||
outputs = {
|
||||
self,
|
||||
crane,
|
||||
flake-utils,
|
||||
nixpkgs,
|
||||
rust-overlay,
|
||||
advisory-db,
|
||||
nix-github-actions,
|
||||
...
|
||||
}:
|
||||
flake-utils.lib.eachDefaultSystem (
|
||||
system: let
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
overlays = [
|
||||
rust-overlay.overlays.default
|
||||
];
|
||||
};
|
||||
inherit (pkgs) lib;
|
||||
cargoToml = builtins.fromTOML (builtins.readFile ./Cargo.toml);
|
||||
name = cargoToml.package.name;
|
||||
|
||||
stableToolchain = pkgs.rust-bin.stable.latest.default;
|
||||
stableToolchainWithLLvmTools = stableToolchain.override {
|
||||
extensions = ["rust-src" "llvm-tools"];
|
||||
};
|
||||
stableToolchainWithRustAnalyzer = stableToolchain.override {
|
||||
extensions = ["rust-src" "rust-analyzer"];
|
||||
};
|
||||
craneLib = (crane.mkLib pkgs).overrideToolchain stableToolchain;
|
||||
craneLibLLvmTools = (crane.mkLib pkgs).overrideToolchain stableToolchainWithLLvmTools;
|
||||
|
||||
src = let
|
||||
filterBySuffix = path: exts: lib.any (ext: lib.hasSuffix ext path) exts;
|
||||
sourceFilters = path: type: (craneLib.filterCargoSources path type) || filterBySuffix path [".c" ".h" ".hpp" ".cpp" ".cc"];
|
||||
in
|
||||
lib.cleanSourceWith {
|
||||
filter = sourceFilters;
|
||||
src = ./.;
|
||||
};
|
||||
commonArgs =
|
||||
{
|
||||
inherit src;
|
||||
pname = name;
|
||||
stdenv = p: p.clangStdenv;
|
||||
doCheck = false;
|
||||
# LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib";
|
||||
nativeBuildInputs = with pkgs; [
|
||||
pkg-config
|
||||
# cmake
|
||||
# llvmPackages.libclang.lib
|
||||
];
|
||||
buildInputs = with pkgs;
|
||||
[
|
||||
openssl
|
||||
]
|
||||
++ (lib.optionals pkgs.stdenv.isDarwin [
|
||||
libiconv
|
||||
apple-sdk_13
|
||||
]);
|
||||
}
|
||||
// (lib.optionalAttrs pkgs.stdenv.isLinux {
|
||||
# BINDGEN_EXTRA_CLANG_ARGS = "-I${pkgs.llvmPackages.libclang.lib}/lib/clang/18/include";
|
||||
});
|
||||
cargoArtifacts = craneLib.buildPackage commonArgs;
|
||||
in {
|
||||
checks =
|
||||
{
|
||||
"${name}-clippy" = craneLib.cargoClippy (commonArgs
|
||||
// {
|
||||
inherit cargoArtifacts;
|
||||
cargoClippyExtraArgs = "--all-targets -- --deny warnings";
|
||||
});
|
||||
"${name}-docs" = craneLib.cargoDoc (commonArgs // {inherit cargoArtifacts;});
|
||||
"${name}-fmt" = craneLib.cargoFmt {inherit src;};
|
||||
"${name}-toml-fmt" = craneLib.taploFmt {
|
||||
src = pkgs.lib.sources.sourceFilesBySuffices src [".toml"];
|
||||
};
|
||||
# Audit dependencies
|
||||
"${name}-audit" = craneLib.cargoAudit {
|
||||
inherit src advisory-db;
|
||||
};
|
||||
|
||||
# Audit licenses
|
||||
"${name}-deny" = craneLib.cargoDeny {
|
||||
inherit src;
|
||||
};
|
||||
"${name}-nextest" = craneLib.cargoNextest (commonArgs
|
||||
// {
|
||||
inherit cargoArtifacts;
|
||||
partitions = 1;
|
||||
partitionType = "count";
|
||||
});
|
||||
}
|
||||
// lib.optionalAttrs (!pkgs.stdenv.isDarwin) {
|
||||
"${name}-llvm-cov" = craneLibLLvmTools.cargoLlvmCov (commonArgs // {inherit cargoArtifacts;});
|
||||
};
|
||||
|
||||
packages = let
|
||||
pkg = craneLib.buildPackage (commonArgs
|
||||
// {inherit cargoArtifacts;}
|
||||
// {
|
||||
nativeBuildInputs = with pkgs; [
|
||||
installShellFiles
|
||||
];
|
||||
postInstall = ''
|
||||
installShellCompletion --cmd ${name} \
|
||||
--bash <($out/bin/${name} completions bash) \
|
||||
--fish <($out/bin/${name} completions fish) \
|
||||
--zsh <($out/bin/${name} completions zsh)
|
||||
'';
|
||||
});
|
||||
in {
|
||||
"${name}" = pkg;
|
||||
default = pkg;
|
||||
};
|
||||
|
||||
devShells = {
|
||||
default = pkgs.mkShell.override {stdenv = pkgs.clangStdenv;} (commonArgs
|
||||
// {
|
||||
packages = with pkgs;
|
||||
[
|
||||
stableToolchainWithRustAnalyzer
|
||||
cargo-nextest
|
||||
cargo-deny
|
||||
]
|
||||
++ (lib.optionals pkgs.stdenv.isDarwin [
|
||||
apple-sdk_13
|
||||
]);
|
||||
});
|
||||
};
|
||||
}
|
||||
)
|
||||
// {
|
||||
githubActions = nix-github-actions.lib.mkGithubMatrix {
|
||||
checks = nixpkgs.lib.getAttrs ["x86_64-linux"] self.checks;
|
||||
};
|
||||
};
|
||||
}
|
||||
1
sonarr.json
Normal file
1
sonarr.json
Normal file
File diff suppressed because one or more lines are too long
530
src/api.rs
Normal file
530
src/api.rs
Normal file
@@ -0,0 +1,530 @@
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum ApiError {
|
||||
#[error("HTTP request failed: {0}")]
|
||||
Request(#[from] reqwest::Error),
|
||||
#[error("Serialization error: {0}")]
|
||||
Serialization(#[from] serde_json::Error),
|
||||
#[error("API error: {message}")]
|
||||
Api { message: String },
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, ApiError>;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SonarrClient {
|
||||
client: Client,
|
||||
base_url: String,
|
||||
api_key: String,
|
||||
}
|
||||
|
||||
impl SonarrClient {
|
||||
pub fn new(base_url: String, api_key: String) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
base_url: base_url.trim_end_matches('/').to_string(),
|
||||
api_key,
|
||||
}
|
||||
}
|
||||
|
||||
async fn get<T: for<'de> Deserialize<'de>>(&self, endpoint: &str) -> Result<T> {
|
||||
let url = format!("{}/api/v3{}", self.base_url, endpoint);
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header("X-Api-Key", &self.api_key)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(ApiError::Api {
|
||||
message: format!("HTTP {}: {}", response.status(), response.text().await?),
|
||||
});
|
||||
}
|
||||
|
||||
let text = response.text().await?;
|
||||
serde_json::from_str(&text).map_err(ApiError::from)
|
||||
}
|
||||
|
||||
async fn post<T: Serialize, R: for<'de> Deserialize<'de>>(
|
||||
&self,
|
||||
endpoint: &str,
|
||||
data: &T,
|
||||
) -> Result<R> {
|
||||
let url = format!("{}/api/v3{}", self.base_url, endpoint);
|
||||
let response = self
|
||||
.client
|
||||
.post(&url)
|
||||
.header("X-Api-Key", &self.api_key)
|
||||
.json(data)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(ApiError::Api {
|
||||
message: format!("HTTP {}: {}", response.status(), response.text().await?),
|
||||
});
|
||||
}
|
||||
|
||||
let text = response.text().await?;
|
||||
serde_json::from_str(&text).map_err(ApiError::from)
|
||||
}
|
||||
|
||||
pub async fn get_system_status(&self) -> Result<SystemStatus> {
|
||||
self.get("/system/status").await
|
||||
}
|
||||
|
||||
pub async fn get_series(&self) -> Result<Vec<Series>> {
|
||||
self.get("/series").await
|
||||
}
|
||||
|
||||
pub async fn get_series_by_id(&self, id: u32) -> Result<Series> {
|
||||
self.get(&format!("/series/{}", id)).await
|
||||
}
|
||||
|
||||
pub async fn get_episodes(
|
||||
&self,
|
||||
series_id: Option<u32>,
|
||||
season_number: Option<u32>,
|
||||
) -> Result<Vec<Episode>> {
|
||||
let mut query = Vec::new();
|
||||
if let Some(id) = series_id {
|
||||
query.push(format!("seriesId={}", id));
|
||||
}
|
||||
if let Some(season) = season_number {
|
||||
query.push(format!("seasonNumber={}", season));
|
||||
}
|
||||
|
||||
let query_string = if query.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("?{}", query.join("&"))
|
||||
};
|
||||
|
||||
self.get(&format!("/episode{}", query_string)).await
|
||||
}
|
||||
|
||||
pub async fn get_calendar(
|
||||
&self,
|
||||
start: Option<&str>,
|
||||
end: Option<&str>,
|
||||
) -> Result<Vec<Episode>> {
|
||||
let mut query = Vec::new();
|
||||
if let Some(start_date) = start {
|
||||
query.push(format!("start={}", start_date));
|
||||
}
|
||||
if let Some(end_date) = end {
|
||||
query.push(format!("end={}", end_date));
|
||||
}
|
||||
|
||||
let query_string = if query.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("?{}", query.join("&"))
|
||||
};
|
||||
|
||||
self.get(&format!("/calendar{}", query_string)).await
|
||||
}
|
||||
|
||||
/// Fetch the current download queue (`/queue`, paged resource).
pub async fn get_queue(&self) -> Result<QueuePagingResource> {
    self.get("/queue").await
}
|
||||
|
||||
/// Fetch event history (`/history`, paged resource).
pub async fn get_history(&self) -> Result<HistoryPagingResource> {
    self.get("/history").await
}
|
||||
|
||||
/// Fetch wanted-but-missing episodes (`/wanted/missing`, paged resource).
pub async fn get_missing_episodes(&self) -> Result<EpisodePagingResource> {
    self.get("/wanted/missing").await
}
|
||||
|
||||
/// Fetch server health check results (`/health`).
pub async fn get_health(&self) -> Result<Vec<HealthResource>> {
    self.get("/health").await
}
|
||||
|
||||
pub async fn search_series(&self, term: &str) -> Result<Vec<Series>> {
|
||||
self.get(&format!(
|
||||
"/series/lookup?term={}",
|
||||
urlencoding::encode(term)
|
||||
))
|
||||
.await
|
||||
}
|
||||
|
||||
/// Add a series by POSTing it to `/series`; returns the server's response
/// deserialized back into a `Series`.
pub async fn add_series(&self, series: &Series) -> Result<Series> {
    self.post("/series", series).await
}
|
||||
}
|
||||
|
||||
/// Response model for Sonarr's `/system/status` endpoint.
///
/// Mirrors the upstream API schema; `rename_all = "camelCase"` maps these
/// snake_case field names onto the JSON keys.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SystemStatus {
    pub app_name: Option<String>,
    pub instance_name: Option<String>,
    pub version: Option<String>,
    pub build_time: chrono::DateTime<chrono::Utc>,
    pub is_debug: bool,
    pub is_production: bool,
    pub is_admin: bool,
    pub is_user_interactive: bool,
    pub startup_path: Option<String>,
    pub app_data: Option<String>,
    pub os_name: Option<String>,
    pub os_version: Option<String>,
    pub is_net_core: bool,
    pub is_linux: bool,
    pub is_osx: bool,
    pub is_windows: bool,
    pub is_docker: bool,
    pub mode: String,
    pub branch: Option<String>,
    pub authentication: String,
    pub sqlite_version: Option<String>,
    pub migration_version: i32,
    pub url_base: Option<String>,
    pub runtime_version: Option<String>,
    pub runtime_name: Option<String>,
    pub start_time: chrono::DateTime<chrono::Utc>,
    pub package_version: Option<String>,
    pub package_author: Option<String>,
    pub package_update_mechanism: String,
    pub package_update_mechanism_message: Option<String>,
    pub database_version: Option<String>,
    pub database_type: String,
}
|
||||
|
||||
/// A TV series as returned by the Sonarr v3 `/series` endpoints.
///
/// Field names mirror the upstream camelCase JSON schema. Optional fields are
/// omitted or null in some API responses.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Series {
    pub id: u32,
    pub title: Option<String>,
    pub alternate_titles: Option<Vec<AlternateTitle>>,
    pub sort_title: Option<String>,
    pub status: String,
    pub ended: Option<bool>,
    pub profile_name: Option<String>,
    pub overview: Option<String>,
    pub next_airing: Option<chrono::DateTime<chrono::Utc>>,
    pub previous_airing: Option<chrono::DateTime<chrono::Utc>>,
    pub network: Option<String>,
    pub air_time: Option<String>,
    pub images: Option<Vec<MediaCover>>,
    pub original_language: Option<Language>,
    pub remote_poster: Option<String>,
    pub seasons: Option<Vec<Season>>,
    pub year: i32,
    pub path: Option<String>,
    pub quality_profile_id: u32,
    pub season_folder: bool,
    pub monitored: bool,
    pub monitor_new_items: String,
    pub use_scene_numbering: bool,
    pub runtime: i32,
    // External database identifiers (TheTVDB, TVRage, TVmaze, TMDB, IMDb).
    pub tvdb_id: u32,
    pub tv_rage_id: u32,
    pub tv_maze_id: u32,
    pub tmdb_id: u32,
    pub first_aired: Option<chrono::DateTime<chrono::Utc>>,
    pub last_aired: Option<chrono::DateTime<chrono::Utc>>,
    pub series_type: String,
    pub clean_title: Option<String>,
    pub imdb_id: Option<String>,
    pub title_slug: Option<String>,
    pub root_folder_path: Option<String>,
    pub folder: Option<String>,
    pub certification: Option<String>,
    pub genres: Option<Vec<String>>,
    pub tags: Option<Vec<u32>>,
    pub added: chrono::DateTime<chrono::Utc>,
    // Only meaningful when creating a series via POST /series.
    pub add_options: Option<AddSeriesOptions>,
    pub ratings: Option<Ratings>,
    pub statistics: Option<SeriesStatistics>,
    pub episodes_changed: Option<bool>,
}
|
||||
|
||||
/// An alternate title for a series (e.g. a scene or regional title).
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct AlternateTitle {
    pub title: Option<String>,
    pub season_number: Option<i32>,
    pub scene_season_number: Option<i32>,
    pub scene_origin: Option<String>,
    pub comment: Option<String>,
}
|
||||
|
||||
/// Artwork attached to a series, season or episode (poster, banner, …).
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct MediaCover {
    pub cover_type: String,
    pub url: Option<String>,
    pub remote_url: Option<String>,
}
|
||||
|
||||
/// A language entry as used across Sonarr resources.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Language {
    pub id: u32,
    pub name: Option<String>,
}
|
||||
|
||||
/// A single season within a [`Series`].
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Season {
    pub season_number: i32,
    pub monitored: bool,
    pub statistics: Option<SeasonStatistics>,
    pub images: Option<Vec<MediaCover>>,
}
|
||||
|
||||
/// Options accepted by the API when adding a new series (POST `/series`).
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct AddSeriesOptions {
    pub ignore_episodes_with_files: bool,
    pub ignore_episodes_without_files: bool,
    pub monitor: String,
    pub search_for_missing_episodes: bool,
    pub search_for_cutoff_unmet_episodes: bool,
}
|
||||
|
||||
/// Community rating attached to a series.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Ratings {
    pub votes: i32,
    pub value: f64,
}
|
||||
|
||||
/// Aggregate statistics for a whole series.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SeriesStatistics {
    pub season_count: i32,
    pub episode_file_count: i32,
    pub episode_count: i32,
    pub total_episode_count: i32,
    // Bytes on disk — presumably; TODO confirm against the API docs.
    pub size_on_disk: i64,
    pub release_groups: Option<Vec<String>>,
    pub percent_of_episodes: f64,
}
|
||||
|
||||
/// Aggregate statistics for a single season.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SeasonStatistics {
    pub next_airing: Option<chrono::DateTime<chrono::Utc>>,
    pub previous_airing: Option<chrono::DateTime<chrono::Utc>>,
    pub episode_file_count: i32,
    pub episode_count: i32,
    pub total_episode_count: i32,
    pub size_on_disk: i64,
    pub release_groups: Option<Vec<String>>,
    pub percent_of_episodes: f64,
}
|
||||
|
||||
/// A single episode as returned by `/episode`, `/calendar` and related
/// endpoints. Field names mirror the upstream camelCase JSON schema.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Episode {
    pub id: u32,
    pub series_id: u32,
    pub tvdb_id: u32,
    pub episode_file_id: u32,
    pub season_number: i32,
    pub episode_number: i32,
    pub title: Option<String>,
    // Plain date string, distinct from the timestamped air_date_utc below.
    pub air_date: Option<String>,
    pub air_date_utc: Option<chrono::DateTime<chrono::Utc>>,
    pub last_search_time: Option<chrono::DateTime<chrono::Utc>>,
    pub runtime: i32,
    pub finale_type: Option<String>,
    pub overview: Option<String>,
    pub episode_file: Option<EpisodeFile>,
    pub has_file: bool,
    pub monitored: bool,
    // Scene-numbering variants used by release groups.
    pub absolute_episode_number: Option<i32>,
    pub scene_absolute_episode_number: Option<i32>,
    pub scene_episode_number: Option<i32>,
    pub scene_season_number: Option<i32>,
    pub unverified_scene_numbering: bool,
    pub end_time: Option<chrono::DateTime<chrono::Utc>>,
    pub grab_date: Option<chrono::DateTime<chrono::Utc>>,
    pub series: Option<Series>,
    pub images: Option<Vec<MediaCover>>,
}
|
||||
|
||||
/// The media file on disk associated with an [`Episode`].
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct EpisodeFile {
    pub id: u32,
    pub series_id: u32,
    pub season_number: i32,
    pub relative_path: Option<String>,
    pub path: Option<String>,
    pub size: i64,
    pub date_added: chrono::DateTime<chrono::Utc>,
    pub scene_name: Option<String>,
    pub release_group: Option<String>,
    pub languages: Option<Vec<Language>>,
    pub quality: Option<Quality>,
    pub custom_formats: Option<Vec<CustomFormat>>,
    pub custom_format_score: i32,
    pub indexer_flags: Option<i32>,
    pub release_type: Option<String>,
    pub media_info: Option<MediaInfo>,
    pub quality_cutoff_not_met: bool,
}
|
||||
|
||||
/// A quality assignment: the quality definition plus its revision info.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Quality {
    pub quality: QualityDefinition,
    pub revision: Revision,
}
|
||||
|
||||
/// A named quality level (e.g. source and vertical resolution).
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct QualityDefinition {
    pub id: u32,
    pub name: Option<String>,
    pub source: String,
    pub resolution: i32,
}
|
||||
|
||||
/// Revision metadata for a release (proper/repack tracking).
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Revision {
    pub version: i32,
    pub real: i32,
    pub is_repack: bool,
}
|
||||
|
||||
/// A user-defined custom format matched against releases.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CustomFormat {
    pub id: u32,
    pub name: Option<String>,
    pub include_custom_format_when_renaming: Option<bool>,
    // Specification schema varies per format type, so it is kept as raw JSON.
    pub specifications: Option<Vec<HashMap<String, serde_json::Value>>>,
}
|
||||
|
||||
/// Technical media metadata extracted from an episode file
/// (codecs, bitrates, resolution, subtitles, …).
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct MediaInfo {
    pub id: u32,
    pub audio_bitrate: i64,
    pub audio_channels: f64,
    pub audio_codec: Option<String>,
    pub audio_languages: Option<String>,
    pub audio_stream_count: i32,
    pub video_bit_depth: i32,
    pub video_bitrate: i64,
    pub video_codec: Option<String>,
    pub video_fps: f64,
    pub video_dynamic_range: Option<String>,
    pub video_dynamic_range_type: Option<String>,
    pub resolution: Option<String>,
    pub run_time: Option<String>,
    pub scan_type: Option<String>,
    pub subtitles: Option<String>,
}
|
||||
|
||||
/// Paged envelope returned by `/queue`.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct QueuePagingResource {
    pub page: i32,
    pub page_size: i32,
    pub sort_key: Option<String>,
    pub sort_direction: String,
    pub total_records: i32,
    pub records: Vec<QueueItem>,
}
|
||||
|
||||
/// A single in-progress download in the queue.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct QueueItem {
    pub id: u32,
    pub series_id: Option<u32>,
    pub episode_id: Option<u32>,
    pub season_number: Option<i32>,
    pub series: Option<Series>,
    pub episode: Option<Episode>,
    pub languages: Option<Vec<Language>>,
    pub quality: Option<Quality>,
    pub custom_formats: Option<Vec<CustomFormat>>,
    pub custom_format_score: i32,
    pub size: f64,
    pub title: Option<String>,
    pub estimated_completion_time: Option<chrono::DateTime<chrono::Utc>>,
    pub added: Option<chrono::DateTime<chrono::Utc>>,
    pub status: String,
    pub tracked_download_status: Option<String>,
    pub tracked_download_state: Option<String>,
    pub status_messages: Option<Vec<StatusMessage>>,
    pub error_message: Option<String>,
    pub download_id: Option<String>,
    pub protocol: String,
    pub download_client: Option<String>,
    pub download_client_has_post_import_category: bool,
    pub indexer: Option<String>,
    pub output_path: Option<String>,
    pub episode_has_file: bool,
}
|
||||
|
||||
/// A titled group of status messages attached to a queue item.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct StatusMessage {
    pub title: Option<String>,
    pub messages: Option<Vec<String>>,
}
|
||||
|
||||
/// Paged envelope returned by `/history`.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct HistoryPagingResource {
    pub page: i32,
    pub page_size: i32,
    pub sort_key: Option<String>,
    pub sort_direction: String,
    pub total_records: i32,
    pub records: Vec<HistoryItem>,
}
|
||||
|
||||
/// A single history event (grab, import, delete, …) for an episode.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct HistoryItem {
    pub id: u32,
    pub episode_id: u32,
    pub series_id: u32,
    pub source_title: Option<String>,
    pub languages: Option<Vec<Language>>,
    pub quality: Option<Quality>,
    pub custom_formats: Option<Vec<CustomFormat>>,
    pub custom_format_score: i32,
    pub quality_cutoff_not_met: bool,
    pub date: chrono::DateTime<chrono::Utc>,
    pub download_id: Option<String>,
    pub event_type: String,
    // Free-form event payload whose keys depend on event_type.
    pub data: Option<HashMap<String, String>>,
    pub episode: Option<Episode>,
    pub series: Option<Series>,
}
|
||||
|
||||
/// Paged envelope of episodes, used by `/wanted/missing`.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct EpisodePagingResource {
    pub page: i32,
    pub page_size: i32,
    pub sort_key: Option<String>,
    pub sort_direction: String,
    pub total_records: i32,
    pub records: Vec<Episode>,
}
|
||||
|
||||
/// A health-check result from `/health`.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct HealthResource {
    pub id: u32,
    pub source: Option<String>,
    // "type" is a Rust keyword, so the JSON key is mapped explicitly.
    #[serde(rename = "type")]
    pub health_type: String,
    pub message: Option<String>,
    pub wiki_url: Option<String>,
}
|
||||
36
src/cli.rs
Normal file
36
src/cli.rs
Normal file
@@ -0,0 +1,36 @@
|
||||
// Top-level command-line interface definition, parsed via clap's derive API.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on
// clap items would become part of the generated --help output.
#[derive(Debug, clap::Parser)]
pub struct Cli {
    // The subcommand the user invoked.
    #[clap(subcommand)]
    pub cmd: SubCommand,
}
|
||||
|
||||
// The set of available CLI subcommands.
// (Plain `//` comments only: `///` would alter clap's generated help text.)
#[derive(Debug, clap::Subcommand)]
pub enum SubCommand {
    // `add` — add an entry by name.
    #[clap(name = "add")]
    Add(Add),
    // `list` — list entries.
    #[clap(name = "list")]
    List(List),
    // `completions` — print a shell-completion script for the given shell.
    #[clap(name = "completions")]
    Completions { shell: clap_complete::Shell },
}
|
||||
|
||||
// Arguments for the `add` subcommand.
#[derive(Debug, clap::Args)]
pub struct Add {
    // Name of the entry to add (`-n` / `--name`).
    #[clap(short, long)]
    pub name: String,
}
|
||||
|
||||
// Arguments for the `list` subcommand (currently takes none).
#[derive(Debug, clap::Args)]
pub struct List {}
|
||||
|
||||
impl Cli {
|
||||
pub fn completions(shell: clap_complete::Shell) {
|
||||
let mut command = <Cli as clap::CommandFactory>::command();
|
||||
clap_complete::generate(
|
||||
shell,
|
||||
&mut command,
|
||||
env!("CARGO_BIN_NAME"),
|
||||
&mut std::io::stdout(),
|
||||
);
|
||||
}
|
||||
}
|
||||
6
src/errors.rs
Normal file
6
src/errors.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
pub use error_stack::{Report, ResultExt};

/// Opaque application-level error context used as the root of
/// `error_stack` reports throughout the crate.
#[derive(Debug, thiserror::Error)]
#[error("An error occurred")]
pub struct Error;

/// Crate-wide result alias; defaults the error type to an
/// `error_stack::Report` wrapping [`Error`].
pub type Result<T, E = error_stack::Report<Error>> = core::result::Result<T, E>;
|
||||
3
src/lib.rs
Normal file
3
src/lib.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
//! Library crate root: exposes the error types publicly and keeps the
//! Sonarr API client module crate-private.
pub mod errors;
use errors::*;
mod api;
|
||||
28
src/main.rs
Normal file
28
src/main.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
mod api;
|
||||
mod cli;
|
||||
mod errors;
|
||||
mod tui;
|
||||
use errors::*;
|
||||
|
||||
use crate::api::SonarrClient;
|
||||
pub fn main() -> Result<()> {
|
||||
// let args = <cli::Cli as clap::Parser>::parse();
|
||||
// match args.cmd {
|
||||
// cli::SubCommand::Add(add) => {
|
||||
// println!("Add: {:?}", add);
|
||||
// }
|
||||
// cli::SubCommand::List(list) => {
|
||||
// println!("List: {:?}", list);
|
||||
// }
|
||||
// cli::SubCommand::Completions { shell } => {
|
||||
// cli::Cli::completions(shell);
|
||||
// }
|
||||
// }
|
||||
|
||||
let client = SonarrClient::new(
|
||||
"https://sonarr.tsuba.darksailor.dev".into(),
|
||||
"1a47401731bf44ae9787dfcd4bab402f".into(),
|
||||
);
|
||||
tui::run_app(client);
|
||||
Ok(())
|
||||
}
|
||||
1141
src/tui.rs
Normal file
1141
src/tui.rs
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user