feat: Initial commit

This commit is contained in:
uttarayan21
2025-07-14 16:22:26 +05:30
commit 1d91e20db5
25 changed files with 5635 additions and 0 deletions

1
.envrc Normal file
View File

@@ -0,0 +1 @@
use flake

62
.github/workflows/build.yaml vendored Normal file
View File

@@ -0,0 +1,62 @@
name: build
on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

env:
  CARGO_TERM_COLOR: always

jobs:
  # Evaluates the flake once and emits a JSON build matrix
  # (one entry per check attribute) consumed by checks-build.
  checks-matrix:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - uses: actions/checkout@v4
      - uses: DeterminateSystems/nix-installer-action@main
      - uses: DeterminateSystems/magic-nix-cache-action@main
      - id: set-matrix
        name: Generate Nix Matrix
        run: |
          set -Eeu
          matrix="$(nix eval --json '.#githubActions.matrix')"
          echo "matrix=$matrix" >> "$GITHUB_OUTPUT"

  # Builds every attribute from the generated matrix on its matching OS.
  checks-build:
    needs: checks-matrix
    runs-on: ${{ matrix.os }}
    strategy:
      matrix: ${{ fromJSON(needs.checks-matrix.outputs.matrix) }}
    steps:
      - uses: actions/checkout@v4
      - uses: DeterminateSystems/nix-installer-action@main
      - uses: DeterminateSystems/magic-nix-cache-action@main
      - run: nix build -L '.#${{ matrix.attr }}'

  # Builds the llvm-cov check and uploads the report to Codecov.
  # NOTE(review): the check is named `hello-llvm-cov` while the crate is
  # `detector` — confirm the flake really exposes this attribute.
  codecov:
    runs-on: ubuntu-latest
    permissions:
      id-token: "write"
      contents: "read"
    steps:
      - uses: actions/checkout@v4
      - uses: DeterminateSystems/nix-installer-action@main
      - uses: DeterminateSystems/magic-nix-cache-action@main
      - name: Run codecov
        run: nix build .#checks.x86_64-linux.hello-llvm-cov
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v4.0.1
        with:
          flags: unittests
          name: codecov-hello
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./result
          verbose: true

38
.github/workflows/docs.yaml vendored Normal file
View File

@@ -0,0 +1,38 @@
name: docs
on:
  push:
    branches: [ master ]

env:
  CARGO_TERM_COLOR: always

jobs:
  docs:
    runs-on: ubuntu-latest
    # id-token + pages are required by actions/deploy-pages.
    permissions:
      id-token: "write"
      contents: "read"
      pages: "write"
    steps:
      - uses: actions/checkout@v4
      - uses: DeterminateSystems/nix-installer-action@main
      - uses: DeterminateSystems/magic-nix-cache-action@main
      - uses: DeterminateSystems/flake-checker-action@main
      # Builds the docs check; output symlinked at ./result.
      # NOTE(review): attribute is `hello-docs` while the crate is `detector`
      # — confirm against the flake.
      - name: Generate docs
        run: nix build .#checks.x86_64-linux.hello-docs
      - name: Setup Pages
        uses: actions/configure-pages@v5
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: result/share/doc
      - name: Deploy to gh-pages
        id: deployment
        uses: actions/deploy-pages@v4

4
.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
/result
/target
.direnv
*.jpg

2559
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

44
Cargo.toml Normal file
View File

@@ -0,0 +1,44 @@
[package]
name = "detector"
version = "0.1.0"
edition = "2024"
license = "MIT"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
# [patch."https://github.com/aftershootco/mnn-rs".mnn]
# path = "/Users/fs0c131y/Projects/aftershoot/mnn-rs"
# [patch."https://github.com/aftershootco/mnn-rs".mnn-sys]
# path = "/Users/fs0c131y/Projects/aftershoot/mnn-rs/mnn-sys"
# [patch."https://github.com/aftershootco/mnn-rs".mnn-sync]
# path = "/Users/fs0c131y/Projects/aftershoot/mnn-rs/mnn-sync"
# [patch."https://github.com/aftershootco/mnn-rs".mnn-bridge]
# path = "/Users/fs0c131y/Projects/aftershoot/mnn-rs/mnn-bridge"
[dependencies]
clap = { version = "4.5", features = ["derive"] }
clap_complete = "4.5"
error-stack = "0.5"
image = "0.25.6"
linfa = "0.7.1"
# NOTE(review): the mnn* dependencies below point at absolute local paths
# under /Users/fs0c131y, so this manifest only builds on that one machine.
# The commented-out `git` form (next line) looks like the portable variant —
# restore it (or use the [patch] section above) before CI or anyone else
# builds this crate.
# mnn = { git = "https://github.com/aftershootco/mnn-rs", version = "0.2.0", features = [
mnn = { path = "/Users/fs0c131y/Projects/aftershoot/mnn-rs", version = "0.2.0", features = [
"metal",
"tracing",
] }
mnn-bridge = { path = "/Users/fs0c131y/Projects/aftershoot/mnn-rs/mnn-bridge", version = "0.1.0", features = [
"ndarray",
] }
mnn-sync = { path = "/Users/fs0c131y/Projects/aftershoot/mnn-rs/mnn-sync", version = "0.1.0", features = [
"tracing",
] }
ndarray = "0.16.1"
ndarray-image = { version = "0.1.0", path = "ndarray-image" }
rusqlite = { version = "0.36.0", features = ["modern-full"] }
thiserror = "2.0"
tokio = "1.43.1"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Keep debug info in release builds (useful for profiling / symbolized traces).
[profile.release]
debug = true

13
bbox/Cargo.toml Normal file
View File

@@ -0,0 +1,13 @@
[package]
name = "bbox"
version = "0.1.0"
edition = "2024"
[dependencies]
ndarray = "0.16"
num = "0.4.3"
# serde is optional so downstream users can opt out of serialization support.
serde = { version = "1", features = ["derive"], optional = true }
[features]
# Enables serde::Serialize/Deserialize support on BBox/Point.
serde = ["dep:serde"]
# NOTE(review): serde is on by default, and bbox/src/lib.rs currently derives
# the serde traits unconditionally — building with --no-default-features will
# fail unless those derives are gated behind cfg_attr(feature = "serde", ...).
default = ["serde"]

708
bbox/src/lib.rs Normal file
View File

@@ -0,0 +1,708 @@
pub mod traits;
/// A bounding box of co-ordinates whose origin is at the top-left corner.
///
/// `width` and `height` extend rightwards and downwards, so the bottom-right
/// corner is `(x + width, y + height)`.
///
/// The serde derives are feature-gated: `bbox/Cargo.toml` declares serde as
/// an optional dependency (`serde = ["dep:serde"]`), so deriving it
/// unconditionally broke `--no-default-features` builds.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[non_exhaustive]
pub struct BBox<T = f32> {
    /// Horizontal coordinate of the top-left corner.
    pub x: T,
    /// Vertical coordinate of the top-left corner.
    pub y: T,
    /// Extent along the x axis.
    pub width: T,
    /// Extent along the y axis.
    pub height: T,
}
impl<T> From<[T; 4]> for BBox<T> {
    /// Builds a box from an `[x, y, width, height]` array.
    fn from(parts: [T; 4]) -> Self {
        let [x, y, width, height] = parts;
        Self {
            x,
            y,
            width,
            height,
        }
    }
}
impl<T: Copy> BBox<T> {
    /// Creates a box from its top-left corner and its extents.
    pub fn new(x: T, y: T, width: T, height: T) -> Self {
        Self {
            x,
            y,
            width,
            height,
        }
    }

    /// Casts the internal values to another type using [as] keyword
    /// semantics (numeric casts truncate / wrap exactly like `as` does).
    pub fn cast<T2>(self) -> BBox<T2>
    where
        T: num::cast::AsPrimitive<T2>,
        T2: Copy + 'static,
    {
        BBox {
            x: self.x.as_(),
            y: self.y.as_(),
            width: self.width.as_(),
            height: self.height.as_(),
        }
    }

    /// Clamps all four components independently to the given min and max.
    ///
    /// Note: `width`/`height` are clamped to the same range as the
    /// coordinates; to confine a box to a region use [`BBox::clamp_box`].
    pub fn clamp(&self, min: T, max: T) -> Self
    where
        T: std::cmp::PartialOrd,
    {
        Self {
            x: num::clamp(self.x, min, max),
            y: num::clamp(self.y, min, max),
            width: num::clamp(self.width, min, max),
            height: num::clamp(self.height, min, max),
        }
    }

    /// Confines this box to `bbox` by clamping both corners into `bbox` and
    /// rebuilding the box from the clamped corners.
    pub fn clamp_box(&self, bbox: BBox<T>) -> Self
    where
        T: std::cmp::PartialOrd,
        T: num::Zero,
        T: core::ops::Add<Output = T>,
        T: core::ops::Sub<Output = T>,
    {
        let x1 = num::clamp(self.x1(), bbox.x1(), bbox.x2());
        let y1 = num::clamp(self.y1(), bbox.y1(), bbox.y2());
        let x2 = num::clamp(self.x2(), bbox.x1(), bbox.x2());
        let y2 = num::clamp(self.y2(), bbox.y1(), bbox.y2());
        Self::new_xyxy(x1, y1, x2, y2)
    }

    /// Converts absolute coordinates to relative ones by dividing the
    /// x-components by `width` and the y-components by `height`.
    pub fn normalize(&self, width: T, height: T) -> Self
    where
        T: core::ops::Div<Output = T> + Copy,
    {
        Self {
            x: self.x / width,
            y: self.y / height,
            width: self.width / width,
            height: self.height / height,
        }
    }

    /// Normalize after casting to float, so integer boxes do not suffer
    /// integer division.
    pub fn normalize_f64(&self, width: T, height: T) -> BBox<f64>
    where
        T: core::ops::Div<Output = T> + Copy,
        T: num::cast::AsPrimitive<f64>,
    {
        BBox {
            x: self.x.as_() / width.as_(),
            y: self.y.as_() / height.as_(),
            width: self.width.as_() / width.as_(),
            height: self.height.as_() / height.as_(),
        }
    }

    /// Inverse of [`BBox::normalize`]: scales relative coordinates back to
    /// absolute ones.
    pub fn denormalize(&self, width: T, height: T) -> Self
    where
        T: core::ops::Mul<Output = T> + Copy,
    {
        Self {
            x: self.x * width,
            y: self.y * height,
            width: self.width * width,
            height: self.height * height,
        }
    }

    /// Vertical extent of the box.
    pub fn height(&self) -> T {
        self.height
    }

    /// Horizontal extent of the box.
    pub fn width(&self) -> T {
        self.width
    }

    /// Grows the box by `padding` on all four sides.
    pub fn padding(&self, padding: T) -> Self
    where
        T: core::ops::Add<Output = T> + core::ops::Sub<Output = T> + Copy,
    {
        Self {
            x: self.x - padding,
            y: self.y - padding,
            width: self.width + padding + padding,
            height: self.height + padding + padding,
        }
    }

    /// Grows the box by `padding` above and below only.
    pub fn padding_height(&self, padding: T) -> Self
    where
        T: core::ops::Add<Output = T> + core::ops::Sub<Output = T> + Copy,
    {
        Self {
            x: self.x,
            y: self.y - padding,
            width: self.width,
            height: self.height + padding + padding,
        }
    }

    /// Grows the box by `padding` left and right only.
    pub fn padding_width(&self, padding: T) -> Self
    where
        T: core::ops::Add<Output = T> + core::ops::Sub<Output = T> + Copy,
    {
        Self {
            x: self.x - padding,
            y: self.y,
            width: self.width + padding + padding,
            height: self.height,
        }
    }

    /// Enlarges / shrinks the bounding box by a factor while keeping the
    /// center point and the aspect ratio fixed.
    pub fn scale(&self, factor: T) -> Self
    where
        T: core::ops::Mul<Output = T>,
        T: core::ops::Sub<Output = T>,
        T: core::ops::Add<Output = T>,
        T: core::ops::Div<Output = T>,
        T: num::One + Copy,
    {
        // `2` expressed generically: there is no `T: From<u8>` bound.
        let two = num::one::<T>() + num::one::<T>();
        let width = self.width * factor;
        let height = self.height * factor;
        let width_inc = width - self.width;
        let height_inc = height - self.height;
        Self {
            x: self.x - width_inc / two,
            y: self.y - height_inc / two,
            width,
            height,
        }
    }

    /// Like [`BBox::scale`] but only along the x axis (center x fixed).
    pub fn scale_x(&self, factor: T) -> Self
    where
        T: core::ops::Mul<Output = T>
            + core::ops::Sub<Output = T>
            + core::ops::Add<Output = T>
            + core::ops::Div<Output = T>
            + num::One
            + Copy,
    {
        let two = num::one::<T>() + num::one::<T>();
        let width = self.width * factor;
        let width_inc = width - self.width;
        Self {
            x: self.x - width_inc / two,
            y: self.y,
            width,
            height: self.height,
        }
    }

    /// Like [`BBox::scale`] but only along the y axis (center y fixed).
    pub fn scale_y(&self, factor: T) -> Self
    where
        T: core::ops::Mul<Output = T>
            + core::ops::Sub<Output = T>
            + core::ops::Add<Output = T>
            + core::ops::Div<Output = T>
            + num::One
            + Copy,
    {
        let two = num::one::<T>() + num::one::<T>();
        let height = self.height * factor;
        let height_inc = height - self.height;
        Self {
            x: self.x,
            y: self.y - height_inc / two,
            width: self.width,
            height,
        }
    }

    /// Moves the box by `offset` without changing its size.
    pub fn offset(&self, offset: Point<T>) -> Self
    where
        T: core::ops::Add<Output = T> + Copy,
    {
        Self {
            x: self.x + offset.x,
            y: self.y + offset.y,
            width: self.width,
            height: self.height,
        }
    }

    /// Translate the bounding box by the top-left corner of `bbox`,
    /// if they are in the same scale.
    pub fn translate(&self, bbox: Self) -> Self
    where
        T: core::ops::Add<Output = T> + Copy,
    {
        Self {
            x: self.x + bbox.x,
            y: self.y + bbox.y,
            width: self.width,
            height: self.height,
        }
    }

    /// Returns a copy whose top-left corner is moved to `top_left`, keeping
    /// the size unchanged.
    pub fn with_top_left(&self, top_left: Point<T>) -> Self {
        Self {
            x: top_left.x,
            y: top_left.y,
            width: self.width,
            height: self.height,
        }
    }

    /// Center point of the box.
    pub fn center(&self) -> Point<T>
    where
        T: core::ops::Add<Output = T> + core::ops::Div<Output = T> + Copy,
        T: num::One,
    {
        let two = T::one() + T::one();
        Point::new(self.x + self.width / two, self.y + self.height / two)
    }

    /// Area of the box (`width * height`).
    pub fn area(&self) -> T
    where
        T: core::ops::Mul<Output = T> + Copy,
    {
        self.width * self.height
    }

    /// Top-left corner; corresponds to `(x1, y1)`.
    pub fn top_left(&self) -> Point<T> {
        Point::new(self.x, self.y)
    }

    /// Top-right corner.
    pub fn top_right(&self) -> Point<T>
    where
        T: core::ops::Add<Output = T> + Copy,
    {
        Point::new(self.x + self.width, self.y)
    }

    /// Bottom-left corner.
    pub fn bottom_left(&self) -> Point<T>
    where
        T: core::ops::Add<Output = T> + Copy,
    {
        Point::new(self.x, self.y + self.height)
    }

    /// Bottom-right corner; corresponds to `(x2, y2)`.
    pub fn bottom_right(&self) -> Point<T>
    where
        T: core::ops::Add<Output = T> + Copy,
    {
        Point::new(self.x + self.width, self.y + self.height)
    }

    /// Left edge coordinate.
    pub const fn x1(&self) -> T {
        self.x
    }

    /// Top edge coordinate.
    pub const fn y1(&self) -> T {
        self.y
    }

    /// Right edge coordinate (`x + width`).
    pub fn x2(&self) -> T
    where
        T: core::ops::Add<Output = T> + Copy,
    {
        self.x + self.width
    }

    /// Bottom edge coordinate (`y + height`).
    pub fn y2(&self) -> T
    where
        T: core::ops::Add<Output = T> + Copy,
    {
        self.y + self.height
    }

    /// Area of the intersection of the two boxes; zero when they are
    /// disjoint.
    pub fn overlap(&self, other: &Self) -> T
    where
        T: std::cmp::PartialOrd
            + traits::min::Min
            + traits::max::Max
            + num::Zero
            + core::ops::Add<Output = T>
            + core::ops::Sub<Output = T>
            + core::ops::Mul<Output = T>
            + Copy,
    {
        let x1 = self.x.max(other.x);
        let y1 = self.y.max(other.y);
        let x2 = (self.x + self.width).min(other.x + other.width);
        let y2 = (self.y + self.height).min(other.y + other.height);
        // Disjoint boxes would otherwise produce negative extents.
        let width = (x2 - x1).max(T::zero());
        let height = (y2 - y1).max(T::zero());
        width * height
    }

    /// Intersection-over-union of the two boxes.
    ///
    /// Fix: the bound here was `Ord`, which made `iou` unusable for
    /// floating-point boxes even though `overlap` only needs `PartialOrd`.
    /// Relaxed to `PartialOrd` (strictly backward compatible).
    pub fn iou(&self, other: &Self) -> T
    where
        T: std::cmp::PartialOrd
            + num::Zero
            + traits::min::Min
            + traits::max::Max
            + core::ops::Add<Output = T>
            + core::ops::Sub<Output = T>
            + core::ops::Mul<Output = T>
            + core::ops::Div<Output = T>
            + Copy,
    {
        let overlap = self.overlap(other);
        let union = self.area() + other.area() - overlap;
        overlap / union
    }

    /// Whether `point` lies inside the box (edges inclusive).
    pub fn contains(&self, point: Point<T>) -> bool
    where
        T: std::cmp::PartialOrd + core::ops::Add<Output = T> + Copy,
    {
        point.x >= self.x
            && point.x <= self.x + self.width
            && point.y >= self.y
            && point.y <= self.y + self.height
    }

    /// Whether `other` lies entirely inside the box (edges inclusive).
    pub fn contains_bbox(&self, other: Self) -> bool
    where
        T: std::cmp::PartialOrd + Copy,
        T: core::ops::Add<Output = T>,
    {
        self.contains(other.top_left())
            && self.contains(other.top_right())
            && self.contains(other.bottom_left())
            && self.contains(other.bottom_right())
    }

    /// Creates a box from the top-left corner and extents
    /// (same as [`BBox::new`]).
    pub fn new_xywh(x: T, y: T, width: T, height: T) -> Self {
        Self {
            x,
            y,
            width,
            height,
        }
    }

    /// Creates a box from two opposite corners `(x1, y1)` and `(x2, y2)`.
    pub fn new_xyxy(x1: T, y1: T, x2: T, y2: T) -> Self
    where
        T: core::ops::Sub<Output = T> + Copy,
    {
        Self {
            x: x1,
            y: y1,
            width: x2 - x1,
            height: y2 - y1,
        }
    }

    /// Smallest box containing both `box1` and `box2`.
    pub fn containing(box1: Self, box2: Self) -> Self
    where
        T: traits::min::Min + traits::max::Max + Copy,
        T: core::ops::Sub<Output = T>,
        T: core::ops::Add<Output = T>,
    {
        let x1 = box1.x.min(box2.x);
        let y1 = box1.y.min(box2.y);
        let x2 = box1.x2().max(box2.x2());
        let y2 = box1.y2().max(box2.y2());
        Self::new_xyxy(x1, y1, x2, y2)
    }
}
impl<T: core::ops::Sub<Output = T> + Copy> core::ops::Sub<T> for BBox<T> {
    type Output = BBox<T>;

    /// Subtracts the scalar `rhs` from every component.
    fn sub(self, rhs: T) -> Self::Output {
        let BBox {
            x,
            y,
            width,
            height,
        } = self;
        BBox {
            x: x - rhs,
            y: y - rhs,
            width: width - rhs,
            height: height - rhs,
        }
    }
}
impl<T: core::ops::Add<Output = T> + Copy> core::ops::Add<T> for BBox<T> {
    type Output = BBox<T>;

    /// Adds the scalar `rhs` to every component.
    fn add(self, rhs: T) -> Self::Output {
        let BBox {
            x,
            y,
            width,
            height,
        } = self;
        BBox {
            x: x + rhs,
            y: y + rhs,
            width: width + rhs,
            height: height + rhs,
        }
    }
}
impl<T: core::ops::Mul<Output = T> + Copy> core::ops::Mul<T> for BBox<T> {
    type Output = BBox<T>;

    /// Multiplies every component by the scalar `rhs`.
    fn mul(self, rhs: T) -> Self::Output {
        let BBox {
            x,
            y,
            width,
            height,
        } = self;
        BBox {
            x: x * rhs,
            y: y * rhs,
            width: width * rhs,
            height: height * rhs,
        }
    }
}
impl<T: core::ops::Div<Output = T> + Copy> core::ops::Div<T> for BBox<T> {
    type Output = BBox<T>;

    /// Divides every component by the scalar `rhs`.
    fn div(self, rhs: T) -> Self::Output {
        let BBox {
            x,
            y,
            width,
            height,
        } = self;
        BBox {
            x: x / rhs,
            y: y / rhs,
            width: width / rhs,
            height: height / rhs,
        }
    }
}
impl<T> core::ops::Add<BBox<T>> for BBox<T>
where
    T: core::ops::Sub<Output = T>
        + core::ops::Add<Output = T>
        + traits::min::Min
        + traits::max::Max
        + Copy,
{
    type Output = BBox<T>;

    /// `a + b` yields the smallest box containing both operands (their hull).
    fn add(self, rhs: BBox<T>) -> Self::Output {
        BBox::new_xyxy(
            self.x1().min(rhs.x1()),
            self.y1().min(rhs.y1()),
            self.x2().max(rhs.x2()),
            self.y2().max(rhs.y2()),
        )
    }
}
/// The `+` operator must produce the hull of both boxes.
#[test]
fn test_bbox_add() {
    let a: BBox<usize> = BBox::new_xyxy(0, 0, 10, 10);
    let b: BBox<usize> = BBox::new_xyxy(5, 5, 15, 15);
    assert_eq!(a + b, BBox::new_xyxy(0, 0, 15, 15).cast());
}
/// A 2-D point in the same top-left-origin coordinate system as [`BBox`].
///
/// Fields are private; use [`Point::new`], [`Point::x`] and [`Point::y`].
/// The serde derives are feature-gated for the same reason as on [`BBox`]:
/// serde is an optional dependency of this crate.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Point<T = f32> {
    x: T,
    y: T,
}
impl<T> Point<T> {
pub const fn new(x: T, y: T) -> Self {
Self { x, y }
}
pub const fn x(&self) -> T
where
T: Copy,
{
self.x
}
pub const fn y(&self) -> T
where
T: Copy,
{
self.y
}
pub fn cast<T2>(&self) -> Point<T2>
where
T: num::cast::AsPrimitive<T2>,
T2: Copy + 'static,
{
Point {
x: self.x.as_(),
y: self.y.as_(),
}
}
}
impl<T: core::ops::Sub<T, Output = T> + Copy> core::ops::Sub<Point<T>> for Point<T> {
type Output = Point<T>;
fn sub(self, rhs: Point<T>) -> Self::Output {
Point {
x: self.x - rhs.x,
y: self.y - rhs.y,
}
}
}
impl<T: core::ops::Add<T, Output = T> + Copy> core::ops::Add<Point<T>> for Point<T> {
type Output = Point<T>;
fn add(self, rhs: Point<T>) -> Self::Output {
Point {
x: self.x + rhs.x,
y: self.y + rhs.y,
}
}
}
impl<T: core::ops::Sub<Output = T> + Copy> Point<T> {
    /// Re-expresses this point relative to a new origin; both points are
    /// assumed to be in the same scale.
    pub fn with_origin(&self, origin: Self) -> Self {
        Self::new(self.x - origin.x, self.y - origin.y)
    }
}
impl<T: core::ops::Add<Output = T> + Copy> Point<T> {
    /// Moves this point by the offset `point`.
    pub fn translate(&self, point: Point<T>) -> Self {
        Self::new(self.x + point.x, self.y + point.y)
    }
}
impl<I: num::Zero> BBox<I>
where
    I: num::cast::AsPrimitive<usize>,
{
    /// Allocates a zero-filled 2-D array shaped `(height, width)` like this
    /// box.
    pub fn zeros_ndarray_2d<T: num::Zero + Copy>(&self) -> ndarray::Array2<T> {
        let shape = (self.height.as_(), self.width.as_());
        ndarray::Array2::<T>::zeros(shape)
    }

    /// Allocates a zero-filled 3-D array shaped `(height, width, channels)`.
    pub fn zeros_ndarray_3d<T: num::Zero + Copy>(&self, channels: usize) -> ndarray::Array3<T> {
        let shape = (self.height.as_(), self.width.as_(), channels);
        ndarray::Array3::<T>::zeros(shape)
    }

    /// Allocates a one-filled 2-D array shaped `(height, width)`.
    pub fn ones_ndarray_2d<T: num::One + Copy>(&self) -> ndarray::Array2<T> {
        let shape = (self.height.as_(), self.width.as_());
        ndarray::Array2::<T>::ones(shape)
    }
}
impl<T: num::Float> BBox<T> {
    /// Rounds every component to the nearest value.
    pub fn round(&self) -> Self {
        Self::new(
            self.x.round(),
            self.y.round(),
            self.width.round(),
            self.height.round(),
        )
    }
}
#[cfg(test)]
mod bbox_clamp_tests {
    use super::*;

    /// Clamping a large box into a smaller region collapses onto the region.
    #[test]
    pub fn bbox_test_clamp_box() {
        let outer = BBox::new(0, 0, 100, 100);
        let inner = BBox::new(10, 10, 20, 20);
        assert_eq!(outer.clamp_box(inner), inner);
    }

    /// A box hanging past the top-left corner gets its overhang cut off.
    #[test]
    pub fn bbox_test_clamp_box_offset() {
        let bounds = BBox::new(0, 0, 100, 100);
        let hanging = BBox::new(-10, -10, 20, 20);
        assert_eq!(BBox::new(0, 0, 10, 10), hanging.clamp_box(bounds));
    }
}
#[cfg(test)]
mod bbox_padding_tests {
    use super::*;

    /// Uniform padding grows the box on all four sides.
    #[test]
    pub fn bbox_test_padding() {
        assert_eq!(
            BBox::new(0, 0, 10, 10).padding(2),
            BBox::new(-2, -2, 14, 14)
        );
    }

    /// Vertical-only padding leaves x/width untouched.
    #[test]
    pub fn bbox_test_padding_height() {
        assert_eq!(
            BBox::new(0, 0, 10, 10).padding_height(2),
            BBox::new(0, -2, 10, 14)
        );
    }

    /// Horizontal-only padding leaves y/height untouched.
    #[test]
    pub fn bbox_test_padding_width() {
        assert_eq!(
            BBox::new(0, 0, 10, 10).padding_width(2),
            BBox::new(-2, 0, 14, 10)
        );
    }

    /// Padding followed by clamping collapses exactly onto the clamp region.
    #[test]
    pub fn bbox_test_clamped_padding() {
        let clamp = BBox::new(0, 0, 12, 12);
        let padded = BBox::new(0, 0, 10, 10).padding(2);
        assert_eq!(padded.clamp_box(clamp), clamp);
    }

    /// Regression: clamping must always yield a box contained in the clamp
    /// region, even for awkward float values.
    #[test]
    pub fn bbox_clamp_failure() {
        let _original = BBox::new(475.0, 79.625, 37.0, 282.15);
        let padded = BBox {
            x: 471.3,
            y: 51.412499999999994,
            width: 40.69999999999999,
            height: 338.54999999999995,
        };
        let clamp = BBox::new(0.0, 0.0, 512.0, 512.0);
        let clamped = padded.clamp_box(clamp);
        assert!(clamp.contains_bbox(clamped));
    }
}
#[cfg(test)]
mod bbox_scale_tests {
    use super::*;

    /// Doubling keeps the center fixed: the origin shifts back by half the
    /// size increase.
    #[test]
    pub fn bbox_test_scale_int() {
        assert_eq!(BBox::new(0, 0, 10, 10).scale(2), BBox::new(-5, -5, 20, 20));
    }

    /// A 5% float enlargement distributes the growth evenly on both sides.
    #[test]
    pub fn bbox_test_scale_float() {
        let scaled = BBox::new(0, 0, 10, 10).cast().scale(1.05);
        let grow = 10.0 * 0.05;
        assert_eq!(
            scaled,
            BBox::new(-grow / 2.0, -grow / 2.0, 10.0 + grow, 10.0 + grow)
        );
    }

    /// A 5% shrink works the same way, with a negative delta.
    #[test]
    pub fn bbox_test_scale_float_negative() {
        let scaled = BBox::new(0, 0, 10, 10).cast().scale(0.95);
        let grow = -10.0 * 0.05;
        assert_eq!(
            scaled,
            BBox::new(-grow / 2.0, -grow / 2.0, 10.0 + grow, 10.0 + grow)
        );
    }

    /// Scale in float space, cast back to integers, then clamp.
    #[test]
    pub fn bbox_scale_float() {
        let scaled = BBox::new_xywh(0, 0, 200, 200)
            .cast::<f64>()
            .scale(1.1)
            .cast::<i32>()
            .clamp(0, 1000);
        assert_eq!(scaled, BBox::new(0, 0, 220, 220));
    }

    /// Mirrors the legacy `add_padding_bbox` helper: 20% growth, clamped to
    /// the image bounds, leaving the source box untouched.
    #[test]
    pub fn add_padding_bbox_example() {
        let bbox = BBox::new(100, 200, 300, 400);
        let scaled = bbox.cast::<f64>().scale(1.2).cast::<i32>().clamp(0, 1000);
        assert_eq!(bbox, BBox::new(100, 200, 300, 400));
        assert_eq!(scaled, BBox::new(70, 160, 360, 480));
    }

    /// Mirrors the legacy `scale_bboxes` helper: integer doubling.
    #[test]
    pub fn scale_bboxes() {
        assert_eq!(
            BBox::new(100, 200, 300, 400).scale(2),
            BBox::new(200, 400, 600, 800)
        );
    }
}

2
bbox/src/traits.rs Normal file
View File

@@ -0,0 +1,2 @@
pub mod max;
pub mod min;

27
bbox/src/traits/max.rs Normal file
View File

@@ -0,0 +1,27 @@
/// Maximum of two values, implemented uniformly for the primitive numeric
/// types so generic code can take a single `Max` bound.
pub trait Max: Sized + Copy {
    fn max(self, other: Self) -> Self;
}

/// Integer impls delegate to `Ord::max`.
macro_rules! ord_max {
    ($($ty:ty),+ $(,)?) => {$(
        impl Max for $ty {
            #[inline]
            fn max(self, other: Self) -> Self {
                Ord::max(self, other)
            }
        }
    )+};
}

ord_max!(usize, u8, u16, u32, u64, u128, isize, i8, i16, i32, i64, i128);

// Floats do not implement `Ord`; delegate to the inherent IEEE-754 `max`.
impl Max for f32 {
    #[inline]
    fn max(self, other: Self) -> Self {
        f32::max(self, other)
    }
}

impl Max for f64 {
    #[inline]
    fn max(self, other: Self) -> Self {
        f64::max(self, other)
    }
}

27
bbox/src/traits/min.rs Normal file
View File

@@ -0,0 +1,27 @@
/// Minimum of two values, implemented uniformly for the primitive numeric
/// types so generic code can take a single `Min` bound.
pub trait Min: Sized + Copy {
    fn min(self, other: Self) -> Self;
}

/// Integer impls delegate to `Ord::min`.
macro_rules! ord_min {
    ($($ty:ty),+ $(,)?) => {$(
        impl Min for $ty {
            #[inline]
            fn min(self, other: Self) -> Self {
                Ord::min(self, other)
            }
        }
    )+};
}

ord_min!(usize, u8, u16, u32, u64, u128, isize, i8, i16, i32, i64, i128);

// Floats do not implement `Ord`; delegate to the inherent IEEE-754 `min`.
impl Min for f32 {
    #[inline]
    fn min(self, other: Self) -> Self {
        f32::min(self, other)
    }
}

impl Min for f64 {
    #[inline]
    fn min(self, other: Self) -> Self {
        f64::min(self, other)
    }
}

236
deny.toml Normal file
View File

@@ -0,0 +1,236 @@
# This template contains all of the possible sections and their default values
# Note that all fields that take a lint level have these possible values:
# * deny - An error will be produced and the check will fail
# * warn - A warning will be produced, but the check will not fail
# * allow - No warning or error will be produced, though in some cases a note
# will be
# The values provided in this template are the default values that will be used
# when any section or field is not specified in your own configuration
# Root options
# The graph table configures how the dependency graph is constructed and thus
# which crates the checks are performed against
[graph]
# If 1 or more target triples (and optionally, target_features) are specified,
# only the specified targets will be checked when running `cargo deny check`.
# This means, if a particular package is only ever used as a target specific
# dependency, such as, for example, the `nix` crate only being used via the
# `target_family = "unix"` configuration, that only having windows targets in
# this list would mean the nix crate, as well as any of its exclusive
# dependencies not shared by any other crates, would be ignored, as the target
# list here is effectively saying which targets you are building for.
targets = [
# The triple can be any string, but only the target triples built in to
# rustc (as of 1.40) can be checked against actual config expressions
#"x86_64-unknown-linux-musl",
# You can also specify which target_features you promise are enabled for a
# particular target. target_features are currently not validated against
# the actual valid features supported by the target architecture.
#{ triple = "wasm32-unknown-unknown", features = ["atomics"] },
]
# When creating the dependency graph used as the source of truth when checks are
# executed, this field can be used to prune crates from the graph, removing them
# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate
# is pruned from the graph, all of its dependencies will also be pruned unless
# they are connected to another crate in the graph that hasn't been pruned,
# so it should be used with care. The identifiers are [Package ID Specifications]
# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html)
#exclude = []
# If true, metadata will be collected with `--all-features`. Note that this can't
# be toggled off if true, if you want to conditionally enable `--all-features` it
# is recommended to pass `--all-features` on the cmd line instead
all-features = false
# If true, metadata will be collected with `--no-default-features`. The same
# caveat with `all-features` applies
no-default-features = false
# If set, these feature will be enabled when collecting metadata. If `--features`
# is specified on the cmd line they will take precedence over this option.
#features = []
# The output table provides options for how/if diagnostics are outputted
[output]
# When outputting inclusion graphs in diagnostics that include features, this
# option can be used to specify the depth at which feature edges will be added.
# This option is included since the graphs can be quite large and the addition
# of features from the crate(s) to all of the graph roots can be far too verbose.
# This option can be overridden via `--feature-depth` on the cmd line
feature-depth = 1
# This section is considered when running `cargo deny check advisories`
# More documentation for the advisories section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
[advisories]
# The path where the advisory databases are cloned/fetched into
#db-path = "$CARGO_HOME/advisory-dbs"
# The url(s) of the advisory databases to use
#db-urls = ["https://github.com/rustsec/advisory-db"]
# A list of advisory IDs to ignore. Note that ignored advisories will still
# output a note when they are encountered.
ignore = [
#"RUSTSEC-0000-0000",
#{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" },
#"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish
#{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" },
]
# If this is true, then cargo deny will use the git executable to fetch advisory database.
# If this is false, then it uses a built-in git library.
# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support.
# See Git Authentication for more information about setting up git authentication.
#git-fetch-with-cli = true
# This section is considered when running `cargo deny check licenses`
# More documentation for the licenses section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
[licenses]
# List of explicitly allowed licenses
# See https://spdx.org/licenses/ for list of possible licenses
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
allow = [
"MIT",
"Apache-2.0",
"Unicode-3.0",
#"Apache-2.0 WITH LLVM-exception",
]
# The confidence threshold for detecting a license from license text.
# The higher the value, the more closely the license text must be to the
# canonical license text of a valid SPDX license file.
# [possible values: any between 0.0 and 1.0].
confidence-threshold = 0.8
# Allow 1 or more licenses on a per-crate basis, so that particular licenses
# aren't accepted for every possible crate as with the normal allow list
exceptions = [
# Each entry is the crate and version constraint, and its specific allow
# list
#{ allow = ["Zlib"], crate = "adler32" },
]
# Some crates don't have (easily) machine readable licensing information,
# adding a clarification entry for it allows you to manually specify the
# licensing information
#[[licenses.clarify]]
# The package spec the clarification applies to
#crate = "ring"
# The SPDX expression for the license requirements of the crate
#expression = "MIT AND ISC AND OpenSSL"
# One or more files in the crate's source used as the "source of truth" for
# the license expression. If the contents match, the clarification will be used
# when running the license check, otherwise the clarification will be ignored
# and the crate will be checked normally, which may produce warnings or errors
# depending on the rest of your configuration
#license-files = [
# Each entry is a crate relative path, and the (opaque) hash of its contents
#{ path = "LICENSE", hash = 0xbd0eed23 }
#]
[licenses.private]
# If true, ignores workspace crates that aren't published, or are only
# published to private registries.
# To see how to mark a crate as unpublished (to the official registry),
# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field.
ignore = false
# One or more private registries that you might publish crates to, if a crate
# is only published to private registries, and ignore is true, the crate will
# not have its license(s) checked
registries = [
#"https://sekretz.com/registry"
]
# This section is considered when running `cargo deny check bans`.
# More documentation about the 'bans' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
[bans]
# Lint level for when multiple versions of the same crate are detected
multiple-versions = "warn"
# Lint level for when a crate version requirement is `*`
wildcards = "allow"
# The graph highlighting used when creating dotgraphs for crates
# with multiple versions
# * lowest-version - The path to the lowest versioned duplicate is highlighted
# * simplest-path - The path to the version with the fewest edges is highlighted
# * all - Both lowest-version and simplest-path are used
highlight = "all"
# The default lint level for `default` features for crates that are members of
# the workspace that is being checked. This can be overridden by allowing/denying
# `default` on a crate-by-crate basis if desired.
workspace-default-features = "allow"
# The default lint level for `default` features for external crates that are not
# members of the workspace. This can be overridden by allowing/denying `default`
# on a crate-by-crate basis if desired.
external-default-features = "allow"
# List of crates that are allowed. Use with care!
allow = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" },
]
# List of crates to deny
deny = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" },
# Wrapper crates can optionally be specified to allow the crate when it
# is a direct dependency of the otherwise banned crate
#{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] },
]
# List of features to allow/deny
# Each entry the name of a crate and a version range. If version is
# not specified, all versions will be matched.
#[[bans.features]]
#crate = "reqwest"
# Features to not allow
#deny = ["json"]
# Features to allow
#allow = [
# "rustls",
# "__rustls",
# "__tls",
# "hyper-rustls",
# "rustls",
# "rustls-pemfile",
# "rustls-tls-webpki-roots",
# "tokio-rustls",
# "webpki-roots",
#]
# If true, the allowed features must exactly match the enabled feature set. If
# this is set there is no point setting `deny`
#exact = true
# Certain crates/versions that will be skipped when doing duplicate detection.
skip = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" },
]
# Similarly to `skip` allows you to skip certain crates during duplicate
# detection. Unlike skip, it also includes the entire tree of transitive
# dependencies starting at the specified crate, up to a certain depth, which is
# by default infinite.
skip-tree = [
#"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies
#{ crate = "ansi_term@0.11.0", depth = 20 },
]
# This section is considered when running `cargo deny check sources`.
# More documentation about the 'sources' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
[sources]
# Lint level for what to happen when a crate from a crate registry that is not
# in the allow list is encountered
unknown-registry = "warn"
# Lint level for what to happen when a crate from a git repository that is not
# in the allow list is encountered
unknown-git = "warn"
# List of URLs for allowed crate registries. Defaults to the crates.io index
# if not specified. If it is specified but empty, no registries are allowed.
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
# List of URLs for allowed Git repositories
allow-git = []
[sources.allow-org]
# github.com organizations to allow git sources for
github = []
# gitlab.com organizations to allow git sources for
gitlab = []
# bitbucket.org organizations to allow git sources for
bitbucket = []

227
flake.lock generated Normal file
View File

@@ -0,0 +1,227 @@
{
"nodes": {
"advisory-db": {
"flake": false,
"locked": {
"lastModified": 1750151065,
"narHash": "sha256-il+CAqChFIB82xP6bO43dWlUVs+NlG7a4g8liIP5HcI=",
"owner": "rustsec",
"repo": "advisory-db",
"rev": "7573f55ba337263f61167dbb0ea926cdc7c8eb5d",
"type": "github"
},
"original": {
"owner": "rustsec",
"repo": "advisory-db",
"type": "github"
}
},
"crane": {
"locked": {
"lastModified": 1750266157,
"narHash": "sha256-tL42YoNg9y30u7zAqtoGDNdTyXTi8EALDeCB13FtbQA=",
"owner": "ipetkov",
"repo": "crane",
"rev": "e37c943371b73ed87faf33f7583860f81f1d5a48",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"mnn": {
"flake": false,
"locked": {
"lastModified": 1749173738,
"narHash": "sha256-pNljvQ4xMZ4VmuxQyXt+boNBZD0+UZNpNLrWrj8Rtfw=",
"owner": "alibaba",
"repo": "MNN",
"rev": "ebdada82634300956e08bd4056ecfeb1e4f23b32",
"type": "github"
},
"original": {
"owner": "alibaba",
"ref": "3.2.0",
"repo": "MNN",
"type": "github"
}
},
"mnn-overlay": {
"inputs": {
"flake-utils": "flake-utils_2",
"mnn": "mnn",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1749204972,
"narHash": "sha256-ICLU408iwxZA7uETBmEBuuForBIPLvonuy1hW/fuiME=",
"owner": "uttarayan21",
"repo": "mnn-nix-overlay",
"rev": "7b97393977689e851a6840a8e1cbea058e67363a",
"type": "github"
},
"original": {
"owner": "uttarayan21",
"repo": "mnn-nix-overlay",
"type": "github"
}
},
"mnn-src": {
"flake": false,
"locked": {
"lastModified": 1749173738,
"narHash": "sha256-pNljvQ4xMZ4VmuxQyXt+boNBZD0+UZNpNLrWrj8Rtfw=",
"owner": "alibaba",
"repo": "MNN",
"rev": "ebdada82634300956e08bd4056ecfeb1e4f23b32",
"type": "github"
},
"original": {
"owner": "alibaba",
"ref": "3.2.0",
"repo": "MNN",
"type": "github"
}
},
"nix-github-actions": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1737420293,
"narHash": "sha256-F1G5ifvqTpJq7fdkT34e/Jy9VCyzd5XfJ9TO8fHhJWE=",
"owner": "nix-community",
"repo": "nix-github-actions",
"rev": "f4158fa080ef4503c8f4c820967d946c2af31ec9",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nix-github-actions",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1750506804,
"narHash": "sha256-VLFNc4egNjovYVxDGyBYTrvVCgDYgENp5bVi9fPTDYc=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "4206c4cb56751df534751b058295ea61357bbbaa",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"advisory-db": "advisory-db",
"crane": "crane",
"flake-utils": "flake-utils",
"mnn-overlay": "mnn-overlay",
"mnn-src": "mnn-src",
"nix-github-actions": "nix-github-actions",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1750732748,
"narHash": "sha256-HR2b3RHsPeJm+Fb+1ui8nXibgniVj7hBNvUbXEyz0DU=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "4b4494b2ba7e8a8041b2e28320b2ee02c115c75f",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

176
flake.nix Normal file
View File

@@ -0,0 +1,176 @@
{
  description = "A simple rust flake using rust-overlay and craneLib";
  # Flake inputs: nixpkgs + crane/rust-overlay for the Rust toolchain, an
  # advisory DB for `cargo audit`, and a pinned MNN source + overlay.
  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
    flake-utils.url = "github:numtide/flake-utils";
    crane.url = "github:ipetkov/crane";
    nix-github-actions = {
      url = "github:nix-community/nix-github-actions";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    rust-overlay = {
      url = "github:oxalica/rust-overlay";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    advisory-db = {
      url = "github:rustsec/advisory-db";
      flake = false;
    };
    mnn-overlay = {
      url = "github:uttarayan21/mnn-nix-overlay";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    # MNN pinned to the 3.2.0 tag; consumed as plain source by mnn-overlay.
    mnn-src = {
      url = "github:alibaba/MNN/3.2.0";
      flake = false;
    };
  };
  outputs = {
    self,
    crane,
    flake-utils,
    nixpkgs,
    rust-overlay,
    advisory-db,
    nix-github-actions,
    mnn-overlay,
    mnn-src,
    ...
  }:
    flake-utils.lib.eachDefaultSystem (
      system: let
        pkgs = import nixpkgs {
          inherit system;
          overlays = [
            rust-overlay.overlays.default
            (final: prev: {
              # Override the overlay's MNN to build from the pinned 3.2.0
              # source with the converter and GPU backends enabled.
              mnn = mnn-overlay.packages.${system}.mnn.override {
                src = mnn-src;
                buildConverter = true;
                enableMetal = true;
                enableOpencl = true;
              };
            })
          ];
        };
        inherit (pkgs) lib;
        # Crate name is read from Cargo.toml so the flake tracks renames.
        cargoToml = builtins.fromTOML (builtins.readFile ./Cargo.toml);
        name = cargoToml.package.name;
        stableToolchain = pkgs.rust-bin.stable.latest.default;
        stableToolchainWithLLvmTools = stableToolchain.override {
          extensions = ["rust-src" "llvm-tools"];
        };
        stableToolchainWithRustAnalyzer = stableToolchain.override {
          extensions = ["rust-src" "rust-analyzer"];
        };
        craneLib = (crane.mkLib pkgs).overrideToolchain stableToolchain;
        craneLibLLvmTools = (crane.mkLib pkgs).overrideToolchain stableToolchainWithLLvmTools;
        # Source filter: keep the usual cargo sources plus C/C++ files
        # (presumably needed by native build scripts — TODO confirm).
        src = let
          filterBySuffix = path: exts: lib.any (ext: lib.hasSuffix ext path) exts;
          sourceFilters = path: type: (craneLib.filterCargoSources path type) || filterBySuffix path [".c" ".h" ".hpp" ".cpp" ".cc"];
        in
          lib.cleanSourceWith {
            filter = sourceFilters;
            src = ./.;
          };
        # Arguments shared by all crane derivations (build, clippy, tests, …).
        commonArgs =
          {
            inherit src;
            pname = name;
            stdenv = pkgs.clangStdenv;
            doCheck = false;
            # LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib";
            # nativeBuildInputs = with pkgs; [
            #   cmake
            #   llvmPackages.libclang.lib
            # ];
            buildInputs = with pkgs;
              []
              ++ (lib.optionals pkgs.stdenv.isDarwin [
                libiconv
                apple-sdk_13
              ]);
          }
          // (lib.optionalAttrs pkgs.stdenv.isLinux {
            # BINDGEN_EXTRA_CLANG_ARGS = "-I${pkgs.llvmPackages.libclang.lib}/lib/clang/18/include";
          });
        # Dependency-only build reused as the base artifact for all checks.
        cargoArtifacts = craneLib.buildPackage commonArgs;
      in {
        # CI checks: clippy, docs, formatting, audit, license deny, nextest.
        checks =
          {
            "${name}-clippy" = craneLib.cargoClippy (commonArgs
              // {
                inherit cargoArtifacts;
                cargoClippyExtraArgs = "--all-targets -- --deny warnings";
              });
            "${name}-docs" = craneLib.cargoDoc (commonArgs // {inherit cargoArtifacts;});
            "${name}-fmt" = craneLib.cargoFmt {inherit src;};
            "${name}-toml-fmt" = craneLib.taploFmt {
              src = pkgs.lib.sources.sourceFilesBySuffices src [".toml"];
            };
            # Audit dependencies
            "${name}-audit" = craneLib.cargoAudit {
              inherit src advisory-db;
            };
            # Audit licenses
            "${name}-deny" = craneLib.cargoDeny {
              inherit src;
            };
            "${name}-nextest" = craneLib.cargoNextest (commonArgs
              // {
                inherit cargoArtifacts;
                partitions = 1;
                partitionType = "count";
              });
          }
          # Coverage is skipped on Darwin.
          // lib.optionalAttrs (!pkgs.stdenv.isDarwin) {
            "${name}-llvm-cov" = craneLibLLvmTools.cargoLlvmCov (commonArgs // {inherit cargoArtifacts;});
          };
        # Main package: the binary plus generated shell completions.
        packages = let
          pkg = craneLib.buildPackage (commonArgs
            // {inherit cargoArtifacts;}
            // {
              nativeBuildInputs = with pkgs; [
                installShellFiles
              ];
              postInstall = ''
                installShellCompletion --cmd ${name} \
                  --bash <($out/bin/${name} completions bash) \
                  --fish <($out/bin/${name} completions fish) \
                  --zsh <($out/bin/${name} completions zsh)
              '';
            });
        in {
          "${name}" = pkg;
          default = pkg;
        };
        # Dev shell: toolchain with rust-analyzer, test/lint tools, and MNN.
        devShells = {
          default = pkgs.mkShell.override {stdenv = pkgs.clangStdenv;} (commonArgs
            // {
              packages = with pkgs;
                [
                  stableToolchainWithRustAnalyzer
                  cargo-nextest
                  cargo-deny
                  mnn
                ]
                ++ (lib.optionals pkgs.stdenv.isDarwin [
                  apple-sdk_13
                ]);
            });
        };
      }
    )
    // {
      # Expose the x86_64-linux checks as a GitHub Actions build matrix.
      githubActions = nix-github-actions.lib.mkGithubMatrix {
        checks = nixpkgs.lib.getAttrs ["x86_64-linux"] self.checks;
      };
    };
}

BIN
models/retinaface.mnn Normal file

Binary file not shown.

1
ndarray-image/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

1132
ndarray-image/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

8
ndarray-image/Cargo.toml Normal file
View File

@@ -0,0 +1,8 @@
[package]
name = "ndarray-image"
version = "0.1.0"
edition = "2024"
[dependencies]
image = "0.25.6"
ndarray = "0.16.1"

181
ndarray-image/src/lib.rs Normal file
View File

@@ -0,0 +1,181 @@
/// Crate-local result alias: every conversion here can only fail with an
/// `ndarray::ShapeError` (buffer length not matching the requested shape).
type Result<T, E = ndarray::ShapeError> = core::result::Result<T, E>;
mod rgb8 {
    use super::Result;

    /// Borrow an `RgbImage` as an `(height, width, 3)` view over its raw bytes.
    pub(super) fn image_as_ndarray(image: &image::RgbImage) -> Result<ndarray::ArrayView3<u8>> {
        let (w, h) = image.dimensions();
        let shape = (h as usize, w as usize, 3);
        ndarray::ArrayView3::from_shape(shape, image.as_raw())
    }

    /// Consume an `RgbImage` into an owned `(height, width, 3)` array; the
    /// pixel buffer is moved, not copied.
    pub(super) fn image_into_ndarray(image: image::RgbImage) -> Result<ndarray::Array3<u8>> {
        let (w, h) = image.dimensions();
        let shape = (h as usize, w as usize, 3);
        ndarray::Array3::from_shape_vec(shape, image.into_raw())
    }
}
mod rgba8 {
    use super::Result;

    /// Borrow an `RgbaImage` as an `(height, width, 4)` view over its raw bytes.
    pub(super) fn image_as_ndarray(image: &image::RgbaImage) -> Result<ndarray::ArrayView3<u8>> {
        let (w, h) = image.dimensions();
        let shape = (h as usize, w as usize, 4);
        ndarray::ArrayView3::from_shape(shape, image.as_raw())
    }

    /// Consume an `RgbaImage` into an owned `(height, width, 4)` array; the
    /// pixel buffer is moved, not copied.
    pub(super) fn image_into_ndarray(image: image::RgbaImage) -> Result<ndarray::Array3<u8>> {
        let (w, h) = image.dimensions();
        let shape = (h as usize, w as usize, 4);
        ndarray::Array3::from_shape_vec(shape, image.into_raw())
    }
}
mod gray8 {
    use super::Result;

    /// Borrow a `GrayImage` as a 2-D `(height, width)` view over its raw bytes.
    pub(super) fn image_as_ndarray(image: &image::GrayImage) -> Result<ndarray::ArrayView2<u8>> {
        let (w, h) = image.dimensions();
        ndarray::ArrayView2::from_shape((h as usize, w as usize), image.as_raw())
    }

    /// Consume a `GrayImage` into an owned 2-D `(height, width)` array; the
    /// pixel buffer is moved, not copied.
    pub(super) fn image_into_ndarray(image: image::GrayImage) -> Result<ndarray::Array2<u8>> {
        let (w, h) = image.dimensions();
        ndarray::Array2::from_shape_vec((h as usize, w as usize), image.into_raw())
    }
}
mod gray_alpha8 {
    use super::Result;

    /// Borrow a `GrayAlphaImage` as an `(height, width, 2)` view over its raw bytes.
    pub(super) fn image_as_ndarray(
        image: &image::GrayAlphaImage,
    ) -> Result<ndarray::ArrayView3<u8>> {
        let (w, h) = image.dimensions();
        let shape = (h as usize, w as usize, 2);
        ndarray::ArrayView3::from_shape(shape, image.as_raw())
    }

    /// Consume a `GrayAlphaImage` into an owned `(height, width, 2)` array;
    /// the pixel buffer is moved, not copied.
    pub(super) fn image_into_ndarray(image: image::GrayAlphaImage) -> Result<ndarray::Array3<u8>> {
        let (w, h) = image.dimensions();
        let shape = (h as usize, w as usize, 2);
        ndarray::Array3::from_shape_vec(shape, image.into_raw())
    }
}
mod dynamic_image {
use super::*;
pub fn image_as_ndarray(image: &image::DynamicImage) -> Result<ndarray::ArrayViewD<u8>> {
Ok(match image {
image::DynamicImage::ImageRgb8(img) => rgb8::image_as_ndarray(img)?.into_dyn(),
image::DynamicImage::ImageRgba8(img) => rgba8::image_as_ndarray(img)?.into_dyn(),
image::DynamicImage::ImageLuma8(img) => gray8::image_as_ndarray(img)?.into_dyn(),
image::DynamicImage::ImageLumaA8(img) => gray_alpha8::image_as_ndarray(img)?.into_dyn(),
_ => {
unimplemented!("Unsupported image format: {:?}", image);
}
})
}
pub fn image_into_ndarray(image: image::DynamicImage) -> Result<ndarray::ArrayD<u8>> {
Ok(match image {
image::DynamicImage::ImageRgb8(img) => rgb8::image_into_ndarray(img)?.into_dyn(),
image::DynamicImage::ImageRgba8(img) => rgba8::image_into_ndarray(img)?.into_dyn(),
image::DynamicImage::ImageLuma8(img) => gray8::image_into_ndarray(img)?.into_dyn(),
image::DynamicImage::ImageLumaA8(img) => {
gray_alpha8::image_into_ndarray(img)?.into_dyn()
}
_ => {
unimplemented!("Unsupported image format: {:?}", image);
}
})
}
}
/// Conversion from an `image` buffer into an `ndarray` array.
///
/// Naming follows Rust conventions: `as_ndarray` borrows (cheap view),
/// `to_ndarray` copies into an owned array, `into_ndarray` consumes the
/// image and moves its pixel buffer.
pub trait ImageToNdarray {
    /// Owned array type returned by `to_ndarray` / `into_ndarray`.
    type OwnedOutput;
    /// Borrowed view type returned by `as_ndarray`; its lifetime is tied to `&self`.
    type RefOutput<'a>
    where
        Self: 'a;
    /// Borrow the pixel data as an array view without copying.
    fn as_ndarray<'a>(&'a self) -> Result<Self::RefOutput<'a>>;
    /// Copy the pixel data into a newly allocated owned array.
    fn to_ndarray(&self) -> Result<Self::OwnedOutput>;
    /// Consume the image, moving its pixel buffer into an owned array.
    fn into_ndarray(self) -> Result<Self::OwnedOutput>;
}
impl ImageToNdarray for image::RgbImage {
    type OwnedOutput = ndarray::Array3<u8>;
    type RefOutput<'a> = ndarray::ArrayView3<'a, u8>;

    /// Borrow the RGB pixels as an `(h, w, 3)` view.
    fn as_ndarray<'a>(&'a self) -> Result<Self::RefOutput<'a>> {
        rgb8::image_as_ndarray(self)
    }

    /// Copy the RGB pixels into a freshly owned `(h, w, 3)` array.
    fn to_ndarray(&self) -> Result<Self::OwnedOutput> {
        self.as_ndarray().map(|view| view.to_owned())
    }

    /// Move the RGB pixel buffer into an owned `(h, w, 3)` array.
    fn into_ndarray(self) -> Result<Self::OwnedOutput> {
        rgb8::image_into_ndarray(self)
    }
}
impl ImageToNdarray for image::RgbaImage {
    type OwnedOutput = ndarray::Array3<u8>;
    type RefOutput<'a> = ndarray::ArrayView3<'a, u8>;

    /// Borrow the RGBA pixels as an `(h, w, 4)` view.
    fn as_ndarray<'a>(&'a self) -> Result<Self::RefOutput<'a>> {
        rgba8::image_as_ndarray(self)
    }

    /// Copy the RGBA pixels into a freshly owned `(h, w, 4)` array.
    fn to_ndarray(&self) -> Result<Self::OwnedOutput> {
        self.as_ndarray().map(|view| view.to_owned())
    }

    /// Move the RGBA pixel buffer into an owned `(h, w, 4)` array.
    fn into_ndarray(self) -> Result<Self::OwnedOutput> {
        rgba8::image_into_ndarray(self)
    }
}
impl ImageToNdarray for image::GrayImage {
    type OwnedOutput = ndarray::Array2<u8>;
    type RefOutput<'a> = ndarray::ArrayView2<'a, u8>;

    /// Borrow the grayscale pixels as a 2-D `(h, w)` view.
    fn as_ndarray<'a>(&'a self) -> Result<Self::RefOutput<'a>> {
        gray8::image_as_ndarray(self)
    }

    /// Copy the grayscale pixels into a freshly owned `(h, w)` array.
    fn to_ndarray(&self) -> Result<Self::OwnedOutput> {
        self.as_ndarray().map(|view| view.to_owned())
    }

    /// Move the grayscale pixel buffer into an owned `(h, w)` array.
    fn into_ndarray(self) -> Result<Self::OwnedOutput> {
        gray8::image_into_ndarray(self)
    }
}
impl ImageToNdarray for image::GrayAlphaImage {
    type OwnedOutput = ndarray::Array3<u8>;
    type RefOutput<'a> = ndarray::ArrayView3<'a, u8>;

    /// Borrow the gray+alpha pixels as an `(h, w, 2)` view.
    fn as_ndarray<'a>(&'a self) -> Result<Self::RefOutput<'a>> {
        gray_alpha8::image_as_ndarray(self)
    }

    /// Copy the gray+alpha pixels into a freshly owned `(h, w, 2)` array.
    fn to_ndarray(&self) -> Result<Self::OwnedOutput> {
        self.as_ndarray().map(|view| view.to_owned())
    }

    /// Move the gray+alpha pixel buffer into an owned `(h, w, 2)` array.
    fn into_ndarray(self) -> Result<Self::OwnedOutput> {
        gray_alpha8::image_into_ndarray(self)
    }
}
impl ImageToNdarray for image::DynamicImage {
    type OwnedOutput = ndarray::ArrayD<u8>;
    type RefOutput<'a> = ndarray::ArrayViewD<'a, u8>;

    /// Borrow the pixels as a dynamic-dimension view; panics on unsupported formats.
    fn as_ndarray<'a>(&'a self) -> Result<Self::RefOutput<'a>> {
        dynamic_image::image_as_ndarray(self)
    }

    /// Copy the pixels into an owned dynamic-dimension array.
    fn to_ndarray(&self) -> Result<Self::OwnedOutput> {
        self.as_ndarray().map(|view| view.to_owned())
    }

    /// Move the pixel buffer into an owned dynamic-dimension array.
    fn into_ndarray(self) -> Result<Self::OwnedOutput> {
        dynamic_image::image_into_ndarray(self)
    }
}

64
src/cli.rs Normal file
View File

@@ -0,0 +1,64 @@
use std::path::PathBuf;

// Top-level command-line interface for the detector binary.
// NOTE: plain `//` comments are used on clap items on purpose — `///` doc
// comments would be picked up by clap-derive and change the --help output.
#[derive(Debug, clap::Parser)]
pub struct Cli {
    // The selected subcommand.
    #[clap(subcommand)]
    pub cmd: SubCommand,
}

// Available subcommands: run detection, list (contents TBD), or emit
// shell completions.
#[derive(Debug, clap::Subcommand)]
pub enum SubCommand {
    #[clap(name = "detect")]
    Detect(Detect),
    #[clap(name = "list")]
    List(List),
    #[clap(name = "completions")]
    Completions { shell: clap_complete::Shell },
}

// Supported model architectures; currently only RetinaFace.
#[derive(Debug, clap::ValueEnum, Clone, Copy)]
pub enum Models {
    RetinaFace,
}

// Inference runtime selection.
// NOTE(review): not referenced by any visible code path yet — confirm intent.
#[derive(Debug, clap::ValueEnum, Clone, Copy)]
pub enum Executor {
    Mnn,
    Onnx,
}

// ONNX execution providers.
// NOTE(review): not referenced by any visible code path yet — confirm intent.
#[derive(Debug, clap::ValueEnum, Clone, Copy)]
pub enum OnnxEp {
    Cpu,
}

// MNN forward/backend types.
// NOTE(review): not referenced by any visible code path yet — confirm intent.
#[derive(Debug, clap::ValueEnum, Clone, Copy)]
pub enum MnnEp {
    Cpu,
    Metal,
    OpenCL,
    CoreML,
}

// Arguments for the `detect` subcommand.
#[derive(Debug, clap::Args)]
pub struct Detect {
    // Optional path to a model file on disk.
    #[clap(short, long)]
    pub model: Option<PathBuf>,
    // Model architecture; defaults to RetinaFace.
    #[clap(short = 'M', long, default_value = "retina-face")]
    pub model_type: Models,
    // Image to run detection on.
    pub image: PathBuf,
}

// Arguments for the `list` subcommand (none yet).
#[derive(Debug, clap::Args)]
pub struct List {}
impl Cli {
pub fn completions(shell: clap_complete::Shell) {
let mut command = <Cli as clap::CommandFactory>::command();
clap_complete::generate(
shell,
&mut command,
env!("CARGO_BIN_NAME"),
&mut std::io::stdout(),
);
}
}

6
src/errors.rs Normal file
View File

@@ -0,0 +1,6 @@
pub use error_stack::{Report, ResultExt};

/// Opaque top-level error type; all detail travels as `error_stack`
/// context/attachments rather than in the error itself.
#[derive(Debug, thiserror::Error)]
#[error("An error occurred")]
pub struct Error;

/// Crate-wide result alias defaulting to an `error_stack` report of [`Error`].
pub type Result<T, E = error_stack::Report<Error>> = core::result::Result<T, E>;

73
src/facedet.rs Normal file
View File

@@ -0,0 +1,73 @@
use crate::errors::*;
use error_stack::ResultExt;
use mnn_bridge::ndarray::NdarrayToMnn;
use std::path::Path;
/// Face detector backed by an MNN interpreter/session pair.
pub struct FaceDetection {
    /// Handle wrapping the MNN interpreter and its session (see `mnn_sync`).
    handle: mnn_sync::SessionHandle,
}
impl FaceDetection {
    /// Load a face-detection model from a file on disk and build a session.
    pub fn new(path: impl AsRef<Path>) -> Result<Self> {
        let model = std::fs::read(path)
            .change_context(Error)
            .attach_printable("Failed to read model file")?;
        Self::new_from_bytes(&model)
    }

    /// Build an interpreter and session from an in-memory MNN model blob.
    pub fn new_from_bytes(model: &[u8]) -> Result<Self> {
        tracing::info!("Loading face detection model from bytes");
        let mut model = mnn::Interpreter::from_bytes(model)
            .map_err(|e| e.into_inner())
            .change_context(Error)
            .attach_printable("Failed to load model from bytes")?;
        model.set_session_mode(mnn::SessionMode::Release);
        // High memory mode + CPU forward type; GPU backends listed in the CLI
        // (Metal/OpenCL/CoreML) are not wired up here yet.
        let bc = mnn::BackendConfig::default().with_memory_mode(mnn::MemoryMode::High);
        let sc = mnn::ScheduleConfig::new()
            .with_type(mnn::ForwardType::CPU)
            .with_backend_config(bc);
        tracing::info!("Creating session handle for face detection model");
        let handle = mnn_sync::SessionHandle::new(model, sc)
            .change_context(Error)
            .attach_printable("Failed to create session handle")?;
        Ok(FaceDetection { handle })
    }

    /// Run detection on a 3-D `u8` image array (presumably HxWxC as produced
    /// by `ndarray-image` — TODO confirm the expected layout and channels).
    ///
    /// NOTE(review): the actual inference path is commented out below. The
    /// input tensor is built and then dropped unused, and a 1x1 zero array is
    /// returned as a placeholder — callers currently get no real detections.
    pub fn detect_faces(&self, image: ndarray::Array3<u8>) -> Result<ndarray::Array2<u8>> {
        use mnn_bridge::ndarray::MnnToNdarray;
        let output = self
            .handle
            .run(move |sr| {
                // Convert the ndarray into an MNN tensor (currently unused —
                // see the commented-out copy/run/readback sequence below).
                let tensor = image
                    .as_mnn_tensor()
                    .ok_or_else(|| Error)
                    .attach_printable("Failed to convert ndarray to mnn tensor")
                    .change_context(mnn::error::ErrorKind::TensorError)?;
                let (intptr, session) = sr.both_mut();
                tracing::trace!("Copying input tensor to host");
                // let input = intptr.input::<u8>(session, "input")?;
                // dbg!(input.shape());
                // let mut t = input.create_host_tensor_from_device(false);
                // tensor.copy_to_host_tensor(&mut t)?;
                //
                // intptr.run_session(&session)?;
                // let output = intptr.output::<u8>(&session, "output").unwrap();
                // let output_tensor = output.create_host_tensor_from_device(true);
                // let output_array = output_tensor
                //     .try_as_ndarray()
                //     .change_context(mnn::error::ErrorKind::TensorError)?
                //     .to_owned();
                // Ok(output_array)
                Ok(ndarray::Array2::<u8>::zeros((1, 1))) // Placeholder for actual output
            })
            .map_err(|e| e.into_inner())
            .change_context(Error);
        output
    }
}

5
src/image.rs Normal file
View File

@@ -0,0 +1,5 @@
// pub struct Image {
// pub width: u32,
// pub height: u32,
// pub data: Vec<u8>,
// }

4
src/lib.rs Normal file
View File

@@ -0,0 +1,4 @@
//! Library surface of the detector crate.
pub mod errors; // error-stack based Error/Result aliases
pub mod facedet; // MNN-backed face detection
pub mod image; // image helpers (currently entirely commented out)
// NOTE(review): this glob import appears unused at the crate root — confirm
// before removing.
use errors::*;

37
src/main.rs Normal file
View File

@@ -0,0 +1,37 @@
mod cli;
mod errors;
use errors::*;
use ndarray_image::*;

/// RetinaFace model embedded at compile time; used when the user does not
/// pass `--model` on the command line.
const RETINAFACE_MODEL: &[u8] = include_bytes!("../models/retinaface.mnn");

/// Entry point: parse the CLI and dispatch the chosen subcommand.
pub fn main() -> Result<()> {
    // Log at TRACE; thread ids/names help correlate messages coming from the
    // detector's session worker thread.
    tracing_subscriber::fmt()
        .with_env_filter("trace")
        .with_thread_ids(true)
        .with_thread_names(true)
        .with_target(false)
        .init();
    let args = <cli::Cli as clap::Parser>::parse();
    match args.cmd {
        cli::SubCommand::Detect(detect) => {
            use detector::facedet;
            // Honor `--model` when supplied. Previously the flag was parsed
            // but silently ignored and the embedded model was always used.
            // (`--model-type` has a single variant today, so it selects the
            // embedded fallback implicitly.)
            let model = match &detect.model {
                Some(path) => facedet::FaceDetection::new(path),
                None => facedet::FaceDetection::new_from_bytes(RETINAFACE_MODEL),
            }
            .change_context(errors::Error)
            .attach_printable("Failed to create face detection model")?;
            let image = image::open(detect.image).change_context(Error)?;
            // Detection expects RGB8 input, so normalize the decoded image.
            let image = image.into_rgb8();
            let array = image
                .into_ndarray()
                .change_context(errors::Error)
                .attach_printable("Failed to convert image to ndarray")?;
            model
                .detect_faces(array)
                .change_context(errors::Error)
                .attach_printable("Failed to detect faces")?;
        }
        cli::SubCommand::List(list) => {
            println!("List: {:?}", list);
        }
        cli::SubCommand::Completions { shell } => {
            cli::Cli::completions(shell);
        }
    }
    Ok(())
}