From 7e4392ae68f97311f2389fdf8835e70a25912ff3 Mon Sep 17 00:00:00 2001 From: Kyle Simpson Date: Thu, 29 Oct 2020 20:25:20 +0000 Subject: [PATCH] Voice Rework -- Events, Track Queues (#806) This implements a proof-of-concept for an improved audio frontend. The largest change is the introduction of events and event handling: both by time elapsed and by track events, such as ending or looping. Following on from this, the library now includes a basic, event-driven track queue system (which people seem to ask for unusually often). A new sample, `examples/13_voice_events`, demonstrates both the `TrackQueue` system and some basic events via the `~queue` and `~play_fade` commands. Locks are removed from around the control of `Audio` objects, which should allow the backend to be moved to a more granular futures-based backend solution in a cleaner way. --- Cargo.toml | 155 ++++++++ README.md | 29 ++ benches/mixing.rs | 30 ++ build.rs | 23 ++ examples/README.md | 3 + examples/twilight/Cargo.toml | 21 + examples/twilight/src/main.rs | 378 ++++++++++++++++++ rustfmt.toml | 6 + songbird-ico.png | Bin 0 -> 3978 bytes songbird.png | Bin 0 -> 75078 bytes songbird.svg | 22 ++ src/constants.rs | 75 ++++ src/driver/config.rs | 10 + src/driver/connection/error.rs | 105 +++++ src/driver/connection/mod.rs | 321 ++++++++++++++++ src/driver/crypto.rs | 38 ++ src/driver/mod.rs | 233 +++++++++++ src/driver/tasks/error.rs | 97 +++++ src/driver/tasks/events.rs | 118 ++++++ src/driver/tasks/message/core.rs | 24 ++ src/driver/tasks/message/events.rs | 31 ++ src/driver/tasks/message/mixer.rs | 32 ++ src/driver/tasks/message/mod.rs | 49 +++ src/driver/tasks/message/udp_rx.rs | 7 + src/driver/tasks/message/udp_tx.rs | 4 + src/driver/tasks/message/ws.rs | 12 + src/driver/tasks/mixer.rs | 516 +++++++++++++++++++++++++ src/driver/tasks/mod.rs | 155 ++++++++ src/driver/tasks/udp_rx.rs | 286 ++++++++++++++ src/driver/tasks/udp_tx.rs | 45 +++ src/driver/tasks/ws.rs | 205 ++++++++++ src/error.rs | 69 ++++ src/events/context.rs | 137 +++++++ src/events/core.rs | 31 ++ src/events/data.rs | 88 +++++ src/events/mod.rs | 91 +++++ src/events/store.rs | 252 ++++++++++++ src/events/track.rs | 16 + src/events/untimed.rs | 28 ++ src/handler.rs | 301 +++++++++++++++ src/id.rs | 121 ++++++ src/info.rs | 137 +++++++ src/input/cached/compressed.rs | 303 +++++++++++++++ src/input/cached/hint.rs | 40 ++ src/input/cached/memory.rs | 116 ++++++ src/input/cached/mod.rs | 44 +++ src/input/cached/tests.rs | 79 ++++ src/input/child.rs | 38 ++ src/input/codec/mod.rs | 99 +++++ src/input/codec/opus.rs | 43 +++ src/input/container/frame.rs | 8 + src/input/container/mod.rs | 69 ++++ src/input/dca.rs | 137 +++++++ src/input/error.rs | 93 +++++ src/input/ffmpeg_src.rs | 146 +++++++ src/input/metadata.rs | 166 ++++++++ src/input/mod.rs | 596 +++++++++++++++++++++++++++++ src/input/reader.rs | 180 +++++++++ src/input/restartable.rs | 294 ++++++++++++++ src/input/utils.rs | 41 ++ src/input/ytdl_src.rs | 107 ++++++ src/lib.rs | 84 ++++ src/manager.rs | 353 +++++++++++++++++ src/serenity.rs | 71 ++++ src/shards.rs | 168 ++++++++ src/tracks/command.rs | 53 +++ src/tracks/handle.rs | 159 ++++++++ src/tracks/looping.rs | 22 ++ src/tracks/mod.rs | 379 ++++++++++++++++++ src/tracks/mode.rs | 37 ++ src/tracks/queue.rs | 213 +++++++++++ src/tracks/state.rs | 31 ++ src/ws.rs | 208 ++++++++++ utils/Cargo.toml | 10 + utils/README.md | 1 + utils/src/lib.rs | 67 ++++ 76 files changed, 8756 insertions(+) create mode 100644 Cargo.toml create mode 100644 README.md create mode 
100644 benches/mixing.rs create mode 100644 build.rs create mode 100644 examples/README.md create mode 100644 examples/twilight/Cargo.toml create mode 100644 examples/twilight/src/main.rs create mode 100644 rustfmt.toml create mode 100644 songbird-ico.png create mode 100644 songbird.png create mode 100644 songbird.svg create mode 100644 src/constants.rs create mode 100644 src/driver/config.rs create mode 100644 src/driver/connection/error.rs create mode 100644 src/driver/connection/mod.rs create mode 100644 src/driver/crypto.rs create mode 100644 src/driver/mod.rs create mode 100644 src/driver/tasks/error.rs create mode 100644 src/driver/tasks/events.rs create mode 100644 src/driver/tasks/message/core.rs create mode 100644 src/driver/tasks/message/events.rs create mode 100644 src/driver/tasks/message/mixer.rs create mode 100644 src/driver/tasks/message/mod.rs create mode 100644 src/driver/tasks/message/udp_rx.rs create mode 100644 src/driver/tasks/message/udp_tx.rs create mode 100644 src/driver/tasks/message/ws.rs create mode 100644 src/driver/tasks/mixer.rs create mode 100644 src/driver/tasks/mod.rs create mode 100644 src/driver/tasks/udp_rx.rs create mode 100644 src/driver/tasks/udp_tx.rs create mode 100644 src/driver/tasks/ws.rs create mode 100644 src/error.rs create mode 100644 src/events/context.rs create mode 100644 src/events/core.rs create mode 100644 src/events/data.rs create mode 100644 src/events/mod.rs create mode 100644 src/events/store.rs create mode 100644 src/events/track.rs create mode 100644 src/events/untimed.rs create mode 100644 src/handler.rs create mode 100644 src/id.rs create mode 100644 src/info.rs create mode 100644 src/input/cached/compressed.rs create mode 100644 src/input/cached/hint.rs create mode 100644 src/input/cached/memory.rs create mode 100644 src/input/cached/mod.rs create mode 100644 src/input/cached/tests.rs create mode 100644 src/input/child.rs create mode 100644 src/input/codec/mod.rs create mode 100644 src/input/codec/opus.rs create mode 100644 src/input/container/frame.rs create mode 100644 src/input/container/mod.rs create mode 100644 src/input/dca.rs create mode 100644 src/input/error.rs create mode 100644 src/input/ffmpeg_src.rs create mode 100644 src/input/metadata.rs create mode 100644 src/input/mod.rs create mode 100644 src/input/reader.rs create mode 100644 src/input/restartable.rs create mode 100644 src/input/utils.rs create mode 100644 src/input/ytdl_src.rs create mode 100644 src/lib.rs create mode 100644 src/manager.rs create mode 100644 src/serenity.rs create mode 100644 src/shards.rs create mode 100644 src/tracks/command.rs create mode 100644 src/tracks/handle.rs create mode 100644 src/tracks/looping.rs create mode 100644 src/tracks/mod.rs create mode 100644 src/tracks/mode.rs create mode 100644 src/tracks/queue.rs create mode 100644 src/tracks/state.rs create mode 100644 src/ws.rs create mode 100644 utils/Cargo.toml create mode 100644 utils/README.md create mode 100644 utils/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..8b12454 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,155 @@ +[package] +authors = ["Kyle Simpson "] +description = "An async Rust library for the Discord voice API." 
+documentation = "https://docs.rs/songbird" +edition = "2018" +homepage = "https://github.com/serenity-rs/serenity" +include = ["src/**/*.rs", "Cargo.toml"] +keywords = ["discord", "api", "rtp", "audio"] +license = "ISC" +name = "songbird" +readme = "README.md" +repository = "https://github.com/serenity-rs/serenity.git" +version = "0.1.0" + +[dependencies] +serde = { version = "1", features = ["derive"] } +serde_json = "1" +tracing = "0.1" +tracing-futures = "0.2" + +[dependencies.async-trait] +optional = true +version = "0.1" + +[dependencies.async-tungstenite] +default-features = false +features = ["tokio-runtime"] +optional = true +version = "0.9" + +[dependencies.audiopus] +optional = true +version = "0.2" + +[dependencies.byteorder] +optional = true +version = "1" + +[dependencies.discortp] +features = ["discord-full"] +optional = true +version = "0.2" + +[dependencies.flume] +optional = true +version = "0.9" + +[dependencies.futures] +version = "0.3" + +[dependencies.parking_lot] +optional = true +version = "0.11" + +[dependencies.rand] +optional = true +version = "0.7" + +[dependencies.serenity] +optional = true +features = ["voice", "gateway"] +path = "../" +version = "0.9.0-rc.2" + +[dependencies.serenity-voice-model] +optional = true +path = "../voice-model" +version = "0.9.0-rc.2" + +[dependencies.spin_sleep] +optional = true +version = "1" + +[dependencies.streamcatcher] +optional = true +version = "0.1" + +[dependencies.tokio] +optional = true +version = "0.2" +default-features = false + +[dependencies.twilight-gateway] +optional = true +version = "0.1" +default-features = false + +[dependencies.twilight-model] +optional = true +version = "0.1" +default-features = false + +[dependencies.url] +optional = true +version = "2" + +[dependencies.xsalsa20poly1305] +optional = true +version = "0.5" + +[dev-dependencies] +criterion = "0.3" +utils = { path = "utils" } + +[features] +default = [ + "serenity-rustls", + "driver", + "gateway", +] +gateway = [ + "flume", + "parking_lot", + "tokio/sync", +] +driver = [ + "async-trait", + "async-tungstenite", + "audiopus", + "byteorder", + "discortp", + "flume", + "parking_lot", + "rand", + "serenity-voice-model", + "spin_sleep", + "streamcatcher", + "tokio/fs", + "tokio/io-util", + "tokio/net", + "tokio/rt-core", + "tokio/time", + "tokio/process", + "tokio/sync", + "url", + "xsalsa20poly1305", +] +rustls = ["async-tungstenite/tokio-rustls"] +native = ["async-tungstenite/tokio-native-tls"] +serenity-rustls = ["serenity/rustls_backend", "rustls", "gateway", "serenity-deps"] +serenity-native = ["serenity/native_tls_backend", "native", "gateway", "serenity-deps"] +twilight-rustls = ["twilight", "twilight-gateway/rustls", "rustls", "gateway"] +twilight-native = ["twilight", "twilight-gateway/native", "native", "gateway"] +twilight = ["twilight-model"] +simd-zlib = ["twilight-gateway/simd-zlib"] +stock-zlib = ["twilight-gateway/stock-zlib"] +serenity-deps = ["async-trait"] + +[[bench]] +name = "mixing" +path = "benches/mixing.rs" +harness = false + +[package.metadata.docs.rs] +all-features = true diff --git a/README.md b/README.md new file mode 100644 index 0000000..72d271b --- /dev/null +++ b/README.md @@ -0,0 +1,29 @@ +# Songbird + +![](songbird.png) + +Songbird is an async, cross-library compatible voice system for Discord, written in Rust. +The library offers: + * A standalone gateway frontend compatible with [serenity] and [twilight] using the + `"gateway"` and `"[serenity/twilight]-[rustls/native]"` features. 
You can even run + driverless, to help manage your [lavalink] sessions. + * A standalone driver for voice calls, via the `"driver"` feature. If you can create + a `ConnectionInfo` using any other gateway, or language for your bot, then you + can run the songbird voice driver. + * And, by default, a fully featured voice system featuring events, queues, RT(C)P packet + handling, seeking on compatible streams, shared multithreaded audio stream caches, + and direct Opus data passthrough from DCA files. + +## Examples +Full examples showing various types of functionality and integrations can be found as part of [serenity's examples], and in [this crate's examples directory]. + +## Attribution + +Songbird's logo is based upon the copyright-free image ["Black-Capped Chickadee"] by George Gorgas White. + +[serenity]: https://github.com/serenity-rs/serenity +[twilight]: https://github.com/twilight-rs/twilight +["Black-Capped Chickadee"]: https://www.oldbookillustrations.com/illustrations/black-capped-chickadee/ +[lavalink]: https://github.com/Frederikam/Lavalink +[serenity's examples]: https://github.com/serenity-rs/serenity/tree/current/examples +[this crate's examples directory]: https://github.com/serenity-rs/serenity/tree/current/songbird/examples diff --git a/benches/mixing.rs b/benches/mixing.rs new file mode 100644 index 0000000..7828bae --- /dev/null +++ b/benches/mixing.rs @@ -0,0 +1,30 @@ +use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion}; +use songbird::{constants::*, input::Input}; + +pub fn mix_one_frame(c: &mut Criterion) { + let floats = utils::make_sine(STEREO_FRAME_SIZE, true); + let mut raw_buf = [0f32; STEREO_FRAME_SIZE]; + + c.bench_function("Mix stereo source", |b| { + b.iter_batched_ref( + || black_box(Input::float_pcm(true, floats.clone().into())), + |input| { + input.mix(black_box(&mut raw_buf), black_box(1.0)); + }, + BatchSize::SmallInput, + ) + }); + + c.bench_function("Mix mono source", |b| { + b.iter_batched_ref( + || black_box(Input::float_pcm(false, floats.clone().into())), + |input| { + input.mix(black_box(&mut raw_buf), black_box(1.0)); + }, + BatchSize::SmallInput, + ) + }); +} + +criterion_group!(benches, mix_one_frame); +criterion_main!(benches); diff --git a/build.rs b/build.rs new file mode 100644 index 0000000..85b9e90 --- /dev/null +++ b/build.rs @@ -0,0 +1,23 @@ +#[cfg(all(feature = "driver", not(any(feature = "rustls", feature = "native"))))] +compile_error!( + "You have the `driver` feature enabled: \ + either the `rustls` or `native` feature must be + selected to let Songbird's driver use websockets.\n\ + - `rustls` uses Rustls, a pure Rust TLS-implementation.\n\ + - `native` uses SChannel on Windows, Secure Transport on macOS, \ + and OpenSSL on other platforms.\n\ + If you are unsure, go with `rustls`." +); + +#[cfg(all( + feature = "twilight", + not(any(feature = "simd-zlib", feature = "stock-zlib")) +))] +compile_error!( + "Twilight requires you to specify a zlib backend: \ + either the `simd-zlib` or `stock-zlib` feature must be + selected.\n\ + If you are unsure, go with `stock-zlib`." +); + +fn main() {} diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000..45fcf68 --- /dev/null +++ b/examples/README.md @@ -0,0 +1,3 @@ +# Songbird examples + +These examples show more advanced use of Songbird, or how to include Songbird in bots built on other libraries, such as twilight.
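As a companion to the `examples/13_voice_events` sample mentioned in the commit message, here is a minimal sketch of how the new event API might be driven from user code. It is illustrative only and not part of the patch: `EventHandler`, `EventContext`, `Event`, `TrackEvent`, and `TrackHandle::add_event` are assumed from this changeset's `src/events/` and `src/tracks/handle.rs`, so the exact signatures may differ.

```rust
// Minimal sketch of the event API introduced by this PR -- illustrative only.
// Item names and signatures are assumed from src/events/ and src/tracks/handle.rs.
use std::time::Duration;

use async_trait::async_trait;
use songbird::{tracks::TrackHandle, Event, EventContext, EventHandler, TrackEvent};

struct Notifier;

#[async_trait]
impl EventHandler for Notifier {
    async fn act(&self, _ctx: &EventContext<'_>) -> Option<Event> {
        tracing::info!("timer fired or track ended");
        // Returning None keeps the handler installed; a handler can presumably
        // return a replacement Event (e.g. a cancellation) once it is done.
        None
    }
}

// Attach handlers to a playing track: one fires once after five seconds
// (the kind of hook `~play_fade` needs), one fires when the track ends.
fn install_events(handle: &TrackHandle) {
    let _ = handle.add_event(Event::Delayed(Duration::from_secs(5)), Notifier);
    let _ = handle.add_event(Event::Track(TrackEvent::End), Notifier);
}
```

The `~play_fade` command in `examples/13_voice_events` presumably combines a timed event like this with a volume call on the stored `TrackHandle`.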
\ No newline at end of file diff --git a/examples/twilight/Cargo.toml b/examples/twilight/Cargo.toml new file mode 100644 index 0000000..04473be --- /dev/null +++ b/examples/twilight/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "basic-twilight-bot" +version = "0.1.0" +authors = ["Twilight and Serenity Contributors"] +edition = "2018" + +[dependencies] +futures = "0.3" +tracing = "0.1" +tracing-subscriber = "0.2" +serde_json = { version = "1" } +tokio = { features = ["macros", "rt-threaded", "sync"], version = "0.2" } +twilight-gateway = "0.1" +twilight-http = "0.1" +twilight-model = "0.1" +twilight-standby = "0.1" + +[dependencies.songbird] +path = "../.." +default-features = false +features = ["twilight-rustls", "gateway", "driver", "stock-zlib"] diff --git a/examples/twilight/src/main.rs b/examples/twilight/src/main.rs new file mode 100644 index 0000000..d8a49b9 --- /dev/null +++ b/examples/twilight/src/main.rs @@ -0,0 +1,378 @@ +//! This example adapts Twilight's [basic lavalink bot] to use Songbird as its voice driver. +//! +//! # Twilight-rs attribution +//! ISC License (ISC) +//! +//! Copyright (c) 2019, 2020 (c) The Twilight Contributors +//! +//! Permission to use, copy, modify, and/or distribute this software for any purpose +//! with or without fee is hereby granted, provided that the above copyright notice +//! and this permission notice appear in all copies. +//! +//! THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +//! REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +//! FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +//! INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +//! OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +//! TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +//! THIS SOFTWARE. +//! +//! +//! [basic lavalink bot]: https://github.com/twilight-rs/twilight/tree/trunk/lavalink/examples/basic-lavalink-bot + +use futures::StreamExt; +use std::{collections::HashMap, env, error::Error, future::Future, sync::Arc}; +use songbird::{input::{Input, Restartable}, tracks::{PlayMode, TrackHandle}, Songbird}; +use tokio::sync::RwLock; +use twilight_gateway::{Cluster, Event}; +use twilight_http::Client as HttpClient; +use twilight_model::{channel::Message, gateway::payload::MessageCreate, id::GuildId}; +use twilight_standby::Standby; + +#[derive(Clone, Debug)] +struct State { + cluster: Cluster, + http: HttpClient, + trackdata: Arc<RwLock<HashMap<GuildId, TrackHandle>>>, + songbird: Arc<Songbird>, + standby: Standby, +} + +fn spawn( + fut: impl Future<Output = Result<(), Box<dyn Error + Send + Sync + 'static>>> + Send + 'static, +) { + tokio::spawn(async move { + if let Err(why) = fut.await { + tracing::debug!("handler error: {:?}", why); + } + }); +} + +#[tokio::main] +async fn main() -> Result<(), Box<dyn Error + Send + Sync + 'static>> { + // Initialize the tracing subscriber.
+ tracing_subscriber::fmt::init(); + + let state = { + let token = env::var("DISCORD_TOKEN")?; + + let http = HttpClient::new(&token); + let user_id = http.current_user().await?.id; + + let cluster = Cluster::new(token).await?; + + let shard_count = cluster.shards().len(); + let songbird = Songbird::twilight(cluster.clone(), shard_count as u64, user_id); + + cluster.up().await; + + State { + cluster, + http, + trackdata: Default::default(), + songbird, + standby: Standby::new(), + } + }; + + let mut events = state.cluster.events(); + + while let Some(event) = events.next().await { + state.standby.process(&event.1); + state.songbird.process(&event.1).await; + + if let Event::MessageCreate(msg) = event.1 { + if msg.guild_id.is_none() || !msg.content.starts_with('!') { + continue; + } + + match msg.content.splitn(2, ' ').next() { + Some("!join") => spawn(join(msg.0, state.clone())), + Some("!leave") => spawn(leave(msg.0, state.clone())), + Some("!pause") => spawn(pause(msg.0, state.clone())), + Some("!play") => spawn(play(msg.0, state.clone())), + Some("!seek") => spawn(seek(msg.0, state.clone())), + Some("!stop") => spawn(stop(msg.0, state.clone())), + Some("!volume") => spawn(volume(msg.0, state.clone())), + _ => continue, + } + } + } + + Ok(()) +} + +async fn join(msg: Message, state: State) -> Result<(), Box<dyn Error + Send + Sync + 'static>> { + state + .http + .create_message(msg.channel_id) + .content("What's the channel ID you want me to join?")? + .await?; + + let author_id = msg.author.id; + let msg = state + .standby + .wait_for_message(msg.channel_id, move |new_msg: &MessageCreate| { + new_msg.author.id == author_id + }) + .await?; + let channel_id = msg.content.parse::<u64>()?; + + let guild_id = msg.guild_id.ok_or("Can't join a non-guild channel.")?; + + let (_handle, success) = state + .songbird + .join(guild_id, channel_id) + .await; + + let content = match success?.recv_async().await { + Ok(Ok(())) => format!("Joined <#{}>!", channel_id), + Ok(Err(e)) => format!("Failed to join <#{}>! Why: {:?}", channel_id, e), + _ => format!("Failed to join <#{}>: Gateway error!", channel_id), + }; + + state + .http + .create_message(msg.channel_id) + .content(content)? + .await?; + + Ok(()) +} + +async fn leave(msg: Message, state: State) -> Result<(), Box<dyn Error + Send + Sync + 'static>> { + tracing::debug!( + "leave command in channel {} by {}", + msg.channel_id, + msg.author.name + ); + + let guild_id = msg.guild_id.unwrap(); + + state + .songbird + .leave(guild_id) + .await?; + + state + .http + .create_message(msg.channel_id) + .content("Left the channel")? + .await?; + + Ok(()) +} + +async fn play(msg: Message, state: State) -> Result<(), Box<dyn Error + Send + Sync + 'static>> { + tracing::debug!( + "play command in channel {} by {}", + msg.channel_id, + msg.author.name + ); + state + .http + .create_message(msg.channel_id) + .content("What's the URL of the audio to play?")? + .await?; + + let author_id = msg.author.id; + let msg = state + .standby + .wait_for_message(msg.channel_id, move |new_msg: &MessageCreate| { + new_msg.author.id == author_id + }) + .await?; + + let guild_id = msg.guild_id.unwrap(); + + if let Ok(song) = Restartable::ytdl(msg.content.clone()) { + let input = Input::from(song); + + let content = format!( + "Playing **{:?}** by **{:?}**", + input.metadata.title.as_ref().unwrap_or(&"".to_string()), + input.metadata.artist.as_ref().unwrap_or(&"".to_string()), + ); + + state + .http + .create_message(msg.channel_id) + .content(content)?
+ .await?; + + if let Some(call_lock) = state.songbird.get(guild_id) { + let mut call = call_lock.lock().await; + let handle = call.play_source(input); + + let mut store = state.trackdata.write().await; + store.insert(guild_id, handle); + } + } else { + state + .http + .create_message(msg.channel_id) + .content("Didn't find any results")? + .await?; + } + + Ok(()) +} + +async fn pause(msg: Message, state: State) -> Result<(), Box<dyn Error + Send + Sync + 'static>> { + tracing::debug!( + "pause command in channel {} by {}", + msg.channel_id, + msg.author.name + ); + + let guild_id = msg.guild_id.unwrap(); + + let store = state.trackdata.read().await; + + let content = if let Some(handle) = store.get(&guild_id) { + let info = handle.get_info()? + .await?; + + let paused = match info.playing { + PlayMode::Play => { + let _success = handle.pause(); + false + } + _ => { + let _success = handle.play(); + true + } + }; + + let action = if paused { "Unpaused" } else { "Paused" }; + + format!("{} the track", action) + } else { + format!("No track to (un)pause!") + }; + + state + .http + .create_message(msg.channel_id) + .content(content)? + .await?; + + Ok(()) +} + +async fn seek(msg: Message, state: State) -> Result<(), Box<dyn Error + Send + Sync + 'static>> { + tracing::debug!( + "seek command in channel {} by {}", + msg.channel_id, + msg.author.name + ); + state + .http + .create_message(msg.channel_id) + .content("Where in the track do you want to seek to (in seconds)?")? + .await?; + + let author_id = msg.author.id; + let msg = state + ..standby + .wait_for_message(msg.channel_id, move |new_msg: &MessageCreate| { + new_msg.author.id == author_id + }) + .await?; + let guild_id = msg.guild_id.unwrap(); + let position = msg.content.parse::<u64>()?; + + let store = state.trackdata.read().await; + + let content = if let Some(handle) = store.get(&guild_id) { + if handle.is_seekable() { + let _success = handle.seek_time(std::time::Duration::from_secs(position)); + format!("Seeked to {}s", position) + } else { + format!("Track is not compatible with seeking!") + } + } else { + format!("No track to seek over!") + }; + + state + .http + .create_message(msg.channel_id) + .content(content)? + .await?; + + Ok(()) +} + +async fn stop(msg: Message, state: State) -> Result<(), Box<dyn Error + Send + Sync + 'static>> { + tracing::debug!( + "stop command in channel {} by {}", + msg.channel_id, + msg.author.name + ); + + let guild_id = msg.guild_id.unwrap(); + + if let Some(call_lock) = state.songbird.get(guild_id) { + let mut call = call_lock.lock().await; + let _ = call.stop(); + } + + state + .http + .create_message(msg.channel_id) + .content("Stopped the track")? + .await?; + + Ok(()) +} + +async fn volume(msg: Message, state: State) -> Result<(), Box<dyn Error + Send + Sync + 'static>> { + tracing::debug!( + "volume command in channel {} by {}", + msg.channel_id, + msg.author.name + ); + state + .http + .create_message(msg.channel_id) + .content("What's the volume you want to set (0.0-10.0, 1.0 being the default)?")? + .await?; + + let author_id = msg.author.id; + let msg = state + .standby + .wait_for_message(msg.channel_id, move |new_msg: &MessageCreate| { + new_msg.author.id == author_id + }) + .await?; + let guild_id = msg.guild_id.unwrap(); + let volume = msg.content.parse::<f64>()?; + + if !volume.is_finite() || volume > 10.0 || volume < 0.0 { + state + .http + .create_message(msg.channel_id) + .content("Invalid volume!")?
+ .await?; + + return Ok(()); + } + + let store = state.trackdata.read().await; + + let content = if let Some(handle) = store.get(&guild_id) { + let _success = handle.set_volume(volume as f32); + format!("Set the volume to {}", volume) + } else { + format!("No track to change volume!") + }; + + state + .http + .create_message(msg.channel_id) + .content(content)? + .await?; + + Ok(()) +} diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 0000000..0e82264 --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,6 @@ +imports_layout = "HorizontalVertical" +match_arm_blocks = false +match_block_trailing_comma = true +newline_style = "Unix" +use_field_init_shorthand = true +use_try_shorthand = true diff --git a/songbird-ico.png b/songbird-ico.png new file mode 100644 index 0000000000000000000000000000000000000000..f9783d7b1c738d54db5b1c2d14603cef52b71ecb GIT binary patch literal 3978 [base85-encoded binary image data omitted]
[Remaining GIT binary patch data for songbird-ico.png and songbird.png (the logo images, Bin 0 -> 3978 and 0 -> 75078 bytes per the diffstat) omitted; the excerpt ends inside this binary data, before songbird.svg and the src/ tree listed above.]
zeeI&3yf`SiCaYoPMxNbeG78hRHy0h%vitJ={o_xk&o5oY8>}Ondhd?kIs4}(pKfaI z`~Nq;@L8m+&8HKxGp0GJTI?A(1Xvz zXBwC5B?$B%?>L?m@+ndvm&bcP>q{vUw%5}ADhsdXnEx$XbIRqW=p6%}@&_k=-kcS^ zszdLACCG?u#?<(Kh9HMphF#lecjW&n(+~W&T^zx3@{^@jY;`y%MH#eu7r_U*6d|EUy z;X=C-cgLzz>+b%3G1HvibAp8IEQi%Ibz*lhtx(dQ#Qn)CxaQoGWsZxMw$>~=@yz%9 zhi%$^4=*h2T@zz8f4cdbgy;7bN0iB2-P*HVrE}&?k>zu0m?q_3sDEH^x7$jfhU;a> z#3IY%jm+$tqN_Wa_&W`Pg%_I~ovk_jbB~|j#)5}TO$QC6&G$CFSuN1nqABA2WK-&~ zW5+mHMD=z&kO^BK>lj@vyellJsz%0pwNe*TU43h1v9*Mym5-uvZ?4;qXV?GF{<-)3 zr>*Pnd2O-u+8UL5dD(sI-rs4uoE1Fm!Z)t}IFOyo!2emCudYFTe$^~*y`3xUUaw!3 zb@%$Y-+R_&yw+k3U46A`!~Zh%1BV414v5XUtQ>vf@|{~=1`@6Z3Ri8-PUR6^7A!es zLDTu?izlsJ*s(cN`^=@9y$#bS|eBg zEcMyh+~e;jpLkk4rR!YrnWx1nLcTB3L~n~+$P#NyOYoAKEc5jgC)Om+C#I89U7O`#3WS=xq{XIXFil3cZUSBjJViRlVL<5&@7jJ86v%F`` z@=e#a`Eea(YSI*BA5sk)k5dGh2ROh(IcH&=W#TJ?0+CI@{%T zbYJ$1Kl|hc$J&n;?0oMY?wmev`R-!7RhN!VoV0nSn{eTgD7lWy7o}9AbuO-EIk3sH zC`JDAo<%P$#iKqM~d6)=txt>Vt_L$_fbw%(gON(Pq>q`yQ z=k00xb}p=B@2MY8IFyzDvZ#o9PS6zbRl3NbxZlTY?xTtK@0_>3S7y3@-}{}(?;iT7 zwcdaK{G|GPt9=JQ{hHPxeEYNX{vT1&2WDS-^*j0XMEC2T&*uNBw!N??YJcFVtT4y0 z_4V_&8(w*SGFHW(dVE2fl_Ue@=XOAEwRNS<-Vt#uFIH9c!tOX2YI_q{PW|F(9|f&liM^yjbeWH zdGGl2iF@T0uQ|2%c23bw71@8}(YsV09^gLI`yXxJk6~(RbK}0w5`A6qY3J>oFCG>DeE7Zo*ruthYQ>j-G&?S7Xj;YLqNJO1 zXU|k`{oM`me;(PXsCpa?-D6o(zUL34Z*IJ_2oVm*7@L8Fc)?OP!90V5Z z)O55EaST*7dYSod_xI^bjE#SU*mo{V6LEaB#5DWGi@c?llE>eFFMM(#P;TFUL%V+$ z>mS&@|E}-4rvGAL)Jg%q4vkG+c~@r`adHN6PP_I2^u>$NmWMXdMH z_rufwJ-sg>A@S7Re&^(C-~7_l(k#AyDYe=2PfmxohB3px?Z&oTmeom@O_xuNxKgO- zrJ`i>@rdyhuS%<1&l6mA54e0?$?`XGS&=c%cYovM*>(zxGE-9yK3w_ZFQd-oB~hlG z#~vC9{U&5?Nv;6x9BcI0|D%Q_WKL1XU>!*VH+!7{+^R}O5ex4C{|95wP z;3bwSWyMP?McVu1Qq`s&+~l%lm8XS>+xn=GBC&lf^7};`CL|_Rh{S4dOgkG>RegKT zfxJA|X@}1Eh@VJ5E%MoQ(M64e2@__{V1W<-jmX`?()@p>Mm7@o$l4F zO=hTC5P$dKvD7D~*WR9cKe6zmv(ZeMqd_w#P83v{sIe_|ve(iTBI!TQEVny(D^&ZX zea(b(-^wPfUEAWUcztWcO75rz=VgH*oWAP=^`;A`Hh$|Xi@JSV##K6S!D?ea9jy)T zFRY%kp-A!l&)KUj-_J1)*kHM*RNKx|ZLZXz3*0)fHsY>VKAxW7+bcN1b(T_M$o`W} zt;G?$%XmLO(@iC)QtbSEXPZ~g*NX3J-I_h^^0iqGA|;D1Dx40}d%9$@zk@)o z$zCrDA=fYwCPw+4IrAEBxrA@(p8x+%-o8&y^&j$^B}$|&>P)*8C03a6-mdQNY59Ve zs?QCKCmwT4DaxATwYEv4gM~?}xO$eumK#4`h3)%%cm3xl!TZxrZgO>boXE!RE+Kuk z=%ed&#sID90<0%xuUxvoyLR2IKZm-{3-)@yx%JOz>LnF!@p2=ZiO;9!F!(=vynoeASK}=Wr8IQtV6#7MN(X^pMI&4*_T6nX~2` z*~a4+SuQ01?}h!wikn$=A78WYd~_=N<*Qdt#xk=F?-s-~{Bz5HH4kQ3{y&y;;5JJ`m3BahY+H%*@%P;ZHYO+T?94r) z>}z6fF5JmtS+s=Tb5i7n3eniNo^_ueF!S{BEdJHR!c^xH=oENm*PkgGT`EGpEPsrq z7InIIi)|=5nRL$Lafr*ow5+VAg^i21Y-PEzGoSzZzQ0WSejJ@0QGP5wPYTwEIwCV6|6V6VHu7p|8ff{q`Ox%hI;Wx5uevYVzmzxh@0#*Lh8%q)Ku zb55sRJQTOfYI4YGsh1&*mc>;XB1Jw+)8-g03p)CqKS9cD*RvHb+72nstNms?Dp z&JRjOFGZfm6=-g+{Vu&_TYF^rDNmN#vpNx14twk8smXj>eAQuEikiAx#d%)y3m0!5 z-S@X$Lhjb_O{&E=cmHN+|HWr)9QnjnzC!-3tZWMt<8@ijRq1I59a@|s(@$S>_tOhmedJML(?f}k5$yc;{!6c``du1R{+9Pi zQRarZhrNVcT>_s?i_S~Tc(H?*aqq;b$6vlXYf|@dt678OgRlg}4GaN&znA`Le{RT91gxN3C>m_`2}6Ql|_1(OJEXF2#Fz_X`!A@z0qYqUh>=UB>98 zjKl?@d__U-o?mwVthX3ORb^j@+DP$n4IZ>Gv(@)bgFb zKJ8D+d(U-kt@YIWy1(8f=BzK?MI8L4F28Kf>P=ssi2ix-dwcQC)a#2vi#;~6SR8hF z=+ZNNURB@BE8RT2p`NQ0ALUAx%&PqM^6i+o{BV&R)UZI}Hm=gz(T zeM9c$(<|&|`|NB9W4;}t@$A%#Hr`zgNi$2-7IRGMv)5SkE7#5a_|5tU0=9cUPnaTb z|Ie>jo4+w&-)T@ax6RQqjq|^{oj|#607SQLyrZ1vJ(Cw>v-tCJIf^o|NNI) z4~h?T?m2mv@v=$fvzgC#e4DlU)$)qux9^w!lY8|3=ark=de>-t?Qtx$-==dq^~BE0 zZL=Ip)y^)ucdklop)}jHg%78*EIZbB;nvNwckTajau_lHW&ZP_-TvXO^0?#P`u3+6 zHrK9++B@sGeEpo)>-Gs3O-Ao&ddl%D15JH;a=HqxI2fEtICO6AZOi9#iYKIY7VY#hRd+UV zaeuAEm)gJK{{h9YHBnAB7kMT%EDXmN*>ziVdn|IcYYo{v_Y)=&M}r#ZO)sh{sfzeZT+NBs}h0-Mc^knBUypH&s-9 zo=;WQoD7qeuwu8H@wav~rc_TkyME89OWWtQ>ZZ+{X}IDVi<|rLi`k~@YJQ6{ED4%? 
zM3{5mNBQ{YUyQ>i&6<_=?or&QvQ8gUe;dK{`~MP`sjEe7+|#X^oum`JahA)L?wD;( z8v~SL-!0!<&By=$nP^&CiP#st!(t}y_J7y>_U_)vtKqQ+9~`jQT_%@Ml%jX}vZ`6` z{Zp@|W=C%0>64RPu)Uh`|BsdRY&u<2mF)J*-z{K?-IzPC?$^uvGyQ5OPh-tFT==W% z0e|h|>{N#Rd%rPnfBd6!w#CeYv)y?0rk~#P`PS;!#f+tAZMLgqsQj*|`r4$RV1A;Y z`sK24wh(R8KL-V7eo<*nnszNk+_#gZZokvoFjpa#Sy7Wcwz=BbHZQ!sB8=;ZQlijB z4)t}`qMN53ejmQ`!=K_EUoLe=?AbBBM)7pz^ZBQDKHt|Wn(fEB*lp*8qnEN%)Z7A6 z<+r^&tFCckQj>y&$09d{2gHPIQ09qL*q{y+m&t-Q4`&;PB$ba|-`m z?fLTl@HW;QZ->jiB1ezExS_CY<>l+;9ZOG7GpNtAUg{*~C}POloT20P{txSnS#C>X zmT68rut8SdE>dh|h_=e@X9l*mC(pf|z4Kw)YfTaDITe>Qw`?gXJnmob6|m-G`1F`_ zGoS0Y>94l=bfcRiX=>lIuzOV}r{>(RoY1@8=0N+sdfW0pAEwVK{Kh3zD1SDVd!kFH z`nCI~E1$=AtkSxY#WpQbkfZ&_FZ2BeIkSa$-o95C;#FcZIp!8x_Fi69pvS<*;;8w) zpS+7Us`0G#(VM^6^rl~Wv~l_WKYqKOeV^c)x-qBmOLd;lsg)9sFJe=lYa;Ks zsiNqw*tUG@16`ur9$U;Odvf0Sdii*UXPH1>+uTnZmo4t%YG!us>GM$*dT=%TyW+q2 zXXmVa)@^$8fBxm==f6$5`{f=7=fdeM9I{KBzX%C%Yb|)xt)XSc;>sH6xHS1il8}M} z)62aR?@d1YZQtc|zrTM!o_Uj3Q+t2&_IYhQn7YF+HN&hq?UoARIc?yr9n7;4w? zs;kjQ!n*bIxqnLG@rANoYcAfs>3bmkfko2^7pDS|d*2WLzPbJVhCYV>t4$g9ySDtQ zXDa&>FaJYDy-Mtdp@Dz|>mH^hb|33M3g4=Je}4Jey|EVM_LF++mp;(*;heRsvLmhb zzCxb^^DN8F;ibn-juR$E4aZ}>GfzxtR4(+H{q9^^-)yUyueySG zV%0>r7G!d*Q}n-|Keiz3wMRnZ+tko$Ko|>+8pCnQSJN7S1+IV>Q>S- z{Tcm&9g}?175Yya)LbaoFz?bOp%C9grM%zX>C|xWvxX&;Ne^e?w()*oM@{j#jrH@koa` zhd#|P%d=>By=sLW|LHzKu56Yjsa1*+GrhdKe>`#(zi?yA^sU<4jPt*-|M~d6{^76H z`A=?~Jbp>i|Kd#bu+kW5*D#mWH%l|KbCVA?v9{l~b@=h%bVqZg!Xfwnvl&kOPnmDF zjRD*GY+qie6UOJyxsSv1zhjlKgTY(_v$`tgWb8>)0_AnuG7z8d9~{rH|sa) zrCUt4PdJ;_(Xu#VnzKcai)-ang%1wSfjm66K9Pl@i{1KCzpR*;aw(JNs)D5VB)?@R zmlQ3zq_^pna{4TfRXr-VHC9P5d@?wB%TmVX&qRT(#e6&sviEA9p^#z9}zQeEq%+9eJB&>rWgjwT-Jw zwyXXoyP)!ZR@-%v&FS^llB?UddT-zKW$sy@`6g|y@Bd9c)g>kMIoI&=rU288j4%Ay zKSs8;#x^8WnjBkuC4Grr&BD|_!393q$aXUfAjH}4;s|No2B>F`>Y&|K4_qOMb> zPk(HwchYCtqg7t-{(a+*++L*J|L+|?&k7m0;$-Hxy}l*ew_myz;cs=X*}pJARPjxh zmhwrhb=>Q;KR#MBoxl9uey$%%huHVCy!mS){?n%DSG|%6^P~BPxBgpe`~Oe(f8FQo zRsP?dJ?uW6F=<`7=U-5aQfq;sZ)4eI-l?Zfd4;$hEYuYVU9@~R^QV}f>(rmD%)RXt zn9}9lHM8W)MIK2j7lVH@EE!Wa`kwJ=st9NQxXzEMtqruFecJr^4b|4{N1ukeEDlTw z-N3xl-2Kg$FiXz#YI8<@_QPe;>(jq}O0D_vEhca0 zU%9p`Jx}(%|34`@|F2$l*xKV~mUtc!&ggQ|x!9?G`bd)AT*JwI>sXFn^eT#!K5gQu zHhId_slt5j8*wf>WVCaNcktc3v^j-r$LE)*#pe3{_?sva zdo0wZXc=3^gOCf?<}6#Tezey-);DnDL3VC2!;}bBVRgRL4-Xo5R85EoOT5_i*5=D^ z@r?Z3#3vIDvY0eil=oa~3u6o8fA%om=FINR&kH}78lOzLY+`ox^1X{*TQs~BUL2R_ z5aMZ_n8K>>|M6b8Vd0%Ub9Nm!K5t*X;|`-jo5Z!*pCk3ZREmA&yM5sEJKO5WeMzbM z=k`x{{XP6nVCdHGN7$=wH?Y<+EcpJu_1lAcw$+{amTqpawE#ySA9-{)B8)9`YJqKV z%aL{3EXG_dDyzC?o_w@Z?_dDOVFL|4y<;DKw%l95qUtJ`W8B;{IsV3@5U=GyKgAaH zWT$DYUgKh3Q!%M$$qSVr%N_@bc!MT;nWN$ylS^&9*!k6h;vDZbvXvZlKM-IL(kZ=e zZ`1s{_42m7w}0ML?l1VXa`}eE>t-%J9cEDd?e-PfoRr(sY%98)lcTMr&8}R$Hgnl# zZBMJ0LfuO>G{l|3f=2I=}2H75d`ddDOaauSjU%zmR@=LddVV{1IhjW_!v$XO}yx_ z?2<}xW)?fw)+Z@yu7z7fPM2h^$WOa?=;?_~^0DQtS7*r_dj2_BL^t`)49ySK*H!h#IIbL|&@2&0C-<#iwI5@UG zT)?EUV!u)T!*bV&)#fc8CO%4wC!JLLz{)c1{-#Y8`wV8;w@mS#d^)92l=W!9#m_tH z)aQ38oQ~Kjvuf4i2}cWcZb?gjTw@}-HMt%Cbxya^38qf2M^lnzOO8~pkhPn{cr^6MBTKEXK~=k|B#f&>jPv(3%Ky18Tl2d5 z{&WBOpSEYi<4!l$e${X=6I}k&f!SSK`Nf-CNAJy@d#C7i>6eo46FQcEjNxGU!{W^7 z)+6BHDPL-ewvSHq~HDnN0NdI+w2vT)V21z8pz9lC0U*FURQ+nW}3W8+qs|KX7+NZtsZM{4zd3C?8m1yhFxwUA?Dy|0J=&+9; z7TYgZj&nDOaVzvRTXlb@XPi4-Y1*qJxH$j*DB68U0USL(etmJ z=09iq@cI15sQ+i+Ju}`o9O$fLT;DXfm7b)l|N z^wqNus*I*FdvML->r(5miaG8!O+>`==$QzX+M`Q^4NXovO&2XXsnaF2rk(Lf+NiSFt@o(O<&3uG+;qbjUP;O8MH6kh z=fpoaxNh%9wWw(86>>IB&)V49dL#~}POqOC`Z_)NrdjMs5$WqXiLZ_<%=wjJcrt`} zop{QpDVYpY+}R5kBr*r9&p*H$9((ZJ*=mntyENCz9QghD*`#g49*LbLQBUW754(Kf z-Vy1|_n$0zDSj#9zDs4HWtE3R(6*fW=h(HMf4;ZpCd(lqWp}>BtEYS=+PtjR8rR(5 
zSSC|!GiwotOIoVv;nVH{eH>@o9SnM2M=iM=X*KKC23hI-_6^HcX0++m-2Pm(^zyIm zpG0C07v`wA+Ol-Ms+8+{b5Zp5qpjv~)^pY|IDPqC9Qgc~Mec$AYZYe@ zzoX2R?~(J*zfj&Vp*3mk)U%5;xNgSq8BbT1=v&}nRJT9i>Qo1CHfCSX^Rvp75>}Tvy!-t* z^=osTuGhS|xtaa&hr&;u>Kki*a-BMrrJ>bz>GEae6FxERU7xsS&6+jAcDC_(%l;Ib zw&ycssvmX!dHnUcNBZWL!>S=-(sPO~S-g^dePie5M#pBgqK~5H$xn0Etnp1g9d*iU z^#zmO(%0LH&ixDz*uH;eq`s}2fy{&EpQmTqK4)g*mpQmfboC6QR4oOitrdHwe!o{e z`R4zMMrPmH61HUmv2W+DiQPF-)Z2d1rW6kQegD({e7MKHiMb#y^3^0KrKtA1mf$Y~i?Gn(4o-_^_|}hBt>*{}f+yo?}&Ob^q5X zpEll=f^+%&)uwOU;KRS){)~S8M+Lih&?KOvk9Fso6;q~%Z_d8%r{-(bmVc*E-MTzy z$=4(&!_s!H>=iy4kL=YK=Gjy|QenH!qcq(wK*sdL?-V|{ zoMk%=kA6HpkH^7k^3pwvWcU^T&h{t?jx;p*?^Ewp?Jm4JyzWTl^O$qb`nPSWJ8Z>v z<;teSw?%1te}v0be_4K}^0#W2H;>-_S2cZhSv>ysKgFlj?U*uq-Ojmpqxm=0U6s^%QLMq=&D zU3DCfwupzQeSI16tVpf?&*S-gi`iw8;vIk7OW$-}ijhU&u(Nwgp2FO70imHsJJsd% zE8c&Kud6@x!71fJeBSE)9V_xAD%SN(HP&3YBH^kCf)B++4h756i(o#m5g9 zK0T>z>+Yj0Wj9~q^O^Y7t2a+>uy;_1XnMZu!Oo0bebY}jESYdN(xa>Id1}EN8Clu4 zH_F6fc^_B_9du|&FlbS!JTY&2`~(j8~_+XrIo^d!-E0&Bfmt`QF_2^ek+ZnEQ=oZZOVLImbdp~P>tKHR_^H&`+o0y{jQqRO+`iDMN91YfrD3lW?4-9$$f9> za_0qd&-Z**ip{nB_u)lxO!?)nQ^KchF8k?pZ`NQipo|Iyzv>;JFo`;F4irR$xkkD1(XTJXSpf8ER9Bb6Jz3)G%a zPTODkqfBEqQpEE41h#Pep^e;0N}LA#-=_va(@mHE&SKJkiE~`P{;6p|+f+ z<%vJMHMCc|O%s`*`{*R!q?d9kzDGa&E>zi|${u}6LDOtr?&BE?GLBYcab|`e71wjT zXmaYbw`5$L@2OCN`X_o_@kBeZ5sLiOD^gXA9h$fN&uv3HD?>MOSjUBU6;H& zUMVhIv1)$H`3J@CzmzJw#oPzUN+`8I|v$sj@9*;Wp)$6+1LmJ$OA?wcE$aSLvSe z5s#FqU0p_2)k#}Qk4PF-aID|=t6XdK>daN|A3U3BENN4sVt%he+W4GptEBNV*)z7%txV`F4Ezz3R^|a{3o)b~X5?7mG}v za`D2Ao`oyt6rSU`HCt+htl1=1d7b18U(Wblk66TXqt9&;wf=FaRlM*|XZq!P*L=6^ zm~r*>w1ChkuRw(pkNx+YnR|cF^y>FJ+XEM~7hm80F753t{>+>^Cq32uU8mdD{l0Dg z;r0DLqWwa7`~Tkl^Wz`8<@-C_l6EyLJ7fBuO{SEnsd$_G0H7De4vQs%H_k#Gd(y&qazP7hul3-mJ{N8I=J84V_t92 z0`_9rkhA(f{>QT4y$}#O*Fj;*{XHTo{)aZUKIVQT=)dHnkjKx-C-PVPsI^l$?-(e0 z>7~g!_J0K|#?ud{|5&+7EBWIl*O-!%OMT|m=*0hjR$lpR=KhPauM>W6lf5FJp*q)V zbJ9~Sx%xl+XZ}6E?Xp-=&41>Bz{$rhUX^=(B)C8MQsCqlFZ?cCyVEq6zkBjY7Ug!n z#Mjq!@07hh>oLh&A>atJeQkrcp4G%xUNfI++b6xg7WVG%zkUsEapP|b3p!Zy_nv9F zcsMgB>-m9sVQzfVXPWu{^<4I|Wpq7lnE1z_jaUB6n(6Z%zt`V$P;>d*ux0;`IN}`E_dNC!b6yknPX0YJbf&Ykh@L?k4lj4Y*GV4+oo-j%?YtEhdVCRVn5pmZ5 zQAtS+?ez+?&n|q$r9Q8Qr}V4!k3&nvH+@{(+Hj^iUqA8Q9??DfA27#NzV*Fzd-uw9 zGG2P=ACF9~fAa12{zLKqep%c7NnlK$W7n(b-2UnB@45q#>2sZ3U0IJFJ$gjgu#I2V z-@91Y|KHPc7q8VGDx1F?nQfVT%w$_`-hr)$cq9xuCMvsKxOvYdwNy1beEqTKrNPm{ z2~U50HMzFE@P2K1@t2FuJO5nzY>|5_bI;dv)n{fDGM2L6`tf;w{Ig@x?~iTUv}N1& zWNW^I8yeqz*I55;VTRs1?TKDXJ(dUWoj6}bGhZsxjw$$8Ic55E`A-23Z_cd@>GDwN5!7H;nC&sqON}MI+TzIk`4ug;)!z%g z$KGGIbmP3*-P;T+9xyQNa;Z+|?^+cQCZ+1xRIx|+?uUuqOHXKU?Mf8rX*;px@&d1v zGd#NzpIE+oVq;$4=Ah8AC~L{2%r`kw^Ut@Zd)}DVvMTFF-&}ujp9vx%cMoihoxpR+ zlG|0d>sXqey2g1C-piXV{!yItQKD|X{PD>Lo4-wCYidj?;Mq0HI^(Qr<-eQaKc23S zf3kIYdg0OOd52G_Nrc2TEfIa}X|(OJ5ZBL@>*U%W&x)yfc{JzVhUvYx%?;jOG2ZcT z+3T4`PcuY9)#mywl-Am%?@@U*VBhz@^FMF3ez$n#-quyC3>LS!&ai*Z@cX-5!jTCN zPgL=)yUr4kbH`x5;$ilUbyu~PlxSa%E!KIsh(SEAL~!Svv(I{^EjFZ!>)U-dIo!sV z_IcghoT~S}msaxlp4F8p|8mmD{;i3Zm-Y(l9dh%<7H+CyadmaF)0x+?aHUf@qh`DM z&Wd$SI;$6SRW&@hvFNapcEr5)>FE>1S`%e&zV+suQseTlvhu1LBhP|{f0`EiU++5d zxUlZm<@vH7zy8l(BUT}`Hp(te#fBmgk*K&UOL&Y{Jp2xnnv^6YRpg^9_x}96 za;;>?>sxCg_qJ`@#@BRE;qmc)_36Hoy`o!`K}SNJIupHav9?_L^>3nQEJ|hMbs`@c4Z>L;h4=kDdtNHc{o$tn3cLL7Ye9n1f*uQKU%hab+A9&{6 z-Zqz&+it*|hq z=QboRK9O?i#VQxp@ga7(Imrbs%i8=M-<72*+ zm%bG-NigpF{nk2iSBczV_nsx&wza)mr*|)IL(5`y+1~rlnIo01-8?*@k@@GVx7&k4 z!@AziC@CuoijMY94`<(%CD1uVsE@^S`K3oL7hUY`?Y5|?TNATxJKsZ#Fn!;h_un7h z7Gopu=g%xZy~&Q}yDKUxZcOV|dacpb^4210;x0>*vj<9ZL}JASd!9`4Ja*AkRDH6_ zM4v|%e2nuQtHYa~Jxe<@(ID-bquHi6H}=kL?YZ+Am;Z{ehwB9oWRJ8qX{gI0#bf<*y~_RBy{Dgj 
zeo9+kIp$6Jp%+5S<{R`apEv*LFH`oPzq{WXXIv>L%ZWTIt#5y9M&Z*Hc9R{OwJjsA zpJM;KeRFm=bM?0pgRCw;ZUON0%l>BpUA?>76m_r2|rm+A97&1N4gO)ZLSkJ0OX`1ykA;_V76JQJQrA6>O( z!slNOwsJ}$(==8HH*23GtXX3> zU!zY|NMaZN-3upEj98c$9fa;OA3u0z=7~x^rcmMKy#XwGOY|PxkQ18qf@O!CTwexj zRg%=j=btAR#)zC(o%}*=wx3{-OY7||xkWR1K2C8}ny4VbGvVyaFD2U+N?XNtihRCT zJ^$@)zDsu}C*D2QZ}D!&V@aD731;>yn=(I(mA$=lbZU5<UwT*qey=Bl-HbG`X@h65MhmEV&`P%ylryPME3Z`j{;lXxB5PPr5}6t&d&Lg zJ>M&%6YY!tuQxxWUY<7lf5ON2^_zBX{(taG=JIb`Eh1eF(^3UB*Co83_VrHref|KE z)&c{U6-Hjg#ll@mkNG8Ct}0zy6MOz;`kZ6s?|z7_4%1nd<-odBb*V&~pL4CUvU|_6 z#haPupZ5<9Rb8i(m|E5DGc(!Yph)5*c`NL%`hrK!898@$(d$Rn5 z$w65KZLLX<9!?UuK7mJtJ;ieiN1?4vj)><2o)YDT%+wPz#ZpyWBW{|^@>iAMo9MH| zkVn4ifJ0E~u3c6c78MQRaX+j}d(ZxP&^TXc<+_Is&fKXJgPI<4q>4U$b9?u6+v>95 zu(*RWjh6}Yv1jDvE?l^A-f{W=%n?;m*T&UMKg66m0JWq z2zIe>u+(Ji>RPAW;H)Ed*d(`amPsr3`TcX^|Gg16k-2m1sQ6rW9l_;W7WEWlb}Uxw zNxn0o&}C7e)vjHU`|Ei3|GD^g#%kMEu8wN)PUqbx zB$XX@o~V-I$y4Ad_n8?}5eg-_Oo3VgAAX!PM@)=ArXB zb7NzdT-HpRQ^|HfaRJ|D6VKGiJ@VF_-g^I-&g%M}sm#{f*Wjn{!ac}k_l6ydTCRNy z`}X$UNz?H9&S~4ke2m+dWv6V8bo~9q!s(BZ$f6ZHI(*c6^#vHtAO7$=@Y4_GyaUyejlgta`9tJVljjl3G+}*}UR=#j=-H zn%M1^kNy61;c2#m4ie7h8)T=iTc>CAl4l;fvRmJSqmlX_FWrxSsQ&%`giVWrUh$g= zbGdFPGP;;yd0fW0@BaUPfnMIHZ|qD~udn&T@6yeixBG3{vvZZ~`_CU24Uao%eBRDg zMt{qU2==KA8Ld*`is#TZk6x!2~)g~K`b_H=5m-_zq^ z;be5}+`HWBo4dEqj<9jGntk-}(;dlv2_G+A?U6N~_-a;>j`8`|3f?Cst$6o+k(Fw} z8atD?dDENjXd*Po9lbs|KEFl-%DqDAggEGnEZp~iOu{H<_=|z!;2k6lh_{o z?>?LG_0?9N*=1UX+j(E>zF|n(J-C0KlWV1F_O&$&_bigRy~)62V%(Q6Ulcrz_BG5eF`7C3`?tjExczz# z@vS${?vK+5$eMNSx1HytBU_mn19LtVEeVRX*w(#{^(U>eWDWEU^e&jV zA=drDeO`OU{TKX>Og@>m>Byr(p8fpiKflO%zrgzD;V+y0(~oWnT@$mXY5xCD%eNfk zQ}QnO_w#vhY;3QN^_s}dZOfLq$^Ab3SSNmGkEnOJaq+X`>#Y|aRAJeK@q0ppbr z@yUvg;pN&JD`weNzxr`{{{KT~*4NBsG`(65{gWaWa5A#YVl1hf#9u$F=AM&A??1kS8_kTi z!`1>Ye$Zy&c&ISJ(Iv?0KV#-c*Y4>BcejQw&Yl%;KUCym*lX516*3kZ`lqg*Hu-E? 
z#O^ZD)TgN@x_nA~P3OwyE)n*8_4-@!&!^Kb-n{5oT&z9O!$srIv)}vM7A9NrP4rk2 zzQSbg*+y+T7(r=R{h?~}v&RTFhiTkL(J!nEks&P6*^D$QmwbuCfy7FssfF*GE_ z^XSG{n-~+{nB!)?A)&`F@QOsLP4ro)CG@hSE#{n4sAMvcI zZtt&==KuRj@Y!DD{`~syWs^Epiqp=go||J?^EKQ2^1W+kr|Cv(Xm);js(=3=_j!Bo z(yIY_JD(WcoBQ1?Hy3mzd3nXRo6kAh7JWHlW%+x}V~g)|5_RG>cE#KMvDzFLd)`}r z{~=xD{XXSy81BZ}iil5Dtmay$zUtYc`EeQBD?L1vYPAx+#JcZU=DA{(V|sMYrA|9@T=gQ*Dwe}5JPc&9uczi%RE&%X7sc%=~{%aWKc@7Xim zJ!caCYwp@w-|I&_%w(GM14UXZln#pP@0$_vXT!$(fAgm?y=pY&5W2wLaQl;eefx)# z{&)H;=7`pc&gi-^wa@hf>kt0Ne;IZ>{FPn#>3sipu?nBdXRb5pG(8lMzLx*s*Ub2& zKR*hmOb>rxCAapiZjXfBoMp?@LJqqZf41BA?P;`S;j5KM8nG^C0l}HM4YZ3`D-+D3IoU1?U%1!ZS8KK zvhn7IB34)C)hA8}?)$cN_m5+`=AT~NpZD;F=kZNtm!s0|)v3ns`?*`D{?JB-E!#{B zFL9=CD!*SgO(*ii%gg7EmTj(>cl^f1$2ZD1$lmrTKC>ii-k-~B|GYV<{h^pi#;U%qSf31$+-<&?rdQD6{>uH6ynrjT78@JZ9b?*Ov zmM`7)zUzl6`fPDbuLP8o+ppT)+A?!xaDU>72?zH+T+)7^CA~PZtxw{#Xn5SQ+xh!X zpPRdzvwbnsua&DJwmD~KTMHlePdNOmcTVxS$|+N)UbuI$?ez|cHva-n5201h7BAl0 zyXa9;(M_36%N`o{+;fWmFT78|i%IFe{+@gDubnB(QPfsFy<$_(a|g5Z#jDu1y={21 zAVX_s6U(k;Nz@_tc0KcYc5d$3Mf~B#hk36DMn#?c^z`%!NrQ&h>vqrF|NpOL zpKYF&yR2!es<&8>@dlYlqln#UlA)oYTTG9?e79xh*I!Zl>fhh3`Keug@!p+|HK$(K z$$AQ%EUVwLte|Lp_==-iXC8e&f1ly@#y}HR&Y3=Ht{Nf_w@g0&ta1LIj>Ua)hb}BM zePSfHNo%6d(gVRiPx{v#y?g(!p>uoBvXvWWH~61-7fae~xVcFp>~g8&n~CB*Qw18d z&lgr1GHlEJeDC+?o!{B*E$*D&o?y=SU9o0)WB<)dzfH=PRR3ZZyz{jEs+P8Px7f=xF4~{f$@i9AhnBVruq1NsbHzk!O8cg(PiMh69&#Ecs)|H*=?%wfx zopr|#r<}VxrcT-<)W$387JJu`?FygbT(3o$QddJmx0I9$a<+4_FjW?wGMVVHq@3sG z+2x9@_v|||ObkKpSvqto^UZC!+@eC3LV~~a=e=I2;mF3Pm3q&bThcnuf}ybjZRNX!&#O*6 z6%ZPEl>PI)=k4EP4^Llb(_DOhmfNLQDl%I&wZPq*qyhZw$4eI7WyFZJ_r9LUH3zsZmIbPU#R9Iq{Uv#$iuJi5&B8N(2g*sWf zloo4b={v32aAA#&o#WZ=86J~ngiKM93^gr?z*{) zwB*4vMy*XM4?n+NQTbuwbou6~Dh52w5jt*a*@h`M1is1NINEt!_IP7$>70s7CfC-- zpIkHj|GD+`f7540@0$7lU-g`-S1WsDi*4jPH(#!$K02xLJpKpktG+o0o?kv7I9bH*(&Ihm zGApHXZ|!N7&YN}fw3yD1-?=qyjwTW+_ilQoXEWD@CLWV~{`v6lx>H?CUsu#0Z10gb zp7-55r?v9-#5&mSgDI;ZOK^ij^eP4m`mV-wMDSG&C= zNp?lUqg@*F+9#iEc)sw-(vu%lRP?)@*sc~UJWovD+Ni(A8U?X65GklZl$5Enf&(^{|cS&8%rLx zi-=EuG*fKT+b?3t6OU}@e{I3^P<>+lWv;0&v)AvR_tyHKf9u-wm-}Y)ADGz7u#Rz^ zb<_9c@AGVzT*^FW`+UtkGntM<{mWi2%74G>NcFCa{QQI8eA3R(J^iNsUvrLG@9SMW zQbC8G@k-CINHbaJF(JjsO>g$W^dE-4!3ocb#g?UBym{4i%ZeGBPH{!ex_!jyda25R zcdC;oRQtPpx>|c&^bS+kvWK@S_v~7vRIPAwL6*_H@D<`On6%zZd@d<4DRWz9la`-Z z=%k4gH|nsvhI%d!cIobZ`R<*P6lb!8T~ySpl`ADroQwU<$#%#`#=5Jz?vr-fy%Njk zbHB6BtAFU~XZ_V=+59T2^?ScbsUBU*BX2qB`u~so`|6IHSJXc=dba<++4q|FyVpeT zZ}q*sE#ZX~`>St7pLLC2zIt)s{df68+ppXIZ@0U**GX{0|KBQ+2hWPLzODOHy-4oS z!tA<&Qa3*Nvu|#$FSv6ha7y?~lWo^F*FDbr_WtdoP2TI%4()h6(PpmU5|en`5WD@PA*B}w3-#IsjscdH*j}e!CwE4A*1JZMWZ5JF(!Ie#8O(cmEY9EKd92Fij`6ajIuCe3!7I!NPRs-`9NO*IO;;)IV%hsouq}*s1&B@7G)_C83EaGmroK zVBv53`OL(qW&gAGTLhU~1xX+9JTO7IWc~lN`~Tk6Z!EZ&)Ns#k?}Rr(x$>c*hCI#d z&gm^WXrOQRUCUkGw)1&>P2ah5W@tytBxW{1a)9qW{fY9bGo z$rRX{_`Y{9T>8Y{SMs`M$HEmSHZ|o&Ml8GV@^or$$Wez-(+LR!arF zA1=QAei8TXvYcmoKA&TK(C`0e=H+u1%?Udug_toPF!r}^7UZ~eS@`&-H#P1qf48fq zpBH{4%{qszV(%hp)@Aasmp|H=+dpR9wC3qzZPw!-Egqloe%bdj#puG_c}wf!r5`F) zyxp44*c}lq{npp_XLi>vr-xg_zn^he>!|x5oxk_!_xHQs8>~InpXw}NSA65GM=nFX zh12(++n-tRU5_glzP&B?xXD@P9d|=q8AA_#VzK$2@VD;6gNg0)9;{T4PyKr;_e@o> z-pqiG)Ys-8Kj_z;zHn<{%14VEQp)Wv;kzamD7dK2bebw^b6atG_v7#09x07F7aRIJ z^gV8HBp#EMIeqA=tq%XDZGB6ARB1olv}nO9t~9;Q*E1{TwWiM8F|XzEtsN{)fP@{o6<)Rr}C9PZp?+Be5TWu;_*pWbCZt;>!(&h*Q8Oq?YBvhebbnL3M4 zDaX`)J-TOm3U7E^HMj6F-v{OV|C?sU|7Byi|MS}QAFr+-eZP9T&ZBwnYv#X=F8_G$ zjironrOn!}UtPMjetwRh$K-I;Cg!U1B;5lCi&bT!g*0qFNz_PA@bS@hJ$G_qac<7- z!)CG~*AK7v*s)FZRx4{9ON`3xLlZ=*Zq3%vG}F*pm2&)`iRSf$m#0!)yxKn9dQ-XQ zTfcZmn9Bv{IuYmN6Aw&siQduwOWI-1zi6}khtp=Ke{<9R?`W;Wv)Da}S%S;Qj_Lc) 
zc-?s(1uvh*FIu~H?RN9|#=eS@d9xOAu!y*JzkHRMbS`>*(aEFJHbhe zty9`JHvU)s_u+}NjA^FME6E)$5CshQg+&gH9RL4 zWE%D5cD>raXu-=d2)b zq}aA-nI?zN-#VSsi&k%*y)jw+kyXM>zPfl0na+X;fu&nat`+}GoqplgrK8*5@0q;+ z?^FLJIu)tcy6059dim$`+wYQ=Sv*HwMQi@Pj(_YO9?yH=P%C%ktC!0eHT3n<{~jyd zB74X6?E=S?OE>pO6x&Y>+v;%P)uS7Hg~5kH|B6fx6Y8BJpsVY~ow-GI_sea*XZ9$x zD@44FSD$b)-F;5wGoG@y7AHx%~y=Pc3Sjcs9-OPu&F35?|Me2aH&w{DW9ZBo!w62=TQT^Z$C6&#&!E(KC|S|NrJ!zIfy2&Q>k)`>9vg?{_cEpck|T5!|g#aA+87Hk39c8`E**9xpg3m zXz`y_ixiZDLv0(}>pn~SS$<3r(~UY5`8?jyLgHY0i>Lbk`0D4f`R<|b-lwOXpLh54 z%e_mSIZR8IepgeF-L~_Z#jlTXHtfI2WPJ@cYq~J2J+{su($#`3?G9T&k*nM{KXNO`lt7Ww&3Rk@4t- zIT|L6A-;>ZRVgWdtWy${ZxdA)e1BNAt7B7xV#SBwf7ZkpOkcJvQ>OmW%P;(2CipD* z@U!O0CQBB_1?#uBS19=f2P@017xyfic>ejEnBs$)I$5`7Z+R}uTOBAcNBNj9r+pJc zQ)_RdLX*X>Yc0NgCWV_P`-X~4nDfT!wn@O}pJ)A?V=sUF$-JVgP3M$^naU;2ntuYi zx^JgUo3`jR7cXyVM4PX3t!;L=PO^pA^Um~n53fb%Kiz%*pVeQ_)<}N)6Taqm&OG+F zadmfBx8M8LOs@7z|D5uFt}~V$js6_BblIY=sovrr?!L}bnd|#_@?8%-$%|rvMXaBG z@-!&~hQ&JXSi%ylU+~|yd}G$Zm|y04k8i5ad-!+z^Ud$=ehW%_emZ!J*Jqv$>*ZTh z)-XTHn573^61=G^m3#Wc^Gi;oJepH=u9Af*kE>NjZoc$svx1$f?h9OlC$c1dUbJ3B zWMARe?G?Y@ZvTE`&mAV=!bg9uZrpVsqMqqbmU&F_?wNo8bjK82OjVienI5!~ZBo{w zAf28TW!bt5jqIDV?q;2Ot-Ug9-EnEIN#UYCvCdQeay%{l^Xg!A<;&LPGwwumXG-w! z^CvD8_U9AgY4ub!J~My+goh7V+Oh<=S}usNUYNil8|$eU!KI{?ckz&J$B(^ncNz>{ zy*XUgsH%J;PGvfO;0FaRIUna6Z$zft$>Zv3N}2gXMO<%s@9D=(H(OtnO+2frnc_ad zMby{#!DL4R-JIB^I!~5uvU$bP(x!Z*js4^^qd6s?I?vQSOs#U;W%2unu;ueB+&@p& z-(@&3XaD{q|Np(8dnW#a*wUqn4F6tFoBi{luRVi8NTBC!hpuydX0u++4k*s#YUSyy z=_<%&4dwNn5)yV{TbbSdJx`_9?f;YWEBwsDLvx=#Io$uuIy}xX*Rvsd-EaOrs|>dD zwvS)Djb7Cj8L*)@aBZEVL}`;%z0K=A-I=lyNx#lCSAN^Le50xK!Lo%f#YAE}wu$ez zcZy8q?c$6KRiAkF!_oD1t<&S`Dz~5A|JLEi%0I?wJ*7qg)9!|t<+xjvJX8Ghb!R#U zyHks^;)K61cB-f;<;_3y)x37TsjhZr*1U$bx4(QVfAsyd!i^T?z)8)9ZC1N?{O*en z3Ji4j&*x7N?moI=%?d@CJ_92opU6r};aSq0EKEH;Jr9aHUA#|UFu6N%tr#my%nlX( z8`qw_GM@c~SJm^@n>}BC)ZA$6T%`41BsM%iL}hw+!~OQel^L^|Qg8oYX1w$<(P$-0 z+oa>~C$3zTyI}9?{usOd)F%g?F5SAh{nRS2U)8@mj$c0F9%S?LM(`B>%m**o=Pg{d za^hF{H4mll@0hjyUX8SC;i-)KWwZft0b+?Fbu#B{MfC7&0uMTgA11 z$MfxPHlG*qS?ZuS-@WYgjA_DmmS)bH?5A@};Uo*QW1sy?jjZg$HP27Iw)}NuvWLo} z9kz8}dYn8%r)hG+gYatJ?`3 zfBf~~H;YsI%yP~-vL6q8Hs@{kidDOQCZ1n1r=ouT`Sw+-{MhbhZ1kS~}+Cc_F?(* z`78;iu4o=f{%iAX_WTd=_xDUmST{YHf0k3Suc+Pp{;zjq|2*uR{^RTA{10{B=H_=k zDT|zb)N1J``T5{KMx&WLaqF*N{4Q^MaAIoZ=D2#j`HT-%X|Gqan{)hi@gKQqsfjPF z^yW|Y{_r(F{$%C!c?S<2Ew+6Arug{p!~ExrOa4Yk%Ab}!u=~xj&1dE=UAV9@vfutp z{ofx^PJ*2MPYzyrc;vEQ>b>2c3s1`(VwB(eH+IG}H;ot07Y@Fix^l(JXLrl*KeV16 zbBa~GwzF5d%Aj=OwQr^&p`nZc(b4S+R`Y z@H}^O`#S5X9Wj#@8|2IqoAl(~9N!(D7cR;18eS7kH95ekQ=;N|M2^?RXo^mjox7D? 
z)6&;hOiUI=D*a{tYhoIb$eDL9eiPHp$II5QZm+O$jm{QdnbNkeQEt+*Gs^vTXP+FC z;)!*hl)CQUv#WP19)nI#nyCF`X5|OR7H3DfnO`j~f1i8qAp7~0o10X(tcgGVi*>!h z|FzdOzs?Am?k3?O?xD$+uNUR_zF^X{c{a=ElxUgX{gUiwadM`Sm{qLo+X~yfKl2n8 zY2==pb7zZ(6=#@&$AnO>A5Xbb)^~U7Tg~P^XE#~D?yc0X-&KV>Yo|<^dcpX@q0Hwp z-TF@!mVOl2d1uFmZ~5HsndZxKA5PxMyh6(P_U!Dob1i5^muCWB!S_q*+t#%hc^rk zJ0D)r)finO{Pnj6Z`%ZBMVFJ!_A+X#SJXUPk*smV{Y75v`AqeBCt9D+Id7f6=j8kP z|JAc*NiqI-_(Li|yDcB>I32k$%d);cmrs38iOI)50oFj@J~-&hOhX0{`+TNcdi%YsgRJ9R1*3)+2dHDx7?{yrvzNZXXdb-)|=q7 zM9{S{K*J|d<6OeMWv06;Pd^_V71WhE^GT58tJkkIM7UfQ2ip8&=v*Rv zEIKI4%Wk^5kZx&6wy*U3c(wWdN*r=JhdFD%o?u&cG)<9LMeut@tDa{=C23=WYM0&-?dz{)Qa&so$2H$DTjnz%KfaY5KfE zsny}@ms($Io>;hfTlS3S_iH9kWSUcbO!LpLee4-Id29(O3;0iOo4Z6c(5Jcs$=F(oXRX zb4|VuPybWH2TU?2mHtb_B~M?pN^5THTMs!7R;C%#92FicUbCjB;KIq?_pe^5n3=R( zf6W%XVA(FI(?%OhJIn7^Pwt$ocIm4n9?iigMbwARvIt zWs%~gl`P82hO4qyoqV=Q!OE&()-tQ7#{^D3=juBguu1l#|8M!L*|Jv^%_n&~ZJ#3= zJ1u6$u14L66|4H(za>g&y}ZKVxL^LV!A6}4D(1^pKAx0fBR6wmh0N7Wpy9Y}tf#zA zXJ$vb*9J55+y7DtuX*ZyrnXj(`7iUE?eFdW{8%O(xh>0d`@JgdtAYGi?Pf0%Y7XYh zR(kXOQ@j12ncv0NI8XX>X4YS}?&x-lpJ%!&e|Dab8=KF8Iz2Y}Pg>$=3{<%lSx$5bMHXSf1Id%G!Te0Gw7AL{DikFpk z_5WJ`hrInRe2Ak!vCr#*Ub>Dr1M3cc^+hXFzC>xA)1P!$P@%yxJ zT+8p>%scq{Yr&gN_f7v7{(kiJS7E16Tk3*LuEU27RXp9Um$tFBpLlYqVy{r#{OLJn zt+~zK922ik+OoCwfTsHVE*%e^<4GNQjG=)ZX+avh+ze!tM0%At)R@>puTItQ;yd~F ziSmnGP6`i{=b!I=Ro^ACtM$J8kqdL3X3V}ffurxh(##nxvqowu$Z=a#gm2*2tcGvSMXn%DE{{^8z$ly12J4xOmyP;rQdj&r4s4sa!g| z=j*xKcfSArz0>vW`7aC%vhAKOjv-$zou2XcPVt?>@Bcrwp4Lmgxkp^XoXs*LN_NwW z$;XV912QPkkeSGHU!a0`n4p*v$$oSd|t4k|!?%}LS`|`h+GkNWc z*{tX8bk2`wlG)V#>(jHht*#p5a`YVR*mRh}s_E%D*kABEK}ZSHNA>zB9B zY?t%ORPj-ptR_<^%;`RNQgBmiLw8Nw!RMd*+kR!aEMlD0sd8|Fc&vFq>D7x@q-%w> zy1L$!DLy*3iB*7AAf}_DKymRbHa(k`KdLK!@=QxysA)BcW!18PfRL(Rzh)SfN}Y_* zU-?8JSnc}c(-IZl$|;-0mdn($?f?1qy~kgd<#~rx=KbBjKPWorTz!14S-fqrRj=>l zh#gh7+2(wc!c)c8c|Oo8Ha4<36c8;wvCAk#*LThOUK>57e*%ki6+48Ur6r0m#$0MI zGwZWhl_k|-;c6M;T)ss%hg(P3anglS(d8<;*`15E{><9-xa6!>WZu7*-)DY47ysk( zn(zRf-urh-&%c`yG&|x@p1{GIfyZK}&aY1t`;dDq!sp9`nZESkI@t4t!jctzZR^HXu*B%q+D*9qLKlS6G zr4h?Iu0KBXdg?{f-hKDvPMuzTB9(haTkGw&2e+~MoOUZaT~W)(b64hUjL60u;kyAB zOJuj-e!J&mOZ}DQ*G+i)?{iCWOq+aXQurqij~A?>fm;f@^J?PkcfC?!A!Lr{^t5)i{4hbhSqc*PQP^*MGWK{W=G+`1sK2 zz_wDC>Aj5&x*Q4?>f-Nwr&hncX!Lp!w`0-2JQE%P^~j1Z_g$nT%aUKorL1Sa$-L92 zF~H)8aDC;Rg(23lf8{@%`y*OWTgS$B%R~KhvgJ~JBE z#P^lxedX<~_*<8y^~6>faYirY0D;s6;vaci@@MP+GcGV^x!m=MUz(9EI6u z7p>w_RSh)pmF}$<=U{6-F{$VH>q4!oJywVBEZ)hwNaKOXzKt7mc&AJII3HHtebJZq z?u83qnRcxc;%W6*8uX&g&q7FbzIH-{pHQIU`h*BACZ*W9%`B^%dX5PD+)pl}o?SLE<@z(PX@zdrK2g>{h~n$ z&tWAQzKvC1gP0uEpC5S7|LI7u|D=;?79Se^UNr4~_RQ+ksy}Puc3rg0m5gR9e3j7? 
zeJpmyJ9mR+bq%+6aiwhU?%Ni|_N)42s_~K_%?&ywzRps-+jT{_Ks~;QoQ@o`lgP18-jI{WjFKbgM^zHu6Jy8ij|g1`Q$`T4!)65<~BoS%Q%!SCUh z^YMP?8usruo1NVElZWAb&#@b;TbZ_*&f=JIHd%_zZoT++oy8eaU8|C2m@(FOEU@2U z8G7A-hih@xiD|isGuXB%DC~$)usYuGb5GZcCU?2Nw(Ix)vst)dbEZgvvBeC9cntyR zmrIuCGmAbAy8A|4 z{2u#Fx0@9>UM%ao{&?xKU6VXioK%EVMffJ_Ypm#E+%P-@jUvdqI!{IVS&W_^W6$NBlc9-JuF~}-OkCf$3#$mY`x`HJ#`vu%F|9|Yr$=-^+d7oN!LPNLm9QpO~wS?U1 zm8|Eu%lP`XeBY6>=RL!*80P|~&mlsry&0vvarHBP6bM~>>iUVbu$oz-$weW=skGE~ z>C&kkO+I?z3Q}K%`Pw)5NY6jsfBtdCkE88EDUU3bQbk!7Cq6q<+dBPzMPI@Z<|a?a zlM~LWXXa)r&pnqYq1Lr3I5^n-guKf1bCZ12HT89!g;=LJ$pv3F;Fa~gD3Hq-If=94 z@45T0wRSCABvvHC<;QqA$I4$ZU4g^s=~B^-rr9N0{ZA%wviw}zxKL7Zly6 z>y=wa@~$4d6L-utc7OlFmW}%y)Mh_i`1^h0uP-at#O-9by=lTyshCAA>sGP+u`}^i zpY6L?gH`>n*yM>7_NVXXyNa;%{dn{62eVvCq+6g!RaMo;bX9>pJ6sx<&#&y9u)9mx z-=gu{Ilohde1($krXIChBUwEIaVfIGlBFBU8?l0s$ zCI*Y7_|NFcHP(D^c)4oj_w)S5TbImPqu;TPz4BRW>z@6aM7P@?;e0N0^u7FF<*5^; z&i%c!Gp6{gDNp~Ag6_PkUsa4J(k5LoGr4&E?#bHyzfJ3Z-##4@Hf?jpN1L46OV+Jx zYio;I_v6y_pI`Tue>$PeU-;u;yJ6*h&6>~I*FT)wz5dhm^mOAJw+}P^mIv+ZY+rZ# z2Gf@>-#%TrT>t3S|3Cij#sMYEiu5L@J=zkLGM#J*po#9C zOL^YGmUZQ^8SWD1mA|4FF!a0(lGW`jy7}~Z(TRiXfuXU!YE#`lc<4lK?Ae)r=4k`- zH}%__XK&jlm_m&~9t^zDtSz0SrX(|i#DsULg@JP&(3RW|@bp5{b z((}iC&F?rwR$9J&`Lg zyYD}D7l~E4v1D>(%FW)}Z`CFiWXYb$lMD@Z6u9^)GUxP>ZMvU+GVSzfyEKWTGi6eN zg^$`~*OE#@!w6i%+&r|M0K6`vY6;o8A9;9(7vq-~Dgx zXZx2$Z^r|ju=SA#ubjMmGUakl&k;!}sU>?BO?#?6ebf7W!4qwj{@OaTFe_G7SI@A> zWNNlOe5dkwtj`>yxxKH=3*P6yfBKJI-r;)buU)%4U6_`9mekUIW%*~uVU8&tDWA3E zZuXi)350A3S+q1zM78<)>*dAH`ls*Rx0n{3EzGrhp~}5Jqj}{F!V-_$VZ8ExjHTAAi0e=Z2=YTGx%RzE`ibLe@Gc&0Df& z%N4;70(0KShK4#YL{2N%$s(fKZ~s7kD*NV-6*ex5164c^J^%c$%zMe^$QnC+RbFAq zj7WEbCuy%7*)Q$Q<9%2##mA~M^|pqa++~qYm(?eqe6sofK|gX^0_Vp@$9XgVYVEK8 z#(Qmh$pijVbBh0U`phut6xF^i@b&kOhr8_`JY<)DwnMlt`QM+Pck2KDm8m$;_~-oj z`eT#*Z4RH^{%6Abzt8KLEUYz5-CbQd-_Fh5vVD6{WaQE6&-b7H;nsh6;c)t-lglR9 z&>>Q z6nY&M_JFf3khi%+uf4Zp!BPXA$sR}iZR`%MQq4`a-D>vm=*-h^cfW4BeYm2GsjmO- zmfEP@WxUJh7AozmnU%4mXMNr0ps3r|0^-6By}EjO#R@IeslEywV$0TE+O=`2srRk9 z%4_%&Z=1bLY5Vt%@j_{u&+M+1k_p@%MVxD6-)k$q7j0YD$joQa zacPpl>1jGkw(Sx+nR@xI<$+UESAY1)6fz||FznU=SMm5WtJm-CTbB9Xzf+NyMM>)yk-9L9o@tEPaZM2yv*lWc==s-@6%O$woeTDY`?JF`+0lw znO|r7Ei!_poZerc$~|pe^1oYt_hPopb58Sl_HK8*pOvV(n6%TBlr~|_HsQ<|Ieoo@ zM|JK0D2m5cD3;$TRNw#e>)x1>gRN((f9EniRM>Re`E{G0&`OD3->omI8cScBZTjEy zw?q9=PY+M>?FS!!eW+&5U8&35DLqWy6X7T&Eonww(t zn<2g`L;o{}%=N#?}fZBwSo72vg-Q;a$ zYVkh3B3EPKIxDAm|2dx$8eZ@FAFJkjtKD?(j`F%1q=!9M82KAQn@0DlzJe=kta@gtWmcWm`jm`abeaif{O^5kxdrG1n zJ{6z;boc$AVzZP!en{q5>NZ<`c8;gt%f~yE`4{cl)3iLkcJ}o95&llir}gcd_3IxN zm4qh$e55+X<RZPP0A_nl;YKKFS2{~uv>KPJn6 zKE1pC;9q_lmn-q7R_E_I;{WfrRoL1%r}+9w#^-J3mRR}P%~zfssHw@hIq`ATr&b=J zIx*cCN3E$hpBetUb2Gj8{NDYGc4i7~|Mw^C-JZu~e)hk4ez8@*kb9&2>hlDNQFOC~xl$h|^4RsWy9SvA2x@semH%5@$t9cKioVocl?g|^M{lE_}tI8H#!-o z=9iOZu~dMIGU}GmotoeCBL%&S z9yqcW=G2;AUe%=X=kPm$d0uNK*1>shxqF1Wd6+qXqej{8IxXSWnS zD=G3XP4MoyE_=ZAK>qg<^<$GZUE-L${E9GN_0E^iq%A7`&)l*uS9QAggg*XDHzGQd zQeM3873>XJq+!LX>S{K7>GI|6yLMSE^SjAEyMD^eOQywNL>!WXf||G&m#ox(rQEM))uf8WRdJAS{DFaE{JUhpry|CxLF{qwiJDMq!4=-K>_@w5M$ z@+oW9&HvxU)yz#USuC7bV%MJbddvFPZr6Q2tvctSmg(y?DI& z$IZF_onv!lpFVwBQetJ-tlB-j=+nyM0-94Z=Gn}RG3#BFwPkzBhLnRvn`Ykq<-1S) z2jAlYzy5If1E~iBt9J<}doEmX>9%xoh6eMt_D`F#q@<-hRN4bWSr40>NHJ2GEE#fD zL4+?k(n?s}Z_&bq7X`Vl^48eN-^`g7qScvbk-7P2Q`7y|X%|=V-`KFQ_uVzKK8t4_ zan-eaH!Gy-FPKMheC7Sc<~Cz@0kh@hN2}J?PtD#}Y5MoC?V6~aOxzPE?u%QM{d$9- z!;}xZZ8H{cN~m-_s=DjNix&yc7Pbq0Xzx^a%gD`7z0~5lCTeGw@p+r(bL;dbb?Qu3 zyDrRA{o=)oPunN?cD(;vYtf`r~YAa7F5~Zz3*Qf8ylO3reOovwz?oYMQiBLT`IZ*oh76av6ffJ^PQGm>FsJd1ms=-`gugu1zht{dWI@ zh3<0dW-}C`LQkH5w`Z=m-rk0t#rG9vdr$U!zH)VT(&JlOWy+oerWje8Z7Vg%bYfl^ 
zGReoR`}lju@KP`DQx#vYUf=omoAr;Y>)$`ze7vr$SK7?T!sLXf`rH$3ywWGdKjqxn z&}h0kDq???^2UP?{U;guGpTP>KhfuYejneNyQ#LfTzkUh55ykuZK#(1x1MRsBbm)M zeT!XK6qw&LwD&sxKh{!Wbv8SH&(Z9Bi&h_XQ}a#lucYzKGnBl!_HmhaPR7rMg$EU- zUQ0>HbOncow!SG;=3>gb=xrh4CZH0t>C6$&%P-&hT)c4M#HCd3AN~3FJ6LsMcM1eg zx|x6Mp-h16(>2`o?v<_$*FryE&X{)KK!W_s9HyJk zkBhl(+2g1pvo!Uw(X};khg+U;MojY!FE^jHtnha3c0&V$9qH_4&m&rtPwYHtSg7Jx z@&4lhm2J9VuIzP&Vsmdt9$U@N@F3fw%jy3vjwOv-EpD%Lzgg9AwC31Gg~Qt(eLQhy zhS|(ruZ(8BT0P@NO#kXrrv)dcCp|9>+3s}QL4K2l-~^i^yR3dJ*xUX*u4?YDWhd7} z?U`{tw!p4&l2+99HH+#lrNqrNZa40?oTDFC%DGnBTXg6GLc=>1d=cwZ!Gyf(||DGpqFi)JJfbDaXLW8pSHny3Pl`GdQS+}ySGkEp{!;}0afm)q1g-3kk>b|9}`ffRC-#f*5v;U@hX`lM>sZzZ7 zWB9%kXAbwNy=l5{TcR#^(_1nBlJOPK6XBt)8SnGb7nHLw zH_y9QmvmS&Xy%=Y6-%tc&ZPZ*?kBQCX8D?MzI5~1_m2L(mD=;2H6h%9-J;aLJgHoX zgQ;$P&+)g1UVk;PnDb=9%=X9pKPrB&t^A>4o?iDZhv#(Cx8lt+&9=__`|bAp5Tnx` zMJ%p{ok!aQT`q|5hF%T`$<433u>HvDH&2V7U*5p5hgD|ta_hh+!Y8fYJS|S&^P2Gu zS6TF<@`-!<_SU@<5pPg(-F7CL_+Ui|6e>l_uP0j6S-pI=Zp``lJ0a>gg%fxtbzBj~E}>cK^dti6_Qx1yX)(=)Q89Sw)h`bKd@-+pWUsK^$i4g_HzD>wYQqx>ssa}Nyoo^TvqVk zOCp_5Lq0KG!kuBsi8u$5AMp!V)ek;SD?4*gdDGk5$CGXw=eoaJ-D0eM@8C@3{Y+lv z2hOSY`8%6ESf|##*gqiU&&1^w`P;hvoB0#MjW;Z}4oK-1d4IMnD28F%eBSC^%&yx` zxF)F!w9Z_J?!4_jd&UeR0{3tAaD#r)*o&^G9>v`%P4mKkxZwM)9dbf|h^7 zpS@qOckja7pn`t}-#0$)yB2JBC~l(CZjqU9=g*Mb7W4eU8rcoow{I!lW}tSwx9{e+ zTen}c+I02sZ&O$KAv-6crC)is?6+^zrrkX?<45E6?T6zMwZC8A zYB@*mZQJrwev>*=|68Wnvu{oQv-$9ky|D?!)yL9>&dsQsv-f^Z`(7p2GA?_0S-ULe z%{Mk)p88Jr#_eg_o++=>d{}X#{F|fi=l2&&l{QqV=ajua@T0?G>H@n{^G;o}-`pi~ zbLo|bExl3}aj*FsjLQ$*>%Y0NedT0b@9??Xo6YR+=-p|`?acix@@vJn$+b8=TdyU2jZwE6qE^JtOCSv&oTb!eM34W_4hX8=6y!_3?VQtp<6yST!Q=C9+V5;G>lXQWMd9_n>7uHR$Eo;21FVdQ&MBb@0PXxp;{X5v literal 0 HcmV?d00001 diff --git a/songbird.svg b/songbird.svg new file mode 100644 index 0000000..39e6e72 --- /dev/null +++ b/songbird.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/constants.rs b/src/constants.rs new file mode 100644 index 0000000..d6b757a --- /dev/null +++ b/src/constants.rs @@ -0,0 +1,75 @@ +//! Constants affecting driver function and API handling. + +#[cfg(feature = "driver")] +use audiopus::{Bitrate, SampleRate}; +#[cfg(feature = "driver")] +use discortp::rtp::RtpType; +use std::time::Duration; + +#[cfg(feature = "driver")] +/// The voice gateway version used by the library. +pub const VOICE_GATEWAY_VERSION: u8 = crate::model::constants::GATEWAY_VERSION; + +#[cfg(feature = "driver")] +/// Sample rate of audio to be sent to Discord. +pub const SAMPLE_RATE: SampleRate = SampleRate::Hz48000; + +/// Sample rate of audio to be sent to Discord. +pub const SAMPLE_RATE_RAW: usize = 48_000; + +/// Number of audio frames/packets to be sent per second. +pub const AUDIO_FRAME_RATE: usize = 50; + +/// Length of time between any two audio frames. +pub const TIMESTEP_LENGTH: Duration = Duration::from_millis(1000 / AUDIO_FRAME_RATE as u64); + +#[cfg(feature = "driver")] +/// Default bitrate for audio. +pub const DEFAULT_BITRATE: Bitrate = Bitrate::BitsPerSecond(128_000); + +/// Number of samples in one complete frame of audio per channel. +/// +/// This is equally the number of stereo (joint) samples in an audio frame. +pub const MONO_FRAME_SIZE: usize = SAMPLE_RATE_RAW / AUDIO_FRAME_RATE; + +/// Number of individual samples in one complete frame of stereo audio. +pub const STEREO_FRAME_SIZE: usize = 2 * MONO_FRAME_SIZE; + +/// Number of bytes in one complete frame of raw `f32`-encoded mono audio. 
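+/// With `SAMPLE_RATE_RAW / AUDIO_FRAME_RATE` = 960 samples per channel and 4 bytes
+/// per `f32` sample, this works out to 3840 bytes (7680 bytes for the stereo frame below).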
+pub const MONO_FRAME_BYTE_SIZE: usize = MONO_FRAME_SIZE * std::mem::size_of::<f32>();
+
+/// Number of bytes in one complete frame of raw `f32`-encoded stereo audio.
+pub const STEREO_FRAME_BYTE_SIZE: usize = STEREO_FRAME_SIZE * std::mem::size_of::<f32>();
+
+/// Length (in milliseconds) of any audio frame.
+pub const FRAME_LEN_MS: usize = 1000 / AUDIO_FRAME_RATE;
+
+/// Maximum number of audio frames/packets to be sent per second to be buffered.
+pub const CHILD_BUFFER_LEN: usize = AUDIO_FRAME_RATE / 2;
+
+/// Maximum packet size for a voice packet.
+///
+/// Set a safe amount below the Ethernet MTU to avoid fragmentation/rejection.
+pub const VOICE_PACKET_MAX: usize = 1460;
+
+/// Delay between sends of UDP keepalive frames.
+///
+/// Passive monitoring of Discord itself shows that these fire every 5 seconds
+/// irrespective of outgoing UDP traffic.
+pub const UDP_KEEPALIVE_GAP_MS: u64 = 5_000;
+
+/// Type-converted delay between sends of UDP keepalive frames.
+///
+/// Passive monitoring of Discord itself shows that these fire every 5 seconds
+/// irrespective of outgoing UDP traffic.
+pub const UDP_KEEPALIVE_GAP: Duration = Duration::from_millis(UDP_KEEPALIVE_GAP_MS);
+
+/// Opus silent frame, used to signal speech start and end (and prevent audio glitching).
+pub const SILENT_FRAME: [u8; 3] = [0xf8, 0xff, 0xfe];
+
+/// The one (and only) RTP version.
+pub const RTP_VERSION: u8 = 2;
+
+#[cfg(feature = "driver")]
+/// Profile type used by Discord's Opus audio traffic.
+pub const RTP_PROFILE_TYPE: RtpType = RtpType::Dynamic(120);
diff --git a/src/driver/config.rs b/src/driver/config.rs
new file mode 100644
index 0000000..c5349b6
--- /dev/null
+++ b/src/driver/config.rs
@@ -0,0 +1,10 @@
+use super::CryptoMode;
+
+/// Configuration for the inner Driver.
+///
+/// At present, this cannot be changed.
+#[derive(Clone, Debug, Default)]
+pub struct Config {
+    /// Selected tagging mode for voice packet encryption.
+    pub crypto_mode: Option<CryptoMode>,
+}
diff --git a/src/driver/connection/error.rs b/src/driver/connection/error.rs
new file mode 100644
index 0000000..cb6f8c3
--- /dev/null
+++ b/src/driver/connection/error.rs
@@ -0,0 +1,105 @@
+//! Connection errors and convenience types.
+
+use crate::{
+    driver::tasks::{error::Recipient, message::*},
+    ws::Error as WsError,
+};
+use flume::SendError;
+use serde_json::Error as JsonError;
+use std::{error::Error as ErrorTrait, fmt, io::Error as IoError};
+use xsalsa20poly1305::aead::Error as CryptoError;
+
+/// Errors encountered while connecting to a Discord voice server over the driver.
+#[derive(Debug)]
+pub enum Error {
+    /// An error occurred during [en/de]cryption of voice packets or key generation.
+    Crypto(CryptoError),
+    /// Server did not return the expected crypto mode during negotiation.
+    CryptoModeInvalid,
+    /// Selected crypto mode was not offered by server.
+    CryptoModeUnavailable,
+    /// An indicator that an endpoint URL was invalid.
+    EndpointUrl,
+    /// Discord hello/ready handshake was violated.
+    ExpectedHandshake,
+    /// Discord failed to correctly respond to IP discovery.
+    IllegalDiscoveryResponse,
+    /// Could not parse Discord's view of our IP.
+    IllegalIp,
+    /// Miscellaneous I/O error.
+    Io(IoError),
+    /// JSON (de)serialization error.
+    Json(JsonError),
+    /// Failed to message other background tasks after connection establishment.
+    InterconnectFailure(Recipient),
+    /// Error communicating with gateway server over WebSocket.
+    Ws(WsError),
+}
+
+impl From<CryptoError> for Error {
+    fn from(e: CryptoError) -> Self {
+        Error::Crypto(e)
+    }
+}
+
+impl From<IoError> for Error {
+    fn from(e: IoError) -> Error {
+        Error::Io(e)
+    }
+}
+
+impl From<JsonError> for Error {
+    fn from(e: JsonError) -> Error {
+        Error::Json(e)
+    }
+}
+
+impl From<SendError<WsMessage>> for Error {
+    fn from(_e: SendError<WsMessage>) -> Error {
+        Error::InterconnectFailure(Recipient::AuxNetwork)
+    }
+}
+
+impl From<SendError<EventMessage>> for Error {
+    fn from(_e: SendError<EventMessage>) -> Error {
+        Error::InterconnectFailure(Recipient::Event)
+    }
+}
+
+impl From<SendError<MixerMessage>> for Error {
+    fn from(_e: SendError<MixerMessage>) -> Error {
+        Error::InterconnectFailure(Recipient::Mixer)
+    }
+}
+
+impl From<WsError> for Error {
+    fn from(e: WsError) -> Error {
+        Error::Ws(e)
+    }
+}
+
+impl fmt::Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Failed to connect to Discord RTP server: ")?;
+        use Error::*;
+        match self {
+            Crypto(c) => write!(f, "cryptography error {}.", c),
+            CryptoModeInvalid => write!(f, "server changed negotiated encryption mode."),
+            CryptoModeUnavailable => write!(f, "server did not offer chosen encryption mode."),
+            EndpointUrl => write!(f, "endpoint URL received from gateway was invalid."),
+            ExpectedHandshake => write!(f, "voice initialisation protocol was violated."),
+            IllegalDiscoveryResponse =>
+                write!(f, "IP discovery/NAT punching response was invalid."),
+            IllegalIp => write!(f, "IP discovery/NAT punching response had bad IP value."),
+            Io(i) => write!(f, "I/O failure ({}).", i),
+            Json(j) => write!(f, "JSON (de)serialization issue ({}).", j),
+            InterconnectFailure(r) => write!(f, "failed to contact other task ({:?})", r),
+            Ws(w) => write!(f, "websocket issue ({:?}).", w),
+        }
+    }
+}
+
+impl ErrorTrait for Error {}
+
+/// Convenience type for Discord voice/driver connection error handling.
+pub type Result<T> = std::result::Result<T, Error>;
diff --git a/src/driver/connection/mod.rs b/src/driver/connection/mod.rs
new file mode 100644
index 0000000..ee5a416
--- /dev/null
+++ b/src/driver/connection/mod.rs
@@ -0,0 +1,321 @@
+pub mod error;
+
+use super::{
+    tasks::{message::*, udp_rx, udp_tx, ws as ws_task},
+    Config,
+    CryptoMode,
+};
+use crate::{
+    constants::*,
+    model::{
+        payload::{Identify, Resume, SelectProtocol},
+        Event as GatewayEvent,
+        ProtocolData,
+    },
+    ws::{self, ReceiverExt, SenderExt, WsStream},
+    ConnectionInfo,
+};
+use discortp::discord::{IpDiscoveryPacket, IpDiscoveryType, MutableIpDiscoveryPacket};
+use error::{Error, Result};
+use flume::Sender;
+use std::{net::IpAddr, str::FromStr};
+use tokio::net::UdpSocket;
+use tracing::{debug, info, instrument};
+use url::Url;
+use xsalsa20poly1305::{aead::NewAead, XSalsa20Poly1305 as Cipher};
+
+#[cfg(all(feature = "rustls", not(feature = "native")))]
+use ws::create_rustls_client;
+
+#[cfg(feature = "native")]
+use ws::create_native_tls_client;
+
+pub(crate) struct Connection {
+    pub(crate) info: ConnectionInfo,
+    pub(crate) ws: Sender<WsMessage>,
+}
+
+impl Connection {
+    pub(crate) async fn new(
+        mut info: ConnectionInfo,
+        interconnect: &Interconnect,
+        config: &Config,
+    ) -> Result<Connection> {
+        let crypto_mode = config.crypto_mode.unwrap_or(CryptoMode::Normal);
+
+        let url = generate_url(&mut info.endpoint)?;
+
+        #[cfg(all(feature = "rustls", not(feature = "native")))]
+        let mut client = create_rustls_client(url).await?;
+
+        #[cfg(feature = "native")]
+        let mut client = create_native_tls_client(url).await?;
+
+        let mut hello = None;
+        let mut ready = None;
+
+        client
+            .send_json(&GatewayEvent::from(Identify {
+                server_id: info.guild_id.into(),
+                session_id: info.session_id.clone(),
+                token: info.token.clone(),
+                user_id: info.user_id.into(),
+            }))
+            .await?;
+
+        loop {
+            let value = match client.recv_json().await? {
+                Some(value) => value,
+                None => continue,
+            };
+
+            match value {
+                GatewayEvent::Ready(r) => {
+                    ready = Some(r);
+                    if hello.is_some() {
+                        break;
+                    }
+                },
+                GatewayEvent::Hello(h) => {
+                    hello = Some(h);
+                    if ready.is_some() {
+                        break;
+                    }
+                },
+                other => {
+                    debug!("Expected ready/hello; got: {:?}", other);
+
+                    return Err(Error::ExpectedHandshake);
+                },
+            }
+        }
+
+        let hello =
+            hello.expect("Hello packet expected in connection initialisation, but not found.");
+        let ready =
+            ready.expect("Ready packet expected in connection initialisation, but not found.");
+
+        if !has_valid_mode(&ready.modes, crypto_mode) {
+            return Err(Error::CryptoModeUnavailable);
+        }
+
+        let mut udp = UdpSocket::bind("0.0.0.0:0").await?;
+        udp.connect((ready.ip, ready.port)).await?;
+
+        // Follow Discord's IP Discovery procedures, in case NAT tunnelling is needed.
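+        // The discovery datagram is fixed-size: a 2-byte type (request/response),
+        // a 2-byte length (70, covering the fields below), the 4-byte SSRC, a
+        // 64-byte NUL-padded address field, and a 2-byte port. Discord echoes the
+        // packet back with our externally visible address and port filled in,
+        // which are then reported via SelectProtocol.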
+        let mut bytes = [0; IpDiscoveryPacket::const_packet_size()];
+        {
+            let mut view = MutableIpDiscoveryPacket::new(&mut bytes[..]).expect(
+                "Too few bytes in 'bytes' for IPDiscovery packet.\
+                (Blame: IpDiscoveryPacket::const_packet_size()?)",
+            );
+            view.set_pkt_type(IpDiscoveryType::Request);
+            view.set_length(70);
+            view.set_ssrc(ready.ssrc);
+        }
+
+        udp.send(&bytes).await?;
+
+        let (len, _addr) = udp.recv_from(&mut bytes).await?;
+        {
+            let view =
+                IpDiscoveryPacket::new(&bytes[..len]).ok_or(Error::IllegalDiscoveryResponse)?;
+
+            if view.get_pkt_type() != IpDiscoveryType::Response {
+                return Err(Error::IllegalDiscoveryResponse);
+            }
+
+            // We could do something clever like binary search,
+            // but possibility of UDP spoofing precludes us from
+            // making the assumption we can find a "left edge" of '\0's.
+            let nul_byte_index = view
+                .get_address_raw()
+                .iter()
+                .position(|&b| b == 0)
+                .ok_or(Error::IllegalIp)?;
+
+            let address_str = std::str::from_utf8(&view.get_address_raw()[..nul_byte_index])
+                .map_err(|_| Error::IllegalIp)?;
+
+            let address = IpAddr::from_str(&address_str).map_err(|e| {
+                println!("{:?}", e);
+                Error::IllegalIp
+            })?;
+
+            client
+                .send_json(&GatewayEvent::from(SelectProtocol {
+                    protocol: "udp".into(),
+                    data: ProtocolData {
+                        address,
+                        mode: crypto_mode.to_request_str().into(),
+                        port: view.get_port(),
+                    },
+                }))
+                .await?;
+        }
+
+        let cipher = init_cipher(&mut client, crypto_mode).await?;
+
+        info!("Connected to: {}", info.endpoint);
+
+        info!("WS heartbeat duration {}ms.", hello.heartbeat_interval,);
+
+        let (ws_msg_tx, ws_msg_rx) = flume::unbounded();
+        let (udp_sender_msg_tx, udp_sender_msg_rx) = flume::unbounded();
+        let (udp_receiver_msg_tx, udp_receiver_msg_rx) = flume::unbounded();
+        let (udp_rx, udp_tx) = udp.split();
+
+        let ssrc = ready.ssrc;
+
+        let mix_conn = MixerConnection {
+            cipher: cipher.clone(),
+            udp_rx: udp_receiver_msg_tx,
+            udp_tx: udp_sender_msg_tx,
+        };
+
+        interconnect
+            .mixer
+            .send(MixerMessage::Ws(Some(ws_msg_tx.clone())))?;
+
+        interconnect
+            .mixer
+            .send(MixerMessage::SetConn(mix_conn, ready.ssrc))?;
+
+        tokio::spawn(ws_task::runner(
+            interconnect.clone(),
+            ws_msg_rx,
+            client,
+            ssrc,
+            hello.heartbeat_interval,
+        ));
+
+        tokio::spawn(udp_rx::runner(
+            interconnect.clone(),
+            udp_receiver_msg_rx,
+            cipher,
+            crypto_mode,
+            udp_rx,
+        ));
+        tokio::spawn(udp_tx::runner(udp_sender_msg_rx, ssrc, udp_tx));
+
+        Ok(Connection {
+            info,
+            ws: ws_msg_tx,
+        })
+    }
+
+    #[instrument(skip(self))]
+    pub async fn reconnect(&mut self) -> Result<()> {
+        let url = generate_url(&mut self.info.endpoint)?;
+
+        // Thread may have died, we want to send to prompt a clean exit
+        // (if at all possible) and then proceed as normal.
+
+        #[cfg(all(feature = "rustls", not(feature = "native")))]
+        let mut client = create_rustls_client(url).await?;
+
+        #[cfg(feature = "native")]
+        let mut client = create_native_tls_client(url).await?;
+
+        client
+            .send_json(&GatewayEvent::from(Resume {
+                server_id: self.info.guild_id.into(),
+                session_id: self.info.session_id.clone(),
+                token: self.info.token.clone(),
+            }))
+            .await?;
+
+        let mut hello = None;
+        let mut resumed = None;
+
+        loop {
+            let value = match client.recv_json().await? {
+                Some(value) => value,
+                None => continue,
+            };
+
+            match value {
+                GatewayEvent::Resumed => {
+                    resumed = Some(());
+                    if hello.is_some() {
+                        break;
+                    }
+                },
+                GatewayEvent::Hello(h) => {
+                    hello = Some(h);
+                    if resumed.is_some() {
+                        break;
+                    }
+                },
+                other => {
+                    debug!("Expected resumed/hello; got: {:?}", other);
+
+                    return Err(Error::ExpectedHandshake);
+                },
+            }
+        }
+
+        let hello =
+            hello.expect("Hello packet expected in connection initialisation, but not found.");
+
+        self.ws
+            .send(WsMessage::SetKeepalive(hello.heartbeat_interval))?;
+        self.ws.send(WsMessage::Ws(Box::new(client)))?;
+
+        info!("Reconnected to: {}", &self.info.endpoint);
+        Ok(())
+    }
+}
+
+impl Drop for Connection {
+    fn drop(&mut self) {
+        info!("Disconnected");
+    }
+}
+
+fn generate_url(endpoint: &mut String) -> Result<Url> {
+    if endpoint.ends_with(":80") {
+        let len = endpoint.len();
+
+        endpoint.truncate(len - 3);
+    }
+
+    Url::parse(&format!("wss://{}/?v={}", endpoint, VOICE_GATEWAY_VERSION))
+        .or(Err(Error::EndpointUrl))
+}
+
+#[inline]
+async fn init_cipher(client: &mut WsStream, mode: CryptoMode) -> Result<Cipher> {
+    loop {
+        let value = match client.recv_json().await? {
+            Some(value) => value,
+            None => continue,
+        };
+
+        match value {
+            GatewayEvent::SessionDescription(desc) => {
+                if desc.mode != mode.to_request_str() {
+                    return Err(Error::CryptoModeInvalid);
+                }
+
+                return Ok(Cipher::new_varkey(&desc.secret_key)?);
+            },
+            other => {
+                debug!(
+                    "Expected ready for key; got: op{}/v{:?}",
+                    other.kind() as u8,
+                    other
+                );
+            },
+        }
+    }
+}
+
+#[inline]
+fn has_valid_mode<T, It>(modes: It, mode: CryptoMode) -> bool
+where
+    T: for<'a> PartialEq<&'a str>,
+    It: IntoIterator<Item = T>,
+{
+    modes.into_iter().any(|s| s == mode.to_request_str())
+}
diff --git a/src/driver/crypto.rs b/src/driver/crypto.rs
new file mode 100644
index 0000000..e7a306d
--- /dev/null
+++ b/src/driver/crypto.rs
@@ -0,0 +1,38 @@
+//! Encryption schemes supported by Discord's secure RTP negotiation.
+
+/// Variants of the XSalsa20Poly1305 encryption scheme.
+///
+/// At present, only `Normal` is supported or selectable.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[non_exhaustive]
+pub enum Mode {
+    /// The RTP header is used as the source of nonce bytes for the packet.
+    ///
+    /// Equivalent to a nonce of at most 48b (6B) at no extra packet overhead:
+    /// the RTP sequence number and timestamp are the varying quantities.
+    Normal,
+    /// An additional random 24B suffix is used as the source of nonce bytes for the packet.
+    ///
+    /// Full nonce width of 24B (192b), at an extra 24B per packet (~1.2 kB/s).
+    Suffix,
+    /// An additional incrementing 4B value is used as the source of nonce bytes for the packet.
+    ///
+    /// Nonce width of 4B (32b), at an extra 4B per packet (~0.2 kB/s).
+    Lite,
+}
+
+impl Mode {
+    /// Returns the name of a mode as it will appear during negotiation.
+    pub fn to_request_str(self) -> &'static str {
+        use Mode::*;
+        match self {
+            Normal => "xsalsa20_poly1305",
+            Suffix => "xsalsa20_poly1305_suffix",
+            Lite => "xsalsa20_poly1305_lite",
+        }
+    }
+}
+
+// TODO: implement encrypt + decrypt + nonce selection for each.
+// This will probably need some research into correct handling of
+// padding, reported length, SRTP profiles, and so on.
diff --git a/src/driver/mod.rs b/src/driver/mod.rs
new file mode 100644
index 0000000..cd148bc
--- /dev/null
+++ b/src/driver/mod.rs
@@ -0,0 +1,233 @@
+//! Runner for a voice connection.
+//!
+//! Songbird's driver is a mixed-sync system, using:
+//!  * Asynchronous connection management, event-handling, and gateway integration.
+//!  * Synchronous audio mixing, packet generation, and encoding.
+//!
+//! This splits up work according to its IO/compute bound nature, preventing packet
+//! generation from being slowed down past its deadline, or from affecting other
+//! asynchronous tasks your bot must handle.
+
+mod config;
+pub(crate) mod connection;
+mod crypto;
+pub(crate) mod tasks;
+
+pub use config::Config;
+use connection::error::Result;
+pub use crypto::Mode as CryptoMode;
+
+use crate::{
+    events::EventData,
+    input::Input,
+    tracks::{Track, TrackHandle},
+    ConnectionInfo,
+    Event,
+    EventHandler,
+};
+use audiopus::Bitrate;
+use flume::{Receiver, SendError, Sender};
+use tasks::message::CoreMessage;
+use tracing::instrument;
+
+/// The control object for a Discord voice connection, handling connection,
+/// mixing, encoding, en/decryption, and event generation.
+#[derive(Clone, Debug)]
+pub struct Driver {
+    config: Config,
+    self_mute: bool,
+    sender: Sender<CoreMessage>,
+}
+
+impl Driver {
+    /// Creates a new voice driver.
+    ///
+    /// This will create the core voice tasks in the background.
+    #[inline]
+    pub fn new(config: Config) -> Self {
+        let sender = Self::start_inner(config.clone());
+
+        Driver {
+            config,
+            self_mute: false,
+            sender,
+        }
+    }
+
+    fn start_inner(config: Config) -> Sender<CoreMessage> {
+        let (tx, rx) = flume::unbounded();
+
+        tasks::start(config, rx, tx.clone());
+
+        tx
+    }
+
+    fn restart_inner(&mut self) {
+        self.sender = Self::start_inner(self.config.clone());
+
+        self.mute(self.self_mute);
+    }
+
+    /// Connects to a voice channel using the specified server.
+    #[instrument(skip(self))]
+    pub fn connect(&mut self, info: ConnectionInfo) -> Receiver<Result<()>> {
+        let (tx, rx) = flume::bounded(1);
+
+        self.raw_connect(info, tx);
+
+        rx
+    }
+
+    /// Connects to a voice channel using the specified server.
+    #[instrument(skip(self))]
+    pub(crate) fn raw_connect(&mut self, info: ConnectionInfo, tx: Sender<Result<()>>) {
+        self.send(CoreMessage::ConnectWithResult(info, tx));
+    }
+
+    /// Leaves the current voice channel, disconnecting from it.
+    ///
+    /// This does *not* forget settings, like whether to be self-deafened or
+    /// self-muted.
+    #[instrument(skip(self))]
+    pub fn leave(&mut self) {
+        self.send(CoreMessage::Disconnect);
+    }
+
+    /// Sets whether the current connection is to be muted.
+    ///
+    /// If there is no live voice connection, then this only acts as a settings
+    /// update for future connections.
+    #[instrument(skip(self))]
+    pub fn mute(&mut self, mute: bool) {
+        self.self_mute = mute;
+        self.send(CoreMessage::Mute(mute));
+    }
+
+    /// Returns whether the driver is muted (i.e., processes audio internally
+    /// but submits none).
+    #[instrument(skip(self))]
+    pub fn is_mute(&self) -> bool {
+        self.self_mute
+    }
+
+    /// Plays audio from a source, returning a handle for further control.
+    ///
+    /// This can be a source created via [`ffmpeg`] or [`ytdl`].
+    ///
+    /// [`ffmpeg`]: ../input/fn.ffmpeg.html
+    /// [`ytdl`]: ../input/fn.ytdl.html
+    #[instrument(skip(self))]
+    pub fn play_source(&mut self, source: Input) -> TrackHandle {
+        let (player, handle) = super::create_player(source);
+        self.send(CoreMessage::AddTrack(player));
+
+        handle
+    }
+
+    /// Plays audio from a source, returning a handle for further control.
+    ///
+    /// Unlike [`play_source`], this stops all other sources attached
+    /// to the channel.
+    ///
+    /// [`play_source`]: #method.play_source
+    #[instrument(skip(self))]
+    pub fn play_only_source(&mut self, source: Input) -> TrackHandle {
+        let (player, handle) = super::create_player(source);
+        self.send(CoreMessage::SetTrack(Some(player)));
+
+        handle
+    }
+
+    /// Plays audio from a [`Track`] object.
+    ///
+    /// This will be one half of the return value of [`create_player`].
+    /// The main difference between this function and [`play_source`] is
+    /// that this allows for direct manipulation of the [`Track`] object
+    /// before it is passed over to the voice and mixing contexts.
+    ///
+    /// [`create_player`]: ../tracks/fn.create_player.html
+    /// [`Track`]: ../tracks/struct.Track.html
+    /// [`play_source`]: #method.play_source
+    #[instrument(skip(self))]
+    pub fn play(&mut self, track: Track) {
+        self.send(CoreMessage::AddTrack(track));
+    }
+
+    /// Exclusively plays audio from a [`Track`] object.
+    ///
+    /// This will be one half of the return value of [`create_player`].
+    /// As in [`play_only_source`], this stops all other sources attached to the
+    /// channel. Like [`play`], however, this allows for direct manipulation of the
+    /// [`Track`] object before it is passed over to the voice and mixing contexts.
+    ///
+    /// [`create_player`]: ../tracks/fn.create_player.html
+    /// [`Track`]: ../tracks/struct.Track.html
+    /// [`play_only_source`]: #method.play_only_source
+    /// [`play`]: #method.play
+    #[instrument(skip(self))]
+    pub fn play_only(&mut self, track: Track) {
+        self.send(CoreMessage::SetTrack(Some(track)));
+    }
+
+    /// Sets the bitrate for encoding Opus packets sent along
+    /// the channel being managed.
+    ///
+    /// The default rate is 128 kbps.
+    /// Sensible values range between `Bits(512)` and `Bits(512_000)`
+    /// bits per second.
+    /// Alternatively, `Auto` and `Max` remain available.
+    #[instrument(skip(self))]
+    pub fn set_bitrate(&mut self, bitrate: Bitrate) {
+        self.send(CoreMessage::SetBitrate(bitrate))
+    }
+
+    /// Stops playing audio from all sources, if any are set.
+    #[instrument(skip(self))]
+    pub fn stop(&mut self) {
+        self.send(CoreMessage::SetTrack(None))
+    }
+
+    /// Attach a global event handler to an audio context. Global events may receive
+    /// any [`EventContext`].
+    ///
+    /// Global timing events will tick regardless of whether audio is playing,
+    /// so long as the bot is connected to a voice channel, and have no tracks.
+    /// [`TrackEvent`]s will respond to all relevant tracks, giving some audio elements.
+    ///
+    /// Users **must** ensure that no costly work or blocking occurs
+    /// within the supplied function or closure. *Taking excess time could prevent
+    /// timely sending of packets, causing audio glitches and delays*.
+    ///
+    /// [`Track`]: ../tracks/struct.Track.html
+    /// [`TrackEvent`]: ../events/enum.TrackEvent.html
+    /// [`EventContext`]: ../events/enum.EventContext.html
+    #[instrument(skip(self, action))]
+    pub fn add_global_event<F: EventHandler + 'static>(&mut self, event: Event, action: F) {
+        self.send(CoreMessage::AddEvent(EventData::new(event, action)));
+    }
+
+    /// Sends a message to the inner tasks, restarting it if necessary.
+    fn send(&mut self, status: CoreMessage) {
+        // Restart thread if it errored.
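+        // A failed send means the receiving core task has already shut down,
+        // so spawn a fresh set of background tasks and retry the same message once.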
+        if let Err(SendError(status)) = self.sender.send(status) {
+            self.restart_inner();
+
+            self.sender.send(status).unwrap();
+        }
+    }
+}
+
+impl Default for Driver {
+    fn default() -> Self {
+        Self::new(Default::default())
+    }
+}
+
+impl Drop for Driver {
+    /// Leaves the current connected voice channel, if connected to one, and
+    /// forgets all configurations relevant to this Handler.
+    fn drop(&mut self) {
+        self.leave();
+        let _ = self.sender.send(CoreMessage::Poison);
+    }
+}
diff --git a/src/driver/tasks/error.rs b/src/driver/tasks/error.rs
new file mode 100644
index 0000000..c9e1fdb
--- /dev/null
+++ b/src/driver/tasks/error.rs
@@ -0,0 +1,97 @@
+use super::message::*;
+use crate::ws::Error as WsError;
+use audiopus::Error as OpusError;
+use flume::SendError;
+use std::io::Error as IoError;
+use xsalsa20poly1305::aead::Error as CryptoError;
+
+#[derive(Debug)]
+pub enum Recipient {
+    AuxNetwork,
+    Event,
+    Mixer,
+    UdpRx,
+    UdpTx,
+}
+
+pub type Result<T> = std::result::Result<T, Error>;
+
+#[derive(Debug)]
+pub enum Error {
+    Crypto(CryptoError),
+    /// Received an illegal voice packet on the voice UDP socket.
+    IllegalVoicePacket,
+    InterconnectFailure(Recipient),
+    Io(IoError),
+    Opus(OpusError),
+    Ws(WsError),
+}
+
+impl Error {
+    pub(crate) fn should_trigger_connect(&self) -> bool {
+        matches!(
+            self,
+            Error::InterconnectFailure(Recipient::AuxNetwork)
+                | Error::InterconnectFailure(Recipient::UdpRx)
+                | Error::InterconnectFailure(Recipient::UdpTx)
+        )
+    }
+
+    pub(crate) fn should_trigger_interconnect_rebuild(&self) -> bool {
+        matches!(self, Error::InterconnectFailure(Recipient::Event))
+    }
+}
+
+impl From<CryptoError> for Error {
+    fn from(e: CryptoError) -> Self {
+        Error::Crypto(e)
+    }
+}
+
+impl From<IoError> for Error {
+    fn from(e: IoError) -> Error {
+        Error::Io(e)
+    }
+}
+
+impl From<OpusError> for Error {
+    fn from(e: OpusError) -> Error {
+        Error::Opus(e)
+    }
+}
+
+impl From<SendError<WsMessage>> for Error {
+    fn from(_e: SendError<WsMessage>) -> Error {
+        Error::InterconnectFailure(Recipient::AuxNetwork)
+    }
+}
+
+impl From<SendError<EventMessage>> for Error {
+    fn from(_e: SendError<EventMessage>) -> Error {
+        Error::InterconnectFailure(Recipient::Event)
+    }
+}
+
+impl From<SendError<MixerMessage>> for Error {
+    fn from(_e: SendError<MixerMessage>) -> Error {
+        Error::InterconnectFailure(Recipient::Mixer)
+    }
+}
+
+impl From<SendError<UdpRxMessage>> for Error {
+    fn from(_e: SendError<UdpRxMessage>) -> Error {
+        Error::InterconnectFailure(Recipient::UdpRx)
+    }
+}
+
+impl From<SendError<UdpTxMessage>> for Error {
+    fn from(_e: SendError<UdpTxMessage>) -> Error {
+        Error::InterconnectFailure(Recipient::UdpTx)
+    }
+}
+
+impl From<WsError> for Error {
+    fn from(e: WsError) -> Error {
+        Error::Ws(e)
+    }
+}
diff --git a/src/driver/tasks/events.rs b/src/driver/tasks/events.rs
new file mode 100644
index 0000000..bb28895
--- /dev/null
+++ b/src/driver/tasks/events.rs
@@ -0,0 +1,118 @@
+use super::message::*;
+use crate::{
+    events::{EventStore, GlobalEvents, TrackEvent},
+    tracks::{TrackHandle, TrackState},
+};
+use flume::Receiver;
+use tracing::{debug, info, instrument, trace};
+
+#[instrument(skip(_interconnect, evt_rx))]
+pub(crate) async fn runner(_interconnect: Interconnect, evt_rx: Receiver<EventMessage>) {
+    let mut global = GlobalEvents::default();
+
+    let mut events: Vec<EventStore> = vec![];
+    let mut states: Vec<TrackState> = vec![];
+    let mut handles: Vec<TrackHandle> = vec![];
+
+    loop {
+        use EventMessage::*;
+        match evt_rx.recv_async().await {
+            Ok(AddGlobalEvent(data)) => {
+                info!("Global event added.");
+                global.add_event(data);
+            },
+            Ok(AddTrackEvent(i, data)) => {
+                info!("Adding event to track {}.", i);
+
+                let event_store = events
+                    .get_mut(i)
+                    .expect("Event thread was given an illegal store index for AddTrackEvent.");
+                let state = states
states + .get_mut(i) + .expect("Event thread was given an illegal state index for AddTrackEvent."); + + event_store.add_event(data, state.position); + }, + Ok(FireCoreEvent(ctx)) => { + let ctx = ctx.to_user_context(); + let evt = ctx + .to_core_event() + .expect("Event thread was passed a non-core event in FireCoreEvent."); + + trace!("Firing core event {:?}.", evt); + + global.fire_core_event(evt, ctx).await; + }, + Ok(AddTrack(store, state, handle)) => { + events.push(store); + states.push(state); + handles.push(handle); + + info!("Event state for track {} added", events.len()); + }, + Ok(ChangeState(i, change)) => { + use TrackStateChange::*; + + let max_states = states.len(); + debug!( + "Changing state for track {} of {}: {:?}", + i, max_states, change + ); + + let state = states + .get_mut(i) + .expect("Event thread was given an illegal state index for ChangeState."); + + match change { + Mode(mode) => { + let old = state.playing; + state.playing = mode; + if old != mode && mode.is_done() { + global.fire_track_event(TrackEvent::End, i); + } + }, + Volume(vol) => { + state.volume = vol; + }, + Position(pos) => { + // Currently, only Tick should fire time events. + state.position = pos; + }, + Loops(loops, user_set) => { + state.loops = loops; + if !user_set { + global.fire_track_event(TrackEvent::Loop, i); + } + }, + Total(new) => { + // Massive, unprecedented state changes. + *state = new; + }, + } + }, + Ok(RemoveTrack(i)) => { + info!("Event state for track {} of {} removed.", i, events.len()); + + events.remove(i); + states.remove(i); + handles.remove(i); + }, + Ok(RemoveAllTracks) => { + info!("Event state for all tracks removed."); + + events.clear(); + states.clear(); + handles.clear(); + }, + Ok(Tick) => { + // NOTE: this should fire saved up blocks of state change evts. + global.tick(&mut events, &mut states, &mut handles).await; + }, + Err(_) | Ok(Poison) => { + break; + }, + } + } + + info!("Event thread exited."); +} diff --git a/src/driver/tasks/message/core.rs b/src/driver/tasks/message/core.rs new file mode 100644 index 0000000..3c5c017 --- /dev/null +++ b/src/driver/tasks/message/core.rs @@ -0,0 +1,24 @@ +use crate::{ + driver::connection::error::Error, + events::EventData, + tracks::Track, + Bitrate, + ConnectionInfo, +}; +use flume::Sender; + +#[allow(clippy::large_enum_variant)] +#[derive(Debug)] +pub enum CoreMessage { + ConnectWithResult(ConnectionInfo, Sender>), + Disconnect, + SetTrack(Option), + AddTrack(Track), + SetBitrate(Bitrate), + AddEvent(EventData), + Mute(bool), + Reconnect, + FullReconnect, + RebuildInterconnect, + Poison, +} diff --git a/src/driver/tasks/message/events.rs b/src/driver/tasks/message/events.rs new file mode 100644 index 0000000..197ebe8 --- /dev/null +++ b/src/driver/tasks/message/events.rs @@ -0,0 +1,31 @@ +use crate::{ + events::{CoreContext, EventData, EventStore}, + tracks::{LoopState, PlayMode, TrackHandle, TrackState}, +}; +use std::time::Duration; + +pub(crate) enum EventMessage { + // Event related. + // Track events should fire off the back of state changes. + AddGlobalEvent(EventData), + AddTrackEvent(usize, EventData), + FireCoreEvent(CoreContext), + + AddTrack(EventStore, TrackState, TrackHandle), + ChangeState(usize, TrackStateChange), + RemoveTrack(usize), + RemoveAllTracks, + Tick, + + Poison, +} + +#[derive(Debug)] +pub enum TrackStateChange { + Mode(PlayMode), + Volume(f32), + Position(Duration), + // Bool indicates user-set. 
+ Loops(LoopState, bool), + Total(TrackState), +} diff --git a/src/driver/tasks/message/mixer.rs b/src/driver/tasks/message/mixer.rs new file mode 100644 index 0000000..4c2eec5 --- /dev/null +++ b/src/driver/tasks/message/mixer.rs @@ -0,0 +1,32 @@ +use super::{Interconnect, UdpRxMessage, UdpTxMessage, WsMessage}; + +use crate::{tracks::Track, Bitrate}; +use flume::Sender; +use xsalsa20poly1305::XSalsa20Poly1305 as Cipher; + +pub(crate) struct MixerConnection { + pub cipher: Cipher, + pub udp_rx: Sender, + pub udp_tx: Sender, +} + +impl Drop for MixerConnection { + fn drop(&mut self) { + let _ = self.udp_rx.send(UdpRxMessage::Poison); + let _ = self.udp_tx.send(UdpTxMessage::Poison); + } +} + +pub(crate) enum MixerMessage { + AddTrack(Track), + SetTrack(Option), + SetBitrate(Bitrate), + SetMute(bool), + SetConn(MixerConnection, u32), + DropConn, + ReplaceInterconnect(Interconnect), + RebuildEncoder, + + Ws(Option>), + Poison, +} diff --git a/src/driver/tasks/message/mod.rs b/src/driver/tasks/message/mod.rs new file mode 100644 index 0000000..1831839 --- /dev/null +++ b/src/driver/tasks/message/mod.rs @@ -0,0 +1,49 @@ +mod core; +mod events; +mod mixer; +mod udp_rx; +mod udp_tx; +mod ws; + +pub(crate) use self::{core::*, events::*, mixer::*, udp_rx::*, udp_tx::*, ws::*}; + +use flume::Sender; +use tracing::info; + +#[derive(Clone, Debug)] +pub(crate) struct Interconnect { + pub core: Sender, + pub events: Sender, + pub mixer: Sender, +} + +impl Interconnect { + pub fn poison(&self) { + let _ = self.events.send(EventMessage::Poison); + } + + pub fn poison_all(&self) { + self.poison(); + let _ = self.mixer.send(MixerMessage::Poison); + } + + pub fn restart_volatile_internals(&mut self) { + self.poison(); + + let (evt_tx, evt_rx) = flume::unbounded(); + + self.events = evt_tx; + + let ic = self.clone(); + tokio::spawn(async move { + info!("Event processor restarted."); + super::events::runner(ic, evt_rx).await; + info!("Event processor finished."); + }); + + // Make mixer aware of new targets... + let _ = self + .mixer + .send(MixerMessage::ReplaceInterconnect(self.clone())); + } +} diff --git a/src/driver/tasks/message/udp_rx.rs b/src/driver/tasks/message/udp_rx.rs new file mode 100644 index 0000000..91e740d --- /dev/null +++ b/src/driver/tasks/message/udp_rx.rs @@ -0,0 +1,7 @@ +use super::Interconnect; + +pub(crate) enum UdpRxMessage { + ReplaceInterconnect(Interconnect), + + Poison, +} diff --git a/src/driver/tasks/message/udp_tx.rs b/src/driver/tasks/message/udp_tx.rs new file mode 100644 index 0000000..349d524 --- /dev/null +++ b/src/driver/tasks/message/udp_tx.rs @@ -0,0 +1,4 @@ +pub enum UdpTxMessage { + Packet(Vec), // TODO: do something cheaper. 
+ Poison, +} diff --git a/src/driver/tasks/message/ws.rs b/src/driver/tasks/message/ws.rs new file mode 100644 index 0000000..7ce5f07 --- /dev/null +++ b/src/driver/tasks/message/ws.rs @@ -0,0 +1,12 @@ +use super::Interconnect; +use crate::ws::WsStream; + +#[allow(dead_code)] +pub(crate) enum WsMessage { + Ws(Box), + ReplaceInterconnect(Interconnect), + SetKeepalive(f64), + Speaking(bool), + + Poison, +} diff --git a/src/driver/tasks/mixer.rs b/src/driver/tasks/mixer.rs new file mode 100644 index 0000000..3fa5d1d --- /dev/null +++ b/src/driver/tasks/mixer.rs @@ -0,0 +1,516 @@ +use super::{error::Result, message::*}; +use crate::{ + constants::*, + tracks::{PlayMode, Track}, +}; +use audiopus::{ + coder::Encoder as OpusEncoder, + softclip::SoftClip, + Application as CodingMode, + Bitrate, + Channels, +}; +use discortp::{ + rtp::{MutableRtpPacket, RtpPacket}, + MutablePacket, + Packet, +}; +use flume::{Receiver, Sender, TryRecvError}; +use rand::random; +use spin_sleep::SpinSleeper; +use std::time::Instant; +use tokio::runtime::Handle; +use tracing::{error, instrument}; +use xsalsa20poly1305::{aead::AeadInPlace, Nonce, TAG_SIZE}; + +struct Mixer { + async_handle: Handle, + bitrate: Bitrate, + conn_active: Option, + deadline: Instant, + encoder: OpusEncoder, + interconnect: Interconnect, + mix_rx: Receiver, + muted: bool, + packet: [u8; VOICE_PACKET_MAX], + prevent_events: bool, + silence_frames: u8, + sleeper: SpinSleeper, + soft_clip: SoftClip, + tracks: Vec, + ws: Option>, +} + +fn new_encoder(bitrate: Bitrate) -> Result { + let mut encoder = OpusEncoder::new(SAMPLE_RATE, Channels::Stereo, CodingMode::Audio)?; + encoder.set_bitrate(bitrate)?; + + Ok(encoder) +} + +impl Mixer { + fn new( + mix_rx: Receiver, + async_handle: Handle, + interconnect: Interconnect, + ) -> Self { + let bitrate = DEFAULT_BITRATE; + let encoder = new_encoder(bitrate) + .expect("Failed to create encoder in mixing thread with known-good values."); + let soft_clip = SoftClip::new(Channels::Stereo); + + let mut packet = [0u8; VOICE_PACKET_MAX]; + + let mut rtp = MutableRtpPacket::new(&mut packet[..]).expect( + "FATAL: Too few bytes in self.packet for RTP header.\ + (Blame: VOICE_PACKET_MAX?)", + ); + rtp.set_version(RTP_VERSION); + rtp.set_payload_type(RTP_PROFILE_TYPE); + rtp.set_sequence(random::().into()); + rtp.set_timestamp(random::().into()); + + Self { + async_handle, + bitrate, + conn_active: None, + deadline: Instant::now(), + encoder, + interconnect, + mix_rx, + muted: false, + packet, + prevent_events: false, + silence_frames: 0, + sleeper: Default::default(), + soft_clip, + tracks: vec![], + ws: None, + } + } + + fn run(&mut self) { + let mut events_failure = false; + let mut conn_failure = false; + + 'runner: loop { + loop { + use MixerMessage::*; + + let error = match self.mix_rx.try_recv() { + Ok(AddTrack(mut t)) => { + t.source.prep_with_handle(self.async_handle.clone()); + self.add_track(t) + }, + Ok(SetTrack(t)) => { + self.tracks.clear(); + + let mut out = self.fire_event(EventMessage::RemoveAllTracks); + + if let Some(mut t) = t { + t.source.prep_with_handle(self.async_handle.clone()); + + // Do this unconditionally: this affects local state infallibly, + // with the event installation being the remote part. 
+ if let Err(e) = self.add_track(t) { + out = Err(e); + } + } + + out + }, + Ok(SetBitrate(b)) => { + self.bitrate = b; + if let Err(e) = self.set_bitrate(b) { + error!("Failed to update bitrate {:?}", e); + } + Ok(()) + }, + Ok(SetMute(m)) => { + self.muted = m; + Ok(()) + }, + Ok(SetConn(conn, ssrc)) => { + self.conn_active = Some(conn); + let mut rtp = MutableRtpPacket::new(&mut self.packet[..]).expect( + "Too few bytes in self.packet for RTP header.\ + (Blame: VOICE_PACKET_MAX?)", + ); + rtp.set_ssrc(ssrc); + self.deadline = Instant::now(); + Ok(()) + }, + Ok(DropConn) => { + self.conn_active = None; + Ok(()) + }, + Ok(ReplaceInterconnect(i)) => { + self.prevent_events = false; + if let Some(ws) = &self.ws { + conn_failure |= + ws.send(WsMessage::ReplaceInterconnect(i.clone())).is_err(); + } + if let Some(conn) = &self.conn_active { + conn_failure |= conn + .udp_rx + .send(UdpRxMessage::ReplaceInterconnect(i.clone())) + .is_err(); + } + self.interconnect = i; + + self.rebuild_tracks() + }, + Ok(RebuildEncoder) => match new_encoder(self.bitrate) { + Ok(encoder) => { + self.encoder = encoder; + Ok(()) + }, + Err(e) => { + error!("Failed to rebuild encoder. Resetting bitrate. {:?}", e); + self.bitrate = DEFAULT_BITRATE; + self.encoder = new_encoder(self.bitrate) + .expect("Failed fallback rebuild of OpusEncoder with safe inputs."); + Ok(()) + }, + }, + Ok(Ws(new_ws_handle)) => { + self.ws = new_ws_handle; + Ok(()) + }, + + Err(TryRecvError::Disconnected) | Ok(Poison) => { + break 'runner; + }, + + Err(TryRecvError::Empty) => { + break; + }, + }; + + if let Err(e) = error { + events_failure |= e.should_trigger_interconnect_rebuild(); + conn_failure |= e.should_trigger_connect(); + } + } + + if let Err(e) = self.cycle().and_then(|_| self.audio_commands_events()) { + events_failure |= e.should_trigger_interconnect_rebuild(); + conn_failure |= e.should_trigger_connect(); + + error!("Mixer thread cycle: {:?}", e); + } + + // event failure? rebuild interconnect. + // ws or udp failure? full connect + // (soft reconnect is covered by the ws task.) + if events_failure { + self.prevent_events = true; + self.interconnect + .core + .send(CoreMessage::RebuildInterconnect) + .expect("FATAL: No way to rebuild driver core from mixer."); + events_failure = false; + } + + if conn_failure { + self.interconnect + .core + .send(CoreMessage::FullReconnect) + .expect("FATAL: No way to rebuild driver core from mixer."); + conn_failure = false; + } + } + } + + #[inline] + fn fire_event(&self, event: EventMessage) -> Result<()> { + // As this task is responsible for noticing the potential death of an event context, + // it's responsible for not forcibly recreating said context repeatedly. + if !self.prevent_events { + self.interconnect.events.send(event)?; + Ok(()) + } else { + Ok(()) + } + } + + #[inline] + fn add_track(&mut self, mut track: Track) -> Result<()> { + let evts = track.events.take().unwrap_or_default(); + let state = track.state(); + let handle = track.handle.clone(); + + self.tracks.push(track); + + self.interconnect + .events + .send(EventMessage::AddTrack(evts, state, handle))?; + + Ok(()) + } + + // rebuilds the event thread's view of each track, in event of a full rebuild. 
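+    // The event task keeps its own copy of each track's event store, state,
+    // and handle; those copies are lost if that task is restarted, so every
+    // track's registration is re-sent here via `EventMessage::AddTrack`.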
+ #[inline] + fn rebuild_tracks(&mut self) -> Result<()> { + for track in self.tracks.iter_mut() { + let evts = track.events.take().unwrap_or_default(); + let state = track.state(); + let handle = track.handle.clone(); + + self.interconnect + .events + .send(EventMessage::AddTrack(evts, state, handle))?; + } + + Ok(()) + } + + #[inline] + fn mix_tracks<'a>( + &mut self, + opus_frame: &'a mut [u8], + mix_buffer: &mut [f32; STEREO_FRAME_SIZE], + ) -> Result<(usize, &'a [u8])> { + let mut len = 0; + + // Opus frame passthrough. + // This requires that we have only one track, who has volume 1.0, and an + // Opus codec type. + let do_passthrough = self.tracks.len() == 1 && { + let track = &self.tracks[0]; + (track.volume - 1.0).abs() < f32::EPSILON && track.source.supports_passthrough() + }; + + for (i, track) in self.tracks.iter_mut().enumerate() { + let vol = track.volume; + let stream = &mut track.source; + + if track.playing != PlayMode::Play { + continue; + } + + let (temp_len, opus_len) = if do_passthrough { + (0, track.source.read_opus_frame(opus_frame).ok()) + } else { + (stream.mix(mix_buffer, vol), None) + }; + + len = len.max(temp_len); + if temp_len > 0 || opus_len.is_some() { + track.step_frame(); + } else if track.do_loop() { + if let Some(time) = track.seek_time(Default::default()) { + // have to reproduce self.fire_event here + // to circumvent the borrow checker's lack of knowledge. + // + // In event of error, one of the later event calls will + // trigger the event thread rebuild: it is more prudent that + // the mixer works as normal right now. + if !self.prevent_events { + let _ = self.interconnect.events.send(EventMessage::ChangeState( + i, + TrackStateChange::Position(time), + )); + let _ = self.interconnect.events.send(EventMessage::ChangeState( + i, + TrackStateChange::Loops(track.loops, false), + )); + } + } + } else { + track.end(); + } + + if let Some(opus_len) = opus_len { + return Ok((STEREO_FRAME_SIZE, &opus_frame[..opus_len])); + } + } + + Ok((len, &opus_frame[..0])) + } + + #[inline] + fn audio_commands_events(&mut self) -> Result<()> { + // Apply user commands. + for (i, track) in self.tracks.iter_mut().enumerate() { + // This causes fallible event system changes, + // but if the event thread has died then we'll certainly + // detect that on the tick later. + // Changes to play state etc. MUST all be handled. + track.process_commands(i, &self.interconnect); + } + + // TODO: do without vec? + let mut i = 0; + let mut to_remove = Vec::with_capacity(self.tracks.len()); + while i < self.tracks.len() { + let track = self + .tracks + .get_mut(i) + .expect("Tried to remove an illegal track index."); + + if track.playing.is_done() { + let p_state = track.playing(); + self.tracks.remove(i); + to_remove.push(i); + self.fire_event(EventMessage::ChangeState( + i, + TrackStateChange::Mode(p_state), + ))?; + } else { + i += 1; + } + } + + // Tick + self.fire_event(EventMessage::Tick)?; + + // Then do removals. + for i in &to_remove[..] { + self.fire_event(EventMessage::RemoveTrack(*i))?; + } + + Ok(()) + } + + #[inline] + fn march_deadline(&mut self) { + self.sleeper + .sleep(self.deadline.saturating_duration_since(Instant::now())); + self.deadline += TIMESTEP_LENGTH; + } + + fn cycle(&mut self) -> Result<()> { + if self.conn_active.is_none() { + self.march_deadline(); + return Ok(()); + } + + // TODO: can we make opus_frame_backing *actually* a view over + // some region of self.packet, derived using the encryption mode? + // This saves a copy on Opus passthrough. 
+ let mut opus_frame_backing = [0u8; STEREO_FRAME_SIZE]; + let mut mix_buffer = [0f32; STEREO_FRAME_SIZE]; + + // Slice which mix tracks may use to passthrough direct Opus frames. + let mut opus_space = &mut opus_frame_backing[..]; + + // Walk over all the audio files, combining into one audio frame according + // to volume, play state, etc. + let (mut len, mut opus_frame) = self.mix_tracks(&mut opus_space, &mut mix_buffer)?; + + self.soft_clip.apply(&mut mix_buffer[..])?; + + if self.muted { + len = 0; + } + + if len == 0 { + if self.silence_frames > 0 { + self.silence_frames -= 1; + + // Explicit "Silence" frame. + opus_frame = &SILENT_FRAME[..]; + } else { + // Per official guidelines, send 5x silence BEFORE we stop speaking. + if let Some(ws) = &self.ws { + // NOTE: this should prevent a catastrophic thread pileup. + // A full reconnect might cause an inner closed connection. + // It's safer to leave the central task to clean this up and + // pass the mixer a new channel. + let _ = ws.send(WsMessage::Speaking(false)); + } + + self.march_deadline(); + + return Ok(()); + } + } else { + self.silence_frames = 5; + } + + if let Some(ws) = &self.ws { + ws.send(WsMessage::Speaking(true))?; + } + + self.march_deadline(); + self.prep_and_send_packet(mix_buffer, opus_frame)?; + + Ok(()) + } + + fn set_bitrate(&mut self, bitrate: Bitrate) -> Result<()> { + self.encoder.set_bitrate(bitrate).map_err(Into::into) + } + + fn prep_and_send_packet(&mut self, buffer: [f32; 1920], opus_frame: &[u8]) -> Result<()> { + let conn = self + .conn_active + .as_mut() + .expect("Shouldn't be mixing packets without access to a cipher + UDP dest."); + + let mut nonce = Nonce::default(); + let index = { + let mut rtp = MutableRtpPacket::new(&mut self.packet[..]).expect( + "FATAL: Too few bytes in self.packet for RTP header.\ + (Blame: VOICE_PACKET_MAX?)", + ); + + let pkt = rtp.packet(); + let rtp_len = RtpPacket::minimum_packet_size(); + nonce[..rtp_len].copy_from_slice(&pkt[..rtp_len]); + + let payload = rtp.payload_mut(); + + let payload_len = if opus_frame.is_empty() { + self.encoder + .encode_float(&buffer[..STEREO_FRAME_SIZE], &mut payload[TAG_SIZE..])? + } else { + let len = opus_frame.len(); + payload[TAG_SIZE..TAG_SIZE + len].clone_from_slice(opus_frame); + len + }; + + let final_payload_size = TAG_SIZE + payload_len; + + let tag = conn.cipher.encrypt_in_place_detached( + &nonce, + b"", + &mut payload[TAG_SIZE..final_payload_size], + )?; + payload[..TAG_SIZE].copy_from_slice(&tag[..]); + + rtp_len + final_payload_size + }; + + // TODO: This is dog slow, don't do this. + // Can we replace this with a shared ring buffer + semaphore? + // i.e., do something like double/triple buffering in graphics. + conn.udp_tx + .send(UdpTxMessage::Packet(self.packet[..index].to_vec()))?; + + let mut rtp = MutableRtpPacket::new(&mut self.packet[..]).expect( + "FATAL: Too few bytes in self.packet for RTP header.\ + (Blame: VOICE_PACKET_MAX?)", + ); + rtp.set_sequence(rtp.get_sequence() + 1); + rtp.set_timestamp(rtp.get_timestamp() + MONO_FRAME_SIZE as u32); + + Ok(()) + } +} + +/// The mixing thread is a synchronous context due to its compute-bound nature. +/// +/// We pass in an async handle for the benefit of some Input classes (e.g., restartables) +/// who need to run their restart code elsewhere and return blank data until such time. 
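+///
+/// A launch sketch mirroring `start_internals` in `tasks/mod.rs` (the
+/// `interconnect` and `mix_rx` values are assumed to come from the driver
+/// core; illustrative only):
+///
+/// ```rust,ignore
+/// let handle = tokio::runtime::Handle::current();
+/// std::thread::spawn(move || runner(interconnect, mix_rx, handle));
+/// ```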
+#[instrument(skip(interconnect, mix_rx, async_handle))] +pub(crate) fn runner( + interconnect: Interconnect, + mix_rx: Receiver, + async_handle: Handle, +) { + let mut mixer = Mixer::new(mix_rx, async_handle, interconnect); + + mixer.run(); +} diff --git a/src/driver/tasks/mod.rs b/src/driver/tasks/mod.rs new file mode 100644 index 0000000..2e0b2d0 --- /dev/null +++ b/src/driver/tasks/mod.rs @@ -0,0 +1,155 @@ +pub mod error; +mod events; +pub(crate) mod message; +mod mixer; +pub(crate) mod udp_rx; +pub(crate) mod udp_tx; +pub(crate) mod ws; + +use super::{ + connection::{error::Error as ConnectionError, Connection}, + Config, +}; +use flume::{Receiver, RecvError, Sender}; +use message::*; +use tokio::runtime::Handle; +use tracing::{error, info, instrument}; + +pub(crate) fn start(config: Config, rx: Receiver, tx: Sender) { + tokio::spawn(async move { + info!("Driver started."); + runner(config, rx, tx).await; + info!("Driver finished."); + }); +} + +fn start_internals(core: Sender) -> Interconnect { + let (evt_tx, evt_rx) = flume::unbounded(); + let (mix_tx, mix_rx) = flume::unbounded(); + + let interconnect = Interconnect { + core, + events: evt_tx, + mixer: mix_tx, + }; + + let ic = interconnect.clone(); + tokio::spawn(async move { + info!("Event processor started."); + events::runner(ic, evt_rx).await; + info!("Event processor finished."); + }); + + let ic = interconnect.clone(); + let handle = Handle::current(); + std::thread::spawn(move || { + info!("Mixer started."); + mixer::runner(ic, mix_rx, handle); + info!("Mixer finished."); + }); + + interconnect +} + +#[instrument(skip(rx, tx))] +async fn runner(config: Config, rx: Receiver, tx: Sender) { + let mut connection = None; + let mut interconnect = start_internals(tx); + + loop { + match rx.recv_async().await { + Ok(CoreMessage::ConnectWithResult(info, tx)) => { + connection = match Connection::new(info, &interconnect, &config).await { + Ok(connection) => { + // Other side may not be listening: this is fine. + let _ = tx.send(Ok(())); + Some(connection) + }, + Err(why) => { + // See above. + let _ = tx.send(Err(why)); + + None + }, + }; + }, + Ok(CoreMessage::Disconnect) => { + connection = None; + let _ = interconnect.mixer.send(MixerMessage::DropConn); + let _ = interconnect.mixer.send(MixerMessage::RebuildEncoder); + }, + Ok(CoreMessage::SetTrack(s)) => { + let _ = interconnect.mixer.send(MixerMessage::SetTrack(s)); + }, + Ok(CoreMessage::AddTrack(s)) => { + let _ = interconnect.mixer.send(MixerMessage::AddTrack(s)); + }, + Ok(CoreMessage::SetBitrate(b)) => { + let _ = interconnect.mixer.send(MixerMessage::SetBitrate(b)); + }, + Ok(CoreMessage::AddEvent(evt)) => { + let _ = interconnect.events.send(EventMessage::AddGlobalEvent(evt)); + }, + Ok(CoreMessage::Mute(m)) => { + let _ = interconnect.mixer.send(MixerMessage::SetMute(m)); + }, + Ok(CoreMessage::Reconnect) => { + if let Some(mut conn) = connection.take() { + // try once: if interconnect, try again. + // if still issue, full connect. + let info = conn.info.clone(); + + let full_connect = match conn.reconnect().await { + Ok(()) => { + connection = Some(conn); + false + }, + Err(ConnectionError::InterconnectFailure(_)) => { + interconnect.restart_volatile_internals(); + + match conn.reconnect().await { + Ok(()) => { + connection = Some(conn); + false + }, + _ => true, + } + }, + _ => true, + }; + + if full_connect { + connection = Connection::new(info, &interconnect, &config) + .await + .map_err(|e| { + error!("Catastrophic connection failure. Stopping. 
{:?}", e); + e + }) + .ok(); + } + } + }, + Ok(CoreMessage::FullReconnect) => + if let Some(conn) = connection.take() { + let info = conn.info.clone(); + + connection = Connection::new(info, &interconnect, &config) + .await + .map_err(|e| { + error!("Catastrophic connection failure. Stopping. {:?}", e); + e + }) + .ok(); + }, + Ok(CoreMessage::RebuildInterconnect) => { + interconnect.restart_volatile_internals(); + }, + Err(RecvError::Disconnected) | Ok(CoreMessage::Poison) => { + break; + }, + } + } + + info!("Main thread exited"); + interconnect.poison_all(); +} diff --git a/src/driver/tasks/udp_rx.rs b/src/driver/tasks/udp_rx.rs new file mode 100644 index 0000000..263ef76 --- /dev/null +++ b/src/driver/tasks/udp_rx.rs @@ -0,0 +1,286 @@ +use super::{ + error::{Error, Result}, + message::*, +}; +use crate::{constants::*, driver::CryptoMode, events::CoreContext}; +use audiopus::{coder::Decoder as OpusDecoder, Channels}; +use discortp::{ + demux::{self, DemuxedMut}, + rtp::{RtpExtensionPacket, RtpPacket}, + FromPacket, + MutablePacket, + Packet, + PacketSize, +}; +use flume::Receiver; +use std::collections::HashMap; +use tokio::net::udp::RecvHalf; +use tracing::{error, info, instrument, warn}; +use xsalsa20poly1305::{aead::AeadInPlace, Nonce, Tag, XSalsa20Poly1305 as Cipher, TAG_SIZE}; + +#[derive(Debug)] +struct SsrcState { + silent_frame_count: u16, + decoder: OpusDecoder, + last_seq: u16, +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum SpeakingDelta { + Same, + Start, + Stop, +} + +impl SsrcState { + fn new(pkt: RtpPacket<'_>) -> Self { + Self { + silent_frame_count: 5, // We do this to make the first speech packet fire an event. + decoder: OpusDecoder::new(SAMPLE_RATE, Channels::Stereo) + .expect("Failed to create new Opus decoder for source."), + last_seq: pkt.get_sequence().into(), + } + } + + fn process( + &mut self, + pkt: RtpPacket<'_>, + data_offset: usize, + ) -> Result<(SpeakingDelta, Vec)> { + let new_seq: u16 = pkt.get_sequence().into(); + + let extensions = pkt.get_extension() != 0; + let seq_delta = new_seq.wrapping_sub(self.last_seq); + Ok(if seq_delta >= (1 << 15) { + // Overflow, reordered (previously missing) packet. + (SpeakingDelta::Same, vec![]) + } else { + self.last_seq = new_seq; + let missed_packets = seq_delta.saturating_sub(1); + let (audio, pkt_size) = + self.scan_and_decode(&pkt.payload()[data_offset..], extensions, missed_packets)?; + + let delta = if pkt_size == SILENT_FRAME.len() { + // Frame is silent. + let old = self.silent_frame_count; + self.silent_frame_count = + self.silent_frame_count.saturating_add(1 + missed_packets); + + if self.silent_frame_count >= 5 && old < 5 { + SpeakingDelta::Stop + } else { + SpeakingDelta::Same + } + } else { + // Frame has meaningful audio. 
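+                // At least 5 consecutive silent frames mark a speaker as stopped,
+                // so the first audible frame after such a run is a fresh start.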
+ let out = if self.silent_frame_count >= 5 { + SpeakingDelta::Start + } else { + SpeakingDelta::Same + }; + self.silent_frame_count = 0; + out + }; + + (delta, audio) + }) + } + + fn scan_and_decode( + &mut self, + data: &[u8], + extension: bool, + missed_packets: u16, + ) -> Result<(Vec, usize)> { + let mut out = vec![0; STEREO_FRAME_SIZE]; + let start = if extension { + RtpExtensionPacket::new(data) + .map(|pkt| pkt.packet_size()) + .ok_or_else(|| { + error!("Extension packet indicated, but insufficient space."); + Error::IllegalVoicePacket + }) + } else { + Ok(0) + }?; + + for _ in 0..missed_packets { + let missing_frame: Option<&[u8]> = None; + if let Err(e) = self.decoder.decode(missing_frame, &mut out[..], false) { + warn!("Issue while decoding for missed packet: {:?}.", e); + } + } + + let audio_len = self + .decoder + .decode(Some(&data[start..]), &mut out[..], false) + .map_err(|e| { + error!("Failed to decode received packet: {:?}.", e); + e + })?; + + // Decoding to stereo: audio_len refers to sample count irrespective of channel count. + // => multiply by number of channels. + out.truncate(2 * audio_len); + + Ok((out, data.len() - start)) + } +} + +struct UdpRx { + cipher: Cipher, + decoder_map: HashMap, + #[allow(dead_code)] + mode: CryptoMode, // In future, this will allow crypto mode selection. + packet_buffer: [u8; VOICE_PACKET_MAX], + rx: Receiver, + udp_socket: RecvHalf, +} + +impl UdpRx { + #[instrument(skip(self))] + async fn run(&mut self, interconnect: &mut Interconnect) { + loop { + tokio::select! { + Ok((len, _addr)) = self.udp_socket.recv_from(&mut self.packet_buffer[..]) => { + self.process_udp_message(interconnect, len); + } + msg = self.rx.recv_async() => { + use UdpRxMessage::*; + match msg { + Ok(ReplaceInterconnect(i)) => { + *interconnect = i; + } + Ok(Poison) | Err(_) => break, + } + } + } + } + } + + fn process_udp_message(&mut self, interconnect: &Interconnect, len: usize) { + // NOTE: errors here (and in general for UDP) are not fatal to the connection. + // Panics should be avoided due to adversarial nature of rx'd packets, + // but correct handling should not prompt a reconnect. + // + // For simplicity, we nominate the mixing context to rebuild the event + // context if it fails (hence, the `let _ =` statements.), as it will try to + // make contact every 20ms. 
+ let packet = &mut self.packet_buffer[..len]; + + match demux::demux_mut(packet) { + DemuxedMut::Rtp(mut rtp) => { + if !rtp_valid(rtp.to_immutable()) { + error!("Illegal RTP message received."); + return; + } + + let rtp_body_start = + decrypt_in_place(&mut rtp, &self.cipher).expect("RTP decryption failed."); + + let entry = self + .decoder_map + .entry(rtp.get_ssrc()) + .or_insert_with(|| SsrcState::new(rtp.to_immutable())); + + if let Ok((delta, audio)) = entry.process(rtp.to_immutable(), rtp_body_start) { + match delta { + SpeakingDelta::Start => { + let _ = interconnect.events.send(EventMessage::FireCoreEvent( + CoreContext::SpeakingUpdate { + ssrc: rtp.get_ssrc(), + speaking: true, + }, + )); + }, + SpeakingDelta::Stop => { + let _ = interconnect.events.send(EventMessage::FireCoreEvent( + CoreContext::SpeakingUpdate { + ssrc: rtp.get_ssrc(), + speaking: false, + }, + )); + }, + _ => {}, + } + + let _ = interconnect.events.send(EventMessage::FireCoreEvent( + CoreContext::VoicePacket { + audio, + packet: rtp.from_packet(), + payload_offset: rtp_body_start, + }, + )); + } else { + warn!("RTP decoding/decrytion failed."); + } + }, + DemuxedMut::Rtcp(mut rtcp) => { + let rtcp_body_start = decrypt_in_place(&mut rtcp, &self.cipher); + + if let Ok(start) = rtcp_body_start { + let _ = interconnect.events.send(EventMessage::FireCoreEvent( + CoreContext::RtcpPacket { + packet: rtcp.from_packet(), + payload_offset: start, + }, + )); + } else { + warn!("RTCP decryption failed."); + } + }, + DemuxedMut::FailedParse(t) => { + warn!("Failed to parse message of type {:?}.", t); + }, + _ => { + warn!("Illegal UDP packet from voice server."); + }, + } + } +} + +#[instrument(skip(interconnect, rx, cipher))] +pub(crate) async fn runner( + mut interconnect: Interconnect, + rx: Receiver, + cipher: Cipher, + mode: CryptoMode, + udp_socket: RecvHalf, +) { + info!("UDP receive handle started."); + + let mut state = UdpRx { + cipher, + decoder_map: Default::default(), + mode, + packet_buffer: [0u8; VOICE_PACKET_MAX], + rx, + udp_socket, + }; + + state.run(&mut interconnect).await; + + info!("UDP receive handle stopped."); +} + +#[inline] +fn decrypt_in_place(packet: &mut impl MutablePacket, cipher: &Cipher) -> Result { + // Applies discord's cheapest. + // In future, might want to make a choice... + let header_len = packet.packet().len() - packet.payload().len(); + let mut nonce = Nonce::default(); + nonce[..header_len].copy_from_slice(&packet.packet()[..header_len]); + + let data = packet.payload_mut(); + let (tag_bytes, data_bytes) = data.split_at_mut(TAG_SIZE); + let tag = Tag::from_slice(tag_bytes); + + Ok(cipher + .decrypt_in_place_detached(&nonce, b"", data_bytes, tag) + .map(|_| TAG_SIZE)?) 
+} + +#[inline] +fn rtp_valid(packet: RtpPacket<'_>) -> bool { + packet.get_version() == RTP_VERSION && packet.get_payload_type() == RTP_PROFILE_TYPE +} diff --git a/src/driver/tasks/udp_tx.rs b/src/driver/tasks/udp_tx.rs new file mode 100644 index 0000000..7027a09 --- /dev/null +++ b/src/driver/tasks/udp_tx.rs @@ -0,0 +1,45 @@ +use super::message::*; +use crate::constants::*; +use discortp::discord::MutableKeepalivePacket; +use flume::Receiver; +use tokio::{ + net::udp::SendHalf, + time::{timeout_at, Elapsed, Instant}, +}; +use tracing::{error, info, instrument, trace}; + +#[instrument(skip(udp_msg_rx))] +pub(crate) async fn runner(udp_msg_rx: Receiver, ssrc: u32, mut udp_tx: SendHalf) { + info!("UDP transmit handle started."); + + let mut keepalive_bytes = [0u8; MutableKeepalivePacket::minimum_packet_size()]; + let mut ka = MutableKeepalivePacket::new(&mut keepalive_bytes[..]) + .expect("FATAL: Insufficient bytes given to keepalive packet."); + ka.set_ssrc(ssrc); + + let mut ka_time = Instant::now() + UDP_KEEPALIVE_GAP; + + loop { + use UdpTxMessage::*; + match timeout_at(ka_time, udp_msg_rx.recv_async()).await { + Err(Elapsed { .. }) => { + trace!("Sending UDP Keepalive."); + if let Err(e) = udp_tx.send(&keepalive_bytes[..]).await { + error!("Fatal UDP keepalive send error: {:?}.", e); + break; + } + ka_time += UDP_KEEPALIVE_GAP; + }, + Ok(Ok(Packet(p))) => + if let Err(e) = udp_tx.send(&p[..]).await { + error!("Fatal UDP packet send error: {:?}.", e); + break; + }, + Ok(Err(_)) | Ok(Ok(Poison)) => { + break; + }, + } + } + + info!("UDP transmit handle stopped."); +} diff --git a/src/driver/tasks/ws.rs b/src/driver/tasks/ws.rs new file mode 100644 index 0000000..6f9813c --- /dev/null +++ b/src/driver/tasks/ws.rs @@ -0,0 +1,205 @@ +use super::{error::Result, message::*}; +use crate::{ + events::CoreContext, + model::{ + payload::{Heartbeat, Speaking}, + Event as GatewayEvent, + SpeakingState, + }, + ws::{Error as WsError, ReceiverExt, SenderExt, WsStream}, +}; +use flume::Receiver; +use rand::random; +use std::time::Duration; +use tokio::time::{self, Instant}; +use tracing::{error, info, instrument, trace, warn}; + +struct AuxNetwork { + rx: Receiver, + ws_client: WsStream, + dont_send: bool, + + ssrc: u32, + heartbeat_interval: Duration, + + speaking: SpeakingState, + last_heartbeat_nonce: Option, +} + +impl AuxNetwork { + pub(crate) fn new( + evt_rx: Receiver, + ws_client: WsStream, + ssrc: u32, + heartbeat_interval: f64, + ) -> Self { + Self { + rx: evt_rx, + ws_client, + dont_send: false, + + ssrc, + heartbeat_interval: Duration::from_secs_f64(heartbeat_interval / 1000.0), + + speaking: SpeakingState::empty(), + last_heartbeat_nonce: None, + } + } + + #[instrument(skip(self))] + async fn run(&mut self, interconnect: &mut Interconnect) { + let mut next_heartbeat = Instant::now() + self.heartbeat_interval; + + loop { + let mut ws_error = false; + + let hb = time::delay_until(next_heartbeat); + + tokio::select! 
{ + _ = hb => { + ws_error = match self.send_heartbeat().await { + Err(e) => { + error!("Heartbeat send failure {:?}.", e); + true + }, + _ => false, + }; + next_heartbeat = self.next_heartbeat(); + } + ws_msg = self.ws_client.recv_json_no_timeout(), if !self.dont_send => { + ws_error = match ws_msg { + Err(WsError::Json(e)) => { + warn!("Unexpected JSON {:?}.", e); + false + }, + Err(e) => { + error!("Error processing ws {:?}.", e); + true + }, + Ok(Some(msg)) => { + self.process_ws(interconnect, msg); + false + }, + _ => false, + }; + } + inner_msg = self.rx.recv_async() => { + match inner_msg { + Ok(WsMessage::Ws(data)) => { + self.ws_client = *data; + next_heartbeat = self.next_heartbeat(); + self.dont_send = false; + }, + Ok(WsMessage::ReplaceInterconnect(i)) => { + *interconnect = i; + }, + Ok(WsMessage::SetKeepalive(keepalive)) => { + self.heartbeat_interval = Duration::from_secs_f64(keepalive / 1000.0); + next_heartbeat = self.next_heartbeat(); + }, + Ok(WsMessage::Speaking(is_speaking)) => { + if self.speaking.contains(SpeakingState::MICROPHONE) != is_speaking && !self.dont_send { + self.speaking.set(SpeakingState::MICROPHONE, is_speaking); + info!("Changing to {:?}", self.speaking); + + let ssu_status = self.ws_client + .send_json(&GatewayEvent::from(Speaking { + delay: Some(0), + speaking: self.speaking, + ssrc: self.ssrc, + user_id: None, + })) + .await; + + ws_error |= match ssu_status { + Err(e) => { + error!("Issue sending speaking update {:?}.", e); + true + }, + _ => false, + } + } + }, + Err(_) | Ok(WsMessage::Poison) => { + break; + }, + } + } + } + + if ws_error { + let _ = interconnect.core.send(CoreMessage::Reconnect); + self.dont_send = true; + } + } + } + + fn next_heartbeat(&self) -> Instant { + Instant::now() + self.heartbeat_interval + } + + async fn send_heartbeat(&mut self) -> Result<()> { + let nonce = random::(); + self.last_heartbeat_nonce = Some(nonce); + + trace!("Sent heartbeat {:?}", self.speaking); + + if !self.dont_send { + self.ws_client + .send_json(&GatewayEvent::from(Heartbeat { nonce })) + .await?; + } + + Ok(()) + } + + fn process_ws(&mut self, interconnect: &Interconnect, value: GatewayEvent) { + match value { + GatewayEvent::Speaking(ev) => { + let _ = interconnect.events.send(EventMessage::FireCoreEvent( + CoreContext::SpeakingStateUpdate(ev), + )); + }, + GatewayEvent::ClientConnect(ev) => { + let _ = interconnect + .events + .send(EventMessage::FireCoreEvent(CoreContext::ClientConnect(ev))); + }, + GatewayEvent::ClientDisconnect(ev) => { + let _ = interconnect.events.send(EventMessage::FireCoreEvent( + CoreContext::ClientDisconnect(ev), + )); + }, + GatewayEvent::HeartbeatAck(ev) => { + if let Some(nonce) = self.last_heartbeat_nonce.take() { + if ev.nonce == nonce { + trace!("Heartbeat ACK received."); + } else { + warn!( + "Heartbeat nonce mismatch! Expected {}, saw {}.", + nonce, ev.nonce + ); + } + } + }, + other => { + trace!("Received other websocket data: {:?}", other); + }, + } + } +} + +#[instrument(skip(interconnect, ws_client))] +pub(crate) async fn runner( + mut interconnect: Interconnect, + evt_rx: Receiver, + ws_client: WsStream, + ssrc: u32, + heartbeat_interval: f64, +) { + info!("WS thread started."); + let mut aux = AuxNetwork::new(evt_rx, ws_client, ssrc, heartbeat_interval); + + aux.run(&mut interconnect).await; + info!("WS thread finished."); +} diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..bfa4a4c --- /dev/null +++ b/src/error.rs @@ -0,0 +1,69 @@ +//! Driver and gateway error handling. 
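+//!
+//! For example, a failed channel join might be handled roughly as follows
+//! (`call` is assumed to be a `Call` built with the `gateway` and `driver`
+//! features enabled):
+//!
+//! ```rust,ignore
+//! match call.join(channel_id).await {
+//!     Ok(_conn_rx) => println!("Voice state update sent."),
+//!     Err(JoinError::NoSender) => println!("No gateway sender is available."),
+//!     Err(e) => println!("Join failed: {}.", e),
+//! }
+//! ```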
+ +#[cfg(feature = "serenity")] +use futures::channel::mpsc::TrySendError; +#[cfg(feature = "serenity")] +use serenity::gateway::InterMessage; +#[cfg(feature = "gateway")] +use std::{error::Error, fmt}; +#[cfg(feature = "twilight")] +use twilight_gateway::shard::CommandError; + +#[cfg(feature = "gateway")] +#[derive(Debug)] +/// Error returned when a manager or call handler is +/// unable to send messages over Discord's gateway. +pub enum JoinError { + /// No available gateway connection was provided to send + /// voice state update messages. + NoSender, + /// Tried to leave a [`Call`] which was not found. + /// + /// [`Call`]: ../struct.Call.html + NoCall, + #[cfg(feature = "serenity")] + /// Serenity-specific WebSocket send error. + Serenity(TrySendError), + #[cfg(feature = "twilight")] + /// Twilight-specific WebSocket send error. + Twilight(CommandError), +} + +#[cfg(feature = "gateway")] +impl fmt::Display for JoinError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Failed to Join Voice channel: ")?; + match self { + JoinError::NoSender => write!(f, "no gateway destination."), + JoinError::NoCall => write!(f, "tried to leave a non-existent call."), + #[cfg(feature = "serenity")] + JoinError::Serenity(t) => write!(f, "serenity failure {}.", t), + #[cfg(feature = "twilight")] + JoinError::Twilight(t) => write!(f, "twilight failure {}.", t), + } + } +} + +#[cfg(feature = "gateway")] +impl Error for JoinError {} + +#[cfg(all(feature = "serenity", feature = "gateway"))] +impl From> for JoinError { + fn from(e: TrySendError) -> Self { + JoinError::Serenity(e) + } +} + +#[cfg(all(feature = "twilight", feature = "gateway"))] +impl From for JoinError { + fn from(e: CommandError) -> Self { + JoinError::Twilight(e) + } +} + +#[cfg(feature = "gateway")] +/// Convenience type for Discord gateway error handling. +pub type JoinResult = Result; + +#[cfg(feature = "driver")] +pub use crate::driver::connection::error::{Error as ConnectionError, Result as ConnectionResult}; diff --git a/src/events/context.rs b/src/events/context.rs new file mode 100644 index 0000000..004465f --- /dev/null +++ b/src/events/context.rs @@ -0,0 +1,137 @@ +use super::*; +use crate::{ + model::payload::{ClientConnect, ClientDisconnect, Speaking}, + tracks::{TrackHandle, TrackState}, +}; +use discortp::{rtcp::Rtcp, rtp::Rtp}; + +/// Information about which tracks or data fired an event. +/// +/// [`Track`] events may be local or global, and have no tracks +/// if fired on the global context via [`Handler::add_global_event`]. +/// +/// [`Track`]: ../tracks/struct.Track.html +/// [`Handler::add_global_event`]: ../struct.Handler.html#method.add_global_event +#[derive(Clone, Debug)] +pub enum EventContext<'a> { + /// Track event context, passed to events created via [`TrackHandle::add_event`], + /// [`EventStore::add_event`], or relevant global events. + /// + /// [`EventStore::add_event`]: struct.EventStore.html#method.add_event + /// [`TrackHandle::add_event`]: ../tracks/struct.TrackHandle.html#method.add_event + Track(&'a [(&'a TrackState, &'a TrackHandle)]), + /// Speaking state update, typically describing how another voice + /// user is transmitting audio data. Clients must send at least one such + /// packet to allow SSRC/UserID matching. + SpeakingStateUpdate(Speaking), + /// Speaking state transition, describing whether a given source has started/stopped + /// transmitting. This fires in response to a silent burst, or the first packet + /// breaking such a burst. 
+ SpeakingUpdate { + /// Synchronisation Source of the user who has begun speaking. + /// + /// This must be combined with another event class to map this back to + /// its original UserId. + ssrc: u32, + /// Whether this user is currently speaking. + speaking: bool, + }, + /// Opus audio packet, received from another stream (detailed in `packet`). + /// `payload_offset` contains the true payload location within the raw packet's `payload()`, + /// if extensions or raw packet data are required. + /// if `audio.len() == 0`, then this packet arrived out-of-order. + VoicePacket { + /// Decoded audio from this packet. + audio: &'a Vec, + /// Raw RTP packet data. + /// + /// Includes the SSRC (i.e., sender) of this packet. + packet: &'a Rtp, + /// Byte index into the packet for where the payload begins. + payload_offset: usize, + }, + /// Telemetry/statistics packet, received from another stream (detailed in `packet`). + /// `payload_offset` contains the true payload location within the raw packet's `payload()`, + /// to allow manual decoding of `Rtcp` packet bodies. + RtcpPacket { + /// Raw RTCP packet data. + packet: &'a Rtcp, + /// Byte index into the packet for where the payload begins. + payload_offset: usize, + }, + /// Fired whenever a client connects to a call for the first time, allowing SSRC/UserID + /// matching. + ClientConnect(ClientConnect), + /// Fired whenever a client disconnects. + ClientDisconnect(ClientDisconnect), +} + +#[derive(Clone, Debug)] +pub(crate) enum CoreContext { + SpeakingStateUpdate(Speaking), + SpeakingUpdate { + ssrc: u32, + speaking: bool, + }, + VoicePacket { + audio: Vec, + packet: Rtp, + payload_offset: usize, + }, + RtcpPacket { + packet: Rtcp, + payload_offset: usize, + }, + ClientConnect(ClientConnect), + ClientDisconnect(ClientDisconnect), +} + +impl<'a> CoreContext { + pub(crate) fn to_user_context(&'a self) -> EventContext<'a> { + use CoreContext::*; + + match self { + SpeakingStateUpdate(evt) => EventContext::SpeakingStateUpdate(*evt), + SpeakingUpdate { ssrc, speaking } => EventContext::SpeakingUpdate { + ssrc: *ssrc, + speaking: *speaking, + }, + VoicePacket { + audio, + packet, + payload_offset, + } => EventContext::VoicePacket { + audio, + packet, + payload_offset: *payload_offset, + }, + RtcpPacket { + packet, + payload_offset, + } => EventContext::RtcpPacket { + packet, + payload_offset: *payload_offset, + }, + ClientConnect(evt) => EventContext::ClientConnect(*evt), + ClientDisconnect(evt) => EventContext::ClientDisconnect(*evt), + } + } +} + +impl EventContext<'_> { + /// Retreive the event class for an event (i.e., when matching) + /// an event against the registered listeners. + pub fn to_core_event(&self) -> Option { + use EventContext::*; + + match self { + SpeakingStateUpdate { .. } => Some(CoreEvent::SpeakingStateUpdate), + SpeakingUpdate { .. } => Some(CoreEvent::SpeakingUpdate), + VoicePacket { .. } => Some(CoreEvent::VoicePacket), + RtcpPacket { .. } => Some(CoreEvent::RtcpPacket), + ClientConnect { .. } => Some(CoreEvent::ClientConnect), + ClientDisconnect { .. } => Some(CoreEvent::ClientDisconnect), + _ => None, + } + } +} diff --git a/src/events/core.rs b/src/events/core.rs new file mode 100644 index 0000000..df5eee4 --- /dev/null +++ b/src/events/core.rs @@ -0,0 +1,31 @@ +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +/// Voice core events occur on receipt of +/// voice packets and telemetry. +/// +/// Core events persist while the `action` in [`EventData`] +/// returns `None`. 
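+///
+/// For example, a global handler for speaking transitions might be attached
+/// roughly as follows (`driver` is assumed to be a `Driver`, and `Listener`
+/// an illustrative `EventHandler` implementation):
+///
+/// ```rust,ignore
+/// driver.add_global_event(CoreEvent::SpeakingUpdate.into(), Listener);
+/// ```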
+/// +/// [`EventData`]: struct.EventData.html +pub enum CoreEvent { + /// Fired on receipt of a speaking state update from another host. + /// + /// Note: this will fire when a user starts speaking for the first time, + /// or changes their capabilities. + SpeakingStateUpdate, + /// Fires when a source starts speaking, or stops speaking + /// (*i.e.*, 5 consecutive silent frames). + SpeakingUpdate, + /// Fires on receipt of a voice packet from another stream in the voice call. + /// + /// As RTP packets do not map to Discord's notion of users, SSRCs must be mapped + /// back using the user IDs seen through client connection, disconnection, + /// or speaking state update. + VoicePacket, + /// Fires on receipt of an RTCP packet, containing various call stats + /// such as latency reports. + RtcpPacket, + /// Fires whenever a user connects to the same stream as the bot. + ClientConnect, + /// Fires whenever a user disconnects from the same stream as the bot. + ClientDisconnect, +} diff --git a/src/events/data.rs b/src/events/data.rs new file mode 100644 index 0000000..cd12c91 --- /dev/null +++ b/src/events/data.rs @@ -0,0 +1,88 @@ +use super::*; +use std::{cmp::Ordering, time::Duration}; + +/// Internal representation of an event, as handled by the audio context. +pub struct EventData { + pub(crate) event: Event, + pub(crate) fire_time: Option, + pub(crate) action: Box, +} + +impl EventData { + /// Create a representation of an event and its associated handler. + /// + /// An event handler, `action`, receives an [`EventContext`] and optionally + /// produces a new [`Event`] type for itself. Returning `None` will + /// maintain the same event type, while removing any [`Delayed`] entries. + /// Event handlers will be re-added with their new trigger condition, + /// or removed if [`Cancel`]led + /// + /// [`EventContext`]: enum.EventContext.html + /// [`Event`]: enum.Event.html + /// [`Delayed`]: enum.Event.html#variant.Delayed + /// [`Cancel`]: enum.Event.html#variant.Cancel + pub fn new(event: Event, action: F) -> Self { + Self { + event, + fire_time: None, + action: Box::new(action), + } + } + + /// Computes the next firing time for a timer event. + pub fn compute_activation(&mut self, now: Duration) { + match self.event { + Event::Periodic(period, phase) => { + self.fire_time = Some(now + phase.unwrap_or(period)); + }, + Event::Delayed(offset) => { + self.fire_time = Some(now + offset); + }, + _ => {}, + } + } +} + +impl std::fmt::Debug for EventData { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + write!( + f, + "Event {{ event: {:?}, fire_time: {:?}, action: }}", + self.event, self.fire_time + ) + } +} + +/// Events are ordered/compared based on their firing time. +impl Ord for EventData { + fn cmp(&self, other: &Self) -> Ordering { + if self.fire_time.is_some() && other.fire_time.is_some() { + let t1 = self + .fire_time + .as_ref() + .expect("T1 known to be well-defined by above."); + let t2 = other + .fire_time + .as_ref() + .expect("T2 known to be well-defined by above."); + + t1.cmp(&t2) + } else { + Ordering::Equal + } + } +} + +impl PartialOrd for EventData { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl PartialEq for EventData { + fn eq(&self, other: &Self) -> bool { + self.fire_time == other.fire_time + } +} + +impl Eq for EventData {} diff --git a/src/events/mod.rs b/src/events/mod.rs new file mode 100644 index 0000000..b70961f --- /dev/null +++ b/src/events/mod.rs @@ -0,0 +1,91 @@ +//! 
Events relating to tracks, timing, and other callers. + +mod context; +mod core; +mod data; +mod store; +mod track; +mod untimed; + +pub use self::{context::*, core::*, data::*, store::*, track::*, untimed::*}; + +use async_trait::async_trait; +use std::time::Duration; + +#[async_trait] +/// Trait to handle an event which can be fired per-track, or globally. +/// +/// These may be feasibly reused between several event sources. +pub trait EventHandler: Send + Sync { + /// Respond to one received event. + async fn act(&self, ctx: &EventContext<'_>) -> Option; +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +/// Classes of event which may occur, triggering a handler +/// at the local (track-specific) or global level. +/// +/// Local time-based events rely upon the current playback +/// time of a track, and so will not fire if a track becomes paused +/// or stops. In case this is required, global events are a better +/// fit. +/// +/// Event handlers themselves are described in [`EventData::action`]. +/// +/// [`EventData::action`]: struct.EventData.html#method.action +pub enum Event { + /// Periodic events rely upon two parameters: a *period* + /// and an optional *phase*. + /// + /// If the *phase* is `None`, then the event will first fire + /// in one *period*. Periodic events repeat automatically + /// so long as the `action` in [`EventData`] returns `None`. + /// + /// [`EventData`]: struct.EventData.html + Periodic(Duration, Option), + /// Delayed events rely upon a *delay* parameter, and + /// fire one *delay* after the audio context processes them. + /// + /// Delayed events are automatically removed once fired, + /// so long as the `action` in [`EventData`] returns `None`. + /// + /// [`EventData`]: struct.EventData.html + Delayed(Duration), + /// Track events correspond to certain actions or changes + /// of state, such as a track finishing, looping, or being + /// manually stopped. + /// + /// Track events persist while the `action` in [`EventData`] + /// returns `None`. + /// + /// [`EventData`]: struct.EventData.html + Track(TrackEvent), + /// Core events + /// + /// Track events persist while the `action` in [`EventData`] + /// returns `None`. Core events **must** be applied globally, + /// as attaching them to a track is a no-op. + /// + /// [`EventData`]: struct.EventData.html + Core(CoreEvent), + /// Cancels the event, if it was intended to persist. + Cancel, +} + +impl Event { + pub(crate) fn is_global_only(&self) -> bool { + matches!(self, Self::Core(_)) + } +} + +impl From for Event { + fn from(evt: TrackEvent) -> Self { + Event::Track(evt) + } +} + +impl From for Event { + fn from(evt: CoreEvent) -> Self { + Event::Core(evt) + } +} diff --git a/src/events/store.rs b/src/events/store.rs new file mode 100644 index 0000000..6518ee2 --- /dev/null +++ b/src/events/store.rs @@ -0,0 +1,252 @@ +use super::*; +use crate::{ + constants::*, + tracks::{PlayMode, TrackHandle, TrackState}, +}; +use std::{ + collections::{BinaryHeap, HashMap}, + time::Duration, +}; +use tracing::info; + +#[derive(Debug, Default)] +/// Storage for [`EventData`], designed to be used for both local and global contexts. +/// +/// Timed events are stored in a binary heap for fast selection, and have custom `Eq`, +/// `Ord`, etc. implementations to support (only) this. +/// +/// [`EventData`]: struct.EventData.html +pub struct EventStore { + timed: BinaryHeap, + untimed: HashMap>, + local_only: bool, +} + +impl EventStore { + /// Creates a new event store to be used globally. 
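+    ///
+    /// A sketch of direct use (the driver normally manages stores itself;
+    /// `MyHandler` is an illustrative `EventHandler`):
+    ///
+    /// ```rust,ignore
+    /// let mut store = EventStore::new();
+    /// store.add_event(
+    ///     EventData::new(Event::Delayed(Duration::from_secs(5)), MyHandler),
+    ///     Duration::default(),
+    /// );
+    /// ```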
+ pub fn new() -> Self { + Default::default() + } + + /// Creates a new event store to be used within a [`Track`]. + /// + /// This is usually automatically installed by the driver once + /// a track has been registered. + /// + /// [`Track`]: ../tracks/struct.Track.html + pub fn new_local() -> Self { + EventStore { + local_only: true, + ..Default::default() + } + } + + /// Add an event to this store. + /// + /// Updates `evt` according to [`EventData::compute_activation`]. + /// + /// [`EventData::compute_activation`]: struct.EventData.html#method.compute_activation + pub fn add_event(&mut self, mut evt: EventData, now: Duration) { + evt.compute_activation(now); + + if self.local_only && evt.event.is_global_only() { + return; + } + + use Event::*; + match evt.event { + Core(c) => { + self.untimed + .entry(c.into()) + .or_insert_with(Vec::new) + .push(evt); + }, + Track(t) => { + self.untimed + .entry(t.into()) + .or_insert_with(Vec::new) + .push(evt); + }, + Delayed(_) | Periodic(_, _) => { + self.timed.push(evt); + }, + _ => { + // Event cancelled. + }, + } + } + + /// Processes all events due up to and including `now`. + pub(crate) async fn process_timed(&mut self, now: Duration, ctx: EventContext<'_>) { + while let Some(evt) = self.timed.peek() { + if evt + .fire_time + .as_ref() + .expect("Timed event must have a fire_time.") + > &now + { + break; + } + let mut evt = self + .timed + .pop() + .expect("Can only succeed due to peek = Some(...)."); + + let old_evt_type = evt.event; + if let Some(new_evt_type) = evt.action.act(&ctx).await { + evt.event = new_evt_type; + self.add_event(evt, now); + } else if let Event::Periodic(d, _) = old_evt_type { + evt.event = Event::Periodic(d, None); + self.add_event(evt, now); + } + } + } + + /// Processes all events attached to the given track event. + pub(crate) async fn process_untimed( + &mut self, + now: Duration, + untimed_event: UntimedEvent, + ctx: EventContext<'_>, + ) { + // move a Vec in and out: not too expensive, but could be better. + // Although it's obvious that moving an event out of one vec and into + // another necessitates that they be different event types, thus entries, + // convincing the compiler of this is non-trivial without making them dedicated + // fields. + let events = self.untimed.remove(&untimed_event); + if let Some(mut events) = events { + // TODO: Possibly use tombstones to prevent realloc/memcpys? + // i.e., never shrink array, replace ended tracks with , + // maintain a "first-track" stack and freelist alongside. 
+ let mut i = 0; + while i < events.len() { + let evt = &mut events[i]; + // Only remove/readd if the event type changes (i.e., Some AND new != old) + if let Some(new_evt_type) = evt.action.act(&ctx).await { + if evt.event == new_evt_type { + let mut evt = events.remove(i); + + evt.event = new_evt_type; + self.add_event(evt, now); + } else { + i += 1; + } + } else { + i += 1; + }; + } + self.untimed.insert(untimed_event, events); + } + } +} + +#[derive(Debug, Default)] +pub(crate) struct GlobalEvents { + pub(crate) store: EventStore, + pub(crate) time: Duration, + pub(crate) awaiting_tick: HashMap>, +} + +impl GlobalEvents { + pub(crate) fn add_event(&mut self, evt: EventData) { + self.store.add_event(evt, self.time); + } + + pub(crate) async fn fire_core_event(&mut self, evt: CoreEvent, ctx: EventContext<'_>) { + self.store.process_untimed(self.time, evt.into(), ctx).await; + } + + pub(crate) fn fire_track_event(&mut self, evt: TrackEvent, index: usize) { + let holder = self.awaiting_tick.entry(evt).or_insert_with(Vec::new); + + holder.push(index); + } + + pub(crate) async fn tick( + &mut self, + events: &mut Vec, + states: &mut Vec, + handles: &mut Vec, + ) { + // Global timed events + self.time += TIMESTEP_LENGTH; + self.store + .process_timed(self.time, EventContext::Track(&[])) + .await; + + // Local timed events + for (i, state) in states.iter_mut().enumerate() { + if state.playing == PlayMode::Play { + state.step_frame(); + + let event_store = events + .get_mut(i) + .expect("Missing store index for Tick (local timed)."); + let handle = handles + .get_mut(i) + .expect("Missing handle index for Tick (local timed)."); + + event_store + .process_timed(state.play_time, EventContext::Track(&[(&state, &handle)])) + .await; + } + } + + for (evt, indices) in self.awaiting_tick.iter() { + let untimed = (*evt).into(); + + if !indices.is_empty() { + info!("Firing {:?} for {:?}", evt, indices); + } + + // Local untimed track events. + for &i in indices.iter() { + let event_store = events + .get_mut(i) + .expect("Missing store index for Tick (local untimed)."); + let handle = handles + .get_mut(i) + .expect("Missing handle index for Tick (local untimed)."); + let state = states + .get_mut(i) + .expect("Missing state index for Tick (local untimed)."); + + event_store + .process_untimed( + state.position, + untimed, + EventContext::Track(&[(&state, &handle)]), + ) + .await; + } + + // Global untimed track events. + if self.store.untimed.contains_key(&untimed) && !indices.is_empty() { + let global_ctx: Vec<(&TrackState, &TrackHandle)> = indices + .iter() + .map(|i| { + ( + states + .get(*i) + .expect("Missing state index for Tick (global untimed)"), + handles + .get(*i) + .expect("Missing handle index for Tick (global untimed)"), + ) + }) + .collect(); + + self.store + .process_untimed(self.time, untimed, EventContext::Track(&global_ctx[..])) + .await + } + } + + // Now drain vecs. + for (_evt, indices) in self.awaiting_tick.iter_mut() { + indices.clear(); + } + } +} diff --git a/src/events/track.rs b/src/events/track.rs new file mode 100644 index 0000000..df567a9 --- /dev/null +++ b/src/events/track.rs @@ -0,0 +1,16 @@ +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +/// Track events correspond to certain actions or changes +/// of state, such as a track finishing, looping, or being +/// manually stopped. Voice core events occur on receipt of +/// voice packets and telemetry. +/// +/// Track events persist while the `action` in [`EventData`] +/// returns `None`. 
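+///
+/// For example, reacting whenever any track ends might be wired up roughly as
+/// follows (`driver` is assumed to be a `Driver`, and `EndNotifier` an
+/// illustrative `EventHandler`):
+///
+/// ```rust,ignore
+/// driver.add_global_event(TrackEvent::End.into(), EndNotifier);
+/// ```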
+/// +/// [`EventData`]: struct.EventData.html +pub enum TrackEvent { + /// The attached track has ended. + End, + /// The attached track has looped. + Loop, +} diff --git a/src/events/untimed.rs b/src/events/untimed.rs new file mode 100644 index 0000000..4bb4899 --- /dev/null +++ b/src/events/untimed.rs @@ -0,0 +1,28 @@ +use super::*; + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +/// Track and voice core events. +/// +/// Untimed events persist while the `action` in [`EventData`] +/// returns `None`. +/// +/// [`EventData`]: struct.EventData.html +pub enum UntimedEvent { + /// Untimed events belonging to a track, such as state changes, end, or loops. + Track(TrackEvent), + /// Untimed events belonging to the global context, such as finished tracks, + /// client speaking updates, or RT(C)P voice and telemetry data. + Core(CoreEvent), +} + +impl From for UntimedEvent { + fn from(evt: TrackEvent) -> Self { + UntimedEvent::Track(evt) + } +} + +impl From for UntimedEvent { + fn from(evt: CoreEvent) -> Self { + UntimedEvent::Core(evt) + } +} diff --git a/src/handler.rs b/src/handler.rs new file mode 100644 index 0000000..3ecb089 --- /dev/null +++ b/src/handler.rs @@ -0,0 +1,301 @@ +#[cfg(feature = "driver")] +use crate::{driver::Driver, error::ConnectionResult}; +use crate::{ + error::{JoinError, JoinResult}, + id::{ChannelId, GuildId, UserId}, + info::{ConnectionInfo, ConnectionProgress}, + shards::Shard, +}; +use flume::{Receiver, Sender}; +use serde_json::json; +use tracing::instrument; + +#[cfg(feature = "driver")] +use std::ops::{Deref, DerefMut}; + +#[derive(Clone, Debug)] +enum Return { + Info(Sender), + #[cfg(feature = "driver")] + Conn(Sender>), +} + +/// The Call handler is responsible for a single voice connection, acting +/// as a clean API above the inner state and gateway message management. +/// +/// If the `"driver"` feature is enabled, then a Call exposes all control methods of +/// [`Driver`] via `Deref(Mut)`. +/// +/// [`Driver`]: driver/struct.Driver.html +/// [`Shard`]: ../gateway/struct.Shard.html +#[derive(Clone, Debug)] +pub struct Call { + connection: Option<(ChannelId, ConnectionProgress, Return)>, + + #[cfg(feature = "driver")] + /// The internal controller of the voice connection monitor thread. + driver: Driver, + + guild_id: GuildId, + /// Whether the current handler is set to deafen voice connections. + self_deaf: bool, + /// Whether the current handler is set to mute voice connections. + self_mute: bool, + user_id: UserId, + /// Will be set when a `Call` is made via the [`new`][`Call::new`] + /// method. + /// + /// When set via [`standalone`][`Call::standalone`], it will not be + /// present. + ws: Option, +} + +impl Call { + /// Creates a new Call, which will send out WebSocket messages via + /// the given shard. + #[inline] + #[instrument] + pub fn new(guild_id: GuildId, ws: Shard, user_id: UserId) -> Self { + Self::new_raw(guild_id, Some(ws), user_id) + } + + /// Creates a new, standalone Call which is not connected via + /// WebSocket to the Gateway. + /// + /// Actions such as muting, deafening, and switching channels will not + /// function through this Call and must be done through some other + /// method, as the values will only be internally updated. + /// + /// For most use cases you do not want this. 
+ #[inline] + #[instrument] + pub fn standalone(guild_id: GuildId, user_id: UserId) -> Self { + Self::new_raw(guild_id, None, user_id) + } + + fn new_raw(guild_id: GuildId, ws: Option, user_id: UserId) -> Self { + Call { + connection: None, + #[cfg(feature = "driver")] + driver: Default::default(), + guild_id, + self_deaf: false, + self_mute: false, + user_id, + ws, + } + } + + #[instrument(skip(self))] + fn do_connect(&mut self) { + match &self.connection { + Some((_, ConnectionProgress::Complete(c), Return::Info(tx))) => { + // It's okay if the receiver hung up. + let _ = tx.send(c.clone()); + }, + #[cfg(feature = "driver")] + Some((_, ConnectionProgress::Complete(c), Return::Conn(tx))) => { + self.driver.raw_connect(c.clone(), tx.clone()); + }, + _ => {}, + } + } + + /// Sets whether the current connection is to be deafened. + /// + /// If there is no live voice connection, then this only acts as a settings + /// update for future connections. + /// + /// **Note**: Unlike in the official client, you _can_ be deafened while + /// not being muted. + /// + /// **Note**: If the `Call` was created via [`standalone`], then this + /// will _only_ update whether the connection is internally deafened. + /// + /// [`standalone`]: #method.standalone + #[instrument(skip(self))] + pub async fn deafen(&mut self, deaf: bool) -> JoinResult<()> { + self.self_deaf = deaf; + + self.update().await + } + + /// Returns whether the current connection is self-deafened in this server. + /// + /// This is purely cosmetic. + #[instrument(skip(self))] + pub fn is_deaf(&self) -> bool { + self.self_deaf + } + + #[cfg(feature = "driver")] + /// Connect or switch to the given voice channel by its Id. + #[instrument(skip(self))] + pub async fn join( + &mut self, + channel_id: ChannelId, + ) -> JoinResult>> { + let (tx, rx) = flume::unbounded(); + + self.connection = Some(( + channel_id, + ConnectionProgress::new(self.guild_id, self.user_id), + Return::Conn(tx), + )); + + self.update().await.map(|_| rx) + } + + /// Join the selected voice channel, *without* running/starting an RTP + /// session or running the driver. + /// + /// Use this if you require connection info for lavalink, + /// some other voice implementation, or don't want to use the driver for a given call. + #[instrument(skip(self))] + pub async fn join_gateway( + &mut self, + channel_id: ChannelId, + ) -> JoinResult> { + let (tx, rx) = flume::unbounded(); + + self.connection = Some(( + channel_id, + ConnectionProgress::new(self.guild_id, self.user_id), + Return::Info(tx), + )); + + self.update().await.map(|_| rx) + } + + /// Leaves the current voice channel, disconnecting from it. + /// + /// This does _not_ forget settings, like whether to be self-deafened or + /// self-muted. + /// + /// **Note**: If the `Call` was created via [`standalone`], then this + /// will _only_ update whether the connection is internally connected to a + /// voice channel. + /// + /// [`standalone`]: #method.standalone + #[instrument(skip(self))] + pub async fn leave(&mut self) -> JoinResult<()> { + // Only send an update if we were in a voice channel. + self.connection = None; + + #[cfg(feature = "driver")] + self.driver.leave(); + + self.update().await + } + + /// Sets whether the current connection is to be muted. + /// + /// If there is no live voice connection, then this only acts as a settings + /// update for future connections. 
+ /// + /// **Note**: If the `Call` was created via [`standalone`], then this + /// will _only_ update whether the connection is internally muted. + /// + /// [`standalone`]: #method.standalone + #[instrument(skip(self))] + pub async fn mute(&mut self, mute: bool) -> JoinResult<()> { + self.self_mute = mute; + + #[cfg(feature = "driver")] + self.driver.mute(mute); + + self.update().await + } + + /// Returns whether the current connection is self-muted in this server. + #[instrument(skip(self))] + pub fn is_mute(&self) -> bool { + self.self_mute + } + + /// Updates the voice server data. + /// + /// You should only need to use this if you initialized the `Call` via + /// [`standalone`]. + /// + /// Refer to the documentation for [`connect`] for when this will + /// automatically connect to a voice channel. + /// + /// [`connect`]: #method.connect + /// [`standalone`]: #method.standalone + #[instrument(skip(self, token))] + pub fn update_server(&mut self, endpoint: String, token: String) { + let try_conn = if let Some((_, ref mut progress, _)) = self.connection.as_mut() { + progress.apply_server_update(endpoint, token) + } else { + false + }; + + if try_conn { + self.do_connect(); + } + } + + /// Updates the internal voice state of the current user. + /// + /// You should only need to use this if you initialized the `Call` via + /// [`standalone`]. + /// + /// refer to the documentation for [`connect`] for when this will + /// automatically connect to a voice channel. + /// + /// [`connect`]: #method.connect + /// [`standalone`]: #method.standalone + #[instrument(skip(self))] + pub fn update_state(&mut self, session_id: String) { + let try_conn = if let Some((_, ref mut progress, _)) = self.connection.as_mut() { + progress.apply_state_update(session_id) + } else { + false + }; + + if try_conn { + self.do_connect(); + } + } + + /// Send an update for the current session over WS. + /// + /// Does nothing if initialized via [`standalone`]. + /// + /// [`standalone`]: #method.standalone + #[instrument(skip(self))] + async fn update(&mut self) -> JoinResult<()> { + if let Some(ws) = self.ws.as_mut() { + let map = json!({ + "op": 4, + "d": { + "channel_id": self.connection.as_ref().map(|c| c.0.0), + "guild_id": self.guild_id.0, + "self_deaf": self.self_deaf, + "self_mute": self.self_mute, + } + }); + + ws.send(map).await + } else { + Err(JoinError::NoSender) + } + } +} + +#[cfg(feature = "driver")] +impl Deref for Call { + type Target = Driver; + + fn deref(&self) -> &Self::Target { + &self.driver + } +} + +#[cfg(feature = "driver")] +impl DerefMut for Call { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.driver + } +} diff --git a/src/id.rs b/src/id.rs new file mode 100644 index 0000000..f28e108 --- /dev/null +++ b/src/id.rs @@ -0,0 +1,121 @@ +//! Newtypes around Discord IDs for library cross-compatibility. + +#[cfg(feature = "driver")] +use crate::model::id::{GuildId as DriverGuild, UserId as DriverUser}; +#[cfg(feature = "serenity")] +use serenity::model::id::{ + ChannelId as SerenityChannel, + GuildId as SerenityGuild, + UserId as SerenityUser, +}; +use std::fmt::{Display, Formatter, Result as FmtResult}; +#[cfg(feature = "twilight")] +use twilight_model::id::{ + ChannelId as TwilightChannel, + GuildId as TwilightGuild, + UserId as TwilightUser, +}; + +/// ID of a Discord voice/text channel. +#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] +pub struct ChannelId(pub u64); + +/// ID of a Discord guild (colloquially, "server"). 
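Putting `standalone`, `update_server`, and `update_state` together: a rough sketch of forwarding gateway payloads into a standalone `Call`. The module paths are assumed crate-root re-exports, and every string value is a placeholder.

```rust
use songbird::{
    id::{GuildId, UserId},
    Call,
};

fn main() {
    // No WS shard attached: mute/deafen/channel changes are tracked internally only.
    let mut call = Call::standalone(GuildId(1), UserId(2));

    // Forward the two halves of Discord's voice handshake by hand.
    // From a Voice State Update event:
    call.update_state("placeholder-session-id".into());

    // From a Voice Server Update event:
    call.update_server(
        "placeholder.endpoint.discord.media".into(),
        "placeholder-token".into(),
    );
}
```

Note that `update_server` and `update_state` only apply once `connection` has been populated by a prior `join`/`join_gateway` call, so a real integration would select a channel first.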
+#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] +pub struct GuildId(pub u64); + +/// ID of a Discord user. +#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] +pub struct UserId(pub u64); + +impl Display for ChannelId { + fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { + Display::fmt(&self.0, f) + } +} + +impl From for ChannelId { + fn from(id: u64) -> Self { + Self(id) + } +} + +#[cfg(feature = "serenity")] +impl From for ChannelId { + fn from(id: SerenityChannel) -> Self { + Self(id.0) + } +} + +#[cfg(feature = "twilight")] +impl From for ChannelId { + fn from(id: TwilightChannel) -> Self { + Self(id.0) + } +} + +impl Display for GuildId { + fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { + Display::fmt(&self.0, f) + } +} + +impl From for GuildId { + fn from(id: u64) -> Self { + Self(id) + } +} + +#[cfg(feature = "serenity")] +impl From for GuildId { + fn from(id: SerenityGuild) -> Self { + Self(id.0) + } +} + +#[cfg(feature = "driver")] +impl From for DriverGuild { + fn from(id: GuildId) -> Self { + Self(id.0) + } +} + +#[cfg(feature = "twilight")] +impl From for GuildId { + fn from(id: TwilightGuild) -> Self { + Self(id.0) + } +} + +impl Display for UserId { + fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { + Display::fmt(&self.0, f) + } +} + +impl From for UserId { + fn from(id: u64) -> Self { + Self(id) + } +} + +#[cfg(feature = "serenity")] +impl From for UserId { + fn from(id: SerenityUser) -> Self { + Self(id.0) + } +} + +#[cfg(feature = "driver")] +impl From for DriverUser { + fn from(id: UserId) -> Self { + Self(id.0) + } +} + +#[cfg(feature = "twilight")] +impl From for UserId { + fn from(id: TwilightUser) -> Self { + Self(id.0) + } +} diff --git a/src/info.rs b/src/info.rs new file mode 100644 index 0000000..8b3fdb3 --- /dev/null +++ b/src/info.rs @@ -0,0 +1,137 @@ +use crate::id::{GuildId, UserId}; +use std::fmt; + +#[derive(Clone, Debug)] +pub(crate) enum ConnectionProgress { + Complete(ConnectionInfo), + Incomplete(Partial), +} + +impl ConnectionProgress { + pub fn new(guild_id: GuildId, user_id: UserId) -> Self { + ConnectionProgress::Incomplete(Partial { + guild_id, + user_id, + ..Default::default() + }) + } + + pub(crate) fn apply_state_update(&mut self, session_id: String) -> bool { + use ConnectionProgress::*; + match self { + Complete(c) => { + let should_reconn = c.session_id != session_id; + c.session_id = session_id; + should_reconn + }, + Incomplete(i) => i + .apply_state_update(session_id) + .map(|info| { + *self = Complete(info); + }) + .is_some(), + } + } + + pub(crate) fn apply_server_update(&mut self, endpoint: String, token: String) -> bool { + use ConnectionProgress::*; + match self { + Complete(c) => { + let should_reconn = c.endpoint != endpoint || c.token != token; + + c.endpoint = endpoint; + c.token = token; + + should_reconn + }, + Incomplete(i) => i + .apply_server_update(endpoint, token) + .map(|info| { + *self = Complete(info); + }) + .is_some(), + } + } +} + +/// Parameters and information needed to start communicating with Discord's voice servers, either +/// with the Songbird driver, lavalink, or other system. +#[derive(Clone)] +pub struct ConnectionInfo { + /// URL of the voice websocket gateway server assigned to this call. + pub endpoint: String, + /// ID of the target voice channel's parent guild. + /// + /// Bots cannot connect to a guildless (i.e., direct message) voice call. + pub guild_id: GuildId, + /// Unique string describing this session for validation/authentication purposes. 
+ pub session_id: String, + /// Ephemeral secret used to validate the above session. + pub token: String, + /// UserID of this bot. + pub user_id: UserId, +} + +impl fmt::Debug for ConnectionInfo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ConnectionInfo") + .field("endpoint", &self.endpoint) + .field("guild_id", &self.guild_id) + .field("session_id", &self.session_id) + .field("token", &"") + .field("user_id", &self.user_id) + .finish() + } +} + +#[derive(Clone, Default)] +pub(crate) struct Partial { + pub endpoint: Option, + pub guild_id: GuildId, + pub session_id: Option, + pub token: Option, + pub user_id: UserId, +} + +impl fmt::Debug for Partial { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Partial") + .field("endpoint", &self.endpoint) + .field("session_id", &self.session_id) + .field("token_is_some", &self.token.is_some()) + .finish() + } +} + +impl Partial { + fn finalise(&mut self) -> Option { + if self.endpoint.is_some() && self.session_id.is_some() && self.token.is_some() { + let endpoint = self.endpoint.take().unwrap(); + let session_id = self.session_id.take().unwrap(); + let token = self.token.take().unwrap(); + + Some(ConnectionInfo { + endpoint, + session_id, + token, + guild_id: self.guild_id, + user_id: self.user_id, + }) + } else { + None + } + } + + fn apply_state_update(&mut self, session_id: String) -> Option { + self.session_id = Some(session_id); + + self.finalise() + } + + fn apply_server_update(&mut self, endpoint: String, token: String) -> Option { + self.endpoint = Some(endpoint); + self.token = Some(token); + + self.finalise() + } +} diff --git a/src/input/cached/compressed.rs b/src/input/cached/compressed.rs new file mode 100644 index 0000000..183cba9 --- /dev/null +++ b/src/input/cached/compressed.rs @@ -0,0 +1,303 @@ +use super::{apply_length_hint, compressed_cost_per_sec, default_config}; +use crate::{ + constants::*, + input::{ + error::{Error, Result}, + CodecType, + Container, + Input, + Metadata, + Reader, + }, +}; +use audiopus::{ + coder::Encoder as OpusEncoder, + Application, + Bitrate, + Channels, + Error as OpusError, + ErrorCode as OpusErrorCode, + SampleRate, +}; +use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; +use std::{ + convert::TryInto, + io::{Error as IoError, ErrorKind as IoErrorKind, Read, Result as IoResult}, + mem, + sync::atomic::{AtomicUsize, Ordering}, +}; +use streamcatcher::{Config, NeedsBytes, Stateful, Transform, TransformPosition, TxCatcher}; +use tracing::{debug, trace}; + +/// A wrapper around an existing [`Input`] which compresses +/// the input using the Opus codec before storing it in memory. +/// +/// The main purpose of this wrapper is to enable seeking on +/// incompatible sources (i.e., ffmpeg output) and to ease resource +/// consumption for commonly reused/shared tracks. [`Restartable`] +/// and [`Memory`] offer the same functionality with different +/// tradeoffs. +/// +/// This is intended for use with larger, repeatedly used audio +/// tracks shared between sources, and stores the sound data +/// retrieved as **compressed Opus audio**. There is an associated memory cost, +/// but this is far smaller than using a [`Memory`]. +/// +/// [`Input`]: ../struct.Input.html +/// [`Memory`]: struct.Memory.html +/// [`Restartable`]: ../struct.Restartable.html +#[derive(Clone, Debug)] +pub struct Compressed { + /// Inner shared bytestore. + pub raw: TxCatcher, OpusCompressor>, + /// Metadata moved out of the captured source. 
+ pub metadata: Metadata, + /// Stereo-ness of the captured source. + pub stereo: bool, +} + +impl Compressed { + /// Wrap an existing [`Input`] with an in-memory store, compressed using Opus. + /// + /// [`Input`]: ../struct.Input.html + /// [`Metadata.duration`]: ../struct.Metadata.html#structfield.duration + pub fn new(source: Input, bitrate: Bitrate) -> Result { + Self::with_config(source, bitrate, None) + } + + /// Wrap an existing [`Input`] with an in-memory store, compressed using Opus. + /// + /// `config.length_hint` may be used to control the size of the initial chunk, preventing + /// needless allocations and copies. If this is not present, the value specified in + /// `source`'s [`Metadata.duration`] will be used. + /// + /// [`Input`]: ../struct.Input.html + /// [`Metadata.duration`]: ../struct.Metadata.html#structfield.duration + pub fn with_config(source: Input, bitrate: Bitrate, config: Option) -> Result { + let channels = if source.stereo { + Channels::Stereo + } else { + Channels::Mono + }; + let mut encoder = OpusEncoder::new(SampleRate::Hz48000, channels, Application::Audio)?; + + encoder.set_bitrate(bitrate)?; + + Self::with_encoder(source, encoder, config) + } + + /// Wrap an existing [`Input`] with an in-memory store, compressed using a user-defined + /// Opus encoder. + /// + /// `length_hint` functions as in [`new`]. This function's behaviour is undefined if your encoder + /// has a different sample rate than 48kHz, and if the decoder has a different channel count from the source. + /// + /// [`Input`]: ../struct.Input.html + /// [`new`]: #method.new + pub fn with_encoder( + mut source: Input, + encoder: OpusEncoder, + config: Option, + ) -> Result { + let bitrate = encoder.bitrate()?; + let cost_per_sec = compressed_cost_per_sec(bitrate); + let stereo = source.stereo; + let metadata = source.metadata.take(); + + let mut config = config.unwrap_or_else(|| default_config(cost_per_sec)); + + // apply length hint. + if config.length_hint.is_none() { + if let Some(dur) = metadata.duration { + apply_length_hint(&mut config, dur, cost_per_sec); + } + } + + let raw = config + .build_tx(Box::new(source), OpusCompressor::new(encoder, stereo)) + .map_err(Error::Streamcatcher)?; + + Ok(Self { + raw, + metadata, + stereo, + }) + } + + /// Acquire a new handle to this object, creating a new + /// view of the existing cached data from the beginning. + pub fn new_handle(&self) -> Self { + Self { + raw: self.raw.new_handle(), + metadata: self.metadata.clone(), + stereo: self.stereo, + } + } +} + +impl From for Input { + fn from(src: Compressed) -> Self { + Input::new( + true, + Reader::Compressed(src.raw), + CodecType::Opus + .try_into() + .expect("Default decoder values are known to be valid."), + Container::Dca { first_frame: 0 }, + Some(src.metadata), + ) + } +} + +/// Transform applied inside [`Compressed`], converting a floating-point PCM +/// input stream into a DCA-framed Opus stream. +/// +/// Created and managed by [`Compressed`]. 
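A usage sketch for the constructors above. The file path and bitrate are arbitrary, and the `ffmpeg` helper plus the `songbird::input` re-export paths are taken from elsewhere in this patch:

```rust
use audiopus::Bitrate;
use songbird::input::{cached::Compressed, error::Error, ffmpeg, Input};

/// Cache one decoded source as Opus so it can be reused and re-read cheaply.
async fn cached_input() -> Result<Input, Error> {
    let source = ffmpeg("assets/loop.wav").await?;
    let compressed = Compressed::new(source, Bitrate::BitsPerSecond(128_000))?;

    // Handles share the same backing store; each new handle reads from the start.
    let _second_view = compressed.new_handle();

    Ok(compressed.into())
}
```

Passing a pre-sized `Config` via `with_config` avoids regrowing the store for long tracks; see the length-hint helpers later in the patch.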
+/// +/// [`Compressed`]: struct.Compressed.html +#[derive(Debug)] +pub struct OpusCompressor { + encoder: OpusEncoder, + last_frame: Vec, + stereo_input: bool, + frame_pos: usize, + audio_bytes: AtomicUsize, +} + +impl OpusCompressor { + fn new(encoder: OpusEncoder, stereo_input: bool) -> Self { + Self { + encoder, + last_frame: Vec::with_capacity(4000), + stereo_input, + frame_pos: 0, + audio_bytes: Default::default(), + } + } +} + +impl Transform for OpusCompressor +where + T: Read, +{ + fn transform_read(&mut self, src: &mut T, buf: &mut [u8]) -> IoResult { + let output_start = mem::size_of::(); + let mut eof = false; + + let mut raw_len = 0; + let mut out = None; + let mut sample_buf = [0f32; STEREO_FRAME_SIZE]; + let samples_in_frame = if self.stereo_input { + STEREO_FRAME_SIZE + } else { + MONO_FRAME_SIZE + }; + + // Purge old frame and read new, if needed. + if self.frame_pos == self.last_frame.len() + output_start || self.last_frame.is_empty() { + self.last_frame.resize(self.last_frame.capacity(), 0); + + // We can't use `read_f32_into` because we can't guarantee the buffer will be filled. + for el in sample_buf[..samples_in_frame].iter_mut() { + match src.read_f32::() { + Ok(sample) => { + *el = sample; + raw_len += 1; + }, + Err(e) if e.kind() == IoErrorKind::UnexpectedEof => { + eof = true; + break; + }, + Err(e) => { + out = Some(Err(e)); + break; + }, + } + } + + if out.is_none() && raw_len > 0 { + loop { + // NOTE: we don't index by raw_len because the last frame can be too small + // to occupy a "whole packet". Zero-padding is the correct behaviour. + match self + .encoder + .encode_float(&sample_buf[..samples_in_frame], &mut self.last_frame[..]) + { + Ok(pkt_len) => { + trace!("Next packet to write has {:?}", pkt_len); + self.frame_pos = 0; + self.last_frame.truncate(pkt_len); + break; + }, + Err(OpusError::Opus(OpusErrorCode::BufferTooSmall)) => { + // If we need more capacity to encode this frame, then take it. + trace!("Resizing inner buffer (+256)."); + self.last_frame.resize(self.last_frame.len() + 256, 0); + }, + Err(e) => { + debug!("Read error {:?} {:?} {:?}.", e, out, raw_len); + out = Some(Err(IoError::new(IoErrorKind::Other, e))); + break; + }, + } + } + } + } + + if out.is_none() { + // Write from frame we have. + let start = if self.frame_pos < output_start { + (&mut buf[..output_start]) + .write_i16::(self.last_frame.len() as i16) + .expect( + "Minimum bytes requirement for Opus (2) should mean that an i16 \ + may always be written.", + ); + self.frame_pos += output_start; + + trace!("Wrote frame header: {}.", self.last_frame.len()); + + output_start + } else { + 0 + }; + + let out_pos = self.frame_pos - output_start; + let remaining = self.last_frame.len() - out_pos; + let write_len = remaining.min(buf.len() - start); + buf[start..start + write_len] + .copy_from_slice(&self.last_frame[out_pos..out_pos + write_len]); + self.frame_pos += write_len; + trace!("Appended {} to inner store", write_len); + out = Some(Ok(write_len + start)); + } + + // NOTE: use of raw_len here preserves true sample length even if + // stream is extended to 20ms boundary. 
+ out.unwrap_or_else(|| Err(IoError::new(IoErrorKind::Other, "Unclear."))) + .map(|compressed_sz| { + self.audio_bytes + .fetch_add(raw_len * mem::size_of::(), Ordering::Release); + + if eof { + TransformPosition::Finished + } else { + TransformPosition::Read(compressed_sz) + } + }) + } +} + +impl NeedsBytes for OpusCompressor { + fn min_bytes_required(&self) -> usize { + 2 + } +} + +impl Stateful for OpusCompressor { + type State = usize; + + fn state(&self) -> Self::State { + self.audio_bytes.load(Ordering::Acquire) + } +} diff --git a/src/input/cached/hint.rs b/src/input/cached/hint.rs new file mode 100644 index 0000000..b32fbce --- /dev/null +++ b/src/input/cached/hint.rs @@ -0,0 +1,40 @@ +use std::time::Duration; +use streamcatcher::Config; + +/// Expected amount of time that an input should last. +#[derive(Copy, Clone, Debug)] +pub enum LengthHint { + /// Estimate of a source's length in bytes. + Bytes(usize), + /// Estimate of a source's length in time. + /// + /// This will be converted to a bytecount at setup. + Time(Duration), +} + +impl From for LengthHint { + fn from(size: usize) -> Self { + LengthHint::Bytes(size) + } +} + +impl From for LengthHint { + fn from(size: Duration) -> Self { + LengthHint::Time(size) + } +} + +/// Modify the given cache configuration to initially allocate +/// enough bytes to store a length of audio at the given bitrate. +pub fn apply_length_hint(config: &mut Config, hint: H, cost_per_sec: usize) +where + H: Into, +{ + config.length_hint = Some(match hint.into() { + LengthHint::Bytes(a) => a, + LengthHint::Time(t) => { + let s = t.as_secs() + if t.subsec_millis() > 0 { 1 } else { 0 }; + (s as usize) * cost_per_sec + }, + }); +} diff --git a/src/input/cached/memory.rs b/src/input/cached/memory.rs new file mode 100644 index 0000000..92062cc --- /dev/null +++ b/src/input/cached/memory.rs @@ -0,0 +1,116 @@ +use super::{apply_length_hint, default_config, raw_cost_per_sec}; +use crate::input::{ + error::{Error, Result}, + CodecType, + Container, + Input, + Metadata, + Reader, +}; +use std::convert::{TryFrom, TryInto}; +use streamcatcher::{Catcher, Config}; + +/// A wrapper around an existing [`Input`] which caches +/// the decoded and converted audio data locally in memory. +/// +/// The main purpose of this wrapper is to enable seeking on +/// incompatible sources (i.e., ffmpeg output) and to ease resource +/// consumption for commonly reused/shared tracks. [`Restartable`] +/// and [`Compressed`] offer the same functionality with different +/// tradeoffs. +/// +/// This is intended for use with small, repeatedly used audio +/// tracks shared between sources, and stores the sound data +/// retrieved in **uncompressed floating point** form to minimise the +/// cost of audio processing. This is a significant *3 Mbps (375 kiB/s)*, +/// or 131 MiB of RAM for a 6 minute song. +/// +/// [`Input`]: ../struct.Input.html +/// [`Compressed`]: struct.Compressed.html +/// [`Restartable`]: ../struct.Restartable.html +#[derive(Clone, Debug)] +pub struct Memory { + /// Inner shared bytestore. + pub raw: Catcher>, + /// Metadata moved out of the captured source. + pub metadata: Metadata, + /// Codec used to read the inner bytestore. + pub kind: CodecType, + /// Stereo-ness of the captured source. + pub stereo: bool, + /// Framing mechanism for the inner bytestore. + pub container: Container, +} + +impl Memory { + /// Wrap an existing [`Input`] with an in-memory store with the same codec and framing. 
+ /// + /// [`Input`]: ../struct.Input.html + pub fn new(source: Input) -> Result { + Self::with_config(source, None) + } + + /// Wrap an existing [`Input`] with an in-memory store with the same codec and framing. + /// + /// `length_hint` may be used to control the size of the initial chunk, preventing + /// needless allocations and copies. If this is not present, the value specified in + /// `source`'s [`Metadata.duration`] will be used, assuming that the source is uncompressed. + /// + /// [`Input`]: ../struct.Input.html + /// [`Metadata.duration`]: ../struct.Metadata.html#structfield.duration + pub fn with_config(mut source: Input, config: Option) -> Result { + let stereo = source.stereo; + let kind = (&source.kind).into(); + let container = source.container; + let metadata = source.metadata.take(); + + let cost_per_sec = raw_cost_per_sec(stereo); + + let mut config = config.unwrap_or_else(|| default_config(cost_per_sec)); + + // apply length hint. + if config.length_hint.is_none() { + if let Some(dur) = metadata.duration { + apply_length_hint(&mut config, dur, cost_per_sec); + } + } + + let raw = config + .build(Box::new(source.reader)) + .map_err(Error::Streamcatcher)?; + + Ok(Self { + raw, + metadata, + kind, + stereo, + container, + }) + } + + /// Acquire a new handle to this object, creating a new + /// view of the existing cached data from the beginning. + pub fn new_handle(&self) -> Self { + Self { + raw: self.raw.new_handle(), + metadata: self.metadata.clone(), + kind: self.kind, + stereo: self.stereo, + container: self.container, + } + } +} + +impl TryFrom for Input { + type Error = Error; + + fn try_from(src: Memory) -> Result { + Ok(Input::new( + src.stereo, + Reader::Memory(src.raw), + src.kind.try_into()?, + src.container, + Some(src.metadata), + )) + } +} diff --git a/src/input/cached/mod.rs b/src/input/cached/mod.rs new file mode 100644 index 0000000..5983c81 --- /dev/null +++ b/src/input/cached/mod.rs @@ -0,0 +1,44 @@ +//! In-memory, shared input sources for reuse between calls, fast seeking, and +//! direct Opus frame passthrough. + +mod compressed; +mod hint; +mod memory; +#[cfg(test)] +mod tests; + +pub use self::{compressed::*, hint::*, memory::*}; + +use crate::constants::*; +use crate::input::utils; +use audiopus::Bitrate; +use std::{mem, time::Duration}; +use streamcatcher::{Config, GrowthStrategy}; + +/// Estimates the cost, in B/s, of audio data compressed at the given bitrate. +pub fn compressed_cost_per_sec(bitrate: Bitrate) -> usize { + let framing_cost_per_sec = AUDIO_FRAME_RATE * mem::size_of::(); + + let bitrate_raw = match bitrate { + Bitrate::BitsPerSecond(i) => i, + Bitrate::Auto => 64_000, + Bitrate::Max => 512_000, + } as usize; + + (bitrate_raw / 8) + framing_cost_per_sec +} + +/// Calculates the cost, in B/s, of raw floating-point audio data. +pub fn raw_cost_per_sec(stereo: bool) -> usize { + utils::timestamp_to_byte_count(Duration::from_secs(1), stereo) +} + +/// Provides the default config used by a cached source. +/// +/// This maps to the default configuration in [`streamcatcher`], using +/// a constant chunk size of 5s worth of audio at the given bitrate estimate. 
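Combining the cost estimators above with `apply_length_hint` from `hint.rs`, a sketch of pre-sizing a cache for a track of known duration (bitrate and duration are arbitrary; paths assume the `cached` re-exports in this patch):

```rust
use audiopus::Bitrate;
use songbird::input::cached::{apply_length_hint, compressed_cost_per_sec, default_config};
use std::time::Duration;
use streamcatcher::Config;

/// Build a cache config sized for ~3 minutes of audio at 96 kb/s, so streamcatcher
/// gets an up-front size estimate rather than relying purely on chunked growth.
fn three_minute_config() -> Config {
    let cost_per_sec = compressed_cost_per_sec(Bitrate::BitsPerSecond(96_000));
    let mut config = default_config(cost_per_sec);
    apply_length_hint(&mut config, Duration::from_secs(180), cost_per_sec);
    config
}
```

The resulting `Config` is what `Compressed::with_config` and `Memory::with_config` accept.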
+/// +/// [`streamcatcher`]: https://docs.rs/streamcatcher/0.1.0/streamcatcher/struct.Config.html +pub fn default_config(cost_per_sec: usize) -> Config { + Config::new().chunk_size(GrowthStrategy::Constant(5 * cost_per_sec)) +} diff --git a/src/input/cached/tests.rs b/src/input/cached/tests.rs new file mode 100644 index 0000000..d4a7021 --- /dev/null +++ b/src/input/cached/tests.rs @@ -0,0 +1,79 @@ +use super::*; +use crate::{ + constants::*, + input::{error::Error, ffmpeg, Codec, Container, Input, Reader}, + test_utils::*, +}; +use audiopus::{coder::Decoder, Bitrate, Channels, SampleRate}; +use byteorder::{LittleEndian, ReadBytesExt}; +use std::io::{Cursor, Read}; + +#[tokio::test] +async fn streamcatcher_preserves_file() { + let input = make_sine(50 * MONO_FRAME_SIZE, true); + let input_len = input.len(); + + let mut raw = default_config(raw_cost_per_sec(true)) + .build(Cursor::new(input.clone())) + .map_err(Error::Streamcatcher) + .unwrap(); + + let mut out_buf = vec![]; + let read = raw.read_to_end(&mut out_buf).unwrap(); + + assert_eq!(input_len, read); + + assert_eq!(input, out_buf); +} + +#[test] +fn compressed_scans_frames_decodes_mono() { + let data = one_s_compressed_sine(false); + run_through_dca(data.raw); +} + +#[test] +fn compressed_scans_frames_decodes_stereo() { + let data = one_s_compressed_sine(true); + run_through_dca(data.raw); +} + +#[test] +fn compressed_triggers_valid_passthrough() { + let mut input = Input::from(one_s_compressed_sine(true)); + + assert!(input.supports_passthrough()); + + let mut opus_buf = [0u8; 10_000]; + let mut signal_buf = [0i16; 1920]; + + let opus_len = input.read_opus_frame(&mut opus_buf[..]).unwrap(); + + let mut decoder = Decoder::new(SampleRate::Hz48000, Channels::Stereo).unwrap(); + decoder + .decode(Some(&opus_buf[..opus_len]), &mut signal_buf[..], false) + .unwrap(); +} + +fn one_s_compressed_sine(stereo: bool) -> Compressed { + let data = make_sine(50 * MONO_FRAME_SIZE, stereo); + + let input = Input::new(stereo, data.into(), Codec::FloatPcm, Container::Raw, None); + + Compressed::new(input, Bitrate::BitsPerSecond(128_000)).unwrap() +} + +fn run_through_dca(mut src: impl Read) { + let mut decoder = Decoder::new(SampleRate::Hz48000, Channels::Stereo).unwrap(); + + let mut pkt_space = [0u8; 10_000]; + let mut signals = [0i16; 1920]; + + while let Ok(frame_len) = src.read_i16::() { + let pkt_len = src.read(&mut pkt_space[..frame_len as usize]).unwrap(); + + decoder + .decode(Some(&pkt_space[..pkt_len]), &mut signals[..], false) + .unwrap(); + } +} diff --git a/src/input/child.rs b/src/input/child.rs new file mode 100644 index 0000000..47d57f9 --- /dev/null +++ b/src/input/child.rs @@ -0,0 +1,38 @@ +use super::*; +use std::{ + io::{BufReader, Read}, + process::Child, +}; +use tracing::debug; + +/// Handle for a child process which ensures that any subprocesses are properly closed +/// on drop. 
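A stand-alone illustration of that kill-on-drop guard, using only `std`. The `yes` command is merely a long-running placeholder; the extra `wait()` here reaps the exited child, which `ChildContainer` below leaves out:

```rust
use std::process::{Child, Command, Stdio};

/// Kill the wrapped process when the guard is dropped, so an abandoned reader
/// cannot leak a running subprocess.
struct ChildGuard(Child);

impl Drop for ChildGuard {
    fn drop(&mut self) {
        // Ignore errors: the child may have exited already.
        let _ = self.0.kill();
        let _ = self.0.wait();
    }
}

fn main() -> std::io::Result<()> {
    let child = Command::new("yes").stdout(Stdio::null()).spawn()?;
    let _guard = ChildGuard(child); // killed (and reaped) at the end of main
    Ok(())
}
```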
+#[derive(Debug)] +pub struct ChildContainer(Child); + +pub(crate) fn child_to_reader(child: Child) -> Reader { + Reader::Pipe(BufReader::with_capacity( + STEREO_FRAME_SIZE * mem::size_of::() * CHILD_BUFFER_LEN, + ChildContainer(child), + )) +} + +impl From for Reader { + fn from(container: Child) -> Self { + child_to_reader::(container) + } +} + +impl Read for ChildContainer { + fn read(&mut self, buffer: &mut [u8]) -> IoResult { + self.0.stdout.as_mut().unwrap().read(buffer) + } +} + +impl Drop for ChildContainer { + fn drop(&mut self) { + if let Err(e) = self.0.kill() { + debug!("Error awaiting child process: {:?}", e); + } + } +} diff --git a/src/input/codec/mod.rs b/src/input/codec/mod.rs new file mode 100644 index 0000000..ddd4113 --- /dev/null +++ b/src/input/codec/mod.rs @@ -0,0 +1,99 @@ +//! Decoding schemes for input audio bytestreams. + +mod opus; + +pub use self::opus::OpusDecoderState; + +use super::*; +use std::{fmt::Debug, mem}; + +/// State used to decode input bytes of an [`Input`]. +/// +/// [`Input`]: ../struct.Input.html +#[non_exhaustive] +#[derive(Clone, Debug)] +pub enum Codec { + /// The inner bytestream is encoded using the Opus codec, to be decoded + /// using the given state. + /// + /// Must be combined with a non-[`Raw`] container. + /// + /// [`Raw`]: ../enum.Container.html#variant.Raw + Opus(OpusDecoderState), + /// The inner bytestream is encoded using raw `i16` samples. + /// + /// Must be combined with a [`Raw`] container. + /// + /// [`Raw`]: ../enum.Container.html#variant.Raw + Pcm, + /// The inner bytestream is encoded using raw `f32` samples. + /// + /// Must be combined with a [`Raw`] container. + /// + /// [`Raw`]: ../enum.Container.html#variant.Raw + FloatPcm, +} + +impl From<&Codec> for CodecType { + fn from(f: &Codec) -> Self { + use Codec::*; + + match f { + Opus(_) => Self::Opus, + Pcm => Self::Pcm, + FloatPcm => Self::FloatPcm, + } + } +} + +/// Type of data being passed into an [`Input`]. +/// +/// [`Input`]: ../struct.Input.html +#[non_exhaustive] +#[derive(Copy, Clone, Debug)] +pub enum CodecType { + /// The inner bytestream is encoded using the Opus codec. + /// + /// Must be combined with a non-[`Raw`] container. + /// + /// [`Raw`]: ../enum.Container.html#variant.Raw + Opus, + /// The inner bytestream is encoded using raw `i16` samples. + /// + /// Must be combined with a [`Raw`] container. + /// + /// [`Raw`]: ../enum.Container.html#variant.Raw + Pcm, + /// The inner bytestream is encoded using raw `f32` samples. + /// + /// Must be combined with a [`Raw`] container. + /// + /// [`Raw`]: ../enum.Container.html#variant.Raw + FloatPcm, +} + +impl CodecType { + /// Returns the length of a single output sample, in bytes. 
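`Codec::Opus` above insists on a non-`Raw` container because Opus packets are variable-length, so the bytestream needs explicit framing to find packet boundaries. The `Container::Dca` framing defined below is simply a little-endian `i16` length prefix per packet; a minimal reader for that layout, independent of the library types:

```rust
use byteorder::{LittleEndian, ReadBytesExt};
use std::io::{Cursor, Read, Result};

/// Read one DCA-style frame: a little-endian i16 payload length, then the payload.
fn read_frame(src: &mut impl Read) -> Result<Vec<u8>> {
    let len = src.read_i16::<LittleEndian>()?.max(0) as usize;
    let mut payload = vec![0u8; len];
    src.read_exact(&mut payload)?;
    Ok(payload)
}

fn main() -> Result<()> {
    // Header 0x0003 (little endian) followed by three payload bytes.
    let mut stream = Cursor::new(vec![0x03, 0x00, 0xAA, 0xBB, 0xCC]);
    assert_eq!(read_frame(&mut stream)?, vec![0xAA, 0xBB, 0xCC]);
    Ok(())
}
```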
+ pub fn sample_len(&self) -> usize { + use CodecType::*; + + match self { + Opus | FloatPcm => mem::size_of::(), + Pcm => mem::size_of::(), + } + } +} + +impl TryFrom for Codec { + type Error = Error; + + fn try_from(f: CodecType) -> Result { + use CodecType::*; + + match f { + Opus => Ok(Codec::Opus(OpusDecoderState::new()?)), + Pcm => Ok(Codec::Pcm), + FloatPcm => Ok(Codec::FloatPcm), + } + } +} diff --git a/src/input/codec/opus.rs b/src/input/codec/opus.rs new file mode 100644 index 0000000..1c002cf --- /dev/null +++ b/src/input/codec/opus.rs @@ -0,0 +1,43 @@ +use crate::constants::*; +use audiopus::{coder::Decoder as OpusDecoder, Channels, Error as OpusError}; +use parking_lot::Mutex; +use std::sync::Arc; + +#[derive(Clone, Debug)] +/// Inner state +pub struct OpusDecoderState { + /// Inner decoder used to convert opus frames into a stream of samples. + pub decoder: Arc>, + /// Controls whether this source allows direct Opus frame passthrough. + /// Defaults to `true`. + /// + /// Enabling this flag is a promise from the programmer to the audio core + /// that the source has been encoded at 48kHz, using 20ms long frames. + /// If you cannot guarantee this, disable this flag (or else risk nasal demons) + /// and bizarre audio behaviour. + pub allow_passthrough: bool, + pub(crate) current_frame: Vec, + pub(crate) frame_pos: usize, + pub(crate) should_reset: bool, +} + +impl OpusDecoderState { + /// Creates a new decoder, having stereo output at 48kHz. + pub fn new() -> Result { + Ok(Self::from_decoder(OpusDecoder::new( + SAMPLE_RATE, + Channels::Stereo, + )?)) + } + + /// Creates a new decoder pre-configured by the user. + pub fn from_decoder(decoder: OpusDecoder) -> Self { + Self { + decoder: Arc::new(Mutex::new(decoder)), + allow_passthrough: true, + current_frame: Vec::with_capacity(STEREO_FRAME_SIZE), + frame_pos: 0, + should_reset: false, + } + } +} diff --git a/src/input/container/frame.rs b/src/input/container/frame.rs new file mode 100644 index 0000000..fb5f0f4 --- /dev/null +++ b/src/input/container/frame.rs @@ -0,0 +1,8 @@ +/// Information used in audio frame detection. +#[derive(Clone, Copy, Debug)] +pub struct Frame { + /// Length of this frame's header, in bytes. + pub header_len: usize, + /// Payload length, in bytes. + pub frame_len: usize, +} diff --git a/src/input/container/mod.rs b/src/input/container/mod.rs new file mode 100644 index 0000000..f22b013 --- /dev/null +++ b/src/input/container/mod.rs @@ -0,0 +1,69 @@ +mod frame; + +pub use frame::*; + +use super::CodecType; +use byteorder::{LittleEndian, ReadBytesExt}; +use std::{ + fmt::Debug, + io::{Read, Result as IoResult}, + mem, +}; + +/// Marker and state for decoding framed input files. +#[non_exhaustive] +#[derive(Clone, Copy, Debug)] +pub enum Container { + /// Raw, unframed input. + Raw, + /// Framed input, beginning with a JSON header. + /// + /// Frames have the form `{ len: i16, payload: [u8; len]}`. + Dca { + /// Byte index of the first frame after the JSON header. + first_frame: usize, + }, +} + +impl Container { + /// Tries to read the header of the next frame from an input stream. + pub fn next_frame_length( + &mut self, + mut reader: impl Read, + input: CodecType, + ) -> IoResult { + use Container::*; + + match self { + Raw => Ok(Frame { + header_len: 0, + frame_len: input.sample_len(), + }), + Dca { .. 
} => reader.read_i16::().map(|frame_len| Frame { + header_len: mem::size_of::(), + frame_len: frame_len.max(0) as usize, + }), + } + } + + /// Tries to seek on an input directly using sample length, if the input + /// is unframed. + pub fn try_seek_trivial(&self, input: CodecType) -> Option { + use Container::*; + + match self { + Raw => Some(input.sample_len()), + _ => None, + } + } + + /// Returns the byte index of the first frame containing audio payload data. + pub fn input_start(&self) -> usize { + use Container::*; + + match self { + Raw => 0, + Dca { first_frame } => *first_frame, + } + } +} diff --git a/src/input/dca.rs b/src/input/dca.rs new file mode 100644 index 0000000..ea46331 --- /dev/null +++ b/src/input/dca.rs @@ -0,0 +1,137 @@ +use super::{codec::OpusDecoderState, error::DcaError, Codec, Container, Input, Metadata, Reader}; +use serde::Deserialize; +use std::{ffi::OsStr, io::BufReader, mem}; +use tokio::{fs::File as TokioFile, io::AsyncReadExt}; + +/// Creates a streamed audio source from a DCA file. +/// Currently only accepts the [DCA1 format](https://github.com/bwmarrin/dca). +pub async fn dca>(path: P) -> Result { + _dca(path.as_ref()).await +} + +async fn _dca(path: &OsStr) -> Result { + let mut reader = TokioFile::open(path).await.map_err(DcaError::IoError)?; + + let mut header = [0u8; 4]; + + // Read in the magic number to verify it's a DCA file. + reader + .read_exact(&mut header) + .await + .map_err(DcaError::IoError)?; + + if header != b"DCA1"[..] { + return Err(DcaError::InvalidHeader); + } + + let size = reader + .read_i32_le() + .await + .map_err(|_| DcaError::InvalidHeader)?; + + // Sanity check + if size < 2 { + return Err(DcaError::InvalidSize(size)); + } + + let mut raw_json = Vec::with_capacity(size as usize); + + let mut json_reader = reader.take(size as u64); + + json_reader + .read_to_end(&mut raw_json) + .await + .map_err(DcaError::IoError)?; + + let reader = BufReader::new(json_reader.into_inner().into_std().await); + + let metadata: Metadata = serde_json::from_slice::(raw_json.as_slice()) + .map_err(DcaError::InvalidMetadata)? 
+ .into(); + + let stereo = metadata.channels == Some(2); + + Ok(Input::new( + stereo, + Reader::File(reader), + Codec::Opus(OpusDecoderState::new().map_err(DcaError::Opus)?), + Container::Dca { + first_frame: (size as usize) + mem::size_of::() + header.len(), + }, + Some(metadata), + )) +} + +#[derive(Debug, Deserialize)] +pub(crate) struct DcaMetadata { + pub(crate) dca: Dca, + pub(crate) opus: Opus, + pub(crate) info: Option, + pub(crate) origin: Option, + pub(crate) extra: Option, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct Dca { + pub(crate) version: u64, + pub(crate) tool: Tool, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct Tool { + pub(crate) name: String, + pub(crate) version: String, + pub(crate) url: String, + pub(crate) author: String, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct Opus { + pub(crate) mode: String, + pub(crate) sample_rate: u32, + pub(crate) frame_size: u64, + pub(crate) abr: u64, + pub(crate) vbr: u64, + pub(crate) channels: u8, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct Info { + pub(crate) title: Option, + pub(crate) artist: Option, + pub(crate) album: Option, + pub(crate) genre: Option, + pub(crate) cover: Option, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct Origin { + pub(crate) source: Option, + pub(crate) abr: Option, + pub(crate) channels: Option, + pub(crate) encoding: Option, + pub(crate) url: Option, +} + +impl From for Metadata { + fn from(mut d: DcaMetadata) -> Self { + let (title, artist) = d + .info + .take() + .map(|mut m| (m.title.take(), m.artist.take())) + .unwrap_or_else(|| (None, None)); + + let channels = Some(d.opus.channels); + let sample_rate = Some(d.opus.sample_rate); + + Self { + title, + artist, + + channels, + sample_rate, + + ..Default::default() + } + } +} diff --git a/src/input/error.rs b/src/input/error.rs new file mode 100644 index 0000000..614249f --- /dev/null +++ b/src/input/error.rs @@ -0,0 +1,93 @@ +//! Errors caused by input creation. + +use audiopus::Error as OpusError; +use serde_json::{Error as JsonError, Value}; +use std::{io::Error as IoError, process::Output}; +use streamcatcher::CatcherError; + +/// An error returned when creating a new [`Input`]. +/// +/// [`Input`]: ../struct.Input.html +#[derive(Debug)] +#[non_exhaustive] +pub enum Error { + /// An error occurred while opening a new DCA source. + Dca(DcaError), + /// An error occurred while reading, or opening a file. + Io(IoError), + /// An error occurred while parsing JSON (i.e., during metadata/stereo detection). + Json(JsonError), + /// An error occurred within the Opus codec. + Opus(OpusError), + /// Failed to extract metadata from alternate pipe. + Metadata, + /// Apparently failed to create stdout. + Stdout, + /// An error occurred while checking if a path is stereo. + Streams, + /// Configuration error for a cached Input. + Streamcatcher(CatcherError), + /// An error occurred while processing the JSON output from `youtube-dl`. + /// + /// The JSON output is given. + YouTubeDLProcessing(Value), + /// An error occurred while running `youtube-dl`. + YouTubeDLRun(Output), + /// The `url` field of the `youtube-dl` JSON output was not present. + /// + /// The JSON output is given. 
+ YouTubeDLUrl(Value), +} + +impl From for Error { + fn from(e: CatcherError) -> Self { + Error::Streamcatcher(e) + } +} + +impl From for Error { + fn from(e: DcaError) -> Self { + Error::Dca(e) + } +} + +impl From for Error { + fn from(e: IoError) -> Error { + Error::Io(e) + } +} + +impl From for Error { + fn from(e: JsonError) -> Self { + Error::Json(e) + } +} + +impl From for Error { + fn from(e: OpusError) -> Error { + Error::Opus(e) + } +} + +/// An error returned from the [`dca`] method. +/// +/// [`dca`]: ../fn.dca.html +#[derive(Debug)] +#[non_exhaustive] +pub enum DcaError { + /// An error occurred while reading, or opening a file. + IoError(IoError), + /// The file opened did not have a valid DCA JSON header. + InvalidHeader, + /// The file's metadata block was invalid, or could not be parsed. + InvalidMetadata(JsonError), + /// The file's header reported an invalid metadata block size. + InvalidSize(i32), + /// An error was encountered while creating a new Opus decoder. + Opus(OpusError), +} + +/// Convenience type for fallible return of [`Input`]s. +/// +/// [`Input`]: ../struct.Input.html +pub type Result = std::result::Result; diff --git a/src/input/ffmpeg_src.rs b/src/input/ffmpeg_src.rs new file mode 100644 index 0000000..f430762 --- /dev/null +++ b/src/input/ffmpeg_src.rs @@ -0,0 +1,146 @@ +use super::{ + child_to_reader, + error::{Error, Result}, + Codec, + Container, + Input, + Metadata, +}; +use serde_json::Value; +use std::{ + ffi::OsStr, + process::{Command, Stdio}, +}; +use tokio::process::Command as TokioCommand; +use tracing::debug; + +/// Opens an audio file through `ffmpeg` and creates an audio source. +pub async fn ffmpeg>(path: P) -> Result { + _ffmpeg(path.as_ref()).await +} + +pub(crate) async fn _ffmpeg(path: &OsStr) -> Result { + // Will fail if the path is not to a file on the fs. Likely a YouTube URI. + let is_stereo = is_stereo(path) + .await + .unwrap_or_else(|_e| (false, Default::default())); + let stereo_val = if is_stereo.0 { "2" } else { "1" }; + + _ffmpeg_optioned( + path, + &[], + &[ + "-f", + "s16le", + "-ac", + stereo_val, + "-ar", + "48000", + "-acodec", + "pcm_f32le", + "-", + ], + Some(is_stereo), + ) + .await +} + +/// Opens an audio file through `ffmpeg` and creates an audio source, with +/// user-specified arguments to pass to ffmpeg. +/// +/// Note that this does _not_ build on the arguments passed by the [`ffmpeg`] +/// function. 
+/// +/// # Examples +/// +/// Pass options to create a custom ffmpeg streamer: +/// +/// ```rust,no_run +/// use songbird::input; +/// +/// let stereo_val = "2"; +/// +/// let streamer = futures::executor::block_on(input::ffmpeg_optioned("./some_file.mp3", &[], &[ +/// "-f", +/// "s16le", +/// "-ac", +/// stereo_val, +/// "-ar", +/// "48000", +/// "-acodec", +/// "pcm_s16le", +/// "-", +/// ])); +///``` +pub async fn ffmpeg_optioned>( + path: P, + pre_input_args: &[&str], + args: &[&str], +) -> Result { + _ffmpeg_optioned(path.as_ref(), pre_input_args, args, None).await +} + +pub(crate) async fn _ffmpeg_optioned( + path: &OsStr, + pre_input_args: &[&str], + args: &[&str], + is_stereo_known: Option<(bool, Metadata)>, +) -> Result { + let (is_stereo, metadata) = if let Some(vals) = is_stereo_known { + vals + } else { + is_stereo(path) + .await + .ok() + .unwrap_or_else(|| (false, Default::default())) + }; + + let command = Command::new("ffmpeg") + .args(pre_input_args) + .arg("-i") + .arg(path) + .args(args) + .stderr(Stdio::null()) + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .spawn()?; + + Ok(Input::new( + is_stereo, + child_to_reader::(command), + Codec::FloatPcm, + Container::Raw, + Some(metadata), + )) +} + +pub(crate) async fn is_stereo(path: &OsStr) -> Result<(bool, Metadata)> { + let args = [ + "-v", + "quiet", + "-of", + "json", + "-show_format", + "-show_streams", + "-i", + ]; + + let out = TokioCommand::new("ffprobe") + .args(&args) + .arg(path) + .stdin(Stdio::null()) + .output() + .await?; + + let value: Value = serde_json::from_reader(&out.stdout[..])?; + + let metadata = Metadata::from_ffprobe_json(&value); + + debug!("FFprobe metadata {:?}", metadata); + + if let Some(count) = metadata.channels { + Ok((count == 2, metadata)) + } else { + Err(Error::Streams) + } +} diff --git a/src/input/metadata.rs b/src/input/metadata.rs new file mode 100644 index 0000000..4a47523 --- /dev/null +++ b/src/input/metadata.rs @@ -0,0 +1,166 @@ +use crate::constants::*; +use serde_json::Value; +use std::time::Duration; + +/// Information about an [`Input`] source. +/// +/// [`Input`]: struct.Input.html +#[derive(Clone, Debug, Default, Eq, PartialEq)] +pub struct Metadata { + /// The title of this stream. + pub title: Option, + /// The main artist of this stream. + pub artist: Option, + /// The date of creation of this stream. + pub date: Option, + + /// The number of audio channels in this stream. + /// + /// Any number `>= 2` is treated as stereo. + pub channels: Option, + /// The time at which the first true sample is played back. + /// + /// This occurs as an artefact of coder delay. + pub start_time: Option, + /// The reported duration of this stream. + pub duration: Option, + /// The sample rate of this stream. + pub sample_rate: Option, +} + +impl Metadata { + /// Extract metadata and details from the output of + /// `ffprobe`. 
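For reference, a sketch of the JSON shape that parser consumes, fed through the public constructor. Every value below is made up, and the `songbird::input::Metadata` path relies on the re-exports in `input/mod.rs`:

```rust
use serde_json::json;
use songbird::input::Metadata;

fn main() {
    // Roughly what `ffprobe -of json -show_format -show_streams` emits.
    let probe = json!({
        "format": {
            "duration": "212.5",
            "tags": { "title": "Example", "artist": "Someone" }
        },
        "streams": [
            { "codec_type": "audio", "channels": 2, "sample_rate": "48000" }
        ]
    });

    let meta = Metadata::from_ffprobe_json(&probe);
    assert_eq!(meta.channels, Some(2));
    assert_eq!(meta.title.as_deref(), Some("Example"));
}
```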
+ pub fn from_ffprobe_json(value: &Value) -> Self { + let format = value.as_object().and_then(|m| m.get("format")); + + let duration = format + .and_then(|m| m.get("duration")) + .and_then(Value::as_str) + .and_then(|v| v.parse::().ok()) + .map(Duration::from_secs_f64); + + let start_time = format + .and_then(|m| m.get("start_time")) + .and_then(Value::as_str) + .and_then(|v| v.parse::().ok()) + .map(Duration::from_secs_f64); + + let tags = format.and_then(|m| m.get("tags")); + + let title = tags + .and_then(|m| m.get("title")) + .and_then(Value::as_str) + .map(str::to_string); + + let artist = tags + .and_then(|m| m.get("artist")) + .and_then(Value::as_str) + .map(str::to_string); + + let date = tags + .and_then(|m| m.get("date")) + .and_then(Value::as_str) + .map(str::to_string); + + let stream = value + .as_object() + .and_then(|m| m.get("streams")) + .and_then(|v| v.as_array()) + .and_then(|v| { + v.iter() + .find(|line| line.get("codec_type").and_then(Value::as_str) == Some("audio")) + }); + + let channels = stream + .and_then(|m| m.get("channels")) + .and_then(Value::as_u64) + .map(|v| v as u8); + + let sample_rate = stream + .and_then(|m| m.get("sample_rate")) + .and_then(Value::as_str) + .and_then(|v| v.parse::().ok()) + .map(|v| v as u32); + + Self { + title, + artist, + date, + + channels, + start_time, + duration, + sample_rate, + } + } + + /// Use `youtube-dl` to extract metadata for an online resource. + pub fn from_ytdl_output(value: Value) -> Self { + let obj = value.as_object(); + + let track = obj + .and_then(|m| m.get("track")) + .and_then(Value::as_str) + .map(str::to_string); + + let title = track.or_else(|| { + obj.and_then(|m| m.get("title")) + .and_then(Value::as_str) + .map(str::to_string) + }); + + let true_artist = obj + .and_then(|m| m.get("artist")) + .and_then(Value::as_str) + .map(str::to_string); + + let artist = true_artist.or_else(|| { + obj.and_then(|m| m.get("uploader")) + .and_then(Value::as_str) + .map(str::to_string) + }); + + let r_date = obj + .and_then(|m| m.get("release_date")) + .and_then(Value::as_str) + .map(str::to_string); + + let date = r_date.or_else(|| { + obj.and_then(|m| m.get("upload_date")) + .and_then(Value::as_str) + .map(str::to_string) + }); + + let duration = obj + .and_then(|m| m.get("duration")) + .and_then(Value::as_f64) + .map(Duration::from_secs_f64); + + Self { + title, + artist, + date, + + channels: Some(2), + duration, + sample_rate: Some(SAMPLE_RATE_RAW as u32), + + ..Default::default() + } + } + + /// Move all fields from a `Metadata` object into a new one. + pub fn take(&mut self) -> Self { + Self { + title: self.title.take(), + artist: self.artist.take(), + date: self.date.take(), + + channels: self.channels.take(), + start_time: self.start_time.take(), + duration: self.duration.take(), + sample_rate: self.sample_rate.take(), + } + } +} diff --git a/src/input/mod.rs b/src/input/mod.rs new file mode 100644 index 0000000..8d10c26 --- /dev/null +++ b/src/input/mod.rs @@ -0,0 +1,596 @@ +//! Raw audio input data streams and sources. +//! +//! [`Input`] is handled in Songbird by combining metadata with: +//! * A 48kHz audio bytestream, via [`Reader`], +//! * A [`Container`] describing the framing mechanism of the bytestream, +//! * A [`Codec`], defining the format of audio frames. +//! +//! When used as a [`Read`], the output bytestream will be a floating-point +//! PCM stream at 48kHz, matching the channel count of the input source. +//! +//! ## Opus frame passthrough. +//! 
Some sources, such as [`Compressed`] or the output of [`dca`], support +//! direct frame passthrough to the driver. This lets you directly send the +//! audio data you have *without decoding, re-encoding, or mixing*. In many +//! cases, this can greatly reduce the processing/compute cost of the driver. +//! +//! This functionality requires that: +//! * only one track is active (including paused tracks), +//! * that track's input supports direct Opus frame reads, +//! * its [`Input`] [meets the promises described herein](codec/struct.OpusDecoderState.html#structfield.allow_passthrough), +//! * and that track's volume is set to `1.0`. +//! +//! [`Input`]: struct.Input.html +//! [`Reader`]: reader/enum.Reader.html +//! [`Container`]: enum.Container.html +//! [`Codec`]: codec/enum.Codec.html +//! [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +//! [`Compressed`]: cached/struct.Compressed.html +//! [`dca`]: fn.dca.html + +pub mod cached; +mod child; +pub mod codec; +mod container; +mod dca; +pub mod error; +mod ffmpeg_src; +mod metadata; +pub mod reader; +pub mod restartable; +pub mod utils; +mod ytdl_src; + +pub use self::{ + child::*, + codec::{Codec, CodecType}, + container::{Container, Frame}, + dca::dca, + ffmpeg_src::*, + metadata::Metadata, + reader::Reader, + restartable::Restartable, + ytdl_src::*, +}; + +use crate::constants::*; +use audiopus::coder::GenericCtl; +use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; +use cached::OpusCompressor; +use error::{Error, Result}; +use tokio::runtime::Handle; + +use std::{ + convert::TryFrom, + io::{ + self, + Error as IoError, + ErrorKind as IoErrorKind, + Read, + Result as IoResult, + Seek, + SeekFrom, + }, + mem, + time::Duration, +}; +use tracing::{debug, error}; + +/// Data and metadata needed to correctly parse a [`Reader`]'s audio bytestream. +/// +/// See the [module root] for more information. +/// +/// [`Reader`]: enum.Reader.html +/// [module root]: index.html +#[derive(Debug)] +pub struct Input { + /// Information about the played source. + pub metadata: Metadata, + /// Indicates whether `source` is stereo or mono. + pub stereo: bool, + /// Underlying audio data bytestream. + pub reader: Reader, + /// Decoder used to parse the output of `reader`. + pub kind: Codec, + /// Framing strategy needed to identify frames of compressed audio. + pub container: Container, + pos: usize, +} + +impl Input { + /// Creates a floating-point PCM Input from a given reader. + pub fn float_pcm(is_stereo: bool, reader: Reader) -> Input { + Input { + metadata: Default::default(), + stereo: is_stereo, + reader, + kind: Codec::FloatPcm, + container: Container::Raw, + pos: 0, + } + } + + /// Creates a new Input using (at least) the given reader, codec, and container. + pub fn new( + stereo: bool, + reader: Reader, + kind: Codec, + container: Container, + metadata: Option, + ) -> Self { + Input { + metadata: metadata.unwrap_or_default(), + stereo, + reader, + kind, + container, + pos: 0, + } + } + + /// Returns whether the inner [`Reader`] implements [`Seek`]. + /// + /// [`Reader`]: reader/enum.Reader.html + /// [`Seek`]: https://doc.rust-lang.org/std/io/trait.Seek.html + pub fn is_seekable(&self) -> bool { + self.reader.is_seekable() + } + + /// Returns whether the read audio signal is stereo (or mono). + pub fn is_stereo(&self) -> bool { + self.stereo + } + + /// Returns the type of the inner [`Codec`]. 
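A small sketch of `float_pcm` above. It assumes `Reader::File` wraps a buffered `std::fs::File` (as it does in the `dca` loader later in this patch), that `Reader`'s variants are publicly constructible, and that the named file, a placeholder, really holds raw interleaved 48 kHz little-endian `f32` samples:

```rust
use songbird::input::{Input, Reader};
use std::{fs::File, io::BufReader};

/// Treat a raw f32 PCM file as a stereo input with default (empty) metadata.
fn raw_pcm_input() -> std::io::Result<Input> {
    let file = BufReader::new(File::open("assets/raw_f32le_48k_stereo.pcm")?);
    Ok(Input::float_pcm(true, Reader::File(file)))
}
```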
+ /// + /// [`Codec`]: codec/enum.Codec.html + pub fn get_type(&self) -> CodecType { + (&self.kind).into() + } + + /// Mixes the output of this stream into a 20ms stereo audio buffer. + #[inline] + pub fn mix(&mut self, float_buffer: &mut [f32; STEREO_FRAME_SIZE], volume: f32) -> usize { + match self.add_float_pcm_frame(float_buffer, self.stereo, volume) { + Some(len) => len, + None => 0, + } + } + + /// Seeks the stream to the given time, if possible. + /// + /// Returns the actual time reached. + pub fn seek_time(&mut self, time: Duration) -> Option { + let future_pos = utils::timestamp_to_byte_count(time, self.stereo); + Seek::seek(self, SeekFrom::Start(future_pos as u64)) + .ok() + .map(|a| utils::byte_count_to_timestamp(a as usize, self.stereo)) + } + + fn read_inner(&mut self, buffer: &mut [u8], ignore_decode: bool) -> IoResult { + // This implementation of Read converts the input stream + // to floating point output. + let sample_len = mem::size_of::(); + let float_space = buffer.len() / sample_len; + let mut written_floats = 0; + + // TODO: better decouple codec and container here. + // this is a little bit backwards, and assumes the bottom cases are always raw... + let out = match &mut self.kind { + Codec::Opus(decoder_state) => { + if matches!(self.container, Container::Raw) { + return Err(IoError::new( + IoErrorKind::InvalidInput, + "Raw container cannot demarcate Opus frames.", + )); + } + + if ignore_decode { + // If we're less than one frame away from the end of cheap seeking, + // then we must decode to make sure the next starting offset is correct. + + // Step one: use up the remainder of the frame. + let mut aud_skipped = + decoder_state.current_frame.len() - decoder_state.frame_pos; + + decoder_state.frame_pos = 0; + decoder_state.current_frame.truncate(0); + + // Step two: take frames if we can. + while buffer.len() - aud_skipped >= STEREO_FRAME_BYTE_SIZE { + decoder_state.should_reset = true; + + let frame = self + .container + .next_frame_length(&mut self.reader, CodecType::Opus)?; + self.reader.consume(frame.frame_len); + + aud_skipped += STEREO_FRAME_BYTE_SIZE; + } + + Ok(aud_skipped) + } else { + // get new frame *if needed* + if decoder_state.frame_pos == decoder_state.current_frame.len() { + let mut decoder = decoder_state.decoder.lock(); + + if decoder_state.should_reset { + decoder + .reset_state() + .expect("Critical failure resetting decoder."); + decoder_state.should_reset = false; + } + let frame = self + .container + .next_frame_length(&mut self.reader, CodecType::Opus)?; + + let mut opus_data_buffer = [0u8; 4000]; + + decoder_state + .current_frame + .resize(decoder_state.current_frame.capacity(), 0.0); + + let seen = + Read::read(&mut self.reader, &mut opus_data_buffer[..frame.frame_len])?; + + let samples = decoder + .decode_float( + Some(&opus_data_buffer[..seen]), + &mut decoder_state.current_frame[..], + false, + ) + .unwrap_or(0); + + decoder_state.current_frame.truncate(2 * samples); + decoder_state.frame_pos = 0; + } + + // read from frame which is present. 
+ let mut buffer = &mut buffer[..]; + + let start = decoder_state.frame_pos; + let to_write = float_space.min(decoder_state.current_frame.len() - start); + for val in &decoder_state.current_frame[start..start + float_space] { + buffer.write_f32::(*val)?; + } + decoder_state.frame_pos += to_write; + written_floats = to_write; + + Ok(written_floats * mem::size_of::()) + } + }, + Codec::Pcm => { + let mut buffer = &mut buffer[..]; + while written_floats < float_space { + if let Ok(signal) = self.reader.read_i16::() { + buffer.write_f32::(f32::from(signal) / 32768.0)?; + written_floats += 1; + } else { + break; + } + } + Ok(written_floats * mem::size_of::()) + }, + Codec::FloatPcm => Read::read(&mut self.reader, buffer), + }; + + out.map(|v| { + self.pos += v; + v + }) + } + + fn cheap_consume(&mut self, count: usize) -> IoResult { + let mut scratch = [0u8; STEREO_FRAME_BYTE_SIZE * 4]; + let len = scratch.len(); + let mut done = 0; + + loop { + let read = self.read_inner(&mut scratch[..len.min(count - done)], true)?; + if read == 0 { + break; + } + done += read; + } + + Ok(done) + } + + pub(crate) fn supports_passthrough(&self) -> bool { + match &self.kind { + Codec::Opus(state) => state.allow_passthrough, + _ => false, + } + } + + pub(crate) fn read_opus_frame(&mut self, buffer: &mut [u8]) -> IoResult { + // Called in event of opus passthrough. + if let Codec::Opus(state) = &mut self.kind { + // step 1: align to frame. + self.pos += state.current_frame.len() - state.frame_pos; + + state.frame_pos = 0; + state.current_frame.truncate(0); + + // step 2: read new header. + let frame = self + .container + .next_frame_length(&mut self.reader, CodecType::Opus)?; + + // step 3: read in bytes. + self.reader + .read_exact(&mut buffer[..frame.frame_len]) + .map(|_| { + self.pos += STEREO_FRAME_BYTE_SIZE; + frame.frame_len + }) + } else { + Err(IoError::new( + IoErrorKind::InvalidInput, + "Frame passthrough not supported for this file.", + )) + } + } + + pub(crate) fn prep_with_handle(&mut self, handle: Handle) { + self.reader.prep_with_handle(handle); + } +} + +impl Read for Input { + fn read(&mut self, buffer: &mut [u8]) -> IoResult { + self.read_inner(buffer, false) + } +} + +impl Seek for Input { + fn seek(&mut self, pos: SeekFrom) -> IoResult { + let mut target = self.pos; + match pos { + SeekFrom::Start(pos) => { + target = pos as usize; + }, + SeekFrom::Current(rel) => { + target = target.wrapping_add(rel as usize); + }, + SeekFrom::End(_pos) => unimplemented!(), + } + + debug!("Seeking to {:?}", pos); + + (if target == self.pos { + Ok(0) + } else if let Some(conversion) = self.container.try_seek_trivial(self.get_type()) { + let inside_target = (target * conversion) / mem::size_of::(); + Seek::seek(&mut self.reader, SeekFrom::Start(inside_target as u64)).map(|inner_dest| { + let outer_dest = ((inner_dest as usize) * mem::size_of::()) / conversion; + self.pos = outer_dest; + outer_dest + }) + } else if target > self.pos { + // seek in the next amount, disabling decoding if need be. + let shift = target - self.pos; + self.cheap_consume(shift) + } else { + // start from scratch, then seek in... + Seek::seek( + &mut self.reader, + SeekFrom::Start(self.container.input_start() as u64), + )?; + + self.cheap_consume(target) + }) + .map(|_| self.pos as u64) + } +} + +/// Extension trait to pull frames of audio from a byte source. 
+pub(crate) trait ReadAudioExt { + fn add_float_pcm_frame( + &mut self, + float_buffer: &mut [f32; STEREO_FRAME_SIZE], + true_stereo: bool, + volume: f32, + ) -> Option; + + fn consume(&mut self, amt: usize) -> usize + where + Self: Sized; +} + +impl ReadAudioExt for R { + fn add_float_pcm_frame( + &mut self, + float_buffer: &mut [f32; STEREO_FRAME_SIZE], + stereo: bool, + volume: f32, + ) -> Option { + // IDEA: Read in 8 floats at a time, then use iterator code + // to gently nudge the compiler into vectorising for us. + // Max SIMD float32 lanes is 8 on AVX, older archs use a divisor of this + // e.g., 4. + const SAMPLE_LEN: usize = mem::size_of::(); + const FLOAT_COUNT: usize = 512; + let mut simd_float_bytes = [0u8; FLOAT_COUNT * SAMPLE_LEN]; + let mut simd_float_buf = [0f32; FLOAT_COUNT]; + + let mut frame_pos = 0; + + // Code duplication here is because unifying these codepaths + // with a dynamic chunk size is not zero-cost. + if stereo { + let mut max_bytes = STEREO_FRAME_BYTE_SIZE; + + while frame_pos < float_buffer.len() { + let progress = self + .read(&mut simd_float_bytes[..max_bytes.min(FLOAT_COUNT * SAMPLE_LEN)]) + .and_then(|byte_len| { + let target = byte_len / SAMPLE_LEN; + (&simd_float_bytes[..byte_len]) + .read_f32_into::(&mut simd_float_buf[..target]) + .map(|_| target) + }) + .map(|f32_len| { + let new_pos = frame_pos + f32_len; + for (el, new_el) in float_buffer[frame_pos..new_pos] + .iter_mut() + .zip(&simd_float_buf[..f32_len]) + { + *el += volume * new_el; + } + (new_pos, f32_len) + }); + + match progress { + Ok((new_pos, delta)) => { + frame_pos = new_pos; + max_bytes -= delta * SAMPLE_LEN; + + if delta == 0 { + break; + } + }, + Err(ref e) => + return if e.kind() == IoErrorKind::UnexpectedEof { + error!("EOF unexpectedly: {:?}", e); + Some(frame_pos) + } else { + error!("Input died unexpectedly: {:?}", e); + None + }, + } + } + } else { + let mut max_bytes = MONO_FRAME_BYTE_SIZE; + + while frame_pos < float_buffer.len() { + let progress = self + .read(&mut simd_float_bytes[..max_bytes.min(FLOAT_COUNT * SAMPLE_LEN)]) + .and_then(|byte_len| { + let target = byte_len / SAMPLE_LEN; + (&simd_float_bytes[..byte_len]) + .read_f32_into::(&mut simd_float_buf[..target]) + .map(|_| target) + }) + .map(|f32_len| { + let new_pos = frame_pos + (2 * f32_len); + for (els, new_el) in float_buffer[frame_pos..new_pos] + .chunks_exact_mut(2) + .zip(&simd_float_buf[..f32_len]) + { + let sample = volume * new_el; + els[0] += sample; + els[1] += sample; + } + (new_pos, f32_len) + }); + + match progress { + Ok((new_pos, delta)) => { + frame_pos = new_pos; + max_bytes -= delta * SAMPLE_LEN; + + if delta == 0 { + break; + } + }, + Err(ref e) => + return if e.kind() == IoErrorKind::UnexpectedEof { + Some(frame_pos) + } else { + error!("Input died unexpectedly: {:?}", e); + None + }, + } + } + } + + Some(frame_pos * SAMPLE_LEN) + } + + fn consume(&mut self, amt: usize) -> usize { + io::copy(&mut self.by_ref().take(amt as u64), &mut io::sink()).unwrap_or(0) as usize + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::*; + + #[test] + fn float_pcm_input_unchanged_mono() { + let data = make_sine(50 * MONO_FRAME_SIZE, false); + let mut input = Input::new( + false, + data.clone().into(), + Codec::FloatPcm, + Container::Raw, + None, + ); + + let mut out_vec = vec![]; + + let len = input.read_to_end(&mut out_vec).unwrap(); + assert_eq!(out_vec[..len], data[..]); + } + + #[test] + fn float_pcm_input_unchanged_stereo() { + let data = make_sine(50 * MONO_FRAME_SIZE, true); + 
let mut input = Input::new( + true, + data.clone().into(), + Codec::FloatPcm, + Container::Raw, + None, + ); + + let mut out_vec = vec![]; + + let len = input.read_to_end(&mut out_vec).unwrap(); + assert_eq!(out_vec[..len], data[..]); + } + + #[test] + fn pcm_input_becomes_float_mono() { + let data = make_pcm_sine(50 * MONO_FRAME_SIZE, false); + let mut input = Input::new(false, data.clone().into(), Codec::Pcm, Container::Raw, None); + + let mut out_vec = vec![]; + let len = input.read_to_end(&mut out_vec).unwrap(); + + let mut i16_window = &data[..]; + let mut float_window = &out_vec[..]; + + while i16_window.len() != 0 { + let before = i16_window.read_i16::().unwrap() as f32; + let after = float_window.read_f32::().unwrap(); + + let diff = (before / 32768.0) - after; + + assert!(diff.abs() < f32::EPSILON); + } + } + + #[test] + fn pcm_input_becomes_float_stereo() { + let data = make_pcm_sine(50 * MONO_FRAME_SIZE, true); + let mut input = Input::new(true, data.clone().into(), Codec::Pcm, Container::Raw, None); + + let mut out_vec = vec![]; + let len = input.read_to_end(&mut out_vec).unwrap(); + + let mut i16_window = &data[..]; + let mut float_window = &out_vec[..]; + + while i16_window.len() != 0 { + let before = i16_window.read_i16::().unwrap() as f32; + let after = float_window.read_f32::().unwrap(); + + let diff = (before / 32768.0) - after; + + assert!(diff.abs() < f32::EPSILON); + } + } +} diff --git a/src/input/reader.rs b/src/input/reader.rs new file mode 100644 index 0000000..030dac3 --- /dev/null +++ b/src/input/reader.rs @@ -0,0 +1,180 @@ +//! Raw handlers for input bytestreams. + +use super::*; +use std::{ + fmt::{Debug, Error as FormatError, Formatter}, + fs::File, + io::{ + BufReader, + Cursor, + Error as IoError, + ErrorKind as IoErrorKind, + Read, + Result as IoResult, + Seek, + SeekFrom, + }, + result::Result as StdResult, +}; +use streamcatcher::{Catcher, TxCatcher}; + +/// Usable data/byte sources for an audio stream. +/// +/// Users may define their own data sources using [`Extension`] +/// and [`ExtensionSeek`]. +/// +/// [`Extension`]: #variant.Extension +/// [`ExtensionSeek`]: #variant.ExtensionSeek +pub enum Reader { + /// Piped output of another program (i.e., [`ffmpeg`]). + /// + /// Does not support seeking. + /// + /// [`ffmpeg`]: ../fn.ffmpeg.html + Pipe(BufReader), + /// A cached, raw in-memory store, provided by Songbird. + /// + /// Supports seeking. + Memory(Catcher>), + /// A cached, Opus-compressed in-memory store, provided by Songbird. + /// + /// Supports seeking. + Compressed(TxCatcher, OpusCompressor>), + /// A source which supports seeking by recreating its inout stream. + /// + /// Supports seeking. + Restartable(Restartable), + /// A source contained in a local file. + /// + /// Supports seeking. + File(BufReader), + /// A source contained as an array in memory. + /// + /// Supports seeking. + Vec(Cursor>), + /// A basic user-provided source. + /// + /// Does not support seeking. + Extension(Box), + /// A user-provided source which also implements [`Seek`]. + /// + /// Supports seeking. + /// + /// [`Seek`]: https://doc.rust-lang.org/std/io/trait.Seek.html + ExtensionSeek(Box), +} + +impl Reader { + /// Returns whether the given source implements [`Seek`]. 
+ /// + /// [`Seek`]: https://doc.rust-lang.org/std/io/trait.Seek.html + pub fn is_seekable(&self) -> bool { + use Reader::*; + match self { + Restartable(_) | Compressed(_) | Memory(_) => true, + Extension(_) => false, + ExtensionSeek(_) => true, + _ => false, + } + } + + #[allow(clippy::single_match)] + pub(crate) fn prep_with_handle(&mut self, handle: Handle) { + use Reader::*; + match self { + Restartable(r) => r.prep_with_handle(handle), + _ => {}, + } + } +} + +impl Read for Reader { + fn read(&mut self, buffer: &mut [u8]) -> IoResult { + use Reader::*; + match self { + Pipe(a) => Read::read(a, buffer), + Memory(a) => Read::read(a, buffer), + Compressed(a) => Read::read(a, buffer), + Restartable(a) => Read::read(a, buffer), + File(a) => Read::read(a, buffer), + Vec(a) => Read::read(a, buffer), + Extension(a) => a.read(buffer), + ExtensionSeek(a) => a.read(buffer), + } + } +} + +impl Seek for Reader { + fn seek(&mut self, pos: SeekFrom) -> IoResult { + use Reader::*; + match self { + Pipe(_) | Extension(_) => Err(IoError::new( + IoErrorKind::InvalidInput, + "Seeking not supported on Reader of this type.", + )), + Memory(a) => Seek::seek(a, pos), + Compressed(a) => Seek::seek(a, pos), + File(a) => Seek::seek(a, pos), + Restartable(a) => Seek::seek(a, pos), + Vec(a) => Seek::seek(a, pos), + ExtensionSeek(a) => a.seek(pos), + } + } +} + +impl Debug for Reader { + fn fmt(&self, f: &mut Formatter<'_>) -> StdResult<(), FormatError> { + use Reader::*; + let field = match self { + Pipe(a) => format!("{:?}", a), + Memory(a) => format!("{:?}", a), + Compressed(a) => format!("{:?}", a), + Restartable(a) => format!("{:?}", a), + File(a) => format!("{:?}", a), + Vec(a) => format!("{:?}", a), + Extension(_) => "Extension".to_string(), + ExtensionSeek(_) => "ExtensionSeek".to_string(), + }; + f.debug_tuple("Reader").field(&field).finish() + } +} + +impl From> for Reader { + fn from(val: Vec) -> Reader { + Reader::Vec(Cursor::new(val)) + } +} + +/// Fusion trait for custom input sources which allow seeking. +pub trait ReadSeek { + /// See [`Read::read`]. + /// + /// [`Read::read`]: https://doc.rust-lang.org/nightly/std/io/trait.Read.html#tymethod.read + fn read(&mut self, buf: &mut [u8]) -> IoResult; + /// See [`Seek::seek`]. + /// + /// [`Seek::seek`]: https://doc.rust-lang.org/nightly/std/io/trait.Seek.html#tymethod.seek + fn seek(&mut self, pos: SeekFrom) -> IoResult; +} + +impl Read for dyn ReadSeek { + fn read(&mut self, buf: &mut [u8]) -> IoResult { + ReadSeek::read(self, buf) + } +} + +impl Seek for dyn ReadSeek { + fn seek(&mut self, pos: SeekFrom) -> IoResult { + ReadSeek::seek(self, pos) + } +} + +impl ReadSeek for R { + fn read(&mut self, buf: &mut [u8]) -> IoResult { + Read::read(self, buf) + } + + fn seek(&mut self, pos: SeekFrom) -> IoResult { + Seek::seek(self, pos) + } +} diff --git a/src/input/restartable.rs b/src/input/restartable.rs new file mode 100644 index 0000000..6965e54 --- /dev/null +++ b/src/input/restartable.rs @@ -0,0 +1,294 @@ +//! A source which supports seeking by recreating its input stream. +//! +//! This is intended for use with single-use audio tracks which +//! may require looping or seeking, but where additional memory +//! cannot be spared. Forward seeks will drain the track until reaching +//! the desired timestamp. +//! +//! Restarting occurs by temporarily pausing the track, running the restart +//! mechanism, and then passing the handle back to the mixer thread. Until +//! success/failure is confirmed, the track produces silence. 
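To make the tradeoff concrete, here is a minimal usage sketch of a restartable source, assuming the `Restartable::ffmpeg` constructor and the `From<Restartable> for Input` conversion defined later in this file (the file path is purely illustrative):

```rust
use songbird::input::{error::Result, Input, Restartable};

// Wrap a local file so that a backwards seek recreates the ffmpeg process
// instead of caching decoded audio in memory.
fn seekable_local_track() -> Result<Input> {
    let restartable = Restartable::ffmpeg("../audio/my-favourite-song.mp3")?;

    // `Input` implements `From<Restartable>`, so the driver can play it directly.
    Ok(restartable.into())
}
```

Forward seeks simply drain the underlying stream, so only backwards seeks pay the recreation cost.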
+ +use super::*; +use flume::{Receiver, TryRecvError}; +use futures::executor; +use std::{ + ffi::OsStr, + fmt::{Debug, Error as FormatError, Formatter}, + io::{Error as IoError, ErrorKind as IoErrorKind, Read, Result as IoResult, Seek, SeekFrom}, + result::Result as StdResult, + time::Duration, +}; + +type Recreator = Box; +type RecreateChannel = Receiver, Recreator)>>; + +/// A wrapper around a method to create a new [`Input`] which +/// seeks backward by recreating the source. +/// +/// The main purpose of this wrapper is to enable seeking on +/// incompatible sources (i.e., ffmpeg output) and to ease resource +/// consumption for commonly reused/shared tracks. [`Compressed`] +/// and [`Memory`] offer the same functionality with different +/// tradeoffs. +/// +/// This is intended for use with single-use audio tracks which +/// may require looping or seeking, but where additional memory +/// cannot be spared. Forward seeks will drain the track until reaching +/// the desired timestamp. +/// +/// [`Input`]: struct.Input.html +/// [`Memory`]: cached/struct.Memory.html +/// [`Compressed`]: cached/struct.Compressed.html +pub struct Restartable { + async_handle: Option, + awaiting_source: Option, + position: usize, + recreator: Option, + source: Box, +} + +impl Restartable { + /// Create a new source, which can be restarted using a `recreator` function. + pub fn new(mut recreator: impl Restart + Send + 'static) -> Result { + recreator.call_restart(None).map(move |source| Self { + async_handle: None, + awaiting_source: None, + position: 0, + recreator: Some(Box::new(recreator)), + source: Box::new(source), + }) + } + + /// Create a new restartable ffmpeg source for a local file. + pub fn ffmpeg + Send + Clone + 'static>(path: P) -> Result { + Self::new(FfmpegRestarter { path }) + } + + /// Create a new restartable ytdl source. + /// + /// The cost of restarting and seeking will probably be *very* high: + /// expect a pause if you seek backwards. + pub fn ytdl + Send + Clone + 'static>(uri: P) -> Result { + Self::new(move |time: Option| { + if let Some(time) = time { + let ts = format!("{}.{}", time.as_secs(), time.subsec_millis()); + + executor::block_on(_ytdl(uri.as_ref(), &["-ss", &ts])) + } else { + executor::block_on(ytdl(uri.as_ref())) + } + }) + } + + /// Create a new restartable ytdl source, using the first result of a youtube search. + /// + /// The cost of restarting and seeking will probably be *very* high: + /// expect a pause if you seek backwards. + pub fn ytdl_search(name: &str) -> Result { + Self::ytdl(format!("ytsearch1:{}", name)) + } + + pub(crate) fn prep_with_handle(&mut self, handle: Handle) { + self.async_handle = Some(handle); + } +} + +/// Trait used to create an instance of a [`Reader`] at instantiation and when +/// a backwards seek is needed. +/// +/// Many closures derive this automatically. +/// +/// [`Reader`]: ../reader/enum.Reader.html +pub trait Restart { + /// Tries to create a replacement source. + fn call_restart(&mut self, time: Option) -> Result; +} + +struct FfmpegRestarter
<P> +where +    P: AsRef<OsStr> + Send, +{ +    path: P, +} + +impl<P> Restart for FfmpegRestarter<P>
+where + P: AsRef + Send, +{ + fn call_restart(&mut self, time: Option) -> Result { + executor::block_on(async { + if let Some(time) = time { + let is_stereo = is_stereo(self.path.as_ref()) + .await + .unwrap_or_else(|_e| (false, Default::default())); + let stereo_val = if is_stereo.0 { "2" } else { "1" }; + + let ts = format!("{}.{}", time.as_secs(), time.subsec_millis()); + _ffmpeg_optioned( + self.path.as_ref(), + &["-ss", &ts], + &[ + "-f", + "s16le", + "-ac", + stereo_val, + "-ar", + "48000", + "-acodec", + "pcm_f32le", + "-", + ], + Some(is_stereo), + ) + .await + } else { + ffmpeg(self.path.as_ref()).await + } + }) + } +} + +impl
<P>
Restart for P +where + P: FnMut(Option) -> Result + Send + 'static, +{ + fn call_restart(&mut self, time: Option) -> Result { + (self)(time) + } +} + +impl Debug for Restartable { + fn fmt(&self, f: &mut Formatter<'_>) -> StdResult<(), FormatError> { + f.debug_struct("Restartable") + .field("async_handle", &self.async_handle) + .field("awaiting_source", &self.awaiting_source) + .field("position", &self.position) + .field("recreator", &"") + .field("source", &self.source) + .finish() + } +} + +impl From for Input { + fn from(mut src: Restartable) -> Self { + let kind = src.source.kind.clone(); + let meta = Some(src.source.metadata.take()); + let stereo = src.source.stereo; + let container = src.source.container; + Input::new(stereo, Reader::Restartable(src), kind, container, meta) + } +} + +// How do these work at a high level? +// If you need to restart, send a request to do this to the async context. +// if a request is pending, then just output all zeroes. + +impl Read for Restartable { + fn read(&mut self, buffer: &mut [u8]) -> IoResult { + let (out_val, march_pos, remove_async) = if let Some(chan) = &self.awaiting_source { + match chan.try_recv() { + Ok(Ok((new_source, recreator))) => { + self.source = new_source; + self.recreator = Some(recreator); + + (Read::read(&mut self.source, buffer), true, true) + }, + Ok(Err(source_error)) => { + let e = Err(IoError::new( + IoErrorKind::UnexpectedEof, + format!("Failed to create new reader: {:?}.", source_error), + )); + (e, false, true) + }, + Err(TryRecvError::Empty) => { + // Output all zeroes. + for el in buffer.iter_mut() { + *el = 0; + } + (Ok(buffer.len()), false, false) + }, + Err(_) => { + let e = Err(IoError::new( + IoErrorKind::UnexpectedEof, + "Failed to create new reader: dropped.", + )); + (e, false, true) + }, + } + } else { + // already have a good, valid source. + (Read::read(&mut self.source, buffer), true, false) + }; + + if remove_async { + self.awaiting_source = None; + } + + if march_pos { + out_val.map(|a| { + self.position += a; + a + }) + } else { + out_val + } + } +} + +impl Seek for Restartable { + fn seek(&mut self, pos: SeekFrom) -> IoResult { + let _local_pos = self.position as u64; + + use SeekFrom::*; + match pos { + Start(offset) => { + let stereo = self.source.stereo; + let _current_ts = utils::byte_count_to_timestamp(self.position, stereo); + let offset = offset as usize; + + if offset < self.position { + // We're going back in time. 
+ if let Some(handle) = self.async_handle.as_ref() { + let (tx, rx) = flume::bounded(1); + + self.awaiting_source = Some(rx); + + let recreator = self.recreator.take(); + + if let Some(mut rec) = recreator { + handle.spawn(async move { + let ret_val = rec.call_restart(Some( + utils::byte_count_to_timestamp(offset, stereo), + )); + + let _ = tx.send(ret_val.map(Box::new).map(|v| (v, rec))); + }); + } else { + return Err(IoError::new( + IoErrorKind::Interrupted, + "Previous seek in progress.", + )); + } + + self.position = offset; + } else { + return Err(IoError::new( + IoErrorKind::Interrupted, + "Cannot safely call seek until provided an async context handle.", + )); + } + } else { + self.position += self.source.consume(offset - self.position); + } + + Ok(offset as u64) + }, + End(_offset) => Err(IoError::new( + IoErrorKind::InvalidInput, + "End point for Restartables is not known.", + )), + Current(_offset) => unimplemented!(), + } + } +} diff --git a/src/input/utils.rs b/src/input/utils.rs new file mode 100644 index 0000000..d6072da --- /dev/null +++ b/src/input/utils.rs @@ -0,0 +1,41 @@ +//! Utility methods for seeking or decoding. + +use crate::constants::*; +use audiopus::{coder::Decoder, Channels, Result as OpusResult, SampleRate}; +use std::{mem, time::Duration}; + +/// Calculates the sample position in a FloatPCM stream from a timestamp. +pub fn timestamp_to_sample_count(timestamp: Duration, stereo: bool) -> usize { + ((timestamp.as_millis() as usize) * (MONO_FRAME_SIZE / FRAME_LEN_MS)) << stereo as usize +} + +/// Calculates the time position in a FloatPCM stream from a sample index. +pub fn sample_count_to_timestamp(amt: usize, stereo: bool) -> Duration { + Duration::from_millis((((amt * FRAME_LEN_MS) / MONO_FRAME_SIZE) as u64) >> stereo as u64) +} + +/// Calculates the byte position in a FloatPCM stream from a timestamp. +/// +/// Each sample is sized by `mem::size_of::() == 4usize`. +pub fn timestamp_to_byte_count(timestamp: Duration, stereo: bool) -> usize { + timestamp_to_sample_count(timestamp, stereo) * mem::size_of::() +} + +/// Calculates the time position in a FloatPCM stream from a byte index. +/// +/// Each sample is sized by `mem::size_of::() == 4usize`. +pub fn byte_count_to_timestamp(amt: usize, stereo: bool) -> Duration { + sample_count_to_timestamp(amt / mem::size_of::(), stereo) +} + +/// Create an Opus decoder outputting at a sample rate of 48kHz. +pub fn decoder(stereo: bool) -> OpusResult { + Decoder::new( + SampleRate::Hz48000, + if stereo { + Channels::Stereo + } else { + Channels::Mono + }, + ) +} diff --git a/src/input/ytdl_src.rs b/src/input/ytdl_src.rs new file mode 100644 index 0000000..1de3880 --- /dev/null +++ b/src/input/ytdl_src.rs @@ -0,0 +1,107 @@ +use super::{ + child_to_reader, + error::{Error, Result}, + Codec, + Container, + Input, + Metadata, +}; +use serde_json::Value; +use std::{ + io::{BufRead, BufReader, Read}, + process::{Command, Stdio}, +}; +use tokio::task; +use tracing::trace; + +/// Creates a streamed audio source with `youtube-dl` and `ffmpeg`. 
+pub async fn ytdl(uri: &str) -> Result { + _ytdl(uri, &[]).await +} + +pub(crate) async fn _ytdl(uri: &str, pre_args: &[&str]) -> Result { + let ytdl_args = [ + "--print-json", + "-f", + "webm[abr>0]/bestaudio/best", + "-R", + "infinite", + "--no-playlist", + "--ignore-config", + uri, + "-o", + "-", + ]; + + let ffmpeg_args = [ + "-f", + "s16le", + "-ac", + "2", + "-ar", + "48000", + "-acodec", + "pcm_f32le", + "-", + ]; + + let mut youtube_dl = Command::new("youtube-dl") + .args(&ytdl_args) + .stdin(Stdio::null()) + .stderr(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn()?; + + let stderr = youtube_dl.stderr.take(); + + let (returned_stderr, value) = task::spawn_blocking(move || { + if let Some(mut s) = stderr { + let out: Option = { + let mut o_vec = vec![]; + let mut serde_read = BufReader::new(s.by_ref()); + // Newline... + if let Ok(len) = serde_read.read_until(0xA, &mut o_vec) { + serde_json::from_slice(&o_vec[..len]).ok() + } else { + None + } + }; + + (Some(s), out) + } else { + (None, None) + } + }) + .await + .map_err(|_| Error::Metadata)?; + + youtube_dl.stderr = returned_stderr; + + let ffmpeg = Command::new("ffmpeg") + .args(pre_args) + .arg("-i") + .arg("-") + .args(&ffmpeg_args) + .stdin(youtube_dl.stdout.ok_or(Error::Stdout)?) + .stderr(Stdio::null()) + .stdout(Stdio::piped()) + .spawn()?; + + let metadata = Metadata::from_ytdl_output(value.unwrap_or_default()); + + trace!("ytdl metadata {:?}", metadata); + + Ok(Input::new( + true, + child_to_reader::(ffmpeg), + Codec::FloatPcm, + Container::Raw, + Some(metadata), + )) +} + +/// Creates a streamed audio source from YouTube search results with `youtube-dl`,`ffmpeg`, and `ytsearch`. +/// Takes the first video listed from the YouTube search. +pub async fn ytdl_search(name: &str) -> Result { + ytdl(&format!("ytsearch1:{}", name)).await +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..99e53c7 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,84 @@ +#![doc( + html_logo_url = "https://raw.githubusercontent.com/FelixMcFelix/serenity/voice-rework/songbird/songbird.png", + html_favicon_url = "https://raw.githubusercontent.com/FelixMcFelix/serenity/voice-rework/songbird/songbird-ico.png" +)] +#![deny(missing_docs)] +//! ![project logo][logo] +//! +//! Songbird is an async, cross-library compatible voice system for Discord, written in Rust. +//! The library offers: +//! * A standalone gateway frontend compatible with [serenity] and [twilight] using the +//! `"gateway"` and `"[serenity/twilight]-[rustls/native]"` features. You can even run +//! driverless, to help manage your [lavalink] sessions. +//! * A standalone driver for voice calls, via the `"driver"` feature. If you can create +//! a [`ConnectionInfo`] using any other gateway, or language for your bot, then you +//! can run the songbird voice driver. +//! * And, by default, a fully featured voice system featuring events, queues, RT(C)P packet +//! handling, seeking on compatible streams, shared multithreaded audio stream caches, +//! and direct Opus data passthrough from DCA files. +//! +//! ## Examples +//! Full examples showing various types of functionality and integrations can be found as part of [serenity's examples], +//! and in [this crate's examples directory]. +//! +//! ## Attribution +//! +//! Songbird's logo is based upon the copyright-free image ["Black-Capped Chickadee"] by George Gorgas White. +//! +//! [logo]: https://raw.githubusercontent.com/FelixMcFelix/serenity/voice-rework/songbird/songbird.png +//! 
[serenity]: https://github.com/serenity-rs/serenity +//! [twilight]: https://github.com/twilight-rs/twilight +//! [serenity's examples]: https://github.com/serenity-rs/serenity/tree/current/examples +//! [this crate's examples directory]: https://github.com/serenity-rs/serenity/tree/current/songbird/examples +//! ["Black-Capped Chickadee"]: https://www.oldbookillustrations.com/illustrations/black-capped-chickadee/ +//! [`ConnectionInfo`]: struct.ConnectionInfo.html +//! [lavalink]: https://github.com/Frederikam/Lavalink + +pub mod constants; +#[cfg(feature = "driver")] +pub mod driver; +pub mod error; +#[cfg(feature = "driver")] +pub mod events; +#[cfg(feature = "gateway")] +mod handler; +pub mod id; +pub(crate) mod info; +#[cfg(feature = "driver")] +pub mod input; +#[cfg(feature = "gateway")] +mod manager; +#[cfg(feature = "serenity")] +pub mod serenity; +#[cfg(feature = "gateway")] +pub mod shards; +#[cfg(feature = "driver")] +pub mod tracks; +#[cfg(feature = "driver")] +mod ws; + +#[cfg(feature = "driver")] +pub use audiopus::{self as opus, Bitrate}; +#[cfg(feature = "driver")] +pub use discortp as packet; +#[cfg(feature = "driver")] +pub use serenity_voice_model as model; + +#[cfg(test)] +use utils as test_utils; + +#[cfg(feature = "driver")] +pub use crate::{ + driver::Driver, + events::{CoreEvent, Event, EventContext, EventHandler, TrackEvent}, + input::{ffmpeg, ytdl}, + tracks::create_player, +}; + +#[cfg(feature = "gateway")] +pub use crate::{handler::Call, manager::Songbird}; + +#[cfg(feature = "serenity")] +pub use crate::serenity::*; + +pub use info::ConnectionInfo; diff --git a/src/manager.rs b/src/manager.rs new file mode 100644 index 0000000..7543975 --- /dev/null +++ b/src/manager.rs @@ -0,0 +1,353 @@ +#[cfg(feature = "driver")] +use crate::error::ConnectionResult; +use crate::{ + error::{JoinError, JoinResult}, + id::{ChannelId, GuildId, UserId}, + shards::Sharder, + Call, + ConnectionInfo, +}; +#[cfg(feature = "serenity")] +use async_trait::async_trait; +use flume::Receiver; +#[cfg(feature = "serenity")] +use futures::channel::mpsc::UnboundedSender as Sender; +use parking_lot::RwLock as PRwLock; +#[cfg(feature = "serenity")] +use serenity::{ + client::bridge::voice::VoiceGatewayManager, + gateway::InterMessage, + model::{ + id::{GuildId as SerenityGuild, UserId as SerenityUser}, + voice::VoiceState, + }, +}; +use std::{collections::HashMap, sync::Arc}; +use tokio::sync::Mutex; +#[cfg(feature = "twilight")] +use twilight_gateway::Cluster; +#[cfg(feature = "twilight")] +use twilight_model::gateway::event::Event as TwilightEvent; + +#[derive(Clone, Copy, Debug, Default)] +struct ClientData { + shard_count: u64, + initialised: bool, + user_id: UserId, +} + +/// A shard-aware struct responsible for managing [`Call`]s. +/// +/// This manager transparently maps guild state and a source of shard information +/// into individual calls, and forwards state updates which affect call state. +/// +/// [`Call`]: struct.Call.html +#[derive(Debug)] +pub struct Songbird { + client_data: PRwLock, + calls: PRwLock>>>, + sharder: Sharder, +} + +impl Songbird { + #[cfg(feature = "serenity")] + /// Create a new Songbird instance for serenity. + /// + /// This must be [registered] after creation. 
+ /// + /// [registered]: serenity/fn.register_with.html + pub fn serenity() -> Arc { + Arc::new(Self { + client_data: Default::default(), + calls: Default::default(), + sharder: Sharder::Serenity(Default::default()), + }) + } + + #[cfg(feature = "twilight")] + /// Create a new Songbird instance for twilight. + /// + /// Twilight handlers do not need to be registered, but + /// users are responsible for passing in any events using + /// [`process`]. + /// + /// [`process`]: #method.process + pub fn twilight(cluster: Cluster, shard_count: u64, user_id: U) -> Arc + where + U: Into, + { + Arc::new(Self { + client_data: PRwLock::new(ClientData { + shard_count, + initialised: true, + user_id: user_id.into(), + }), + calls: Default::default(), + sharder: Sharder::Twilight(cluster), + }) + } + + /// Set the bot's user, and the number of shards in use. + /// + /// If this struct is already initialised (e.g., from [`::twilight`]), + /// or a previous call, then this function is a no-op. + /// + /// [`::twilight`]: #method.twilight + pub fn initialise_client_data>(&self, shard_count: u64, user_id: U) { + let mut client_data = self.client_data.write(); + + if client_data.initialised { + return; + } + + client_data.shard_count = shard_count; + client_data.user_id = user_id.into(); + client_data.initialised = true; + } + + /// Retreives a [`Call`] for the given guild, if one already exists. + /// + /// [`Call`]: struct.Call.html + pub fn get>(&self, guild_id: G) -> Option>> { + let map_read = self.calls.read(); + map_read.get(&guild_id.into()).cloned() + } + + /// Retreives a [`Call`] for the given guild, creating a new one if + /// none is found. + /// + /// This will not join any calls, or cause connection state to change. + /// + /// [`Call`]: struct.Call.html + pub fn get_or_insert(&self, guild_id: GuildId) -> Arc> { + self.get(guild_id).unwrap_or_else(|| { + let mut map_read = self.calls.write(); + + map_read + .entry(guild_id) + .or_insert_with(|| { + let info = self.manager_info(); + let shard = shard_id(guild_id.0, info.shard_count); + let shard_handle = self + .sharder + .get_shard(shard) + .expect("Failed to get shard handle: shard_count incorrect?"); + + Arc::new(Mutex::new(Call::new(guild_id, shard_handle, info.user_id))) + }) + .clone() + }) + } + + fn manager_info(&self) -> ClientData { + let client_data = self.client_data.write(); + + *client_data + } + + #[cfg(feature = "driver")] + /// Connects to a target by retrieving its relevant [`Call`] and + /// connecting, or creating the handler if required. + /// + /// This can also switch to the given channel, if a handler already exists + /// for the target and the current connected channel is not equal to the + /// given channel. + /// + /// The provided channel ID is used as a connection target. The + /// channel _must_ be in the provided guild. This is _not_ checked by the + /// library, and will result in an error. If there is already a connected + /// handler for the guild, _and_ the provided channel is different from the + /// channel that the connection is already connected to, then the handler + /// will switch the connection to the provided channel. + /// + /// If you _only_ need to retrieve the handler for a target, then use + /// [`get`]. 
+ /// + /// [`Call`]: struct.Call.html + /// [`get`]: #method.get + #[inline] + pub async fn join( + &self, + guild_id: G, + channel_id: C, + ) -> (Arc>, JoinResult>>) + where + C: Into, + G: Into, + { + self._join(guild_id.into(), channel_id.into()).await + } + + #[cfg(feature = "driver")] + async fn _join( + &self, + guild_id: GuildId, + channel_id: ChannelId, + ) -> (Arc>, JoinResult>>) { + let call = self.get_or_insert(guild_id); + + let result = { + let mut handler = call.lock().await; + handler.join(channel_id).await + }; + + (call, result) + } + + /// Partially connects to a target by retrieving its relevant [`Call`] and + /// connecting, or creating the handler if required. + /// + /// This method returns the handle and the connection info needed for other libraries + /// or drivers, such as lavalink, and does not actually start or run a voice call. + /// + /// [`Call`]: struct.Call.html + #[inline] + pub async fn join_gateway( + &self, + guild_id: G, + channel_id: C, + ) -> (Arc>, JoinResult>) + where + C: Into, + G: Into, + { + self._join_gateway(guild_id.into(), channel_id.into()).await + } + + async fn _join_gateway( + &self, + guild_id: GuildId, + channel_id: ChannelId, + ) -> (Arc>, JoinResult>) { + let call = self.get_or_insert(guild_id); + + let result = { + let mut handler = call.lock().await; + handler.join_gateway(channel_id).await + }; + + (call, result) + } + + /// Retrieves the [handler][`Call`] for the given target and leaves the + /// associated voice channel, if connected. + /// + /// This will _not_ drop the handler, and will preserve it and its settings. + /// + /// This is a wrapper around [getting][`get`] a handler and calling + /// [`leave`] on it. + /// + /// [`Call`]: struct.Call.html + /// [`get`]: #method.get + /// [`leave`]: struct.Call.html#method.leave + #[inline] + pub async fn leave>(&self, guild_id: G) -> JoinResult<()> { + self._leave(guild_id.into()).await + } + + async fn _leave(&self, guild_id: GuildId) -> JoinResult<()> { + if let Some(call) = self.get(guild_id) { + let mut handler = call.lock().await; + handler.leave().await + } else { + Err(JoinError::NoCall) + } + } + + /// Retrieves the [`Call`] for the given target and leaves the associated + /// voice channel, if connected. + /// + /// The handler is then dropped, removing settings for the target. + /// + /// An Err(...) value implies that the gateway could not be contacted, + /// and that leaving should be attempted again later (i.e., after reconnect). + /// + /// [`Call`]: struct.Call.html + #[inline] + pub async fn remove>(&self, guild_id: G) -> JoinResult<()> { + self._remove(guild_id.into()).await + } + + async fn _remove(&self, guild_id: GuildId) -> JoinResult<()> { + self.leave(guild_id).await?; + let mut calls = self.calls.write(); + calls.remove(&guild_id); + Ok(()) + } +} + +#[cfg(feature = "twilight")] +impl Songbird { + /// Handle events received on the cluster. + /// + /// When using twilight, you are required to call this with all inbound + /// (voice) events, *i.e.*, at least `VoiceStateUpdate`s and `VoiceServerUpdate`s. 
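A rough sketch of the forwarding loop this implies, assuming a `Songbird` built via [`::twilight`] and some stream of `(shard_id, event)` pairs obtained from the same cluster (how that stream is produced is not shown here):

```rust
use std::sync::Arc;

use futures::{Stream, StreamExt};
use songbird::Songbird;
use twilight_model::gateway::event::Event;

// Forward every inbound gateway event to Songbird; internally it only acts on
// VoiceStateUpdate and VoiceServerUpdate, ignoring everything else.
async fn forward_events<S>(songbird: Arc<Songbird>, mut events: S)
where
    S: Stream<Item = (u64, Event)> + Unpin,
{
    while let Some((_shard_id, event)) = events.next().await {
        songbird.process(&event).await;
    }
}
```

Serenity users do not need an equivalent loop; the `VoiceGatewayManager` implementation further down receives these updates through serenity's own gateway plumbing.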
+ pub async fn process(&self, event: &TwilightEvent) { + match event { + TwilightEvent::VoiceServerUpdate(v) => { + let call = v.guild_id.map(GuildId::from).and_then(|id| self.get(id)); + + if let Some(call) = call { + let mut handler = call.lock().await; + if let Some(endpoint) = &v.endpoint { + handler.update_server(endpoint.clone(), v.token.clone()); + } + } + }, + TwilightEvent::VoiceStateUpdate(v) => { + if v.0.user_id.0 != self.client_data.read().user_id.0 { + return; + } + + let call = v.0.guild_id.map(GuildId::from).and_then(|id| self.get(id)); + + if let Some(call) = call { + let mut handler = call.lock().await; + handler.update_state(v.0.session_id.clone()); + } + }, + _ => {}, + } + } +} + +#[cfg(feature = "serenity")] +#[async_trait] +impl VoiceGatewayManager for Songbird { + async fn initialise(&self, shard_count: u64, user_id: SerenityUser) { + self.initialise_client_data(shard_count, user_id); + } + + async fn register_shard(&self, shard_id: u64, sender: Sender) { + self.sharder.register_shard_handle(shard_id, sender); + } + + async fn deregister_shard(&self, shard_id: u64) { + self.sharder.deregister_shard_handle(shard_id); + } + + async fn server_update(&self, guild_id: SerenityGuild, endpoint: &Option, token: &str) { + if let Some(call) = self.get(guild_id) { + let mut handler = call.lock().await; + if let Some(endpoint) = endpoint { + handler.update_server(endpoint.clone(), token.to_string()); + } + } + } + + async fn state_update(&self, guild_id: SerenityGuild, voice_state: &VoiceState) { + if voice_state.user_id.0 != self.client_data.read().user_id.0 { + return; + } + + if let Some(call) = self.get(guild_id) { + let mut handler = call.lock().await; + handler.update_state(voice_state.session_id.clone()); + } + } +} + +#[inline] +fn shard_id(guild_id: u64, shard_count: u64) -> u64 { + (guild_id >> 22) % shard_count +} diff --git a/src/serenity.rs b/src/serenity.rs new file mode 100644 index 0000000..87b6d32 --- /dev/null +++ b/src/serenity.rs @@ -0,0 +1,71 @@ +//! Compatability and convenience methods for working with [serenity]. +//! Requires the `"serenity-rustls"` or `"serenity-native"` features. +//! +//! [serenity]: https://crates.io/crates/serenity/0.9.0-rc.2 + +use crate::manager::Songbird; +use serenity::{ + client::{ClientBuilder, Context}, + prelude::TypeMapKey, +}; +use std::sync::Arc; + +/// Zero-size type used to retrieve the registered [`Songbird`] instance +/// from serenity's inner TypeMap. +/// +/// [`Songbird`]: ../struct.Songbird.html +pub struct SongbirdKey; + +impl TypeMapKey for SongbirdKey { + type Value = Arc; +} + +/// Installs a new songbird instance into the serenity client. +/// +/// This should be called after any uses of `ClientBuilder::type_map`. +pub fn register(client_builder: ClientBuilder) -> ClientBuilder { + let voice = Songbird::serenity(); + register_with(client_builder, voice) +} + +/// Installs a given songbird instance into the serenity client. +/// +/// This should be called after any uses of `ClientBuilder::type_map`. +pub fn register_with(client_builder: ClientBuilder, voice: Arc) -> ClientBuilder { + client_builder + .voice_manager_arc(voice.clone()) + .type_map_insert::(voice) +} + +/// Retrieve the Songbird voice client from a serenity context's +/// shared key-value store. +pub async fn get(ctx: &Context) -> Option> { + let data = ctx.data.read().await; + + data.get::().cloned() +} + +/// Helper trait to add installation/creation methods to serenity's +/// `ClientBuilder`. 
+/// +/// These install the client to receive gateway voice events, and +/// store an easily accessible reference to Songbird's managers. +pub trait SerenityInit { + /// Registers a new Songbird voice system with serenity, storing it for easy + /// access via [`get`]. + /// + /// [`get`]: fn.get.html + fn register_songbird(self) -> Self; + /// Registers a given Songbird voice system with serenity, as above. + fn register_songbird_with(self, voice: Arc) -> Self; +} + +impl SerenityInit for ClientBuilder<'_> { + fn register_songbird(self) -> Self { + register(self) + } + + fn register_songbird_with(self, voice: Arc) -> Self { + register_with(self, voice) + } +} diff --git a/src/shards.rs b/src/shards.rs new file mode 100644 index 0000000..7577b8f --- /dev/null +++ b/src/shards.rs @@ -0,0 +1,168 @@ +//! Handlers for sending packets over sharded connections. + +use crate::error::{JoinError, JoinResult}; +#[cfg(feature = "serenity")] +use futures::channel::mpsc::{TrySendError, UnboundedSender as Sender}; +#[cfg(feature = "serenity")] +use parking_lot::{lock_api::RwLockWriteGuard, Mutex as PMutex, RwLock as PRwLock}; +use serde_json::Value; +#[cfg(feature = "serenity")] +use serenity::gateway::InterMessage; +#[cfg(feature = "serenity")] +use std::{collections::HashMap, result::Result as StdResult, sync::Arc}; +use tracing::error; +#[cfg(feature = "twilight")] +use twilight_gateway::{Cluster, Shard as TwilightShard}; + +#[derive(Debug)] +#[non_exhaustive] +/// Source of individual shard connection handles. +pub enum Sharder { + #[cfg(feature = "serenity")] + /// Serenity-specific wrapper for sharder state initialised by the library. + Serenity(SerenitySharder), + #[cfg(feature = "twilight")] + /// Twilight-specific wrapper for sharder state initialised by the user. + Twilight(Cluster), +} + +impl Sharder { + #[allow(unreachable_patterns)] + /// Returns a new handle to the required inner shard. + pub fn get_shard(&self, shard_id: u64) -> Option { + match self { + #[cfg(feature = "serenity")] + Sharder::Serenity(s) => Some(Shard::Serenity(s.get_or_insert_shard_handle(shard_id))), + #[cfg(feature = "twilight")] + Sharder::Twilight(t) => t.shard(shard_id).map(Shard::Twilight), + _ => None, + } + } +} + +#[cfg(feature = "serenity")] +impl Sharder { + #[allow(unreachable_patterns)] + pub(crate) fn register_shard_handle(&self, shard_id: u64, sender: Sender) { + match self { + Sharder::Serenity(s) => s.register_shard_handle(shard_id, sender), + _ => error!("Called serenity management function on a non-serenity Songbird instance."), + } + } + + #[allow(unreachable_patterns)] + pub(crate) fn deregister_shard_handle(&self, shard_id: u64) { + match self { + Sharder::Serenity(s) => s.deregister_shard_handle(shard_id), + _ => error!("Called serenity management function on a non-serenity Songbird instance."), + } + } +} + +#[cfg(feature = "serenity")] +#[derive(Debug, Default)] +/// Serenity-specific wrapper for sharder state initialised by the library. +/// +/// This is updated and maintained by the library, and is designed to prevent +/// message loss during rebalances and reconnects. 
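Tying the serenity-facing pieces above together: a minimal registration sketch, assuming serenity 0.9's `ClientBuilder` and omitting token handling, frameworks, and event handlers:

```rust
use serenity::client::Client;
use songbird::SerenityInit;

// Install Songbird while building the serenity client; afterwards,
// `songbird::get(&ctx)` can retrieve it inside event and command handlers.
async fn build_client(token: &str) -> Client {
    Client::builder(token)
        .register_songbird()
        .await
        .expect("client creation to succeed")
}
```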
+pub struct SerenitySharder(PRwLock>>); + +#[cfg(feature = "serenity")] +impl SerenitySharder { + fn get_or_insert_shard_handle(&self, shard_id: u64) -> Arc { + ({ + let map_read = self.0.read(); + map_read.get(&shard_id).cloned() + }) + .unwrap_or_else(|| { + let mut map_read = self.0.write(); + map_read.entry(shard_id).or_default().clone() + }) + } + + fn register_shard_handle(&self, shard_id: u64, sender: Sender) { + // Write locks are only used to add new entries to the map. + let handle = self.get_or_insert_shard_handle(shard_id); + + handle.register(sender); + } + + fn deregister_shard_handle(&self, shard_id: u64) { + // Write locks are only used to add new entries to the map. + let handle = self.get_or_insert_shard_handle(shard_id); + + handle.deregister(); + } +} + +#[derive(Clone, Debug)] +#[non_exhaustive] +/// A reference to an individual websocket connection. +pub enum Shard { + #[cfg(feature = "serenity")] + /// Handle to one of serenity's shard runners. + Serenity(Arc), + #[cfg(feature = "twilight")] + /// Handle to a twilight shard spawned from a cluster. + Twilight(TwilightShard), +} + +impl Shard { + #[allow(unreachable_patterns)] + /// Send a JSON message to the inner shard handle. + pub async fn send(&mut self, msg: Value) -> JoinResult<()> { + match self { + #[cfg(feature = "serenity")] + Shard::Serenity(s) => s.send(InterMessage::Json(msg))?, + #[cfg(feature = "twilight")] + Shard::Twilight(t) => t.command(&msg).await?, + _ => return Err(JoinError::NoSender), + } + Ok(()) + } +} + +#[cfg(feature = "serenity")] +/// Handle to an individual shard designed to buffer unsent messages while +/// a reconnect/rebalance is ongoing. +#[derive(Debug, Default)] +pub struct SerenityShardHandle { + sender: PRwLock>>, + queue: PMutex>, +} + +#[cfg(feature = "serenity")] +impl SerenityShardHandle { + fn register(&self, sender: Sender) { + let mut sender_lock = self.sender.write(); + *sender_lock = Some(sender); + + let sender_lock = RwLockWriteGuard::downgrade(sender_lock); + let mut messages_lock = self.queue.lock(); + + if let Some(sender) = &*sender_lock { + for msg in messages_lock.drain(..) { + if let Err(e) = sender.unbounded_send(msg) { + error!("Error while clearing gateway message queue: {:?}", e); + break; + } + } + } + } + + fn deregister(&self) { + let mut sender_lock = self.sender.write(); + *sender_lock = None; + } + + fn send(&self, message: InterMessage) -> StdResult<(), TrySendError> { + let sender_lock = self.sender.read(); + if let Some(sender) = &*sender_lock { + sender.unbounded_send(message) + } else { + let mut messages_lock = self.queue.lock(); + messages_lock.push(message); + Ok(()) + } + } +} diff --git a/src/tracks/command.rs b/src/tracks/command.rs new file mode 100644 index 0000000..4dc3ef6 --- /dev/null +++ b/src/tracks/command.rs @@ -0,0 +1,53 @@ +use super::*; +use crate::events::EventData; +use std::time::Duration; +use tokio::sync::oneshot::Sender as OneshotSender; + +/// A request from external code using a [`TrackHandle`] to modify +/// or act upon an [`Track`] object. +/// +/// [`Track`]: struct.Track.html +/// [`TrackHandle`]: struct.TrackHandle.html +pub enum TrackCommand { + /// Set the track's play_mode to play/resume. + Play, + /// Set the track's play_mode to pause. + Pause, + /// Stop the target track. This cannot be undone. + Stop, + /// Set the track's volume. + Volume(f32), + /// Seek to the given duration. + /// + /// On unsupported input types, this can be fatal. + Seek(Duration), + /// Register an event on this track. 
+ AddEvent(EventData), + /// Run some closure on this track, with direct access to the core object. + Do(Box), + /// Request a read-only view of this track's state. + Request(OneshotSender>), + /// Change the loop count/strategy of this track. + Loop(LoopState), +} + +impl std::fmt::Debug for TrackCommand { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + use TrackCommand::*; + write!( + f, + "TrackCommand::{}", + match self { + Play => "Play".to_string(), + Pause => "Pause".to_string(), + Stop => "Stop".to_string(), + Volume(vol) => format!("Volume({})", vol), + Seek(d) => format!("Seek({:?})", d), + AddEvent(evt) => format!("AddEvent({:?})", evt), + Do(_f) => "Do([function])".to_string(), + Request(tx) => format!("Request({:?})", tx), + Loop(loops) => format!("Loop({:?})", loops), + } + ) + } +} diff --git a/src/tracks/handle.rs b/src/tracks/handle.rs new file mode 100644 index 0000000..effa703 --- /dev/null +++ b/src/tracks/handle.rs @@ -0,0 +1,159 @@ +use super::*; +use crate::events::{Event, EventData, EventHandler}; +use std::time::Duration; +use tokio::sync::{ + mpsc::{error::SendError, UnboundedSender}, + oneshot, +}; + +#[derive(Clone, Debug)] +/// Handle for safe control of a [`Track`] track from other threads, outside +/// of the audio mixing and voice handling context. +/// +/// Almost all method calls here are fallible; in most cases, this will be because +/// the underlying [`Track`] object has been discarded. Those which aren't refer +/// to immutable properties of the underlying stream. +/// +/// [`Track`]: struct.Track.html +pub struct TrackHandle { + command_channel: UnboundedSender, + seekable: bool, +} + +impl TrackHandle { + /// Creates a new handle, using the given command sink and hint as to whether + /// the underlying [`Input`] supports seek operations. + /// + /// [`Input`]: ../input/struct.Input.html + pub fn new(command_channel: UnboundedSender, seekable: bool) -> Self { + Self { + command_channel, + seekable, + } + } + + /// Unpauses an audio track. + pub fn play(&self) -> TrackResult { + self.send(TrackCommand::Play) + } + + /// Pauses an audio track. + pub fn pause(&self) -> TrackResult { + self.send(TrackCommand::Pause) + } + + /// Stops an audio track. + /// + /// This is *final*, and will cause the audio context to fire + /// a [`TrackEvent::End`] event. + /// + /// [`TrackEvent::End`]: ../events/enum.TrackEvent.html#variant.End + pub fn stop(&self) -> TrackResult { + self.send(TrackCommand::Stop) + } + + /// Sets the volume of an audio track. + pub fn set_volume(&self, volume: f32) -> TrackResult { + self.send(TrackCommand::Volume(volume)) + } + + /// Denotes whether the underlying [`Input`] stream is compatible with arbitrary seeking. + /// + /// If this returns `false`, all calls to [`seek`] will fail, and the track is + /// incapable of looping. + /// + /// [`seek`]: #method.seek + /// [`Input`]: ../input/struct.Input.html + pub fn is_seekable(&self) -> bool { + self.seekable + } + + /// Seeks along the track to the specified position. + /// + /// If the underlying [`Input`] does not support this behaviour, + /// then all calls will fail. + /// + /// [`Input`]: ../input/struct.Input.html + pub fn seek_time(&self, position: Duration) -> TrackResult { + if self.seekable { + self.send(TrackCommand::Seek(position)) + } else { + Err(SendError(TrackCommand::Seek(position))) + } + } + + /// Attach an event handler to an audio track. These will receive [`EventContext::Track`]. 
+ /// + /// Users **must** ensure that no costly work or blocking occurs + /// within the supplied function or closure. *Taking excess time could prevent + /// timely sending of packets, causing audio glitches and delays*. + /// + /// [`Track`]: struct.Track.html + /// [`EventContext::Track`]: ../events/enum.EventContext.html#variant.Track + pub fn add_event(&self, event: Event, action: F) -> TrackResult { + let cmd = TrackCommand::AddEvent(EventData::new(event, action)); + if event.is_global_only() { + Err(SendError(cmd)) + } else { + self.send(cmd) + } + } + + /// Perform an arbitrary action on a raw [`Track`] object. + /// + /// Users **must** ensure that no costly work or blocking occurs + /// within the supplied function or closure. *Taking excess time could prevent + /// timely sending of packets, causing audio glitches and delays*. + /// + /// [`Track`]: struct.Track.html + pub fn action(&self, action: F) -> TrackResult + where + F: FnOnce(&mut Track) + Send + Sync + 'static, + { + self.send(TrackCommand::Do(Box::new(action))) + } + + /// Request playback information and state from the audio context. + /// + /// Crucially, the audio thread will respond *at a later time*: + /// It is up to the user when or how this should be read from the returned channel. + pub fn get_info(&self) -> TrackQueryResult { + let (tx, rx) = oneshot::channel(); + self.send(TrackCommand::Request(tx)).map(move |_| rx) + } + + /// Set an audio track to loop indefinitely. + pub fn enable_loop(&self) -> TrackResult { + if self.seekable { + self.send(TrackCommand::Loop(LoopState::Infinite)) + } else { + Err(SendError(TrackCommand::Loop(LoopState::Infinite))) + } + } + + /// Set an audio track to no longer loop. + pub fn disable_loop(&self) -> TrackResult { + if self.seekable { + self.send(TrackCommand::Loop(LoopState::Finite(0))) + } else { + Err(SendError(TrackCommand::Loop(LoopState::Finite(0)))) + } + } + + /// Set an audio track to loop a set number of times. + pub fn loop_for(&self, count: usize) -> TrackResult { + if self.seekable { + self.send(TrackCommand::Loop(LoopState::Finite(count))) + } else { + Err(SendError(TrackCommand::Loop(LoopState::Finite(count)))) + } + } + + #[inline] + /// Send a raw command to the [`Track`] object. + /// + /// [`Track`]: struct.Track.html + pub fn send(&self, cmd: TrackCommand) -> TrackResult { + self.command_channel.send(cmd) + } +} diff --git a/src/tracks/looping.rs b/src/tracks/looping.rs new file mode 100644 index 0000000..0e57d0a --- /dev/null +++ b/src/tracks/looping.rs @@ -0,0 +1,22 @@ +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +/// Looping behaviour for a [`Track`]. +/// +/// [`Track`]: struct.Track.html +pub enum LoopState { + /// Track will loop endlessly until loop state is changed or + /// manually stopped. + Infinite, + + /// Track will loop `n` more times. + /// + /// `Finite(0)` is the `Default`, stopping the track once its [`Input`] ends. + /// + /// [`Input`]: ../input/struct.Input.html + Finite(usize), +} + +impl Default for LoopState { + fn default() -> Self { + Self::Finite(0) + } +} diff --git a/src/tracks/mod.rs b/src/tracks/mod.rs new file mode 100644 index 0000000..d60f867 --- /dev/null +++ b/src/tracks/mod.rs @@ -0,0 +1,379 @@ +//! Live, controllable audio instances. +//! +//! Tracks add control and event data around the bytestreams offered by [`Input`], +//! where each represents a live audio source inside of the driver's mixer. +//! +//! To prevent locking and stalling of the driver, tracks are controlled from your bot using a +//! 
[`TrackHandle`]. These handles remotely send commands from your bot's (a)sync +//! context to control playback, register events, and execute synchronous closures. +//! +//! If you want a new track from an [`Input`], i.e., for direct control before +//! playing your source on the driver, use [`create_player`]. +//! +//! [`Input`]: ../input/struct.Input.html +//! [`TrackHandle`]: struct.TrackHandle.html +//! [`create_player`]: fn.create_player.html + +mod command; +mod handle; +mod looping; +mod mode; +mod queue; +mod state; + +pub use self::{command::*, handle::*, looping::*, mode::*, queue::*, state::*}; + +use crate::{constants::*, driver::tasks::message::*, events::EventStore, input::Input}; +use std::time::Duration; +use tokio::sync::{ + mpsc::{ + self, + error::{SendError, TryRecvError}, + UnboundedReceiver, + }, + oneshot::Receiver as OneshotReceiver, +}; + +/// Control object for audio playback. +/// +/// Accessed by both commands and the playback code -- as such, access from user code is +/// almost always guarded via a [`TrackHandle`]. You should expect to receive +/// access to a raw object of this type via [`create_player`], for use in +/// [`Driver::play`] or [`Driver::play_only`]. +/// +/// # Example +/// +/// ```rust,no_run +/// use songbird::{driver::Driver, ffmpeg, tracks::create_player}; +/// +/// # async { +/// // A Call is also valid here! +/// let mut handler: Driver = Default::default(); +/// let source = ffmpeg("../audio/my-favourite-song.mp3") +/// .await +/// .expect("This might fail: handle this error!"); +/// let (mut audio, audio_handle) = create_player(source); +/// +/// audio.set_volume(0.5); +/// +/// handler.play_only(audio); +/// +/// // Future access occurs via audio_handle. +/// # }; +/// ``` +/// +/// [`Driver::play_only`]: ../struct.Driver.html#method.play_only +/// [`Driver::play`]: ../struct.Driver.html#method.play +/// [`TrackHandle`]: struct.TrackHandle.html +/// [`create_player`]: fn.create_player.html +#[derive(Debug)] +pub struct Track { + /// Whether or not this sound is currently playing. + /// + /// Can be controlled with [`play`] or [`pause`] if chaining is desired. + /// + /// [`play`]: #method.play + /// [`pause`]: #method.pause + pub(crate) playing: PlayMode, + + /// The desired volume for playback. + /// + /// Sensible values fall between `0.0` and `1.0`. + /// + /// Can be controlled with [`volume`] if chaining is desired. + /// + /// [`volume`]: #method.volume + pub(crate) volume: f32, + + /// Underlying data access object. + /// + /// *Calling code is not expected to use this.* + pub(crate) source: Input, + + /// The current playback position in the track. + pub(crate) position: Duration, + + /// The total length of time this track has been active. + pub(crate) play_time: Duration, + + /// List of events attached to this audio track. + /// + /// This may be used to add additional events to a track + /// before it is sent to the audio context for playing. + pub events: Option, + + /// Channel from which commands are received. + /// + /// Track commands are sent in this manner to ensure that access + /// occurs in a thread-safe manner, without allowing any external + /// code to lock access to audio objects and block packet generation. + pub(crate) commands: UnboundedReceiver, + + /// Handle for safe control of this audio track from other threads. + /// + /// Typically, this is used by internal code to supply context information + /// to event handlers, though more may be cloned from this handle. 
+ pub handle: TrackHandle, + + /// Count of remaining loops. + pub loops: LoopState, +} + +impl Track { + /// Create a new track directly from an input, command source, + /// and handle. + /// + /// In general, you should probably use [`create_player`]. + /// + /// [`create_player`]: fn.create_player.html + pub fn new_raw( + source: Input, + commands: UnboundedReceiver, + handle: TrackHandle, + ) -> Self { + Self { + playing: Default::default(), + volume: 1.0, + source, + position: Default::default(), + play_time: Default::default(), + events: Some(EventStore::new_local()), + commands, + handle, + loops: LoopState::Finite(0), + } + } + + /// Sets a track to playing if it is paused. + pub fn play(&mut self) -> &mut Self { + self.set_playing(PlayMode::Play) + } + + /// Pauses a track if it is playing. + pub fn pause(&mut self) -> &mut Self { + self.set_playing(PlayMode::Pause) + } + + /// Manually stops a track. + /// + /// This will cause the audio track to be removed, with any relevant events triggered. + /// Stopped/ended tracks cannot be restarted. + pub fn stop(&mut self) -> &mut Self { + self.set_playing(PlayMode::Stop) + } + + pub(crate) fn end(&mut self) -> &mut Self { + self.set_playing(PlayMode::End) + } + + #[inline] + fn set_playing(&mut self, new_state: PlayMode) -> &mut Self { + self.playing = self.playing.change_to(new_state); + + self + } + + /// Returns the current play status of this track. + pub fn playing(&self) -> PlayMode { + self.playing + } + + /// Sets [`volume`] in a manner that allows method chaining. + /// + /// [`volume`]: #structfield.volume + pub fn set_volume(&mut self, volume: f32) -> &mut Self { + self.volume = volume; + + self + } + + /// Returns the current playback position. + pub fn volume(&self) -> f32 { + self.volume + } + + /// Returns the current playback position. + pub fn position(&self) -> Duration { + self.position + } + + /// Returns the total length of time this track has been active. + pub fn play_time(&self) -> Duration { + self.play_time + } + + /// Sets [`loops`] in a manner that allows method chaining. + /// + /// [`loops`]: #structfield.loops + pub fn set_loops(&mut self, loops: LoopState) -> &mut Self { + self.loops = loops; + self + } + + pub(crate) fn do_loop(&mut self) -> bool { + match self.loops { + LoopState::Infinite => true, + LoopState::Finite(0) => false, + LoopState::Finite(ref mut n) => { + *n -= 1; + true + }, + } + } + + /// Steps playback location forward by one frame. + pub(crate) fn step_frame(&mut self) { + self.position += TIMESTEP_LENGTH; + self.play_time += TIMESTEP_LENGTH; + } + + /// Receives and acts upon any commands forwarded by [`TrackHandle`]s. + /// + /// *Used internally*, this should not be exposed to users. + /// + /// [`TrackHandle`]: struct.TrackHandle.html + pub(crate) fn process_commands(&mut self, index: usize, ic: &Interconnect) { + // Note: disconnection and an empty channel are both valid, + // and should allow the audio object to keep running as intended. + + // Note that interconnect failures are not currently errors. + // In correct operation, the event thread should never panic, + // but it receiving status updates is secondary do actually + // doing the work. 
+ loop { + match self.commands.try_recv() { + Ok(cmd) => { + use TrackCommand::*; + match cmd { + Play => { + self.play(); + let _ = ic.events.send(EventMessage::ChangeState( + index, + TrackStateChange::Mode(self.playing), + )); + }, + Pause => { + self.pause(); + let _ = ic.events.send(EventMessage::ChangeState( + index, + TrackStateChange::Mode(self.playing), + )); + }, + Stop => { + self.stop(); + let _ = ic.events.send(EventMessage::ChangeState( + index, + TrackStateChange::Mode(self.playing), + )); + }, + Volume(vol) => { + self.set_volume(vol); + let _ = ic.events.send(EventMessage::ChangeState( + index, + TrackStateChange::Volume(self.volume), + )); + }, + Seek(time) => { + self.seek_time(time); + let _ = ic.events.send(EventMessage::ChangeState( + index, + TrackStateChange::Position(self.position), + )); + }, + AddEvent(evt) => { + let _ = ic.events.send(EventMessage::AddTrackEvent(index, evt)); + }, + Do(action) => { + action(self); + let _ = ic.events.send(EventMessage::ChangeState( + index, + TrackStateChange::Total(self.state()), + )); + }, + Request(tx) => { + let _ = tx.send(Box::new(self.state())); + }, + Loop(loops) => { + self.set_loops(loops); + let _ = ic.events.send(EventMessage::ChangeState( + index, + TrackStateChange::Loops(self.loops, true), + )); + }, + } + }, + Err(TryRecvError::Closed) => { + // this branch will never be visited. + break; + }, + Err(TryRecvError::Empty) => { + break; + }, + } + } + } + + /// Creates a read-only copy of the audio track's state. + /// + /// The primary use-case of this is sending information across + /// threads in response to a [`TrackHandle`]. + /// + /// [`TrackHandle`]: struct.TrackHandle.html + pub fn state(&self) -> TrackState { + TrackState { + playing: self.playing, + volume: self.volume, + position: self.position, + play_time: self.play_time, + loops: self.loops, + } + } + + /// Seek to a specific point in the track. + /// + /// Returns `None` if unsupported. + pub fn seek_time(&mut self, pos: Duration) -> Option { + let out = self.source.seek_time(pos); + + if let Some(t) = out { + self.position = t; + } + + out + } +} + +/// Creates a [`Track`] object to pass into the audio context, and a [`TrackHandle`] +/// for safe, lock-free access in external code. +/// +/// Typically, this would be used if you wished to directly work on or configure +/// the [`Track`] object before it is passed over to the driver. +/// +/// [`Track`]: struct.Track.html +/// [`TrackHandle`]: struct.TrackHandle.html +pub fn create_player(source: Input) -> (Track, TrackHandle) { + let (tx, rx) = mpsc::unbounded_channel(); + let can_seek = source.is_seekable(); + let player = Track::new_raw(source, rx, TrackHandle::new(tx.clone(), can_seek)); + + (player, TrackHandle::new(tx, can_seek)) +} + +/// Alias for most result-free calls to a [`TrackHandle`]. +/// +/// Failure indicates that the accessed audio object has been +/// removed or deleted by the audio context. +/// +/// [`TrackHandle`]: struct.TrackHandle.html +pub type TrackResult = Result<(), SendError>; + +/// Alias for return value from calls to [`TrackHandle::get_info`]. +/// +/// Crucially, the audio thread will respond *at a later time*: +/// It is up to the user when or how this should be read from the returned channel. +/// +/// Failure indicates that the accessed audio object has been +/// removed or deleted by the audio context. 
+///
+/// [`TrackHandle::get_info`]: struct.TrackHandle.html#method.get_info
+pub type TrackQueryResult = Result<OneshotReceiver<Box<TrackState>>, SendError<TrackCommand>>;
diff --git a/src/tracks/mode.rs b/src/tracks/mode.rs
new file mode 100644
index 0000000..80dd101
--- /dev/null
+++ b/src/tracks/mode.rs
@@ -0,0 +1,37 @@
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+/// Playback status of a track.
+pub enum PlayMode {
+    /// The track is currently playing.
+    Play,
+    /// The track is currently paused, and may be resumed.
+    Pause,
+    /// The track has been manually stopped, and cannot be restarted.
+    Stop,
+    /// The track has naturally ended, and cannot be restarted.
+    End,
+}
+
+impl PlayMode {
+    /// Returns whether the track has irreversibly stopped.
+    pub fn is_done(self) -> bool {
+        matches!(self, PlayMode::Stop | PlayMode::End)
+    }
+
+    pub(crate) fn change_to(self, other: Self) -> PlayMode {
+        use PlayMode::*;
+
+        // Idea: a finished track cannot be restarted -- this action is final.
+        // We may want to change this in future so that seekable tracks can uncancel
+        // themselves, perhaps, but this requires a bit more machinery to re-add...
+        match self {
+            Play | Pause => other,
+            state => state,
+        }
+    }
+}
+
+impl Default for PlayMode {
+    fn default() -> Self {
+        PlayMode::Play
+    }
+}
diff --git a/src/tracks/queue.rs b/src/tracks/queue.rs
new file mode 100644
index 0000000..349bc3d
--- /dev/null
+++ b/src/tracks/queue.rs
@@ -0,0 +1,213 @@
+use crate::{
+    driver::Driver,
+    events::{Event, EventContext, EventData, EventHandler, TrackEvent},
+    input::Input,
+    tracks::{self, Track, TrackHandle, TrackResult},
+};
+use async_trait::async_trait;
+use parking_lot::Mutex;
+use std::{collections::VecDeque, sync::Arc};
+use tracing::{info, warn};
+
+#[derive(Default)]
+/// A simple queue for several audio sources, designed to
+/// play in sequence.
+///
+/// This makes use of [`TrackEvent`]s to determine when the current
+/// song or audio file has finished before playing the next entry.
+///
+/// `examples/e16_voice_events` demonstrates how a user might manage,
+/// track and use this to run a song queue in many guilds in parallel.
+/// This code is trivial to extend if extra functionality is needed.
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use songbird::{
+///     driver::Driver,
+///     id::GuildId,
+///     ffmpeg,
+///     tracks::{create_player, TrackQueue},
+/// };
+/// use std::collections::HashMap;
+///
+/// # async {
+/// let guild = GuildId(0);
+/// // A Call is also valid here!
+/// let mut driver: Driver = Default::default();
+///
+/// let mut queues: HashMap<GuildId, TrackQueue> = Default::default();
+///
+/// let source = ffmpeg("../audio/my-favourite-song.mp3")
+///     .await
+///     .expect("This might fail: handle this error!");
+///
+/// // We need to ensure that this guild has a TrackQueue created for it.
+/// let queue = queues.entry(guild)
+///     .or_default();
+///
+/// // Queueing a track is this easy!
+/// queue.add_source(source, &mut driver);
+/// # };
+/// ```
+
+///
+/// [`TrackEvent`]: ../events/enum.TrackEvent.html
+pub struct TrackQueue {
+    // NOTE: the choice of a parking lot mutex is quite deliberate
+    inner: Arc<Mutex<TrackQueueCore>>,
+}
+
+#[derive(Default)]
+/// Inner portion of a [`TrackQueue`].
+///
+/// This abstracts away thread-safety from the user,
+/// and offers a convenient location to store further state if required.
+///
+/// [`TrackQueue`]: struct.TrackQueue.html
+struct TrackQueueCore {
+    tracks: VecDeque<TrackHandle>,
+}
+
+struct QueueHandler {
+    remote_lock: Arc<Mutex<TrackQueueCore>>,
+}
+
+#[async_trait]
+impl EventHandler for QueueHandler {
+    async fn act(&self, ctx: &EventContext<'_>) -> Option<Event> {
+        let mut inner = self.remote_lock.lock();
+        let _old = inner.tracks.pop_front();
+
+        info!("Queued track ended: {:?}.", ctx);
+        info!("{} tracks remain.", inner.tracks.len());
+
+        // If any audio files die unexpectedly, then keep going until we
+        // find one which works, or we run out.
+        let mut keep_looking = true;
+        while keep_looking && !inner.tracks.is_empty() {
+            if let Some(new) = inner.tracks.front() {
+                keep_looking = new.play().is_err();
+
+                // Discard files which cannot be used for whatever reason.
+                if keep_looking {
+                    warn!("Track in Queue couldn't be played...");
+                    let _ = inner.tracks.pop_front();
+                }
+            }
+        }
+
+        None
+    }
+}
+
+impl TrackQueue {
+    /// Create a new, empty, track queue.
+    pub fn new() -> Self {
+        Self {
+            inner: Arc::new(Mutex::new(TrackQueueCore {
+                tracks: VecDeque::new(),
+            })),
+        }
+    }
+
+    /// Adds an audio source to the queue, to be played in the channel managed by `handler`.
+    pub fn add_source(&self, source: Input, handler: &mut Driver) {
+        let (audio, audio_handle) = tracks::create_player(source);
+        self.add(audio, audio_handle, handler);
+    }
+
+    /// Adds a [`Track`] object to the queue, to be played in the channel managed by `handler`.
+    ///
+    /// This is used with [`voice::create_player`] if additional configuration or event handlers
+    /// are required before enqueueing the audio track.
+    ///
+    /// [`Track`]: struct.Track.html
+    /// [`voice::create_player`]: fn.create_player.html
+    pub fn add(&self, mut track: Track, track_handle: TrackHandle, handler: &mut Driver) {
+        info!("Track added to queue.");
+        let remote_lock = self.inner.clone();
+        let mut inner = self.inner.lock();
+
+        if !inner.tracks.is_empty() {
+            track.pause();
+        }
+
+        track
+            .events
+            .as_mut()
+            .expect("Queue inspecting EventStore on new Track: did not exist.")
+            .add_event(
+                EventData::new(Event::Track(TrackEvent::End), QueueHandler { remote_lock }),
+                track.position,
+            );
+
+        handler.play(track);
+        inner.tracks.push_back(track_handle);
+    }
+
+    /// Returns the number of tracks currently in the queue.
+    pub fn len(&self) -> usize {
+        let inner = self.inner.lock();
+
+        inner.tracks.len()
+    }
+
+    /// Returns whether there are no tracks currently in the queue.
+    pub fn is_empty(&self) -> bool {
+        let inner = self.inner.lock();
+
+        inner.tracks.is_empty()
+    }
+
+    /// Pause the track at the head of the queue.
+    pub fn pause(&self) -> TrackResult {
+        let inner = self.inner.lock();
+
+        if let Some(handle) = inner.tracks.front() {
+            handle.pause()
+        } else {
+            Ok(())
+        }
+    }
+
+    /// Resume the track at the head of the queue.
+    pub fn resume(&self) -> TrackResult {
+        let inner = self.inner.lock();
+
+        if let Some(handle) = inner.tracks.front() {
+            handle.play()
+        } else {
+            Ok(())
+        }
+    }
+
+    /// Stop the currently playing track and clear the queue.
+    pub fn stop(&self) -> TrackResult {
+        let mut inner = self.inner.lock();
+
+        let out = inner.stop_current();
+
+        inner.tracks.clear();
+
+        out
+    }
+
+    /// Skip to the next track in the queue, if it exists.
+    pub fn skip(&self) -> TrackResult {
+        let inner = self.inner.lock();
+
+        inner.stop_current()
+    }
+}
+
+impl TrackQueueCore {
+    /// Skip to the next track in the queue, if it exists.
+    fn stop_current(&self) -> TrackResult {
+        if let Some(handle) = self.tracks.front() {
+            handle.stop()
+        } else {
+            Ok(())
+        }
+    }
+}
diff --git a/src/tracks/state.rs b/src/tracks/state.rs
new file mode 100644
index 0000000..b0650fb
--- /dev/null
+++ b/src/tracks/state.rs
@@ -0,0 +1,31 @@
+use super::*;
+
+/// State of a [`Track`] object, designed to be passed to event handlers
+/// and retrieved remotely via [`TrackHandle::get_info`] or
+/// [`TrackHandle::get_info_blocking`].
+///
+/// [`Track`]: struct.Track.html
+/// [`TrackHandle::get_info`]: struct.TrackHandle.html#method.get_info
+/// [`TrackHandle::get_info_blocking`]: struct.TrackHandle.html#method.get_info_blocking
+#[derive(Copy, Clone, Debug, Default, PartialEq)]
+pub struct TrackState {
+    /// Play status (e.g., active, paused, stopped) of this track.
+    pub playing: PlayMode,
+    /// Current volume of this track.
+    pub volume: f32,
+    /// Current playback position in the source.
+    ///
+    /// This is altered by loops and seeks.
+    pub position: Duration,
+    /// Total playback time, increasing monotonically.
+    pub play_time: Duration,
+    /// Remaining loops on this track.
+    pub loops: LoopState,
+}
+
+impl TrackState {
+    pub(crate) fn step_frame(&mut self) {
+        self.position += TIMESTEP_LENGTH;
+        self.play_time += TIMESTEP_LENGTH;
+    }
+}
diff --git a/src/ws.rs b/src/ws.rs
new file mode 100644
index 0000000..f0100e2
--- /dev/null
+++ b/src/ws.rs
@@ -0,0 +1,208 @@
+// FIXME: this is copied from serenity/src/internal/ws_impl.rs
+// To prevent this duplication, we either need to expose this on serenity's API
+// (not desirable) or break the common WS elements into a subcrate.
+// I believe that decision is outside the scope of the voice subcrate PR.
+
+use crate::model::Event;
+
+use async_trait::async_trait;
+use async_tungstenite::{
+    tokio::ConnectStream,
+    tungstenite::{error::Error as TungsteniteError, protocol::CloseFrame, Message},
+    WebSocketStream,
+};
+use futures::{SinkExt, StreamExt, TryStreamExt};
+use serde_json::Error as JsonError;
+use tokio::time::timeout;
+use tracing::{instrument, warn};
+
+pub type WsStream = WebSocketStream<ConnectStream>;
+
+pub type Result<T> = std::result::Result<T, Error>;
+
+#[derive(Debug)]
+pub enum Error {
+    Json(JsonError),
+    #[cfg(all(feature = "rustls", not(feature = "native")))]
+    Tls(RustlsError),
+
+    /// The Discord voice gateway does not support or offer zlib compression.
+    /// As a result, only text messages are expected.
+    UnexpectedBinaryMessage(Vec<u8>),
+
+    Ws(TungsteniteError),
+
+    WsClosed(Option<CloseFrame<'static>>),
+}
+
+impl From<JsonError> for Error {
+    fn from(e: JsonError) -> Error {
+        Error::Json(e)
+    }
+}
+
+#[cfg(all(feature = "rustls", not(feature = "native")))]
+impl From<RustlsError> for Error {
+    fn from(e: RustlsError) -> Error {
+        Error::Tls(e)
+    }
+}
+
+impl From<TungsteniteError> for Error {
+    fn from(e: TungsteniteError) -> Error {
+        Error::Ws(e)
+    }
+}
+
+use futures::stream::SplitSink;
+#[cfg(all(feature = "rustls", not(feature = "native")))]
+use std::{
+    error::Error as StdError,
+    fmt::{Display, Formatter, Result as FmtResult},
+    io::Error as IoError,
+};
+use url::Url;
+
+#[async_trait]
+pub trait ReceiverExt {
+    async fn recv_json(&mut self) -> Result<Option<Event>>;
+    async fn recv_json_no_timeout(&mut self) -> Result<Option<Event>>;
+}
+
+#[async_trait]
+pub trait SenderExt {
+    async fn send_json(&mut self, value: &Event) -> Result<()>;
+}
+
+#[async_trait]
+impl ReceiverExt for WsStream {
+    async fn recv_json(&mut self) -> Result<Option<Event>> {
+        const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_millis(500);
+
+        let ws_message = match timeout(TIMEOUT, self.next()).await {
+            Ok(Some(Ok(v))) => Some(v),
+            Ok(Some(Err(e))) => return Err(e.into()),
+            Ok(None) | Err(_) => None,
+        };
+
+        convert_ws_message(ws_message)
+    }
+
+    async fn recv_json_no_timeout(&mut self) -> Result<Option<Event>> {
+        convert_ws_message(self.try_next().await.ok().flatten())
+    }
+}
+
+#[async_trait]
+impl SenderExt for SplitSink<WsStream, Message> {
+    async fn send_json(&mut self, value: &Event) -> Result<()> {
+        Ok(serde_json::to_string(value)
+            .map(Message::Text)
+            .map_err(Error::from)
+            .map(|m| self.send(m))?
+            .await?)
+    }
+}
+
+#[async_trait]
+impl SenderExt for WsStream {
+    async fn send_json(&mut self, value: &Event) -> Result<()> {
+        Ok(serde_json::to_string(value)
+            .map(Message::Text)
+            .map_err(Error::from)
+            .map(|m| self.send(m))?
+            .await?)
+    }
+}
+
+#[inline]
+pub(crate) fn convert_ws_message(message: Option<Message>) -> Result<Option<Event>> {
+    Ok(match message {
+        Some(Message::Text(payload)) =>
+            serde_json::from_str(&payload).map(Some).map_err(|why| {
+                warn!("Err deserializing text: {:?}; text: {}", why, payload,);
+
+                why
+            })?,
+        Some(Message::Binary(bytes)) => {
+            return Err(Error::UnexpectedBinaryMessage(bytes));
+        },
+        Some(Message::Close(Some(frame))) => {
+            return Err(Error::WsClosed(Some(frame)));
+        },
+        // Ping/Pong message behaviour is internally handled by tungstenite.
+        _ => None,
+    })
+}
+
+/// An error that occurred while connecting over rustls
+#[derive(Debug)]
+#[non_exhaustive]
+#[cfg(all(feature = "rustls", not(feature = "native")))]
+pub enum RustlsError {
+    /// An error with the handshake in tungstenite
+    HandshakeError,
+    /// Standard IO error happening while creating the tcp stream
+    Io(IoError),
+}
+
+#[cfg(all(feature = "rustls", not(feature = "native")))]
+impl From<IoError> for RustlsError {
+    fn from(e: IoError) -> Self {
+        RustlsError::Io(e)
+    }
+}
+
+#[cfg(all(feature = "rustls", not(feature = "native")))]
+impl Display for RustlsError {
+    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
+        match self {
+            RustlsError::HandshakeError =>
+                f.write_str("TLS handshake failed when making the websocket connection"),
+            RustlsError::Io(inner) => Display::fmt(&inner, f),
+        }
+    }
+}
+
+#[cfg(all(feature = "rustls", not(feature = "native")))]
+impl StdError for RustlsError {
+    fn source(&self) -> Option<&(dyn StdError + 'static)> {
+        match self {
+            RustlsError::Io(inner) => Some(inner),
+            _ => None,
+        }
    }
+}
+
+#[cfg(all(feature = "rustls", not(feature = "native")))]
+#[instrument]
+pub(crate) async fn create_rustls_client(url: Url) -> Result<WsStream> {
+    let (stream, _) = async_tungstenite::tokio::connect_async_with_config(
+        url,
+        Some(async_tungstenite::tungstenite::protocol::WebSocketConfig {
+            max_message_size: None,
+            max_frame_size: None,
+            max_send_queue: None,
+        }),
+    )
+    .await
+    .map_err(|_| RustlsError::HandshakeError)?;
+
+    Ok(stream)
+}
+
+#[cfg(feature = "native")]
+#[instrument]
+pub(crate) async fn create_native_tls_client(url: Url) -> Result<WsStream> {
+    let (stream, _) = async_tungstenite::tokio::connect_async_with_config(
+        url,
+        Some(async_tungstenite::tungstenite::protocol::WebSocketConfig {
+            max_message_size: None,
+            max_frame_size: None,
+            max_send_queue: None,
+        }),
+    )
+    .await?;
+
+    Ok(stream)
+}
diff --git a/utils/Cargo.toml b/utils/Cargo.toml
new file mode 100644
index 0000000..9bb2698
--- /dev/null
+++ b/utils/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "utils"
+version = "0.1.0"
+authors = ["Kyle Simpson "]
+edition = "2018"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+byteorder = "1"
diff --git a/utils/README.md b/utils/README.md
new file mode 100644
index 0000000..fcb910f
--- /dev/null
+++ b/utils/README.md
@@ -0,0 +1 @@
+Utilities for testing and benchmarking songbird.
diff --git a/utils/src/lib.rs b/utils/src/lib.rs
new file mode 100644
index 0000000..35bcf34
--- /dev/null
+++ b/utils/src/lib.rs
@@ -0,0 +1,67 @@
+use byteorder::{LittleEndian, WriteBytesExt};
+use std::mem;
+
+pub fn make_sine(float_len: usize, stereo: bool) -> Vec<u8> {
+    let sample_len = mem::size_of::<f32>();
+    let byte_len = float_len * sample_len;
+
+    // set period to 100 samples == 480Hz sine.
+
+    let mut out = vec![0u8; byte_len];
+    let mut byte_slice = &mut out[..];
+
+    for i in 0..float_len {
+        let x_val = (i as f32) * std::f32::consts::PI / 50.0;
+        byte_slice.write_f32::<LittleEndian>(x_val.sin()).unwrap();
+    }
+
+    if stereo {
+        let mut new_out = vec![0u8; byte_len * 2];
+
+        for (mono_chunk, stereo_chunk) in out[..]
+            .chunks(sample_len)
+            .zip(new_out[..].chunks_mut(2 * sample_len))
+        {
+            stereo_chunk[..sample_len].copy_from_slice(mono_chunk);
+            stereo_chunk[sample_len..].copy_from_slice(mono_chunk);
+        }
+
+        new_out
+    } else {
+        out
+    }
+}
+
+pub fn make_pcm_sine(i16_len: usize, stereo: bool) -> Vec<u8> {
+    let sample_len = mem::size_of::<i16>();
+    let byte_len = i16_len * sample_len;
+
+    // set period to 100 samples == 480Hz sine.
+    // amplitude = 10_000
+
+    let mut out = vec![0u8; byte_len];
+    let mut byte_slice = &mut out[..];
+
+    for i in 0..i16_len {
+        let x_val = (i as f32) * std::f32::consts::PI / 50.0;
+        byte_slice
+            .write_i16::<LittleEndian>((x_val.sin() * 10_000.0) as i16)
+            .unwrap();
+    }
+
+    if stereo {
+        let mut new_out = vec![0u8; byte_len * 2];
+
+        for (mono_chunk, stereo_chunk) in out[..]
+            .chunks(sample_len)
+            .zip(new_out[..].chunks_mut(2 * sample_len))
+        {
+            stereo_chunk[..sample_len].copy_from_slice(mono_chunk);
+            stereo_chunk[sample_len..].copy_from_slice(mono_chunk);
+        }
+
+        new_out
+    } else {
+        out
+    }
+}
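
As a quick sanity check on the `utils` helpers above, a benchmark or test might generate one frame of test audio and confirm the signal stays in range before feeding it to the mixer. The following is a minimal sketch, not part of the patch itself: it assumes the `utils` crate and its `byteorder` dependency, and `MONO_FRAME_SIZE` is a hypothetical stand-in for one 20 ms frame of 48 kHz audio.

```rust,no_run
use byteorder::{LittleEndian, ReadBytesExt};
use std::io::Cursor;

// Assumption: one 20 ms frame of mono 48 kHz audio (960 samples).
const MONO_FRAME_SIZE: usize = 960;

fn main() {
    // One frame of stereo f32 test tone, as raw little-endian bytes.
    let bytes = utils::make_sine(MONO_FRAME_SIZE, true);

    // Read the samples back and check that the sine stays within [-1.0, 1.0].
    let mut cursor = Cursor::new(&bytes);
    let mut peak = 0.0f32;
    while let Ok(sample) = cursor.read_f32::<LittleEndian>() {
        peak = peak.max(sample.abs());
    }

    assert!(peak <= 1.0);
    println!("{} samples, peak amplitude {}", bytes.len() / 4, peak);
}
```

The same pattern applies to `make_pcm_sine`, reading the samples back with `read_i16::<LittleEndian>` instead.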