Compare commits

6 commits: release-20...release-20

| Author | SHA1 | Date |
|---|---|---|
|  | d15392e148 |  |
|  | f3121898e2 |  |
|  | e1370db6ce |  |
|  | 1432dec828 |  |
|  | 1bb3475d61 |  |
|  | cffc88443a |  |
Cargo.lock (generated) · 33 changes
```diff
@@ -310,7 +310,6 @@ dependencies = [
  "serde_json",
  "shared",
  "sqlx",
- "strum",
  "thiserror 2.0.18",
  "time",
  "tokio",
@@ -545,7 +544,7 @@ version = "4.5.55"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
 dependencies = [
- "heck 0.5.0",
+ "heck",
  "proc-macro2",
  "quote",
  "syn 2.0.114",
@@ -1538,12 +1537,6 @@ dependencies = [
  "hashbrown 0.15.5",
 ]
 
-[[package]]
-name = "heck"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
-
 [[package]]
 name = "heck"
 version = "0.5.0"
@@ -3711,7 +3704,7 @@ checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
 dependencies = [
  "dotenvy",
  "either",
- "heck 0.5.0",
+ "heck",
  "hex",
  "once_cell",
  "proc-macro2",
@@ -3854,28 +3847,6 @@ version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
 
-[[package]]
-name = "strum"
-version = "0.25.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125"
-dependencies = [
- "strum_macros",
-]
-
-[[package]]
-name = "strum_macros"
-version = "0.25.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0"
-dependencies = [
- "heck 0.4.1",
- "proc-macro2",
- "quote",
- "rustversion",
- "syn 2.0.114",
-]
-
 [[package]]
 name = "subtle"
 version = "2.6.1"
```
```diff
@@ -42,4 +42,3 @@ anyhow = "1.0.101"
 time = { version = "0.3.47", features = ["serde", "formatting", "parsing"] }
 tower_governor = "0.8.0"
 governor = "0.10.4"
-strum = { version = "0.25", features = ["derive", "strum_macros"] }
```
```diff
@@ -45,6 +45,7 @@ pub struct AppState {
     pub db: db::Db,
     #[cfg(feature = "push-notifications")]
     pub push_store: push::PushSubscriptionStore,
+    pub notify_poll: Arc<tokio::sync::Notify>,
 }
 
 async fn auth_middleware(
@@ -336,6 +337,8 @@ async fn main() {
     #[cfg(not(feature = "push-notifications"))]
     let push_store = ();
 
+    let notify_poll = Arc::new(tokio::sync::Notify::new());
+
     let app_state = AppState {
         tx: tx.clone(),
         event_bus: event_bus.clone(),
@@ -343,6 +346,7 @@ async fn main() {
         db: db.clone(),
         #[cfg(feature = "push-notifications")]
         push_store,
+        notify_poll: notify_poll.clone(),
     };
 
     // Spawn background task to poll rTorrent
@@ -351,6 +355,7 @@ async fn main() {
     let socket_path = args.socket.clone(); // Clone for background task
     #[cfg(feature = "push-notifications")]
     let push_store_clone = app_state.push_store.clone();
+    let notify_poll_clone = notify_poll.clone();
 
     tokio::spawn(async move {
         let client = xmlrpc::RtorrentClient::new(&socket_path);
@@ -359,6 +364,14 @@ async fn main() {
         let mut backoff_duration = Duration::from_secs(1);
 
         loop {
+            // Determine polling interval based on active clients
+            let active_clients = event_bus_tx.receiver_count();
+            let loop_interval = if active_clients > 0 {
+                Duration::from_secs(1)
+            } else {
+                Duration::from_secs(30)
+            };
+
             // 1. Fetch Torrents
             let torrents_result = sse::fetch_torrents(&client).await;
 
@@ -429,6 +442,14 @@ async fn main() {
                     }
 
                     previous_torrents = new_torrents;
+
+                    // Success case: wait for the determined interval OR a wakeup notification
+                    tokio::select! {
+                        _ = tokio::time::sleep(loop_interval) => {},
+                        _ = notify_poll_clone.notified() => {
+                            tracing::debug!("Background loop awakened by new client connection");
+                        }
+                    }
                 }
                 Err(e) => {
                     tracing::error!("Error fetching torrents in background: {}", e);
@@ -449,20 +470,15 @@ async fn main() {
                         "Backoff: Sleeping for {:?} due to rTorrent error.",
                         backoff_duration
                     );
+
+                    tokio::time::sleep(backoff_duration).await;
                 }
             }
 
             // Handle Stats
-            match stats_result {
-                Ok(stats) => {
+            if let Ok(stats) = stats_result {
                 let _ = event_bus_tx.send(AppEvent::Stats(stats));
             }
-                Err(e) => {
-                    tracing::warn!("Error fetching global stats: {}", e);
-                }
-            }
-
-            tokio::time::sleep(backoff_duration).await;
         }
     });
 
```
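The loop above now chooses its sleep from the SSE subscriber count and lets a `tokio::sync::Notify` cut that sleep short when a client connects. A minimal, self-contained sketch of the same pattern (the `u64` bus payload and the placeholder poll step are simplified stand-ins, not the project's real types):

```rust
use std::{sync::Arc, time::Duration};
use tokio::sync::{broadcast, Notify};

#[tokio::main]
async fn main() {
    // Stand-in for the event bus; only receiver_count() matters here.
    let (event_bus_tx, _keep_alive_rx) = broadcast::channel::<u64>(16);
    let notify_poll = Arc::new(Notify::new());

    let tx_bg = event_bus_tx.clone();
    let notify_bg = notify_poll.clone();
    tokio::spawn(async move {
        loop {
            // Poll every second while at least one client listens, else back off to 30s.
            let loop_interval = if tx_bg.receiver_count() > 0 {
                Duration::from_secs(1)
            } else {
                Duration::from_secs(30)
            };

            // ... fetch torrents/stats and publish them on the bus here ...

            // Sleep for the chosen interval, but wake early if notified.
            tokio::select! {
                _ = tokio::time::sleep(loop_interval) => {}
                _ = notify_bg.notified() => {}
            }
        }
    });

    // An incoming SSE connection triggers an immediate poll like this.
    notify_poll.notify_one();
    tokio::time::sleep(Duration::from_millis(50)).await;
}
```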
```diff
@@ -11,6 +11,8 @@ pub enum ScgiError {
     #[allow(dead_code)]
     #[error("Protocol Error: {0}")]
     Protocol(String),
+    #[error("Timeout: SCGI request took too long")]
+    Timeout,
 }
 
 pub struct ScgiRequest {
@@ -78,20 +80,30 @@ impl ScgiRequest {
 }
 
 pub async fn send_request(socket_path: &str, request: ScgiRequest) -> Result<Bytes, ScgiError> {
+    let perform_request = async {
         let mut stream = UnixStream::connect(socket_path).await?;
         let data = request.encode();
         stream.write_all(&data).await?;
 
         let mut response = Vec::new();
         stream.read_to_end(&mut response).await?;
+        Ok::<Vec<u8>, std::io::Error>(response)
+    };
+
+    let response = tokio::time::timeout(std::time::Duration::from_secs(10), perform_request)
+        .await
+        .map_err(|_| ScgiError::Timeout)??;
 
     let double_newline = b"\r\n\r\n";
-    if let Some(pos) = response
+    let mut response_vec = response;
+    if let Some(pos) = response_vec
         .windows(double_newline.len())
         .position(|window| window == double_newline)
     {
-        Ok(Bytes::from(response.split_off(pos + double_newline.len())))
+        Ok(Bytes::from(
+            response_vec.split_off(pos + double_newline.len()),
+        ))
     } else {
-        Ok(Bytes::from(response))
+        Ok(Bytes::from(response_vec))
     }
 }
```
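The `send_request` change wraps the existing connect/write/read sequence in `tokio::time::timeout`, and the double `?` first unwraps the timeout result (its elapsed error mapped to the new `Timeout` variant) and then the inner `io::Result`. A compact sketch of just that error-handling shape, with a stub error type standing in for the crate's real `ScgiError` (the `Io` variant with `#[from]` is an assumption, simplified from what the original `?` usage implies):

```rust
use std::time::Duration;

// Stub error type mirroring the shape assumed for the crate's ScgiError.
#[derive(Debug, thiserror::Error)]
enum ScgiError {
    #[error("IO Error: {0}")]
    Io(#[from] std::io::Error),
    #[error("Timeout: SCGI request took too long")]
    Timeout,
}

async fn send_request_with_timeout() -> Result<Vec<u8>, ScgiError> {
    // Stand-in for the real connect/write_all/read_to_end sequence.
    let perform_request = async {
        Ok::<Vec<u8>, std::io::Error>(b"HTTP/1.0 200 OK\r\n\r\nbody".to_vec())
    };

    // Outer `?` handles the elapsed-timeout error (mapped to ScgiError::Timeout);
    // inner `?` propagates any io::Error via the #[from] conversion.
    let response = tokio::time::timeout(Duration::from_secs(10), perform_request)
        .await
        .map_err(|_| ScgiError::Timeout)??;

    Ok(response)
}

#[tokio::main]
async fn main() -> Result<(), ScgiError> {
    let bytes = send_request_with_timeout().await?;
    println!("read {} bytes", bytes.len());
    Ok(())
}
```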
```diff
@@ -7,80 +7,95 @@ use axum::response::sse::{Event, Sse};
 use futures::stream::{self, Stream};
 use shared::{AppEvent, GlobalStats, Torrent, TorrentStatus};
 use std::convert::Infallible;
-use strum::{Display, EnumString};
 use tokio_stream::StreamExt;
 
-#[derive(Debug, Clone, Copy, EnumString, Display, PartialEq)]
-enum RtorrentField {
-    #[strum(serialize = "d.hash=")]
-    Hash,
-    #[strum(serialize = "d.name=")]
-    Name,
-    #[strum(serialize = "d.size_bytes=")]
-    Size,
-    #[strum(serialize = "d.bytes_done=")]
-    Completed,
-    #[strum(serialize = "d.down.rate=")]
-    DownRate,
-    #[strum(serialize = "d.up.rate=")]
-    UpRate,
-    #[strum(serialize = "d.state=")]
-    State,
-    #[strum(serialize = "d.complete=")]
-    Complete,
-    #[strum(serialize = "d.message=")]
-    Message,
-    #[strum(serialize = "d.left_bytes=")]
-    LeftBytes,
-    #[strum(serialize = "d.creation_date=")]
-    CreationDate,
-    #[strum(serialize = "d.hashing=")]
-    Hashing,
-    #[strum(serialize = "d.custom1=")]
-    Label,
+// Field definitions to keep query and parser in sync
+mod fields {
+    pub const IDX_HASH: usize = 0;
+    pub const CMD_HASH: &str = "d.hash=";
+
+    pub const IDX_NAME: usize = 1;
+    pub const CMD_NAME: &str = "d.name=";
+
+    pub const IDX_SIZE: usize = 2;
+    pub const CMD_SIZE: &str = "d.size_bytes=";
+
+    pub const IDX_COMPLETED: usize = 3;
+    pub const CMD_COMPLETED: &str = "d.bytes_done=";
+
+    pub const IDX_DOWN_RATE: usize = 4;
+    pub const CMD_DOWN_RATE: &str = "d.down.rate=";
+
+    pub const IDX_UP_RATE: usize = 5;
+    pub const CMD_UP_RATE: &str = "d.up.rate=";
+
+    pub const IDX_STATE: usize = 6;
+    pub const CMD_STATE: &str = "d.state=";
+
+    pub const IDX_COMPLETE: usize = 7;
+    pub const CMD_COMPLETE: &str = "d.complete=";
+
+    pub const IDX_MESSAGE: usize = 8;
+    pub const CMD_MESSAGE: &str = "d.message=";
+
+    pub const IDX_LEFT_BYTES: usize = 9;
+    pub const CMD_LEFT_BYTES: &str = "d.left_bytes=";
+
+    pub const IDX_CREATION_DATE: usize = 10;
+    pub const CMD_CREATION_DATE: &str = "d.creation_date=";
+
+    pub const IDX_HASHING: usize = 11;
+    pub const CMD_HASHING: &str = "d.hashing=";
+
+    pub const IDX_LABEL: usize = 12;
+    pub const CMD_LABEL: &str = "d.custom1=";
 }
 
-const RTORRENT_FIELDS: &[RtorrentField] = &[
-    RtorrentField::Hash,
-    RtorrentField::Name,
-    RtorrentField::Size,
-    RtorrentField::Completed,
-    RtorrentField::DownRate,
-    RtorrentField::UpRate,
-    RtorrentField::State,
-    RtorrentField::Complete,
-    RtorrentField::Message,
-    RtorrentField::LeftBytes,
-    RtorrentField::CreationDate,
-    RtorrentField::Hashing,
-    RtorrentField::Label,
+use fields::*;
+
+// Constants for rTorrent fields to ensure query and parser stay in sync
+const RTORRENT_FIELDS: &[&str] = &[
+    "",     // Ignored by multicall pattern
+    "main", // View
+    CMD_HASH,
+    CMD_NAME,
+    CMD_SIZE,
+    CMD_COMPLETED,
+    CMD_DOWN_RATE,
+    CMD_UP_RATE,
+    CMD_STATE,
+    CMD_COMPLETE,
+    CMD_MESSAGE,
+    CMD_LEFT_BYTES,
+    CMD_CREATION_DATE,
+    CMD_HASHING,
+    CMD_LABEL,
 ];
 
-fn get_field_value(row: &Vec<String>, field: RtorrentField) -> String {
-    let idx = RTORRENT_FIELDS.iter().position(|&f| f == field).unwrap_or(0);
-    row.get(idx).cloned().unwrap_or_default()
+fn parse_long(s: Option<&String>) -> i64 {
+    s.map(|v| v.parse().unwrap_or(0)).unwrap_or(0)
 }
 
-fn parse_long(s: &str) -> i64 {
-    s.parse().unwrap_or(0)
+fn parse_string(s: Option<&String>) -> String {
+    s.cloned().unwrap_or_default()
 }
 
 /// Converts a raw row of strings from rTorrent XML-RPC into a generic Torrent struct
-fn from_rtorrent_row(row: &Vec<String>) -> Torrent {
-    let hash = get_field_value(row, RtorrentField::Hash);
-    let name = get_field_value(row, RtorrentField::Name);
-    let size = parse_long(&get_field_value(row, RtorrentField::Size));
-    let completed = parse_long(&get_field_value(row, RtorrentField::Completed));
-    let down_rate = parse_long(&get_field_value(row, RtorrentField::DownRate));
-    let up_rate = parse_long(&get_field_value(row, RtorrentField::UpRate));
+fn from_rtorrent_row(row: Vec<String>) -> Torrent {
+    let hash = parse_string(row.get(IDX_HASH));
+    let name = parse_string(row.get(IDX_NAME));
+    let size = parse_long(row.get(IDX_SIZE));
+    let completed = parse_long(row.get(IDX_COMPLETED));
+    let down_rate = parse_long(row.get(IDX_DOWN_RATE));
+    let up_rate = parse_long(row.get(IDX_UP_RATE));
 
-    let state = parse_long(&get_field_value(row, RtorrentField::State));
-    let is_complete = parse_long(&get_field_value(row, RtorrentField::Complete));
-    let message = get_field_value(row, RtorrentField::Message);
-    let left_bytes = parse_long(&get_field_value(row, RtorrentField::LeftBytes));
-    let added_date = parse_long(&get_field_value(row, RtorrentField::CreationDate));
-    let is_hashing = parse_long(&get_field_value(row, RtorrentField::Hashing));
-    let label_raw = get_field_value(row, RtorrentField::Label);
+    let state = parse_long(row.get(IDX_STATE));
+    let is_complete = parse_long(row.get(IDX_COMPLETE));
+    let message = parse_string(row.get(IDX_MESSAGE));
+    let left_bytes = parse_long(row.get(IDX_LEFT_BYTES));
+    let added_date = parse_long(row.get(IDX_CREATION_DATE));
+    let is_hashing = parse_long(row.get(IDX_HASHING));
+    let label_raw = parse_string(row.get(IDX_LABEL));
 
     let label = if label_raw.is_empty() {
         None
@@ -131,10 +146,7 @@ fn from_rtorrent_row(row: &Vec<String>) -> Torrent {
 }
 
 pub async fn fetch_torrents(client: &RtorrentClient) -> Result<Vec<Torrent>, XmlRpcError> {
-    let params: Vec<RpcParam> = RTORRENT_FIELDS
-        .iter()
-        .map(|&f| RpcParam::from(f.to_string()))
-        .collect();
+    let params: Vec<RpcParam> = RTORRENT_FIELDS.iter().map(|s| RpcParam::from(*s)).collect();
     let xml = client.call("d.multicall2", &params).await?;
 
     if xml.trim().is_empty() {
@@ -143,7 +155,7 @@ pub async fn fetch_torrents(client: &RtorrentClient) -> Result<Vec<Torrent>, Xml
 
     let rows = parse_multicall_response(&xml)?;
 
-    let torrents = rows.iter().map(from_rtorrent_row).collect();
+    let torrents = rows.into_iter().map(from_rtorrent_row).collect();
 
     Ok(torrents)
 }
@@ -183,6 +195,9 @@ pub async fn fetch_global_stats(client: &RtorrentClient) -> Result<GlobalStats,
 pub async fn sse_handler(
     State(state): State<AppState>,
 ) -> Sse<impl Stream<Item = Result<Event, Infallible>>> {
+    // Notify background worker to wake up and poll immediately
+    state.notify_poll.notify_one();
+
     // Get initial value synchronously (from the watch channel's current state)
     let initial_rx = state.tx.subscribe();
     let initial_torrents = initial_rx.borrow().clone();
```
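The parallel `IDX_*`/`CMD_*` constants encode the fact that the `d.multicall2` parameter list starts with a target (`""`) and a view (`"main"`) before the field commands, while each returned row holds only the field values. A hypothetical helper (not part of the diff) that makes that off-by-two relationship explicit:

```rust
// Hypothetical check: a field command's position in the multicall parameter
// list sits two slots after its index in the returned row, because the list
// is prefixed with the "" target and the "main" view.
fn row_index_of(params: &[&str], cmd: &str) -> Option<usize> {
    params
        .iter()
        .position(|p| *p == cmd)
        .and_then(|pos| pos.checked_sub(2))
}

fn main() {
    let params = ["", "main", "d.hash=", "d.name=", "d.size_bytes="];
    assert_eq!(row_index_of(&params, "d.hash="), Some(0)); // IDX_HASH
    assert_eq!(row_index_of(&params, "d.size_bytes="), Some(2)); // IDX_SIZE
    println!("command positions line up with the row indices");
}
```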
```diff
@@ -6,52 +6,47 @@ use crate::api;
 pub fn Sidebar() -> impl IntoView {
     let store = use_context::<crate::store::TorrentStore>().expect("store not provided");
 
-    let total_count = move || store.torrents.get().len();
+    let total_count = move || store.torrents.with(|map| map.len());
     let downloading_count = move || {
-        store
-            .torrents
-            .get()
-            .iter()
+        store.torrents.with(|map| {
+            map.values()
             .filter(|t| t.status == shared::TorrentStatus::Downloading)
             .count()
+        })
     };
     let seeding_count = move || {
-        store
-            .torrents
-            .get()
-            .iter()
+        store.torrents.with(|map| {
+            map.values()
             .filter(|t| t.status == shared::TorrentStatus::Seeding)
             .count()
+        })
     };
     let completed_count = move || {
-        store
-            .torrents
-            .get()
-            .iter()
+        store.torrents.with(|map| {
+            map.values()
             .filter(|t| {
                 t.status == shared::TorrentStatus::Seeding
                     || (t.status == shared::TorrentStatus::Paused && t.percent_complete >= 100.0)
             })
             .count()
+        })
     };
     let paused_count = move || {
-        store
-            .torrents
-            .get()
-            .iter()
+        store.torrents.with(|map| {
+            map.values()
             .filter(|t| t.status == shared::TorrentStatus::Paused)
             .count()
+        })
     };
     let inactive_count = move || {
-        store
-            .torrents
-            .get()
-            .iter()
+        store.torrents.with(|map| {
+            map.values()
             .filter(|t| {
                 t.status == shared::TorrentStatus::Paused
                     || t.status == shared::TorrentStatus::Error
             })
             .count()
+        })
     };
 
     let close_drawer = move || {
```
```diff
@@ -82,9 +82,10 @@ pub fn TorrentTable() -> impl IntoView {
     let sort_dir = create_rw_signal(SortDirection::Descending);
 
     let filtered_torrents = move || {
-        let mut torrents = store
-            .torrents
-            .get()
+        // Convert HashMap values to Vec for filtering and sorting
+        let torrents: Vec<shared::Torrent> = store.torrents.with(|map| map.values().cloned().collect());
+
+        let mut torrents = torrents
             .into_iter()
             .filter(|t| {
                 let filter = store.filter.get();
@@ -253,7 +254,7 @@ pub fn TorrentTable() -> impl IntoView {
                             <div class="flex items-center">"Status" {move || sort_arrow(SortColumn::Status)}</div>
                         </th>
                         <th class="w-24 cursor-pointer hover:bg-base-300 group select-none" on:click=move |_| handle_sort(SortColumn::DownSpeed)>
-                            <div class="flex items-center">"Down Speed" {move || sort_arrow(SortColumn::DownSpeed)}</div>
+                            <div class="flex items-center">"DL Speed" {move || sort_arrow(SortColumn::DownSpeed)}</div>
                         </th>
                         <th class="w-24 cursor-pointer hover:bg-base-300 group select-none" on:click=move |_| handle_sort(SortColumn::UpSpeed)>
                             <div class="flex items-center">"Up Speed" {move || sort_arrow(SortColumn::UpSpeed)}</div>
@@ -344,7 +345,7 @@ pub fn TorrentTable() -> impl IntoView {
                     (SortColumn::Size, "Size"),
                     (SortColumn::Progress, "Progress"),
                     (SortColumn::Status, "Status"),
-                    (SortColumn::DownSpeed, "Down Speed"),
+                    (SortColumn::DownSpeed, "DL Speed"),
                     (SortColumn::UpSpeed, "Up Speed"),
                     (SortColumn::ETA, "ETA"),
                     (SortColumn::AddedDate, "Date"),
```
```diff
@@ -113,9 +113,11 @@ impl FilterStatus {
     }
 }
 
+use std::collections::HashMap;
+
 #[derive(Clone, Copy, Debug)]
 pub struct TorrentStore {
-    pub torrents: RwSignal<Vec<Torrent>>,
+    pub torrents: RwSignal<HashMap<String, Torrent>>,
     pub filter: RwSignal<FilterStatus>,
     pub search_query: RwSignal<String>,
     pub global_stats: RwSignal<GlobalStats>,
@@ -124,7 +126,7 @@ pub struct TorrentStore {
 }
 
 pub fn provide_torrent_store() {
-    let torrents = create_rw_signal(Vec::<Torrent>::new());
+    let torrents = create_rw_signal(HashMap::new());
     let filter = create_rw_signal(FilterStatus::All);
     let search_query = create_rw_signal(String::new());
     let global_stats = create_rw_signal(GlobalStats::default());
@@ -193,12 +195,15 @@ pub fn provide_torrent_store() {
             if let Ok(event) = serde_json::from_str::<AppEvent>(&data_str) {
                 match event {
                     AppEvent::FullList { torrents: list, .. } => {
-                        torrents.set(list);
+                        let map: HashMap<String, Torrent> = list
+                            .into_iter()
+                            .map(|t| (t.hash.clone(), t))
+                            .collect();
+                        torrents.set(map);
                     }
                     AppEvent::Update(update) => {
-                        torrents.update(|list| {
-                            if let Some(t) = list.iter_mut().find(|t| t.hash == update.hash)
-                            {
+                        torrents.update(|map| {
+                            if let Some(t) = map.get_mut(&update.hash) {
                                 if let Some(name) = update.name {
                                     t.name = name;
                                 }
```
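Keying the client-side store by info-hash turns each `AppEvent::Update` into a constant-time map lookup instead of a linear scan over a `Vec`. A plain-Rust sketch of that flow, using simplified stand-ins for the shared `Torrent` and update types (only the fields needed for the illustration):

```rust
use std::collections::HashMap;

// Simplified stand-ins for the shared types; only the fields used here.
#[derive(Clone, Debug)]
struct Torrent {
    hash: String,
    name: String,
}

struct TorrentUpdate {
    hash: String,
    name: Option<String>,
}

fn main() {
    let list = vec![
        Torrent { hash: "aaa".into(), name: "linux.iso".into() },
        Torrent { hash: "bbb".into(), name: "bsd.iso".into() },
    ];

    // FullList event: rebuild the map keyed by info-hash.
    let mut torrents: HashMap<String, Torrent> =
        list.into_iter().map(|t| (t.hash.clone(), t)).collect();

    // Update event: O(1) lookup by hash instead of scanning a Vec.
    let update = TorrentUpdate { hash: "aaa".into(), name: Some("linux-6.12.iso".into()) };
    if let Some(t) = torrents.get_mut(&update.hash) {
        if let Some(name) = update.name {
            t.name = name;
        }
    }

    println!("{:?}", torrents.get("aaa"));
}
```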