Compare commits: release-20 ... release-20

2 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 3ffc7576a0 |  |
|  | ce10c5dfb2 |  |

Cargo.lock (generated): 33 changes
```diff
@@ -310,6 +310,7 @@ dependencies = [
 "serde_json",
 "shared",
 "sqlx",
+"strum",
 "thiserror 2.0.18",
 "time",
 "tokio",
@@ -544,7 +545,7 @@ version = "4.5.55"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
 dependencies = [
-"heck",
+"heck 0.5.0",
 "proc-macro2",
 "quote",
 "syn 2.0.114",
@@ -1537,6 +1538,12 @@ dependencies = [
 "hashbrown 0.15.5",
 ]

+[[package]]
+name = "heck"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+
 [[package]]
 name = "heck"
 version = "0.5.0"
@@ -3704,7 +3711,7 @@ checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
 dependencies = [
 "dotenvy",
 "either",
-"heck",
+"heck 0.5.0",
 "hex",
 "once_cell",
 "proc-macro2",
@@ -3847,6 +3854,28 @@ version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"

+[[package]]
+name = "strum"
+version = "0.25.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125"
+dependencies = [
+ "strum_macros",
+]
+
+[[package]]
+name = "strum_macros"
+version = "0.25.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0"
+dependencies = [
+ "heck 0.4.1",
+ "proc-macro2",
+ "quote",
+ "rustversion",
+ "syn 2.0.114",
+]
+
 [[package]]
 name = "subtle"
 version = "2.6.1"
```
```diff
@@ -42,3 +42,4 @@ anyhow = "1.0.101"
 time = { version = "0.3.47", features = ["serde", "formatting", "parsing"] }
 tower_governor = "0.8.0"
 governor = "0.10.4"
+strum = { version = "0.25", features = ["derive", "strum_macros"] }
```
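For orientation, the new `strum` dependency supplies the `EnumString` and `Display` derives used in the SSE module further down to tie each enum variant to its rTorrent command string. A minimal, self-contained sketch of that mechanism, assuming strum 0.25 as pinned above (the `Field` enum here is illustrative, not code from this repository):

```rust
use std::str::FromStr;
use strum::{Display, EnumString};

// Each variant carries its wire representation via #[strum(serialize = ...)].
#[derive(Debug, PartialEq, Display, EnumString)]
enum Field {
    #[strum(serialize = "d.hash=")]
    Hash,
    #[strum(serialize = "d.name=")]
    Name,
}

fn main() {
    // Display renders the serialize string, so building the XML-RPC query
    // is just `variant.to_string()`.
    assert_eq!(Field::Hash.to_string(), "d.hash=");
    // EnumString parses the same string back into the variant.
    assert_eq!(Field::from_str("d.name="), Ok(Field::Name));
}
```

With this in place, `RtorrentField::Hash.to_string()` in the diff below yields the `d.hash=` command that was previously kept in a separate `CMD_HASH` constant.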
```diff
@@ -45,7 +45,6 @@ pub struct AppState {
 pub db: db::Db,
 #[cfg(feature = "push-notifications")]
 pub push_store: push::PushSubscriptionStore,
-pub notify_poll: Arc<tokio::sync::Notify>,
 }

 async fn auth_middleware(
@@ -337,8 +336,6 @@ async fn main() {
 #[cfg(not(feature = "push-notifications"))]
 let push_store = ();

-let notify_poll = Arc::new(tokio::sync::Notify::new());
-
 let app_state = AppState {
 tx: tx.clone(),
 event_bus: event_bus.clone(),
@@ -346,7 +343,6 @@ async fn main() {
 db: db.clone(),
 #[cfg(feature = "push-notifications")]
 push_store,
-notify_poll: notify_poll.clone(),
 };

 // Spawn background task to poll rTorrent
@@ -355,7 +351,6 @@ async fn main() {
 let socket_path = args.socket.clone(); // Clone for background task
 #[cfg(feature = "push-notifications")]
 let push_store_clone = app_state.push_store.clone();
-let notify_poll_clone = notify_poll.clone();

 tokio::spawn(async move {
 let client = xmlrpc::RtorrentClient::new(&socket_path);
@@ -364,14 +359,6 @@ async fn main() {
 let mut backoff_duration = Duration::from_secs(1);

 loop {
-// Determine polling interval based on active clients
-let active_clients = event_bus_tx.receiver_count();
-let loop_interval = if active_clients > 0 {
-Duration::from_secs(1)
-} else {
-Duration::from_secs(30)
-};
-
 // 1. Fetch Torrents
 let torrents_result = sse::fetch_torrents(&client).await;

@@ -442,14 +429,6 @@ async fn main() {
 }

 previous_torrents = new_torrents;
-
-// Success case: wait for the determined interval OR a wakeup notification
-tokio::select! {
-_ = tokio::time::sleep(loop_interval) => {},
-_ = notify_poll_clone.notified() => {
-tracing::debug!("Background loop awakened by new client connection");
-}
-}
 }
 Err(e) => {
 tracing::error!("Error fetching torrents in background: {}", e);
@@ -470,15 +449,20 @@ async fn main() {
 "Backoff: Sleeping for {:?} due to rTorrent error.",
 backoff_duration
 );

 tokio::time::sleep(backoff_duration).await;
 }
 }

 // Handle Stats
-if let Ok(stats) = stats_result {
+match stats_result {
+Ok(stats) => {
 let _ = event_bus_tx.send(AppEvent::Stats(stats));
 }
+Err(e) => {
+tracing::warn!("Error fetching global stats: {}", e);
+}
+}
-
+tokio::time::sleep(backoff_duration).await;
 }
 });
```
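The removed lines above were the on-demand wake-up path: an `Arc<tokio::sync::Notify>` shared between the SSE handler and the poll loop, raced against the interval sleep with `tokio::select!`. For readers unfamiliar with that pattern, here is a minimal standalone sketch (hypothetical names, not the project's code):

```rust
use std::{sync::Arc, time::Duration};
use tokio::sync::Notify;

#[tokio::main]
async fn main() {
    let notify = Arc::new(Notify::new());
    let notify_bg = notify.clone();

    // Background poller: waits out its interval unless it is woken early.
    let poller = tokio::spawn(async move {
        tokio::select! {
            _ = tokio::time::sleep(Duration::from_secs(30)) => println!("interval elapsed, polling"),
            _ = notify_bg.notified() => println!("woken early by a new client, polling now"),
        }
    });

    // A connecting client calls notify_one(), so the poller fires immediately
    // instead of waiting for the next scheduled poll.
    notify.notify_one();
    poller.await.unwrap();
}
```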
```diff
@@ -7,95 +7,80 @@ use axum::response::sse::{Event, Sse};
 use futures::stream::{self, Stream};
 use shared::{AppEvent, GlobalStats, Torrent, TorrentStatus};
 use std::convert::Infallible;
+use strum::{Display, EnumString};
 use tokio_stream::StreamExt;

-// Field definitions to keep query and parser in sync
-mod fields {
-pub const IDX_HASH: usize = 0;
-pub const CMD_HASH: &str = "d.hash=";
-
-pub const IDX_NAME: usize = 1;
-pub const CMD_NAME: &str = "d.name=";
-
-pub const IDX_SIZE: usize = 2;
-pub const CMD_SIZE: &str = "d.size_bytes=";
-
-pub const IDX_COMPLETED: usize = 3;
-pub const CMD_COMPLETED: &str = "d.bytes_done=";
-
-pub const IDX_DOWN_RATE: usize = 4;
-pub const CMD_DOWN_RATE: &str = "d.down.rate=";
-
-pub const IDX_UP_RATE: usize = 5;
-pub const CMD_UP_RATE: &str = "d.up.rate=";
-
-pub const IDX_STATE: usize = 6;
-pub const CMD_STATE: &str = "d.state=";
-
-pub const IDX_COMPLETE: usize = 7;
-pub const CMD_COMPLETE: &str = "d.complete=";
-
-pub const IDX_MESSAGE: usize = 8;
-pub const CMD_MESSAGE: &str = "d.message=";
-
-pub const IDX_LEFT_BYTES: usize = 9;
-pub const CMD_LEFT_BYTES: &str = "d.left_bytes=";
-
-pub const IDX_CREATION_DATE: usize = 10;
-pub const CMD_CREATION_DATE: &str = "d.creation_date=";
-
-pub const IDX_HASHING: usize = 11;
-pub const CMD_HASHING: &str = "d.hashing=";
-
-pub const IDX_LABEL: usize = 12;
-pub const CMD_LABEL: &str = "d.custom1=";
+#[derive(Debug, Clone, Copy, EnumString, Display, PartialEq)]
+enum RtorrentField {
+#[strum(serialize = "d.hash=")]
+Hash,
+#[strum(serialize = "d.name=")]
+Name,
+#[strum(serialize = "d.size_bytes=")]
+Size,
+#[strum(serialize = "d.bytes_done=")]
+Completed,
+#[strum(serialize = "d.down.rate=")]
+DownRate,
+#[strum(serialize = "d.up.rate=")]
+UpRate,
+#[strum(serialize = "d.state=")]
+State,
+#[strum(serialize = "d.complete=")]
+Complete,
+#[strum(serialize = "d.message=")]
+Message,
+#[strum(serialize = "d.left_bytes=")]
+LeftBytes,
+#[strum(serialize = "d.creation_date=")]
+CreationDate,
+#[strum(serialize = "d.hashing=")]
+Hashing,
+#[strum(serialize = "d.custom1=")]
+Label,
 }

-use fields::*;
-
-// Constants for rTorrent fields to ensure query and parser stay in sync
-const RTORRENT_FIELDS: &[&str] = &[
-"", // Ignored by multicall pattern
-"main", // View
-CMD_HASH,
-CMD_NAME,
-CMD_SIZE,
-CMD_COMPLETED,
-CMD_DOWN_RATE,
-CMD_UP_RATE,
-CMD_STATE,
-CMD_COMPLETE,
-CMD_MESSAGE,
-CMD_LEFT_BYTES,
-CMD_CREATION_DATE,
-CMD_HASHING,
-CMD_LABEL,
+const RTORRENT_FIELDS: &[RtorrentField] = &[
+RtorrentField::Hash,
+RtorrentField::Name,
+RtorrentField::Size,
+RtorrentField::Completed,
+RtorrentField::DownRate,
+RtorrentField::UpRate,
+RtorrentField::State,
+RtorrentField::Complete,
+RtorrentField::Message,
+RtorrentField::LeftBytes,
+RtorrentField::CreationDate,
+RtorrentField::Hashing,
+RtorrentField::Label,
 ];

-fn parse_long(s: Option<&String>) -> i64 {
-s.map(|v| v.parse().unwrap_or(0)).unwrap_or(0)
+fn get_field_value(row: &Vec<String>, field: RtorrentField) -> String {
+let idx = RTORRENT_FIELDS.iter().position(|&f| f == field).unwrap_or(0);
+row.get(idx).cloned().unwrap_or_default()
 }

-fn parse_string(s: Option<&String>) -> String {
-s.cloned().unwrap_or_default()
+fn parse_long(s: &str) -> i64 {
+s.parse().unwrap_or(0)
 }

 /// Converts a raw row of strings from rTorrent XML-RPC into a generic Torrent struct
-fn from_rtorrent_row(row: Vec<String>) -> Torrent {
-let hash = parse_string(row.get(IDX_HASH));
-let name = parse_string(row.get(IDX_NAME));
-let size = parse_long(row.get(IDX_SIZE));
-let completed = parse_long(row.get(IDX_COMPLETED));
-let down_rate = parse_long(row.get(IDX_DOWN_RATE));
-let up_rate = parse_long(row.get(IDX_UP_RATE));
+fn from_rtorrent_row(row: &Vec<String>) -> Torrent {
+let hash = get_field_value(row, RtorrentField::Hash);
+let name = get_field_value(row, RtorrentField::Name);
+let size = parse_long(&get_field_value(row, RtorrentField::Size));
+let completed = parse_long(&get_field_value(row, RtorrentField::Completed));
+let down_rate = parse_long(&get_field_value(row, RtorrentField::DownRate));
+let up_rate = parse_long(&get_field_value(row, RtorrentField::UpRate));

-let state = parse_long(row.get(IDX_STATE));
-let is_complete = parse_long(row.get(IDX_COMPLETE));
-let message = parse_string(row.get(IDX_MESSAGE));
-let left_bytes = parse_long(row.get(IDX_LEFT_BYTES));
-let added_date = parse_long(row.get(IDX_CREATION_DATE));
-let is_hashing = parse_long(row.get(IDX_HASHING));
-let label_raw = parse_string(row.get(IDX_LABEL));
+let state = parse_long(&get_field_value(row, RtorrentField::State));
+let is_complete = parse_long(&get_field_value(row, RtorrentField::Complete));
+let message = get_field_value(row, RtorrentField::Message);
+let left_bytes = parse_long(&get_field_value(row, RtorrentField::LeftBytes));
+let added_date = parse_long(&get_field_value(row, RtorrentField::CreationDate));
+let is_hashing = parse_long(&get_field_value(row, RtorrentField::Hashing));
+let label_raw = get_field_value(row, RtorrentField::Label);

 let label = if label_raw.is_empty() {
 None
@@ -146,7 +131,10 @@ fn from_rtorrent_row(row: Vec<String>) -> Torrent {
 }

 pub async fn fetch_torrents(client: &RtorrentClient) -> Result<Vec<Torrent>, XmlRpcError> {
-let params: Vec<RpcParam> = RTORRENT_FIELDS.iter().map(|s| RpcParam::from(*s)).collect();
+let params: Vec<RpcParam> = RTORRENT_FIELDS
+.iter()
+.map(|&f| RpcParam::from(f.to_string()))
+.collect();
 let xml = client.call("d.multicall2", &params).await?;

 if xml.trim().is_empty() {
@@ -155,7 +143,7 @@ pub async fn fetch_torrents(client: &RtorrentClient) -> Result<Vec<Torrent>, Xml
 let rows = parse_multicall_response(&xml)?;

-let torrents = rows.into_iter().map(from_rtorrent_row).collect();
+let torrents = rows.iter().map(from_rtorrent_row).collect();

 Ok(torrents)
 }
@@ -195,9 +183,6 @@ pub async fn fetch_global_stats(client: &RtorrentClient) -> Result<GlobalStats,
 pub async fn sse_handler(
 State(state): State<AppState>,
 ) -> Sse<impl Stream<Item = Result<Event, Infallible>>> {
-// Notify background worker to wake up and poll immediately
-state.notify_poll.notify_one();
-
 // Get initial value synchronously (from the watch channel's current state)
 let initial_rx = state.tx.subscribe();
 let initial_torrents = initial_rx.borrow().clone();
```
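The new `get_field_value` helper relies on the multicall row columns coming back in the same order the fields were requested, so a variant's position in `RTORRENT_FIELDS` doubles as its column index. A simplified standalone sketch of that lookup (two fields only, without the strum-derived command strings; not code from this repository):

```rust
#[derive(Clone, Copy, PartialEq)]
enum Field {
    Hash,
    Name,
}

// Request order defines column order in each multicall row.
const FIELDS: &[Field] = &[Field::Hash, Field::Name];

fn get_field_value(row: &[String], field: Field) -> String {
    // A variant's position in FIELDS is also its index into the row.
    let idx = FIELDS.iter().position(|&f| f == field).unwrap_or(0);
    row.get(idx).cloned().unwrap_or_default()
}

fn main() {
    let row = vec!["abc123".to_string(), "ubuntu.iso".to_string()];
    assert_eq!(get_field_value(&row, Field::Hash), "abc123");
    assert_eq!(get_field_value(&row, Field::Name), "ubuntu.iso");
}
```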
```diff
@@ -6,47 +6,52 @@ use crate::api;
 pub fn Sidebar() -> impl IntoView {
 let store = use_context::<crate::store::TorrentStore>().expect("store not provided");

-let total_count = move || store.torrents.with(|map| map.len());
+let total_count = move || store.torrents.get().len();
 let downloading_count = move || {
-store.torrents.with(|map| {
-map.values()
+store
+.torrents
+.get()
+.iter()
 .filter(|t| t.status == shared::TorrentStatus::Downloading)
 .count()
-})
 };
 let seeding_count = move || {
-store.torrents.with(|map| {
-map.values()
+store
+.torrents
+.get()
+.iter()
 .filter(|t| t.status == shared::TorrentStatus::Seeding)
 .count()
-})
 };
 let completed_count = move || {
-store.torrents.with(|map| {
-map.values()
+store
+.torrents
+.get()
+.iter()
 .filter(|t| {
 t.status == shared::TorrentStatus::Seeding
 || (t.status == shared::TorrentStatus::Paused && t.percent_complete >= 100.0)
 })
 .count()
-})
 };
 let paused_count = move || {
-store.torrents.with(|map| {
-map.values()
+store
+.torrents
+.get()
+.iter()
 .filter(|t| t.status == shared::TorrentStatus::Paused)
 .count()
-})
 };
 let inactive_count = move || {
-store.torrents.with(|map| {
-map.values()
+store
+.torrents
+.get()
+.iter()
 .filter(|t| {
 t.status == shared::TorrentStatus::Paused
 || t.status == shared::TorrentStatus::Error
 })
 .count()
-})
 };

 let close_drawer = move || {
```
```diff
@@ -82,10 +82,9 @@ pub fn TorrentTable() -> impl IntoView {
 let sort_dir = create_rw_signal(SortDirection::Descending);

 let filtered_torrents = move || {
-// Convert HashMap values to Vec for filtering and sorting
-let torrents: Vec<shared::Torrent> = store.torrents.with(|map| map.values().cloned().collect());
-
-let mut torrents = torrents
+let mut torrents = store
+.torrents
+.get()
 .into_iter()
 .filter(|t| {
 let filter = store.filter.get();
@@ -254,7 +253,7 @@ pub fn TorrentTable() -> impl IntoView {
 <div class="flex items-center">"Status" {move || sort_arrow(SortColumn::Status)}</div>
 </th>
 <th class="w-24 cursor-pointer hover:bg-base-300 group select-none" on:click=move |_| handle_sort(SortColumn::DownSpeed)>
-<div class="flex items-center">"DL Speed" {move || sort_arrow(SortColumn::DownSpeed)}</div>
+<div class="flex items-center">"Down Speed" {move || sort_arrow(SortColumn::DownSpeed)}</div>
 </th>
 <th class="w-24 cursor-pointer hover:bg-base-300 group select-none" on:click=move |_| handle_sort(SortColumn::UpSpeed)>
 <div class="flex items-center">"Up Speed" {move || sort_arrow(SortColumn::UpSpeed)}</div>
@@ -345,7 +344,7 @@ pub fn TorrentTable() -> impl IntoView {
 (SortColumn::Size, "Size"),
 (SortColumn::Progress, "Progress"),
 (SortColumn::Status, "Status"),
-(SortColumn::DownSpeed, "DL Speed"),
+(SortColumn::DownSpeed, "Down Speed"),
 (SortColumn::UpSpeed, "Up Speed"),
 (SortColumn::ETA, "ETA"),
 (SortColumn::AddedDate, "Date"),
```
```diff
@@ -113,11 +113,9 @@ impl FilterStatus {
 }
 }

-use std::collections::HashMap;
-
 #[derive(Clone, Copy, Debug)]
 pub struct TorrentStore {
-pub torrents: RwSignal<HashMap<String, Torrent>>,
+pub torrents: RwSignal<Vec<Torrent>>,
 pub filter: RwSignal<FilterStatus>,
 pub search_query: RwSignal<String>,
 pub global_stats: RwSignal<GlobalStats>,
@@ -126,7 +124,7 @@ pub struct TorrentStore {
 }

 pub fn provide_torrent_store() {
-let torrents = create_rw_signal(HashMap::new());
+let torrents = create_rw_signal(Vec::<Torrent>::new());
 let filter = create_rw_signal(FilterStatus::All);
 let search_query = create_rw_signal(String::new());
 let global_stats = create_rw_signal(GlobalStats::default());
@@ -195,15 +193,12 @@ pub fn provide_torrent_store() {
 if let Ok(event) = serde_json::from_str::<AppEvent>(&data_str) {
 match event {
 AppEvent::FullList { torrents: list, .. } => {
-let map: HashMap<String, Torrent> = list
-.into_iter()
-.map(|t| (t.hash.clone(), t))
-.collect();
-torrents.set(map);
+torrents.set(list);
 }
 AppEvent::Update(update) => {
-torrents.update(|map| {
-if let Some(t) = map.get_mut(&update.hash) {
+torrents.update(|list| {
+if let Some(t) = list.iter_mut().find(|t| t.hash == update.hash)
+{
 if let Some(name) = update.name {
 t.name = name;
 }
```
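The store now keeps torrents in a `Vec` and applies updates with a linear `iter_mut().find()` instead of a `HashMap` lookup keyed by hash. A minimal standalone sketch of that update path (hypothetical struct and helper, not the project's types):

```rust
#[derive(Debug)]
struct Torrent {
    hash: String,
    name: String,
}

// Locate the torrent by hash with a linear scan, then patch only the fields
// that the update actually carries.
fn apply_update(list: &mut [Torrent], hash: &str, new_name: Option<String>) {
    if let Some(t) = list.iter_mut().find(|t| t.hash == hash) {
        if let Some(name) = new_name {
            t.name = name;
        }
    }
}

fn main() {
    let mut torrents = vec![Torrent { hash: "abc".into(), name: "old.iso".into() }];
    apply_update(&mut torrents, "abc", Some("new.iso".into()));
    assert_eq!(torrents[0].name, "new.iso");
}
```

Compared with the map-based version this trades O(1) lookups for a linear scan, which stays cheap for a list-sized collection of torrents.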