Compare commits


2 Commits

Author SHA1 Message Date
spinline
3ffc7576a0 feat: add centralized API service layer for frontend
All checks were successful
Build MIPS Binary / build (push) Successful in 4m24s
- Create frontend/src/api/mod.rs with centralized HTTP client and error handling
- Implement api::auth module (login, logout, check_auth, get_user)
- Implement api::torrent module (add, action, delete, start, stop, set_label, set_priority)
- Implement api::setup module (get_status, setup)
- Implement api::settings module (set_global_limits)
- Implement api::push module (get_public_key, subscribe)
- Update all components to use api service layer instead of direct gloo_net calls
- Add thiserror dependency for error handling
2026-02-08 23:04:24 +03:00
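A rough sketch of the centralized request helper this commit describes. The actual frontend/src/api/mod.rs is not part of this compare view, so the names (ApiError, get_json) and the exact shape are assumptions; the snippet only illustrates the gloo_net + thiserror pattern the commit message refers to.

use gloo_net::http::Request;
use serde::de::DeserializeOwned;
use thiserror::Error;

// Hypothetical error type shared by the api::* modules.
#[derive(Debug, Error)]
pub enum ApiError {
    #[error("network error: {0}")]
    Network(#[from] gloo_net::Error),
    #[error("server returned status {0}")]
    Status(u16),
}

// Hypothetical helper: GET a URL and deserialize the JSON body,
// mapping transport failures and non-2xx statuses into ApiError.
pub async fn get_json<T: DeserializeOwned>(url: &str) -> Result<T, ApiError> {
    let resp = Request::get(url).send().await?;
    if !resp.ok() {
        return Err(ApiError::Status(resp.status()));
    }
    Ok(resp.json::<T>().await?)
}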
spinline
ce10c5dfb2 refactor: replace magic indices with RtorrentField enum for type-safe parsing
All checks were successful
Build MIPS Binary / build (push) Successful in 4m22s
2026-02-08 22:50:26 +03:00
7 changed files with 154 additions and 143 deletions

Cargo.lock generated · 33 changed lines
View File

@@ -310,6 +310,7 @@ dependencies = [
"serde_json",
"shared",
"sqlx",
"strum",
"thiserror 2.0.18",
"time",
"tokio",
@@ -544,7 +545,7 @@ version = "4.5.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
dependencies = [
"heck",
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.114",
@@ -1537,6 +1538,12 @@ dependencies = [
"hashbrown 0.15.5",
]
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "heck"
version = "0.5.0"
@@ -3704,7 +3711,7 @@ checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
dependencies = [
"dotenvy",
"either",
"heck",
"heck 0.5.0",
"hex",
"once_cell",
"proc-macro2",
@@ -3847,6 +3854,28 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.25.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0"
dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.114",
]
[[package]]
name = "subtle"
version = "2.6.1"

View File

@@ -42,3 +42,4 @@ anyhow = "1.0.101"
time = { version = "0.3.47", features = ["serde", "formatting", "parsing"] }
tower_governor = "0.8.0"
governor = "0.10.4"
strum = { version = "0.25", features = ["derive", "strum_macros"] }

View File

@@ -359,14 +359,6 @@ async fn main() {
let mut backoff_duration = Duration::from_secs(1);
loop {
// Determine polling interval based on active clients
let active_clients = event_bus_tx.receiver_count();
let loop_interval = if active_clients > 0 {
Duration::from_secs(1)
} else {
Duration::from_secs(30)
};
// 1. Fetch Torrents
let torrents_result = sse::fetch_torrents(&client).await;
@@ -437,9 +429,6 @@ async fn main() {
}
previous_torrents = new_torrents;
// Success case: sleep for the determined interval
tokio::time::sleep(loop_interval).await;
}
Err(e) => {
tracing::error!("Error fetching torrents in background: {}", e);
@@ -460,15 +449,20 @@ async fn main() {
"Backoff: Sleeping for {:?} due to rTorrent error.",
backoff_duration
);
tokio::time::sleep(backoff_duration).await;
}
}
// Handle Stats
if let Ok(stats) = stats_result {
let _ = event_bus_tx.send(AppEvent::Stats(stats));
match stats_result {
Ok(stats) => {
let _ = event_bus_tx.send(AppEvent::Stats(stats));
}
Err(e) => {
tracing::warn!("Error fetching global stats: {}", e);
}
}
tokio::time::sleep(backoff_duration).await;
}
});
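The reworked loop above drops the switch between a 1-second and a 30-second interval and sleeps on backoff_duration at the end of every iteration. As a minimal, self-contained sketch of that exponential-backoff polling pattern (the bounds, the reset-on-success step, and the fetch placeholder are assumptions, not code from main.rs):

use std::time::Duration;

async fn poll_forever() {
    let base = Duration::from_secs(1);
    let max_backoff = Duration::from_secs(60);
    let mut backoff = base;
    loop {
        match fetch().await {
            Ok(_data) => {
                // Success: publish the result, then poll again at the base rate.
                backoff = base;
            }
            Err(_e) => {
                // Failure: double the delay, capped at max_backoff.
                backoff = (backoff * 2).min(max_backoff);
            }
        }
        tokio::time::sleep(backoff).await;
    }
}

// Stand-in for sse::fetch_torrents(&client); only here so the sketch compiles.
async fn fetch() -> Result<(), ()> {
    Ok(())
}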

View File

@@ -7,95 +7,80 @@ use axum::response::sse::{Event, Sse};
use futures::stream::{self, Stream};
use shared::{AppEvent, GlobalStats, Torrent, TorrentStatus};
use std::convert::Infallible;
use strum::{Display, EnumString};
use tokio_stream::StreamExt;
// Field definitions to keep query and parser in sync
mod fields {
pub const IDX_HASH: usize = 0;
pub const CMD_HASH: &str = "d.hash=";
pub const IDX_NAME: usize = 1;
pub const CMD_NAME: &str = "d.name=";
pub const IDX_SIZE: usize = 2;
pub const CMD_SIZE: &str = "d.size_bytes=";
pub const IDX_COMPLETED: usize = 3;
pub const CMD_COMPLETED: &str = "d.bytes_done=";
pub const IDX_DOWN_RATE: usize = 4;
pub const CMD_DOWN_RATE: &str = "d.down.rate=";
pub const IDX_UP_RATE: usize = 5;
pub const CMD_UP_RATE: &str = "d.up.rate=";
pub const IDX_STATE: usize = 6;
pub const CMD_STATE: &str = "d.state=";
pub const IDX_COMPLETE: usize = 7;
pub const CMD_COMPLETE: &str = "d.complete=";
pub const IDX_MESSAGE: usize = 8;
pub const CMD_MESSAGE: &str = "d.message=";
pub const IDX_LEFT_BYTES: usize = 9;
pub const CMD_LEFT_BYTES: &str = "d.left_bytes=";
pub const IDX_CREATION_DATE: usize = 10;
pub const CMD_CREATION_DATE: &str = "d.creation_date=";
pub const IDX_HASHING: usize = 11;
pub const CMD_HASHING: &str = "d.hashing=";
pub const IDX_LABEL: usize = 12;
pub const CMD_LABEL: &str = "d.custom1=";
#[derive(Debug, Clone, Copy, EnumString, Display, PartialEq)]
enum RtorrentField {
#[strum(serialize = "d.hash=")]
Hash,
#[strum(serialize = "d.name=")]
Name,
#[strum(serialize = "d.size_bytes=")]
Size,
#[strum(serialize = "d.bytes_done=")]
Completed,
#[strum(serialize = "d.down.rate=")]
DownRate,
#[strum(serialize = "d.up.rate=")]
UpRate,
#[strum(serialize = "d.state=")]
State,
#[strum(serialize = "d.complete=")]
Complete,
#[strum(serialize = "d.message=")]
Message,
#[strum(serialize = "d.left_bytes=")]
LeftBytes,
#[strum(serialize = "d.creation_date=")]
CreationDate,
#[strum(serialize = "d.hashing=")]
Hashing,
#[strum(serialize = "d.custom1=")]
Label,
}
use fields::*;
// Constants for rTorrent fields to ensure query and parser stay in sync
const RTORRENT_FIELDS: &[&str] = &[
"", // Ignored by multicall pattern
"main", // View
CMD_HASH,
CMD_NAME,
CMD_SIZE,
CMD_COMPLETED,
CMD_DOWN_RATE,
CMD_UP_RATE,
CMD_STATE,
CMD_COMPLETE,
CMD_MESSAGE,
CMD_LEFT_BYTES,
CMD_CREATION_DATE,
CMD_HASHING,
CMD_LABEL,
const RTORRENT_FIELDS: &[RtorrentField] = &[
RtorrentField::Hash,
RtorrentField::Name,
RtorrentField::Size,
RtorrentField::Completed,
RtorrentField::DownRate,
RtorrentField::UpRate,
RtorrentField::State,
RtorrentField::Complete,
RtorrentField::Message,
RtorrentField::LeftBytes,
RtorrentField::CreationDate,
RtorrentField::Hashing,
RtorrentField::Label,
];
fn parse_long(s: Option<&String>) -> i64 {
s.map(|v| v.parse().unwrap_or(0)).unwrap_or(0)
fn get_field_value(row: &Vec<String>, field: RtorrentField) -> String {
let idx = RTORRENT_FIELDS.iter().position(|&f| f == field).unwrap_or(0);
row.get(idx).cloned().unwrap_or_default()
}
fn parse_string(s: Option<&String>) -> String {
s.cloned().unwrap_or_default()
fn parse_long(s: &str) -> i64 {
s.parse().unwrap_or(0)
}
/// Converts a raw row of strings from rTorrent XML-RPC into a generic Torrent struct
fn from_rtorrent_row(row: Vec<String>) -> Torrent {
let hash = parse_string(row.get(IDX_HASH));
let name = parse_string(row.get(IDX_NAME));
let size = parse_long(row.get(IDX_SIZE));
let completed = parse_long(row.get(IDX_COMPLETED));
let down_rate = parse_long(row.get(IDX_DOWN_RATE));
let up_rate = parse_long(row.get(IDX_UP_RATE));
fn from_rtorrent_row(row: &Vec<String>) -> Torrent {
let hash = get_field_value(row, RtorrentField::Hash);
let name = get_field_value(row, RtorrentField::Name);
let size = parse_long(&get_field_value(row, RtorrentField::Size));
let completed = parse_long(&get_field_value(row, RtorrentField::Completed));
let down_rate = parse_long(&get_field_value(row, RtorrentField::DownRate));
let up_rate = parse_long(&get_field_value(row, RtorrentField::UpRate));
let state = parse_long(row.get(IDX_STATE));
let is_complete = parse_long(row.get(IDX_COMPLETE));
let message = parse_string(row.get(IDX_MESSAGE));
let left_bytes = parse_long(row.get(IDX_LEFT_BYTES));
let added_date = parse_long(row.get(IDX_CREATION_DATE));
let is_hashing = parse_long(row.get(IDX_HASHING));
let label_raw = parse_string(row.get(IDX_LABEL));
let state = parse_long(&get_field_value(row, RtorrentField::State));
let is_complete = parse_long(&get_field_value(row, RtorrentField::Complete));
let message = get_field_value(row, RtorrentField::Message);
let left_bytes = parse_long(&get_field_value(row, RtorrentField::LeftBytes));
let added_date = parse_long(&get_field_value(row, RtorrentField::CreationDate));
let is_hashing = parse_long(&get_field_value(row, RtorrentField::Hashing));
let label_raw = get_field_value(row, RtorrentField::Label);
let label = if label_raw.is_empty() {
None
@@ -146,7 +131,10 @@ fn from_rtorrent_row(row: Vec<String>) -> Torrent {
}
pub async fn fetch_torrents(client: &RtorrentClient) -> Result<Vec<Torrent>, XmlRpcError> {
let params: Vec<RpcParam> = RTORRENT_FIELDS.iter().map(|s| RpcParam::from(*s)).collect();
let params: Vec<RpcParam> = RTORRENT_FIELDS
.iter()
.map(|&f| RpcParam::from(f.to_string()))
.collect();
let xml = client.call("d.multicall2", &params).await?;
if xml.trim().is_empty() {
@@ -155,7 +143,7 @@ pub async fn fetch_torrents(client: &RtorrentClient) -> Result<Vec<Torrent>, Xml
let rows = parse_multicall_response(&xml)?;
let torrents = rows.into_iter().map(from_rtorrent_row).collect();
let torrents = rows.iter().map(from_rtorrent_row).collect();
Ok(torrents)
}
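To make the refactor's guarantee concrete, here is a trimmed, standalone sketch (only two variants plus a main, for illustration) of how the strum Display and EnumString derives keep the rTorrent command strings and the enum variants in lockstep:

use std::str::FromStr;
use strum::{Display, EnumString};

#[derive(Debug, Clone, Copy, PartialEq, Display, EnumString)]
enum RtorrentField {
    #[strum(serialize = "d.hash=")]
    Hash,
    #[strum(serialize = "d.name=")]
    Name,
}

fn main() {
    // Display yields the exact string sent to d.multicall2 ...
    assert_eq!(RtorrentField::Hash.to_string(), "d.hash=");
    // ... and FromStr (derived by EnumString) maps the same string back
    // to the variant, so the query list and the parser stay in sync.
    assert_eq!(RtorrentField::from_str("d.name="), Ok(RtorrentField::Name));
}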

View File

@@ -6,47 +6,52 @@ use crate::api;
pub fn Sidebar() -> impl IntoView {
let store = use_context::<crate::store::TorrentStore>().expect("store not provided");
let total_count = move || store.torrents.with(|map| map.len());
let total_count = move || store.torrents.get().len();
let downloading_count = move || {
store.torrents.with(|map| {
map.values()
.filter(|t| t.status == shared::TorrentStatus::Downloading)
.count()
})
store
.torrents
.get()
.iter()
.filter(|t| t.status == shared::TorrentStatus::Downloading)
.count()
};
let seeding_count = move || {
store.torrents.with(|map| {
map.values()
.filter(|t| t.status == shared::TorrentStatus::Seeding)
.count()
})
store
.torrents
.get()
.iter()
.filter(|t| t.status == shared::TorrentStatus::Seeding)
.count()
};
let completed_count = move || {
store.torrents.with(|map| {
map.values()
.filter(|t| {
t.status == shared::TorrentStatus::Seeding
|| (t.status == shared::TorrentStatus::Paused && t.percent_complete >= 100.0)
})
.count()
})
store
.torrents
.get()
.iter()
.filter(|t| {
t.status == shared::TorrentStatus::Seeding
|| (t.status == shared::TorrentStatus::Paused && t.percent_complete >= 100.0)
})
.count()
};
let paused_count = move || {
store.torrents.with(|map| {
map.values()
.filter(|t| t.status == shared::TorrentStatus::Paused)
.count()
})
store
.torrents
.get()
.iter()
.filter(|t| t.status == shared::TorrentStatus::Paused)
.count()
};
let inactive_count = move || {
store.torrents.with(|map| {
map.values()
.filter(|t| {
t.status == shared::TorrentStatus::Paused
|| t.status == shared::TorrentStatus::Error
})
.count()
})
store
.torrents
.get()
.iter()
.filter(|t| {
t.status == shared::TorrentStatus::Paused
|| t.status == shared::TorrentStatus::Error
})
.count()
};
let close_drawer = move || {

View File

@@ -82,10 +82,9 @@ pub fn TorrentTable() -> impl IntoView {
let sort_dir = create_rw_signal(SortDirection::Descending);
let filtered_torrents = move || {
// Convert HashMap values to Vec for filtering and sorting
let torrents: Vec<shared::Torrent> = store.torrents.with(|map| map.values().cloned().collect());
let mut torrents = torrents
let mut torrents = store
.torrents
.get()
.into_iter()
.filter(|t| {
let filter = store.filter.get();

View File

@@ -113,11 +113,9 @@ impl FilterStatus {
}
}
use std::collections::HashMap;
#[derive(Clone, Copy, Debug)]
pub struct TorrentStore {
pub torrents: RwSignal<HashMap<String, Torrent>>,
pub torrents: RwSignal<Vec<Torrent>>,
pub filter: RwSignal<FilterStatus>,
pub search_query: RwSignal<String>,
pub global_stats: RwSignal<GlobalStats>,
@@ -126,7 +124,7 @@ pub struct TorrentStore {
}
pub fn provide_torrent_store() {
let torrents = create_rw_signal(HashMap::new());
let torrents = create_rw_signal(Vec::<Torrent>::new());
let filter = create_rw_signal(FilterStatus::All);
let search_query = create_rw_signal(String::new());
let global_stats = create_rw_signal(GlobalStats::default());
@@ -195,15 +193,12 @@ pub fn provide_torrent_store() {
if let Ok(event) = serde_json::from_str::<AppEvent>(&data_str) {
match event {
AppEvent::FullList { torrents: list, .. } => {
let map: HashMap<String, Torrent> = list
.into_iter()
.map(|t| (t.hash.clone(), t))
.collect();
torrents.set(map);
torrents.set(list);
}
AppEvent::Update(update) => {
torrents.update(|map| {
if let Some(t) = map.get_mut(&update.hash) {
torrents.update(|list| {
if let Some(t) = list.iter_mut().find(|t| t.hash == update.hash)
{
if let Some(name) = update.name {
t.name = name;
}