Compare commits


33 Commits

Author SHA1 Message Date
spinline
7d46dbd437 refactor: made theme management reactive with leptos-use::use_local_storage
All checks were successful
Build MIPS Binary / build (push) Successful in 4m28s
2026-02-08 20:02:01 +03:00
spinline
5f107299e3 refactor: modernized the long-press logic with leptos-use::use_timeout_fn
All checks were successful
Build MIPS Binary / build (push) Successful in 4m29s
2026-02-08 19:53:10 +03:00
spinline
c34133ded1 refactor: replaced manual Closure and timer management with Leptos set_timeout
All checks were successful
Build MIPS Binary / build (push) Successful in 4m16s
2026-02-08 19:39:38 +03:00
spinline
0d059cbbd3 fix: made push notification permission and toJSON calls type-safe
All checks were successful
Build MIPS Binary / build (push) Successful in 4m14s
2026-02-08 19:30:05 +03:00
spinline
fc83a1cc65 refactor: removed js_sys::eval usage, switched Base64 decoding to pure Rust
All checks were successful
Build MIPS Binary / build (push) Successful in 4m14s
2026-02-08 19:25:36 +03:00
spinline
4e81af0599 fix: unused import warning for utoipa::OpenApi in production build
All checks were successful
Build MIPS Binary / build (push) Successful in 4m18s
2026-02-08 18:49:54 +03:00
spinline
74c3c5c17e feat: added Swagger UI to the default (dev) features, excluded it from the production build
All checks were successful
Build MIPS Binary / build (push) Successful in 4m18s
2026-02-08 18:43:55 +03:00
spinline
3632a578e1 build: made the CI/CD and optimization process as simple and reliable as possible
Some checks failed
Build MIPS Binary / build (push) Has been cancelled
2026-02-08 18:41:40 +03:00
spinline
8a9905fc56 fix: removed the faulty manual optimization step that corrupted the WASM file
All checks were successful
Build MIPS Binary / build (push) Successful in 6m45s
2026-02-08 18:21:40 +03:00
spinline
1e39cbb0c5 perf: made Swagger UI optional and optimized the build command to reduce backend binary size
All checks were successful
Build MIPS Binary / build (push) Successful in 4m31s
2026-02-08 18:16:45 +03:00
spinline
40be58f2fc perf: moved the backend build to the workspace root, enabling workspace optimizations
All checks were successful
Build MIPS Binary / build (push) Successful in 4m30s
2026-02-08 18:10:09 +03:00
spinline
3f08b5b54a perf: added WASM size tracking to the logs and resolved profile conflicts
All checks were successful
Build MIPS Binary / build (push) Successful in 4m27s
2026-02-08 18:03:52 +03:00
spinline
bfec99ae35 fix: resolved the flag mismatch by using --all-features for wasm-opt
All checks were successful
Build MIPS Binary / build (push) Successful in 4m28s
2026-02-08 17:58:14 +03:00
spinline
d9afd3aa81 fix: enabled the nontrapping-float-to-int-conversions feature for wasm-opt
Some checks failed
Build MIPS Binary / build (push) Failing after 1m2s
2026-02-08 17:56:28 +03:00
spinline
e72113d91d perf: added manual WASM optimization and stabilized the build process
Some checks failed
Build MIPS Binary / build (push) Failing after 1m2s
2026-02-08 17:54:43 +03:00
spinline
7c4ff619c1 fix: corrected a typo in .cargo/config.toml
Some checks failed
Build MIPS Binary / build (push) Failing after 1m2s
2026-02-08 17:52:15 +03:00
spinline
9c4217f450 feat: enabled the bulk-memory feature for WASM
Some checks failed
Build MIPS Binary / build (push) Failing after 3s
2026-02-08 17:51:04 +03:00
spinline
cc09002171 trigger: restarted the build
Some checks failed
Build MIPS Binary / build (push) Failing after 1m3s
2026-02-08 17:49:06 +03:00
spinline
5d8cdd7760 build: updated the build environment (Trunk v0.21.14 and binaryen added), re-enabled optimizations
Some checks failed
Build MIPS Binary / build (push) Failing after 1m2s
2026-02-08 16:51:40 +03:00
spinline
145436eefc fix: temporarily disabled wasm-opt to work around the build error
All checks were successful
Build MIPS Binary / build (push) Successful in 4m30s
2026-02-08 16:44:29 +03:00
spinline
10c95c5ff3 fix: updated rustc and wasm-opt version settings for the wasm-opt build error
Some checks failed
Build MIPS Binary / build (push) Failing after 1m8s
2026-02-08 16:42:13 +03:00
spinline
329654cc4e fix: disabled the bulk-memory feature to address the wasm-opt build error
Some checks failed
Build MIPS Binary / build (push) Failing after 1m31s
2026-02-08 16:37:45 +03:00
spinline
22b592a652 fix: updated the wasm-opt level to 'z'
Some checks failed
Build MIPS Binary / build (push) Failing after 1m35s
2026-02-08 16:33:46 +03:00
spinline
817dc49db2 fix: added the --enable-bulk-memory flag to address the wasm-opt build error
Some checks failed
Build MIPS Binary / build (push) Failing after 3s
2026-02-08 16:29:33 +03:00
spinline
b2a60d3d1e cleanup: removed the unused get_vapid_public_key function
Some checks failed
Build MIPS Binary / build (push) Failing after 1m6s
2026-02-08 16:26:16 +03:00
spinline
520903fa3f perf: optimized push notifications with parallel sending and env var caching
Some checks failed
Build MIPS Binary / build (push) Has been cancelled
2026-02-08 16:25:44 +03:00
spinline
c45f2f50e9 fix: updated wasm-opt to v117 for the ARM64 build error
Some checks failed
Build MIPS Binary / build (push) Has been cancelled
2026-02-08 16:25:02 +03:00
spinline
791eabe9bd fix: improved SQLite deadlock handling and busy_timeout management
Some checks failed
Build MIPS Binary / build (push) Failing after 1m2s
2026-02-08 16:20:55 +03:00
spinline
12f93dd640 perf: enabled Trunk WASM optimization (removed data-wasm-opt=0)
Some checks failed
Build MIPS Binary / build (push) Failing after 1m2s
2026-02-08 16:18:50 +03:00
spinline
7306db8c2f fix: made the torrent diff algorithm hash-based, removing the ordering dependency
Some checks failed
Build MIPS Binary / build (push) Has been cancelled
2026-02-08 16:17:30 +03:00
spinline
ce0ecd62af fix: added a timeout (15s) and error message to the index.html loading screen
Some checks failed
Build MIPS Binary / build (push) Has been cancelled
2026-02-08 16:13:20 +03:00
spinline
f2379b67d8 docs: removed the outdated password-update comment in main.rs
Some checks failed
Build MIPS Binary / build (push) Has been cancelled
2026-02-08 16:11:18 +03:00
spinline
755f35c94c security: removed the real .env file from tracking and updated .env.example
Some checks failed
Build MIPS Binary / build (push) Has been cancelled
2026-02-08 16:07:26 +03:00
17 changed files with 353 additions and 325 deletions

View File

@@ -26,23 +26,22 @@ jobs:
run: |
cd frontend
npm install
# Run Tailwind manually first
npx @tailwindcss/cli -i input.css -o public/tailwind.css
# We keep Trunk's optimization off (0) because opt-level='z' in Cargo.toml already does that job.
trunk build --release
- name: Build Backend (MIPS)
env:
# Ensure we are building a fully static binary
# -C link-self-contained=no: Let Zig (the linker) handle CRT objects (crt1.o, etc.)
RUSTFLAGS: "-C target-feature=+crt-static -C link-self-contained=no -C link-arg=-msoft-float"
# With -s and -w we strip every unnecessary payload from the binary.
RUSTFLAGS: "-C target-feature=+crt-static -C link-self-contained=no -C link-arg=-msoft-float -C link-arg=-s -C link-arg=-w"
CFLAGS_mips_unknown_linux_musl: "-msoft-float"
run: |
cd backend
cargo zigbuild --target mips-unknown-linux-musl --release -Z build-std=std,panic_abort
file target/mips-unknown-linux-musl/release/backend
# Build only the required features (swagger disabled to save size)
cargo zigbuild -p backend --target mips-unknown-linux-musl --release -Z build-std=std,panic_abort --no-default-features --features push-notifications
- name: Rename Binary
run: mv target/mips-unknown-linux-musl/release/backend target/mips-unknown-linux-musl/release/vibetorrent-mips
- name: Create Release Assets
run: |
mv target/mips-unknown-linux-musl/release/backend target/mips-unknown-linux-musl/release/vibetorrent-mips
- name: Generate Release Tag
id: tag
@@ -56,8 +55,10 @@ jobs:
REPO="admin/vibetorrent"
API_URL="${{ gitea.server_url }}/api/v1"
# Create release
RELEASE_RESPONSE=$(curl -s -X POST "${API_URL}/repos/${REPO}/releases" -H "Authorization: token ${RELEASE_TOKEN}" -H "Content-Type: application/json" -d "{
RELEASE_RESPONSE=$(curl -s -X POST "${API_URL}/repos/${REPO}/releases" \
-H "Authorization: token ${RELEASE_TOKEN}" \
-H "Content-Type: application/json" \
-d "{
\"tag_name\": \"${TAG}\",
\"name\": \"Release ${TAG}\",
\"body\": \"Automated build from commit ${{ gitea.sha }}\",
@@ -66,15 +67,9 @@ jobs:
}")
RELEASE_ID=$(echo "$RELEASE_RESPONSE" | jq -r '.id')
echo "Release ID: $RELEASE_ID"
if [ "$RELEASE_ID" = "null" ] || [ -z "$RELEASE_ID" ]; then exit 1; fi
if [ "$RELEASE_ID" = "null" ] || [ -z "$RELEASE_ID" ]; then
echo "Failed to create release:"
echo "$RELEASE_RESPONSE"
exit 1
fi
# Upload binary as release asset
curl -s -X POST "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=vibetorrent-mips" -H "Authorization: token ${RELEASE_TOKEN}" -H "Content-Type: application/octet-stream" --data-binary @target/mips-unknown-linux-musl/release/vibetorrent-mips
echo "Release ${TAG} created with binary attached."
curl -s -X POST "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=vibetorrent-mips" \
-H "Authorization: token ${RELEASE_TOKEN}" \
-H "Content-Type: application/octet-stream" \
--data-binary @target/mips-unknown-linux-musl/release/vibetorrent-mips

.gitignore vendored
View File

@@ -6,3 +6,5 @@ result.xml
frontend/dist
backend.log
.runner
.env
backend/.env

View File

@@ -2,13 +2,19 @@
members = ["backend", "frontend", "shared"]
resolver = "2"
# Optimize for size (aggressive)
[profile.release]
# Smallest binary size
opt-level = "z"
lto = true
# Deepest dead-code elimination
lto = "fat"
# Single codegen unit for the best optimization
codegen-units = 1
# Save space by dropping unwinding code
panic = "abort"
# Strip symbols and debug info completely
strip = true
# Disable incremental builds so optimization runs fully
incremental = false
[patch.crates-io]
coarsetime = { path = "third_party/coarsetime" }

View File

@@ -1,8 +0,0 @@
# Database
DATABASE_URL=sqlite:vibetorrent.db
# VAPID Keys for Push Notifications
# Generate new keys for production using: cargo run --bin web-push --features web-push -- generate-vapid-keys
VAPID_PUBLIC_KEY=BEdPj6XQR7MGzM28Nev9wokF5upHoydNDahouJbQ9ZdBJpEFAN1iNfANSEvY0ItasNY5zcvvqN_tjUt64Rfd0gU
VAPID_PRIVATE_KEY=aUcCYJ7kUd9UClCaWwad0IVgbYJ6svwl19MjSX7GH10
VAPID_EMAIL=mailto:admin@vibetorrent.app

View File

@@ -3,3 +3,12 @@ RTORRENT_SOCKET=/tmp/rtorrent.sock
# Backend Listen Port
PORT=3000
# Database URL
DATABASE_URL=sqlite:vibetorrent.db
# VAPID Keys for Push Notifications
# Generate new keys for production using: npx web-push generate-vapid-keys
VAPID_PUBLIC_KEY=YOUR_PUBLIC_VAPID_KEY
VAPID_PRIVATE_KEY=YOUR_PRIVATE_VAPID_KEY
VAPID_EMAIL=mailto:your-email@example.com

View File

@@ -4,8 +4,9 @@ version = "0.1.0"
edition = "2021"
[features]
default = ["push-notifications"]
default = ["push-notifications", "swagger"]
push-notifications = ["web-push", "openssl"]
swagger = ["utoipa-swagger-ui"]
[dependencies]
axum = { version = "0.8", features = ["macros", "ws"] }
@@ -29,7 +30,7 @@ shared = { path = "../shared" }
thiserror = "2.0.18"
dotenvy = "0.15.7"
utoipa = { version = "5.4.0", features = ["axum_extras"] }
utoipa-swagger-ui = { version = "9.0.2", features = ["axum"] }
utoipa-swagger-ui = { version = "9.0.2", features = ["axum"], optional = true }
web-push = { version = "0.10", default-features = false, features = ["hyper-client"], optional = true }
base64 = "0.22"
openssl = { version = "0.10", features = ["vendored"], optional = true }

View File

@@ -1,6 +1,7 @@
use sqlx::{sqlite::SqlitePoolOptions, Pool, Sqlite, Row};
use sqlx::{sqlite::SqlitePoolOptions, Pool, Sqlite, Row, sqlite::SqliteConnectOptions};
use std::time::Duration;
use anyhow::Result;
use std::str::FromStr;
#[derive(Clone)]
pub struct Db {
@@ -9,10 +10,16 @@ pub struct Db {
impl Db {
pub async fn new(db_url: &str) -> Result<Self> {
let options = SqliteConnectOptions::from_str(db_url)?
.create_if_missing(true)
.busy_timeout(Duration::from_secs(10)) // Raised the wait time to 10 seconds
.journal_mode(sqlx::sqlite::SqliteJournalMode::Wal)
.synchronous(sqlx::sqlite::SqliteSynchronous::Normal);
let pool = SqlitePoolOptions::new()
.max_connections(5)
.acquire_timeout(Duration::from_secs(3))
.connect(db_url)
.acquire_timeout(Duration::from_secs(10))
.connect_with(options)
.await?;
let db = Self { pool };
@@ -21,21 +28,6 @@ impl Db {
}
async fn run_migrations(&self) -> Result<()> {
// WAL mode - enables concurrent reads while writing
sqlx::query("PRAGMA journal_mode=WAL")
.execute(&self.pool)
.await?;
// NORMAL synchronous - faster than FULL, still safe enough
sqlx::query("PRAGMA synchronous=NORMAL")
.execute(&self.pool)
.await?;
// 5 second busy timeout - reduces "database locked" errors
sqlx::query("PRAGMA busy_timeout=5000")
.execute(&self.pool)
.await?;
sqlx::migrate!("./migrations").run(&self.pool).await?;
Ok(())
}
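
A minimal sketch (not repo code) of how one might verify that the new connect options actually took effect: the pool setup mirrors the diff above, while the PRAGMA read-backs are an assumption about how such a check could be written.

use std::str::FromStr;
use std::time::Duration;
use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions};

async fn check_connect_options(db_url: &str) -> anyhow::Result<()> {
    // Same options as in Db::new above.
    let options = SqliteConnectOptions::from_str(db_url)?
        .create_if_missing(true)
        .busy_timeout(Duration::from_secs(10))
        .journal_mode(sqlx::sqlite::SqliteJournalMode::Wal)
        .synchronous(sqlx::sqlite::SqliteSynchronous::Normal);
    let pool = SqlitePoolOptions::new()
        .max_connections(5)
        .connect_with(options)
        .await?;
    // journal_mode is persisted in the database file; busy_timeout is per connection (in ms).
    let mode: String = sqlx::query_scalar("PRAGMA journal_mode").fetch_one(&pool).await?;
    let timeout: i64 = sqlx::query_scalar("PRAGMA busy_timeout").fetch_one(&pool).await?;
    assert_eq!(mode.to_lowercase(), "wal");
    assert_eq!(timeout, 10_000);
    Ok(())
}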

View File

@@ -1,3 +1,4 @@
use std::collections::HashMap;
use shared::{AppEvent, NotificationLevel, SystemNotification, Torrent, TorrentUpdate};
#[derive(Debug)]
@@ -8,24 +9,32 @@ pub enum DiffResult {
}
pub fn diff_torrents(old: &[Torrent], new: &[Torrent]) -> DiffResult {
// 1. Structural Check (Length or Order changed)
// 1. Structural check: if the torrent count changed (something added or removed),
// we send a FullUpdate for now, for simplicity.
if old.len() != new.len() {
return DiffResult::FullUpdate;
}
for (i, t) in new.iter().enumerate() {
if old[i].hash != t.hash {
// 2. Hash-set comparison:
// The order may have changed, but are the torrents the same?
let old_map: HashMap<&str, &Torrent> = old.iter().map(|t| (t.hash.as_str(), t)).collect();
// If a hash from the new list is missing from the old list, the structure has changed.
for new_t in new {
if !old_map.contains_key(new_t.hash.as_str()) {
return DiffResult::FullUpdate;
}
}
// 2. Field Updates
// 3. Field updates (partial updates)
// At this point we know the old and new lists contain the same torrents (by hash);
// only their order may differ, or their data may have been updated.
let mut events = Vec::new();
for (i, new_t) in new.iter().enumerate() {
let old_t = &old[i];
for new_t in new {
// Look up the matching torrent in old_map by hash (independent of order)
let old_t = old_map.get(new_t.hash.as_str()).unwrap();
// Initialize with all None
let mut update = TorrentUpdate {
hash: new_t.hash.clone(),
name: None,
@@ -42,7 +51,7 @@ pub fn diff_torrents(old: &[Torrent], new: &[Torrent]) -> DiffResult {
let mut has_changes = false;
// Compare fields
// Compare the fields
if old_t.name != new_t.name {
update.name = Some(new_t.name.clone());
has_changes = true;
@@ -63,7 +72,7 @@ pub fn diff_torrents(old: &[Torrent], new: &[Torrent]) -> DiffResult {
update.percent_complete = Some(new_t.percent_complete);
has_changes = true;
// Check for torrent completion: reached 100%
// Torrent completion check
if old_t.percent_complete < 100.0 && new_t.percent_complete >= 100.0 {
tracing::info!("Torrent completed: {} ({})", new_t.name, new_t.hash);
events.push(AppEvent::Notification(SystemNotification {
@@ -84,7 +93,6 @@ pub fn diff_torrents(old: &[Torrent], new: &[Torrent]) -> DiffResult {
update.status = Some(new_t.status.clone());
has_changes = true;
// Log status changes for debugging
tracing::debug!(
"Torrent status changed: {} ({}) {:?} -> {:?}",
new_t.name, new_t.hash, old_t.status, new_t.status
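
The reasoning behind the new check, reduced to a standalone sketch with plain strings (a hypothetical helper, not the repo's code): index the old list by hash, so membership rather than position decides whether the structure changed. The real code builds a HashMap<&str, &Torrent> instead of a set, so it can also fetch the old torrent for the field-by-field comparison.

use std::collections::HashSet;

// Hypothetical, simplified version of the structural check.
fn same_hash_set(old: &[&str], new: &[&str]) -> bool {
    if old.len() != new.len() {
        return false;
    }
    let old_set: HashSet<&str> = old.iter().copied().collect();
    new.iter().all(|h| old_set.contains(h))
}

fn main() {
    // A pure reorder is no longer treated as a structural change ...
    assert!(same_hash_set(&["aaa", "bbb"], &["bbb", "aaa"]));
    // ... but a genuinely new hash still forces a FullUpdate.
    assert!(!same_hash_set(&["aaa", "bbb"], &["aaa", "ccc"]));
}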

View File

@@ -690,8 +690,10 @@ pub async fn handle_timeout_error(err: BoxError) -> (StatusCode, &'static str) {
(status = 200, description = "VAPID public key", body = String)
)
)]
pub async fn get_push_public_key_handler() -> impl IntoResponse {
let public_key = push::get_vapid_public_key();
pub async fn get_push_public_key_handler(
State(state): State<AppState>,
) -> impl IntoResponse {
let public_key = state.push_store.get_public_key();
(StatusCode::OK, Json(serde_json::json!({ "publicKey": public_key }))).into_response()
}

View File

@@ -32,7 +32,9 @@ use tower_http::{
cors::CorsLayer,
trace::TraceLayer,
};
#[cfg(feature = "swagger")]
use utoipa::OpenApi;
#[cfg(feature = "swagger")]
use utoipa_swagger_ui::SwaggerUi;
#[derive(Clone)]
@@ -98,6 +100,7 @@ struct Args {
reset_password: Option<String>,
}
#[cfg(feature = "swagger")]
#[cfg(feature = "push-notifications")]
#[derive(OpenApi)]
#[openapi(
@@ -146,6 +149,7 @@ struct Args {
)]
struct ApiDoc;
#[cfg(feature = "swagger")]
#[cfg(not(feature = "push-notifications"))]
#[derive(OpenApi)]
#[openapi(
@@ -255,9 +259,7 @@ async fn main() {
}
};
// Update in DB (using a direct query since db.rs doesn't have update_password yet)
// We should add `update_password` to db.rs for cleaner code, but for now direct query is fine or we can extend Db.
// Let's extend Db.rs first to be clean.
// Update in DB
if let Err(e) = db.update_password(user_id, &password_hash).await {
tracing::error!("Failed to update password in DB: {}", e);
std::process::exit(1);
@@ -464,9 +466,13 @@ async fn main() {
}
});
let app = Router::new()
.merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", ApiDoc::openapi()))
// Setup & Auth Routes
let app = Router::new();
#[cfg(feature = "swagger")]
let app = app.merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", ApiDoc::openapi()));
// Setup & Auth Routes
let app = app
.route("/api/setup/status", get(handlers::setup::get_setup_status_handler))
.route("/api/setup", post(handlers::setup::setup_handler))
.route(

View File

@@ -5,6 +5,7 @@ use utoipa::ToSchema;
use web_push::{
HyperWebPushClient, SubscriptionInfo, VapidSignatureBuilder, WebPushClient, WebPushMessageBuilder,
};
use futures::StreamExt;
use crate::db::Db;
@@ -20,17 +21,34 @@ pub struct PushKeys {
pub auth: String,
}
#[derive(Clone)]
pub struct VapidConfig {
pub private_key: String,
pub public_key: String,
pub email: String,
}
#[derive(Clone)]
pub struct PushSubscriptionStore {
db: Option<Db>,
subscriptions: Arc<RwLock<Vec<PushSubscription>>>,
vapid_config: VapidConfig,
}
impl PushSubscriptionStore {
pub fn new() -> Self {
let private_key = std::env::var("VAPID_PRIVATE_KEY").expect("VAPID_PRIVATE_KEY must be set in .env");
let public_key = std::env::var("VAPID_PUBLIC_KEY").expect("VAPID_PUBLIC_KEY must be set in .env");
let email = std::env::var("VAPID_EMAIL").expect("VAPID_EMAIL must be set in .env");
Self {
db: None,
subscriptions: Arc::new(RwLock::new(Vec::new())),
vapid_config: VapidConfig {
private_key,
public_key,
email,
},
}
}
@@ -47,9 +65,18 @@ impl PushSubscriptionStore {
}
tracing::info!("Loaded {} push subscriptions from database", subscriptions_vec.len());
let private_key = std::env::var("VAPID_PRIVATE_KEY").expect("VAPID_PRIVATE_KEY must be set in .env");
let public_key = std::env::var("VAPID_PUBLIC_KEY").expect("VAPID_PUBLIC_KEY must be set in .env");
let email = std::env::var("VAPID_EMAIL").expect("VAPID_EMAIL must be set in .env");
Ok(Self {
db: Some(db.clone()),
subscriptions: Arc::new(RwLock::new(subscriptions_vec)),
vapid_config: VapidConfig {
private_key,
public_key,
email,
},
})
}
@@ -91,6 +118,10 @@ impl PushSubscriptionStore {
pub async fn get_all_subscriptions(&self) -> Vec<PushSubscription> {
self.subscriptions.read().await.clone()
}
pub fn get_public_key(&self) -> &str {
&self.vapid_config.public_key
}
}
/// Send push notification to all subscribed clients
@@ -116,50 +147,68 @@ pub async fn send_push_notification(
"tag": "vibetorrent"
});
let client = HyperWebPushClient::new();
let client = Arc::new(HyperWebPushClient::new());
let vapid_config = store.vapid_config.clone();
let payload_str = payload.to_string();
let vapid_private_key = std::env::var("VAPID_PRIVATE_KEY").expect("VAPID_PRIVATE_KEY must be set in .env");
let vapid_email = std::env::var("VAPID_EMAIL").expect("VAPID_EMAIL must be set in .env");
// Send notifications concurrently
futures::stream::iter(subscriptions)
.for_each_concurrent(10, |subscription| {
let client = client.clone();
let vapid_config = vapid_config.clone();
let payload_str = payload_str.clone();
for subscription in subscriptions {
let subscription_info = SubscriptionInfo {
endpoint: subscription.endpoint.clone(),
keys: web_push::SubscriptionKeys {
p256dh: subscription.keys.p256dh.clone(),
auth: subscription.keys.auth.clone(),
},
};
async move {
let subscription_info = SubscriptionInfo {
endpoint: subscription.endpoint.clone(),
keys: web_push::SubscriptionKeys {
p256dh: subscription.keys.p256dh.clone(),
auth: subscription.keys.auth.clone(),
},
};
let mut sig_builder = VapidSignatureBuilder::from_base64(
&vapid_private_key,
web_push::URL_SAFE_NO_PAD,
&subscription_info,
)?;
let sig_res = VapidSignatureBuilder::from_base64(
&vapid_config.private_key,
web_push::URL_SAFE_NO_PAD,
&subscription_info,
);
sig_builder.add_claim("sub", vapid_email.as_str());
sig_builder.add_claim("aud", subscription.endpoint.as_str());
let signature = sig_builder.build()?;
match sig_res {
Ok(mut sig_builder) => {
sig_builder.add_claim("sub", vapid_config.email.as_str());
sig_builder.add_claim("aud", subscription.endpoint.as_str());
let mut builder = WebPushMessageBuilder::new(&subscription_info);
builder.set_vapid_signature(signature);
match sig_builder.build() {
Ok(signature) => {
let mut builder = WebPushMessageBuilder::new(&subscription_info);
builder.set_vapid_signature(signature);
builder.set_payload(web_push::ContentEncoding::Aes128Gcm, payload_str.as_bytes());
let payload_str = payload.to_string();
builder.set_payload(web_push::ContentEncoding::Aes128Gcm, payload_str.as_bytes());
match client.send(builder.build()?).await {
Ok(_) => {
tracing::debug!("Push notification sent to: {}", subscription.endpoint);
match builder.build() {
Ok(msg) => {
match client.send(msg).await {
Ok(_) => {
tracing::debug!("Push notification sent to: {}", subscription.endpoint);
}
Err(e) => {
tracing::error!("Failed to send push notification to {}: {}", subscription.endpoint, e);
}
}
}
Err(e) => tracing::error!("Failed to build push message: {}", e),
}
}
Err(e) => tracing::error!("Failed to build VAPID signature: {}", e),
}
}
Err(e) => tracing::error!("Failed to create VAPID signature builder: {}", e),
}
}
Err(e) => {
tracing::error!("Failed to send push notification: {}", e);
// TODO: Remove invalid subscriptions
}
}
})
.await;
Ok(())
}
Ok(())
}
pub fn get_vapid_public_key() -> String {
std::env::var("VAPID_PUBLIC_KEY").expect("VAPID_PUBLIC_KEY must be set in .env")
}
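
The concurrency pattern introduced above, shown in isolation (a sketch; fake_send is a hypothetical stand-in for building and sending one web-push message): futures::stream::iter plus for_each_concurrent caps the number of in-flight sends, and a failure for one endpoint only logs instead of aborting the whole batch. Cloning the cached vapid_config into each task is what replaces the per-send std::env::var lookups removed above.

use futures::StreamExt;

// Hypothetical stand-in for building and sending one web-push message.
async fn fake_send(endpoint: &str) -> Result<(), String> {
    println!("sending to {endpoint}");
    Ok(())
}

async fn send_to_all(endpoints: Vec<String>) {
    // At most 10 sends are in flight at once.
    futures::stream::iter(endpoints)
        .for_each_concurrent(10, |endpoint| async move {
            if let Err(e) = fake_send(&endpoint).await {
                eprintln!("push to {endpoint} failed: {e}");
            }
        })
        .await;
}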

View File

@@ -20,6 +20,8 @@ RUN apt-get update && apt-get install -y \
jq \
# Needed for some crate compilations
protobuf-compiler \
# Install binaryen to have wasm-opt available system-wide
binaryen \
&& rm -rf /var/lib/apt/lists/*
# 2. Install Node.js v20 (Manual install to support multi-arch cleanly)
@@ -70,7 +72,7 @@ RUN . "$HOME/.cargo/env" && \
ARCH=$(dpkg --print-architecture) && \
if [ "$ARCH" = "amd64" ]; then TRUNK_ARCH="x86_64-unknown-linux-gnu"; \
elif [ "$ARCH" = "arm64" ]; then TRUNK_ARCH="aarch64-unknown-linux-gnu"; fi && \
wget -qO- "https://github.com/trunk-rs/trunk/releases/download/v0.21.5/trunk-$TRUNK_ARCH.tar.gz" | tar -xzf - -C /root/.cargo/bin/ && \
wget -qO- "https://github.com/trunk-rs/trunk/releases/download/v0.21.14/trunk-$TRUNK_ARCH.tar.gz" | tar -xzf - -C /root/.cargo/bin/ && \
chmod +x /root/.cargo/bin/trunk && \
# Install wasm-bindgen-cli (Compiling from source to avoid glibc issues, doing it ONCE here)
cargo install wasm-bindgen-cli --version 0.2.108

View File

@@ -52,3 +52,7 @@ web-sys = { version = "0.3", features = [
shared = { path = "../shared" }
tailwind_fuse = "0.3.2"
js-sys = "0.3.85"
base64 = "0.22.1"
serde-wasm-bindgen = "0.6.5"
leptos-use = "0.13"
codee = "0.2"

View File

@@ -86,12 +86,15 @@
id="app-loading"
style="
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 100vh;
font-family: sans-serif;
"
>
<div
id="app-loading-spinner"
style="
width: 40px;
height: 40px;
@@ -102,6 +105,32 @@
opacity: 0.5;
"
></div>
<div
id="app-loading-error"
style="display: none; text-align: center; margin-top: 20px; padding: 0 20px"
>
<p style="color: #ef4444; font-weight: bold; margin-bottom: 8px">
The application could not be loaded
</p>
<p style="font-size: 14px; opacity: 0.7">
Your connection may be slow, or a system error may have occurred.
</p>
<button
onclick="location.reload()"
style="
margin-top: 16px;
padding: 8px 16px;
background: #3b82f6;
color: white;
border: none;
border-radius: 6px;
cursor: pointer;
font-weight: 500;
"
>
Reload Page
</button>
</div>
</div>
<style>
@keyframes spin {
@@ -115,6 +144,34 @@
}
</style>
<script>
// App loading timeout handler
(function () {
var timeout = setTimeout(function () {
if (!document.body.classList.contains("app-loaded")) {
var spinner = document.getElementById("app-loading-spinner");
var error = document.getElementById("app-loading-error");
if (spinner) spinner.style.display = "none";
if (error) error.style.display = "block";
}
}, 15000); // 15 seconds timeout
// Clean up timeout if app loads
var observer = new MutationObserver(function (mutations) {
mutations.forEach(function (mutation) {
if (
mutation.attributeName === "class" &&
document.body.classList.contains("app-loaded")
) {
clearTimeout(timeout);
observer.disconnect();
}
});
});
observer.observe(document.body, { attributes: true });
})();
</script>
<!-- Service Worker Registration & PWA Setup -->
<script>
if ("serviceWorker" in navigator) {

View File

@@ -1,4 +1,6 @@
use leptos::*;
use leptos_use::storage::use_local_storage;
use codee::string::FromToStringCodec;
use shared::GlobalLimitRequest;
fn format_bytes(bytes: i64) -> String {
@@ -26,34 +28,19 @@ pub fn StatusBar() -> impl IntoView {
let store = use_context::<crate::store::TorrentStore>().expect("store not provided");
let stats = store.global_stats;
let initial_theme = if let Some(win) = web_sys::window() {
if let Some(doc) = win.document() {
doc.document_element()
.and_then(|el| el.get_attribute("data-theme"))
.unwrap_or_else(|| "dark".to_string())
} else {
"dark".to_string()
}
} else {
"dark".to_string()
};
// Use leptos-use for reactive localStorage management
let (current_theme, set_current_theme, _) = use_local_storage::<String, FromToStringCodec>("vibetorrent_theme");
let (current_theme, set_current_theme) = create_signal(initial_theme);
// Initialize with default if empty
if current_theme.get_untracked().is_empty() {
set_current_theme.set("dark".to_string());
}
// Automatically sync theme to document attribute
create_effect(move |_| {
if let Some(win) = web_sys::window() {
if let Some(storage) = win.local_storage().ok().flatten() {
if let Ok(Some(stored_theme)) = storage.get_item("vibetorrent_theme") {
let theme = stored_theme.to_lowercase();
set_current_theme.set(theme.clone());
if let Some(doc) = win.document() {
let _ = doc
.document_element()
.unwrap()
.set_attribute("data-theme", &theme);
}
}
}
let theme = current_theme.get().to_lowercase();
if let Some(doc) = document().document_element() {
let _ = doc.set_attribute("data-theme", &theme);
}
});
@@ -275,14 +262,6 @@ pub fn StatusBar() -> impl IntoView {
on:pointerdown=move |e| {
e.stop_propagation();
set_current_theme.set(theme.to_string());
if let Some(win) = web_sys::window() {
if let Some(doc) = win.document() {
let _ = doc.document_element().unwrap().set_attribute("data-theme", theme);
}
if let Some(storage) = win.local_storage().ok().flatten() {
let _ = storage.set_item("vibetorrent_theme", theme);
}
}
close_all();
}
>
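
One property worth noting, sketched under the assumption of leptos-use 0.13 semantics (not repo code): the signal returned by use_local_storage is keyed to the storage entry, so another component reading the same key stays in sync without touching window.localStorage by hand.

use codee::string::FromToStringCodec;
use leptos::*;
use leptos_use::storage::use_local_storage;

#[component]
pub fn ThemeLabel() -> impl IntoView {
    // Reads the same key as StatusBar; updates whenever the theme signal changes.
    let (theme, _set_theme, _delete) =
        use_local_storage::<String, FromToStringCodec>("vibetorrent_theme");
    view! { <span>{move || theme.get()}</span> }
}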

View File

@@ -1,6 +1,5 @@
use leptos::*;
use wasm_bindgen::closure::Closure;
use wasm_bindgen::JsCast;
use leptos_use::use_timeout_fn;
use crate::store::{get_action_messages, show_toast_with_signal};
use shared::NotificationLevel;
@@ -423,58 +422,45 @@ pub fn TorrentTable() -> impl IntoView {
let _t_hash = t.hash.clone();
let t_hash_click = t.hash.clone();
let (timer_id, set_timer_id) = create_signal(Option::<i32>::None);
let t_hash_long = t.hash.clone();
let leptos_use::UseTimeoutFnReturn { start, stop, .. } = use_timeout_fn(
move |pos: (i32, i32)| {
set_menu_position.set(pos);
set_selected_hash.set(Some(t_hash_long.clone()));
set_menu_visible.set(true);
let clear_timer = move || {
if let Some(id) = timer_id.get_untracked() {
window().clear_timeout_with_handle(id);
set_timer_id.set(None);
}
};
// Haptic feedback
let navigator = window().navigator();
if let Ok(vibrate) = js_sys::Reflect::get(&navigator, &"vibrate".into()) {
if vibrate.is_function() {
let _ = navigator.vibrate_with_duration(50);
}
}
},
600.0,
);
let handle_touchstart = {
let t_hash = t_hash_long.clone();
move |e: web_sys::TouchEvent| {
clear_timer();
let start = start.clone();
move |e: web_sys::TouchEvent| {
if let Some(touch) = e.touches().get(0) {
let x = touch.client_x();
let y = touch.client_y();
let hash = t_hash.clone();
let closure = Closure::wrap(Box::new(move || {
set_menu_position.set((x, y));
set_selected_hash.set(Some(hash.clone()));
set_menu_visible.set(true);
// Haptic feedback (iOS Safari doesn't support vibrate)
let navigator = window().navigator();
if js_sys::Reflect::has(&navigator, &wasm_bindgen::JsValue::from_str("vibrate")).unwrap_or(false) {
let _ = navigator.vibrate_with_duration(50);
}
}) as Box<dyn Fn()>);
let id = window()
.set_timeout_with_callback_and_timeout_and_arguments_0(
closure.as_ref().unchecked_ref(),
600
)
.unwrap_or(0);
closure.forget();
set_timer_id.set(Some(id));
start((touch.client_x(), touch.client_y()));
}
}
};
let handle_touchmove = move |_| {
clear_timer();
let handle_touchmove = {
let stop = stop.clone();
move |_| stop()
};
let handle_touchend = move |_| {
clear_timer();
let handle_touchend = {
let stop = stop.clone();
move |_| stop()
};
let handle_touchcancel = move |_| stop();
view! {
<div
class=move || {
@@ -492,7 +478,7 @@ pub fn TorrentTable() -> impl IntoView {
on:touchstart=handle_touchstart
on:touchmove=handle_touchmove
on:touchend=handle_touchend
on:touchcancel=handle_touchend
on:touchcancel=handle_touchcancel
>
<div class="card-body gap-3">
<div class="flex justify-between items-start gap-2">

View File

@@ -328,67 +328,46 @@ pub async fn subscribe_to_push_notifications() {
// First, request notification permission if not already granted
let window = web_sys::window().expect("window should exist");
let permission_granted = if let Ok(notification_class) = js_sys::Reflect::get(&window, &"Notification".into()) {
if notification_class.is_undefined() {
log::error!("Notification API not available");
return;
}
// Check current permission
let current_permission = js_sys::Reflect::get(&notification_class, &"permission".into())
.ok()
.and_then(|p| p.as_string())
.unwrap_or_default();
if current_permission == "granted" {
// Notification.permission is a static property in the browser; web_sys exposes it as the static method Notification::permission().
match web_sys::Notification::permission() {
web_sys::NotificationPermission::Granted => {
log::info!("Notification permission already granted");
true
} else if current_permission == "denied" {
}
web_sys::NotificationPermission::Denied => {
log::warn!("Notification permission was denied");
return;
} else {
// Permission is "default" - need to request
}
web_sys::NotificationPermission::Default => {
log::info!("Requesting notification permission...");
if let Ok(request_fn) = js_sys::Reflect::get(&notification_class, &"requestPermission".into()) {
if request_fn.is_function() {
let request_fn_typed = js_sys::Function::from(request_fn);
match request_fn_typed.call0(&notification_class) {
Ok(promise_val) => {
let request_future = wasm_bindgen_futures::JsFuture::from(
js_sys::Promise::from(promise_val)
);
match request_future.await {
Ok(result) => {
let result_str = result.as_string().unwrap_or_default();
log::info!("Permission request result: {}", result_str);
result_str == "granted"
}
Err(e) => {
log::error!("Failed to request notification permission: {:?}", e);
false
}
}
}
Err(e) => {
log::error!("Failed to call requestPermission: {:?}", e);
false
}
}
} else {
false
let permission_promise = match web_sys::Notification::request_permission() {
Ok(p) => p,
Err(e) => {
log::error!("Failed to request notification permission: {:?}", e);
return;
}
};
match wasm_bindgen_futures::JsFuture::from(permission_promise).await {
Ok(val) => {
let permission = val.as_string().unwrap_or_default();
if permission != "granted" {
log::warn!("Notification permission denied by user");
return;
}
log::info!("Notification permission granted by user");
}
Err(e) => {
log::error!("Failed to await notification permission: {:?}", e);
return;
}
} else {
false
}
}
} else {
log::error!("Cannot access Notification class");
return;
};
if !permission_granted {
log::warn!("Notification permission not granted, cannot subscribe to push");
return;
_ => {
log::warn!("Unknown notification permission status");
return;
}
}
log::info!("Notification permission granted! Proceeding with push subscription...");
@@ -433,7 +412,6 @@ pub async fn subscribe_to_push_notifications() {
};
// Get service worker registration
let window = web_sys::window().expect("window should exist");
let navigator = window.navigator();
let service_worker = navigator.service_worker();
@@ -494,38 +472,45 @@ pub async fn subscribe_to_push_notifications() {
.dyn_into::<web_sys::PushSubscription>()
.expect("should be PushSubscription");
// Get subscription JSON using toJSON() method
let json_result = match js_sys::Reflect::get(&push_subscription, &"toJSON".into()) {
// PushSubscription objects can be serialized directly via JSON.stringify which calls their toJSON method internally.
// Or we can use Reflect to call toJSON if we want the object directly.
// Let's use the robust way: call toJSON via Reflect but handle it gracefully.
let json_val = match js_sys::Reflect::get(&push_subscription, &"toJSON".into()) {
Ok(func) if func.is_function() => {
let json_func = js_sys::Function::from(func);
match json_func.call0(&push_subscription) {
Ok(result) => result,
let json_func = js_sys::Function::from(func);
match json_func.call0(&push_subscription) {
Ok(res) => res,
Err(e) => {
log::error!("Failed to call toJSON: {:?}", e);
return;
}
}
}
_ => {
// Fallback: try to stringify the object directly
// log::warn!("toJSON not found, trying JSON.stringify");
let json_str = match js_sys::JSON::stringify(&push_subscription) {
Ok(s) => s,
Err(e) => {
log::error!("Failed to call toJSON: {:?}", e);
log::error!("Failed to stringify subscription: {:?}", e);
return;
}
};
// Parse back to object to match our expected flow (slightly inefficient but safe)
match js_sys::JSON::parse(&String::from(json_str)) {
Ok(v) => v,
Err(e) => {
log::error!("Failed to parse stringified subscription: {:?}", e);
return;
}
}
}
_ => {
log::error!("toJSON method not found on PushSubscription");
return;
}
};
let json_value = match js_sys::JSON::stringify(&json_result) {
Ok(val) => val,
Err(e) => {
log::error!("Failed to stringify subscription: {:?}", e);
return;
}
};
let subscription_json_str = json_value.as_string().expect("should be string");
log::info!("Push subscription: {}", subscription_json_str);
// Parse and send to backend
let subscription_data: serde_json::Value = match serde_json::from_str(&subscription_json_str) {
// Convert JsValue (JSON object) to PushSubscriptionJSON struct via serde
// Note: web_sys::PushSubscriptionJSON is not a struct we can directly use with serde_json usually,
// but we can use serde-wasm-bindgen to convert JsValue -> Rust Struct
let subscription_data: PushSubscriptionData = match serde_wasm_bindgen::from_value(json_val) {
Ok(data) => data,
Err(e) => {
log::error!("Failed to parse subscription JSON: {:?}", e);
@@ -533,37 +518,9 @@ pub async fn subscribe_to_push_notifications() {
}
};
// Extract endpoint and keys
let endpoint = subscription_data
.get("endpoint")
.and_then(|v| v.as_str())
.expect("endpoint should exist")
.to_string();
let keys_obj = subscription_data
.get("keys")
.expect("keys should exist");
let p256dh = keys_obj
.get("p256dh")
.and_then(|v| v.as_str())
.expect("p256dh should exist")
.to_string();
let auth = keys_obj
.get("auth")
.and_then(|v| v.as_str())
.expect("auth should exist")
.to_string();
let push_data = PushSubscriptionData {
endpoint,
keys: PushKeys { p256dh, auth },
};
// Send to backend
// Send to backend (subscription_data is already the struct we need)
let response = match Request::post("/api/push/subscribe")
.json(&push_data)
.json(&subscription_data)
.expect("serialization should succeed")
.send()
.await
@@ -583,34 +540,15 @@ pub async fn subscribe_to_push_notifications() {
}
/// Helper to convert URL-safe base64 string to Uint8Array
/// Uses JavaScript to properly decode binary data (avoids UTF-8 encoding issues)
/// Uses pure Rust base64 crate for better safety and performance
fn url_base64_to_uint8array(base64_string: &str) -> Result<js_sys::Uint8Array, JsValue> {
// Add padding
let padding = (4 - (base64_string.len() % 4)) % 4;
let mut padded = base64_string.to_string();
padded.push_str(&"=".repeat(padding));
use base64::{engine::general_purpose, Engine as _};
// Replace URL-safe characters
let standard_base64 = padded.replace('-', "+").replace('_', "/");
// VAPID keys are URL-safe base64. Try both NO_PAD and padded for robustness.
let bytes = general_purpose::URL_SAFE_NO_PAD
.decode(base64_string)
.or_else(|_| general_purpose::URL_SAFE.decode(base64_string))
.map_err(|e| JsValue::from_str(&format!("Base64 decode error: {}", e)))?;
// Decode using JavaScript to avoid UTF-8 encoding issues
// Create a JavaScript function to decode the base64 and convert to Uint8Array
let js_code = format!(
r#"
(function() {{
const binaryString = atob('{}');
const bytes = new Uint8Array(binaryString.length);
for (let i = 0; i < binaryString.length; i++) {{
bytes[i] = binaryString.charCodeAt(i);
}}
return bytes;
}})()
"#,
standard_base64
);
let result = js_sys::eval(&js_code)?;
let array = result.dyn_into::<js_sys::Uint8Array>()?;
Ok(array)
Ok(js_sys::Uint8Array::from(&bytes[..]))
}
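
A small worked check of the decode fallback (a sketch, not a test from the repo, assuming base64 0.22's default engine configs): URL_SAFE_NO_PAD rejects padded input, so the or_else branch covers keys that happen to arrive with '=' padding.

use base64::{engine::general_purpose, Engine as _};

// Same fallback order as url_base64_to_uint8array above.
fn decode_url_safe(s: &str) -> Result<Vec<u8>, base64::DecodeError> {
    general_purpose::URL_SAFE_NO_PAD
        .decode(s)
        .or_else(|_| general_purpose::URL_SAFE.decode(s))
}

fn main() {
    assert_eq!(decode_url_safe("AQID").unwrap(), vec![1, 2, 3]); // unpadded: first engine succeeds
    assert_eq!(decode_url_safe("AQI=").unwrap(), vec![1, 2]);    // padded: falls back to URL_SAFE
}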