This commit is contained in:
Julian Freeman
2026-04-19 10:26:07 -04:00
parent e86bc86793
commit bcadf36b71
15 changed files with 1236 additions and 411 deletions

68
src-tauri/Cargo.lock generated
View File

@@ -19,6 +19,18 @@ dependencies = [
"cpufeatures",
]
[[package]]
name = "ahash"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "1.1.4"
@@ -1020,6 +1032,18 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "fallible-iterator"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -1543,12 +1567,30 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
"ahash",
]
[[package]]
name = "hashbrown"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
name = "hashlink"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
dependencies = [
"hashbrown 0.14.5",
]
[[package]]
name = "heck"
version = "0.4.1"
@@ -2110,6 +2152,17 @@ dependencies = [
"libc",
]
[[package]]
name = "libsqlite3-sys"
version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
dependencies = [
"cc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "libz-rs-sys"
version = "0.5.2"
@@ -3455,6 +3508,20 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "rusqlite"
version = "0.32.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e"
dependencies = [
"bitflags 2.10.0",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
"libsqlite3-sys",
"smallvec",
]
[[package]]
name = "rustc-hash"
version = "2.1.1"
@@ -3979,6 +4046,7 @@ dependencies = [
"futures-util",
"regex",
"reqwest",
"rusqlite",
"serde",
"serde_json",
"tauri",

View File

@@ -27,3 +27,4 @@ futures-util = "0.3"
regex = "1.10"
uuid = { version = "1.0", features = ["v4", "serde"] }
zip = "6.0.0"
rusqlite = { version = "0.32", features = ["bundled"] }

View File

@@ -20,6 +20,27 @@ const QJS_REPO_URL: &str = "https://bellard.org/quickjs/binary_releases";
const FFMPEG_GITHUB_API: &str = "https://api.github.com/repos/BtbN/FFmpeg-Builds/releases/latest";
const FFMPEG_EVERMEET_BASE: &str = "https://evermeet.cx/ffmpeg";
/// Availability snapshot for the external runtimes (ffmpeg + JS engine),
/// built by `get_runtime_status` and serialized to the frontend.
#[derive(serde::Serialize, Clone, Debug)]
pub struct RuntimeStatus {
/// "system", "managed", or "unavailable".
pub ffmpeg_source: String,
/// ffmpeg version string, or "未安装" (not installed) when unavailable.
pub ffmpeg_version: String,
/// Human-readable runtime name/version, or "未安装" when unavailable.
pub js_runtime_name: String,
/// "system", "managed", or "unavailable".
pub js_runtime_source: String,
}
/// Which ffmpeg installation was resolved for this run.
#[derive(Clone, Debug)]
pub enum FfmpegLocation {
/// A system `ffmpeg` reachable on PATH (`ffmpeg -version` succeeded).
System,
/// The app-managed binary at the given path.
/// NOTE(review): `resolve_ffmpeg` with `allow_download = true` may return
/// this variant before the file actually exists — confirm at call sites.
Managed(PathBuf),
}
/// Which JavaScript runtime yt-dlp should be told to use (via `--js-runtimes`).
#[derive(Clone, Debug)]
pub enum JsRuntime {
/// System `deno` found on PATH.
Deno,
/// System `node` found on PATH.
Node,
/// App-managed QuickJS binary at the given absolute path.
ManagedQuickJs(PathBuf),
}
pub fn get_ytdlp_binary_name() -> &'static str {
if cfg!(target_os = "windows") {
"yt-dlp.exe"
@@ -79,10 +100,7 @@ pub fn get_ffmpeg_path(app: &AppHandle) -> Result<PathBuf> {
}
pub fn check_binaries(app: &AppHandle) -> bool {
let ytdlp = get_ytdlp_path(app).map(|p| p.exists()).unwrap_or(false);
let qjs = get_qjs_path(app).map(|p| p.exists()).unwrap_or(false);
let ffmpeg = get_ffmpeg_path(app).map(|p| p.exists()).unwrap_or(false);
ytdlp && qjs && ffmpeg
get_ytdlp_path(app).map(|p| p.exists()).unwrap_or(false)
}
// --- yt-dlp Logic ---
@@ -436,6 +454,10 @@ pub async fn update_ffmpeg(app: &AppHandle) -> Result<String> {
}
pub fn get_ffmpeg_version(app: &AppHandle) -> Result<String> {
if let Some(version) = run_version_command("ffmpeg", "-version") {
return Ok(version);
}
let path = get_ffmpeg_path(app)?;
if !path.exists() {
return Ok("未安装".to_string());
@@ -505,37 +527,138 @@ pub async fn ensure_binaries(app: &AppHandle) -> Result<()> {
}
}
let qjs = get_qjs_path(app)?;
if !qjs.exists() {
download_qjs(app).await?;
} else {
#[cfg(target_os = "macos")]
{
std::process::Command::new("xattr")
.arg("-d")
.arg("com.apple.quarantine")
.arg(&qjs)
.output()
.ok();
Ok(())
}
/// Runs `<command> <arg>` and returns the first non-empty output line
/// (presumably stdout — see `first_non_empty_line`), or `None` when the
/// command cannot be spawned or exits non-zero. Doubles as an existence
/// probe for system binaries on PATH.
fn run_version_command(command: &str, arg: &str) -> Option<String> {
let mut cmd = std::process::Command::new(command);
cmd.arg(arg);
// 0x08000000 = CREATE_NO_WINDOW: avoid flashing a console on Windows.
#[cfg(target_os = "windows")]
cmd.creation_flags(0x08000000);
cmd.output()
.ok()
.filter(|output| output.status.success())
.and_then(|output| first_non_empty_line(&output))
}
/// Locates an ffmpeg installation without downloading anything.
///
/// Resolution order: system `ffmpeg` on PATH first, then the app-managed
/// binary on disk. With `allow_download = true` the managed path is returned
/// even when the file does not exist yet.
/// NOTE(review): in that case the caller appears responsible for performing
/// the actual download into that path — confirm against call sites.
pub fn resolve_ffmpeg(app: &AppHandle, allow_download: bool) -> Result<Option<FfmpegLocation>> {
if run_version_command("ffmpeg", "-version").is_some() {
return Ok(Some(FfmpegLocation::System));
}
let managed = get_ffmpeg_path(app)?;
if managed.exists() {
return Ok(Some(FfmpegLocation::Managed(managed)));
}
if allow_download {
return Ok(Some(FfmpegLocation::Managed(managed)));
}
Ok(None)
}
/// Returns a usable ffmpeg location, downloading the managed build as a
/// last resort when neither a system binary nor an existing managed binary
/// is found. Mirrors `ensure_js_runtime_available`.
pub async fn ensure_ffmpeg_available(app: &AppHandle) -> Result<Option<FfmpegLocation>> {
    match resolve_ffmpeg(app, false)? {
        Some(location) => Ok(Some(location)),
        None => {
            let downloaded = download_ffmpeg(app).await?;
            Ok(Some(FfmpegLocation::Managed(downloaded)))
        }
    }
}
/// Picks a JS runtime for yt-dlp without downloading: prefers a system
/// `deno`, then a system `node`, then the app-managed QuickJS binary if it
/// is already on disk. With `allow_download = true` the managed path is
/// returned even when the binary is missing.
/// NOTE(review): callers appear responsible for the actual download in that
/// case — confirm at call sites.
pub fn resolve_js_runtime(app: &AppHandle, allow_download: bool) -> Result<Option<JsRuntime>> {
if run_version_command("deno", "--version").is_some() {
return Ok(Some(JsRuntime::Deno));
}
if run_version_command("node", "--version").is_some() {
return Ok(Some(JsRuntime::Node));
}
let managed = get_qjs_path(app)?;
if managed.exists() {
return Ok(Some(JsRuntime::ManagedQuickJs(managed)));
}
if allow_download {
return Ok(Some(JsRuntime::ManagedQuickJs(managed)));
}
Ok(None)
}
/// Returns a usable JS runtime, downloading the managed QuickJS build as a
/// last resort when no system runtime (deno/node) or on-disk managed binary
/// is found. Mirrors `ensure_ffmpeg_available`.
pub async fn ensure_js_runtime_available(app: &AppHandle) -> Result<Option<JsRuntime>> {
    match resolve_js_runtime(app, false)? {
        Some(runtime) => Ok(Some(runtime)),
        None => {
            let downloaded = download_qjs(app).await?;
            Ok(Some(JsRuntime::ManagedQuickJs(downloaded)))
        }
    }
}
impl FfmpegLocation {
pub fn source_label(&self) -> &'static str {
match self {
FfmpegLocation::System => "system",
FfmpegLocation::Managed(_) => "managed",
}
}
let ffmpeg = get_ffmpeg_path(app)?;
if !ffmpeg.exists() {
download_ffmpeg(app).await?;
} else {
#[cfg(target_os = "macos")]
{
std::process::Command::new("xattr")
.arg("-d")
.arg("com.apple.quarantine")
.arg(&ffmpeg)
.output()
.ok();
pub fn version(&self, app: &AppHandle) -> Result<String> {
match self {
FfmpegLocation::System => Ok(run_version_command("ffmpeg", "-version").unwrap_or_else(|| "未知".to_string())),
FfmpegLocation::Managed(_) => get_ffmpeg_version(app),
}
}
Ok(())
}
impl JsRuntime {
/// Label reported to the frontend: "system" for PATH-resolved runtimes
/// (deno/node), "managed" for the app-downloaded QuickJS.
pub fn source_label(&self) -> &'static str {
match self {
JsRuntime::Deno | JsRuntime::Node => "system",
JsRuntime::ManagedQuickJs(_) => "managed",
}
}
/// Human-readable name/version for display. System runtimes report their
/// `--version` output, falling back to the bare binary name if the probe
/// fails; the managed QuickJS defers to `get_qjs_version`.
pub fn display_name(&self, app: &AppHandle) -> Result<String> {
match self {
JsRuntime::Deno => Ok(run_version_command("deno", "--version").unwrap_or_else(|| "deno".to_string())),
JsRuntime::Node => Ok(run_version_command("node", "--version").unwrap_or_else(|| "node".to_string())),
JsRuntime::ManagedQuickJs(_) => get_qjs_version(app),
}
}
/// Value passed to yt-dlp's `--js-runtimes` flag. For the managed runtime
/// the absolute path is embedded as `quickjs:<path>`.
pub fn yt_dlp_argument(&self) -> String {
match self {
JsRuntime::Deno => "deno".to_string(),
JsRuntime::Node => "node".to_string(),
JsRuntime::ManagedQuickJs(path) => format!("quickjs:{}", path.to_string_lossy()),
}
}
}
/// Builds a `RuntimeStatus` snapshot for the frontend. Probes only —
/// `allow_download = false` on both resolvers, so this never downloads.
pub async fn get_runtime_status(app: &AppHandle) -> Result<RuntimeStatus> {
let ffmpeg = resolve_ffmpeg(app, false)?;
let js_runtime = resolve_js_runtime(app, false)?;
// "unavailable" / "未安装" are the sentinel values shown when nothing resolved.
let (ffmpeg_source, ffmpeg_version) = match ffmpeg {
Some(location) => (location.source_label().to_string(), location.version(app)?),
None => ("unavailable".to_string(), "未安装".to_string()),
};
let (js_runtime_name, js_runtime_source) = match js_runtime {
Some(runtime) => (runtime.display_name(app)?, runtime.source_label().to_string()),
None => ("未安装".to_string(), "unavailable".to_string()),
};
Ok(RuntimeStatus {
ffmpeg_source,
ffmpeg_version,
js_runtime_name,
js_runtime_source,
})
}
#[cfg(test)]

View File

@@ -1,56 +1,65 @@
// filepath: src-tauri/src/commands.rs
use tauri::{AppHandle, Manager};
use crate::{binary_manager, downloader, storage};
use crate::downloader::DownloadOptions;
use crate::storage::{Settings, HistoryItem};
use uuid::Uuid;
use std::path::Path;
use std::sync::LazyLock;
use tauri::{AppHandle, Emitter, Manager};
use tokio::sync::Semaphore;
use uuid::Uuid;
use crate::binary_manager;
use crate::downloader::{self, DownloadOptions};
use crate::storage::{self, Settings, TaskLogEntry, TaskRecord};
use crate::task_runtime;
static DOWNLOAD_SEMAPHORE: LazyLock<Semaphore> = LazyLock::new(|| Semaphore::new(3));
#[tauri::command]
pub async fn init_ytdlp(app: AppHandle) -> Result<bool, String> {
storage::initialize_storage(&app).map_err(|error| error.to_string())?;
storage::recover_incomplete_tasks(&app).map_err(|error| error.to_string())?;
if binary_manager::check_binaries(&app) {
return Ok(true);
}
// If not found, try to download
match binary_manager::ensure_binaries(&app).await {
Ok(_) => Ok(true),
Err(e) => Err(format!("Failed to download binaries: {}", e)),
}
binary_manager::ensure_binaries(&app)
.await
.map(|_| true)
.map_err(|error| format!("Failed to prepare runtime: {error}"))
}
#[tauri::command]
pub async fn update_ytdlp(app: AppHandle) -> Result<String, String> {
binary_manager::update_ytdlp(&app).await.map_err(|e| e.to_string())
binary_manager::update_ytdlp(&app).await.map_err(|error| error.to_string())
}
#[tauri::command]
pub async fn update_quickjs(app: AppHandle) -> Result<String, String> {
binary_manager::update_qjs(&app).await.map_err(|e| e.to_string())
binary_manager::update_qjs(&app).await.map_err(|error| error.to_string())
}
#[tauri::command]
pub async fn update_ffmpeg(app: AppHandle) -> Result<String, String> {
binary_manager::update_ffmpeg(&app).await.map_err(|e| e.to_string())
binary_manager::update_ffmpeg(&app).await.map_err(|error| error.to_string())
}
#[tauri::command]
pub fn get_ytdlp_version(app: AppHandle) -> Result<String, String> {
binary_manager::get_ytdlp_version(&app).map_err(|e| e.to_string())
binary_manager::get_ytdlp_version(&app).map_err(|error| error.to_string())
}
#[tauri::command]
pub fn get_quickjs_version(app: AppHandle) -> Result<String, String> {
binary_manager::get_qjs_version(&app).map_err(|e| e.to_string())
binary_manager::get_qjs_version(&app).map_err(|error| error.to_string())
}
#[tauri::command]
pub fn get_ffmpeg_version(app: AppHandle) -> Result<String, String> {
binary_manager::get_ffmpeg_version(&app).map_err(|e| e.to_string())
binary_manager::get_ffmpeg_version(&app).map_err(|error| error.to_string())
}
/// Tauri command: forwards `binary_manager::get_runtime_status` to the
/// frontend, stringifying any error.
#[tauri::command]
pub async fn get_runtime_status(app: AppHandle) -> Result<binary_manager::RuntimeStatus, String> {
binary_manager::get_runtime_status(&app).await.map_err(|error| error.to_string())
}
#[tauri::command]
@@ -58,11 +67,15 @@ pub async fn fetch_image(url: String) -> Result<String, String> {
use base64::{Engine as _, engine::general_purpose};
let client = reqwest::Client::new();
let res = client.get(&url)
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36")
let res = client
.get(&url)
.header(
"User-Agent",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
)
.send()
.await
.map_err(|e| e.to_string())?;
.map_err(|error| error.to_string())?;
if !res.status().is_success() {
return Err(format!("image fetch failed with status {}", res.status()));
@@ -82,9 +95,7 @@ pub async fn fetch_image(url: String) -> Result<String, String> {
"image/jpeg".to_string()
}
});
let bytes = res.bytes().await.map_err(|e| e.to_string())?;
// Convert to base64
let bytes = res.bytes().await.map_err(|error| error.to_string())?;
let b64 = general_purpose::STANDARD.encode(&bytes);
Ok(format!("data:{};base64,{}", mime, b64))
@@ -92,67 +103,88 @@ pub async fn fetch_image(url: String) -> Result<String, String> {
#[tauri::command]
pub async fn fetch_metadata(app: AppHandle, url: String, parse_mix_playlist: bool) -> Result<downloader::MetadataResult, String> {
downloader::fetch_metadata(&app, &url, parse_mix_playlist).await.map_err(|e| e.to_string())
downloader::fetch_metadata(&app, &url, parse_mix_playlist)
.await
.map_err(|error| error.to_string())
}
#[tauri::command]
pub async fn start_download(app: AppHandle, url: String, options: DownloadOptions, metadata: downloader::VideoMetadata) -> Result<String, String> {
// Generate a task ID
pub async fn start_download(
app: AppHandle,
url: String,
options: DownloadOptions,
metadata: downloader::VideoMetadata,
) -> Result<String, String> {
let id = Uuid::new_v4().to_string();
let normalized_url = metadata.url.clone().unwrap_or_else(|| url.clone());
let task = storage::create_task(&app, &id, &url, &normalized_url, &options, &metadata)
.map_err(|error| error.to_string())?;
app.emit("task-updated", &task).ok();
let id_clone = id.clone();
// Spawn the download task
tauri::async_runtime::spawn(async move {
let _permit = DOWNLOAD_SEMAPHORE.acquire().await.ok();
let res = downloader::download_video(app.clone(), id_clone.clone(), url.clone(), options.clone()).await;
let status = if res.is_ok() { "success" } else { "failed" };
let file_path = res.ok().flatten();
// Add to history
let output_dir = options.output_path.clone(); // Store the directory user selected
let item = HistoryItem {
id: id_clone,
title: metadata.title,
thumbnail: metadata.thumbnail,
url: url,
output_path: output_dir,
file_path,
timestamp: chrono::Utc::now(),
status: status.to_string(),
format: options.output_format,
};
let _ = storage::add_history_item(&app, item);
if let Err(error) = downloader::download_video(app.clone(), id_clone.clone(), url.clone(), options.clone()).await {
storage::add_log_entry(&app, &id_clone, "error", &error.to_string()).ok();
}
});
Ok(id)
}
/// Tauri command: cancels the running download identified by `id`.
#[tauri::command]
pub async fn cancel_task(app: AppHandle, id: String) -> Result<(), String> {
// The app handle is unused here; kept so all commands share a uniform signature.
let _ = app;
task_runtime::cancel_task(&id)
.await
.map(|_| ())
.map_err(|error| error.to_string())
}
/// Tauri command: re-runs a stored task by reloading its original URL,
/// options, and metadata, then delegating to `start_download` (which
/// issues a fresh task id).
#[tauri::command]
pub async fn retry_task(app: AppHandle, id: String) -> Result<String, String> {
let payload = storage::get_task_payload(&app, &id).map_err(|error| error.to_string())?;
start_download(app, payload.source_url, payload.options, payload.metadata).await
}
/// Tauri command: returns all stored task records for the task list UI.
#[tauri::command]
pub fn get_tasks(app: AppHandle) -> Result<Vec<TaskRecord>, String> {
storage::list_tasks(&app).map_err(|error| error.to_string())
}
/// Tauri command: returns the persisted task log entries.
#[tauri::command]
pub fn get_task_logs(app: AppHandle) -> Result<Vec<TaskLogEntry>, String> {
storage::load_logs(&app).map_err(|error| error.to_string())
}
/// Tauri command: deletes all persisted task log entries.
#[tauri::command]
pub fn clear_task_logs(app: AppHandle) -> Result<(), String> {
storage::clear_logs(&app).map_err(|error| error.to_string())
}
#[tauri::command]
pub fn get_settings(app: AppHandle) -> Result<Settings, String> {
storage::load_settings(&app).map_err(|e| e.to_string())
storage::load_settings(&app).map_err(|error| error.to_string())
}
#[tauri::command]
pub fn save_settings(app: AppHandle, settings: Settings) -> Result<(), String> {
storage::save_settings(&app, &settings).map_err(|e| e.to_string())
storage::save_settings(&app, &settings).map_err(|error| error.to_string())
}
#[tauri::command]
pub fn get_history(app: AppHandle) -> Result<Vec<HistoryItem>, String> {
storage::load_history(&app).map_err(|e| e.to_string())
pub fn get_history(app: AppHandle) -> Result<Vec<storage::HistoryItem>, String> {
storage::load_history(&app).map_err(|error| error.to_string())
}
#[tauri::command]
pub fn clear_history(app: AppHandle) -> Result<(), String> {
storage::clear_history(&app).map_err(|e| e.to_string())
storage::clear_history(&app).map_err(|error| error.to_string())
}
#[tauri::command]
pub fn delete_history_item(app: AppHandle, id: String) -> Result<(), String> {
storage::delete_history_item(&app, &id).map_err(|e| e.to_string())
storage::delete_history_item(&app, &id).map_err(|error| error.to_string())
}
#[tauri::command]
@@ -170,8 +202,9 @@ pub fn open_in_explorer(app: AppHandle, path: String) -> Result<(), String> {
let resolved_path = if Path::new(&path).exists() {
path
} else {
app.path().download_dir()
.map(|p| p.to_string_lossy().to_string())
app.path()
.download_dir()
.map(|value| value.to_string_lossy().to_string())
.unwrap_or_else(|_| ".".to_string())
};
@@ -186,7 +219,7 @@ pub fn open_in_explorer(app: AppHandle, path: String) -> Result<(), String> {
command.arg(resolved);
}
command.spawn().map_err(|e| e.to_string())?;
command.spawn().map_err(|error| error.to_string())?;
}
#[cfg(target_os = "macos")]
{
@@ -199,7 +232,7 @@ pub fn open_in_explorer(app: AppHandle, path: String) -> Result<(), String> {
command.arg(resolved);
}
command.spawn().map_err(|e| e.to_string())?;
command.spawn().map_err(|error| error.to_string())?;
}
Ok(())
}

View File

@@ -1,13 +1,17 @@
// filepath: src-tauri/src/downloader.rs
use std::process::Stdio;
use std::sync::Arc;
use anyhow::{Result, anyhow};
use regex::Regex;
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter};
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use std::process::Stdio;
use serde::{Deserialize, Serialize};
use anyhow::{Result, anyhow};
use regex::Regex;
use crate::binary_manager;
use crate::storage;
use tokio::sync::Mutex;
use crate::binary_manager::{self, FfmpegLocation};
use crate::storage::{self, TaskRecord};
use crate::task_runtime;
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct VideoMetadata {
@@ -17,6 +21,8 @@ pub struct VideoMetadata {
pub duration: Option<f64>,
pub uploader: Option<String>,
pub url: Option<String>,
pub extractor: Option<String>,
pub site_name: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -33,12 +39,12 @@ pub enum MetadataResult {
Playlist(PlaylistMetadata),
}
#[derive(Deserialize, Debug, Clone)]
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct DownloadOptions {
pub is_audio_only: bool,
pub quality: String, // e.g., "1080", "720", "best"
pub output_path: String, // Directory
pub output_format: String, // "original", "mp4", "webm", "mkv", "m4a", "aac", "opus", "vorbis", "wav", etc.
pub quality: String,
pub output_path: String,
pub output_format: String,
pub cookies_path: Option<String>,
}
@@ -47,58 +53,75 @@ pub struct ProgressEvent {
pub id: String,
pub progress: f64,
pub speed: String,
pub status: String, // "downloading", "processing", "finished", "error"
pub eta: Option<String>,
pub status: String,
}
#[derive(Serialize, Clone, Debug)]
pub struct LogEvent {
pub id: String,
pub message: String,
pub level: String, // "info", "error"
pub level: String,
}
const FINAL_PATH_MARKER: &str = "__STREAM_CAPTURE_FINAL_PATH__";
/// Pushes the latest task record to the frontend as a `task-updated` event;
/// delivery failures are deliberately ignored.
fn emit_task(app: &AppHandle, task: &TaskRecord) {
app.emit("task-updated", task).ok();
}
/// Persists a log line for `task_id` and mirrors it to the frontend as a
/// `download-log` event. Both steps are best effort: logging must never
/// abort a download, so errors are swallowed with `.ok()`.
fn emit_log(app: &AppHandle, task_id: &str, message: impl Into<String>, level: &str) {
let message = message.into();
storage::add_log_entry(app, task_id, level, &message).ok();
app.emit(
"download-log",
LogEvent {
id: task_id.to_string(),
message,
level: level.to_string(),
},
)
.ok();
}
/// Emits a `download-progress` event for `task_id`. `progress` is a
/// percentage (0-100 as parsed from yt-dlp output), `status` a state tag
/// such as "downloading". Delivery failures are ignored.
fn emit_progress(app: &AppHandle, task_id: &str, progress: f64, speed: String, eta: Option<String>, status: &str) {
app.emit(
"download-progress",
ProgressEvent {
id: task_id.to_string(),
progress,
speed,
eta,
status: status.to_string(),
},
)
.ok();
}
pub async fn fetch_metadata(app: &AppHandle, url: &str, parse_mix_playlist: bool) -> Result<MetadataResult> {
app.emit("download-log", LogEvent {
id: "Analysis".to_string(),
message: format!("正在为 URL: {} 获取元数据", url),
level: "info".to_string(),
}).ok();
emit_log(app, "Analysis", format!("正在为 URL: {} 获取元数据", url), "info");
let ytdlp_path = binary_manager::get_ytdlp_path(app)?;
let qjs_path = binary_manager::get_qjs_path(app)?; // Get absolute path to quickjs
// Load settings to check for cookies
let js_runtime = binary_manager::ensure_js_runtime_available(app).await?;
let settings = storage::load_settings(app)?;
let mut args = Vec::new();
// Pass the runtime and its absolute path to --js-runtimes
// Rust's Command automatically handles spaces in arguments, so we should NOT quote the path here.
args.push("--js-runtimes".to_string());
args.push(format!("quickjs:{}", qjs_path.to_string_lossy()));
let mut args = Vec::new();
if let Some(runtime) = js_runtime {
args.push("--js-runtimes".to_string());
args.push(runtime.yt_dlp_argument());
}
let mut has_cookies = false;
if let Some(cookies) = &settings.cookies_path {
if !cookies.is_empty() {
if std::path::Path::new(cookies).exists() {
args.push("--cookies".to_string());
args.push(cookies.clone());
has_cookies = true;
app.emit("download-log", LogEvent {
id: "Analysis".to_string(),
message: format!("已加载 Cookies: {}", cookies),
level: "info".to_string(),
}).ok();
} else {
app.emit("download-log", LogEvent {
id: "Analysis".to_string(),
message: format!("Cookies 文件不存在: {}", cookies),
level: "error".to_string(),
}).ok();
}
if std::path::Path::new(cookies).exists() {
args.push("--cookies".to_string());
args.push(cookies.clone());
has_cookies = true;
emit_log(app, "Analysis", format!("已加载 Cookies: {}", cookies), "info");
} else {
emit_log(app, "Analysis", format!("Cookies 文件不存在: {}", cookies), "error");
}
}
}
@@ -107,12 +130,9 @@ pub async fn fetch_metadata(app: &AppHandle, url: &str, parse_mix_playlist: bool
args.push("--no-warnings".to_string());
if has_cookies {
// When using cookies, avoid skipping JS player to prevent challenge errors
args.push("--extractor-args".to_string());
args.push("youtube:skip=dash,hls,translated_subs".to_string());
} else {
// Optimize metadata fetching: skip heavy manifests and player JS execution.
// Skipping JS prevents slow QuickJS spin-up and signature decryption, drastically speeding up single video parsing.
args.push("--extractor-args".to_string());
args.push("youtube:skip=dash,hls,translated_subs;player_skip=js".to_string());
}
@@ -127,133 +147,135 @@ pub async fn fetch_metadata(app: &AppHandle, url: &str, parse_mix_playlist: bool
let mut cmd = Command::new(&ytdlp_path);
#[cfg(target_os = "windows")]
cmd.creation_flags(0x08000000);
cmd.args(&args);
cmd.stderr(Stdio::piped());
// Log the full command
let full_cmd_str = format!("{} {}", ytdlp_path.to_string_lossy(), args.join(" "));
app.emit("download-log", LogEvent {
id: "Analysis".to_string(),
message: format!("正在执行分析命令: {}", full_cmd_str),
level: "info".to_string(),
}).ok();
emit_log(
app,
"Analysis",
format!("正在执行分析命令: {} {}", ytdlp_path.to_string_lossy(), args.join(" ")),
"info",
);
let output = cmd.output().await?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
app.emit("download-log", LogEvent {
id: "Analysis".to_string(),
message: format!("元数据获取失败: {}", stderr),
level: "error".to_string(),
}).ok();
emit_log(app, "Analysis", format!("元数据获取失败: {}", stderr), "error");
return Err(anyhow!("yt-dlp error: {}", stderr));
}
let stdout = String::from_utf8_lossy(&output.stdout);
let json: serde_json::Value = serde_json::from_str(&stdout)?;
// Check if playlist
if let Some(_type) = json.get("_type") {
if _type == "playlist" {
let entries_json = json["entries"].as_array().ok_or(anyhow!("No entries in playlist"))?;
let mut entries = Vec::new();
for entry in entries_json {
entries.push(parse_video_metadata(entry));
}
let result = MetadataResult::Playlist(PlaylistMetadata {
id: json["id"].as_str().unwrap_or("").to_string(),
title: json["title"].as_str().unwrap_or("Unknown Playlist").to_string(),
entries,
});
app.emit("download-log", LogEvent {
id: "Analysis".to_string(),
message: "元数据获取成功(播放列表)".to_string(),
level: "info".to_string(),
}).ok();
return Ok(result);
}
if json.get("_type").and_then(|value| value.as_str()) == Some("playlist") {
let entries_json = json["entries"].as_array().ok_or_else(|| anyhow!("No entries in playlist"))?;
let entries = entries_json.iter().map(parse_video_metadata).collect();
let result = MetadataResult::Playlist(PlaylistMetadata {
id: json["id"].as_str().unwrap_or("").to_string(),
title: json["title"].as_str().unwrap_or("Unknown Playlist").to_string(),
entries,
});
emit_log(app, "Analysis", "元数据获取成功(播放列表)", "info");
return Ok(result);
}
// Single video
let result = MetadataResult::Video(parse_video_metadata(&json));
app.emit("download-log", LogEvent {
id: "Analysis".to_string(),
message: "元数据获取成功(视频)".to_string(),
level: "info".to_string(),
}).ok();
emit_log(app, "Analysis", "元数据获取成功(视频)", "info");
Ok(result)
}
fn parse_video_metadata(json: &serde_json::Value) -> VideoMetadata {
let id = json["id"].as_str().unwrap_or("").to_string();
// Thumbnail fallback logic
let thumbnail = match json.get("thumbnail").and_then(|t| t.as_str()) {
Some(t) if !t.is_empty() => t.to_string(),
_ => format!("https://i.ytimg.com/vi/{}/mqdefault.jpg", id),
let extractor = json["extractor_key"].as_str().map(|value| value.to_string());
let site_name = json["extractor"].as_str().map(|value| value.to_string());
let thumbnail = match json.get("thumbnail").and_then(|value| value.as_str()) {
Some(value) if !value.is_empty() => value.to_string(),
_ if extractor
.as_deref()
.map(|value| value.to_lowercase().contains("youtube"))
.unwrap_or(false) =>
{
format!("https://i.ytimg.com/vi/{}/mqdefault.jpg", id)
}
_ => String::new(),
};
let url = json["webpage_url"].as_str()
let url = json["webpage_url"]
.as_str()
.or_else(|| json["url"].as_str())
.map(|s| s.to_string());
.map(|value| value.to_string());
VideoMetadata {
id,
title: json["title"].as_str().unwrap_or("Unknown Title").to_string(),
thumbnail,
duration: json["duration"].as_f64(),
uploader: json["uploader"].as_str().map(|s| s.to_string()),
uploader: json["uploader"].as_str().map(|value| value.to_string()),
url,
extractor,
site_name,
}
}
/// Appends yt-dlp CLI flags describing the resolved runtimes.
///
/// Adds `--js-runtimes <spec>` when a JS runtime was resolved, and
/// `--ffmpeg-location <path>` only for an app-managed ffmpeg (a system
/// ffmpeg needs no explicit location flag).
fn apply_runtime_args(args: &mut Vec<String>, ffmpeg: &Option<FfmpegLocation>, js_runtime: &Option<binary_manager::JsRuntime>) {
    if let Some(runtime) = js_runtime.as_ref() {
        args.extend(["--js-runtimes".to_string(), runtime.yt_dlp_argument()]);
    }
    match ffmpeg {
        Some(FfmpegLocation::Managed(path)) => {
            args.extend(["--ffmpeg-location".to_string(), path.to_string_lossy().into_owned()]);
        }
        // System ffmpeg or none resolved: nothing to add.
        _ => {}
    }
}
/// True when the requested options require ffmpeg post-processing:
/// audio extraction, container/format conversion, or a quality cap
/// (which forces a separate-stream merge).
fn needs_ffmpeg(options: &DownloadOptions) -> bool {
    let wants_transcode = options.output_format != "original";
    let wants_quality_cap = options.quality != "best";
    options.is_audio_only || wants_transcode || wants_quality_cap
}
pub async fn download_video(
app: AppHandle,
id: String, // Unique ID for this download task (provided by frontend)
id: String,
url: String,
options: DownloadOptions,
) -> Result<Option<String>> {
let ytdlp_path = binary_manager::get_ytdlp_path(&app)?;
let qjs_path = binary_manager::get_qjs_path(&app)?; // Get absolute path to quickjs
let ffmpeg_path = binary_manager::get_ffmpeg_path(&app)?; // Get absolute path to ffmpeg
let js_runtime = binary_manager::ensure_js_runtime_available(&app).await?;
let ffmpeg = if needs_ffmpeg(&options) {
binary_manager::ensure_ffmpeg_available(&app).await?
} else {
binary_manager::resolve_ffmpeg(&app, false)?
};
let task = storage::update_task_status(&app, &id, "preparing", None, None)?;
emit_task(&app, &task);
let mut args = Vec::new();
// Pass the runtime and its absolute path to --js-runtimes
args.push("--js-runtimes".to_string());
// Rust's Command automatically handles spaces in arguments, so we should NOT quote the path here.
args.push(format!("quickjs:{}", qjs_path.to_string_lossy()));
// Pass ffmpeg location so yt-dlp can find our managed ffmpeg
args.push("--ffmpeg-location".to_string());
args.push(ffmpeg_path.to_string_lossy().to_string());
apply_runtime_args(&mut args, &ffmpeg, &js_runtime);
if let Some(cookies) = &options.cookies_path {
if !cookies.is_empty() {
args.push("--cookies".to_string());
args.push(cookies.clone());
args.push("--cookies".to_string());
args.push(cookies.clone());
}
}
args.push(url);
// Output template
let output_template = format!("{}/%(title)s.%(ext)s", options.output_path.trim_end_matches(std::path::MAIN_SEPARATOR));
args.push(url.clone());
let output_template = format!(
"{}/%(title)s.%(ext)s",
options.output_path.trim_end_matches(std::path::MAIN_SEPARATOR)
);
args.push("-o".to_string());
args.push(output_template);
args.push("--print".to_string());
args.push(format!("after_move:{FINAL_PATH_MARKER}%(filepath)s"));
// Formats
if options.is_audio_only {
args.push("-x".to_string());
// Only set audio format if not "original"
if options.output_format != "original" {
args.push("--audio-format".to_string());
args.push(options.output_format.clone());
@@ -262,43 +284,46 @@ pub async fn download_video(
let format_arg = if options.quality == "best" {
"bestvideo+bestaudio/best".to_string()
} else {
format!("bestvideo[height<={}]+bestaudio/best[height<={}]/best", options.quality, options.quality)
format!(
"bestvideo[height<={}]+bestaudio/best[height<={}]/best",
options.quality, options.quality
)
};
args.push("-f".to_string());
args.push(format_arg);
// Only set merge output format if not "original"
if options.output_format != "original" {
args.push("--merge-output-format".to_string());
args.push(options.output_format.clone());
}
}
// Progress output
args.push("--newline".to_string());
// Log the full command
let full_cmd_str = format!("{} {}", ytdlp_path.to_string_lossy(), args.join(" "));
app.emit("download-log", LogEvent {
id: id.clone(),
message: format!("正在执行命令: {}", full_cmd_str),
level: "info".to_string(),
}).ok();
emit_log(
&app,
&id,
format!("正在执行命令: {} {}", ytdlp_path.to_string_lossy(), args.join(" ")),
"info",
);
let mut cmd = Command::new(ytdlp_path);
let mut command = Command::new(ytdlp_path);
#[cfg(target_os = "windows")]
cmd.creation_flags(0x08000000);
let mut child = cmd
command.creation_flags(0x08000000);
let mut child = command
.args(&args)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;
let stdout = child.stdout.take().ok_or(anyhow!("Failed to open stdout"))?;
let stderr = child.stderr.take().ok_or(anyhow!("Failed to open stderr"))?;
let stdout = child.stdout.take().ok_or_else(|| anyhow!("Failed to open stdout"))?;
let stderr = child.stderr.take().ok_or_else(|| anyhow!("Failed to open stderr"))?;
let shared_child = Arc::new(Mutex::new(child));
task_runtime::register_child(&id, shared_child.clone()).await;
let progress_regex = Regex::new(r"\[download\]\s+(\d+(?:\.\d+)?)%.*?(?:\s+at\s+([^\s]+))?").unwrap();
let progress_regex =
Regex::new(r"\[download\]\s+(\d+(?:\.\d+)?)%.*?(?:\s+at\s+([^\s]+))?(?:.*?ETA\s+([^\s]+))?").unwrap();
let stdout_task = {
let app = app.clone();
@@ -318,30 +343,28 @@ pub async fn download_video(
continue;
}
if trimmed.contains("Destination") || trimmed.contains("Merging formats") || trimmed.contains("Post-process") {
let task = storage::update_task_status(&app, &id, "postprocessing", None, final_path.as_deref())?;
emit_task(&app, &task);
}
if let Some(caps) = progress_regex.captures(trimmed) {
if let Some(pct_match) = caps.get(1) {
if let Ok(pct) = pct_match.as_str().parse::<f64>() {
if let Some(progress_match) = caps.get(1) {
if let Ok(progress) = progress_match.as_str().parse::<f64>() {
let speed = caps
.get(2)
.map(|value| value.as_str().to_string())
.unwrap_or_else(|| "待定".to_string());
app.emit("download-progress", ProgressEvent {
id: id.clone(),
progress: pct,
speed,
status: "downloading".to_string(),
}).ok();
let eta = caps.get(3).map(|value| value.as_str().to_string());
let task = storage::update_task_progress(&app, &id, progress, &speed, eta.as_deref(), "downloading")?;
emit_task(&app, &task);
emit_progress(&app, &id, progress, speed, eta, "downloading");
continue;
}
}
}
app.emit("download-log", LogEvent {
id: id.clone(),
message: trimmed.to_string(),
level: "info".to_string(),
}).ok();
emit_log(&app, &id, trimmed.to_string(), "info");
}
Ok::<Option<String>, anyhow::Error>(final_path)
@@ -362,41 +385,50 @@ pub async fn download_video(
}
last_error = Some(trimmed.to_string());
app.emit("download-log", LogEvent {
id: id.clone(),
message: trimmed.to_string(),
level: "error".to_string(),
}).ok();
emit_log(&app, &id, trimmed.to_string(), "error");
}
Ok::<Option<String>, anyhow::Error>(last_error)
})
};
let status = child.wait().await?;
let final_path = stdout_task.await.map_err(|e| anyhow!(e.to_string()))??;
let last_error = stderr_task.await.map_err(|e| anyhow!(e.to_string()))??;
let status = {
let mut child = shared_child.lock().await;
child.wait().await?
};
if status.success() {
app.emit("download-progress", ProgressEvent {
id: id.clone(),
progress: 100.0,
speed: "-".to_string(),
status: "finished".to_string(),
}).ok();
task_runtime::unregister_child(&id).await;
let was_cancelled = task_runtime::take_cancelled(&id).await;
let final_path = stdout_task.await.map_err(|error| anyhow!(error.to_string()))??;
let last_error = stderr_task.await.map_err(|error| anyhow!(error.to_string()))??;
if status.success() && !was_cancelled {
let task = storage::update_task_status(&app, &id, "completed", None, final_path.as_deref())?;
emit_task(&app, &task);
emit_progress(&app, &id, 100.0, "-".to_string(), None, "completed");
Ok(final_path)
} else {
app.emit("download-progress", ProgressEvent {
id: id.clone(),
progress: 0.0,
speed: "-".to_string(),
status: "error".to_string(),
}).ok();
Err(anyhow!(
"下载进程失败{}",
last_error
.map(|message| format!(": {message}"))
.unwrap_or_default()
))
let (status_name, error_message) = if was_cancelled {
("cancelled", Some("任务已取消".to_string()))
} else {
(
"failed",
Some(
last_error
.unwrap_or_else(|| "下载进程失败".to_string()),
),
)
};
let task = storage::update_task_status(
&app,
&id,
status_name,
error_message.as_deref(),
final_path.as_deref(),
)?;
emit_task(&app, &task);
emit_progress(&app, &id, task.progress, "-".to_string(), None, status_name);
Err(anyhow!(error_message.unwrap_or_else(|| "下载失败".to_string())))
}
}

View File

@@ -4,6 +4,7 @@ mod downloader;
mod storage;
mod commands;
mod process_utils;
mod task_runtime;
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
@@ -18,9 +19,15 @@ pub fn run() {
commands::get_ytdlp_version,
commands::get_quickjs_version,
commands::get_ffmpeg_version,
commands::get_runtime_status,
commands::fetch_image,
commands::fetch_metadata,
commands::start_download,
commands::cancel_task,
commands::retry_task,
commands::get_tasks,
commands::get_task_logs,
commands::clear_task_logs,
commands::get_settings,
commands::save_settings,
commands::get_history,

View File

@@ -1,25 +1,28 @@
// filepath: src-tauri/src/storage.rs
use anyhow::{Context, Result, anyhow};
use chrono::{DateTime, Utc};
use rusqlite::{Connection, OptionalExtension, params};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
use tauri::{AppHandle, Manager};
use anyhow::Result;
use chrono::{DateTime, Utc};
use crate::downloader::{DownloadOptions, VideoMetadata};
// Statuses that mean a task is finished for good; terminal tasks feed the
// history view and are the ones removed by `clear_history`.
const TERMINAL_STATUSES: &[&str] = &["completed", "failed", "cancelled"];
// Statuses of tasks that are (or should be) in flight; used by
// `recover_incomplete_tasks` to detect work interrupted by a restart.
const ACTIVE_STATUSES: &[&str] = &["queued", "preparing", "analyzing", "downloading", "postprocessing"];
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Settings {
pub download_path: String,
pub cookies_path: Option<String>,
pub theme: String, // 'light', 'dark', 'system'
pub theme: String,
pub last_updated: Option<DateTime<Utc>>,
}
impl Default for Settings {
fn default() -> Self {
// We'll resolve the actual download path at runtime if empty,
// but for default struct we can keep it empty or a placeholder.
Self {
download_path: "".to_string(),
download_path: String::new(),
cookies_path: None,
theme: "system".to_string(),
last_updated: None,
@@ -27,6 +30,38 @@ impl Default for Settings {
}
}
/// One persisted download task, mirroring a row of the `tasks` table
/// created in `open_database`. Timestamps are stored as RFC 3339 text in
/// SQLite and decoded in `task_from_row`.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskRecord {
    /// Primary key of the task.
    pub id: String,
    /// URL exactly as submitted by the user.
    pub source_url: String,
    /// Canonicalized form of `source_url` (presumably for de-duplication — TODO confirm with callers).
    pub normalized_url: String,
    /// Extractor that handled the URL — assumed to be the yt-dlp extractor id; verify against metadata producer.
    pub extractor: Option<String>,
    /// Human-readable site name, when known.
    pub site_name: Option<String>,
    /// Video title shown in the UI.
    pub title: String,
    /// Thumbnail URL (or data) for the task.
    pub thumbnail: String,
    /// Directory the download is written into.
    pub output_path: String,
    /// Final media file path, once it is known.
    pub file_path: Option<String>,
    /// Current lifecycle status; see TERMINAL_STATUSES / ACTIVE_STATUSES.
    pub status: String,
    /// Download progress; fed from the yt-dlp `[download] N%` lines.
    pub progress: f64,
    /// Last reported download speed (free-form text from yt-dlp).
    pub speed: String,
    /// Last reported ETA, if the progress line included one.
    pub eta: Option<String>,
    /// Display label for the chosen format (see `create_task` for how it is derived).
    pub format: String,
    /// Whether only the audio track is kept.
    pub is_audio_only: bool,
    /// Requested quality setting.
    pub quality: String,
    /// Requested container/output format ("original" keeps the source container).
    pub output_format: String,
    /// Cookies file used for the download; empty values are normalized to None.
    pub cookies_path: Option<String>,
    /// Error text for failed/cancelled tasks; empty values are normalized to None.
    pub error_message: Option<String>,
    /// When the row was inserted.
    pub created_at: DateTime<Utc>,
    /// When the task left the queue (first set on `preparing`).
    pub started_at: Option<DateTime<Utc>>,
    /// When the task reached a terminal status.
    pub finished_at: Option<DateTime<Utc>>,
}
impl TaskRecord {
    /// True once the task reached one of completed / failed / cancelled.
    pub fn is_terminal(&self) -> bool {
        TERMINAL_STATUSES.contains(&self.status.as_str())
    }
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct HistoryItem {
pub id: String,
@@ -34,37 +69,47 @@ pub struct HistoryItem {
pub thumbnail: String,
pub url: String,
pub output_path: String,
#[serde(default)]
pub file_path: Option<String>,
pub timestamp: DateTime<Utc>,
pub status: String, // "success", "failed"
pub status: String,
pub format: String,
}
fn write_json_atomically(path: &PathBuf, content: &str) -> Result<()> {
let file_name = path
.file_name()
.and_then(|name| name.to_str())
.unwrap_or("data.json");
let tmp_path = path.with_file_name(format!("{file_name}.tmp"));
/// One log line attached to a task, mirroring a row of the `task_logs`
/// table (cascade-deleted with its task).
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskLogEntry {
    /// Auto-increment row id; also the chronological sort key.
    pub id: i64,
    /// Id of the owning task.
    pub task_id: String,
    /// Log message text.
    pub message: String,
    /// Severity label, e.g. "info" or "error".
    pub level: String,
    /// When the line was recorded.
    pub timestamp: DateTime<Utc>,
}
fs::write(&tmp_path, content)?;
/// The original request behind a task, reconstructed from the JSON columns
/// of the `tasks` table (see `get_task_payload`) — enough to re-run it.
#[derive(Debug, Clone)]
pub struct TaskPayload {
    /// Download options as originally submitted.
    pub options: DownloadOptions,
    /// Metadata captured when the task was created.
    pub metadata: VideoMetadata,
    /// URL exactly as submitted by the user.
    pub source_url: String,
}
if path.exists() {
match fs::rename(&tmp_path, path) {
Ok(()) => return Ok(()),
Err(_) => {
fs::remove_file(path)?;
}
}
}
/// Serializes a UTC timestamp to RFC 3339 text, the format every datetime
/// column in this database uses.
fn to_rfc3339(timestamp: DateTime<Utc>) -> String {
    timestamp.to_rfc3339()
}
fs::rename(&tmp_path, path)?;
Ok(())
fn parse_datetime(value: Option<String>) -> Result<Option<DateTime<Utc>>> {
value
.map(|item| {
DateTime::parse_from_rfc3339(&item)
.map(|timestamp| timestamp.with_timezone(&Utc))
.map_err(|error| anyhow!(error))
})
.transpose()
}
/// Parses an RFC 3339 string that must be present and valid (used for
/// NOT NULL datetime columns such as `created_at`).
fn parse_required_datetime(value: String) -> Result<DateTime<Utc>> {
    parse_datetime(Some(value))?.ok_or_else(|| anyhow!("missing datetime"))
}
pub fn get_app_data_dir(app: &AppHandle) -> Result<PathBuf> {
// In Tauri v2, we use app.path().app_data_dir()
let path = app.path().app_data_dir()?;
if !path.exists() {
fs::create_dir_all(&path)?;
@@ -72,71 +117,430 @@ pub fn get_app_data_dir(app: &AppHandle) -> Result<PathBuf> {
Ok(path)
}
pub fn get_settings_path(app: &AppHandle) -> Result<PathBuf> {
Ok(get_app_data_dir(app)?.join("settings.json"))
/// Location of the SQLite database inside the per-app data directory.
fn get_db_path(app: &AppHandle) -> Result<PathBuf> {
    Ok(get_app_data_dir(app)?.join("stream_capture.db"))
}
pub fn get_history_path(app: &AppHandle) -> Result<PathBuf> {
Ok(get_app_data_dir(app)?.join("history.json"))
/// Opens the application database, creating the schema on first use.
///
/// A fresh connection is opened per storage operation rather than keeping a
/// long-lived one; the batch below is idempotent (IF NOT EXISTS), so running
/// it on every open is safe.
fn open_database(app: &AppHandle) -> Result<Connection> {
    let path = get_db_path(app)?;
    let connection = Connection::open(path)?;
    // WAL improves concurrent reader/writer behavior. foreign_keys is a
    // per-connection pragma and must be on for the ON DELETE CASCADE of
    // task_logs to take effect.
    connection.execute_batch(
        "
        PRAGMA journal_mode = WAL;
        PRAGMA foreign_keys = ON;
        CREATE TABLE IF NOT EXISTS settings (
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL
        );
        CREATE TABLE IF NOT EXISTS tasks (
            id TEXT PRIMARY KEY,
            source_url TEXT NOT NULL,
            normalized_url TEXT NOT NULL,
            extractor TEXT,
            site_name TEXT,
            title TEXT NOT NULL,
            thumbnail TEXT NOT NULL,
            output_path TEXT NOT NULL,
            file_path TEXT,
            status TEXT NOT NULL,
            progress REAL NOT NULL DEFAULT 0,
            speed TEXT NOT NULL DEFAULT '',
            eta TEXT,
            format TEXT NOT NULL,
            is_audio_only INTEGER NOT NULL,
            quality TEXT NOT NULL,
            output_format TEXT NOT NULL,
            cookies_path TEXT,
            error_message TEXT,
            created_at TEXT NOT NULL,
            started_at TEXT,
            finished_at TEXT,
            metadata_json TEXT NOT NULL,
            options_json TEXT NOT NULL
        );
        CREATE TABLE IF NOT EXISTS task_logs (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            task_id TEXT NOT NULL,
            level TEXT NOT NULL,
            message TEXT NOT NULL,
            timestamp TEXT NOT NULL,
            FOREIGN KEY(task_id) REFERENCES tasks(id) ON DELETE CASCADE
        );
        CREATE INDEX IF NOT EXISTS idx_tasks_created_at ON tasks(created_at DESC);
        CREATE INDEX IF NOT EXISTS idx_tasks_status ON tasks(status);
        CREATE INDEX IF NOT EXISTS idx_logs_task_id ON task_logs(task_id, id);
        "
    )?;
    Ok(connection)
}
/// Opens the database once at startup so the schema exists (and any open
/// error surfaces early) before the first real storage call.
pub fn initialize_storage(app: &AppHandle) -> Result<()> {
    open_database(app).map(|_connection| ())
}
/// Reads one value from the `settings` key/value table.
///
/// Returns `Ok(None)` when the key has never been written.
fn load_setting_value(connection: &Connection, key: &str) -> Result<Option<String>> {
    let mut statement = connection.prepare("SELECT value FROM settings WHERE key = ?1")?;
    let value = statement
        .query_row([key], |row| row.get::<_, String>(0))
        .optional()?;
    Ok(value)
}
/// Writes one value into the `settings` key/value table, overwriting any
/// previous value via an upsert.
fn save_setting_value(connection: &Connection, key: &str, value: &str) -> Result<()> {
    connection.execute(
        "INSERT INTO settings (key, value) VALUES (?1, ?2)
         ON CONFLICT(key) DO UPDATE SET value = excluded.value",
        params![key, value],
    )?;
    Ok(())
}
pub fn load_settings(app: &AppHandle) -> Result<Settings> {
let path = get_settings_path(app)?;
if path.exists() {
let content = fs::read_to_string(&path)?;
let settings: Settings = serde_json::from_str(&content)?;
return Ok(settings);
}
// If not exists, return default.
// Note: We might want to set a default download path here if possible.
let connection = open_database(app)?;
let mut settings = Settings::default();
if let Ok(download_dir) = app.path().download_dir() {
settings.download_path = download_dir.to_string_lossy().to_string();
}
if let Some(download_path) = load_setting_value(&connection, "download_path")? {
settings.download_path = download_path;
}
settings.cookies_path = normalize_optional_string(load_setting_value(&connection, "cookies_path")?);
if let Some(theme) = load_setting_value(&connection, "theme")? {
settings.theme = theme;
}
settings.last_updated = parse_datetime(load_setting_value(&connection, "last_updated")?)?;
Ok(settings)
}
pub fn save_settings(app: &AppHandle, settings: &Settings) -> Result<()> {
let path = get_settings_path(app)?;
let content = serde_json::to_string_pretty(settings)?;
write_json_atomically(&path, &content)?;
let connection = open_database(app)?;
save_setting_value(&connection, "download_path", &settings.download_path)?;
save_setting_value(
&connection,
"cookies_path",
settings.cookies_path.as_deref().unwrap_or(""),
)?;
save_setting_value(&connection, "theme", &settings.theme)?;
save_setting_value(
&connection,
"last_updated",
&settings.last_updated.map(to_rfc3339).unwrap_or_default(),
)?;
Ok(())
}
/// Trims an optional string and collapses blank results to `None`, so ""
/// stored in a nullable column round-trips as an absent value.
fn normalize_optional_string(value: Option<String>) -> Option<String> {
    match value {
        None => None,
        Some(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
    }
}
/// Maps one `tasks` row onto a `TaskRecord`.
///
/// Timestamp columns are RFC 3339 text; parse failures are wrapped in a
/// `rusqlite::Error` so they propagate naturally through
/// `query_row`/`query_map`.
fn task_from_row(row: &rusqlite::Row<'_>) -> rusqlite::Result<TaskRecord> {
    // Pull the raw timestamp strings first; they are parsed below.
    let created_at: String = row.get("created_at")?;
    let started_at: Option<String> = row.get("started_at")?;
    let finished_at: Option<String> = row.get("finished_at")?;
    Ok(TaskRecord {
        id: row.get("id")?,
        source_url: row.get("source_url")?,
        normalized_url: row.get("normalized_url")?,
        extractor: row.get("extractor")?,
        site_name: row.get("site_name")?,
        title: row.get("title")?,
        thumbnail: row.get("thumbnail")?,
        output_path: row.get("output_path")?,
        file_path: row.get("file_path")?,
        status: row.get("status")?,
        progress: row.get("progress")?,
        speed: row.get("speed")?,
        eta: row.get("eta")?,
        format: row.get("format")?,
        // SQLite has no boolean type; the column stores 0/1.
        is_audio_only: row.get::<_, i64>("is_audio_only")? != 0,
        quality: row.get("quality")?,
        output_format: row.get("output_format")?,
        // Empty strings in these columns are treated the same as NULL.
        cookies_path: normalize_optional_string(row.get("cookies_path")?),
        error_message: normalize_optional_string(row.get("error_message")?),
        // NOTE(review): ToSqlConversionFailure is nominally the to-SQL error
        // variant; it is reused here only as a carrier for the parse error.
        created_at: parse_required_datetime(created_at)
            .map_err(|error| rusqlite::Error::ToSqlConversionFailure(error.into()))?,
        started_at: parse_datetime(started_at)
            .map_err(|error| rusqlite::Error::ToSqlConversionFailure(error.into()))?,
        finished_at: parse_datetime(finished_at)
            .map_err(|error| rusqlite::Error::ToSqlConversionFailure(error.into()))?,
    })
}
/// Fetches a single task by id on an already-open connection, attaching the
/// task id to the error when the row does not exist.
fn load_task_with_connection(connection: &Connection, id: &str) -> Result<TaskRecord> {
    connection
        .query_row("SELECT * FROM tasks WHERE id = ?1", [id], task_from_row)
        .with_context(|| format!("task not found: {id}"))
}
pub fn list_tasks(app: &AppHandle) -> Result<Vec<TaskRecord>> {
let connection = open_database(app)?;
let mut stmt = connection.prepare("SELECT * FROM tasks ORDER BY created_at DESC")?;
let rows = stmt.query_map([], task_from_row)?;
let mut tasks = Vec::new();
for row in rows {
tasks.push(row?);
}
Ok(tasks)
}
/// Inserts a new task row in the `queued` state and returns the stored record.
///
/// The full `DownloadOptions` and `VideoMetadata` are serialized to JSON
/// columns so the original request can be reconstructed later (see
/// `get_task_payload`, used for retries).
pub fn create_task(
    app: &AppHandle,
    id: &str,
    source_url: &str,
    normalized_url: &str,
    options: &DownloadOptions,
    metadata: &VideoMetadata,
) -> Result<TaskRecord> {
    let connection = open_database(app)?;
    let now = Utc::now();
    let metadata_json = serde_json::to_string(metadata)?;
    let options_json = serde_json::to_string(options)?;
    // Display label for the format column: "original" keeps the source
    // container, qualified by whether only the audio track is kept.
    let format = if options.output_format == "original" {
        if options.is_audio_only {
            "audio-original".to_string()
        } else {
            "video-original".to_string()
        }
    } else {
        options.output_format.clone()
    };
    connection.execute(
        "INSERT INTO tasks (
            id, source_url, normalized_url, extractor, site_name, title, thumbnail, output_path,
            file_path, status, progress, speed, eta, format, is_audio_only, quality, output_format,
            cookies_path, error_message, created_at, started_at, finished_at, metadata_json, options_json
        ) VALUES (
            ?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8,
            NULL, 'queued', 0, '', NULL, ?9, ?10, ?11, ?12,
            ?13, NULL, ?14, NULL, NULL, ?15, ?16
        )",
        params![
            id,
            source_url,
            normalized_url,
            metadata.extractor.clone(),
            metadata.site_name.clone(),
            metadata.title,
            metadata.thumbnail,
            options.output_path,
            format,
            // SQLite boolean stored as 0/1 (read back in task_from_row).
            if options.is_audio_only { 1 } else { 0 },
            options.quality,
            options.output_format,
            // An absent cookies path is stored as '' and normalized back to
            // None when the row is read.
            options.cookies_path.clone().unwrap_or_default(),
            to_rfc3339(now),
            metadata_json,
            options_json
        ],
    )?;
    // Re-read the row so the caller gets exactly what was persisted.
    load_task_with_connection(&connection, id)
}
/// Moves a task to `status`, optionally recording an error message and/or
/// final file path, and returns the updated record.
///
/// `started_at` is stamped only when entering `preparing`; `finished_at`
/// only when entering a terminal status. COALESCE keeps any previously
/// stored error message / file path / timestamps from being overwritten
/// with NULL.
pub fn update_task_status(
    app: &AppHandle,
    id: &str,
    status: &str,
    error_message: Option<&str>,
    file_path: Option<&str>,
) -> Result<TaskRecord> {
    let connection = open_database(app)?;
    // Only the transition into "preparing" marks the start time.
    let started_at = if status == "preparing" {
        Some(to_rfc3339(Utc::now()))
    } else {
        None
    };
    // Only a transition into a terminal status marks the finish time.
    let finished_at = if TERMINAL_STATUSES.contains(&status) {
        Some(to_rfc3339(Utc::now()))
    } else {
        None
    };
    connection.execute(
        "UPDATE tasks
         SET status = ?2,
             error_message = COALESCE(?3, error_message),
             file_path = COALESCE(?4, file_path),
             started_at = COALESCE(?5, started_at),
             finished_at = CASE WHEN ?6 IS NOT NULL THEN ?6 ELSE finished_at END
         WHERE id = ?1",
        params![
            id,
            status,
            error_message,
            file_path,
            started_at,
            finished_at
        ],
    )?;
    load_task_with_connection(&connection, id)
}
/// Records a progress tick (percentage, speed, ETA, status) for a task and
/// returns the updated record.
///
/// `started_at` is back-filled with "now" only if it was never set, so the
/// first progress line also marks the task as started.
pub fn update_task_progress(
    app: &AppHandle,
    id: &str,
    progress: f64,
    speed: &str,
    eta: Option<&str>,
    status: &str,
) -> Result<TaskRecord> {
    let connection = open_database(app)?;
    connection.execute(
        "UPDATE tasks
         SET progress = ?2,
             speed = ?3,
             eta = ?4,
             status = ?5,
             started_at = COALESCE(started_at, ?6)
         WHERE id = ?1",
        params![id, progress, speed, eta, status, to_rfc3339(Utc::now())],
    )?;
    load_task_with_connection(&connection, id)
}
/// Reconstructs the original request (URL, metadata, options) for a task
/// from its JSON columns, e.g. so a failed task can be retried.
pub fn get_task_payload(app: &AppHandle, id: &str) -> Result<TaskPayload> {
    let connection = open_database(app)?;
    let (source_url, metadata_json, options_json): (String, String, String) = connection
        .query_row(
            "SELECT source_url, metadata_json, options_json FROM tasks WHERE id = ?1",
            [id],
            |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
        )?;
    Ok(TaskPayload {
        source_url,
        metadata: serde_json::from_str(&metadata_json)?,
        options: serde_json::from_str(&options_json)?,
    })
}
pub fn load_history(app: &AppHandle) -> Result<Vec<HistoryItem>> {
let path = get_history_path(app)?;
if path.exists() {
let content = fs::read_to_string(&path)?;
let history: Vec<HistoryItem> = serde_json::from_str(&content)?;
Ok(history)
} else {
Ok(Vec::new())
let tasks = list_tasks(app)?;
let mut history = Vec::new();
for task in tasks.into_iter().filter(TaskRecord::is_terminal) {
history.push(HistoryItem {
id: task.id,
title: task.title,
thumbnail: task.thumbnail,
url: task.source_url,
output_path: task.output_path,
file_path: task.file_path,
timestamp: task.finished_at.unwrap_or(task.created_at),
status: task.status,
format: task.format,
});
}
}
pub fn save_history(app: &AppHandle, history: &[HistoryItem]) -> Result<()> {
let path = get_history_path(app)?;
let content = serde_json::to_string_pretty(history)?;
write_json_atomically(&path, &content)?;
Ok(())
}
pub fn add_history_item(app: &AppHandle, item: HistoryItem) -> Result<()> {
let mut history = load_history(app)?;
// Prepend
history.insert(0, item);
save_history(app, &history)?;
Ok(())
Ok(history)
}
pub fn clear_history(app: &AppHandle) -> Result<()> {
save_history(app, &[])
let connection = open_database(app)?;
let placeholders = TERMINAL_STATUSES
.iter()
.map(|_| "?")
.collect::<Vec<_>>()
.join(", ");
let query = format!("DELETE FROM tasks WHERE status IN ({placeholders})");
connection.execute(
&query,
rusqlite::params_from_iter(TERMINAL_STATUSES.iter().copied()),
)?;
Ok(())
}
pub fn delete_history_item(app: &AppHandle, id: &str) -> Result<()> {
let mut history = load_history(app)?;
history.retain(|item| item.id != id);
save_history(app, &history)?;
let connection = open_database(app)?;
connection.execute("DELETE FROM tasks WHERE id = ?1", [id])?;
Ok(())
}
/// Appends one log line for a task; the row id doubles as the
/// chronological ordering used by `load_logs`.
pub fn add_log_entry(app: &AppHandle, task_id: &str, level: &str, message: &str) -> Result<()> {
    let connection = open_database(app)?;
    connection.execute(
        "INSERT INTO task_logs (task_id, level, message, timestamp) VALUES (?1, ?2, ?3, ?4)",
        params![task_id, level, message, to_rfc3339(Utc::now())],
    )?;
    Ok(())
}
/// Returns every task log line, oldest first (ordered by insertion id).
///
/// # Errors
/// Fails if the database cannot be opened, a row cannot be decoded, or a
/// stored timestamp is not valid RFC 3339.
pub fn load_logs(app: &AppHandle) -> Result<Vec<TaskLogEntry>> {
    let connection = open_database(app)?;
    let mut stmt = connection.prepare(
        "SELECT id, task_id, message, level, timestamp FROM task_logs ORDER BY id ASC",
    )?;
    // Collect fallibly instead of a manual push loop: the first bad row
    // aborts with its error, exactly as before.
    let logs = stmt
        .query_map([], |row| {
            let timestamp: String = row.get(4)?;
            Ok(TaskLogEntry {
                id: row.get(0)?,
                task_id: row.get(1)?,
                message: row.get(2)?,
                level: row.get(3)?,
                // Stored as RFC 3339 text; wrap the parse error so it flows
                // through rusqlite's row-mapping machinery.
                timestamp: parse_required_datetime(timestamp)
                    .map_err(|error| rusqlite::Error::ToSqlConversionFailure(error.into()))?,
            })
        })?
        .collect::<rusqlite::Result<Vec<_>>>()?;
    Ok(logs)
}
/// Deletes every row from `task_logs`; task records themselves are left
/// untouched.
pub fn clear_logs(app: &AppHandle) -> Result<()> {
    open_database(app)?.execute("DELETE FROM task_logs", [])?;
    Ok(())
}
/// Marks tasks left in an in-flight status by a previous run as failed.
///
/// Intended to run once at startup, before new downloads begin; returns the
/// full task list afterwards so the UI can render the recovered state.
pub fn recover_incomplete_tasks(app: &AppHandle) -> Result<Vec<TaskRecord>> {
    let connection = open_database(app)?;
    let now = to_rfc3339(Utc::now());
    // One '?' per active status for the IN (...) clause.
    let placeholders = ACTIVE_STATUSES
        .iter()
        .map(|_| "?")
        .collect::<Vec<_>>()
        .join(", ");
    let update = format!(
        "UPDATE tasks
         SET status = 'failed',
             error_message = COALESCE(error_message, '应用重启导致任务中断'),
             finished_at = COALESCE(finished_at, ?1)
         WHERE status IN ({placeholders})"
    );
    // Bind order matches the SQL: ?1 takes the timestamp, then the
    // unnumbered '?'s take the statuses in declaration order.
    let mut values: Vec<String> = Vec::with_capacity(ACTIVE_STATUSES.len() + 1);
    values.push(now);
    values.extend(ACTIVE_STATUSES.iter().map(|item| item.to_string()));
    connection.execute(&update, rusqlite::params_from_iter(values.iter()))?;
    list_tasks(app)
}

View File

@@ -0,0 +1,42 @@
use std::collections::{HashMap, HashSet};
use std::sync::{Arc, LazyLock};
use anyhow::Result;
use tokio::process::Child;
use tokio::sync::Mutex;
/// A spawned child process handle shared between the task that owns it and
/// the cancellation path.
type SharedChild = Arc<Mutex<Child>>;

/// Child processes of currently running downloads, keyed by task id.
static ACTIVE_TASKS: LazyLock<Mutex<HashMap<String, SharedChild>>> =
    LazyLock::new(|| Mutex::new(HashMap::new()));

/// Ids whose cancellation was requested; each entry is consumed exactly
/// once by `take_cancelled`.
static CANCELLED_TASKS: LazyLock<Mutex<HashSet<String>>> =
    LazyLock::new(|| Mutex::new(HashSet::new()));
/// Registers a running child process under its task id so `cancel_task`
/// can reach it; replaces any previous handle for the same id.
pub async fn register_child(id: &str, child: SharedChild) {
    ACTIVE_TASKS.lock().await.insert(id.to_string(), child);
}
/// Drops the registered child handle for a finished task; a no-op when the
/// id was never registered.
pub async fn unregister_child(id: &str) {
    ACTIVE_TASKS.lock().await.remove(id);
}
/// Marks `id` as cancelled and, if its process is still registered, asks
/// the OS to kill it.
///
/// Returns `Ok(true)` when a kill signal was actually sent, `Ok(false)`
/// when no process was registered (the flag is recorded either way).
pub async fn cancel_task(id: &str) -> Result<bool> {
    // Record the intent first so the spawning task sees it even if the
    // process already exited.
    CANCELLED_TASKS.lock().await.insert(id.to_string());
    // Clone the handle out so the registry lock is not held across the kill.
    let handle = ACTIVE_TASKS.lock().await.get(id).cloned();
    match handle {
        Some(child) => {
            child.lock().await.start_kill()?;
            Ok(true)
        }
        None => Ok(false),
    }
}
/// Consumes the cancellation flag for `id`, returning whether it was set.
/// A second call for the same id returns false.
pub async fn take_cancelled(id: &str) -> bool {
    CANCELLED_TASKS.lock().await.remove(id)
}