Working? Let's hope so

This commit is contained in:
mircea32000 2025-02-10 21:05:39 +02:00
parent 5e9aa2b0ea
commit 897e2c3193
7 changed files with 292 additions and 239 deletions

View file

@ -1,162 +1,48 @@
import aria2p import aria2p
from typing import Union, List
import logging
import os
from pathlib import Path
from aria2p import API, Client, Download
import requests
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class HttpDownloader: class HttpDownloader:
def __init__(self): def __init__(self):
self.downloads = [] # vom păstra toate download-urile active self.download = None
self.aria2 = API(Client(host="http://localhost", port=6800)) self.aria2 = aria2p.API(
self.download = None # pentru compatibilitate cu codul vechi aria2p.Client(
host="http://localhost",
def unlock_alldebrid_link(self, link: str) -> str: port=6800,
"""Deblochează un link AllDebrid și returnează link-ul real de descărcare.""" secret=""
api_key = os.getenv('ALLDEBRID_API_KEY')
if not api_key:
logger.error("AllDebrid API key nu a fost găsită în variabilele de mediu")
return link
try:
response = requests.post(
"https://api.alldebrid.com/v4/link/unlock",
params={
"agent": "hydra",
"apikey": api_key,
"link": link
}
) )
data = response.json() )
if data.get("status") == "success":
return data["data"]["link"]
else:
logger.error(f"Eroare la deblocarea link-ului AllDebrid: {data.get('error', {}).get('message', 'Unknown error')}")
return link
except Exception as e:
logger.error(f"Eroare la apelul API AllDebrid: {str(e)}")
return link
def start_download(self, url: Union[str, List[str]], save_path: str, header: str = None, out: str = None): def start_download(self, url: str, save_path: str, header: str, out: str = None):
logger.info(f"Starting download with URL: {url}, save_path: {save_path}, header: {header}, out: {out}") if self.download:
self.aria2.resume([self.download])
# Pentru AllDebrid care returnează un link per fișier
if isinstance(url, list):
logger.info(f"Multiple URLs detected: {len(url)} files to download")
self.downloads = []
# Deblocăm toate link-urile AllDebrid
unlocked_urls = []
for single_url in url:
logger.info(f"Unlocking AllDebrid URL: {single_url}")
unlocked_url = self.unlock_alldebrid_link(single_url)
if unlocked_url:
unlocked_urls.append(unlocked_url)
logger.info(f"URL deblocat cu succes: {unlocked_url}")
# Descărcăm folosind link-urile deblocate
for unlocked_url in unlocked_urls:
logger.info(f"Adding download for unlocked URL: {unlocked_url}")
options = {
"dir": save_path
}
if header:
if isinstance(header, list):
options["header"] = header
else:
options["header"] = [header]
try:
download = self.aria2.add_uris([unlocked_url], options=options)
logger.info(f"Download added successfully: {download.gid}")
self.downloads.append(download)
except Exception as e:
logger.error(f"Error adding download for URL {unlocked_url}: {str(e)}")
if self.downloads:
self.download = self.downloads[0] # păstrăm primul pentru referință
else:
logger.error("No downloads were successfully added!")
# Pentru RealDebrid/alte servicii care returnează un singur link pentru tot
else: else:
logger.info(f"Single URL download: {url}") downloads = self.aria2.add(url, options={"header": header, "dir": save_path, "out": out})
options = {
"dir": save_path self.download = downloads[0]
}
if header:
if isinstance(header, list):
options["header"] = header
else:
options["header"] = [header]
if out:
options["out"] = out
try:
download = self.aria2.add_uris([url], options=options)
self.download = download
self.downloads = [self.download]
logger.info(f"Single download added successfully: {self.download.gid}")
except Exception as e:
logger.error(f"Error adding single download: {str(e)}")
def pause_download(self): def pause_download(self):
try: if self.download:
for download in self.downloads: self.aria2.pause([self.download])
download.pause()
except Exception as e:
logger.error(f"Error pausing downloads: {str(e)}")
def cancel_download(self): def cancel_download(self):
try: if self.download:
for download in self.downloads: self.aria2.remove([self.download])
download.remove() self.download = None
except Exception as e:
logger.error(f"Error canceling downloads: {str(e)}")
def get_download_status(self): def get_download_status(self):
try: if self.download == None:
if not self.downloads:
return None
total_size = 0
downloaded = 0
download_speed = 0
active_downloads = []
for download in self.downloads:
try:
download.update()
if download.is_active:
active_downloads.append(download)
total_size += download.total_length
downloaded += download.completed_length
download_speed += download.download_speed
except Exception as e:
logger.error(f"Error updating download status for {download.gid}: {str(e)}")
if not active_downloads:
return None
# Folosim primul download pentru numele folderului
folder_path = os.path.dirname(active_downloads[0].files[0].path)
folder_name = os.path.basename(folder_path)
return {
"progress": downloaded / total_size if total_size > 0 else 0,
"numPeers": 0, # nu este relevant pentru HTTP
"numSeeds": 0, # nu este relevant pentru HTTP
"downloadSpeed": download_speed,
"bytesDownloaded": downloaded,
"fileSize": total_size,
"folderName": folder_name,
"status": "downloading"
}
except Exception as e:
logger.error(f"Error getting download status: {str(e)}")
return None return None
download = self.aria2.get_download(self.download.gid)
response = {
'folderName': download.name,
'fileSize': download.total_length,
'progress': download.completed_length / download.total_length if download.total_length else 0,
'downloadSpeed': download.download_speed,
'numPeers': 0,
'numSeeds': 0,
'status': download.status,
'bytesDownloaded': download.completed_length,
}
print("HTTP_DOWNLOADER_STATUS: ", response)
return response

View file

@ -0,0 +1,151 @@
import aria2p
from aria2p.client import ClientException as DownloadNotFound
class HttpMultiLinkDownloader:
    """Aggregate downloader that drives several HTTP downloads through one local aria2 daemon.

    Tracks every queued download and folds them into a single status payload
    shaped like the one ``HttpDownloader`` returns, so callers can treat a
    multi-link batch (e.g. one link per file from a debrid service) as one
    logical download. Finished or vanished downloads are retired automatically
    on each status poll.
    """

    def __init__(self):
        # aria2p Download handles still in flight.
        self.downloads = []
        # Finished downloads kept as {'name': ..., 'size': ...} dicts so their
        # bytes keep counting toward aggregate progress after aria2 drops them.
        self.completed_downloads = []
        # Expected batch size in bytes when the caller knows it up front;
        # otherwise it is derived per poll from the individual downloads.
        self.total_size = None
        self.aria2 = aria2p.API(
            aria2p.Client(
                host="http://localhost",
                port=6800,
                secret=""
            )
        )

    def start_download(self, urls: list[str], save_path: str, header: str = None, out: str = None, total_size: int = None):
        """Add multiple URLs to download queue with same options.

        Any previously tracked batch is cancelled first so this tracker only
        reflects the new set of URLs.

        Args:
            urls: One download URL per file.
            save_path: Target directory passed to aria2 as ``dir``.
            header: Optional HTTP header(s) forwarded to aria2.
            out: Optional output filename forwarded to aria2.
            total_size: Optional known total size of the whole batch in bytes.
        """
        options = {"dir": save_path}
        if header:
            options["header"] = header
        if out:
            options["out"] = out

        # Clear any existing downloads first
        self.cancel_download()
        self.completed_downloads = []
        self.total_size = total_size

        for url in urls:
            try:
                added_downloads = self.aria2.add(url, options=options)
                self.downloads.extend(added_downloads)
            except Exception as e:
                print(f"Error adding download for URL {url}: {str(e)}")

    def pause_download(self):
        """Pause all active downloads"""
        if self.downloads:
            try:
                self.aria2.pause(self.downloads)
            except Exception as e:
                print(f"Error pausing downloads: {str(e)}")

    def cancel_download(self):
        """Cancel and remove all downloads"""
        if self.downloads:
            try:
                # First try to stop the downloads
                self.aria2.remove(self.downloads)
            except Exception as e:
                print(f"Error removing downloads: {str(e)}")
            finally:
                # Clear the downloads list regardless of success/failure
                self.downloads = []
                self.completed_downloads = []

    def get_download_status(self):
        """Get status for all tracked downloads, auto-remove completed/failed ones.

        Returns:
            A single-element list holding the aggregate status dict, or an
            empty list when there is nothing to report.
        """
        if not self.downloads and not self.completed_downloads:
            return []

        total_completed = 0
        current_download_speed = 0
        active_downloads = []
        to_remove = []

        # First calculate sizes from completed downloads
        for completed in self.completed_downloads:
            total_completed += completed['size']

        # Then check active downloads
        for download in self.downloads:
            try:
                current_download = self.aria2.get_download(download.gid)

                # Skip downloads that are not properly initialized
                if not current_download or not current_download.files:
                    to_remove.append(download)
                    continue

                # Add to completed size and speed calculations
                total_completed += current_download.completed_length
                current_download_speed += current_download.download_speed

                # If download is complete, move it to completed_downloads
                if current_download.status == 'complete':
                    self.completed_downloads.append({
                        'name': current_download.name,
                        'size': current_download.total_length
                    })
                    to_remove.append(download)
                else:
                    active_downloads.append({
                        'name': current_download.name,
                        'size': current_download.total_length,
                        'completed': current_download.completed_length,
                        'speed': current_download.download_speed
                    })
            except DownloadNotFound:
                # aria2 no longer knows this gid (removed externally) — drop it.
                to_remove.append(download)
                continue
            except Exception as e:
                print(f"Error getting download status: {str(e)}")
                continue

        # Clean up completed/removed downloads from active list
        for download in to_remove:
            try:
                if download in self.downloads:
                    self.downloads.remove(download)
            except ValueError:
                pass

        # Return aggregate status
        if self.total_size or active_downloads or self.completed_downloads:
            # Use the first active download's name as the folder name, or completed if none active
            folder_name = None
            if active_downloads:
                folder_name = active_downloads[0]['name']
            elif self.completed_downloads:
                folder_name = self.completed_downloads[0]['name']

            if folder_name and '/' in folder_name:
                folder_name = folder_name.split('/')[0]

            # Use provided total size if available, otherwise sum from downloads
            total_size = self.total_size
            if not total_size:
                total_size = sum(d['size'] for d in active_downloads) + sum(d['size'] for d in self.completed_downloads)

            # Calculate completion status based on total downloaded vs total size.
            # BUGFIX: completion now also requires a known, positive total size —
            # previously an unknown size (0) with no active downloads satisfied
            # `0 >= 0 * 0.99` and was falsely reported as complete. The 1%
            # margin tolerates reported-size discrepancies.
            is_complete = (
                not active_downloads
                and total_size > 0
                and total_completed >= total_size * 0.99
            )

            # If all downloads are complete, clear the completed_downloads list to prevent status updates
            if is_complete:
                self.completed_downloads = []

            return [{
                'folderName': folder_name,
                'fileSize': total_size,
                'progress': total_completed / total_size if total_size > 0 else 0,
                'downloadSpeed': current_download_speed,
                'numPeers': 0,
                'numSeeds': 0,
                'status': 'complete' if is_complete else 'active',
                'bytesDownloaded': total_completed,
            }]

        return []

View file

@ -2,6 +2,7 @@ from flask import Flask, request, jsonify
import sys, json, urllib.parse, psutil import sys, json, urllib.parse, psutil
from torrent_downloader import TorrentDownloader from torrent_downloader import TorrentDownloader
from http_downloader import HttpDownloader from http_downloader import HttpDownloader
from http_multi_link_downloader import HttpMultiLinkDownloader
from profile_image_processor import ProfileImageProcessor from profile_image_processor import ProfileImageProcessor
import libtorrent as lt import libtorrent as lt
@ -23,27 +24,27 @@ torrent_session = lt.session({'listen_interfaces': '0.0.0.0:{port}'.format(port=
if start_download_payload: if start_download_payload:
initial_download = json.loads(urllib.parse.unquote(start_download_payload)) initial_download = json.loads(urllib.parse.unquote(start_download_payload))
downloading_game_id = initial_download['game_id'] downloading_game_id = initial_download['game_id']
url = initial_download['url']
# Verificăm dacă avem un URL de tip magnet (fie direct, fie primul dintr-o listă) if isinstance(initial_download['url'], list):
is_magnet = False # Handle multiple URLs using HttpMultiLinkDownloader
if isinstance(url, str): http_multi_downloader = HttpMultiLinkDownloader()
is_magnet = url.startswith('magnet') downloads[initial_download['game_id']] = http_multi_downloader
elif isinstance(url, list) and url: try:
is_magnet = False # Pentru AllDebrid, chiar dacă vine dintr-un magnet, primim HTTP links http_multi_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get("out"))
except Exception as e:
if is_magnet: print("Error starting multi-link download", e)
elif initial_download['url'].startswith('magnet'):
torrent_downloader = TorrentDownloader(torrent_session) torrent_downloader = TorrentDownloader(torrent_session)
downloads[initial_download['game_id']] = torrent_downloader downloads[initial_download['game_id']] = torrent_downloader
try: try:
torrent_downloader.start_download(url, initial_download['save_path']) torrent_downloader.start_download(initial_download['url'], initial_download['save_path'])
except Exception as e: except Exception as e:
print("Error starting torrent download", e) print("Error starting torrent download", e)
else: else:
http_downloader = HttpDownloader() http_downloader = HttpDownloader()
downloads[initial_download['game_id']] = http_downloader downloads[initial_download['game_id']] = http_downloader
try: try:
http_downloader.start_download(url, initial_download['save_path'], initial_download.get('header'), initial_download.get("out")) http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get("out"))
except Exception as e: except Exception as e:
print("Error starting http download", e) print("Error starting http download", e)
@ -70,12 +71,23 @@ def status():
return auth_error return auth_error
downloader = downloads.get(downloading_game_id) downloader = downloads.get(downloading_game_id)
if downloader: if not downloader:
status = downloads.get(downloading_game_id).get_download_status()
return jsonify(status), 200
else:
return jsonify(None) return jsonify(None)
status = downloader.get_download_status()
if not status:
return jsonify(None)
if isinstance(status, list):
if not status: # Empty list
return jsonify(None)
# For multi-link downloader, use the aggregated status
# The status will already be aggregated by the HttpMultiLinkDownloader
return jsonify(status[0]), 200
return jsonify(status), 200
@app.route("/seed-status", methods=["GET"]) @app.route("/seed-status", methods=["GET"])
def seed_status(): def seed_status():
auth_error = validate_rpc_password() auth_error = validate_rpc_password()
@ -89,10 +101,24 @@ def seed_status():
continue continue
response = downloader.get_download_status() response = downloader.get_download_status()
if response is None: if not response:
continue continue
if response.get('status') == 5: if isinstance(response, list):
# For multi-link downloader, check if all files are complete
if response and all(item['status'] == 'complete' for item in response):
seed_status.append({
'gameId': game_id,
'status': 'complete',
'folderName': response[0]['folderName'],
'fileSize': sum(item['fileSize'] for item in response),
'bytesDownloaded': sum(item['bytesDownloaded'] for item in response),
'downloadSpeed': 0,
'numPeers': 0,
'numSeeds': 0,
'progress': 1.0
})
elif response.get('status') == 5: # Original torrent seeding check
seed_status.append({ seed_status.append({
'gameId': game_id, 'gameId': game_id,
**response, **response,
@ -143,18 +169,18 @@ def action():
if action == 'start': if action == 'start':
url = data.get('url') url = data.get('url')
print(f"Starting download with URL: {url}")
existing_downloader = downloads.get(game_id) existing_downloader = downloads.get(game_id)
# Verificăm dacă avem un URL de tip magnet (fie direct, fie primul dintr-o listă) if isinstance(url, list):
is_magnet = False # Handle multiple URLs using HttpMultiLinkDownloader
if isinstance(url, str): if existing_downloader and isinstance(existing_downloader, HttpMultiLinkDownloader):
is_magnet = url.startswith('magnet') existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'))
elif isinstance(url, list) and url: else:
is_magnet = False # Pentru AllDebrid, chiar dacă vine dintr-un magnet, primim HTTP links http_multi_downloader = HttpMultiLinkDownloader()
downloads[game_id] = http_multi_downloader
if is_magnet: http_multi_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'))
elif url.startswith('magnet'):
if existing_downloader and isinstance(existing_downloader, TorrentDownloader): if existing_downloader and isinstance(existing_downloader, TorrentDownloader):
existing_downloader.start_download(url, data['save_path']) existing_downloader.start_download(url, data['save_path'])
else: else:
@ -188,6 +214,7 @@ def action():
downloader = downloads.get(game_id) downloader = downloads.get(game_id)
if downloader: if downloader:
downloader.cancel_download() downloader.cancel_download()
else: else:
return jsonify({"error": "Invalid action"}), 400 return jsonify({"error": "Invalid action"}), 400

View file

@ -27,6 +27,12 @@ interface AllDebridError {
message: string; message: string;
} }
interface AllDebridDownloadUrl {
link: string;
size?: number;
filename?: string;
}
export class AllDebridClient { export class AllDebridClient {
private static instance: AxiosInstance; private static instance: AxiosInstance;
private static readonly baseURL = "https://api.alldebrid.com/v4"; private static readonly baseURL = "https://api.alldebrid.com/v4";
@ -201,7 +207,7 @@ export class AllDebridClient {
} }
} }
public static async getDownloadUrls(uri: string): Promise<string[]> { public static async getDownloadUrls(uri: string): Promise<AllDebridDownloadUrl[]> {
try { try {
logger.info("[AllDebrid] Getting download URLs for URI:", uri); logger.info("[AllDebrid] Getting download URLs for URI:", uri);
@ -226,7 +232,11 @@ export class AllDebridClient {
try { try {
const unlockedLink = await this.unlockLink(link.link); const unlockedLink = await this.unlockLink(link.link);
logger.info("[AllDebrid] Successfully unlocked link:", unlockedLink); logger.info("[AllDebrid] Successfully unlocked link:", unlockedLink);
return unlockedLink; return {
link: unlockedLink,
size: link.size,
filename: link.filename
};
} catch (error) { } catch (error) {
logger.error("[AllDebrid] Failed to unlock link:", link.link, error); logger.error("[AllDebrid] Failed to unlock link:", link.link, error);
throw new Error("Failed to unlock all links"); throw new Error("Failed to unlock all links");
@ -249,7 +259,9 @@ export class AllDebridClient {
// Pentru link-uri normale, doar debridam link-ul // Pentru link-uri normale, doar debridam link-ul
const downloadUrl = await this.unlockLink(uri); const downloadUrl = await this.unlockLink(uri);
logger.info("[AllDebrid] Got unlocked download URL:", downloadUrl); logger.info("[AllDebrid] Got unlocked download URL:", downloadUrl);
return [downloadUrl]; return [{
link: downloadUrl
}];
} }
} catch (error: any) { } catch (error: any) {
logger.error("[AllDebrid] Get Download URLs Error:", error); logger.error("[AllDebrid] Get Download URLs Error:", error);

View file

@ -19,6 +19,16 @@ import { TorBoxClient } from "./torbox";
import { AllDebridClient } from "./all-debrid"; import { AllDebridClient } from "./all-debrid";
import { spawn } from "child_process"; import { spawn } from "child_process";
interface GamePayload {
action: string;
game_id: string;
url: string | string[];
save_path: string;
header?: string;
out?: string;
total_size?: number;
}
export class DownloadManager { export class DownloadManager {
private static downloadingGameId: string | null = null; private static downloadingGameId: string | null = null;
@ -135,45 +145,15 @@ export class DownloadManager {
if (progress === 1 && download) { if (progress === 1 && download) {
publishDownloadCompleteNotification(game); publishDownloadCompleteNotification(game);
if ( await downloadsSublevel.put(gameId, {
userPreferences?.seedAfterDownloadComplete && ...download,
download.downloader === Downloader.Torrent status: "complete",
) { shouldSeed: false,
downloadsSublevel.put(gameId, { queued: false,
...download, });
status: "seeding",
shouldSeed: true,
queued: false,
});
} else {
downloadsSublevel.put(gameId, {
...download,
status: "complete",
shouldSeed: false,
queued: false,
});
this.cancelDownload(gameId); await this.cancelDownload(gameId);
} this.downloadingGameId = null;
const downloads = await downloadsSublevel
.values()
.all()
.then((games) => {
return sortBy(
games.filter((game) => game.status === "paused" && game.queued),
"timestamp",
"DESC"
);
});
const [nextItemOnQueue] = downloads;
if (nextItemOnQueue) {
this.resumeDownload(nextItemOnQueue);
} else {
this.downloadingGameId = null;
}
} }
} }
} }
@ -340,11 +320,14 @@ export class DownloadManager {
if (!downloadUrls.length) throw new Error(DownloadError.NotCachedInAllDebrid); if (!downloadUrls.length) throw new Error(DownloadError.NotCachedInAllDebrid);
const totalSize = downloadUrls.reduce((total, url) => total + (url.size || 0), 0);
return { return {
action: "start", action: "start",
game_id: downloadId, game_id: downloadId,
url: downloadUrls, url: downloadUrls.map(d => d.link),
save_path: download.downloadPath, save_path: download.downloadPath,
total_size: totalSize
}; };
} }
case Downloader.TorBox: { case Downloader.TorBox: {

View file

@ -17,17 +17,24 @@ export const calculateETA = (
}; };
export const getDirSize = async (dir: string): Promise<number> => { export const getDirSize = async (dir: string): Promise<number> => {
const getItemSize = async (filePath: string): Promise<number> => { try {
const stat = await fs.promises.stat(filePath); const stat = await fs.promises.stat(dir);
if (stat.isDirectory()) { // If it's a file, return its size directly
return getDirSize(filePath); if (!stat.isDirectory()) {
return stat.size;
} }
return stat.size; const getItemSize = async (filePath: string): Promise<number> => {
}; const stat = await fs.promises.stat(filePath);
if (stat.isDirectory()) {
return getDirSize(filePath);
}
return stat.size;
};
try {
const files = await fs.promises.readdir(dir); const files = await fs.promises.readdir(dir);
const filePaths = files.map((file) => path.join(dir, file)); const filePaths = files.map((file) => path.join(dir, file));
const sizes = await Promise.all(filePaths.map(getItemSize)); const sizes = await Promise.all(filePaths.map(getItemSize));

View file

@ -8,8 +8,6 @@ import crypto from "node:crypto";
import { pythonRpcLogger } from "./logger"; import { pythonRpcLogger } from "./logger";
import { Readable } from "node:stream"; import { Readable } from "node:stream";
import { app, dialog } from "electron"; import { app, dialog } from "electron";
import { db, levelKeys } from "@main/level";
import type { UserPreferences } from "@types";
interface GamePayload { interface GamePayload {
game_id: string; game_id: string;
@ -44,7 +42,7 @@ export class PythonRPC {
readable.on("data", pythonRpcLogger.log); readable.on("data", pythonRpcLogger.log);
} }
public static async spawn( public static spawn(
initialDownload?: GamePayload, initialDownload?: GamePayload,
initialSeeding?: GamePayload[] initialSeeding?: GamePayload[]
) { ) {
@ -56,15 +54,6 @@ export class PythonRPC {
initialSeeding ? JSON.stringify(initialSeeding) : "", initialSeeding ? JSON.stringify(initialSeeding) : "",
]; ];
const userPreferences = await db.get<string, UserPreferences | null>(levelKeys.userPreferences, {
valueEncoding: "json",
});
const env = {
...process.env,
ALLDEBRID_API_KEY: userPreferences?.allDebridApiKey || ""
};
if (app.isPackaged) { if (app.isPackaged) {
const binaryName = binaryNameByPlatform[process.platform]!; const binaryName = binaryNameByPlatform[process.platform]!;
const binaryPath = path.join( const binaryPath = path.join(
@ -85,7 +74,6 @@ export class PythonRPC {
const childProcess = cp.spawn(binaryPath, commonArgs, { const childProcess = cp.spawn(binaryPath, commonArgs, {
windowsHide: true, windowsHide: true,
stdio: ["inherit", "inherit"], stdio: ["inherit", "inherit"],
env
}); });
this.logStderr(childProcess.stderr); this.logStderr(childProcess.stderr);
@ -102,7 +90,6 @@ export class PythonRPC {
const childProcess = cp.spawn("python3", [scriptPath, ...commonArgs], { const childProcess = cp.spawn("python3", [scriptPath, ...commonArgs], {
stdio: ["inherit", "inherit"], stdio: ["inherit", "inherit"],
env
}); });
this.logStderr(childProcess.stderr); this.logStderr(childProcess.stderr);
@ -118,4 +105,4 @@ export class PythonRPC {
this.pythonProcess = null; this.pythonProcess = null;
} }
} }
} }