Working? Lets hope so

This commit is contained in:
mircea32000 2025-02-10 21:05:39 +02:00
parent 5e9aa2b0ea
commit 897e2c3193
7 changed files with 292 additions and 239 deletions

View file

@@ -1,162 +1,48 @@
import aria2p
from typing import Union, List
import logging
import os
from pathlib import Path
from aria2p import API, Client, Download
import requests
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class HttpDownloader:
    """Single-file HTTP downloader backed by a local aria2c JSON-RPC daemon.

    NOTE(review): this span was diff residue mixing two revisions; this is the
    post-commit (simplified) implementation reconstructed from the added lines.
    """

    def __init__(self):
        # Currently tracked aria2 download; None until start_download is called.
        self.download = None
        # aria2 RPC endpoint — assumes aria2c is running locally with no secret.
        self.aria2 = aria2p.API(
            aria2p.Client(
                host="http://localhost",
                port=6800,
                secret=""
            )
        )

    def start_download(self, url: str, save_path: str, header: str, out: str = None):
        """Start downloading `url` into `save_path`, or resume a paused one.

        If a download is already tracked it is resumed instead of re-added.
        `header` and `out` are forwarded verbatim as aria2 options.
        """
        if self.download:
            self.aria2.resume([self.download])
        else:
            downloads = self.aria2.add(url, options={"header": header, "dir": save_path, "out": out})
            # aria2p.API.add returns a list even for a single URI.
            self.download = downloads[0]

    def pause_download(self):
        """Pause the tracked download, if any."""
        if self.download:
            self.aria2.pause([self.download])

    def cancel_download(self):
        """Remove the tracked download from aria2 and forget it."""
        if self.download:
            self.aria2.remove([self.download])
            self.download = None

    def get_download_status(self):
        """Return a status dict for the tracked download, or None when idle."""
        if self.download is None:
            return None
        # Re-fetch by GID so progress numbers are current, not stale.
        download = self.aria2.get_download(self.download.gid)
        response = {
            'folderName': download.name,
            'fileSize': download.total_length,
            'progress': download.completed_length / download.total_length if download.total_length else 0,
            'downloadSpeed': download.download_speed,
            'numPeers': 0,
            'numSeeds': 0,
            'status': download.status,
            'bytesDownloaded': download.completed_length,
        }
        print("HTTP_DOWNLOADER_STATUS: ", response)
        return response

View file

@@ -0,0 +1,151 @@
import aria2p
from aria2p.client import ClientException as DownloadNotFound
class HttpMultiLinkDownloader:
    """Drives several aria2 downloads at once and reports one aggregate status."""

    def __init__(self):
        # aria2p Download objects still being tracked in aria2.
        self.downloads = []
        # Finished files kept as {'name', 'size'} dicts so totals stay correct
        # after their aria2 entries have been removed.
        self.completed_downloads = []
        # Optional caller-supplied batch size for accurate progress reporting.
        self.total_size = None
        self.aria2 = aria2p.API(
            aria2p.Client(
                host="http://localhost",
                port=6800,
                secret=""
            )
        )

    def start_download(self, urls: list[str], save_path: str, header: str = None, out: str = None, total_size: int = None):
        """Queue every URL in `urls` into `save_path` with shared aria2 options."""
        options = {"dir": save_path}
        if header:
            options["header"] = header
        if out:
            options["out"] = out

        # Drop anything left over from a previous batch before queueing anew.
        self.cancel_download()
        self.completed_downloads = []
        self.total_size = total_size

        for url in urls:
            try:
                self.downloads.extend(self.aria2.add(url, options=options))
            except Exception as e:
                print(f"Error adding download for URL {url}: {str(e)}")

    def pause_download(self):
        """Pause every download still being tracked."""
        if self.downloads:
            try:
                self.aria2.pause(self.downloads)
            except Exception as e:
                print(f"Error pausing downloads: {str(e)}")

    def cancel_download(self):
        """Remove all tracked downloads from aria2 and reset local state."""
        if self.downloads:
            try:
                # Ask aria2 to drop the downloads first.
                self.aria2.remove(self.downloads)
            except Exception as e:
                print(f"Error removing downloads: {str(e)}")
            finally:
                # Forget them locally whether or not aria2 succeeded.
                self.downloads = []
                self.completed_downloads = []

    def get_download_status(self):
        """Aggregate progress across all tracked downloads.

        Vanished or finished entries are pruned from the active list; finished
        ones are banked in completed_downloads so byte totals stay correct.
        Returns a one-element list with the combined status, or [] when idle.
        """
        if not self.downloads and not self.completed_downloads:
            return []

        # Bytes already banked by downloads that finished on earlier polls.
        total_completed = sum(done['size'] for done in self.completed_downloads)
        current_download_speed = 0
        active_downloads = []
        gone = []

        for tracked in self.downloads:
            try:
                live = self.aria2.get_download(tracked.gid)
                if not live or not live.files:
                    # Not properly initialized yet — stop tracking it.
                    gone.append(tracked)
                    continue
                total_completed += live.completed_length
                current_download_speed += live.download_speed
                if live.status == 'complete':
                    # Bank the finished file and stop polling it.
                    self.completed_downloads.append({
                        'name': live.name,
                        'size': live.total_length
                    })
                    gone.append(tracked)
                else:
                    active_downloads.append({
                        'name': live.name,
                        'size': live.total_length,
                        'completed': live.completed_length,
                        'speed': live.download_speed
                    })
            except DownloadNotFound:
                gone.append(tracked)
                continue
            except Exception as e:
                print(f"Error getting download status: {str(e)}")
                continue

        # Prune entries that finished or disappeared from aria2.
        self.downloads = [d for d in self.downloads if d not in gone]

        if self.total_size or active_downloads or self.completed_downloads:
            # Folder name comes from the first active file, else a completed one.
            folder_name = None
            if active_downloads:
                folder_name = active_downloads[0]['name']
            elif self.completed_downloads:
                folder_name = self.completed_downloads[0]['name']
            if folder_name and '/' in folder_name:
                folder_name = folder_name.split('/')[0]

            # Prefer the caller-supplied batch size; otherwise sum what we know.
            total_size = self.total_size
            if not total_size:
                total_size = sum(d['size'] for d in active_downloads) + sum(d['size'] for d in self.completed_downloads)

            # 1% margin allows for small reported-size discrepancies.
            is_complete = len(active_downloads) == 0 and total_completed >= (total_size * 0.99)
            if is_complete:
                # Clear the ledger so later polls stop reporting this batch.
                self.completed_downloads = []

            return [{
                'folderName': folder_name,
                'fileSize': total_size,
                'progress': total_completed / total_size if total_size > 0 else 0,
                'downloadSpeed': current_download_speed,
                'numPeers': 0,
                'numSeeds': 0,
                'status': 'complete' if is_complete else 'active',
                'bytesDownloaded': total_completed,
            }]
        return []

View file

@@ -2,6 +2,7 @@ from flask import Flask, request, jsonify
import sys, json, urllib.parse, psutil
from torrent_downloader import TorrentDownloader
from http_downloader import HttpDownloader
from http_multi_link_downloader import HttpMultiLinkDownloader
from profile_image_processor import ProfileImageProcessor
import libtorrent as lt
@@ -23,27 +24,27 @@ torrent_session = lt.session({'listen_interfaces': '0.0.0.0:{port}'.format(port=
if start_download_payload:
initial_download = json.loads(urllib.parse.unquote(start_download_payload))
downloading_game_id = initial_download['game_id']
url = initial_download['url']
# Verificăm dacă avem un URL de tip magnet (fie direct, fie primul dintr-o listă)
is_magnet = False
if isinstance(url, str):
is_magnet = url.startswith('magnet')
elif isinstance(url, list) and url:
is_magnet = False # Pentru AllDebrid, chiar dacă vine dintr-un magnet, primim HTTP links
if is_magnet:
if isinstance(initial_download['url'], list):
# Handle multiple URLs using HttpMultiLinkDownloader
http_multi_downloader = HttpMultiLinkDownloader()
downloads[initial_download['game_id']] = http_multi_downloader
try:
http_multi_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get("out"))
except Exception as e:
print("Error starting multi-link download", e)
elif initial_download['url'].startswith('magnet'):
torrent_downloader = TorrentDownloader(torrent_session)
downloads[initial_download['game_id']] = torrent_downloader
try:
torrent_downloader.start_download(url, initial_download['save_path'])
torrent_downloader.start_download(initial_download['url'], initial_download['save_path'])
except Exception as e:
print("Error starting torrent download", e)
else:
http_downloader = HttpDownloader()
downloads[initial_download['game_id']] = http_downloader
try:
http_downloader.start_download(url, initial_download['save_path'], initial_download.get('header'), initial_download.get("out"))
http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get("out"))
except Exception as e:
print("Error starting http download", e)
@@ -70,12 +71,23 @@ def status():
return auth_error
downloader = downloads.get(downloading_game_id)
if downloader:
status = downloads.get(downloading_game_id).get_download_status()
return jsonify(status), 200
else:
if not downloader:
return jsonify(None)
status = downloader.get_download_status()
if not status:
return jsonify(None)
if isinstance(status, list):
if not status: # Empty list
return jsonify(None)
# For multi-link downloader, use the aggregated status
# The status will already be aggregated by the HttpMultiLinkDownloader
return jsonify(status[0]), 200
return jsonify(status), 200
@app.route("/seed-status", methods=["GET"])
def seed_status():
auth_error = validate_rpc_password()
@@ -89,10 +101,24 @@ def seed_status():
continue
response = downloader.get_download_status()
if response is None:
if not response:
continue
if response.get('status') == 5:
if isinstance(response, list):
# For multi-link downloader, check if all files are complete
if response and all(item['status'] == 'complete' for item in response):
seed_status.append({
'gameId': game_id,
'status': 'complete',
'folderName': response[0]['folderName'],
'fileSize': sum(item['fileSize'] for item in response),
'bytesDownloaded': sum(item['bytesDownloaded'] for item in response),
'downloadSpeed': 0,
'numPeers': 0,
'numSeeds': 0,
'progress': 1.0
})
elif response.get('status') == 5: # Original torrent seeding check
seed_status.append({
'gameId': game_id,
**response,
@@ -143,18 +169,18 @@ def action():
if action == 'start':
url = data.get('url')
print(f"Starting download with URL: {url}")
existing_downloader = downloads.get(game_id)
# Verificăm dacă avem un URL de tip magnet (fie direct, fie primul dintr-o listă)
is_magnet = False
if isinstance(url, str):
is_magnet = url.startswith('magnet')
elif isinstance(url, list) and url:
is_magnet = False # Pentru AllDebrid, chiar dacă vine dintr-un magnet, primim HTTP links
if is_magnet:
if isinstance(url, list):
# Handle multiple URLs using HttpMultiLinkDownloader
if existing_downloader and isinstance(existing_downloader, HttpMultiLinkDownloader):
existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'))
else:
http_multi_downloader = HttpMultiLinkDownloader()
downloads[game_id] = http_multi_downloader
http_multi_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'))
elif url.startswith('magnet'):
if existing_downloader and isinstance(existing_downloader, TorrentDownloader):
existing_downloader.start_download(url, data['save_path'])
else:
@@ -188,6 +214,7 @@ def action():
downloader = downloads.get(game_id)
if downloader:
downloader.cancel_download()
else:
return jsonify({"error": "Invalid action"}), 400