elias
2025-08-03 21:46:06 +02:00
parent cc96802637
commit 80ffad6d86


@@ -0,0 +1,159 @@
using BencodeNET.Parsing;
using BencodeNET.Torrents;
using BencodeNET.Objects;
using System.Text;
public interface ITorrentService
{
Task<TorrentInfo> ParseTorrentAsync(IFormFile file);
Task<TorrentInfo> FetchTrackerStatsAsync(TorrentInfo info);
}
public class TorrentService : ITorrentService
{
private readonly HttpClient _httpClient;
private readonly ILogger<TorrentService> _logger;
// Known trackers and their properties
private readonly Dictionary<string, TrackerInfo> _knownTrackers = new()
{
["hdts-announce.ru"] = new TrackerInfo
{
Name = "HD-Torrents",
SupportsScraping = true, // changed to true
RequiresAuth = false, // may work without auth for scraping
IsPrivate = true,
Notes = "Private tracker, scraping may work without login"
}
};
public TorrentService(HttpClient httpClient, ILogger<TorrentService> logger)
{
_httpClient = httpClient;
_logger = logger;
_httpClient.Timeout = TimeSpan.FromSeconds(10);
}
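// Reads the uploaded .torrent file into memory, parses it with BencodeNET and extracts
// the fields needed for scraping (announce/scrape URLs, total size and the 20-byte info hash).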
public async Task<TorrentInfo> ParseTorrentAsync(IFormFile file)
{
try
{
using var stream = new MemoryStream();
await file.CopyToAsync(stream);
stream.Position = 0;
var parser = new TorrentParser();
var torrent = parser.Parse(stream);
var infoHash = torrent.GetInfoHashBytes();
var announceUrl = torrent.Trackers?.FirstOrDefault()?.FirstOrDefault()?.ToString();
return new TorrentInfo
{
FileName = torrent.DisplayName ?? file.FileName,
AnnounceUrl = announceUrl,
ScrapeUrl = ConvertAnnounceToScrape(announceUrl),
InfoHash = UrlEncodeInfoHash(infoHash),
InfoHashBytes = infoHash,
Size = torrent.TotalSize
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Fel vid parsing av torrent-fil");
return new TorrentInfo
{
FileName = file.FileName,
ErrorMessage = $"Kunde inte parsa torrent-filen: {ex.Message}"
};
}
}
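// Sends a conventional HTTP scrape request to the tracker and, on success, fills in
// seeders/leechers/completed on the supplied TorrentInfo.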
public async Task<TorrentInfo> FetchTrackerStatsAsync(TorrentInfo info)
{
if (string.IsNullOrWhiteSpace(info.ScrapeUrl))
{
info.ErrorMessage = "Ingen scrape URL tillgänglig";
return info;
}
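// Conventional scrape request: GET <scrape-url>?info_hash=<percent-encoded 20-byte hash>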
var url = $"{info.ScrapeUrl}?info_hash={info.InfoHash}";
_logger.LogInformation("Scraping tracker: {Url}", url);
try
{
var data = await _httpClient.GetByteArrayAsync(url);
var parser = new BencodeParser();
var bdict = parser.Parse<BDictionary>(data);
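// A scrape response is a bencoded dictionary of the form:
// { "files": { <20-byte info hash>: { "complete": <seeders>, "downloaded": <completed>, "incomplete": <leechers> } } }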
if (bdict.TryGetValue("files", out var filesValue) && filesValue is BDictionary files)
{
// Try the percent-encoded key first, then the raw 20-byte hash (trackers normally key on the raw bytes)
if (TryGetStatsFromFiles(files, info.InfoHash, info) ||
TryGetStatsFromFiles(files, new BString(info.InfoHashBytes), info))
{
info.HasTrackerData = true;
return info;
}
info.ErrorMessage = "Info hash hittades inte i tracker-svaret";
}
else
{
info.ErrorMessage = "Inget 'files' objekt i tracker-svaret";
}
}
catch (HttpRequestException ex)
{
info.ErrorMessage = $"HTTP fel: {ex.Message}";
_logger.LogWarning(ex, "HTTP fel vid tracker scraping");
}
catch (TaskCanceledException)
{
info.ErrorMessage = "Timeout vid anslutning till tracker";
_logger.LogWarning("Timeout vid tracker scraping");
}
catch (Exception ex)
{
info.ErrorMessage = $"Fel vid parsing: {ex.Message}";
_logger.LogError(ex, "Fel vid tracker scraping");
}
return info;
}
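// Looks up a single key in the "files" dictionary and copies its counters onto the TorrentInfo.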
private bool TryGetStatsFromFiles(BDictionary files, BString hashKey, TorrentInfo info)
{
if (files.TryGetValue(hashKey, out var hashEntry) && hashEntry is BDictionary stats)
{
info.Seeders = stats.TryGetInt("complete") ?? 0;
info.Leechers = stats.TryGetInt("incomplete") ?? 0;
info.Completed = stats.TryGetInt("downloaded") ?? 0;
return true;
}
return false;
}
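// By convention, a tracker that supports scraping exposes it at the announce URL with the
// "announce" path segment replaced by "scrape".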
private string ConvertAnnounceToScrape(string announceUrl)
{
if (string.IsNullOrEmpty(announceUrl))
return null;
// If the announce URL does not follow the "/announce" convention, scraping is not supported
if (!announceUrl.Contains("/announce"))
return null;
return announceUrl.Replace("/announce", "/scrape");
}
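// Percent-encodes every byte of the raw info hash (e.g. 0x1F -> "%1f"); encoding all bytes,
// including unreserved characters, is a valid encoding for the info_hash parameter.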
private string UrlEncodeInfoHash(byte[] infoHash)
{
var sb = new StringBuilder();
foreach (byte b in infoHash)
{
sb.AppendFormat("%{0:x2}", b);
}
return sb.ToString();
}
}
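// Small helper for reading optional integer values out of a BencodeNET BDictionary.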
public static class BDictionaryExtensions
{
public static int? TryGetInt(this BDictionary dict, string key)
{
return dict.TryGetValue(key, out var value) && value is BNumber num ? (int?)num.Value : null;
}
}
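// Usage sketch (assumption, not part of this commit): the service is expected to be registered
// with a typed HttpClient in Program.cs, for example:
//   builder.Services.AddHttpClient<ITorrentService, TorrentService>();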