// TorrentService — parses uploaded .torrent files (BencodeNET) and scrapes
// BitTorrent trackers for seeder/leecher statistics.
using System;
using System.Linq;
using System.Text;
using Aberwyn.Data;
using BencodeNET.Objects;
using BencodeNET.Parsing;
using BencodeNET.Torrents;
using Microsoft.EntityFrameworkCore;
/// <summary>
/// Parses .torrent files, queries their trackers for swarm statistics,
/// and exposes recently stored torrents.
/// </summary>
public interface ITorrentService
{
    /// <summary>Parses an uploaded .torrent file into a <see cref="TorrentInfo"/>.</summary>
    Task<TorrentInfo> ParseTorrentAsync(IFormFile file);

    /// <summary>Scrapes the torrent's tracker and fills in seeder/leecher counts on <paramref name="info"/>.</summary>
    Task<TorrentInfo> FetchTrackerStatsAsync(TorrentInfo info);

    /// <summary>Returns the <paramref name="count"/> most recently published torrents, newest first.</summary>
    Task<List<TorrentItem>> GetRecentTorrentsAsync(int count);
}
/// <summary>
/// Default <see cref="ITorrentService"/> implementation backed by an EF Core
/// context for storage and a shared <see cref="HttpClient"/> for tracker scraping.
/// </summary>
public class TorrentService : ITorrentService
{
    private readonly HttpClient _httpClient;
    private readonly ILogger<TorrentService> _logger;
    private readonly ApplicationDbContext _context;

    // Known trackers and their characteristics.
    // NOTE(review): not referenced by any method visible in this file — presumably
    // consumed elsewhere or kept for future use; confirm before removing.
    private readonly Dictionary<string, TrackerInfo> _knownTrackers = new()
    {
        ["hdts-announce.ru"] = new TrackerInfo
        {
            Name = "HD-Torrents",
            SupportsScraping = true,
            RequiresAuth = false,   // scraping may work without authentication
            IsPrivate = true,
            Notes = "Privat tracker, scraping kan fungera utan inloggning"
        }
    };

    public TorrentService(HttpClient httpClient, ILogger<TorrentService> logger, ApplicationDbContext context)
    {
        _httpClient = httpClient;
        _logger = logger;
        _context = context;
        // Trackers can hang; cap every scrape request at 10 seconds.
        _httpClient.Timeout = TimeSpan.FromSeconds(10);
    }
public async Task<List<TorrentItem>> GetRecentTorrentsAsync(int count)
|
|
{
|
|
return await _context.TorrentItems
|
|
.OrderByDescending(t => t.PublishDate)
|
|
.Take(count)
|
|
.ToListAsync();
|
|
}
|
|
public async Task<TorrentInfo> ParseTorrentAsync(IFormFile file)
|
|
{
|
|
try
|
|
{
|
|
using var stream = new MemoryStream();
|
|
await file.CopyToAsync(stream);
|
|
stream.Position = 0;
|
|
|
|
var parser = new TorrentParser();
|
|
var torrent = parser.Parse(stream);
|
|
var infoHash = torrent.GetInfoHashBytes();
|
|
var announceUrl = torrent.Trackers?.FirstOrDefault()?.FirstOrDefault()?.ToString();
|
|
|
|
return new TorrentInfo
|
|
{
|
|
FileName = torrent.DisplayName ?? file.FileName,
|
|
AnnounceUrl = announceUrl,
|
|
ScrapeUrl = ConvertAnnounceToScrape(announceUrl),
|
|
InfoHash = UrlEncodeInfoHash(infoHash),
|
|
InfoHashBytes = infoHash,
|
|
Size = torrent.TotalSize
|
|
};
|
|
}
|
|
catch (Exception ex)
|
|
{
|
|
_logger.LogError(ex, "Fel vid parsing av torrent-fil");
|
|
return new TorrentInfo
|
|
{
|
|
FileName = file.FileName,
|
|
ErrorMessage = $"Kunde inte parsa torrent-filen: {ex.Message}"
|
|
};
|
|
}
|
|
}
|
|
|
|
public async Task<TorrentInfo> FetchTrackerStatsAsync(TorrentInfo info)
|
|
{
|
|
if (string.IsNullOrWhiteSpace(info.ScrapeUrl))
|
|
{
|
|
info.ErrorMessage = "Ingen scrape URL tillgänglig";
|
|
return info;
|
|
}
|
|
|
|
var url = $"{info.ScrapeUrl}?info_hash={info.InfoHash}";
|
|
_logger.LogInformation("Scraping tracker: {Url}", url);
|
|
|
|
try
|
|
{
|
|
var data = await _httpClient.GetByteArrayAsync(url);
|
|
var parser = new BencodeParser();
|
|
var bdict = parser.Parse<BDictionary>(data);
|
|
|
|
if (bdict.TryGetValue("files", out var filesValue) && filesValue is BDictionary files)
|
|
{
|
|
// Använd direkt byte array istället för att konvertera till sträng
|
|
if (TryGetStatsFromFiles(files, info.InfoHashBytes, info))
|
|
{
|
|
info.HasTrackerData = true;
|
|
return info;
|
|
}
|
|
|
|
// Om det inte fungerar, prova att URL-decode först
|
|
if (!string.IsNullOrEmpty(info.InfoHash))
|
|
{
|
|
try
|
|
{
|
|
string decoded = Uri.UnescapeDataString(info.InfoHash);
|
|
byte[] decodedBytes = Encoding.GetEncoding("ISO-8859-1").GetBytes(decoded);
|
|
|
|
if (TryGetStatsFromFiles(files, decodedBytes, info))
|
|
{
|
|
info.HasTrackerData = true;
|
|
return info;
|
|
}
|
|
}
|
|
catch { /* Ignore decode errors */ }
|
|
}
|
|
|
|
info.ErrorMessage = "Info hash hittades inte i tracker-svaret";
|
|
}
|
|
}
|
|
catch (HttpRequestException ex)
|
|
{
|
|
info.ErrorMessage = $"HTTP fel: {ex.Message}";
|
|
_logger.LogWarning(ex, "HTTP fel vid tracker scraping");
|
|
}
|
|
catch (TaskCanceledException)
|
|
{
|
|
info.ErrorMessage = "Timeout vid anslutning till tracker";
|
|
_logger.LogWarning("Timeout vid tracker scraping");
|
|
}
|
|
catch (Exception ex)
|
|
{
|
|
info.ErrorMessage = $"Fel vid parsing: {ex.Message}";
|
|
_logger.LogError(ex, "Fel vid tracker scraping");
|
|
}
|
|
|
|
return info;
|
|
}
|
|
private bool ByteArraysEqual(byte[] a, byte[] b)
|
|
{
|
|
if (a.Length != b.Length) return false;
|
|
for (int i = 0; i < a.Length; i++)
|
|
{
|
|
if (a[i] != b[i]) return false;
|
|
}
|
|
return true;
|
|
}
|
|
private bool TryGetStatsFromFiles(BDictionary files, byte[] hashBytes, TorrentInfo info)
|
|
{
|
|
// Skapa en BString från byte array
|
|
var bStringKey = new BString(hashBytes);
|
|
|
|
if (files.TryGetValue(bStringKey, out var hashEntry) && hashEntry is BDictionary stats)
|
|
{
|
|
info.Seeders = stats.TryGetInt("complete") ?? 0;
|
|
info.Leechers = stats.TryGetInt("incomplete") ?? 0;
|
|
info.Completed = stats.TryGetInt("downloaded") ?? 0;
|
|
return true;
|
|
}
|
|
return false;
|
|
}
|
|
|
|
private string ConvertAnnounceToScrape(string announceUrl)
|
|
{
|
|
if (string.IsNullOrEmpty(announceUrl))
|
|
return null;
|
|
|
|
return announceUrl.Replace("/announce", "/scrape");
|
|
}
|
|
|
|
private string UrlEncodeInfoHash(byte[] infoHash)
|
|
{
|
|
var sb = new StringBuilder();
|
|
foreach (byte b in infoHash)
|
|
{
|
|
sb.AppendFormat("%{0:x2}", b);
|
|
}
|
|
return sb.ToString();
|
|
}
|
|
}
/// <summary>
/// Convenience lookups for BencodeNET dictionaries.
/// </summary>
public static class BDictionaryExtensions
{
    /// <summary>
    /// Reads <paramref name="key"/> as an integer, or <c>null</c> when the
    /// key is missing or the value is not a bencoded number.
    /// </summary>
    public static int? TryGetInt(this BDictionary dict, string key)
    {
        if (dict.TryGetValue(key, out var value) && value is BNumber num)
        {
            return (int)num.Value;
        }
        return null;
    }
}