mirror of https://github.com/Jackett/Jackett.git (synced 2025-09-12 23:14:08 +02:00)

Compare commits: v0.17.996 ... v0.17.1036 (13 commits)

SHA1: c2797e132e, d34dbcb626, 6740c7c40f, 17fc2d50cf, 77af202e2c, cdbe24dfdf, 7983bc9a57, 381e674ac4, 921093934f, ca3466050c, 993116c96f, 98dad4c169, 7789a72ffb
@@ -301,6 +301,14 @@ stages:
targetType: inline
failOnStderr: true
script: |
dotnet-format --fix-whitespace --verbosity diagnostic --folder ./src/DateTimeRoutines
dotnet-format --fix-whitespace --verbosity diagnostic --folder ./src/Jackett.Common
dotnet-format --fix-whitespace --verbosity diagnostic --folder ./src/Jackett.IntegrationTests
dotnet-format --fix-whitespace --verbosity diagnostic --folder ./src/Jackett.Server
dotnet-format --fix-whitespace --verbosity diagnostic --folder ./src/Jackett.Service
dotnet-format --fix-whitespace --verbosity diagnostic --folder ./src/Jackett.Test
dotnet-format --fix-whitespace --verbosity diagnostic --folder ./src/Jackett.Tray
dotnet-format --fix-whitespace --verbosity diagnostic --folder ./src/Jackett.Updater
dotnet-format --check --verbosity diagnostic --folder ./src/DateTimeRoutines
dotnet-format --check --verbosity diagnostic --folder ./src/Jackett.Common
dotnet-format --check --verbosity diagnostic --folder ./src/Jackett.IntegrationTests
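The pipeline step above runs the dotnet-format tool twice per project folder: once with --fix-whitespace and once with --check. As a rough local equivalent, the following C# sketch shells out to dotnet-format over the same folder list; the FormatRunner helper is hypothetical and assumes the dotnet-format global tool is installed and on PATH.

using System;
using System.Diagnostics;

// Hypothetical local helper mirroring the pipeline step above:
// run "dotnet-format --fix-whitespace" and then "--check" over each project folder.
// Assumes the dotnet-format global tool is installed and on PATH.
class FormatRunner
{
    static readonly string[] Folders =
    {
        "./src/DateTimeRoutines", "./src/Jackett.Common", "./src/Jackett.IntegrationTests",
        "./src/Jackett.Server", "./src/Jackett.Service", "./src/Jackett.Test",
        "./src/Jackett.Tray", "./src/Jackett.Updater"
    };

    static int Run(string mode) // mode: "--fix-whitespace" or "--check"
    {
        foreach (var folder in Folders)
        {
            var psi = new ProcessStartInfo("dotnet-format",
                $"{mode} --verbosity diagnostic --folder {folder}")
            { UseShellExecute = false };
            using var proc = Process.Start(psi);
            proc.WaitForExit();
            if (proc.ExitCode != 0)
                return proc.ExitCode; // stop on the first non-zero exit code
        }
        return 0;
    }

    static int Main() => Run("--fix-whitespace") != 0 ? 1 : Run("--check");
}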
@@ -162,6 +162,8 @@ search:
img.pro_free2up: 2
img.pro_2up: 2
"*": 1
minimumratio:
text: 0.8
description:
selector: td:nth-child(2)
remove: a, img
@@ -100,7 +100,7 @@ search:
# does not support imdbid search and does not return imdb link in results

rows:
selector: "table.lista > tbody > tr:has(a[href^=\"index.php?page=torrent-details&id=\"]){{ if .Config.freeleech }}:has(img[src=\"images/freeleech.gif\"]){{ else }}{{ end }}"
selector: "table.lista > tbody > tr:has(a[href^=\"index.php?page=torrent-details&id=\"]){{ if .Config.freeleech }}:has(img[src=\"images/freeleech.gif\"]){{ else }}{{ end }}{{ if .Config.freeleech }}, table.lista > tbody > tr:has(a[href^=\"index.php?page=torrent-details&id=\"]):has(img[src=\"images/gold.gif\"]){{ else }}{{ end }}"

fields:
category:
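The selector change above means that, when the freeleech option is enabled, rows flagged with images/gold.gif are matched in addition to rows flagged with images/freeleech.gif. A minimal C# sketch of the same idea, with illustrative names rather than Jackett's actual template engine:

using System;
using System.Collections.Generic;

// Minimal sketch of the idea behind the selector change above: when the
// "freeleech only" option is enabled, match rows carrying either the
// freeleech icon or the gold icon; otherwise match every details row.
static class RowSelectorBuilder
{
    const string BaseRow = "table.lista > tbody > tr:has(a[href^=\"index.php?page=torrent-details&id=\"])";

    public static string Build(bool freeleechOnly)
    {
        if (!freeleechOnly)
            return BaseRow;

        var variants = new List<string>
        {
            BaseRow + ":has(img[src=\"images/freeleech.gif\"])",
            BaseRow + ":has(img[src=\"images/gold.gif\"])" // gold releases are also free
        };
        return string.Join(", ", variants); // a comma joins alternatives, as in the new selector
    }
}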
@@ -151,7 +151,17 @@ search:
img[src="images/bronze.gif"]: 0.75
"*": 1
uploadvolumefactor:
text: 1
case:
img[src="images/2x.gif"]: 2
img[src="images/3x.gif"]: 3
img[src="images/4x.gif"]: 4
img[src="images/5x.gif"]: 5
img[src="images/6x.gif"]: 6
img[src="images/7x.gif"]: 7
img[src="images/8x.gif"]: 8
img[src="images/9x.gif"]: 9
img[src="images/10x.gif"]: 10
"*": 1
minimumratio:
text: 1.0
minimumseedtime:
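The case block above maps the multiplier icon (2x.gif through 10x.gif) to an uploadvolumefactor, with "*": 1 as the fallback. A small C# sketch of how such a case list can be evaluated: the first matching key wins and "*" acts as the default; the rowMatches delegate stands in for a real CSS-selector check and is purely illustrative.

using System;
using System.Collections.Generic;

// Sketch of reading a Cardigann-style "case" block: keys are checked in
// order, the first one matching the row decides the value, "*" is the
// catch-all default.
static class CaseBlock
{
    public static double Resolve(IReadOnlyList<KeyValuePair<string, double>> cases,
                                 Func<string, bool> rowMatches)
    {
        foreach (var c in cases)
        {
            if (c.Key == "*" || rowMatches(c.Key))
                return c.Value;
        }
        return 1; // no entry matched and no "*" default was provided
    }

    public static void Example()
    {
        var uploadFactorCases = new List<KeyValuePair<string, double>>
        {
            new("img[src=\"images/2x.gif\"]", 2),
            new("img[src=\"images/3x.gif\"]", 3),
            new("img[src=\"images/10x.gif\"]", 10),
            new("*", 1),
        };
        // A row showing the 3x icon resolves to an upload volume factor of 3.
        double factor = Resolve(uploadFactorCases, sel => sel.Contains("3x.gif"));
        Console.WriteLine(factor); // 3
    }
}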
@@ -11,6 +11,7 @@ links:
- https://www.torlock2.com/
- https://www.torlock.icu/
- https://torlock.unblockit.onl/
- https://torlock.nocensor.space/
legacylinks:
- https://torlock.com/
- https://torlock.unblockit.pro/
@@ -7,6 +7,7 @@ type: public
encoding: UTF-8
links:
- https://www.toros.co/
- https://toros.nocensor.space/

caps:
categorymappings:
@@ -10,6 +10,7 @@ links:
- https://www.torrentfunk.com/
- https://www.torrentfunk2.com/
- https://torrentfunk.unblockit.onl/
- https://torrentfunk.nocensor.space/
legacylinks:
- https://torrentfunk.unblockit.pro/
- https://torrentfunk.unblockit.one/
@@ -79,7 +79,6 @@ search:
selector: a[href$=".html"][title]
attribute: href
date:
# note: this will cause 0m date results for MM-dd dates that are higher than current date, as Jackett dateparse assumes year is now.
selector: div.wr-date:contains("-")
optional: true
filters:
@@ -8,6 +8,7 @@ encoding: UTF-8
links:
- https://yourbittorrent.com/
- https://yourbittorrent2.com/
- https://yourbittorrent.nocensor.space/
legacylinks:
- https://yourbittorrent.host/
@@ -3,10 +3,8 @@ using System.Collections.Generic;
|
||||
using System.Collections.Specialized;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading.Tasks;
|
||||
@@ -17,8 +15,6 @@ using Jackett.Common.Models;
|
||||
using Jackett.Common.Models.IndexerConfig.Bespoke;
|
||||
using Jackett.Common.Services.Interfaces;
|
||||
using Jackett.Common.Utils;
|
||||
using Jackett.Common.Utils.Clients;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using NLog;
|
||||
using WebClient = Jackett.Common.Utils.Clients.WebClient;
|
||||
@@ -36,12 +32,6 @@ namespace Jackett.Common.Indexers
|
||||
private string SearchUrl => SiteLink + "torrents.php";
|
||||
private string DetailsUrl => SiteLink + "torrents.php?id=";
|
||||
private string ReplaceMulti => ConfigData.ReplaceMulti.Value;
|
||||
private bool Latency => ConfigData.Latency.Value;
|
||||
private bool DevMode => ConfigData.DevMode.Value;
|
||||
private bool CacheMode => ConfigData.HardDriveCache.Value;
|
||||
private static string Directory => Path.Combine(Path.GetTempPath(), Assembly.GetExecutingAssembly().GetName().Name.ToLower(), MethodBase.GetCurrentMethod().DeclaringType?.Name.ToLower());
|
||||
|
||||
private readonly Dictionary<string, string> emulatedBrowserHeaders = new Dictionary<string, string>();
|
||||
|
||||
private ConfigurationDataAbnormal ConfigData
|
||||
{
|
||||
@@ -76,8 +66,6 @@ namespace Jackett.Common.Indexers
|
||||
Language = "fr-fr";
|
||||
Encoding = Encoding.UTF8;
|
||||
Type = "private";
|
||||
// NET::ERR_CERT_DATE_INVALID expired 29 July 2020
|
||||
w.AddTrustedCertificate(new Uri(SiteLink).Host, "9cb32582b564256146616afddbdb8e7c94c428ed");
|
||||
|
||||
AddCategoryMapping("MOVIE|DVDR", TorznabCatType.MoviesDVD, "DVDR");
|
||||
AddCategoryMapping("MOVIE|DVDRIP", TorznabCatType.MoviesSD, "DVDRIP");
|
||||
@@ -115,34 +103,7 @@ namespace Jackett.Common.Indexers
|
||||
LoadValuesFromJson(configJson);
|
||||
|
||||
// Check & Validate Config
|
||||
validateConfig();
|
||||
|
||||
// Setting our data for a better emulated browser (maximum security)
|
||||
// TODO: Encoded Content not supported by Jackett at this time
|
||||
// emulatedBrowserHeaders.Add("Accept-Encoding", "gzip, deflate");
|
||||
|
||||
// If we want to simulate a browser
|
||||
if (ConfigData.Browser.Value)
|
||||
{
|
||||
// Clean headers
|
||||
emulatedBrowserHeaders.Clear();
|
||||
|
||||
// Inject headers
|
||||
emulatedBrowserHeaders.Add("Accept", ConfigData.HeaderAccept.Value);
|
||||
emulatedBrowserHeaders.Add("Accept-Language", ConfigData.HeaderAcceptLang.Value);
|
||||
emulatedBrowserHeaders.Add("DNT", Convert.ToInt32(ConfigData.HeaderDNT.Value).ToString());
|
||||
emulatedBrowserHeaders.Add("Upgrade-Insecure-Requests", Convert.ToInt32(ConfigData.HeaderUpgradeInsecure.Value).ToString());
|
||||
emulatedBrowserHeaders.Add("User-Agent", ConfigData.HeaderUserAgent.Value);
|
||||
}
|
||||
|
||||
// Getting login form to retrieve CSRF token
|
||||
var myRequest = new Utils.Clients.WebRequest
|
||||
{
|
||||
Url = LoginUrl
|
||||
};
|
||||
|
||||
// Add our headers to request
|
||||
myRequest.Headers = emulatedBrowserHeaders;
|
||||
ValidateConfig();
|
||||
|
||||
// Building login form data
|
||||
var pairs = new Dictionary<string, string> {
|
||||
@@ -152,20 +113,9 @@ namespace Jackett.Common.Indexers
|
||||
{ "login", "Connexion" }
|
||||
};
|
||||
|
||||
// Do the login
|
||||
var request = new Utils.Clients.WebRequest
|
||||
{
|
||||
PostData = pairs,
|
||||
Referer = LoginUrl,
|
||||
Type = RequestType.POST,
|
||||
Url = LoginUrl,
|
||||
Headers = emulatedBrowserHeaders
|
||||
};
|
||||
|
||||
// Perform loggin
|
||||
latencyNow();
|
||||
output("\nPerform loggin.. with " + LoginUrl);
|
||||
var response = await webclient.GetResultAsync(request);
|
||||
logger.Info("\nAbnormal - Perform loggin.. with " + LoginUrl);
|
||||
var response = await RequestLoginAndFollowRedirect(LoginUrl, pairs, null, true, null, LoginUrl, true);
|
||||
|
||||
// Test if we are logged in
|
||||
await ConfigureIfOK(response.Cookies, response.Cookies.Contains("session="), () =>
|
||||
@@ -179,11 +129,11 @@ namespace Jackett.Common.Indexers
|
||||
var left = dom.QuerySelector(".info").TextContent.Trim();
|
||||
|
||||
// Oops, unable to login
|
||||
output("-> Login failed: \"" + message + "\" and " + left + " tries left before being banned for 6 hours !", "error");
|
||||
throw new ExceptionWithConfigData("Login failed: " + message, configData);
|
||||
logger.Info("Abnormal - Login failed: \"" + message + "\" and " + left + " tries left before being banned for 6 hours !", "error");
|
||||
throw new ExceptionWithConfigData("Abnormal - Login failed: " + message, configData);
|
||||
});
|
||||
|
||||
output("-> Login Success");
|
||||
logger.Info("-> Login Success");
|
||||
|
||||
return IndexerConfigurationStatus.RequiresTesting;
|
||||
}
|
||||
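The login rewrite above drops the hand-built WebRequest in favour of RequestLoginAndFollowRedirect and then checks for a "session" cookie via ConfigureIfOK. The stand-alone sketch below shows the same pattern with plain HttpClient; the URL and every form field except the login=Connexion pair are placeholders, not the tracker's real field names.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;

// Rough stand-alone sketch of the login pattern used above: POST the form
// credentials, follow the redirect, then treat the presence of a "session"
// cookie as proof the login worked. Field names other than "login" are assumptions.
static class LoginSketch
{
    public static async Task<CookieContainer> LoginAsync(string loginUrl, string user, string pass)
    {
        var cookies = new CookieContainer();
        using var handler = new HttpClientHandler { CookieContainer = cookies, AllowAutoRedirect = true };
        using var client = new HttpClient(handler);

        var form = new FormUrlEncodedContent(new Dictionary<string, string>
        {
            ["username"] = user,      // placeholder field name
            ["password"] = pass,      // placeholder field name
            ["login"] = "Connexion"   // same submit value as the tracker form above
        });

        using var response = await client.PostAsync(loginUrl, form);

        var sessionCookie = cookies.GetCookies(new Uri(loginUrl))
                                   .Cast<Cookie>()
                                   .FirstOrDefault(c => c.Name.StartsWith("session", StringComparison.OrdinalIgnoreCase));
        if (sessionCookie == null)
            throw new Exception("Login failed: no session cookie returned");

        return cookies;
    }
}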
@@ -206,41 +156,26 @@ namespace Jackett.Common.Indexers
|
||||
var qRowList = new List<IElement>();
|
||||
var searchTerm = query.GetQueryString();
|
||||
var searchUrl = SearchUrl;
|
||||
var nbResults = 0;
|
||||
var pageLinkCount = 0;
|
||||
|
||||
// Check cache first so we don't query the server (if search term used or not in dev mode)
|
||||
if (!DevMode && !string.IsNullOrEmpty(searchTerm))
|
||||
{
|
||||
lock (cache)
|
||||
{
|
||||
// Remove old cache items
|
||||
CleanCache();
|
||||
|
||||
// Search in cache
|
||||
var cachedResult = cache.Where(i => i.Query == searchTerm).FirstOrDefault();
|
||||
if (cachedResult != null)
|
||||
return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
// Build our query
|
||||
var request = buildQuery(searchTerm, query, searchUrl);
|
||||
var request = BuildQuery(searchTerm, query, searchUrl);
|
||||
|
||||
// Getting results & Store content
|
||||
var parser = new HtmlParser();
|
||||
var dom = parser.ParseDocument(await queryExec(request));
|
||||
var dom = parser.ParseDocument(await QueryExecAsync(request));
|
||||
|
||||
try
|
||||
{
|
||||
// Find torrent rows
|
||||
var firstPageRows = findTorrentRows(dom);
|
||||
var firstPageRows = FindTorrentRows(dom);
|
||||
|
||||
// Add them to torrents list
|
||||
qRowList.AddRange(firstPageRows);
|
||||
|
||||
// Check if there are pagination links at bottom
|
||||
var qPagination = dom.QuerySelectorAll(".linkbox > a");
|
||||
int pageLinkCount;
|
||||
int nbResults;
|
||||
if (qPagination.Length > 0)
|
||||
{
|
||||
// Calculate numbers of pages available for this search query (Based on number results and number of torrents on first page)
|
||||
@@ -260,13 +195,13 @@ namespace Jackett.Common.Indexers
|
||||
}
|
||||
else
|
||||
{
|
||||
output("\nNo result found for your query, please try another search term ...\n", "info");
|
||||
logger.Info("\nAbnormal - No result found for your query, please try another search term ...\n", "info");
|
||||
// No result found for this query
|
||||
return releases;
|
||||
}
|
||||
}
|
||||
output("\nFound " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !");
|
||||
output("\nThere are " + firstPageRows.Length + " results on the first page !");
|
||||
logger.Info("\nAbnormal - Found " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !");
|
||||
logger.Info("\nAbnormal - There are " + firstPageRows.Length + " results on the first page !");
|
||||
|
||||
// If we have a term used for search and pagination result superior to one
|
||||
if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1)
|
||||
@@ -274,20 +209,17 @@ namespace Jackett.Common.Indexers
|
||||
// Starting with page #2
|
||||
for (var i = 2; i <= Math.Min(int.Parse(ConfigData.Pages.Value), pageLinkCount); i++)
|
||||
{
|
||||
output("\nProcessing page #" + i);
|
||||
|
||||
// Request our page
|
||||
latencyNow();
|
||||
logger.Info("\nAbnormal - Processing page #" + i);
|
||||
|
||||
// Build our query
|
||||
var pageRequest = buildQuery(searchTerm, query, searchUrl, i);
|
||||
var pageRequest = BuildQuery(searchTerm, query, searchUrl, i);
|
||||
|
||||
// Getting results & Store content
|
||||
parser = new HtmlParser();
|
||||
dom = parser.ParseDocument(await queryExec(pageRequest));
|
||||
dom = parser.ParseDocument(await QueryExecAsync(pageRequest));
|
||||
|
||||
// Process page results
|
||||
var additionalPageRows = findTorrentRows(dom);
|
||||
var additionalPageRows = FindTorrentRows(dom);
|
||||
|
||||
// Add them to torrents list
|
||||
qRowList.AddRange(additionalPageRows);
|
||||
@@ -297,11 +229,8 @@ namespace Jackett.Common.Indexers
|
||||
// Loop on results
|
||||
foreach (var row in qRowList)
|
||||
{
|
||||
output("\n=>> Torrent #" + (releases.Count + 1));
|
||||
|
||||
// ID
|
||||
var id = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(2) > a").GetAttribute("href"), @"\d+").Value);
|
||||
output("ID: " + id);
|
||||
|
||||
// Release Name
|
||||
var name = row.QuerySelector("td:nth-of-type(2) > a").TextContent;
|
||||
@@ -311,39 +240,22 @@ namespace Jackett.Common.Indexers
|
||||
var regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
|
||||
name = regex.Replace(name, "$1" + ReplaceMulti + "$2");
|
||||
}
|
||||
output("Release: " + name);
|
||||
|
||||
// Category
|
||||
var categoryId = row.QuerySelector("td:nth-of-type(1) > a").GetAttribute("href").Replace("torrents.php?cat[]=", string.Empty);
|
||||
var newznab = MapTrackerCatToNewznab(categoryId);
|
||||
output("Category: " + MapTrackerCatToNewznab(categoryId).First().ToString() + " (" + categoryId + ")");
|
||||
|
||||
// Seeders
|
||||
var seeders = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(6)").TextContent, @"\d+").Value);
|
||||
output("Seeders: " + seeders);
|
||||
|
||||
// Leechers
|
||||
var leechers = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(7)").TextContent, @"\d+").Value);
|
||||
output("Leechers: " + leechers);
|
||||
|
||||
// Completed
|
||||
var completed = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(6)").TextContent, @"\d+").Value);
|
||||
output("Completed: " + completed);
|
||||
|
||||
// Size
|
||||
var sizeStr = row.QuerySelector("td:nth-of-type(5)").TextContent.Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb");
|
||||
var size = ReleaseInfo.GetBytes(sizeStr);
|
||||
output("Size: " + sizeStr + " (" + size + " bytes)");
|
||||
var categoryId = row.QuerySelector("td:nth-of-type(1) > a").GetAttribute("href").Replace("torrents.php?cat[]=", string.Empty); // Category
|
||||
var newznab = MapTrackerCatToNewznab(categoryId); // Newznab Category
|
||||
var seeders = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(6)").TextContent, @"\d+").Value); // Seeders
|
||||
var leechers = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(7)").TextContent, @"\d+").Value); // Leechers
|
||||
var completed = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(6)").TextContent, @"\d+").Value); // Completed
|
||||
var sizeStr = row.QuerySelector("td:nth-of-type(5)").TextContent.Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb"); // Size
|
||||
var size = ReleaseInfo.GetBytes(sizeStr); // Size in bytes
|
||||
|
||||
// Publish DateToString
|
||||
var datestr = row.QuerySelector("span.time").GetAttribute("title");
|
||||
var dateLocal = DateTime.SpecifyKind(DateTime.ParseExact(datestr, "MMM dd yyyy, HH:mm", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
|
||||
var date = TimeZoneInfo.ConvertTimeToUtc(dateLocal, FranceTz);
|
||||
output("Released on: " + date);
|
||||
|
||||
// Torrent Details URL
|
||||
var details = new Uri(DetailsUrl + id);
|
||||
output("Details: " + details.AbsoluteUri);
|
||||
|
||||
// Torrent Download URL
|
||||
Uri downloadLink = null;
|
||||
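The publish date above is parsed from the tracker's French local time and converted to UTC through a France TimeZoneInfo (the FranceTz field itself is defined outside the quoted hunk). A self-contained sketch of that conversion, with the timezone-id lookup shown here as an assumption:

using System;
using System.Globalization;

// The tracker prints times in French local time ("MMM dd yyyy, HH:mm"), so
// the string is parsed as an unspecified-kind DateTime and then converted to
// UTC with a Europe/Paris TimeZoneInfo.
static class PublishDateSketch
{
    static readonly TimeZoneInfo FranceTz =
        TimeZoneInfo.FindSystemTimeZoneById(
            OperatingSystem.IsWindows() ? "Romance Standard Time" : "Europe/Paris");

    public static DateTime ToUtc(string datestr) // e.g. "Jul 29 2020, 21:05"
    {
        var local = DateTime.SpecifyKind(
            DateTime.ParseExact(datestr, "MMM dd yyyy, HH:mm", CultureInfo.InvariantCulture),
            DateTimeKind.Unspecified);
        return TimeZoneInfo.ConvertTimeToUtc(local, FranceTz);
    }
}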
@@ -352,12 +264,11 @@ namespace Jackett.Common.Indexers
|
||||
{
|
||||
// Download link available
|
||||
downloadLink = new Uri(SiteLink + link);
|
||||
output("Download Link: " + downloadLink.AbsoluteUri);
|
||||
}
|
||||
else
|
||||
{
|
||||
// No download link available -- Must be on pending ( can't be downloaded now...)
|
||||
output("Download Link: Not available, torrent pending ? Skipping ...");
|
||||
logger.Info("Abnormal - Download Link: Not available, torrent pending ? Skipping ...");
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -366,7 +277,6 @@ namespace Jackett.Common.Indexers
|
||||
if (row.QuerySelector("img[alt=\"Freeleech\"]") != null)
|
||||
{
|
||||
downloadVolumeFactor = 0;
|
||||
output("FreeLeech =)");
|
||||
}
|
||||
|
||||
// Building release infos
|
||||
@@ -387,6 +297,7 @@ namespace Jackett.Common.Indexers
|
||||
DownloadVolumeFactor = downloadVolumeFactor
|
||||
};
|
||||
releases.Add(release);
|
||||
logger.Info("Abnormal - Found Release: " + release.Title + "(" + id + ")");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -406,7 +317,7 @@ namespace Jackett.Common.Indexers
|
||||
/// <param name="url">Search url for provider</param>
|
||||
/// <param name="page">Page number to request</param>
|
||||
/// <returns>URL to query for parsing and processing results</returns>
|
||||
private string buildQuery(string term, TorznabQuery query, string url, int page = 0)
|
||||
private string BuildQuery(string term, TorznabQuery query, string url, int page = 0)
|
||||
{
|
||||
var parameters = new NameValueCollection();
|
||||
var categoriesList = MapTorznabCapsToTrackers(query);
|
||||
@@ -451,7 +362,7 @@ namespace Jackett.Common.Indexers
|
||||
// Building our query -- Cannot use GetQueryString due to UrlEncode (generating wrong cat[] param)
|
||||
url += "?" + string.Join("&", parameters.AllKeys.Select(a => a + "=" + parameters[a]));
|
||||
|
||||
output("\nBuilded query for \"" + term + "\"... " + url);
|
||||
logger.Info("\nAbnormal - Builded query for \"" + term + "\"... " + url);
|
||||
|
||||
// Return our search url
|
||||
return url;
|
||||
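BuildQuery assembles the URL by hand because the tracker expects repeated, unencoded cat[] keys, which a generic URL-encoding helper would mangle into cat%5B%5D. A hedged sketch of that assembly; the parameter names other than cat[] are examples, not the tracker's real ones:

using System;
using System.Collections.Specialized;
using System.Linq;

// Sketch of the manual query-string assembly used by BuildQuery above:
// join the parameters by hand so the "cat[]" keys stay unencoded and can repeat.
static class QueryStringSketch
{
    public static string Build(string searchUrl, string term, string[] categories)
    {
        var parameters = new NameValueCollection
        {
            ["searchstr"] = Uri.EscapeDataString(term) // example parameter name (assumption)
        };
        foreach (var cat in categories)
            parameters.Add("cat[]", cat);

        // NameValueCollection folds duplicate keys, so expand them explicitly.
        var parts = parameters.AllKeys.SelectMany(
            key => parameters.GetValues(key).Select(value => key + "=" + value));
        return searchUrl + "?" + string.Join("&", parts);
    }
}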
@@ -462,77 +373,13 @@ namespace Jackett.Common.Indexers
|
||||
/// </summary>
|
||||
/// <param name="request">URL created by Query Builder</param>
|
||||
/// <returns>Results from query</returns>
|
||||
private async Task<string> queryExec(string request)
|
||||
private async Task<string> QueryExecAsync(string request)
|
||||
{
|
||||
string results = null;
|
||||
|
||||
// Switch in we are in DEV mode with Hard Drive Cache or not
|
||||
if (DevMode && CacheMode)
|
||||
{
|
||||
// Check Cache before querying and load previous results if available
|
||||
results = await queryCache(request);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Querying tracker directly
|
||||
results = await queryTracker(request);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
// Querying tracker directly
|
||||
results = await QueryTrackerAsync(request);
|
||||
|
||||
/// <summary>
|
||||
/// Get Torrents Page from Cache by Query Provided
|
||||
/// </summary>
|
||||
/// <param name="request">URL created by Query Builder</param>
|
||||
/// <returns>Results from query</returns>
|
||||
private async Task<string> queryCache(string request)
|
||||
{
|
||||
string results;
|
||||
|
||||
// Create Directory if not exist
|
||||
System.IO.Directory.CreateDirectory(Directory);
|
||||
|
||||
// Clean Storage Provider Directory from outdated cached queries
|
||||
cleanCacheStorage();
|
||||
|
||||
// File Name
|
||||
var fileName = StringUtil.HashSHA1(request) + ".json";
|
||||
|
||||
// Create fingerprint for request
|
||||
var file = Path.Combine(Directory, fileName);
|
||||
|
||||
// Checking modes states
|
||||
if (File.Exists(file))
|
||||
{
|
||||
// File exist... loading it right now !
|
||||
output("Loading results from hard drive cache ..." + fileName);
|
||||
try
|
||||
{
|
||||
using (var fileReader = File.OpenText(file))
|
||||
{
|
||||
var serializer = new JsonSerializer();
|
||||
results = (string)serializer.Deserialize(fileReader, typeof(string));
|
||||
}
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
output("Error loading cached results ! " + e.Message, "error");
|
||||
results = null;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// No cached file found, querying tracker directly
|
||||
results = await queryTracker(request);
|
||||
|
||||
// Cached file didn't exist for our query, writing it right now !
|
||||
output("Writing results to hard drive cache ..." + fileName);
|
||||
using (var fileWriter = File.CreateText(file))
|
||||
{
|
||||
var serializer = new JsonSerializer();
|
||||
serializer.Serialize(fileWriter, results);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
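The dev-mode hard drive cache above fingerprints each request with SHA1, stores the page under the temp directory, and reloads it on the next identical query. A simplified stand-alone sketch of the same get-or-fetch flow, using plain text files instead of the indexer's JSON-serialized entries:

using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;

// Each request URL is hashed to a stable, filesystem-safe file name under a
// temp folder; cached pages are returned directly, otherwise the fetch
// delegate runs and its result is written back. Folder name is illustrative.
static class PageCacheSketch
{
    static readonly string CacheDir = Path.Combine(Path.GetTempPath(), "jackett-cache-sketch");

    public static async Task<string> GetOrFetchAsync(string requestUrl, Func<string, Task<string>> fetch)
    {
        Directory.CreateDirectory(CacheDir);

        // Fingerprint the request so the file name is stable.
        var hash = Convert.ToHexString(SHA1.HashData(Encoding.UTF8.GetBytes(requestUrl)));
        var file = Path.Combine(CacheDir, hash + ".json");

        if (File.Exists(file))
            return await File.ReadAllTextAsync(file);

        var results = await fetch(requestUrl);
        await File.WriteAllTextAsync(file, results);
        return results;
    }
}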
@@ -541,142 +388,31 @@ namespace Jackett.Common.Indexers
|
||||
/// </summary>
|
||||
/// <param name="request">URL created by Query Builder</param>
|
||||
/// <returns>Results from query</returns>
|
||||
private async Task<string> queryTracker(string request)
|
||||
private async Task<string> QueryTrackerAsync(string request)
|
||||
{
|
||||
// Cache mode not enabled or cached file didn't exist for our query
|
||||
output("\nQuerying tracker for results....");
|
||||
logger.Info("\nAbnormal - Querying tracker for results....");
|
||||
|
||||
// Request our first page
|
||||
latencyNow();
|
||||
var results = await RequestWithCookiesAndRetryAsync(request, headers: emulatedBrowserHeaders);
|
||||
var results = await RequestWithCookiesAndRetryAsync(request);
|
||||
|
||||
// Return results from tracker
|
||||
return results.ContentString;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Clean Hard Drive Cache Storage
|
||||
/// </summary>
|
||||
/// <param name="force">Force Provider Folder deletion</param>
|
||||
private void cleanCacheStorage(bool force = false)
|
||||
{
|
||||
// Check cleaning method
|
||||
if (force)
|
||||
{
|
||||
// Deleting Provider Storage folder and all files recursively
|
||||
output("\nDeleting Provider Storage folder and all files recursively ...");
|
||||
|
||||
// Check if directory exist
|
||||
if (System.IO.Directory.Exists(Directory))
|
||||
{
|
||||
// Delete storage directory of provider
|
||||
System.IO.Directory.Delete(Directory, true);
|
||||
output("-> Storage folder deleted successfully.");
|
||||
}
|
||||
else
|
||||
{
|
||||
// No directory, so nothing to do
|
||||
output("-> No Storage folder found for this provider !");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var i = 0;
|
||||
// Check if there is file older than ... and delete them
|
||||
output("\nCleaning Provider Storage folder... in progress.");
|
||||
System.IO.Directory.GetFiles(Directory)
|
||||
.Select(f => new FileInfo(f))
|
||||
.Where(f => f.LastAccessTime < DateTime.Now.AddMilliseconds(-Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value)))
|
||||
.ToList()
|
||||
.ForEach(f =>
|
||||
{
|
||||
output("Deleting cached file << " + f.Name + " >> ... done.");
|
||||
f.Delete();
|
||||
i++;
|
||||
});
|
||||
|
||||
// Inform on what was cleaned during process
|
||||
if (i > 0)
|
||||
{
|
||||
output("-> Deleted " + i + " cached files during cleaning.");
|
||||
}
|
||||
else
|
||||
{
|
||||
output("-> Nothing deleted during cleaning.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generate a random fake latency to avoid detection on tracker side
|
||||
/// </summary>
|
||||
private void latencyNow()
|
||||
{
|
||||
// Need latency ?
|
||||
if (Latency)
|
||||
{
|
||||
// Generate a random value in our range
|
||||
var random = new Random(DateTime.Now.Millisecond);
|
||||
var waiting = random.Next(Convert.ToInt32(ConfigData.LatencyStart.Value), Convert.ToInt32(ConfigData.LatencyEnd.Value));
|
||||
output("\nLatency Faker => Sleeping for " + waiting + " ms...");
|
||||
|
||||
// Sleep now...
|
||||
System.Threading.Thread.Sleep(waiting);
|
||||
}
|
||||
}
|
||||
|
||||
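latencyNow waits a random interval between the configured start and end latency before each request. A short sketch of the same idea using Task.Delay instead of Thread.Sleep, so an async indexer is not blocked; the bounds are illustrative defaults:

using System;
using System.Threading.Tasks;

// Wait a random interval between a configured lower and upper bound before
// each request, to look less like an automated client.
static class LatencySketch
{
    static readonly Random Rng = new Random();

    public static Task SimulateAsync(int minMs = 500, int maxMs = 1500)
    {
        var waiting = Rng.Next(minMs, maxMs); // upper bound is exclusive, like Random.Next
        return Task.Delay(waiting);
    }
}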
/// <summary>
|
||||
/// Find torrent rows in search pages
|
||||
/// </summary>
|
||||
/// <returns>List of rows</returns>
|
||||
private IHtmlCollection<IElement> findTorrentRows(IHtmlDocument dom) =>
|
||||
private IHtmlCollection<IElement> FindTorrentRows(IHtmlDocument dom) =>
|
||||
dom.QuerySelectorAll(".torrent_table > tbody > tr:not(.colhead)");
|
||||
|
||||
/// <summary>
|
||||
/// Output message for logging or development (console)
|
||||
/// </summary>
|
||||
/// <param name="message">Message to output</param>
|
||||
/// <param name="level">Level for Logger</param>
|
||||
private void output(string message, string level = "debug")
|
||||
{
|
||||
// Check if we are in dev mode
|
||||
if (DevMode)
|
||||
{
|
||||
// Output message to console
|
||||
Console.WriteLine(message);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Send message to logger with level
|
||||
switch (level)
|
||||
{
|
||||
default:
|
||||
goto case "debug";
|
||||
case "debug":
|
||||
// Only if Debug Level Enabled on Jackett
|
||||
if (logger.IsDebugEnabled)
|
||||
{
|
||||
logger.Debug(message);
|
||||
}
|
||||
break;
|
||||
|
||||
case "info":
|
||||
logger.Info(message);
|
||||
break;
|
||||
|
||||
case "error":
|
||||
logger.Error(message);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validate Config entered by user on Jackett
|
||||
/// </summary>
|
||||
private void validateConfig()
|
||||
private void ValidateConfig()
|
||||
{
|
||||
output("\nValidating Settings ... \n");
|
||||
logger.Info("\nAbnormal - Validating Settings ... \n");
|
||||
|
||||
// Check Username Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.Username.Value))
|
||||
@@ -685,7 +421,7 @@ namespace Jackett.Common.Indexers
|
||||
}
|
||||
else
|
||||
{
|
||||
output("Validated Setting -- Username (auth) => " + ConfigData.Username.Value.ToString());
|
||||
logger.Info("Abnormal - Validated Setting -- Username (auth) => " + ConfigData.Username.Value.ToString());
|
||||
}
|
||||
|
||||
// Check Password Setting
|
||||
@@ -695,7 +431,7 @@ namespace Jackett.Common.Indexers
|
||||
}
|
||||
else
|
||||
{
|
||||
output("Validated Setting -- Password (auth) => " + ConfigData.Password.Value.ToString());
|
||||
logger.Info("Abnormal - Validated Setting -- Password (auth) => " + ConfigData.Password.Value.ToString());
|
||||
}
|
||||
|
||||
// Check Max Page Setting
|
||||
@@ -703,7 +439,7 @@ namespace Jackett.Common.Indexers
|
||||
{
|
||||
try
|
||||
{
|
||||
output("Validated Setting -- Max Pages => " + Convert.ToInt32(ConfigData.Pages.Value));
|
||||
logger.Info("Abnormal - Validated Setting -- Max Pages => " + Convert.ToInt32(ConfigData.Pages.Value));
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
@@ -714,116 +450,6 @@ namespace Jackett.Common.Indexers
|
||||
{
|
||||
throw new ExceptionWithConfigData("Please enter a maximum number of pages to crawl !", ConfigData);
|
||||
}
|
||||
|
||||
// Check Latency Setting
|
||||
if (ConfigData.Latency.Value)
|
||||
{
|
||||
output("\nValidated Setting -- Latency Simulation enabled");
|
||||
|
||||
// Check Latency Start Setting
|
||||
if (!string.IsNullOrEmpty(ConfigData.LatencyStart.Value))
|
||||
{
|
||||
try
|
||||
{
|
||||
output("Validated Setting -- Latency Start => " + Convert.ToInt32(ConfigData.LatencyStart.Value));
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
throw new ExceptionWithConfigData("Please enter a numeric latency start in ms !", ConfigData);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a start latency !", ConfigData);
|
||||
}
|
||||
|
||||
// Check Latency End Setting
|
||||
if (!string.IsNullOrEmpty(ConfigData.LatencyEnd.Value))
|
||||
{
|
||||
try
|
||||
{
|
||||
output("Validated Setting -- Latency End => " + Convert.ToInt32(ConfigData.LatencyEnd.Value));
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
throw new ExceptionWithConfigData("Please enter a numeric latency end in ms !", ConfigData);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a end latency !", ConfigData);
|
||||
}
|
||||
}
|
||||
|
||||
// Check Browser Setting
|
||||
if (ConfigData.Browser.Value)
|
||||
{
|
||||
output("\nValidated Setting -- Browser Simulation enabled");
|
||||
|
||||
// Check ACCEPT header Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.HeaderAccept.Value))
|
||||
{
|
||||
throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT header !", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
output("Validated Setting -- ACCEPT (header) => " + ConfigData.HeaderAccept.Value.ToString());
|
||||
}
|
||||
|
||||
// Check ACCEPT-LANG header Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.HeaderAcceptLang.Value))
|
||||
{
|
||||
throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT-LANG header !", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
output("Validated Setting -- ACCEPT-LANG (header) => " + ConfigData.HeaderAcceptLang.Value.ToString());
|
||||
}
|
||||
|
||||
// Check USER-AGENT header Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.HeaderUserAgent.Value))
|
||||
{
|
||||
throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an USER-AGENT header !", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
output("Validated Setting -- USER-AGENT (header) => " + ConfigData.HeaderUserAgent.Value.ToString());
|
||||
}
|
||||
}
|
||||
|
||||
// Check Dev Cache Settings
|
||||
if (ConfigData.HardDriveCache.Value == true)
|
||||
{
|
||||
output("\nValidated Setting -- DEV Hard Drive Cache enabled");
|
||||
|
||||
// Check if Dev Mode enabled !
|
||||
if (!ConfigData.DevMode.Value)
|
||||
{
|
||||
throw new ExceptionWithConfigData("Hard Drive is enabled but not in DEV MODE, Please enable DEV MODE !", ConfigData);
|
||||
}
|
||||
|
||||
// Check Cache Keep Time Setting
|
||||
if (!string.IsNullOrEmpty(ConfigData.HardDriveCacheKeepTime.Value))
|
||||
{
|
||||
try
|
||||
{
|
||||
output("Validated Setting -- Cache Keep Time (ms) => " + Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value));
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
throw new ExceptionWithConfigData("Please enter a numeric hard drive keep time in ms !", ConfigData);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ExceptionWithConfigData("Hard Drive Cache enabled, Please enter a maximum keep time for cache !", ConfigData);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Delete cache if previously existed
|
||||
cleanCacheStorage(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
src/Jackett.Common/Indexers/BeyondHDAPI.cs (new file, 276 lines)
@@ -0,0 +1,276 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using Jackett.Common.Models;
|
||||
using Jackett.Common.Models.IndexerConfig;
|
||||
using Jackett.Common.Services.Interfaces;
|
||||
using Jackett.Common.Utils;
|
||||
using Jackett.Common.Utils.Clients;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using NLog;
|
||||
|
||||
namespace Jackett.Common.Indexers
|
||||
{
|
||||
[ExcludeFromCodeCoverage]
|
||||
public class BeyondHDAPI : BaseWebIndexer
|
||||
{
|
||||
private readonly string APIBASE = "https://beyond-hd.me/api/torrents/";
|
||||
|
||||
private new ConfigurationDataAPIKeyAndRSSKey configData
|
||||
{
|
||||
get => (ConfigurationDataAPIKeyAndRSSKey)base.configData;
|
||||
set => base.configData = value;
|
||||
}
|
||||
|
||||
public BeyondHDAPI(IIndexerConfigurationService configService, WebClient wc, Logger l,
|
||||
IProtectionService ps, ICacheService cs)
|
||||
: base(id: "beyond-hd-api",
|
||||
name: "Beyond-HD (API)",
|
||||
description: "Without BeyondHD, your HDTV is just a TV",
|
||||
link: "https://beyond-hd.me/",
|
||||
caps: new TorznabCapabilities
|
||||
{
|
||||
LimitsDefault = 100,
|
||||
LimitsMax = 100,
|
||||
TvSearchParams = new List<TvSearchParam>
|
||||
{
|
||||
TvSearchParam.Q, TvSearchParam.Season, TvSearchParam.Ep, TvSearchParam.ImdbId
|
||||
},
|
||||
MovieSearchParams = new List<MovieSearchParam>
|
||||
{
|
||||
MovieSearchParam.Q, MovieSearchParam.ImdbId, MovieSearchParam.TmdbId
|
||||
}
|
||||
},
|
||||
configService: configService,
|
||||
client: wc,
|
||||
logger: l,
|
||||
p: ps,
|
||||
cacheService: cs,
|
||||
configData: new ConfigurationDataAPIKeyAndRSSKey())
|
||||
{
|
||||
Encoding = Encoding.UTF8;
|
||||
Language = "en-us";
|
||||
Type = "private";
|
||||
|
||||
AddCategoryMapping("Movies", TorznabCatType.Movies);
|
||||
AddCategoryMapping("TV", TorznabCatType.TV);
|
||||
}
|
||||
|
||||
public override async Task<IndexerConfigurationStatus> ApplyConfiguration(JToken configJson)
|
||||
{
|
||||
LoadValuesFromJson(configJson);
|
||||
|
||||
IsConfigured = false;
|
||||
try
|
||||
{
|
||||
var results = await PerformQuery(new TorznabQuery());
|
||||
if (results.Count() == 0)
|
||||
throw new Exception("Testing returned no results!");
|
||||
IsConfigured = true;
|
||||
SaveConfig();
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
throw new ExceptionWithConfigData(e.Message, configData);
|
||||
}
|
||||
|
||||
return IndexerConfigurationStatus.Completed;
|
||||
}
|
||||
|
||||
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
|
||||
{
|
||||
var apiKey = configData.ApiKey.Value;
|
||||
var apiUrl = $"{APIBASE}{apiKey}";
|
||||
|
||||
Dictionary<string, string> postData = new Dictionary<string, string>
|
||||
{
|
||||
{ BHDParams.action, "search" },
|
||||
{ BHDParams.rsskey, configData.RSSKey.Value },
|
||||
{ BHDParams.search, query.SanitizedSearchTerm },
|
||||
};
|
||||
|
||||
if (query.IsTVSearch)
|
||||
{
|
||||
postData.Add(BHDParams.categories, "TV");
|
||||
|
||||
if (query.Season != 0)
|
||||
postData[BHDParams.search] = $"{query.SanitizedSearchTerm} {query.GetEpisodeSearchString()}";
|
||||
}
|
||||
else if (query.IsMovieSearch)
|
||||
{
|
||||
postData.Add(BHDParams.categories, "Movies");
|
||||
}
|
||||
|
||||
var imdbId = ParseUtil.GetImdbID(query.ImdbID);
|
||||
if (imdbId != null)
|
||||
postData.Add(BHDParams.imdb_id, imdbId.ToString());
|
||||
if (query.IsTmdbQuery)
|
||||
postData.Add(BHDParams.tmdb_id, query.TmdbID.Value.ToString());
|
||||
|
||||
var bhdResponse = await GetBHDResponse(apiUrl, postData);
|
||||
var releaseInfos = bhdResponse.results.Select(mapToReleaseInfo);
|
||||
|
||||
return releaseInfos;
|
||||
}
|
||||
|
||||
private ReleaseInfo mapToReleaseInfo(BHDResult bhdResult)
|
||||
{
|
||||
var uri = new Uri(bhdResult.url);
|
||||
var downloadUri = new Uri(bhdResult.download_url);
|
||||
|
||||
var releaseInfo = new ReleaseInfo
|
||||
{
|
||||
Title = bhdResult.name,
|
||||
Seeders = bhdResult.seeders,
|
||||
Guid = new Uri(bhdResult.url),
|
||||
Details = new Uri(bhdResult.url),
|
||||
Link = downloadUri,
|
||||
InfoHash = bhdResult.info_hash,
|
||||
Peers = bhdResult.leechers + bhdResult.seeders,
|
||||
Grabs = bhdResult.times_completed,
|
||||
PublishDate = bhdResult.created_at,
|
||||
Size = bhdResult.size,
|
||||
Category = MapTrackerCatToNewznab(bhdResult.category)
|
||||
};
|
||||
|
||||
if (!string.IsNullOrEmpty(bhdResult.imdb_id))
|
||||
releaseInfo.Imdb = ParseUtil.GetImdbID(bhdResult.imdb_id);
|
||||
|
||||
releaseInfo.DownloadVolumeFactor = 1;
|
||||
releaseInfo.UploadVolumeFactor = 1;
|
||||
|
||||
if (bhdResult.freeleech == 1 || bhdResult.limited == 1)
|
||||
releaseInfo.DownloadVolumeFactor = 0;
|
||||
if (bhdResult.promo25 == 1)
|
||||
releaseInfo.DownloadVolumeFactor = .75;
|
||||
if (bhdResult.promo50 == 1)
|
||||
releaseInfo.DownloadVolumeFactor = .50;
|
||||
if (bhdResult.promo75 == 1)
|
||||
releaseInfo.DownloadVolumeFactor = .25;
|
||||
|
||||
return releaseInfo;
|
||||
}
|
||||
|
||||
private async Task<BHDResponse> GetBHDResponse(string apiUrl, Dictionary<string, string> postData)
|
||||
{
|
||||
var request = new WebRequest
|
||||
{
|
||||
PostData = postData,
|
||||
Type = RequestType.POST,
|
||||
Url = apiUrl
|
||||
};
|
||||
|
||||
var response = await webclient.GetResultAsync(request);
|
||||
|
||||
var bhdresponse = JsonConvert.DeserializeObject<BHDResponse>(response.ContentString);
|
||||
return bhdresponse;
|
||||
}
|
||||
|
||||
internal class BHDParams
|
||||
{
|
||||
internal const string action = "action"; // string - The torrents endpoint action you wish to perform. (search)
|
||||
internal const string rsskey = "rsskey"; // string - Your personal RSS key (RID) if you wish for results to include the uploaded_by and download_url fields
|
||||
internal const string page = "page"; // int - The page number of the results. Only if the result set has more than 100 total matches.
|
||||
|
||||
internal const string search = "search"; // string - The torrent name. It does support !negative searching. Example: Christmas Movie
|
||||
internal const string info_hash = "info_hash"; // string - The torrent info_hash. This is an exact match.
|
||||
internal const string folder_name = "folder_name"; // string - The torrent folder name. This is an exact match.file_name string The torrent included file names. This is an exact match.
|
||||
internal const string size = "size"; // int - The torrent size. This is an exact match.
|
||||
internal const string uploaded_by = "uploaded_by"; // string - The uploaders username. Only non anonymous results will be returned.
|
||||
internal const string imdb_id = "imdb_id"; // int - The ID of the matching IMDB page.
|
||||
internal const string tmdb_id = "tmdb_id"; // int - The ID of the matching TMDB page.
|
||||
internal const string categories = "categories"; // string - Any categories separated by comma(s). TV, Movies)
|
||||
internal const string types = "types"; // string - Any types separated by comma(s). BD Remux, 1080p, etc.)
|
||||
internal const string sources = "sources"; // string - Any sources separated by comma(s). Blu-ray, WEB, DVD, etc.)
|
||||
internal const string genres = "genres"; // string - Any genres separated by comma(s). Action, Anime, StandUp, Western, etc.)
|
||||
internal const string groups = "groups"; // string - Any internal release groups separated by comma(s).FraMeSToR, BHDStudio, BeyondHD, RPG, iROBOT, iFT, ZR, MKVULTRA
|
||||
internal const string freeleech = "freeleech"; // int - The torrent freeleech status. 1 = Must match.
|
||||
internal const string limited = "limited"; // int - The torrent limited UL promo. 1 = Must match.
|
||||
internal const string promo25 = "promo25"; // int - The torrent 25% promo. 1 = Must match.
|
||||
internal const string promo50 = "promo50"; // int - The torrent 50% promo. 1 = Must match.
|
||||
internal const string promo75 = "promo75"; // int - The torrent 75% promo. 1 = Must match.
|
||||
internal const string refund = "refund"; // int - The torrent refund promo. 1 = Must match.
|
||||
internal const string rescue = "rescue"; // int - The torrent rescue promo. 1 = Must match.
|
||||
internal const string rewind = "rewind"; // int - The torrent rewind promo. 1 = Must match.
|
||||
internal const string stream = "stream"; // int - The torrent Stream Optimized flag. 1 = Must match.
|
||||
internal const string sd = "sd"; // int - The torrent SD flag. 1 = Must match.
|
||||
internal const string pack = "pack"; // int - The torrent TV pack flag. 1 = Must match.
|
||||
internal const string h264 = "h264"; // int - The torrent x264/h264 codec flag. 1 = Must match.
|
||||
internal const string h265 = "h265"; // int - The torrent x265/h265 codec flag. 1 = Must match.
|
||||
internal const string alive = "alive"; // int - The torrent has at least 1 seeder. 1 = Must match.
|
||||
internal const string dying = "dying"; // int - The torrent has less than 3 seeders. 1 = Must match.
|
||||
internal const string dead = "dead"; // int - The torrent has no seeders. 1 = Must match.
|
||||
internal const string reseed = "reseed"; // int - The torrent has no seeders and an active reseed request. 1 = Must match.
|
||||
internal const string seeding = "seeding"; // int - The torrent is seeded by you. 1 = Must match.
|
||||
internal const string leeching = "leeching"; // int - The torrent is being leeched by you. 1 = Must match.
|
||||
internal const string completed = "completed"; // int - The torrent has been completed by you. 1 = Must match.
|
||||
internal const string incomplete = "incomplete"; // int - The torrent has not been completed by you. 1 = Must match.
|
||||
internal const string notdownloaded = "notdownloaded"; // int - The torrent has not been downloaded you. 1 = Must match.
|
||||
internal const string min_bhd = "min_bhd"; // int - The minimum BHD rating.
|
||||
internal const string vote_bhd = "vote_bhd"; // int - The minimum number of BHD votes.
|
||||
internal const string min_imdb = "min_imdb"; // int - The minimum IMDb rating.
|
||||
internal const string vote_imdb = "vote_imdb"; // int - The minimum number of IMDb votes.
|
||||
internal const string min_tmdb = "min_tmdb"; // int - The minimum TMDb rating.
|
||||
internal const string vote_tmdb = "vote_tmdb"; // int - The minimum number of TDMb votes.
|
||||
internal const string min_year = "min_year"; // int - The earliest release year.
|
||||
internal const string max_year = "max_year"; // int - The latest release year.
|
||||
internal const string sort = "sort"; // string - Field to sort results by. (bumped_at, created_at, seeders, leechers, times_completed, size, name, imdb_rating, tmdb_rating, bhd_rating). Default is bumped_at
|
||||
internal const string order = "order"; // string - The direction of the sort of results. (asc, desc). Default is desc
|
||||
|
||||
// Most of the comma separated fields are OR searches.
|
||||
internal const string features = "features"; // string - Any features separated by comma(s). DV, HDR10, HDR10P, Commentary)
|
||||
internal const string countries = "countries"; // string - Any production countries separated by comma(s). France, Japan, etc.)
|
||||
internal const string languages = "languages"; // string - Any spoken languages separated by comma(s). French, English, etc.)
|
||||
internal const string audios = "audios"; // string - Any audio tracks separated by comma(s). English, Japanese,etc.)
|
||||
internal const string subtitles = "subtitles"; // string - Any subtitles separated by comma(s). Dutch, Finnish, Swedish, etc.)
|
||||
|
||||
}
|
||||
|
||||
class BHDResponse
|
||||
{
|
||||
public int status_code { get; set; } // The status code of the post request. (0 = Failed and 1 = Success)
|
||||
public int page { get; set; } // The current page of results that you're on.
|
||||
public int total_pages { get; set; } // int The total number of pages of results matching your query.
|
||||
public int total_results { get; set; } // The total number of results matching your query.
|
||||
public bool success { get; set; } // The status of the call. (True = Success, False = Error)
|
||||
public BHDResult[] results { get; set; } // The results that match your query.
|
||||
}
|
||||
|
||||
class BHDResult
|
||||
{
|
||||
public int id { get; set; }
|
||||
public string name { get; set; }
|
||||
public string folder_name { get; set; }
|
||||
public string info_hash { get; set; }
|
||||
public long size { get; set; }
|
||||
public string uploaded_by { get; set; }
|
||||
public string category { get; set; }
|
||||
public string type { get; set; }
|
||||
public int seeders { get; set; }
|
||||
public int leechers { get; set; }
|
||||
public int times_completed { get; set; }
|
||||
public string imdb_id { get; set; }
|
||||
public string tmdb_id { get; set; }
|
||||
public decimal bhd_rating { get; set; }
|
||||
public decimal tmdb_rating { get; set; }
|
||||
public decimal imdb_rating { get; set; }
|
||||
public int tv_pack { get; set; }
|
||||
public int promo25 { get; set; }
|
||||
public int promo50 { get; set; }
|
||||
public int promo75 { get; set; }
|
||||
public int freeleech { get; set; }
|
||||
public int rewind { get; set; }
|
||||
public int refund { get; set; }
|
||||
public int limited { get; set; }
|
||||
public int rescue { get; set; }
|
||||
public DateTime bumped_at { get; set; }
|
||||
public DateTime created_at { get; set; }
|
||||
public string url { get; set; }
|
||||
public string download_url { get; set; }
|
||||
}
|
||||
}
|
||||
}
|
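The new indexer above queries the Beyond-HD API by POSTing a form (action=search, rsskey, search, optional categories) to https://beyond-hd.me/api/torrents/{apikey} and deserializing the JSON response. A minimal stand-alone usage sketch that models only a few of the response fields defined in BHDResponse/BHDResult:

using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text.Json;
using System.Threading.Tasks;

// Minimal sketch of the Beyond-HD API call made by the indexer above.
// Only a few response fields are modelled; status_code 1 means success,
// as documented in BHDResponse.
static class BhdApiSketch
{
    public record Result(string name, int seeders, int leechers, long size, string url, string download_url);
    public record Response(int status_code, Result[] results);

    public static async Task<Result[]> SearchAsync(string apiKey, string rssKey, string term)
    {
        using var client = new HttpClient();
        var form = new FormUrlEncodedContent(new Dictionary<string, string>
        {
            ["action"] = "search",
            ["rsskey"] = rssKey,
            ["search"] = term,
            ["categories"] = "Movies" // optional filter, same values the indexer uses
        });

        using var response = await client.PostAsync($"https://beyond-hd.me/api/torrents/{apiKey}", form);
        response.EnsureSuccessStatusCode();

        var json = await response.Content.ReadAsStringAsync();
        var parsed = JsonSerializer.Deserialize<Response>(json);
        if (parsed == null || parsed.status_code != 1)
            throw new Exception("Beyond-HD API call failed");
        return parsed.results;
    }
}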
@@ -1129,7 +1129,7 @@ namespace Jackett.Common.Indexers
return Element.QuerySelector(Selector);
}

protected string handleSelector(selectorBlock Selector, IElement Dom, Dictionary<string, object> variables = null)
protected string handleSelector(selectorBlock Selector, IElement Dom, Dictionary<string, object> variables = null, bool required = true)
{
if (Selector.Text != null)
{
@@ -1147,7 +1147,9 @@ namespace Jackett.Common.Indexers
selection = QuerySelector(Dom, Selector.Selector);
if (selection == null)
{
throw new Exception(string.Format("Selector \"{0}\" didn't match {1}", Selector.Selector, Dom.ToHtmlPretty()));
if (required)
throw new Exception(string.Format("Selector \"{0}\" didn't match {1}", Selector.Selector, Dom.ToHtmlPretty()));
return null;
}
}

@@ -1170,13 +1172,21 @@ namespace Jackett.Common.Indexers
}
}
if (value == null)
throw new Exception(string.Format("None of the case selectors \"{0}\" matched {1}", string.Join(",", Selector.Case), selection.ToHtmlPretty()));
{
if (required)
throw new Exception(string.Format("None of the case selectors \"{0}\" matched {1}", string.Join(",", Selector.Case), selection.ToHtmlPretty()));
return null;
}
}
else if (Selector.Attribute != null)
{
value = selection.GetAttribute(Selector.Attribute);
if (value == null)
throw new Exception(string.Format("Attribute \"{0}\" is not set for element {1}", Selector.Attribute, selection.ToHtmlPretty()));
{
if (required)
throw new Exception(string.Format("Attribute \"{0}\" is not set for element {1}", Selector.Attribute, selection.ToHtmlPretty()));
return null;
}
}
else
{
@@ -1401,9 +1411,16 @@ namespace Jackett.Common.Indexers

string value = null;
var variablesKey = ".Result." + FieldName;
var isOptional = OptionalFields.Contains(Field.Key) || FieldModifiers.Contains("optional") || Field.Value.Optional;
try
{
value = handleSelector(Field.Value, Row, variables);
value = handleSelector(Field.Value, Row, variables, !isOptional);
if (isOptional && value == null)
{
variables[variablesKey] = null;
continue;
}

switch (FieldName)
{
case "download":
@@ -1560,7 +1577,7 @@ namespace Jackett.Common.Indexers
{
if (!variables.ContainsKey(variablesKey))
variables[variablesKey] = null;
if (OptionalFields.Contains(Field.Key) || FieldModifiers.Contains("optional") || Field.Value.Optional)
if (isOptional)
{
variables[variablesKey] = null;
continue;
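The Cardigann change above threads a required flag through handleSelector so that optional fields yield null instead of throwing when their selector does not match, and the row loop simply records null and continues. A condensed sketch of that behaviour, with an illustrative lookup delegate in place of handleSelector:

using System;
using System.Collections.Generic;

// A selector lookup either throws (required field) or quietly yields null
// (optional field), and the caller records null for optional fields instead
// of aborting the whole row.
static class OptionalFieldSketch
{
    public static string HandleSelector(Func<string> lookup, string selectorName, bool required = true)
    {
        var value = lookup();
        if (value == null && required)
            throw new Exception($"Selector \"{selectorName}\" didn't match");
        return value; // null is an acceptable answer when the field is optional
    }

    public static void FillRow(IDictionary<string, object> variables)
    {
        bool isOptional = true; // e.g. the field carries the "optional" modifier
        var value = HandleSelector(() => null, "div.poster img", required: !isOptional);
        if (isOptional && value == null)
        {
            variables[".Result.poster"] = null; // keep the variable defined, just empty
            return;                             // and move on to the next field
        }
        variables[".Result.poster"] = value;
    }
}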
@@ -3,9 +3,7 @@ using System.Collections.Generic;
|
||||
using System.Collections.Specialized;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading.Tasks;
|
||||
@@ -18,7 +16,6 @@ using Jackett.Common.Models.IndexerConfig.Bespoke;
|
||||
using Jackett.Common.Services.Interfaces;
|
||||
using Jackett.Common.Utils;
|
||||
using Jackett.Common.Utils.Clients;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using NLog;
|
||||
|
||||
@@ -32,12 +29,6 @@ namespace Jackett.Common.Indexers
|
||||
private string SearchUrl => SiteLink + "browse.php";
|
||||
private string TorrentDetailsUrl => SiteLink + "details.php?id={id}";
|
||||
private string TorrentDownloadUrl => SiteLink + "download.php?id={id}&passkey={passkey}";
|
||||
private bool Latency => ConfigData.Latency.Value;
|
||||
private bool DevMode => ConfigData.DevMode.Value;
|
||||
private bool CacheMode => ConfigData.HardDriveCache.Value;
|
||||
private static string Directory => Path.Combine(Path.GetTempPath(), "Jackett", MethodBase.GetCurrentMethod().DeclaringType?.Name);
|
||||
|
||||
private readonly Dictionary<string, string> _emulatedBrowserHeaders = new Dictionary<string, string>();
|
||||
|
||||
private ConfigurationDataNorbits ConfigData => (ConfigurationDataNorbits)configData;
|
||||
|
||||
@@ -113,26 +104,7 @@ namespace Jackett.Common.Indexers
|
||||
// Check & Validate Config
|
||||
ValidateConfig();
|
||||
|
||||
// Setting our data for a better emulated browser (maximum security)
|
||||
// TODO: Encoded Content not supported by Jackett at this time
|
||||
// emulatedBrowserHeaders.Add("Accept-Encoding", "gzip, deflate");
|
||||
|
||||
// If we want to simulate a browser
|
||||
if (ConfigData.Browser.Value)
|
||||
{
|
||||
// Clean headers
|
||||
_emulatedBrowserHeaders.Clear();
|
||||
|
||||
// Inject headers
|
||||
_emulatedBrowserHeaders.Add("Accept", ConfigData.HeaderAccept.Value);
|
||||
_emulatedBrowserHeaders.Add("Accept-Language", ConfigData.HeaderAcceptLang.Value);
|
||||
_emulatedBrowserHeaders.Add("DNT", Convert.ToInt32(ConfigData.HeaderDnt.Value).ToString());
|
||||
_emulatedBrowserHeaders.Add("Upgrade-Insecure-Requests", Convert.ToInt32(ConfigData.HeaderUpgradeInsecure.Value).ToString());
|
||||
_emulatedBrowserHeaders.Add("User-Agent", ConfigData.HeaderUserAgent.Value);
|
||||
_emulatedBrowserHeaders.Add("Referer", LoginUrl);
|
||||
}
|
||||
|
||||
await DoLogin();
|
||||
await DoLoginAsync();
|
||||
|
||||
return IndexerConfigurationStatus.RequiresTesting;
|
||||
}
|
||||
@@ -141,19 +113,18 @@ namespace Jackett.Common.Indexers
|
||||
/// Perform login to tracker
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
private async Task DoLogin()
|
||||
private async Task DoLoginAsync()
|
||||
{
|
||||
// Build WebRequest for index
|
||||
var myIndexRequest = new WebRequest
|
||||
{
|
||||
Type = RequestType.GET,
|
||||
Url = SiteLink,
|
||||
Headers = _emulatedBrowserHeaders,
|
||||
Encoding = Encoding
|
||||
};
|
||||
|
||||
// Get index page for cookies
|
||||
Output("\nGetting index page (for cookies).. with " + SiteLink);
|
||||
logger.Info("\nNorBits - Getting index page (for cookies).. with " + SiteLink);
|
||||
var indexPage = await webclient.GetResultAsync(myIndexRequest);
|
||||
|
||||
// Building login form data
|
||||
@@ -167,15 +138,13 @@ namespace Jackett.Common.Indexers
|
||||
{
|
||||
Type = RequestType.GET,
|
||||
Url = LoginUrl,
|
||||
Headers = _emulatedBrowserHeaders,
|
||||
Cookies = indexPage.Cookies,
|
||||
Referer = SiteLink,
|
||||
Encoding = Encoding
|
||||
};
|
||||
|
||||
// Get login page -- (not used, but simulation needed by tracker security's checks)
|
||||
LatencyNow();
|
||||
Output("\nGetting login page (user simulation).. with " + LoginUrl);
|
||||
logger.Info("\nNorBits - Getting login page (user simulation).. with " + LoginUrl);
|
||||
await webclient.GetResultAsync(myRequestLogin);
|
||||
|
||||
// Build WebRequest for submitting authentication
|
||||
@@ -185,14 +154,12 @@ namespace Jackett.Common.Indexers
|
||||
Referer = LoginUrl,
|
||||
Type = RequestType.POST,
|
||||
Url = LoginCheckUrl,
|
||||
Headers = _emulatedBrowserHeaders,
|
||||
Cookies = indexPage.Cookies,
|
||||
Encoding = Encoding
|
||||
};
|
||||
|
||||
// Perform loggin
|
||||
LatencyNow();
|
||||
Output("\nPerform loggin.. with " + LoginCheckUrl);
|
||||
logger.Info("\nPerform loggin.. with " + LoginCheckUrl);
|
||||
var response = await webclient.GetResultAsync(request);
|
||||
|
||||
// Test if we are logged in
|
||||
@@ -204,36 +171,36 @@ namespace Jackett.Common.Indexers
|
||||
var redirectTo = response.RedirectingTo;
|
||||
|
||||
// Oops, unable to login
|
||||
Output("-> Login failed: " + message, "error");
|
||||
logger.Info("NorBits - Login failed: " + message, "error");
|
||||
throw new ExceptionWithConfigData("Login failed: " + message, configData);
|
||||
});
|
||||
|
||||
Output("\nCookies saved for future uses...");
|
||||
logger.Info("\nNorBits - Cookies saved for future uses...");
|
||||
ConfigData.CookieHeader.Value = indexPage.Cookies + " " + response.Cookies + " ts_username=" + ConfigData.Username.Value;
|
||||
|
||||
Output("\n-> Login Success\n");
|
||||
logger.Info("\nNorBits - Login Success\n");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Check logged-in state for provider
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
private async Task CheckLogin()
|
||||
private async Task CheckLoginAsync()
|
||||
{
|
||||
// Checking ...
|
||||
Output("\n-> Checking logged-in state....");
|
||||
logger.Info("\nNorBits - Checking logged-in state....");
|
||||
var loggedInCheck = await RequestWithCookiesAsync(SearchUrl);
|
||||
if (!loggedInCheck.ContentString.Contains("logout.php"))
|
||||
{
|
||||
// Cookie expired, renew session on provider
|
||||
Output("-> Not logged, login now...\n");
|
||||
logger.Info("NorBits - Not logged, login now...\n");
|
||||
|
||||
await DoLogin();
|
||||
await DoLoginAsync();
|
||||
}
|
||||
else
|
||||
{
|
||||
// Already logged, session active
|
||||
Output("-> Already logged, continue...\n");
|
||||
logger.Info("NorBits - Already logged, continue...\n");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -249,22 +216,7 @@ namespace Jackett.Common.Indexers
|
||||
var searchUrl = SearchUrl;
|
||||
|
||||
// Check login before performing a query
|
||||
await CheckLogin();
|
||||
|
||||
// Check cache first so we don't query the server (if search term used or not in dev mode)
|
||||
if (!DevMode && !string.IsNullOrEmpty(exactSearchTerm))
|
||||
{
|
||||
lock (cache)
|
||||
{
|
||||
// Remove old cache items
|
||||
CleanCache();
|
||||
|
||||
// Search in cache
|
||||
var cachedResult = cache.FirstOrDefault(i => i.Query == exactSearchTerm);
|
||||
if (cachedResult != null)
|
||||
return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
|
||||
}
|
||||
}
|
||||
await CheckLoginAsync();
|
||||
|
||||
var SearchTerms = new List<string> { exactSearchTerm };
|
||||
|
||||
@@ -300,76 +252,41 @@ namespace Jackett.Common.Indexers
|
||||
else
|
||||
{
|
||||
// No result found for this query
|
||||
Output("\nNo result found for your query, please try another search term ...\n", "info");
|
||||
logger.Info("\nNorBits - No result found for your query, please try another search term ...\n", "info");
|
||||
break;
|
||||
}
|
||||
|
||||
Output("\nFound " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !");
|
||||
Output("\nThere are " + firstPageRows.Length + " results on the first page !");
|
||||
logger.Info("\nNorBits - Found " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !");
|
||||
logger.Info("\nNorBits - There are " + firstPageRows.Length + " results on the first page !");
|
||||
|
||||
// Loop on results
|
||||
|
||||
foreach (var row in firstPageRows)
|
||||
{
|
||||
Output("Torrent #" + (releases.Count + 1));
|
||||
|
||||
// ID
|
||||
var id = row.QuerySelector("td:nth-of-type(2) > a:nth-of-type(1)").GetAttribute("href").Split('=').Last();
|
||||
Output("ID: " + id);
|
||||
|
||||
// Release Name
|
||||
var name = row.QuerySelector("td:nth-of-type(2) > a:nth-of-type(1)").GetAttribute("title");
|
||||
|
||||
// Category
|
||||
var categoryName = row.QuerySelector("td:nth-of-type(1) > div > a:nth-of-type(1)").GetAttribute("title");
|
||||
var id = row.QuerySelector("td:nth-of-type(2) > a:nth-of-type(1)").GetAttribute("href").Split('=').Last(); // ID
|
||||
var name = row.QuerySelector("td:nth-of-type(2) > a:nth-of-type(1)").GetAttribute("title"); // Release Name
|
||||
var categoryName = row.QuerySelector("td:nth-of-type(1) > div > a:nth-of-type(1)").GetAttribute("title"); // Category
|
||||
var mainCat = row.QuerySelector("td:nth-of-type(1) > div > a:nth-of-type(1)").GetAttribute("href").Split('?').Last();
|
||||
var qSubCat2 = row.QuerySelector("td:nth-of-type(1) > div > a[href^=\"/browse.php?sub2_cat[]=\"]");
|
||||
|
||||
var cat = mainCat;
|
||||
if (qSubCat2 != null)
|
||||
cat += '&' + qSubCat2.GetAttribute("href").Split('?').Last();
|
||||
|
||||
Output("Category: " + cat + " - " + categoryName);
|
||||
|
||||
// Seeders
|
||||
var seeders = ParseUtil.CoerceInt(row.QuerySelector("td:nth-of-type(9)").TextContent);
|
||||
Output("Seeders: " + seeders);
|
||||
|
||||
// Leechers
|
||||
var leechers = ParseUtil.CoerceInt(row.QuerySelector("td:nth-of-type(10)").TextContent);
|
||||
Output("Leechers: " + leechers);
|
||||
|
||||
// Completed
|
||||
var regexObj = new Regex(@"[^\d]");
|
||||
var seeders = ParseUtil.CoerceInt(row.QuerySelector("td:nth-of-type(9)").TextContent); // Seeders
|
||||
var leechers = ParseUtil.CoerceInt(row.QuerySelector("td:nth-of-type(10)").TextContent); // Leechers
|
||||
var regexObj = new Regex(@"[^\d]"); // Completed
|
||||
var completed2 = row.QuerySelector("td:nth-of-type(8)").TextContent;
|
||||
var completed = ParseUtil.CoerceLong(regexObj.Replace(completed2, ""));
|
||||
Output("Completed: " + completed);
|
||||
|
||||
// Files
|
||||
var qFiles = row.QuerySelector("td:nth-of-type(3) > a");
|
||||
var qFiles = row.QuerySelector("td:nth-of-type(3) > a"); // Files
|
||||
var files = qFiles != null ? ParseUtil.CoerceInt(Regex.Match(qFiles.TextContent, @"\d+").Value) : 1;
|
||||
Output("Files: " + files);
|
||||
|
||||
// Size
|
||||
var humanSize = row.QuerySelector("td:nth-of-type(7)").TextContent.ToLowerInvariant();
|
||||
var size = ReleaseInfo.GetBytes(humanSize);
|
||||
Output("Size: " + humanSize + " (" + size + " bytes)");
|
||||
|
||||
// --> Date
|
||||
var humanSize = row.QuerySelector("td:nth-of-type(7)").TextContent.ToLowerInvariant(); // Size
|
||||
var size = ReleaseInfo.GetBytes(humanSize); // Date
|
||||
var dateTimeOrig = row.QuerySelector("td:nth-of-type(5)").TextContent;
|
||||
var dateTime = Regex.Replace(dateTimeOrig, @"<[^>]+>| ", "").Trim();
|
||||
var date = DateTime.ParseExact(dateTime, "yyyy-MM-ddHH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
|
||||
Output("Released on: " + date);
|
||||
|
||||
// Torrent Details URL
|
||||
var details = new Uri(TorrentDetailsUrl.Replace("{id}", id.ToString()));
|
||||
Output("Details: " + details.AbsoluteUri);
|
||||
|
||||
// Torrent Download URL
|
||||
var passkey = row.QuerySelector("td:nth-of-type(2) > a:nth-of-type(2)").GetAttribute("href");
|
||||
var details = new Uri(TorrentDetailsUrl.Replace("{id}", id.ToString())); // Description Link
|
||||
var passkey = row.QuerySelector("td:nth-of-type(2) > a:nth-of-type(2)").GetAttribute("href"); // Download Link
|
||||
var key = Regex.Match(passkey, "(?<=passkey\\=)([a-zA-z0-9]*)");
|
||||
var downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()).Replace("{passkey}", key.ToString()));
|
||||
Output("Download Link: " + downloadLink.AbsoluteUri);
|
||||
|
||||
// Building release infos
|
||||
var release = new ReleaseInfo
|
||||
@@ -462,7 +379,7 @@ namespace Jackett.Common.Indexers
|
||||
// Building our query
|
||||
url += "?" + searchterm + "&" + parameters.GetQueryString() + "&" + CatQryStr;
|
||||
|
||||
Output("\nBuilded query for \"" + term + "\"... " + url);
|
||||
logger.Info("\nBuilded query for \"" + term + "\"... " + url);
|
||||
|
||||
// Return our search url
|
||||
return url;
|
||||
@@ -473,58 +390,10 @@ namespace Jackett.Common.Indexers
|
||||
/// </summary>
|
||||
/// <param name="request">URL created by Query Builder</param>
|
||||
/// <returns>Results from query</returns>
|
||||
private async Task<WebResult> QueryExec(string request)
|
||||
private async Task<WebResult> QueryExecAsync(string request)
|
||||
{
|
||||
WebResult results;
|
||||
|
||||
// Switch depending on whether we are in DEV mode with Hard Drive Cache or not
|
||||
if (DevMode && CacheMode)
|
||||
{
|
||||
// Check Cache before querying and load previous results if available
|
||||
results = await QueryCache(request);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Querying tracker directly
|
||||
results = await QueryTracker(request);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get Torrents Page from Cache by Query Provided
|
||||
/// </summary>
|
||||
/// <param name="request">URL created by Query Builder</param>
|
||||
/// <returns>Results from query</returns>
|
||||
private async Task<WebResult> QueryCache(string request)
|
||||
{
|
||||
WebResult results;
|
||||
|
||||
// Create Directory if not exist
|
||||
System.IO.Directory.CreateDirectory(Directory);
|
||||
|
||||
// Clean Storage Provider Directory from outdated cached queries
|
||||
CleanCacheStorage();
|
||||
|
||||
// Create fingerprint for request
|
||||
var file = Directory + request.GetHashCode() + ".json";
|
||||
|
||||
// Checking modes states
|
||||
if (File.Exists(file))
|
||||
{
|
||||
// File exist... loading it right now !
|
||||
Output("Loading results from hard drive cache ..." + request.GetHashCode() + ".json");
|
||||
results = JsonConvert.DeserializeObject<WebResult>(File.ReadAllText(file));
|
||||
}
|
||||
else
|
||||
{
|
||||
// No cached file found, querying tracker directly
|
||||
results = await QueryTracker(request);
|
||||
|
||||
// Cached file didn't exist for our query, writing it right now !
|
||||
Output("Writing results to hard drive cache ..." + request.GetHashCode() + ".json");
|
||||
File.WriteAllText(file, JsonConvert.SerializeObject(results));
|
||||
}
|
||||
results = await QueryTrackerAsync(request);
|
||||
return results;
|
||||
}
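// A minimal sketch of the hard-drive cache idea shown in QueryCache above: derive a file name
// from the request, load the cached JSON when present, otherwise fetch and persist the result.
// Note that string.GetHashCode() is not stable across processes, so this sketch uses SHA-256 for
// the fingerprint. The helper and its fetch delegate are hypothetical, not Jackett APIs.
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using Newtonsoft.Json;

public static class FileCache
{
    public static async Task<T> GetOrFetchAsync<T>(string cacheDir, string request, Func<Task<T>> fetch)
    {
        Directory.CreateDirectory(cacheDir);
        using var sha = SHA256.Create();
        var hash = sha.ComputeHash(Encoding.UTF8.GetBytes(request));
        var file = Path.Combine(cacheDir, BitConverter.ToString(hash).Replace("-", "") + ".json");

        if (File.Exists(file))
            return JsonConvert.DeserializeObject<T>(File.ReadAllText(file));

        var result = await fetch();
        File.WriteAllText(file, JsonConvert.SerializeObject(result));
        return result;
    }
}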
|
||||
|
||||
@@ -533,91 +402,18 @@ namespace Jackett.Common.Indexers
|
||||
/// </summary>
|
||||
/// <param name="request">URL created by Query Builder</param>
|
||||
/// <returns>Results from query</returns>
|
||||
private async Task<WebResult> QueryTracker(string request)
|
||||
private async Task<WebResult> QueryTrackerAsync(string request)
|
||||
{
|
||||
// Cache mode not enabled or cached file didn't exist for our query
|
||||
Output("\nQuerying tracker for results....");
|
||||
logger.Info("\nNorBits - Querying tracker for results....");
|
||||
|
||||
// Request our first page
|
||||
LatencyNow();
|
||||
var results = await RequestWithCookiesAndRetryAsync(request, ConfigData.CookieHeader.Value, RequestType.GET, SearchUrl, null, _emulatedBrowserHeaders);
|
||||
var results = await RequestWithCookiesAndRetryAsync(request, ConfigData.CookieHeader.Value, RequestType.GET, SearchUrl, null);
|
||||
|
||||
// Return results from tracker
|
||||
return results;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Clean Hard Drive Cache Storage
|
||||
/// </summary>
|
||||
/// <param name="force">Force Provider Folder deletion</param>
|
||||
private void CleanCacheStorage(bool force = false)
|
||||
{
|
||||
// Check cleaning method
|
||||
if (force)
|
||||
{
|
||||
// Deleting Provider Storage folder and all files recursively
|
||||
Output("\nDeleting Provider Storage folder and all files recursively ...");
|
||||
|
||||
// Check if directory exist
|
||||
if (System.IO.Directory.Exists(Directory))
|
||||
{
|
||||
// Delete storage directory of provider
|
||||
System.IO.Directory.Delete(Directory, true);
|
||||
Output("-> Storage folder deleted successfully.");
|
||||
}
|
||||
else
|
||||
{
|
||||
// No directory, so nothing to do
|
||||
Output("-> No Storage folder found for this provider !");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var i = 0;
|
||||
// Check for files older than the configured keep time and delete them
|
||||
Output("\nCleaning Provider Storage folder... in progress.");
|
||||
System.IO.Directory.GetFiles(Directory)
|
||||
.Select(f => new FileInfo(f))
|
||||
.Where(f => f.LastAccessTime < DateTime.Now.AddMilliseconds(-Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value)))
|
||||
.ToList()
|
||||
.ForEach(f =>
|
||||
{
|
||||
Output("Deleting cached file << " + f.Name + " >> ... done.");
|
||||
f.Delete();
|
||||
i++;
|
||||
});
|
||||
|
||||
// Inform on what was cleaned during process
|
||||
if (i > 0)
|
||||
{
|
||||
Output("-> Deleted " + i + " cached files during cleaning.");
|
||||
}
|
||||
else
|
||||
{
|
||||
Output("-> Nothing deleted during cleaning.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generate a random fake latency to avoid detection on tracker side
|
||||
/// </summary>
|
||||
private void LatencyNow()
|
||||
{
|
||||
// Need latency ?
|
||||
if (Latency)
|
||||
{
|
||||
var random = new Random(DateTime.Now.Millisecond);
|
||||
var waiting = random.Next(Convert.ToInt32(ConfigData.LatencyStart.Value),
|
||||
Convert.ToInt32(ConfigData.LatencyEnd.Value));
|
||||
Output("\nLatency Faker => Sleeping for " + waiting + " ms...");
|
||||
|
||||
// Sleep now...
|
||||
System.Threading.Thread.Sleep(waiting);
|
||||
}
|
||||
// Generate a random value in our range
|
||||
}
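// A small illustrative sketch of the latency-faking idea implemented by LatencyNow above:
// pause for a random number of milliseconds inside a configured [min, max] range before each
// request. The helper and the example bounds are hypothetical.
using System;
using System.Threading.Tasks;

public static class Jitter
{
    private static readonly Random Rng = new Random();

    public static Task DelayAsync(int minMs, int maxMs) =>
        Task.Delay(Rng.Next(minMs, maxMs + 1)); // e.g. await Jitter.DelayAsync(1589, 3674);
}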
|
||||
|
||||
/// <summary>
|
||||
/// Find torrent rows in search pages
|
||||
/// </summary>
|
||||
@@ -634,7 +430,7 @@ namespace Jackett.Common.Indexers
|
||||
{
|
||||
// Retrieving ID from link provided
|
||||
var id = ParseUtil.CoerceInt(Regex.Match(link.AbsoluteUri, @"\d+").Value);
|
||||
Output("Torrent Requested ID: " + id);
|
||||
logger.Info("NorBits - Torrent Requested ID: " + id);
|
||||
|
||||
// Building login form data
|
||||
var pairs = new Dictionary<string, string> {
|
||||
@@ -642,67 +438,19 @@ namespace Jackett.Common.Indexers
|
||||
{ "_", string.Empty } // ~~ Strange, blank param...
|
||||
};
|
||||
|
||||
// Add emulated XHR request
|
||||
_emulatedBrowserHeaders.Add("X-Prototype-Version", "1.6.0.3");
|
||||
_emulatedBrowserHeaders.Add("X-Requested-With", "XMLHttpRequest");
|
||||
|
||||
// Get torrent file now
|
||||
Output("Getting torrent file now....");
|
||||
var response = await base.Download(link);
|
||||
|
||||
// Remove our XHR request header
|
||||
_emulatedBrowserHeaders.Remove("X-Prototype-Version");
|
||||
_emulatedBrowserHeaders.Remove("X-Requested-With");
|
||||
|
||||
// Return content
|
||||
return response;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output message for logging or development (console)
|
||||
/// </summary>
|
||||
/// <param name="message">Message to output</param>
|
||||
/// <param name="level">Level for Logger</param>
|
||||
private void Output(string message, string level = "debug")
|
||||
{
|
||||
// Check if we are in dev mode
|
||||
if (DevMode)
|
||||
{
|
||||
// Output message to console
|
||||
Console.WriteLine(message);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Send message to logger with level
|
||||
switch (level)
|
||||
{
|
||||
default:
|
||||
goto case "debug";
|
||||
case "debug":
|
||||
// Only if Debug Level Enabled on Jackett
|
||||
if (logger.IsDebugEnabled)
|
||||
{
|
||||
logger.Debug(message);
|
||||
}
|
||||
break;
|
||||
|
||||
case "info":
|
||||
logger.Info(message);
|
||||
break;
|
||||
|
||||
case "error":
|
||||
logger.Error(message);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validate Config entered by user on Jackett
|
||||
/// </summary>
|
||||
private void ValidateConfig()
|
||||
{
|
||||
Output("\nValidating Settings ... \n");
|
||||
logger.Info("\nNorBits - Validating Settings ... \n");
|
||||
|
||||
// Check Username Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.Username.Value))
|
||||
@@ -711,7 +459,7 @@ namespace Jackett.Common.Indexers
|
||||
}
|
||||
else
|
||||
{
|
||||
Output("Validated Setting -- Username (auth) => " + ConfigData.Username.Value);
|
||||
logger.Info("NorBits - Validated Setting -- Username (auth) => " + ConfigData.Username.Value);
|
||||
}
|
||||
|
||||
// Check Password Setting
|
||||
@@ -721,7 +469,7 @@ namespace Jackett.Common.Indexers
|
||||
}
|
||||
else
|
||||
{
|
||||
Output("Validated Setting -- Password (auth) => " + ConfigData.Password.Value);
|
||||
logger.Info("NorBits - Validated Setting -- Password (auth) => " + ConfigData.Password.Value);
|
||||
}
|
||||
|
||||
// Check Max Page Setting
|
||||
@@ -729,7 +477,7 @@ namespace Jackett.Common.Indexers
|
||||
{
|
||||
try
|
||||
{
|
||||
Output("Validated Setting -- Max Pages => " + Convert.ToInt32(ConfigData.Pages.Value));
|
||||
logger.Info("NorBits - Validated Setting -- Max Pages => " + Convert.ToInt32(ConfigData.Pages.Value));
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
@@ -740,121 +488,6 @@ namespace Jackett.Common.Indexers
|
||||
{
|
||||
throw new ExceptionWithConfigData("Please enter a maximum number of pages to crawl !", ConfigData);
|
||||
}
|
||||
|
||||
// Check Latency Setting
|
||||
if (ConfigData.Latency.Value)
|
||||
{
|
||||
Output("\nValidated Setting -- Latency Simulation enabled");
|
||||
|
||||
// Check Latency Start Setting
|
||||
if (!string.IsNullOrEmpty(ConfigData.LatencyStart.Value))
|
||||
{
|
||||
try
|
||||
{
|
||||
Output("Validated Setting -- Latency Start => " + Convert.ToInt32(ConfigData.LatencyStart.Value));
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
throw new ExceptionWithConfigData("Please enter a numeric latency start in ms !", ConfigData);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a start latency !", ConfigData);
|
||||
}
|
||||
|
||||
// Check Latency End Setting
|
||||
if (!string.IsNullOrEmpty(ConfigData.LatencyEnd.Value))
|
||||
{
|
||||
try
|
||||
{
|
||||
Output("Validated Setting -- Latency End => " + Convert.ToInt32(ConfigData.LatencyEnd.Value));
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
throw new ExceptionWithConfigData("Please enter a numeric latency end in ms !", ConfigData);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a end latency !", ConfigData);
|
||||
}
|
||||
}
|
||||
|
||||
// Check Browser Setting
|
||||
if (ConfigData.Browser.Value)
|
||||
{
|
||||
Output("\nValidated Setting -- Browser Simulation enabled");
|
||||
|
||||
// Check ACCEPT header Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.HeaderAccept.Value))
|
||||
{
|
||||
throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT header !", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
Output("Validated Setting -- ACCEPT (header) => " + ConfigData.HeaderAccept.Value);
|
||||
}
|
||||
|
||||
// Check ACCEPT-LANG header Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.HeaderAcceptLang.Value))
|
||||
{
|
||||
throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT-LANG header !", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
Output("Validated Setting -- ACCEPT-LANG (header) => " + ConfigData.HeaderAcceptLang.Value);
|
||||
}
|
||||
|
||||
// Check USER-AGENT header Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.HeaderUserAgent.Value))
|
||||
{
|
||||
throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an USER-AGENT header !", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
Output("Validated Setting -- USER-AGENT (header) => " + ConfigData.HeaderUserAgent.Value);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Browser simulation must be enabled (otherwise, this provider will not work due to tracker's security)
|
||||
throw new ExceptionWithConfigData("Browser Simulation must be enabled for this provider to work, please enable it !", ConfigData);
|
||||
}
|
||||
|
||||
// Check Dev Cache Settings
|
||||
if (ConfigData.HardDriveCache.Value)
|
||||
{
|
||||
Output("\nValidated Setting -- DEV Hard Drive Cache enabled");
|
||||
|
||||
// Check if Dev Mode enabled !
|
||||
if (!ConfigData.DevMode.Value)
|
||||
{
|
||||
throw new ExceptionWithConfigData("Hard Drive is enabled but not in DEV MODE, Please enable DEV MODE !", ConfigData);
|
||||
}
|
||||
|
||||
// Check Cache Keep Time Setting
|
||||
if (!string.IsNullOrEmpty(ConfigData.HardDriveCacheKeepTime.Value))
|
||||
{
|
||||
try
|
||||
{
|
||||
Output("Validated Setting -- Cache Keep Time (ms) => " + Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value));
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
throw new ExceptionWithConfigData("Please enter a numeric hard drive keep time in ms !", ConfigData);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ExceptionWithConfigData("Hard Drive Cache enabled, Please enter a maximum keep time for cache !", ConfigData);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Delete cache if previously existed
|
||||
CleanCacheStorage(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -20,6 +20,11 @@ namespace Jackett.Common.Indexers
|
||||
[ExcludeFromCodeCoverage]
|
||||
public class SubsPlease : BaseWebIndexer
|
||||
{
|
||||
public override string[] AlternativeSiteLinks { get; protected set; } = {
|
||||
"https://subsplease.org/",
|
||||
"https://subsplease.nocensor.space/"
|
||||
};
|
||||
|
||||
private string ApiEndpoint => SiteLink + "/api/?";
|
||||
|
||||
public SubsPlease(IIndexerConfigurationService configService, Utils.Clients.WebClient wc, Logger l, IProtectionService ps, ICacheService cs)
|
||||
|
@@ -2,21 +2,20 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Specialized;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading.Tasks;
|
||||
using Jackett.Common.Models;
|
||||
using Jackett.Common.Models.IndexerConfig.Bespoke;
|
||||
using Jackett.Common.Models.IndexerConfig;
|
||||
using Jackett.Common.Services.Interfaces;
|
||||
using Jackett.Common.Utils;
|
||||
using Jackett.Common.Utils.Clients;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using NLog;
|
||||
using static Jackett.Common.Models.IndexerConfig.ConfigurationData;
|
||||
using WebRequest = Jackett.Common.Utils.Clients.WebRequest;
|
||||
|
||||
namespace Jackett.Common.Indexers
|
||||
@@ -25,21 +24,27 @@ namespace Jackett.Common.Indexers
|
||||
public class Xthor : BaseCachingWebIndexer
|
||||
{
|
||||
private static string ApiEndpoint => "https://api.xthor.tk/";
|
||||
private int MaxPagesHardLimit => 4;
|
||||
private string TorrentDetailsUrl => SiteLink + "details.php?id={id}";
|
||||
private string WebRequestDelay => ((SingleSelectConfigurationItem)configData.GetDynamic("webRequestDelay")).Value;
|
||||
private int MaxPages => Convert.ToInt32(((SingleSelectConfigurationItem)configData.GetDynamic("maxPages")).Value);
|
||||
private bool MaxPagesBypassForTMDB => ((BoolConfigurationItem)configData.GetDynamic("maxPagesBypassForTMDB")).Value;
|
||||
private string MultiReplacement => ((StringConfigurationItem)configData.GetDynamic("multiReplacement")).Value;
|
||||
private bool SubReplacement => ((BoolConfigurationItem)configData.GetDynamic("subReplacement")).Value;
|
||||
private bool EnhancedAnimeSearch => ((BoolConfigurationItem)configData.GetDynamic("enhancedAnimeSearch")).Value;
|
||||
private string SpecificLanguageAccent => ((SingleSelectConfigurationItem)configData.GetDynamic("specificLanguageAccent")).Value;
|
||||
private bool FreeleechOnly => ((BoolConfigurationItem)configData.GetDynamic("freeleechOnly")).Value;
|
||||
|
||||
public override string[] LegacySiteLinks { get; protected set; } = {
|
||||
"https://xthor.bz/",
|
||||
"https://xthor.to"
|
||||
};
|
||||
|
||||
private string TorrentDetailsUrl => SiteLink + "details.php?id={id}";
|
||||
private string ReplaceMulti => ConfigData.ReplaceMulti.Value;
|
||||
private bool EnhancedAnime => ConfigData.EnhancedAnime.Value;
|
||||
private ConfigurationDataXthor ConfigData => (ConfigurationDataXthor)configData;
|
||||
private ConfigurationDataPasskey ConfigData => (ConfigurationDataPasskey)configData;
|
||||
|
||||
public Xthor(IIndexerConfigurationService configService, Utils.Clients.WebClient w, Logger l,
|
||||
IProtectionService ps, ICacheService cs)
|
||||
: base(id: "xthor",
|
||||
name: "Xthor",
|
||||
: base(id: "xthor-api",
|
||||
name: "Xthor API",
|
||||
description: "General French Private Tracker",
|
||||
link: "https://xthor.tk/",
|
||||
caps: new TorznabCapabilities
|
||||
@@ -50,7 +55,7 @@ namespace Jackett.Common.Indexers
|
||||
},
|
||||
MovieSearchParams = new List<MovieSearchParam>
|
||||
{
|
||||
MovieSearchParam.Q
|
||||
MovieSearchParam.Q, MovieSearchParam.TmdbId
|
||||
},
|
||||
MusicSearchParams = new List<MusicSearchParam>
|
||||
{
|
||||
@@ -67,15 +72,13 @@ namespace Jackett.Common.Indexers
|
||||
p: ps,
|
||||
cacheService: cs,
|
||||
downloadBase: "https://xthor.tk/download.php?torrent=",
|
||||
configData: new ConfigurationDataXthor())
|
||||
configData: new ConfigurationDataPasskey()
|
||||
)
|
||||
{
|
||||
Encoding = Encoding.UTF8;
|
||||
Language = "fr-fr";
|
||||
Type = "private";
|
||||
|
||||
// Api has 1req/2s limit
|
||||
webclient.requestDelay = 2.1;
|
||||
|
||||
// Movies / Films
|
||||
AddCategoryMapping(118, TorznabCatType.MoviesBluRay, "Films 2160p/Bluray");
|
||||
AddCategoryMapping(119, TorznabCatType.MoviesBluRay, "Films 2160p/Remux");
|
||||
@@ -144,6 +147,106 @@ namespace Jackett.Common.Indexers
|
||||
AddCategoryMapping(21, TorznabCatType.PC, "Logiciels Applis PC");
|
||||
AddCategoryMapping(22, TorznabCatType.PCMac, "Logiciels Applis Mac");
|
||||
AddCategoryMapping(23, TorznabCatType.PCMobileAndroid, "Logiciels Smartphone");
|
||||
|
||||
// Dynamic Configuration
|
||||
ConfigData.AddDynamic("optionsConfigurationWarning", new DisplayInfoConfigurationItem(string.Empty, "<center><b>Available Options</b></center>,<br /><br /> <ul><li><b>Freeleech Only</b>: (<i>Restrictive</i>) If you want to discover only freeleech torrents to not impact your ratio, check the related box. So only torrents marked as freeleech will be returned instead of all.</li><br /><li><b>Specific Language</b>: (<i>Restrictive</i>) You can scope your searches with a specific language / accent.</li></ul>"));
|
||||
|
||||
var ConfigFreeleechOnly = new BoolConfigurationItem("Do you want to discover only freeleech tagged torrents ?");
|
||||
ConfigData.AddDynamic("freeleechOnly", ConfigFreeleechOnly);
|
||||
|
||||
var ConfigSpecificLanguageAccent = new SingleSelectConfigurationItem("Do you want to scope your searches with a specific language ? (Accent)", new Dictionary<string, string>
|
||||
{
|
||||
{"0", "All Voices (default)"},
|
||||
{"1", "Françaises"},
|
||||
{"2", "Quebecoises"},
|
||||
{"47", "Françaises et Québécoises"},
|
||||
{"3", "Anglaises"},
|
||||
{"4", "Japonaises"},
|
||||
{"5", "Espagnoles"},
|
||||
{"6", "Allemandes"},
|
||||
{"7", "Chinoises"},
|
||||
{"8", "Italiennes"},
|
||||
{"9", "Coréennes"},
|
||||
{"10", "Danoises"},
|
||||
{"11", "Russes"},
|
||||
{"12", "Portugaises"},
|
||||
{"13", "Hindi"},
|
||||
{"14", "Hollandaises"},
|
||||
{"15", "Suédoises"},
|
||||
{"16", "Norvégiennes"},
|
||||
{"17", "Thaïlandaises"},
|
||||
{"18", "Hébreu"},
|
||||
{"19", "Persanes"},
|
||||
{"20", "Arabes"},
|
||||
{"21", "Turques"},
|
||||
{"22", "Hongroises"},
|
||||
{"23", "Polonaises"},
|
||||
{"24", "Finnoises"},
|
||||
{"25", "Indonésiennes"},
|
||||
{"26", "Roumaines"},
|
||||
{"27", "Malaisiennes"},
|
||||
{"28", "Estoniennes"},
|
||||
{"29", "Islandaises"},
|
||||
{"30", "Grecques"},
|
||||
{"31", "Serbes"},
|
||||
{"32", "Norvégiennes"},
|
||||
{"33", "Ukrainiennes"},
|
||||
{"34", "Bulgares"},
|
||||
{"35", "Tagalogues"},
|
||||
{"36", "Xhosa"},
|
||||
{"37", "Kurdes"},
|
||||
{"38", "Bengali"},
|
||||
{"39", "Amhariques"},
|
||||
{"40", "Bosniaques"},
|
||||
{"41", "Malayalam"},
|
||||
{"42", "Télougou"},
|
||||
{"43", "Bambara"},
|
||||
{"44", "Catalanes"},
|
||||
{"45", "Tchèques"},
|
||||
{"46", "Afrikaans"}
|
||||
})
|
||||
{ Value = "0" };
|
||||
ConfigData.AddDynamic("specificLanguageAccent", ConfigSpecificLanguageAccent);
|
||||
|
||||
ConfigData.AddDynamic("advancedConfigurationWarning", new DisplayInfoConfigurationItem(string.Empty, "<center><b>Advanced Configuration</b></center>,<br /><br /> <center><b><u>WARNING !</u></b> <i>Be sure to read instructions before editing options bellow, you can <b>drastically reduce performance</b> of queries or have <b>non-accurate results</b>.</i></center><br/><br/><ul><li><b>Delay betwwen Requests</b>: (<i>not recommended</i>) you can increase delay to requests made to the tracker, but a minimum of 2.1s is enforced as there is an anti-spam protection.</li><br /><li><b>Max Pages</b>: (<i>not recommended</i>) you can increase max pages to follow when making a request. But be aware that others apps can consider this indexer not working if jackett take too many times to return results. Another thing is that API is very buggy on tracker side, most of time, results of next pages are same ... as the first page. Even if we deduplicate rows, you will loose performance for the same results. You can check logs to see if an higher pages following is not benefical, you will see an error percentage (duplicates) with recommandations.</li><br /><li><b>Bypass for TMDB</b>: (<i>recommended</i>) this indexer is compatible with TMDB queries (<i>for movies only</i>), so when requesting content with an TMDB ID, we will search directly ID on API instead of name. Results will be more accurate, so you can enable a max pages bypass for this query type. You will be at least limited by the hard limit of 4 pages.</li><br /><li><b>Enhanced Anime</b>: if you have \"Anime\", this will improve queries made to this tracker related to this type when making searches.</li><br /><li><b>Multi Replacement</b>: you can dynamically replace the word \"MULTI\" with another of your choice like \"MULTI.FRENCH\" for better analysis of 3rd party softwares.</li><li><b>Sub Replacement</b>: you can dynamically replace the word \"VOSTFR\" or \"SUBFRENCH\" with the word \"ENGLISH\" for better analysis of 3rd party softwares.</li></ul>"));
|
||||
|
||||
var ConfigWebRequestDelay = new SingleSelectConfigurationItem("Which delay do you want to apply between each requests made to tracker ?", new Dictionary<string, string>
|
||||
{
|
||||
{"2.1", "2.1s (minimum)"},
|
||||
{"2.2", "2.2s"},
|
||||
{"2.3", "2.3s"},
|
||||
{"2.4", "2.4s" },
|
||||
{"2.5", "2.5s"},
|
||||
{"2.6", "2.6s"}
|
||||
})
|
||||
{ Value = "2.1" };
|
||||
ConfigData.AddDynamic("webRequestDelay", ConfigWebRequestDelay);
|
||||
|
||||
var ConfigMaxPages = new SingleSelectConfigurationItem("How many pages do you want to follow ?", new Dictionary<string, string>
|
||||
{
|
||||
{"1", "1 (32 results - default / best perf.)"},
|
||||
{"2", "2 (64 results)"},
|
||||
{"3", "3 (96 results)"},
|
||||
{"4", "4 (128 results - hard limit max)" },
|
||||
})
|
||||
{ Value = "1" };
|
||||
ConfigData.AddDynamic("maxPages", ConfigMaxPages);
|
||||
|
||||
var ConfigMaxPagesBypassForTMDB = new BoolConfigurationItem("Do you want to bypass max pages for TMDB searches ? (Radarr) - Hard limit of 4") { Value = true };
|
||||
ConfigData.AddDynamic("maxPagesBypassForTMDB", ConfigMaxPagesBypassForTMDB);
|
||||
|
||||
var ConfigEnhancedAnimeSearch = new BoolConfigurationItem("Do you want to use enhanced ANIME search ?") { Value = false };
|
||||
ConfigData.AddDynamic("enhancedAnimeSearch", ConfigEnhancedAnimeSearch);
|
||||
|
||||
var ConfigMultiReplacement = new StringConfigurationItem("Do you want to replace \"MULTI\" keyword in release title by another word ?") { Value = "MULTI.FRENCH" };
|
||||
ConfigData.AddDynamic("multiReplacement", ConfigMultiReplacement);
|
||||
|
||||
var ConfigSubReplacement = new BoolConfigurationItem("Do you want to replace \"VOSTFR\" and \"SUBFRENCH\" with \"ENGLISH\" word ?") { Value = false };
|
||||
ConfigData.AddDynamic("subReplacement", ConfigSubReplacement);
|
||||
|
||||
// Api has 1req/2s limit (minimum)
|
||||
webclient.requestDelay = Convert.ToDouble(WebRequestDelay);
|
||||
|
||||
}
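// A minimal sketch of how a "1 request per ~2.1 s" limit like the webclient.requestDelay set
// above can be enforced on the caller side: remember when the last request was issued and wait
// out the remainder of the interval before the next one. The class is hypothetical, not the
// mechanism Jackett's WebClient actually uses.
using System;
using System.Threading.Tasks;

public class MinIntervalThrottle
{
    private readonly TimeSpan _interval;
    private DateTime _lastRequestUtc = DateTime.MinValue;

    public MinIntervalThrottle(double seconds) => _interval = TimeSpan.FromSeconds(seconds);

    public async Task WaitAsync()
    {
        var wait = _lastRequestUtc + _interval - DateTime.UtcNow;
        if (wait > TimeSpan.Zero)
            await Task.Delay(wait);
        _lastRequestUtc = DateTime.UtcNow;
    }
}
// Usage sketch: var throttle = new MinIntervalThrottle(2.1); await throttle.WaitAsync(); // before each API call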
|
||||
|
||||
/// <summary>
|
||||
@@ -155,10 +258,9 @@ namespace Jackett.Common.Indexers
|
||||
// Warning 1998 is async method with no await calls inside
|
||||
// TODO: Remove pragma by wrapping return in Task.FromResult and removing async
|
||||
|
||||
#pragma warning disable 1998
|
||||
|
||||
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
|
||||
public override async Task<IndexerConfigurationStatus> ApplyConfiguration(JToken configJson)
|
||||
#pragma warning restore 1998
|
||||
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
|
||||
{
|
||||
// Provider not yet configured
|
||||
IsConfigured = false;
|
||||
@@ -166,8 +268,18 @@ namespace Jackett.Common.Indexers
|
||||
// Retrieve config values set by Jackett's user
|
||||
LoadValuesFromJson(configJson);
|
||||
|
||||
// Check & Validate Config
|
||||
ValidateConfig();
|
||||
logger.Debug("\nXthor - Validating Settings ... \n");
|
||||
|
||||
// Check Passkey Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.Passkey.Value))
|
||||
{
|
||||
throw new ExceptionWithConfigData("You must provide your passkey for this tracker to be allowed to use API !", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
logger.Debug("Xthor - Validated Setting -- PassKey (auth) => " + ConfigData.Passkey.Value);
|
||||
}
|
||||
|
||||
|
||||
// Tracker is now configured
|
||||
IsConfigured = true;
|
||||
@@ -186,85 +298,145 @@ namespace Jackett.Common.Indexers
|
||||
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
|
||||
{
|
||||
var releases = new List<ReleaseInfo>();
|
||||
var searchTerm = query.GetEpisodeSearchString() + " " + query.SanitizedSearchTerm; // use episode search string first, see issue #1202
|
||||
var searchTerm = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
|
||||
searchTerm = searchTerm.Trim();
|
||||
searchTerm = searchTerm.ToLower();
|
||||
searchTerm = searchTerm.Replace(" ", ".");
|
||||
|
||||
if (EnhancedAnime && query.HasSpecifiedCategories && (query.Categories.Contains(TorznabCatType.TVAnime.ID) || query.Categories.Contains(100032) || query.Categories.Contains(100101) || query.Categories.Contains(100110)))
|
||||
if (EnhancedAnimeSearch && query.HasSpecifiedCategories && (query.Categories.Contains(TorznabCatType.TVAnime.ID) || query.Categories.Contains(100032) || query.Categories.Contains(100101) || query.Categories.Contains(100110)))
|
||||
{
|
||||
var regex = new Regex(" ([0-9]+)");
|
||||
searchTerm = regex.Replace(searchTerm, " E$1");
|
||||
}
|
||||
|
||||
// Build our query
|
||||
var request = BuildQuery(searchTerm, query, ApiEndpoint);
|
||||
|
||||
// Getting results
|
||||
var results = await QueryTrackerAsync(request);
|
||||
|
||||
try
|
||||
// Multiple page support
|
||||
var nextPage = 1; var followingPages = true;
|
||||
do
|
||||
{
|
||||
// Deserialize our Json Response
|
||||
var xthorResponse = JsonConvert.DeserializeObject<XthorResponse>(results);
|
||||
|
||||
// Check Tracker's State
|
||||
CheckApiState(xthorResponse.Error);
|
||||
// Build our query
|
||||
var request = BuildQuery(searchTerm, query, ApiEndpoint, nextPage);
|
||||
|
||||
// If contains torrents
|
||||
if (xthorResponse.Torrents != null)
|
||||
// Getting results
|
||||
logger.Info("\nXthor - Querying API page " + nextPage);
|
||||
var results = await QueryTrackerAsync(request);
|
||||
|
||||
// Torrents Result Count
|
||||
var torrentsCount = 0;
|
||||
|
||||
try
|
||||
{
|
||||
// Adding each torrent row to releases
|
||||
// Exclude hidden torrents (category 106, example => search 'yoda' in the API) #10407
|
||||
releases.AddRange(xthorResponse.Torrents
|
||||
.Where(torrent => torrent.Category != 106).Select(torrent =>
|
||||
// Deserialize our Json Response
|
||||
var xthorResponse = JsonConvert.DeserializeObject<XthorResponse>(results);
|
||||
|
||||
// Check Tracker's State
|
||||
CheckApiState(xthorResponse.Error);
|
||||
|
||||
// If contains torrents
|
||||
if (xthorResponse.Torrents != null)
|
||||
{
|
||||
//issue #3847 replace multi keyword
|
||||
if (!string.IsNullOrEmpty(ReplaceMulti))
|
||||
{
|
||||
var regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
|
||||
torrent.Name = regex.Replace(torrent.Name, "$1" + ReplaceMulti + "$2");
|
||||
}
|
||||
// Store torrents rows count result
|
||||
torrentsCount = xthorResponse.Torrents.Count();
|
||||
logger.Info("\nXthor - Found " + torrentsCount + " torrents on current page.");
|
||||
|
||||
// issue #8759 replace vostfr and subfrench with English
|
||||
if (ConfigData.Vostfr.Value) torrent.Name = torrent.Name.Replace("VOSTFR","ENGLISH").Replace("SUBFRENCH","ENGLISH");
|
||||
// Adding each torrent row to releases
|
||||
// Exclude hidden torrents (category 106, example => search 'yoda' in the API) #10407
|
||||
releases.AddRange(xthorResponse.Torrents
|
||||
.Where(torrent => torrent.Category != 106).Select(torrent =>
|
||||
{
|
||||
//issue #3847 replace multi keyword
|
||||
if (!string.IsNullOrEmpty(MultiReplacement))
|
||||
{
|
||||
var regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
|
||||
torrent.Name = regex.Replace(torrent.Name, "$1" + MultiReplacement + "$2");
|
||||
}
|
||||
|
||||
var publishDate = DateTimeUtil.UnixTimestampToDateTime(torrent.Added);
|
||||
//TODO replace with download link?
|
||||
var guid = new Uri(TorrentDetailsUrl.Replace("{id}", torrent.Id.ToString()));
|
||||
var details = new Uri(TorrentDetailsUrl.Replace("{id}", torrent.Id.ToString()));
|
||||
var link = new Uri(torrent.Download_link);
|
||||
var release = new ReleaseInfo
|
||||
{
|
||||
// Mapping data
|
||||
Category = MapTrackerCatToNewznab(torrent.Category.ToString()),
|
||||
Title = torrent.Name,
|
||||
Seeders = torrent.Seeders,
|
||||
Peers = torrent.Seeders + torrent.Leechers,
|
||||
MinimumRatio = 1,
|
||||
MinimumSeedTime = 345600,
|
||||
PublishDate = publishDate,
|
||||
Size = torrent.Size,
|
||||
Grabs = torrent.Times_completed,
|
||||
Files = torrent.Numfiles,
|
||||
UploadVolumeFactor = 1,
|
||||
DownloadVolumeFactor = (torrent.Freeleech == 1 ? 0 : 1),
|
||||
Guid = guid,
|
||||
Details = details,
|
||||
Link = link,
|
||||
TMDb = torrent.Tmdb_id
|
||||
};
|
||||
// issue #8759 replace vostfr and subfrench with English
|
||||
if (SubReplacement)
|
||||
torrent.Name = torrent.Name.Replace("VOSTFR", "ENGLISH").Replace("SUBFRENCH", "ENGLISH");
|
||||
|
||||
return release;
|
||||
}));
|
||||
var publishDate = DateTimeUtil.UnixTimestampToDateTime(torrent.Added);
|
||||
//TODO replace with download link?
|
||||
var guid = new Uri(TorrentDetailsUrl.Replace("{id}", torrent.Id.ToString()));
|
||||
var details = new Uri(TorrentDetailsUrl.Replace("{id}", torrent.Id.ToString()));
|
||||
var link = new Uri(torrent.Download_link);
|
||||
var release = new ReleaseInfo
|
||||
{
|
||||
// Mapping data
|
||||
Category = MapTrackerCatToNewznab(torrent.Category.ToString()),
|
||||
Title = torrent.Name,
|
||||
Seeders = torrent.Seeders,
|
||||
Peers = torrent.Seeders + torrent.Leechers,
|
||||
MinimumRatio = 1,
|
||||
MinimumSeedTime = 345600,
|
||||
PublishDate = publishDate,
|
||||
Size = torrent.Size,
|
||||
Grabs = torrent.Times_completed,
|
||||
Files = torrent.Numfiles,
|
||||
UploadVolumeFactor = 1,
|
||||
DownloadVolumeFactor = (torrent.Freeleech == 1 ? 0 : 1),
|
||||
Guid = guid,
|
||||
Details = details,
|
||||
Link = link,
|
||||
TMDb = torrent.Tmdb_id
|
||||
};
|
||||
|
||||
return release;
|
||||
}));
|
||||
nextPage++;
|
||||
}
|
||||
else
|
||||
{
|
||||
logger.Info("\nXthor - No results found on page " + nextPage + ", stopping follow of next page.");
|
||||
// No results or no more results available
|
||||
followingPages = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
OnParseError("Unable to parse result \n" + ex.StackTrace, ex);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
OnParseError("Unable to parse result \n" + ex.StackTrace, ex);
|
||||
}
|
||||
|
||||
// Stop ?
|
||||
if(query.IsTmdbQuery && MaxPagesBypassForTMDB)
|
||||
{
|
||||
if(nextPage > MaxPagesHardLimit)
|
||||
{
|
||||
logger.Info("\nXthor - Stopping follow of next page " + nextPage + " due to page hard limit reached.");
|
||||
break;
|
||||
}
|
||||
logger.Info("\nXthor - Continue to next page " + nextPage + " due to TMDB request and activated max page bypass for this type of query. Max page hard limit: 4.");
|
||||
continue;
|
||||
}
|
||||
else
|
||||
{
|
||||
if(torrentsCount < 32)
|
||||
{
|
||||
logger.Info("\nXthor - Stopping follow of next page " + nextPage + " due max available results reached.");
|
||||
break;
|
||||
} else if(nextPage > MaxPages)
|
||||
{
|
||||
logger.Info("\nXthor - Stopping follow of next page " + nextPage + " due to page limit reached.");
|
||||
break;
|
||||
} else if (query.IsTest)
|
||||
{
|
||||
logger.Info("\nXthor - Stopping follow of next page " + nextPage + " due to index test query.");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
} while (followingPages);
|
||||
|
||||
// Check if there is duplicate and return unique rows - Xthor API can be very buggy !
|
||||
var uniqReleases = releases.GroupBy(x => x.Guid).Select(x => x.First()).ToList();
|
||||
var errorPercentage = 1 - ((double) uniqReleases.Count() / releases.Count());
|
||||
if(errorPercentage >= 0.25)
|
||||
{
|
||||
logger.Warn("\nXthor - High percentage error detected: " + string.Format("{0:0.0%}", errorPercentage) + "\nWe strongly recommend that you lower max page to 1, as there is no benefit to grab additionnals.\nTracker API sent us duplicated pages with same results, even if we deduplicate returned rows, please consider to lower as it's unnecessary and increase time used for query for the same result.");
|
||||
}
|
||||
// Return found releases
|
||||
return releases;
|
||||
return uniqReleases;
|
||||
}
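// A compact sketch of the deduplication and duplicate-rate computation performed at the end of
// PerformQuery above: keep the first item per key and report which fraction of the raw rows
// were duplicates (the Jackett code keys on ReleaseInfo.Guid). The helper name is hypothetical.
using System;
using System.Collections.Generic;
using System.Linq;

public static class Dedup
{
    public static (List<T> Unique, double DuplicateRate) ByKey<T, TKey>(IEnumerable<T> items, Func<T, TKey> key)
    {
        var all = items.ToList();
        var unique = all.GroupBy(key).Select(g => g.First()).ToList();
        var rate = all.Count == 0 ? 0 : 1 - (double)unique.Count / all.Count;
        return (unique, rate);
    }
}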
|
||||
|
||||
/// <summary>
|
||||
@@ -330,26 +502,33 @@ namespace Jackett.Common.Indexers
|
||||
/// <param name="query">Torznab Query for categories mapping</param>
|
||||
/// <param name="url">Search url for provider</param>
|
||||
/// <returns>URL to query for parsing and processing results</returns>
|
||||
private string BuildQuery(string term, TorznabQuery query, string url)
|
||||
private string BuildQuery(string term, TorznabQuery query, string url, int page = 1)
|
||||
{
|
||||
var parameters = new NameValueCollection();
|
||||
var categoriesList = MapTorznabCapsToTrackers(query);
|
||||
|
||||
// Passkey
|
||||
parameters.Add("passkey", ConfigData.PassKey.Value);
|
||||
parameters.Add("passkey", ConfigData.Passkey.Value);
|
||||
|
||||
// If search term provided
|
||||
if (!string.IsNullOrWhiteSpace(term))
|
||||
if (query.IsTmdbQuery)
|
||||
{
|
||||
// Add search term
|
||||
// ReSharper disable once AssignNullToNotNullAttribute
|
||||
parameters.Add("search", WebUtility.UrlEncode(term));
|
||||
logger.Info("\nXthor - Search requested for movie with TMDB ID n°" + query.TmdbID.ToString());
|
||||
parameters.Add("tmdbid", query.TmdbID.ToString());
|
||||
}
|
||||
else
|
||||
{
|
||||
parameters.Add("search", string.Empty);
|
||||
// Showing all torrents (just for output function)
|
||||
term = "all";
|
||||
if (!string.IsNullOrWhiteSpace(term))
|
||||
{
|
||||
// Add search term
|
||||
logger.Info("\nXthor - Search requested for movie with title \"" + term + "\"");
|
||||
parameters.Add("search", WebUtility.UrlEncode(term));
|
||||
}
|
||||
else
|
||||
{
|
||||
logger.Info("\nXthor - Global search requested without term");
|
||||
parameters.Add("search", string.Empty);
|
||||
// Showing all torrents
|
||||
}
|
||||
}
|
||||
|
||||
// Loop on Categories needed
|
||||
@@ -359,20 +538,27 @@ namespace Jackett.Common.Indexers
|
||||
}
|
||||
|
||||
// If Only Freeleech Enabled
|
||||
if (ConfigData.Freeleech.Value)
|
||||
if (FreeleechOnly)
|
||||
{
|
||||
parameters.Add("freeleech", "1");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(ConfigData.Accent.Value))
|
||||
// If Specific Language Accent Requested
|
||||
if (!string.IsNullOrEmpty(SpecificLanguageAccent) && SpecificLanguageAccent != "0")
|
||||
{
|
||||
parameters.Add("accent", ConfigData.Accent.Value);
|
||||
parameters.Add("accent", SpecificLanguageAccent);
|
||||
}
|
||||
|
||||
// Pages handling
|
||||
if (page > 1 && !query.IsTest)
|
||||
{
|
||||
parameters.Add("page", page.ToString());
|
||||
}
|
||||
|
||||
// Building our query -- Cannot use GetQueryString due to UrlEncode (generating wrong category param)
|
||||
url += "?" + string.Join("&", parameters.AllKeys.Select(a => a + "=" + parameters[a]));
|
||||
|
||||
logger.Debug("\nBuilded query for \"" + term + "\"... " + url);
|
||||
logger.Info("\nXthor - Builded query: " + url);
|
||||
|
||||
// Return our search url
|
||||
return url;
|
||||
@@ -416,63 +602,36 @@ namespace Jackett.Common.Indexers
|
||||
{
|
||||
case 0:
|
||||
// Everything OK
|
||||
logger.Debug("\nAPI State : Everything OK ... -> " + state.Descr);
|
||||
logger.Debug("\nXthor - API State : Everything OK ... -> " + state.Descr);
|
||||
break;
|
||||
|
||||
case 1:
|
||||
// Passkey not found
|
||||
logger.Debug("\nAPI State : Error, Passkey not found in tracker's database, aborting... -> " + state.Descr);
|
||||
logger.Error("\nXthor - API State : Error, Passkey not found in tracker's database, aborting... -> " + state.Descr);
|
||||
throw new Exception("Passkey not found in tracker's database");
|
||||
case 2:
|
||||
// No results
|
||||
logger.Debug("\nAPI State : No results for query ... -> " + state.Descr);
|
||||
logger.Info("\nXthor - API State : No results for query ... -> " + state.Descr);
|
||||
break;
|
||||
|
||||
case 3:
|
||||
// Power Saver
|
||||
logger.Debug("\nAPI State : Power Saver mode, only cached query with no parameters available ... -> " + state.Descr);
|
||||
logger.Warn("\nXthor - API State : Power Saver mode, only cached query with no parameters available ... -> " + state.Descr);
|
||||
break;
|
||||
|
||||
case 4:
|
||||
// DDOS Attack, API disabled
|
||||
logger.Debug("\nAPI State : Tracker is under DDOS attack, API disabled, aborting ... -> " + state.Descr);
|
||||
logger.Error("\nXthor - API State : Tracker is under DDOS attack, API disabled, aborting ... -> " + state.Descr);
|
||||
throw new Exception("Tracker is under DDOS attack, API disabled");
|
||||
case 8:
|
||||
// AntiSpam Protection
|
||||
logger.Debug("\nAPI State : Triggered AntiSpam Protection -> " + state.Descr);
|
||||
logger.Warn("\nXthor - API State : Triggered AntiSpam Protection -> " + state.Descr);
|
||||
throw new Exception("Triggered AntiSpam Protection, please delay your requests !");
|
||||
default:
|
||||
// Unknown state
|
||||
logger.Debug("\nAPI State : Unknown state, aborting querying ... -> " + state.Descr);
|
||||
logger.Error("\nXthor - API State : Unknown state, aborting querying ... -> " + state.Descr);
|
||||
throw new Exception("Unknown state, aborting querying");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validate Config entered by user on Jackett
|
||||
/// </summary>
|
||||
private void ValidateConfig()
|
||||
{
|
||||
logger.Debug("\nValidating Settings ... \n");
|
||||
|
||||
// Check Passkey Setting
|
||||
if (string.IsNullOrEmpty(ConfigData.PassKey.Value))
|
||||
{
|
||||
throw new ExceptionWithConfigData("You must provide your passkey for this tracker to be allowed to use API !", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
logger.Debug("Validated Setting -- PassKey (auth) => " + ConfigData.PassKey.Value);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(ConfigData.Accent.Value) && !string.Equals(ConfigData.Accent.Value, "1") && !string.Equals(ConfigData.Accent.Value, "2"))
|
||||
{
|
||||
throw new ExceptionWithConfigData("Only '1' or '2' are available in the Accent parameter.", ConfigData);
|
||||
}
|
||||
else
|
||||
{
|
||||
logger.Debug("Validated Setting -- Accent (audio) => " + ConfigData.Accent.Value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -13,22 +13,6 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
|
||||
public DisplayInfoConfigurationItem PagesWarning { get; private set; }
|
||||
public StringConfigurationItem ReplaceMulti { get; private set; }
|
||||
public StringConfigurationItem Pages { get; private set; }
|
||||
public DisplayInfoConfigurationItem SecurityWarning { get; private set; }
|
||||
public BoolConfigurationItem Latency { get; private set; }
|
||||
public BoolConfigurationItem Browser { get; private set; }
|
||||
public DisplayInfoConfigurationItem LatencyWarning { get; private set; }
|
||||
public StringConfigurationItem LatencyStart { get; private set; }
|
||||
public StringConfigurationItem LatencyEnd { get; private set; }
|
||||
public DisplayInfoConfigurationItem HeadersWarning { get; private set; }
|
||||
public StringConfigurationItem HeaderAccept { get; private set; }
|
||||
public StringConfigurationItem HeaderAcceptLang { get; private set; }
|
||||
public BoolConfigurationItem HeaderDNT { get; private set; }
|
||||
public BoolConfigurationItem HeaderUpgradeInsecure { get; private set; }
|
||||
public StringConfigurationItem HeaderUserAgent { get; private set; }
|
||||
public DisplayInfoConfigurationItem DevWarning { get; private set; }
|
||||
public BoolConfigurationItem DevMode { get; private set; }
|
||||
public BoolConfigurationItem HardDriveCache { get; private set; }
|
||||
public StringConfigurationItem HardDriveCacheKeepTime { get; private set; }
|
||||
|
||||
public ConfigurationDataAbnormal()
|
||||
: base()
|
||||
@@ -41,22 +25,6 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
|
||||
PagesWarning = new DisplayInfoConfigurationItem("Preferences", "<b>Preferences Configuration</b> (<i>Tweak your search settings</i>),<br /><br /> <ul><li><b>Replace MULTI</b>, replace multi keyword in the resultset (leave empty to deactivate)</li><li><b>Max Pages to Process</b> let you specify how many page (max) Jackett can process when doing a search. Setting a value <b>higher than 4 is dangerous</b> for you account ! (<b>Result of too many requests to tracker...that <u>will be suspect</u></b>).</li></ul>");
|
||||
Pages = new StringConfigurationItem("Max Pages to Process (Required)") { Value = "4" };
|
||||
ReplaceMulti = new StringConfigurationItem("Replace MULTI") { Value = "MULTI.FRENCH" };
|
||||
SecurityWarning = new DisplayInfoConfigurationItem("Security", "<b>Security Configuration</b> (<i>Read this area carefully !</i>),<br /><br /> <ul><li><b>Latency Simulation</b> will simulate human browsing with Jacket by pausing Jacket for an random time between each request, to fake a real content browsing.</li><li><b>Browser Simulation</b> will simulate a real human browser by injecting additionals headers when doing requests to tracker.</li></ul>");
|
||||
Latency = new BoolConfigurationItem("Latency Simulation (Optional)") { Value = false };
|
||||
Browser = new BoolConfigurationItem("Browser Simulation (Optional)") { Value = true };
|
||||
LatencyWarning = new DisplayInfoConfigurationItem("Simulate Latency", "<b>Latency Configuration</b> (<i>Required if latency simulation enabled</i>),<br /><br/> <ul><li>By filling this range, <b>Jackett will make a random timed pause</b> <u>between requests</u> to tracker <u>to simulate a real browser</u>.</li><li>MilliSeconds <b>only</b></li></ul>");
|
||||
LatencyStart = new StringConfigurationItem("Minimum Latency (ms)") { Value = "1589" };
|
||||
LatencyEnd = new StringConfigurationItem("Maximum Latency (ms)") { Value = "3674" };
|
||||
HeadersWarning = new DisplayInfoConfigurationItem("Injecting headers", "<b>Browser Headers Configuration</b> (<i>Required if browser simulation enabled</i>),<br /><br /> <ul><li>By filling these fields, <b>Jackett will inject headers</b> with your values <u>to simulate a real browser</u>.</li><li>You can get <b>your browser values</b> here: <a href='https://www.whatismybrowser.com/detect/what-http-headers-is-my-browser-sending' target='blank'>www.whatismybrowser.com</a></li></ul><br /><i><b>Note that</b> some headers are not necessary because they are injected automatically by this provider such as Accept_Encoding, Connection, Host or X-Requested-With</i>");
|
||||
HeaderAccept = new StringConfigurationItem("Accept") { Value = "" };
|
||||
HeaderAcceptLang = new StringConfigurationItem("Accept-Language") { Value = "" };
|
||||
HeaderDNT = new BoolConfigurationItem("DNT") { Value = false };
|
||||
HeaderUpgradeInsecure = new BoolConfigurationItem("Upgrade-Insecure-Requests") { Value = false };
|
||||
HeaderUserAgent = new StringConfigurationItem("User-Agent") { Value = "" };
|
||||
DevWarning = new DisplayInfoConfigurationItem("Development", "<b>Development Facility</b> (<i>For Developers ONLY</i>),<br /><br /> <ul><li>By enabling development mode, <b>Jackett will bypass his cache</b> and will <u>output debug messages to console</u> instead of his log file.</li><li>By enabling Hard Drive Cache, <b>This provider</b> will <u>save each query answers from tracker</u> in temp directory, in fact this reduce drastically HTTP requests when building a provider at parsing step for example. So, <b> Jackett will search for a cached query answer on hard drive before executing query on tracker side !</b> <i>DEV MODE must be enabled to use it !</li></ul>");
|
||||
DevMode = new BoolConfigurationItem("Enable DEV MODE (Developers ONLY)") { Value = false };
|
||||
HardDriveCache = new BoolConfigurationItem("Enable HARD DRIVE CACHE (Developers ONLY)") { Value = false };
|
||||
HardDriveCacheKeepTime = new StringConfigurationItem("Keep Cached files for (ms)") { Value = "300000" };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -11,22 +11,6 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
|
||||
public DisplayInfoConfigurationItem PagesWarning { get; private set; }
|
||||
public StringConfigurationItem Pages { get; private set; }
|
||||
public BoolConfigurationItem UseFullSearch { get; private set; }
|
||||
public DisplayInfoConfigurationItem SecurityWarning { get; private set; }
|
||||
public BoolConfigurationItem Latency { get; private set; }
|
||||
public BoolConfigurationItem Browser { get; private set; }
|
||||
public DisplayInfoConfigurationItem LatencyWarning { get; private set; }
|
||||
public StringConfigurationItem LatencyStart { get; private set; }
|
||||
public StringConfigurationItem LatencyEnd { get; private set; }
|
||||
public DisplayInfoConfigurationItem HeadersWarning { get; private set; }
|
||||
public StringConfigurationItem HeaderAccept { get; private set; }
|
||||
public StringConfigurationItem HeaderAcceptLang { get; private set; }
|
||||
public BoolConfigurationItem HeaderDnt { get; private set; }
|
||||
public BoolConfigurationItem HeaderUpgradeInsecure { get; private set; }
|
||||
public StringConfigurationItem HeaderUserAgent { get; private set; }
|
||||
public DisplayInfoConfigurationItem DevWarning { get; private set; }
|
||||
public BoolConfigurationItem DevMode { get; private set; }
|
||||
public BoolConfigurationItem HardDriveCache { get; private set; }
|
||||
public StringConfigurationItem HardDriveCacheKeepTime { get; private set; }
|
||||
|
||||
public ConfigurationDataNorbits()
|
||||
{
|
||||
@@ -36,22 +20,6 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
|
||||
PagesWarning = new DisplayInfoConfigurationItem("Preferences", "<b>Preferences Configuration</b> (<i>Tweak your search settings</i>),<br /><br /> <ul><li><b>Max Pages to Process</b> let you specify how many page (max) Jackett can process when doing a search. Setting a value <b>higher than 4 is dangerous</b> for you account ! (<b>Result of too many requests to tracker...that <u>will be suspect</u></b>).</li></ul>");
|
||||
Pages = new StringConfigurationItem("Max Pages to Process (Required)") { Value = "4" };
|
||||
UseFullSearch = new BoolConfigurationItem("Enable search in description.") { Value = false };
|
||||
SecurityWarning = new DisplayInfoConfigurationItem("Security", "<b>Security Configuration</b> (<i>Read this area carefully !</i>),<br /><br /> <ul><li><b>Latency Simulation</b> will simulate human browsing with Jacket by pausing Jacket for an random time between each request, to fake a real content browsing.</li><li><b>Browser Simulation</b> will simulate a real human browser by injecting additionals headers when doing requests to tracker.<b>You must enable it to use this provider!</b></li></ul>");
|
||||
Latency = new BoolConfigurationItem("Latency Simulation (Optional)") { Value = false };
|
||||
Browser = new BoolConfigurationItem("Browser Simulation (Forced)") { Value = true };
|
||||
LatencyWarning = new DisplayInfoConfigurationItem("Simulate Latency", "<b>Latency Configuration</b> (<i>Required if latency simulation enabled</i>),<br /><br/> <ul><li>By filling this range, <b>Jackett will make a random timed pause</b> <u>between requests</u> to tracker <u>to simulate a real browser</u>.</li><li>MilliSeconds <b>only</b></li></ul>");
|
||||
LatencyStart = new StringConfigurationItem("Minimum Latency (ms)") { Value = "1589" };
|
||||
LatencyEnd = new StringConfigurationItem("Maximum Latency (ms)") { Value = "3674" };
|
||||
HeadersWarning = new DisplayInfoConfigurationItem("Injecting headers", "<b>Browser Headers Configuration</b> (<i>Required if browser simulation enabled</i>),<br /><br /> <ul><li>By filling these fields, <b>Jackett will inject headers</b> with your values <u>to simulate a real browser</u>.</li><li>You can get <b>your browser values</b> here: <a href='https://www.whatismybrowser.com/detect/what-http-headers-is-my-browser-sending' target='blank'>www.whatismybrowser.com</a></li></ul><br /><i><b>Note that</b> some headers are not necessary because they are injected automatically by this provider such as Accept_Encoding, Connection, Host or X-Requested-With</i>");
|
||||
HeaderAccept = new StringConfigurationItem("Accept") { Value = "" };
|
||||
HeaderAcceptLang = new StringConfigurationItem("Accept-Language") { Value = "" };
|
||||
HeaderDnt = new BoolConfigurationItem("DNT") { Value = false };
|
||||
HeaderUpgradeInsecure = new BoolConfigurationItem("Upgrade-Insecure-Requests") { Value = false };
|
||||
HeaderUserAgent = new StringConfigurationItem("User-Agent") { Value = "" };
|
||||
DevWarning = new DisplayInfoConfigurationItem("Development", "<b>Development Facility</b> (<i>For Developers ONLY</i>),<br /><br /> <ul><li>By enabling development mode, <b>Jackett will bypass his cache</b> and will <u>output debug messages to console</u> instead of his log file.</li><li>By enabling Hard Drive Cache, <b>This provider</b> will <u>save each query answers from tracker</u> in temp directory, in fact this reduce drastically HTTP requests when building a provider at parsing step for example. So, <b> Jackett will search for a cached query answer on hard drive before executing query on tracker side !</b> <i>DEV MODE must be enabled to use it !</li></ul>");
|
||||
DevMode = new BoolConfigurationItem("Enable DEV MODE (Developers ONLY)") { Value = false };
|
||||
HardDriveCache = new BoolConfigurationItem("Enable HARD DRIVE CACHE (Developers ONLY)") { Value = false };
|
||||
HardDriveCacheKeepTime = new StringConfigurationItem("Keep Cached files for (ms)") { Value = "300000" };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,31 +0,0 @@
using System.Diagnostics.CodeAnalysis;

namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
[ExcludeFromCodeCoverage]
internal class ConfigurationDataXthor : ConfigurationData
{
public DisplayInfoConfigurationItem CredentialsWarning { get; private set; }
public StringConfigurationItem PassKey { get; set; }
public DisplayInfoConfigurationItem PagesWarning { get; private set; }
public StringConfigurationItem Accent { get; set; }
public BoolConfigurationItem Freeleech { get; private set; }
public StringConfigurationItem ReplaceMulti { get; private set; }
public BoolConfigurationItem EnhancedAnime { get; private set; }

public BoolConfigurationItem Vostfr { get; private set; }

public ConfigurationDataXthor()
: base()
{
CredentialsWarning = new DisplayInfoConfigurationItem("Credentials", "<b>Credentials Configuration</b> (<i>Private Tracker</i>),<br /><br /> <ul><li><b>PassKey</b> is your private key on your account</li></ul>");
PassKey = new StringConfigurationItem("PassKey") { Value = "" };
Accent = new StringConfigurationItem("Accent") { Value = "" };
PagesWarning = new DisplayInfoConfigurationItem("Preferences", "<b>Preferences Configuration</b> (<i>Tweak your search settings</i>),<br /><br /> <ul><li><b>Freeleech Only</b> let you search <u>only</u> for torrents which are marked Freeleech.</li><li><b>Replace MULTI</b>, replace multi keyword in the resultset (leave empty to deactivate)</li><li><b>Enhanced anime search</b>, Enhance sonarr compatibility with Xthor. Only effective on requests with the <u>TVAnime Torznab category</u>.</li><li><b>Accent</b> is the french accent you want. 1 for VFF (Truefrench) 2 for VFQ (FRENCH, canada). When one is selected, the other will not be searched.</li></ul>");
Freeleech = new BoolConfigurationItem("Freeleech Only (Optional)") { Value = false };
ReplaceMulti = new StringConfigurationItem("Replace MULTI") { Value = "MULTI.FRENCH" };
EnhancedAnime = new BoolConfigurationItem("Enhanced anime search") { Value = false };
Vostfr = new BoolConfigurationItem("Replace VOSTFR or SUBFRENCH with ENGLISH") { Value = false };
}
}
}
@@ -0,0 +1,14 @@
namespace Jackett.Common.Models.IndexerConfig
{
public class ConfigurationDataAPIKeyAndRSSKey : ConfigurationData
{
public StringConfigurationItem ApiKey { get; private set; }
public StringConfigurationItem RSSKey { get; private set; }

public ConfigurationDataAPIKeyAndRSSKey()
{
ApiKey = new StringConfigurationItem("API Key");
RSSKey = new StringConfigurationItem("RSS Key");
}
}
}
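As a quick illustration of how an indexer might consume this new configuration type, a sketch follows; the tracker host and query-string shape are made up for the example and are not taken from this change.

// Illustrative only: read the two values the user entered and build authenticated URLs.
var config = new ConfigurationDataAPIKeyAndRSSKey();
// ... after the saved settings have been applied to the items ...
var apiKey = config.ApiKey.Value;
var rssKey = config.RSSKey.Value;
var searchUrl = $"https://tracker.example/api/torrents?apikey={apiKey}&q=ubuntu";
var rssUrl = $"https://tracker.example/rss/{rssKey}";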
@@ -42,11 +42,13 @@ namespace Jackett.Common.Utils
}

// ex: "2 hours 1 day"
public static DateTime FromTimeAgo(string str)
public static DateTime FromTimeAgo(string str, DateTime? relativeFrom = null)
{
str = str.ToLowerInvariant();
var now = relativeFrom ?? DateTime.Now;

if (str.Contains("now"))
return DateTime.SpecifyKind(DateTime.Now, DateTimeKind.Local);
return DateTime.SpecifyKind(now, DateTimeKind.Local);

str = str.Replace(",", "");
str = str.Replace("ago", "");
@@ -80,7 +82,7 @@ namespace Jackett.Common.Utils
throw new Exception("TimeAgo parsing failed, unknown unit: " + unit);
}

return DateTime.SpecifyKind(DateTime.Now - timeAgo, DateTimeKind.Local);
return DateTime.SpecifyKind(now - timeAgo, DateTimeKind.Local);
}

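A short usage sketch of the reworked FromTimeAgo; the input string follows the "2 hours 1 day" format from the comment above, and the reference date is an assumed example value.

// Illustrative only: with relativeFrom supplied, the "ago" offset is deterministic.
var reference = new DateTime(2021, 3, 12, 12, 0, 0, DateTimeKind.Local);
var posted = DateTimeUtil.FromTimeAgo("2 hours 1 day ago", relativeFrom: reference);
// posted == 2021-03-11 10:00:00 (DateTimeKind.Local); without relativeFrom it falls back to DateTime.Now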
// Uses the DateTimeRoutines library to parse the date
@@ -97,14 +99,23 @@ namespace Jackett.Common.Utils

throw new Exception("FromFuzzyTime parsing failed");
}

private static DateTime FromFuzzyPastTime(string str, string format, DateTime now)
{
var result = FromFuzzyTime(str, format);
if (result > now)
result = result.AddYears(-1);
return result;
}

public static DateTime FromUnknown(string str, string format = null)
public static DateTime FromUnknown(string str, string format = null, DateTime? relativeFrom = null)
{
try
{
str = ParseUtil.NormalizeSpace(str);
var now = relativeFrom ?? DateTime.Now;
if (str.ToLower().Contains("now"))
return DateTime.Now;
return now;

// ... ago
var match = _TimeAgoRegexp.Match(str);
@@ -119,7 +130,7 @@ namespace Jackett.Common.Utils
if (match.Success)
{
var time = str.Replace(match.Groups[0].Value, "");
var dt = DateTime.SpecifyKind(DateTime.UtcNow.Date, DateTimeKind.Unspecified);
var dt = DateTime.SpecifyKind(now.Date, DateTimeKind.Unspecified);
dt += ParseTimeSpan(time);
return dt;
}
@@ -129,7 +140,7 @@ namespace Jackett.Common.Utils
if (match.Success)
{
var time = str.Replace(match.Groups[0].Value, "");
var dt = DateTime.SpecifyKind(DateTime.UtcNow.Date, DateTimeKind.Unspecified);
var dt = DateTime.SpecifyKind(now.Date, DateTimeKind.Unspecified);
dt += ParseTimeSpan(time);
dt -= TimeSpan.FromDays(1);
return dt;
@@ -140,7 +151,7 @@ namespace Jackett.Common.Utils
if (match.Success)
{
var time = str.Replace(match.Groups[0].Value, "");
var dt = DateTime.SpecifyKind(DateTime.UtcNow.Date, DateTimeKind.Unspecified);
var dt = DateTime.SpecifyKind(now.Date, DateTimeKind.Unspecified);
dt += ParseTimeSpan(time);
dt += TimeSpan.FromDays(1);
return dt;
@@ -151,7 +162,7 @@ namespace Jackett.Common.Utils
if (match.Success)
{
var time = str.Replace(match.Groups[0].Value, "");
var dt = DateTime.SpecifyKind(DateTime.UtcNow.Date, DateTimeKind.Unspecified);
var dt = DateTime.SpecifyKind(now.Date, DateTimeKind.Unspecified);
dt += ParseTimeSpan(time);

DayOfWeek dow;
@@ -188,8 +199,9 @@ namespace Jackett.Common.Utils
if (match.Success)
{
var date = match.Groups[1].Value;
var newDate = DateTime.Now.Year + "-" + date;
var newDate = now.Year + "-" + date;
str = str.Replace(date, newDate);
return FromFuzzyPastTime(str, format, now);
}

// add missing year 2
@@ -198,7 +210,8 @@ namespace Jackett.Common.Utils
{
var date = match.Groups[1].Value;
var time = match.Groups[2].Value;
str = date + " " + DateTime.Now.Year + " " + time;
str = date + " " + now.Year + " " + time;
return FromFuzzyPastTime(str, format, now);
}

return FromFuzzyTime(str, format);
@@ -210,8 +223,10 @@ namespace Jackett.Common.Utils
}

// converts a date/time string to a DateTime object using a GoLang layout
public static DateTime ParseDateTimeGoLang(string date, string layout)
public static DateTime ParseDateTimeGoLang(string date, string layout, DateTime? relativeFrom = null)
{
var now = relativeFrom ?? DateTime.Now;

date = ParseUtil.NormalizeSpace(date);
var pattern = layout;

@@ -278,7 +293,10 @@ namespace Jackett.Common.Utils

try
{
return DateTime.ParseExact(date, pattern, CultureInfo.InvariantCulture);
var dateTime = DateTime.ParseExact(date, pattern, CultureInfo.InvariantCulture);
if (!pattern.Contains("yy") && dateTime > now)
dateTime = dateTime.AddYears(-1);
return dateTime;
}
catch (FormatException ex)
{
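To make the intent of the new relativeFrom parameter concrete, a brief usage sketch; the expected values mirror the test cases below, and the reference date is only an example.

// Illustrative only: pinning "now" makes relative and year-less dates deterministic,
// which is what the updated tests rely on.
var reference = new DateTime(2021, 3, 12, 12, 0, 0, DateTimeKind.Local);
var a = DateTimeUtil.FromUnknown("3 hours ago", relativeFrom: reference);                        // 2021-03-12 09:00
var b = DateTimeUtil.FromUnknown("2 May 10:30", relativeFrom: reference);                        // 2020-05-02 10:30 (future date rolled back a year)
var c = DateTimeUtil.ParseDateTimeGoLang("7am Sep. 14", "3pm Jan. 2", relativeFrom: reference);  // 2020-09-14 07:00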
@@ -88,7 +88,7 @@ namespace Jackett.Test.Common.Utils
public void FromUnknownTest()
{
var now = DateTime.Now;
var today = DateTime.UtcNow.Date;
var today = now.ToUniversalTime().Date;
var yesterday = today.AddDays(-1);
var tomorrow = today.AddDays(1);
var testCases = new Dictionary<string, DateTime>
@@ -105,27 +105,28 @@ namespace Jackett.Test.Common.Utils
};

foreach (var testCase in testCases)
Assert.AreEqual(testCase.Value, DateTimeUtil.FromUnknown(testCase.Key));
Assert.AreEqual(testCase.Value, DateTimeUtil.FromUnknown(testCase.Key, relativeFrom: now));

AssertSimilarDates(now, DateTimeUtil.FromUnknown("now"));
AssertSimilarDates(now.AddHours(-3), DateTimeUtil.FromUnknown("3 hours ago"));
Assert.AreEqual(now, DateTimeUtil.FromUnknown("now", relativeFrom: now));
AssertSimilarDates(now.AddHours(-3), DateTimeUtil.FromUnknown("3 hours ago", relativeFrom: now));

Assert.True((now - DateTimeUtil.FromUnknown("monday at 10:20 am")).TotalSeconds <= 3600 * 24 * 7); // 7 days
Assert.True((now - DateTimeUtil.FromUnknown("Tuesday at 22:20")).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("wednesday at \n 22:20")).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("\n thursday \n at 22:20")).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("friday at 22:20")).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("Saturday at 00:20")).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("sunday at 22:00")).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("monday at 10:20 am", relativeFrom: now)).TotalSeconds <= 3600 * 24 * 7); // 7 days
Assert.True((now - DateTimeUtil.FromUnknown("Tuesday at 22:20", relativeFrom: now)).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("wednesday at \n 22:20", relativeFrom: now)).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("\n thursday \n at 22:20", relativeFrom: now)).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("friday at 22:20", relativeFrom: now)).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("Saturday at 00:20", relativeFrom: now)).TotalSeconds <= 3600 * 24 * 7);
Assert.True((now - DateTimeUtil.FromUnknown("sunday at 22:00", relativeFrom: now)).TotalSeconds <= 3600 * 24 * 7);

Assert.AreEqual(new DateTime(2020, 10, 31, 3, 8, 27, DateTimeKind.Utc).ToLocalTime(),
DateTimeUtil.FromUnknown("1604113707"));
DateTimeUtil.FromUnknown("1604113707", relativeFrom: now));

Assert.AreEqual(new DateTime(now.Year, 2, 1), DateTimeUtil.FromUnknown("02-01"));
Assert.AreEqual(new DateTime(now.Year, 2, 1), DateTimeUtil.FromUnknown("2-1"));
Assert.AreEqual(new DateTime(now.Year, 1, 2, 10, 30, 0), DateTimeUtil.FromUnknown("2 Jan 10:30"));
var refDate = new DateTime(2021, 03, 12, 12, 00, 00, DateTimeKind.Local);
Assert.AreEqual(new DateTime(refDate.Year, 2, 1), DateTimeUtil.FromUnknown("02-01", relativeFrom: refDate));
Assert.AreEqual(new DateTime(refDate.Year, 2, 1), DateTimeUtil.FromUnknown("2-1", relativeFrom: refDate));
Assert.AreEqual(new DateTime(refDate.Year, 1, 2, 10, 30, 0), DateTimeUtil.FromUnknown("2 Jan 10:30", relativeFrom: refDate));

Assert.AreEqual(new DateTime(2005, 6, 10, 10, 30, 0),
Assert.AreEqual(new DateTime(2005, 6, 10, 10, 30, 0),
DateTimeUtil.FromUnknown("June 10, 2005 10:30AM"));

// bad cases
@@ -138,6 +139,13 @@ namespace Jackett.Test.Common.Utils
{
// ignored
}

Assert.AreEqual(new DateTime(refDate.Year - 1, 5, 2), DateTimeUtil.FromUnknown("05-02", relativeFrom: refDate));
Assert.AreEqual(new DateTime(refDate.Year - 1, 5, 2), DateTimeUtil.FromUnknown("5-2", relativeFrom: refDate));
Assert.AreEqual(new DateTime(refDate.Year - 1, 5, 2, 10, 30, 0), DateTimeUtil.FromUnknown("2 May 10:30", relativeFrom: refDate));

Assert.AreEqual(new DateTime(2020, 12, 31, 23, 59, 0), DateTimeUtil.FromUnknown("12-31 23:59", relativeFrom: new DateTime(2021, 12, 31, 23, 58, 59, DateTimeKind.Local)));
Assert.AreEqual(new DateTime(2020, 1, 1, 0, 1, 0), DateTimeUtil.FromUnknown("1-1 00:01", relativeFrom: new DateTime(2021, 1, 1, 0, 0, 0, DateTimeKind.Local)));
}

[Test]
@@ -149,8 +157,9 @@ namespace Jackett.Test.Common.Utils
DateTimeUtil.ParseDateTimeGoLang("21-06-2010 04:20:19 -04:00", "02-01-2006 15:04:05 -07:00"));
Assert.AreEqual(new DateTimeOffset(2010, 6, 21, 0, 0, 0, new TimeSpan(-5, -30, 0)).ToLocalTime().DateTime,
DateTimeUtil.ParseDateTimeGoLang("2010-06-21 -05:30", "2006-01-02 -07:00"));
Assert.AreEqual(new DateTime(now.Year, 9, 14, 7, 0, 0),
DateTimeUtil.ParseDateTimeGoLang("7am Sep. 14", "3pm Jan. 2"));
var refDate = new DateTime(2021, 03, 12, 12, 00, 00, DateTimeKind.Local);
Assert.AreEqual(new DateTime(refDate.Year - 1, 9, 14, 7, 0, 0),
DateTimeUtil.ParseDateTimeGoLang("7am Sep. 14", "3pm Jan. 2", relativeFrom:refDate));

// bad cases
try
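As a reading aid for the GoLang-layout tests above: a Go layout describes the fixed reference time "Mon Jan 2 15:04:05 -07:00 2006", which ParseDateTimeGoLang translates into a .NET format string before calling DateTime.ParseExact. The mapping below is a rough illustration, not the literal translation table from DateTimeUtil.

// Illustrative mapping only (assumed approximations):
//   Go layout "02-01-2006 15:04:05 -07:00"  ~  .NET "dd-MM-yyyy HH:mm:ss zzz"
//   Go layout "2006-01-02 -07:00"           ~  .NET "yyyy-MM-dd zzz"
//   Go layout "3pm Jan. 2"                  ~  .NET "htt MMM. d" (12-hour clock plus AM/PM designator)
var parsed = DateTimeUtil.ParseDateTimeGoLang("21-06-2010 04:20:19 -04:00", "02-01-2006 15:04:05 -07:00");
// parsed == 2010-06-21 04:20:19 at UTC-4, converted to the local time zone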