Mirror of https://github.com/Jackett/Jackett.git (synced 2025-09-17 17:34:09 +02:00)

Commit: abnormal: cleaning, removed old dev mode. (#11676)
@@ -3,10 +3,8 @@ using System.Collections.Generic;
 using System.Collections.Specialized;
 using System.Diagnostics.CodeAnalysis;
 using System.Globalization;
-using System.IO;
 using System.Linq;
 using System.Net;
-using System.Reflection;
 using System.Text;
 using System.Text.RegularExpressions;
 using System.Threading.Tasks;
@@ -18,7 +16,6 @@ using Jackett.Common.Models.IndexerConfig.Bespoke;
 using Jackett.Common.Services.Interfaces;
 using Jackett.Common.Utils;
 using Jackett.Common.Utils.Clients;
-using Newtonsoft.Json;
 using Newtonsoft.Json.Linq;
 using NLog;
 using WebClient = Jackett.Common.Utils.Clients.WebClient;
@@ -36,12 +33,6 @@ namespace Jackett.Common.Indexers
 private string SearchUrl => SiteLink + "torrents.php";
 private string DetailsUrl => SiteLink + "torrents.php?id=";
 private string ReplaceMulti => ConfigData.ReplaceMulti.Value;
-private bool Latency => ConfigData.Latency.Value;
-private bool DevMode => ConfigData.DevMode.Value;
-private bool CacheMode => ConfigData.HardDriveCache.Value;
-private static string Directory => Path.Combine(Path.GetTempPath(), Assembly.GetExecutingAssembly().GetName().Name.ToLower(), MethodBase.GetCurrentMethod().DeclaringType?.Name.ToLower());
-
-private readonly Dictionary<string, string> emulatedBrowserHeaders = new Dictionary<string, string>();

 private ConfigurationDataAbnormal ConfigData
 {
@@ -77,7 +68,7 @@ namespace Jackett.Common.Indexers
 Encoding = Encoding.UTF8;
 Type = "private";
 // NET::ERR_CERT_DATE_INVALID expired 29 July 2020
-w.AddTrustedCertificate(new Uri(SiteLink).Host, "9cb32582b564256146616afddbdb8e7c94c428ed");
+//w.AddTrustedCertificate(new Uri(SiteLink).Host, "9cb32582b564256146616afddbdb8e7c94c428ed");

 AddCategoryMapping("MOVIE|DVDR", TorznabCatType.MoviesDVD, "DVDR");
 AddCategoryMapping("MOVIE|DVDRIP", TorznabCatType.MoviesSD, "DVDRIP");
@@ -115,25 +106,7 @@ namespace Jackett.Common.Indexers
 LoadValuesFromJson(configJson);

 // Check & Validate Config
-validateConfig();
+ValidateConfig();

-// Setting our data for a better emulated browser (maximum security)
-// TODO: Encoded Content not supported by Jackett at this time
-// emulatedBrowserHeaders.Add("Accept-Encoding", "gzip, deflate");
-
-// If we want to simulate a browser
-if (ConfigData.Browser.Value)
-{
-// Clean headers
-emulatedBrowserHeaders.Clear();
-
-// Inject headers
-emulatedBrowserHeaders.Add("Accept", ConfigData.HeaderAccept.Value);
-emulatedBrowserHeaders.Add("Accept-Language", ConfigData.HeaderAcceptLang.Value);
-emulatedBrowserHeaders.Add("DNT", Convert.ToInt32(ConfigData.HeaderDNT.Value).ToString());
-emulatedBrowserHeaders.Add("Upgrade-Insecure-Requests", Convert.ToInt32(ConfigData.HeaderUpgradeInsecure.Value).ToString());
-emulatedBrowserHeaders.Add("User-Agent", ConfigData.HeaderUserAgent.Value);
-}
-
 // Getting login form to retrieve CSRF token
 var myRequest = new Utils.Clients.WebRequest
@@ -141,9 +114,6 @@ namespace Jackett.Common.Indexers
 Url = LoginUrl
 };

-// Add our headers to request
-myRequest.Headers = emulatedBrowserHeaders;
-
 // Building login form data
 var pairs = new Dictionary<string, string> {
 { "username", ConfigData.Username.Value },
@@ -158,13 +128,11 @@ namespace Jackett.Common.Indexers
 PostData = pairs,
 Referer = LoginUrl,
 Type = RequestType.POST,
-Url = LoginUrl,
-Headers = emulatedBrowserHeaders
+Url = LoginUrl
 };

 // Perform loggin
-latencyNow();
-output("\nPerform loggin.. with " + LoginUrl);
+logger.Info("\nAbnormal - Perform loggin.. with " + LoginUrl);
 var response = await webclient.GetResultAsync(request);

 // Test if we are logged in
@@ -179,11 +147,11 @@ namespace Jackett.Common.Indexers
 var left = dom.QuerySelector(".info").TextContent.Trim();

 // Oops, unable to login
-output("-> Login failed: \"" + message + "\" and " + left + " tries left before being banned for 6 hours !", "error");
+logger.Info("Abnormal - Login failed: \"" + message + "\" and " + left + " tries left before being banned for 6 hours !", "error");
 throw new ExceptionWithConfigData("Login failed: " + message, configData);
 });

-output("-> Login Success");
+logger.Info("-> Login Success");

 return IndexerConfigurationStatus.RequiresTesting;
 }
@@ -206,41 +174,26 @@ namespace Jackett.Common.Indexers
 var qRowList = new List<IElement>();
 var searchTerm = query.GetQueryString();
 var searchUrl = SearchUrl;
-var nbResults = 0;
-var pageLinkCount = 0;
-
-// Check cache first so we don't query the server (if search term used or not in dev mode)
-if (!DevMode && !string.IsNullOrEmpty(searchTerm))
-{
-lock (cache)
-{
-// Remove old cache items
-CleanCache();
-
-// Search in cache
-var cachedResult = cache.Where(i => i.Query == searchTerm).FirstOrDefault();
-if (cachedResult != null)
-return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
-}
-}

 // Build our query
-var request = buildQuery(searchTerm, query, searchUrl);
+var request = BuildQuery(searchTerm, query, searchUrl);

 // Getting results & Store content
 var parser = new HtmlParser();
-var dom = parser.ParseDocument(await queryExec(request));
+var dom = parser.ParseDocument(await QueryExecAsync(request));

 try
 {
 // Find torrent rows
-var firstPageRows = findTorrentRows(dom);
+var firstPageRows = FindTorrentRows(dom);

 // Add them to torrents list
 qRowList.AddRange(firstPageRows);

 // Check if there are pagination links at bottom
 var qPagination = dom.QuerySelectorAll(".linkbox > a");
+int pageLinkCount;
+int nbResults;
 if (qPagination.Length > 0)
 {
 // Calculate numbers of pages available for this search query (Based on number results and number of torrents on first page)
@@ -260,13 +213,13 @@ namespace Jackett.Common.Indexers
 }
 else
 {
-output("\nNo result found for your query, please try another search term ...\n", "info");
+logger.Info("\nAbnormal - No result found for your query, please try another search term ...\n", "info");
 // No result found for this query
 return releases;
 }
 }
-output("\nFound " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !");
-output("\nThere are " + firstPageRows.Length + " results on the first page !");
+logger.Info("\nAbnormal - Found " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !");
+logger.Info("\nAbnormal - There are " + firstPageRows.Length + " results on the first page !");

 // If we have a term used for search and pagination result superior to one
 if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1)
@@ -274,20 +227,17 @@ namespace Jackett.Common.Indexers
 // Starting with page #2
 for (var i = 2; i <= Math.Min(int.Parse(ConfigData.Pages.Value), pageLinkCount); i++)
 {
-output("\nProcessing page #" + i);
+logger.Info("\nAbnormal - Processing page #" + i);

-// Request our page
-latencyNow();
-
 // Build our query
-var pageRequest = buildQuery(searchTerm, query, searchUrl, i);
+var pageRequest = BuildQuery(searchTerm, query, searchUrl, i);

 // Getting results & Store content
 parser = new HtmlParser();
-dom = parser.ParseDocument(await queryExec(pageRequest));
+dom = parser.ParseDocument(await QueryExecAsync(pageRequest));

 // Process page results
-var additionalPageRows = findTorrentRows(dom);
+var additionalPageRows = FindTorrentRows(dom);

 // Add them to torrents list
 qRowList.AddRange(additionalPageRows);
@@ -297,11 +247,8 @@ namespace Jackett.Common.Indexers
 // Loop on results
 foreach (var row in qRowList)
 {
-output("\n=>> Torrent #" + (releases.Count + 1));
-
 // ID
 var id = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(2) > a").GetAttribute("href"), @"\d+").Value);
-output("ID: " + id);

 // Release Name
 var name = row.QuerySelector("td:nth-of-type(2) > a").TextContent;
@@ -311,39 +258,22 @@ namespace Jackett.Common.Indexers
 var regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
 name = regex.Replace(name, "$1" + ReplaceMulti + "$2");
 }
-output("Release: " + name);

-// Category
-var categoryId = row.QuerySelector("td:nth-of-type(1) > a").GetAttribute("href").Replace("torrents.php?cat[]=", string.Empty);
-var newznab = MapTrackerCatToNewznab(categoryId);
-output("Category: " + MapTrackerCatToNewznab(categoryId).First().ToString() + " (" + categoryId + ")");
-
-// Seeders
-var seeders = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(6)").TextContent, @"\d+").Value);
-output("Seeders: " + seeders);
+var categoryId = row.QuerySelector("td:nth-of-type(1) > a").GetAttribute("href").Replace("torrents.php?cat[]=", string.Empty); // Category
+var newznab = MapTrackerCatToNewznab(categoryId); // Newznab Category
+var seeders = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(6)").TextContent, @"\d+").Value); // Seeders
+var leechers = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(7)").TextContent, @"\d+").Value); // Leechers
+var completed = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(6)").TextContent, @"\d+").Value); // Completed
+var sizeStr = row.QuerySelector("td:nth-of-type(5)").TextContent.Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb"); // Size
+var size = ReleaseInfo.GetBytes(sizeStr); // Size in bytes

-// Leechers
-var leechers = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(7)").TextContent, @"\d+").Value);
-output("Leechers: " + leechers);
-
-// Completed
-var completed = ParseUtil.CoerceInt(Regex.Match(row.QuerySelector("td:nth-of-type(6)").TextContent, @"\d+").Value);
-output("Completed: " + completed);
-
-// Size
-var sizeStr = row.QuerySelector("td:nth-of-type(5)").TextContent.Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb");
-var size = ReleaseInfo.GetBytes(sizeStr);
-output("Size: " + sizeStr + " (" + size + " bytes)");
-
 // Publish DateToString
 var datestr = row.QuerySelector("span.time").GetAttribute("title");
 var dateLocal = DateTime.SpecifyKind(DateTime.ParseExact(datestr, "MMM dd yyyy, HH:mm", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
 var date = TimeZoneInfo.ConvertTimeToUtc(dateLocal, FranceTz);
-output("Released on: " + date);

 // Torrent Details URL
 var details = new Uri(DetailsUrl + id);
-output("Details: " + details.AbsoluteUri);

 // Torrent Download URL
 Uri downloadLink = null;
@@ -352,12 +282,11 @@ namespace Jackett.Common.Indexers
 {
 // Download link available
 downloadLink = new Uri(SiteLink + link);
-output("Download Link: " + downloadLink.AbsoluteUri);
 }
 else
 {
 // No download link available -- Must be on pending ( can't be downloaded now...)
-output("Download Link: Not available, torrent pending ? Skipping ...");
+logger.Info("Abnormal - Download Link: Not available, torrent pending ? Skipping ...");
 continue;
 }

@@ -366,7 +295,6 @@ namespace Jackett.Common.Indexers
 if (row.QuerySelector("img[alt=\"Freeleech\"]") != null)
 {
 downloadVolumeFactor = 0;
-output("FreeLeech =)");
 }

 // Building release infos
@@ -387,6 +315,7 @@ namespace Jackett.Common.Indexers
 DownloadVolumeFactor = downloadVolumeFactor
 };
 releases.Add(release);
+logger.Info("Abnormal - Found Release: " + release.Title + "(" + id + ")");
 }
 }
 catch (Exception ex)
@@ -406,7 +335,7 @@ namespace Jackett.Common.Indexers
 /// <param name="url">Search url for provider</param>
 /// <param name="page">Page number to request</param>
 /// <returns>URL to query for parsing and processing results</returns>
-private string buildQuery(string term, TorznabQuery query, string url, int page = 0)
+private string BuildQuery(string term, TorznabQuery query, string url, int page = 0)
 {
 var parameters = new NameValueCollection();
 var categoriesList = MapTorznabCapsToTrackers(query);
@@ -451,7 +380,7 @@ namespace Jackett.Common.Indexers
 // Building our query -- Cannot use GetQueryString due to UrlEncode (generating wrong cat[] param)
 url += "?" + string.Join("&", parameters.AllKeys.Select(a => a + "=" + parameters[a]));

-output("\nBuilded query for \"" + term + "\"... " + url);
+logger.Info("\nAbnormal - Builded query for \"" + term + "\"... " + url);

 // Return our search url
 return url;
@@ -462,77 +391,13 @@ namespace Jackett.Common.Indexers
 /// </summary>
 /// <param name="request">URL created by Query Builder</param>
 /// <returns>Results from query</returns>
-private async Task<string> queryExec(string request)
+private async Task<string> QueryExecAsync(string request)
 {
 string results = null;

-// Switch in we are in DEV mode with Hard Drive Cache or not
-if (DevMode && CacheMode)
-{
-// Check Cache before querying and load previous results if available
-results = await queryCache(request);
-}
-else
-{
-// Querying tracker directly
-results = await queryTracker(request);
-}
-return results;
-}
+// Querying tracker directly
+results = await QueryTrackerAsync(request);

-/// <summary>
-/// Get Torrents Page from Cache by Query Provided
-/// </summary>
-/// <param name="request">URL created by Query Builder</param>
-/// <returns>Results from query</returns>
-private async Task<string> queryCache(string request)
-{
-string results;
-
-// Create Directory if not exist
-System.IO.Directory.CreateDirectory(Directory);
-
-// Clean Storage Provider Directory from outdated cached queries
-cleanCacheStorage();
-
-// File Name
-var fileName = StringUtil.HashSHA1(request) + ".json";
-
-// Create fingerprint for request
-var file = Path.Combine(Directory, fileName);
-
-// Checking modes states
-if (File.Exists(file))
-{
-// File exist... loading it right now !
-output("Loading results from hard drive cache ..." + fileName);
-try
-{
-using (var fileReader = File.OpenText(file))
-{
-var serializer = new JsonSerializer();
-results = (string)serializer.Deserialize(fileReader, typeof(string));
-}
-}
-catch (Exception e)
-{
-output("Error loading cached results ! " + e.Message, "error");
-results = null;
-}
-}
-else
-{
-// No cached file found, querying tracker directly
-results = await queryTracker(request);
-
-// Cached file didn't exist for our query, writing it right now !
-output("Writing results to hard drive cache ..." + fileName);
-using (var fileWriter = File.CreateText(file))
-{
-var serializer = new JsonSerializer();
-serializer.Serialize(fileWriter, results);
-}
-}
 return results;
 }

@@ -541,142 +406,31 @@ namespace Jackett.Common.Indexers
 /// </summary>
 /// <param name="request">URL created by Query Builder</param>
 /// <returns>Results from query</returns>
-private async Task<string> queryTracker(string request)
+private async Task<string> QueryTrackerAsync(string request)
 {
 // Cache mode not enabled or cached file didn't exist for our query
-output("\nQuerying tracker for results....");
+logger.Info("\nAbnormal - Querying tracker for results....");

 // Request our first page
-latencyNow();
-var results = await RequestWithCookiesAndRetryAsync(request, headers: emulatedBrowserHeaders);
+var results = await RequestWithCookiesAndRetryAsync(request);

 // Return results from tracker
 return results.ContentString;
 }

-/// <summary>
-/// Clean Hard Drive Cache Storage
-/// </summary>
-/// <param name="force">Force Provider Folder deletion</param>
-private void cleanCacheStorage(bool force = false)
-{
-// Check cleaning method
-if (force)
-{
-// Deleting Provider Storage folder and all files recursively
-output("\nDeleting Provider Storage folder and all files recursively ...");
-
-// Check if directory exist
-if (System.IO.Directory.Exists(Directory))
-{
-// Delete storage directory of provider
-System.IO.Directory.Delete(Directory, true);
-output("-> Storage folder deleted successfully.");
-}
-else
-{
-// No directory, so nothing to do
-output("-> No Storage folder found for this provider !");
-}
-}
-else
-{
-var i = 0;
-// Check if there is file older than ... and delete them
-output("\nCleaning Provider Storage folder... in progress.");
-System.IO.Directory.GetFiles(Directory)
-.Select(f => new FileInfo(f))
-.Where(f => f.LastAccessTime < DateTime.Now.AddMilliseconds(-Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value)))
-.ToList()
-.ForEach(f =>
-{
-output("Deleting cached file << " + f.Name + " >> ... done.");
-f.Delete();
-i++;
-});
-
-// Inform on what was cleaned during process
-if (i > 0)
-{
-output("-> Deleted " + i + " cached files during cleaning.");
-}
-else
-{
-output("-> Nothing deleted during cleaning.");
-}
-}
-}
-
-/// <summary>
-/// Generate a random fake latency to avoid detection on tracker side
-/// </summary>
-private void latencyNow()
-{
-// Need latency ?
-if (Latency)
-{
-// Generate a random value in our range
-var random = new Random(DateTime.Now.Millisecond);
-var waiting = random.Next(Convert.ToInt32(ConfigData.LatencyStart.Value), Convert.ToInt32(ConfigData.LatencyEnd.Value));
-output("\nLatency Faker => Sleeping for " + waiting + " ms...");
-
-// Sleep now...
-System.Threading.Thread.Sleep(waiting);
-}
-}
-
 /// <summary>
 /// Find torrent rows in search pages
 /// </summary>
 /// <returns>List of rows</returns>
-private IHtmlCollection<IElement> findTorrentRows(IHtmlDocument dom) =>
+private IHtmlCollection<IElement> FindTorrentRows(IHtmlDocument dom) =>
 dom.QuerySelectorAll(".torrent_table > tbody > tr:not(.colhead)");

-/// <summary>
-/// Output message for logging or developpment (console)
-/// </summary>
-/// <param name="message">Message to output</param>
-/// <param name="level">Level for Logger</param>
-private void output(string message, string level = "debug")
-{
-// Check if we are in dev mode
-if (DevMode)
-{
-// Output message to console
-Console.WriteLine(message);
-}
-else
-{
-// Send message to logger with level
-switch (level)
-{
-default:
-goto case "debug";
-case "debug":
-// Only if Debug Level Enabled on Jackett
-if (logger.IsDebugEnabled)
-{
-logger.Debug(message);
-}
-break;
-
-case "info":
-logger.Info(message);
-break;
-
-case "error":
-logger.Error(message);
-break;
-}
-}
-}
-
 /// <summary>
 /// Validate Config entered by user on Jackett
 /// </summary>
-private void validateConfig()
+private void ValidateConfig()
 {
-output("\nValidating Settings ... \n");
+logger.Info("\nAbnormal - Validating Settings ... \n");

 // Check Username Setting
 if (string.IsNullOrEmpty(ConfigData.Username.Value))
@@ -685,7 +439,7 @@ namespace Jackett.Common.Indexers
 }
 else
 {
-output("Validated Setting -- Username (auth) => " + ConfigData.Username.Value.ToString());
+logger.Info("Abnormal - Validated Setting -- Username (auth) => " + ConfigData.Username.Value.ToString());
 }

 // Check Password Setting
@@ -695,7 +449,7 @@ namespace Jackett.Common.Indexers
 }
 else
 {
-output("Validated Setting -- Password (auth) => " + ConfigData.Password.Value.ToString());
+logger.Info("Abnormal - Validated Setting -- Password (auth) => " + ConfigData.Password.Value.ToString());
 }

 // Check Max Page Setting
@@ -703,7 +457,7 @@ namespace Jackett.Common.Indexers
 {
 try
 {
-output("Validated Setting -- Max Pages => " + Convert.ToInt32(ConfigData.Pages.Value));
+logger.Info("Abnormal - Validated Setting -- Max Pages => " + Convert.ToInt32(ConfigData.Pages.Value));
 }
 catch (Exception)
 {
@@ -714,116 +468,6 @@ namespace Jackett.Common.Indexers
 {
 throw new ExceptionWithConfigData("Please enter a maximum number of pages to crawl !", ConfigData);
 }
-
-// Check Latency Setting
-if (ConfigData.Latency.Value)
-{
-output("\nValidated Setting -- Latency Simulation enabled");
-
-// Check Latency Start Setting
-if (!string.IsNullOrEmpty(ConfigData.LatencyStart.Value))
-{
-try
-{
-output("Validated Setting -- Latency Start => " + Convert.ToInt32(ConfigData.LatencyStart.Value));
-}
-catch (Exception)
-{
-throw new ExceptionWithConfigData("Please enter a numeric latency start in ms !", ConfigData);
-}
-}
-else
-{
-throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a start latency !", ConfigData);
-}
-
-// Check Latency End Setting
-if (!string.IsNullOrEmpty(ConfigData.LatencyEnd.Value))
-{
-try
-{
-output("Validated Setting -- Latency End => " + Convert.ToInt32(ConfigData.LatencyEnd.Value));
-}
-catch (Exception)
-{
-throw new ExceptionWithConfigData("Please enter a numeric latency end in ms !", ConfigData);
-}
-}
-else
-{
-throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a end latency !", ConfigData);
-}
-}
-
-// Check Browser Setting
-if (ConfigData.Browser.Value)
-{
-output("\nValidated Setting -- Browser Simulation enabled");
-
-// Check ACCEPT header Setting
-if (string.IsNullOrEmpty(ConfigData.HeaderAccept.Value))
-{
-throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT header !", ConfigData);
-}
-else
-{
-output("Validated Setting -- ACCEPT (header) => " + ConfigData.HeaderAccept.Value.ToString());
-}
-
-// Check ACCEPT-LANG header Setting
-if (string.IsNullOrEmpty(ConfigData.HeaderAcceptLang.Value))
-{
-throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT-LANG header !", ConfigData);
-}
-else
-{
-output("Validated Setting -- ACCEPT-LANG (header) => " + ConfigData.HeaderAcceptLang.Value.ToString());
-}
-
-// Check USER-AGENT header Setting
-if (string.IsNullOrEmpty(ConfigData.HeaderUserAgent.Value))
-{
-throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an USER-AGENT header !", ConfigData);
-}
-else
-{
-output("Validated Setting -- USER-AGENT (header) => " + ConfigData.HeaderUserAgent.Value.ToString());
-}
-}
-
-// Check Dev Cache Settings
-if (ConfigData.HardDriveCache.Value == true)
-{
-output("\nValidated Setting -- DEV Hard Drive Cache enabled");
-
-// Check if Dev Mode enabled !
-if (!ConfigData.DevMode.Value)
-{
-throw new ExceptionWithConfigData("Hard Drive is enabled but not in DEV MODE, Please enable DEV MODE !", ConfigData);
-}
-
-// Check Cache Keep Time Setting
-if (!string.IsNullOrEmpty(ConfigData.HardDriveCacheKeepTime.Value))
-{
-try
-{
-output("Validated Setting -- Cache Keep Time (ms) => " + Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value));
-}
-catch (Exception)
-{
-throw new ExceptionWithConfigData("Please enter a numeric hard drive keep time in ms !", ConfigData);
-}
-}
-else
-{
-throw new ExceptionWithConfigData("Hard Drive Cache enabled, Please enter a maximum keep time for cache !", ConfigData);
-}
-}
-else
-{
-// Delete cache if previously existed
-cleanCacheStorage(true);
-}
 }
 }
 }
@@ -13,22 +13,6 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
 public DisplayInfoConfigurationItem PagesWarning { get; private set; }
 public StringConfigurationItem ReplaceMulti { get; private set; }
 public StringConfigurationItem Pages { get; private set; }
-public DisplayInfoConfigurationItem SecurityWarning { get; private set; }
-public BoolConfigurationItem Latency { get; private set; }
-public BoolConfigurationItem Browser { get; private set; }
-public DisplayInfoConfigurationItem LatencyWarning { get; private set; }
-public StringConfigurationItem LatencyStart { get; private set; }
-public StringConfigurationItem LatencyEnd { get; private set; }
-public DisplayInfoConfigurationItem HeadersWarning { get; private set; }
-public StringConfigurationItem HeaderAccept { get; private set; }
-public StringConfigurationItem HeaderAcceptLang { get; private set; }
-public BoolConfigurationItem HeaderDNT { get; private set; }
-public BoolConfigurationItem HeaderUpgradeInsecure { get; private set; }
-public StringConfigurationItem HeaderUserAgent { get; private set; }
-public DisplayInfoConfigurationItem DevWarning { get; private set; }
-public BoolConfigurationItem DevMode { get; private set; }
-public BoolConfigurationItem HardDriveCache { get; private set; }
-public StringConfigurationItem HardDriveCacheKeepTime { get; private set; }

 public ConfigurationDataAbnormal()
 : base()
@@ -41,22 +25,6 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
 PagesWarning = new DisplayInfoConfigurationItem("Preferences", "<b>Preferences Configuration</b> (<i>Tweak your search settings</i>),<br /><br /> <ul><li><b>Replace MULTI</b>, replace multi keyword in the resultset (leave empty to deactivate)</li><li><b>Max Pages to Process</b> let you specify how many page (max) Jackett can process when doing a search. Setting a value <b>higher than 4 is dangerous</b> for you account ! (<b>Result of too many requests to tracker...that <u>will be suspect</u></b>).</li></ul>");
 Pages = new StringConfigurationItem("Max Pages to Process (Required)") { Value = "4" };
 ReplaceMulti = new StringConfigurationItem("Replace MULTI") { Value = "MULTI.FRENCH" };
-SecurityWarning = new DisplayInfoConfigurationItem("Security", "<b>Security Configuration</b> (<i>Read this area carefully !</i>),<br /><br /> <ul><li><b>Latency Simulation</b> will simulate human browsing with Jacket by pausing Jacket for an random time between each request, to fake a real content browsing.</li><li><b>Browser Simulation</b> will simulate a real human browser by injecting additionals headers when doing requests to tracker.</li></ul>");
-Latency = new BoolConfigurationItem("Latency Simulation (Optional)") { Value = false };
-Browser = new BoolConfigurationItem("Browser Simulation (Optional)") { Value = true };
-LatencyWarning = new DisplayInfoConfigurationItem("Simulate Latency", "<b>Latency Configuration</b> (<i>Required if latency simulation enabled</i>),<br /><br/> <ul><li>By filling this range, <b>Jackett will make a random timed pause</b> <u>between requests</u> to tracker <u>to simulate a real browser</u>.</li><li>MilliSeconds <b>only</b></li></ul>");
-LatencyStart = new StringConfigurationItem("Minimum Latency (ms)") { Value = "1589" };
-LatencyEnd = new StringConfigurationItem("Maximum Latency (ms)") { Value = "3674" };
-HeadersWarning = new DisplayInfoConfigurationItem("Injecting headers", "<b>Browser Headers Configuration</b> (<i>Required if browser simulation enabled</i>),<br /><br /> <ul><li>By filling these fields, <b>Jackett will inject headers</b> with your values <u>to simulate a real browser</u>.</li><li>You can get <b>your browser values</b> here: <a href='https://www.whatismybrowser.com/detect/what-http-headers-is-my-browser-sending' target='blank'>www.whatismybrowser.com</a></li></ul><br /><i><b>Note that</b> some headers are not necessary because they are injected automatically by this provider such as Accept_Encoding, Connection, Host or X-Requested-With</i>");
-HeaderAccept = new StringConfigurationItem("Accept") { Value = "" };
-HeaderAcceptLang = new StringConfigurationItem("Accept-Language") { Value = "" };
-HeaderDNT = new BoolConfigurationItem("DNT") { Value = false };
-HeaderUpgradeInsecure = new BoolConfigurationItem("Upgrade-Insecure-Requests") { Value = false };
-HeaderUserAgent = new StringConfigurationItem("User-Agent") { Value = "" };
-DevWarning = new DisplayInfoConfigurationItem("Development", "<b>Development Facility</b> (<i>For Developers ONLY</i>),<br /><br /> <ul><li>By enabling development mode, <b>Jackett will bypass his cache</b> and will <u>output debug messages to console</u> instead of his log file.</li><li>By enabling Hard Drive Cache, <b>This provider</b> will <u>save each query answers from tracker</u> in temp directory, in fact this reduce drastically HTTP requests when building a provider at parsing step for example. So, <b> Jackett will search for a cached query answer on hard drive before executing query on tracker side !</b> <i>DEV MODE must be enabled to use it !</li></ul>");
-DevMode = new BoolConfigurationItem("Enable DEV MODE (Developers ONLY)") { Value = false };
-HardDriveCache = new BoolConfigurationItem("Enable HARD DRIVE CACHE (Developers ONLY)") { Value = false };
-HardDriveCacheKeepTime = new StringConfigurationItem("Keep Cached files for (ms)") { Value = "300000" };
 }
 }
 }
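For orientation only, a minimal C# sketch of the logging pattern this commit standardises on: the indexer's DevMode-gated output(message, level) helper is dropped in favour of calling the NLog logger directly, with messages prefixed by the indexer name. The class and method names below (LoggingSketch, Demo) are invented for the example and are not Jackett types; only the NLog API and the message style come from the diff.

using NLog;

// Illustrative stand-alone example (not Jackett source) of logging directly via NLog
// instead of routing through a dev-mode console switch.
public class LoggingSketch
{
    // NLog logger resolved for the current class, as NLog normally provides it.
    private static readonly Logger logger = LogManager.GetCurrentClassLogger();

    public void Demo(string loginUrl)
    {
        // Old style (removed by the commit): output("\nPerform loggin.. with " + loginUrl);
        // New style: log straight to NLog, prefixed with the indexer name.
        logger.Info("\nAbnormal - Perform loggin.. with " + loginUrl);
    }
}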