diff --git a/README.md b/README.md
index a6ad9226f..0bb15d788 100644
--- a/README.md
+++ b/README.md
@@ -41,6 +41,7 @@ Developer note: The software implements the [Torznab](https://github.com/Sonarr/
* MoreThanTV
* NCore
* NextGen
+ * PhxBit
* Pretome
* PrivateHD
* RevolutionTT
diff --git a/src/Jackett/Content/logos/phxbit.png b/src/Jackett/Content/logos/phxbit.png
new file mode 100644
index 000000000..3a8a42a95
Binary files /dev/null and b/src/Jackett/Content/logos/phxbit.png differ
diff --git a/src/Jackett/Indexers/PhxBit.cs b/src/Jackett/Indexers/PhxBit.cs
new file mode 100644
index 000000000..008adf2bb
--- /dev/null
+++ b/src/Jackett/Indexers/PhxBit.cs
@@ -0,0 +1,802 @@
+using System;
+using System.Collections.Generic;
+using System.Collections.Specialized;
+using System.Linq;
+using System.Reflection;
+using System.Text.RegularExpressions;
+using System.Threading.Tasks;
+using CsQuery;
+using Jackett.Models;
+using Jackett.Models.IndexerConfig.Bespoke;
+using Jackett.Services;
+using Jackett.Utils;
+using Jackett.Utils.Clients;
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using NLog;
+
+namespace Jackett.Indexers
+{
+ /// <summary>
+ /// Provider for PhxBit Private French Tracker
+ /// </summary>
+ public class PhxBit : BaseIndexer, IIndexer
+ {
+ private string LoginUrl { get { return SiteLink + "connect.php"; } }
+ private string SearchUrl { get { return SiteLink + "sphinx.php"; } }
+ private string TorrentCommentUrl { get { return TorrentDescriptionUrl; } }
+ private string TorrentDescriptionUrl { get { return SiteLink + "torrent.php?id="; } }
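+ // Note: the {id} and {passkey} placeholders below are substituted per release when the download link is built in PerformQuery()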
+ private string TorrentDownloadUrl { get { return SiteLink + "get.php?action=torrent&id={id}&passkey={passkey}"; } }
+ private bool Latency { get { return ConfigData.Latency.Value; } }
+ private bool DevMode { get { return ConfigData.DevMode.Value; } }
+ private bool CacheMode { get { return ConfigData.HardDriveCache.Value; } }
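+ // Per-indexer hard drive cache directory (e.g. "<temp>\Jackett\PhxBit\"); the folder name is taken from the declaring type via reflection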
+ private string directory { get { return System.IO.Path.GetTempPath() + "Jackett\\" + MethodBase.GetCurrentMethod().DeclaringType.Name + "\\"; } }
+
+ private Dictionary<string, string> emulatedBrowserHeaders = new Dictionary<string, string>();
+ private CQ fDom = null;
+
+ private ConfigurationDataPhxBit ConfigData
+ {
+ get { return (ConfigurationDataPhxBit)configData; }
+ set { base.configData = value; }
+ }
+
+ public PhxBit(IIndexerManagerService i, IWebClient w, Logger l, IProtectionService ps)
+ : base(
+ name: "PhxBit",
+ description: "General French Private Tracker",
+ link: "https://phxbit.com/",
+ caps: new TorznabCapabilities(),
+ manager: i,
+ client: w,
+ logger: l,
+ p: ps,
+ downloadBase: "https://phxbit.com/torrents.php?action=download&id=",
+ configData: new ConfigurationDataPhxBit())
+ {
+ // Clean capabilities
+ TorznabCaps.Categories.Clear();
+
+ // Movies
+ AddCategoryMapping(3, TorznabCatType.MoviesSD); // DVDRIP
+ AddCategoryMapping(33, TorznabCatType.MoviesSD); // WEBRIP
+ AddCategoryMapping(4, TorznabCatType.MoviesSD); // BRRIP/BDRIP
+ AddCategoryMapping(6, TorznabCatType.MoviesHD); // HD 720P
+ AddCategoryMapping(2, TorznabCatType.MoviesHD); // HD 1080P
+ AddCategoryMapping(5, TorznabCatType.MoviesBluRay); // FULL BLURAY
+ AddCategoryMapping(32, TorznabCatType.MoviesBluRay); // FULL BLURAY 3D
+ AddCategoryMapping(7, TorznabCatType.MoviesDVD); // FULL DVD
+
+ // Series
+ AddCategoryMapping(14, TorznabCatType.TVSD); // SD VOSTFR
+ AddCategoryMapping(16, TorznabCatType.TVHD); // HD VOSTFR
+ AddCategoryMapping(13, TorznabCatType.TVSD); // SD VF
+ AddCategoryMapping(15, TorznabCatType.TVHD); // HD VF
+ AddCategoryMapping(12, TorznabCatType.TVOTHER); // PACK
+ AddCategoryMapping(26, TorznabCatType.TVOTHER); // PACK VOSTFR
+ AddCategoryMapping(24, TorznabCatType.TVOTHER); // EMISSIONS
+ AddCategoryMapping(34, TorznabCatType.TVOTHER); // EMISSIONS
+ AddCategoryMapping(29, TorznabCatType.TVOTHER); // BDRIP VOSTFR
+
+ // Anime
+ AddCategoryMapping(1, TorznabCatType.TVAnime); // ANIME
+
+ // Documentaries
+ AddCategoryMapping(17, TorznabCatType.TVDocumentary); // DOCS
+
+ // Music
+ AddCategoryMapping(10, TorznabCatType.AudioLossless); // FLAC
+ AddCategoryMapping(9, TorznabCatType.AudioMP3); // MP3
+ AddCategoryMapping(25, TorznabCatType.AudioVideo); // CONCERT
+
+ // Other
+ AddCategoryMapping(27, TorznabCatType.PC); // PC
+ AddCategoryMapping(20, TorznabCatType.PCMac); // PC
+ AddCategoryMapping(19, TorznabCatType.PCGames); // GAMES
+ AddCategoryMapping(21, TorznabCatType.ConsoleXbox360); // GAMES
+ AddCategoryMapping(22, TorznabCatType.ConsoleWii); // GAMES
+ AddCategoryMapping(22, TorznabCatType.ConsolePS3); // GAMES
+ AddCategoryMapping(30, TorznabCatType.ConsolePSP); // GAMES
+ AddCategoryMapping(31, TorznabCatType.ConsoleNDS); // GAMES
+ AddCategoryMapping(8, TorznabCatType.BooksEbook); // EBOOKS
+ AddCategoryMapping(28, TorznabCatType.BooksEbook); // EBOOKS MANGA
+ AddCategoryMapping(11, TorznabCatType.BooksEbook); // EBOOKS AUDIO
+ AddCategoryMapping(35, TorznabCatType.PCPhoneAndroid); // ANDROID
+ }
+
+ /// <summary>
+ /// Configure our Provider
+ /// </summary>
+ /// <param name="configJson">Our params in Json</param>
+ /// <returns>Configuration state</returns>
+ public async Task<IndexerConfigurationStatus> ApplyConfiguration(JToken configJson)
+ {
+ // Retrieve config values set by Jackett's user
+ ConfigData.LoadValuesFromJson(configJson);
+
+ // Check & Validate Config
+ validateConfig();
+
+ // Setting our data for a better emulated browser (maximum security)
+ // TODO: Encoded Content not supported by Jackett at this time
+ // emulatedBrowserHeaders.Add("Accept-Encoding", "gzip, deflate");
+
+ // If we want to simulate a browser
+ if (ConfigData.Browser.Value) {
+
+ // Clean headers
+ emulatedBrowserHeaders.Clear();
+
+ // Inject headers
+ emulatedBrowserHeaders.Add("Accept", ConfigData.HeaderAccept.Value);
+ emulatedBrowserHeaders.Add("Accept-Language", ConfigData.HeaderAcceptLang.Value);
+ emulatedBrowserHeaders.Add("DNT", Convert.ToInt32(ConfigData.HeaderDNT.Value).ToString());
+ emulatedBrowserHeaders.Add("Upgrade-Insecure-Requests", Convert.ToInt32(ConfigData.HeaderUpgradeInsecure.Value).ToString());
+ emulatedBrowserHeaders.Add("User-Agent", ConfigData.HeaderUserAgent.Value);
+ }
+
+
+ // Getting login form to retrieve CSRF token
+ /*var myRequest = new Utils.Clients.WebRequest()
+ {
+ Url = LoginUrl
+ };*/
+
+ // Add our headers to request
+ //myRequest.Headers = emulatedBrowserHeaders;
+
+ // Building login form data
+ var pairs = new Dictionary<string, string> {
+ { "username", ConfigData.Username.Value },
+ { "password", ConfigData.Password.Value }
+ };
+
+ // Do the login
+ var request = new Utils.Clients.WebRequest(){
+ PostData = pairs,
+ Referer = LoginUrl,
+ Type = RequestType.POST,
+ Url = LoginUrl,
+ Headers = emulatedBrowserHeaders
+ };
+
+ // Perform login
+ latencyNow();
+ output("\nPerforming login... with " + LoginUrl);
+ var response = await webclient.GetString(request);
+
+ // Test if we are logged in
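+ // (the tracker appears to reset its session cookies to "deleted" when credentials are rejected, hence the check below)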
+ await ConfigureIfOK(response.Cookies, !response.Cookies.Contains("deleted"), () =>
+ {
+ // Parse error page
+ CQ dom = response.Content;
+ string message = dom[".error"].Text().Trim().Replace("X\n\t\t", "").Replace("\n\t\tX", "");
+
+ // Oops, unable to login
+ output("-> Login failed: \"" + message + "\".", "error");
+ throw new ExceptionWithConfigData("Login failed: << " + message + " >>", configData);
+ });
+
+ output("-> Login Success");
+
+ return IndexerConfigurationStatus.RequiresTesting;
+ }
+
+ /// <summary>
+ /// Execute our search query
+ /// </summary>
+ /// <param name="query">Query</param>
+ /// <returns>Releases</returns>
+ public async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
+ {
+ var releases = new List<ReleaseInfo>();
+ var torrentRowList = new List<CQ>();
+ var searchTerm = query.GetQueryString();
+ var searchUrl = SearchUrl;
+ int nbResults = 0;
+ int pageLinkCount = 0;
+
+ // Check cache first so we don't query the server (if search term used or not in dev mode)
+ if(!DevMode && !string.IsNullOrEmpty(searchTerm))
+ {
+ lock (cache)
+ {
+ // Remove old cache items
+ CleanCache();
+
+ // Search in cache
+ var cachedResult = cache.Where(i => i.Query == searchTerm).FirstOrDefault();
+ if (cachedResult != null)
+ return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
+ }
+ }
+
+ // Build our query
+ var request = buildQuery(searchTerm, query, searchUrl);
+
+ // Getting results & Store content
+ WebClientStringResult results = await queryExec(request);
+ fDom = results.Content;
+
+ try
+ {
+ // Find torrent rows
+ var firstPageRows = findTorrentRows();
+
+ // Add them to torrents list
+ torrentRowList.AddRange(firstPageRows.Select(fRow => fRow.Cq()));
+
+ // Check if there are pagination links at bottom
+ Boolean pagination = (fDom[".pager_align > a"].Length != 0);
+
+ // If pagination available
+ if (pagination) {
+ // Calculate the number of pages available for this search query (based on the number of results and the number of torrents on the first page)
+ pageLinkCount = ParseUtil.CoerceInt(Regex.Match(fDom[".pager_align > a:not(:last-child)"].Last().Attr("href").ToString(), @"\d+").Value) + 1;
+
+ // Estimate the total number of results (based on the number of torrent rows on the first page)
+ nbResults = firstPageRows.Count() * pageLinkCount;
+ }
+ else {
+ // Check if we have a minimum of one result
+ if (firstPageRows.Length >= 1)
+ {
+ // Retrieve the total count from our single page of results
+ nbResults = firstPageRows.Count();
+ pageLinkCount = 1;
+ }
+ else
+ {
+ output("\nNo result found for your query, please try another search term ...\n", "info");
+ // No result found for this query
+ return releases;
+ }
+ }
+ output("\nFound " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !");
+ output("\nThere are " + firstPageRows.Length + " results on the first page !");
+
+ // If we have a term used for search and pagination result superior to one
+ if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1)
+ {
+ // Starting with page #2
+ for (int i = 2; i <= Math.Min(Int32.Parse(ConfigData.Pages.Value), pageLinkCount); i++)
+ {
+ output("\nProcessing page #" + i);
+
+ // Request our page
+ latencyNow();
+
+ // Build our query
+ var pageRequest = buildQuery(searchTerm, query, searchUrl, (i - 1));
+
+ // Getting results & Store content
+ WebClientStringResult pageResults = await queryExec(pageRequest);
+
+ // Assign response
+ fDom = pageResults.Content;
+
+ // Process page results
+ var additionalPageRows = findTorrentRows();
+
+ // Add them to torrents list
+ torrentRowList.AddRange(additionalPageRows.Select(fRow => fRow.Cq()));
+ }
+ }
+ else
+ {
+ // No search term, maybe testing... so registering the passkey for future use
+ string infosData = firstPageRows.First().Find("td:eq(2) > a").Attr("href");
+ IList<string> infosList = infosData.Split('&').Select(s => s.Trim()).Where(s => s != String.Empty).ToList();
+ IList<string> infosTracker = infosList.Select(s => s.Split(new[] { '=' }, 2)[1].Trim()).ToList();
+
+ output("\nStoring Passkey for future use... \"" + infosTracker[2] + "\"");
+ ConfigData.PassKey.Value = infosTracker[2];
+
+ }
+
+ // Loop on results
+ foreach (CQ tRow in torrentRowList)
+ {
+ output("\n=>> Torrent #" + (releases.Count + 1));
+
+ // ID
+ string row = tRow.Html().ToString();
+ int id = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(1) > a").Attr("href").ToString(), @"\d+").Value);
+ output("ID: " + id);
+
+ // Release Name
+ string name = tRow.Find("td:eq(1) > a").Attr("title").ToString();
+ output("Release: " + name);
+
+ // Category
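+ // (the first-column link's href carries the category as a "cXX" query key; the leading 'c' is stripped to get the tracker category id)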
+ string infosDataCategory = tRow.Find("td:eq(0) > a").Attr("href");
+ IList<string> infosListCategory = infosDataCategory.Split('&').Select(s => s.Trim()).Where(s => s != String.Empty).ToList();
+ IList<string> infosCategory = infosListCategory.Select(s => s.Split(new[] { '=' }, 2)[0].Trim()).ToList();
+ string categoryID = infosCategory.Last().TrimStart('c');
+ output("Category: " + MapTrackerCatToNewznab(categoryID) + " (" + categoryID + ")");
+
+ // Seeders
+ int seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(6)").Text(), @"\d+").Value);
+ output("Seeders: " + seeders);
+
+ // Leechers
+ int leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(7)").Text(), @"\d+").Value);
+ output("Leechers: " + leechers);
+
+ // Completed
+ int completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5)").Text(), @"\d+").Value);
+ output("Completed: " + completed);
+
+ // Size
+ string sizeStr = tRow.Find("td:eq(4)").Text().Trim().Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb");
+ long size = ReleaseInfo.GetBytes(sizeStr);
+ output("Size: " + sizeStr + " (" + size + " bytes)");
+
+ // Health
+ int percent = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(8) > img").Attr("alt").ToString(), @"\d+").Value);
+ output("Health: " + percent + "%");
+
+ // Publish Date
+ //var date = agoToDate(null);
+ int timestamp = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(1)").Attr("data-added").ToString(), @"\d+").Value);
+ DateTime date = unixTimeStampToDateTime(timestamp);
+ output("Released on: " + date.ToLocalTime() + " (TS >> " + timestamp + ")");
+
+ // Torrent Details URL
+ Uri detailsLink = new Uri(TorrentDescriptionUrl + id);
+ output("Details: " + detailsLink.AbsoluteUri);
+
+ // Torrent Comments URL
+ Uri commentsLink = new Uri(TorrentCommentUrl + id);
+ output("Comments Link: " + commentsLink.AbsoluteUri);
+
+ // Torrent Download URL
+ Uri downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()).Replace("{passkey}", ConfigData.PassKey.Value));
+ output("Download Link: " + downloadLink.AbsoluteUri);
+
+ // Building release infos
+ var release = new ReleaseInfo();
+ release.Category = MapTrackerCatToNewznab(categoryID.ToString());
+ release.Title = name;
+ release.Seeders = seeders;
+ release.Peers = seeders + leechers;
+ release.MinimumRatio = 1;
+ release.MinimumSeedTime = 345600;
+ release.PublishDate = date;
+ release.Size = size;
+ release.Guid = detailsLink;
+ release.Comments = commentsLink;
+ release.Link = downloadLink;
+ releases.Add(release);
+ }
+
+ }
+ catch (Exception ex)
+ {
+ OnParseError("Error, unable to parse result \n" + ex.StackTrace, ex);
+ }
+
+ // Return found releases
+ return releases;
+ }
+
+ /// <summary>
+ /// Build query to process
+ /// </summary>
+ /// <param name="term">Term to search</param>
+ /// <param name="query">Torznab Query for categories mapping</param>
+ /// <param name="url">Search url for provider</param>
+ /// <param name="page">Page number to request</param>
+ /// <returns>URL to query for parsing and processing results</returns>
+ private string buildQuery(string term, TorznabQuery query, string url, int page = 0)
+ {
+ var parameters = new NameValueCollection();
+ List<string> categoriesList = MapTorznabCapsToTrackers(query);
+
+ // If search term provided
+ if (!string.IsNullOrWhiteSpace(term))
+ {
+ // Add search term
+ parameters.Add("q", term);
+ }
+ else
+ {
+ parameters.Add("q", string.Empty);
+ // Showing all torrents (just for output function)
+ term = "all";
+ }
+
+ // Default parameters
+ parameters.Add("exact", "0");
+ parameters.Add("sort", "normal");
+ parameters.Add("order", "desc");
+
+ // Check if we are processing a new page
+ if (page > 0)
+ {
+ // Adding page number to query
+ parameters.Add("page", page.ToString());
+ }
+
+ // Loop on Categories needed
+ foreach (string category in categoriesList)
+ {
+ // Add categories
+ parameters.Add("c" + category, "1");
+ }
+
+ // Building our query
+ url += "?" + parameters.GetQueryString();
+
+ output("\nBuilded query for \"" + term + "\"... " + url);
+
+ // Return our search url
+ return url;
+ }
+
+ /// <summary>
+ /// Switch Method for Querying
+ /// </summary>
+ /// <param name="request">URL created by Query Builder</param>
+ /// <returns>Results from query</returns>
+ private async Task<WebClientStringResult> queryExec(string request)
+ {
+ WebClientStringResult results = null;
+
+ // Check whether we are in DEV mode with Hard Drive Cache enabled or not
+ if (DevMode && CacheMode)
+ {
+ // Check Cache before querying and load previous results if available
+ results = await queryCache(request);
+ }
+ else
+ {
+ // Querying tracker directly
+ results = await queryTracker(request);
+ }
+ return results;
+ }
+
+ /// <summary>
+ /// Get Torrents Page from Cache by Query Provided
+ /// </summary>
+ /// <param name="request">URL created by Query Builder</param>
+ /// <returns>Results from query</returns>
+ private async Task<WebClientStringResult> queryCache(string request)
+ {
+ WebClientStringResult results = null;
+
+ // Create the directory if it does not exist
+ System.IO.Directory.CreateDirectory(directory);
+
+ // Clean Storage Provider Directory from outdated cached queries
+ cleanCacheStorage();
+
+ // Create fingerprint for request
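+ // (the cache file is named after the hash of the full request URL, one JSON file per distinct query)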
+ string file = directory + request.GetHashCode() + ".json";
+
+ // Checking modes states
+ if (System.IO.File.Exists(file))
+ {
+ // File exists... loading it right now !
+ output("Loading results from hard drive cache ..." + request.GetHashCode() + ".json");
+ results = JsonConvert.DeserializeObject<WebClientStringResult>(System.IO.File.ReadAllText(file));
+ }
+ else
+ {
+ // No cached file found, querying tracker directly
+ results = await queryTracker(request);
+
+ // Cached file didn't exist for our query, writing it right now !
+ output("Writing results to hard drive cache ..." + request.GetHashCode() + ".json");
+ System.IO.File.WriteAllText(file, JsonConvert.SerializeObject(results));
+ }
+ return results;
+ }
+
+ /// <summary>
+ /// Get Torrents Page from Tracker by Query Provided
+ /// </summary>
+ /// <param name="request">URL created by Query Builder</param>
+ /// <returns>Results from query</returns>
+ private async Task<WebClientStringResult> queryTracker(string request)
+ {
+ WebClientStringResult results = null;
+
+ // Cache mode not enabled or cached file didn't exist for our query
+ output("\nQuerying tracker for results....");
+
+ // Request our first page
+ latencyNow();
+ results = await RequestStringWithCookiesAndRetry(request, null, null, emulatedBrowserHeaders);
+
+ // Return results from tracker
+ return results;
+ }
+
+ /// <summary>
+ /// Clean Hard Drive Cache Storage
+ /// </summary>
+ /// <param name="force">Force Provider Folder deletion</param>
+ private void cleanCacheStorage(Boolean force = false)
+ {
+ // Check cleaning method
+ if(force)
+ {
+ // Deleting Provider Storage folder and all files recursively
+ output("\nDeleting Provider Storage folder and all files recursively ...");
+
+ // Check if the directory exists
+ if(System.IO.Directory.Exists(directory))
+ {
+ // Delete storage directory of provider
+ System.IO.Directory.Delete(directory, true);
+ output("-> Storage folder deleted successfully.");
+ }
+ else
+ {
+ // No directory, so nothing to do
+ output("-> No Storage folder found for this provider !");
+ }
+ }
+ else
+ {
+ int i = 0;
+ // Find files older than the configured keep time and delete them
+ output("\nCleaning Provider Storage folder... in progress.");
+ System.IO.Directory.GetFiles(directory)
+ .Select(f => new System.IO.FileInfo(f))
+ .Where(f => f.LastAccessTime < DateTime.Now.AddMilliseconds(-Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value)))
+ .ToList()
+ .ForEach(f => {
+ output("Deleting cached file << " + f.Name + " >> ... done.");
+ f.Delete();
+ i++;
+ });
+
+ // Inform on what was cleaned during process
+ if(i > 0) {
+ output("-> Deleted " + i + " cached files during cleaning.");
+ }
+ else {
+ output("-> Nothing deleted during cleaning.");
+ }
+ }
+ }
+
+ /// <summary>
+ /// Generate a random fake latency to avoid detection on tracker side
+ /// </summary>
+ private void latencyNow()
+ {
+ // Need latency ?
+ if(Latency)
+ {
+ // Generate a random value in our range
+ var random = new Random(DateTime.Now.Millisecond);
+ int waiting = random.Next(Convert.ToInt32(ConfigData.LatencyStart.Value), Convert.ToInt32(ConfigData.LatencyEnd.Value));
+ output("\nLatency Faker => Sleeping for " + waiting + " ms...");
+
+ // Sleep now...
+ System.Threading.Thread.Sleep(waiting);
+ }
+ }
+
+ /// <summary>
+ /// Find torrent rows in search pages
+ /// </summary>
+ /// <returns>JQuery Object</returns>
+ private CQ findTorrentRows()
+ {
+ // Return all occurrences of torrents found
+ return fDom["#torrent_list > tbody > tr"].Not(".head_torrent").Filter("#torrent_");
+
+ // Dispatch Torrent Row and Torrent Infos
+ }
+
+ /// <summary>
+ /// Convert Unix TimeStamp to DateTime
+ /// </summary>
+ /// <param name="unixTimeStamp">Seconds elapsed since the Unix epoch</param>
+ /// <returns>A DateTime</returns>
+ private DateTime unixTimeStampToDateTime(double unixTimeStamp)
+ {
+ // Unix timestamp is seconds past epoch
+ System.DateTime dtDateTime = new DateTime(1970, 1, 1, 0, 0, 0, 0, System.DateTimeKind.Utc);
+ dtDateTime = dtDateTime.AddSeconds(unixTimeStamp).ToLocalTime();
+ return dtDateTime;
+ }
+
+ /// <summary>
+ /// Output message for logging or development (console)
+ /// </summary>
+ /// <param name="message">Message to output</param>
+ /// <param name="level">Level for Logger</param>
+ private void output(string message, string level = "debug")
+ {
+ // Check if we are in dev mode
+ if(DevMode)
+ {
+ // Output message to console
+ Console.WriteLine(message);
+ }
+ else
+ {
+ // Send message to logger with level
+ switch (level)
+ {
+ default:
+ goto case "debug";
+ case "debug":
+ // Only if Debug Level Enabled on Jackett
+ if (Engine.Logger.IsDebugEnabled)
+ {
+ logger.Debug(message);
+ }
+ break;
+ case "info":
+ logger.Info(message);
+ break;
+ case "error":
+ logger.Error(message);
+ break;
+ }
+ }
+ }
+
+ /// <summary>
+ /// Validate Config entered by user on Jackett
+ /// </summary>
+ private void validateConfig()
+ {
+ output("\nValidating Settings ... \n");
+
+ // Check Username Setting
+ if (string.IsNullOrEmpty(ConfigData.Username.Value))
+ {
+ throw new ExceptionWithConfigData("You must provide a username for this tracker to login !", ConfigData);
+ }
+ else
+ {
+ output("Validated Setting -- Username (auth) => " + ConfigData.Username.Value.ToString());
+ }
+
+ // Check Password Setting
+ if (string.IsNullOrEmpty(ConfigData.Password.Value))
+ {
+ throw new ExceptionWithConfigData("You must provide a password with your username for this tracker to login !", ConfigData);
+ }
+ else
+ {
+ output("Validated Setting -- Password (auth) => " + ConfigData.Password.Value.ToString());
+ }
+
+ // Check Max Page Setting
+ if (!string.IsNullOrEmpty(ConfigData.Pages.Value))
+ {
+ try
+ {
+ output("Validated Setting -- Max Pages => " + Convert.ToInt32(ConfigData.Pages.Value));
+ }
+ catch (Exception)
+ {
+ throw new ExceptionWithConfigData("Please enter a numeric maximum number of pages to crawl !", ConfigData);
+ }
+ }
+ else
+ {
+ throw new ExceptionWithConfigData("Please enter a maximum number of pages to crawl !", ConfigData);
+ }
+
+ // Check Latency Setting
+ if (ConfigData.Latency.Value)
+ {
+ output("\nValidated Setting -- Latency Simulation enabled");
+
+ // Check Latency Start Setting
+ if (!string.IsNullOrEmpty(ConfigData.LatencyStart.Value))
+ {
+ try
+ {
+ output("Validated Setting -- Latency Start => " + Convert.ToInt32(ConfigData.LatencyStart.Value));
+ }
+ catch (Exception)
+ {
+ throw new ExceptionWithConfigData("Please enter a numeric latency start in ms !", ConfigData);
+ }
+ }
+ else
+ {
+ throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a start latency !", ConfigData);
+ }
+
+ // Check Latency End Setting
+ if (!string.IsNullOrEmpty(ConfigData.LatencyEnd.Value))
+ {
+ try
+ {
+ output("Validated Setting -- Latency End => " + Convert.ToInt32(ConfigData.LatencyEnd.Value));
+ }
+ catch (Exception)
+ {
+ throw new ExceptionWithConfigData("Please enter a numeric latency end in ms !", ConfigData);
+ }
+ }
+ else
+ {
+ throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a end latency !", ConfigData);
+ }
+ }
+
+ // Check Browser Setting
+ if (ConfigData.Browser.Value)
+ {
+ output("\nValidated Setting -- Browser Simulation enabled");
+
+ // Check ACCEPT header Setting
+ if (string.IsNullOrEmpty(ConfigData.HeaderAccept.Value))
+ {
+ throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT header !", ConfigData);
+ }
+ else
+ {
+ output("Validated Setting -- ACCEPT (header) => " + ConfigData.HeaderAccept.Value.ToString());
+ }
+
+ // Check ACCEPT-LANG header Setting
+ if (string.IsNullOrEmpty(ConfigData.HeaderAcceptLang.Value))
+ {
+ throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT-LANG header !", ConfigData);
+ }
+ else
+ {
+ output("Validated Setting -- ACCEPT-LANG (header) => " + ConfigData.HeaderAcceptLang.Value.ToString());
+ }
+
+ // Check USER-AGENT header Setting
+ if (string.IsNullOrEmpty(ConfigData.HeaderUserAgent.Value))
+ {
+ throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an USER-AGENT header !", ConfigData);
+ }
+ else
+ {
+ output("Validated Setting -- USER-AGENT (header) => " + ConfigData.HeaderUserAgent.Value.ToString());
+ }
+ }
+
+ // Check Dev Cache Settings
+ if (ConfigData.HardDriveCache.Value == true)
+ {
+ output("\nValidated Setting -- DEV Hard Drive Cache enabled");
+
+ // Check if Dev Mode enabled !
+ if (!ConfigData.DevMode.Value)
+ {
+ throw new ExceptionWithConfigData("Hard Drive is enabled but not in DEV MODE, Please enable DEV MODE !", ConfigData);
+ }
+
+ // Check Cache Keep Time Setting
+ if (!string.IsNullOrEmpty(ConfigData.HardDriveCacheKeepTime.Value))
+ {
+ try
+ {
+ output("Validated Setting -- Cache Keep Time (ms) => " + Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value));
+ }
+ catch (Exception)
+ {
+ throw new ExceptionWithConfigData("Please enter a numeric hard drive keep time in ms !", ConfigData);
+ }
+ }
+ else
+ {
+ throw new ExceptionWithConfigData("Hard Drive Cache enabled, Please enter a maximum keep time for cache !", ConfigData);
+ }
+ }
+ else
+ {
+ // Delete cache if previously existed
+ cleanCacheStorage(true);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/Jackett/Jackett.csproj b/src/Jackett/Jackett.csproj
index 99bea18e0..5ee6656f4 100644
--- a/src/Jackett/Jackett.csproj
+++ b/src/Jackett/Jackett.csproj
@@ -186,6 +186,7 @@
+    <Compile Include="Indexers\PhxBit.cs" />
@@ -219,6 +220,7 @@
+    <Compile Include="Models\IndexerConfig\Bespoke\ConfigurationDataPhxBit.cs" />
@@ -439,6 +441,9 @@
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    <Content Include="Content\logos\phxbit.png">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </Content>
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
diff --git a/src/Jackett/Models/IndexerConfig/Bespoke/ConfigurationDataPhxBit.cs b/src/Jackett/Models/IndexerConfig/Bespoke/ConfigurationDataPhxBit.cs
new file mode 100644
index 000000000..07b2c2397
--- /dev/null
+++ b/src/Jackett/Models/IndexerConfig/Bespoke/ConfigurationDataPhxBit.cs
@@ -0,0 +1,55 @@
+namespace Jackett.Models.IndexerConfig.Bespoke
+{
+ class ConfigurationDataPhxBit : ConfigurationData
+ {
+ public HiddenItem PassKey { get; set; }
+ public DisplayItem CredentialsWarning { get; private set; }
+ public StringItem Username { get; private set; }
+ public StringItem Password { get; private set; }
+ public DisplayItem PagesWarning { get; private set; }
+ public StringItem Pages { get; private set; }
+ public DisplayItem SecurityWarning { get; private set; }
+ public BoolItem Latency { get; private set; }
+ public BoolItem Browser { get; private set; }
+ public DisplayItem LatencyWarning { get; private set; }
+ public StringItem LatencyStart { get; private set; }
+ public StringItem LatencyEnd { get; private set; }
+ public DisplayItem HeadersWarning { get; private set; }
+ public StringItem HeaderAccept { get; private set; }
+ public StringItem HeaderAcceptLang { get; private set; }
+ public BoolItem HeaderDNT { get; private set; }
+ public BoolItem HeaderUpgradeInsecure { get; private set; }
+ public StringItem HeaderUserAgent { get; private set; }
+ public DisplayItem DevWarning { get; private set; }
+ public BoolItem DevMode { get; private set; }
+ public BoolItem HardDriveCache { get; private set; }
+ public StringItem HardDriveCacheKeepTime { get; private set; }
+
+ public ConfigurationDataPhxBit()
+ : base()
+ {
+ PassKey = new HiddenItem { Name = "PassKey", Value = "" };
+ CredentialsWarning = new DisplayItem("Credentials Configuration (Private Tracker),<br /> - Username is your account name on this tracker.<br /> - Password is the password associated with your account name.") { Name = "Credentials" };
+ Username = new StringItem { Name = "Username (Required)", Value = "" };
+ Password = new StringItem { Name = "Password (Required)", Value = "" };
+ PagesWarning = new DisplayItem("Preferences Configuration (Tweak your search settings),<br /> - Max Pages to Process lets you specify how many pages (max) Jackett can process when doing a search. Setting a value higher than 4 is dangerous for your account! (Too many requests to the tracker will look suspicious.)") { Name = "Preferences" };
+ Pages = new StringItem { Name = "Max Pages to Process (Required)", Value = "4" };
+ SecurityWarning = new DisplayItem("Security Configuration (Read this area carefully!),<br /> - Latency Simulation will simulate human browsing by pausing Jackett for a random time between each request, to mimic real content browsing.<br /> - Browser Simulation will simulate a real browser by injecting additional headers when making requests to the tracker.") { Name = "Security" };
+ Latency = new BoolItem() { Name = "Latency Simulation (Optional)", Value = false };
+ Browser = new BoolItem() { Name = "Browser Simulation (Optional)", Value = true };
+ LatencyWarning = new DisplayItem("Latency Configuration (Required if latency simulation is enabled),<br /> - By filling in this range, Jackett will pause for a random time between requests to the tracker to simulate a real browser.<br /> - Milliseconds only.") { Name = "Simulate Latency" };
+ LatencyStart = new StringItem { Name = "Minimum Latency (ms)", Value = "1589" };
+ LatencyEnd = new StringItem { Name = "Maximum Latency (ms)", Value = "3674" };
+ HeadersWarning = new DisplayItem("Browser Headers Configuration (Required if browser simulation is enabled),<br /> - By filling in these fields, Jackett will inject headers with your values to simulate a real browser.<br /> - You can get your browser values here: www.whatismybrowser.com<br /><br />Note that some headers are not necessary because they are injected automatically by this provider, such as Accept-Encoding, Connection, Host or X-Requested-With.") { Name = "Injecting headers" };
+ HeaderAccept = new StringItem { Name = "Accept", Value = "" };
+ HeaderAcceptLang = new StringItem { Name = "Accept-Language", Value = "" };
+ HeaderDNT = new BoolItem { Name = "DNT", Value = false };
+ HeaderUpgradeInsecure = new BoolItem { Name = "Upgrade-Insecure-Requests", Value = false };
+ HeaderUserAgent = new StringItem { Name = "User-Agent", Value = "" };
+ DevWarning = new DisplayItem("Development Facility (For Developers ONLY),<br /> - By enabling development mode, Jackett will bypass its cache and output debug messages to the console instead of its log file.<br /> - By enabling Hard Drive Cache, this provider will save each query answer from the tracker in a temp directory, which drastically reduces HTTP requests while building or debugging the provider (e.g. at the parsing step). Jackett will look for a cached query answer on the hard drive before querying the tracker! DEV MODE must be enabled to use it!") { Name = "Development" };
+ DevMode = new BoolItem { Name = "Enable DEV MODE (Developers ONLY)", Value = false };
+ HardDriveCache = new BoolItem { Name = "Enable HARD DRIVE CACHE (Developers ONLY)", Value = false };
+ HardDriveCacheKeepTime = new StringItem { Name = "Keep Cached files for (ms)", Value = "300000" };
+ }
+ }
+}