Mirror of https://github.com/Jackett/Jackett.git (synced 2025-09-17 17:34:09 +02:00)

Merge changes

README.md (10 lines changed)
@@ -20,19 +20,28 @@ Download in the [Releases page](https://github.com/zone117x/Jackett/releases)
* *For MoreThanTV & ThePirateBay* install libcurl-dev for your system, [tutorial](http://curl.haxx.se/dlwiz/?type=devel)
* For apt-get systems it's simply: `apt-get install libcurl4-openssl-dev`

### Running Jackett

On Windows the recommended way of running Jackett is to install it as a Windows service. When installed as a service, the tray icon acts as a way to open/start/stop Jackett. If you opted not to install it as a service, Jackett will run its web server from the tray tool.

Jackett can also be run from the command line (see --help for switches) using JackettConsole.exe if you would like to see log messages. On Linux / OSX you need to run the console using `mono JackettConsole.exe`.
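For example, `JackettConsole.exe --help` (or `mono JackettConsole.exe --help` on Linux / OSX) lists the available switches.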

### Supported Trackers
* [AlphaRatio](https://alpharatio.cc/)
* [AnimeBytes](https://animebytes.tv/)
* [BakaBT](http://bakabt.me/)
* [bB](http://reddit.com/r/baconbits)
* [BeyondHD](https://beyondhd.me/)
* [BIT-HDTV](https://www.bit-hdtv.com)
* [BitMeTV](http://www.bitmetv.org/)
* [FrenchTorrentDb](http://www.frenchtorrentdb.com/)
* [Freshon](https://freshon.tv/)
* [HD-Space](https://hd-space.org/)
* [HD-Torrents.org](https://hd-torrents.org/)
* [IPTorrents](https://iptorrents.com/)
* [MoreThan.tv](https://morethan.tv/)
* [pretome](https://pretome.info)
* [PrivateHD](https://privatehd.to/)
* [RARBG](https://rarbg.com)
* [SceneAccess](https://sceneaccess.eu/login)

@@ -45,7 +54,6 @@ Download in the [Releases page](https://github.com/zone117x/Jackett/releases)
* [TorrentLeech](http://www.torrentleech.org/)
* [TorrentShack](http://torrentshack.me/)
* [Torrentz](https://torrentz.eu/)
* [BakaBT](http://bakabt.me/)

### Additional Trackers
src/Jackett/Content/logos/hdspace.png (new binary file, 40 KiB; not shown)
src/Jackett/Content/logos/pretome.png (new binary file, 34 KiB; not shown)
@@ -353,8 +353,7 @@ namespace Jackett.Indexers
var size = rowCq.Find(".torrent_size");
if (size.Count() > 0)
{
var sizeParts = size.First().Text().Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
release.Size = ReleaseInfo.GetBytes(size.First().Text());
}

// Additional 5 hours per GB
@@ -132,9 +132,8 @@ namespace Jackett.Indexers
var dateStr = row.ChildElements.ElementAt(3).Cq().Text().Trim().Replace(" and", "");
release.PublishDate = DateTimeUtil.FromTimeAgo(dateStr);

var sizeStr = row.ChildElements.ElementAt(4).Cq().Text().Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var sizeStr = row.ChildElements.ElementAt(4).Cq().Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(7).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text().Trim()) + release.Seeders;
@@ -185,8 +185,8 @@ namespace Jackett.Indexers

release.MinimumRatio = 1;

var size = qRow.Find(".size").First().Text().Split(' ');
release.Size = ReleaseInfo.GetBytes(size[1], ParseUtil.CoerceFloat(size[0]));
var size = qRow.Find(".size").First().Text();
release.Size = ReleaseInfo.GetBytes(size);

//22 Jul 15
var dateStr = qRow.Find(".added . datetime").First().Text().Replace("'", string.Empty);
@@ -116,11 +116,8 @@ namespace Jackett.Indexers
var dateStr = descCol.ChildElements.Last().Cq().Text().Split('|').Last().ToLowerInvariant().Replace("ago.", "").Trim();
release.PublishDate = DateTimeUtil.FromTimeAgo(dateStr);

var sizeEl = row.ChildElements.ElementAt(7);
var sizeVal = ParseUtil.CoerceFloat(sizeEl.ChildNodes.First().NodeValue);
var sizeUnit = sizeEl.ChildNodes.Last().NodeValue;

release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
var sizeStr = row.ChildElements.ElementAt(7).Cq().Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(10).Cq().Text()) + release.Seeders;
@@ -125,10 +125,8 @@ namespace Jackett.Indexers
var pubDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Local);

var sizeCol = qRow.Children().ElementAt(6);
var sizeVal = sizeCol.ChildNodes[0].NodeValue;
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
var sizeStr = qRow.Children().ElementAt(6).Cq().Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(8).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(9).Cq().Text().Trim()) + release.Seeders;
@@ -163,10 +163,8 @@ namespace Jackett.Indexers

release.Link = new Uri(SiteLink + "/" + row.ChildElements.ElementAt(2).Cq().Children("a.index").Attr("href"));

var sizeCol = row.ChildElements.ElementAt(6);
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue);
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
var sizeStr = row.ChildElements.ElementAt(6).Cq().Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text()) + release.Seeders;
@@ -139,10 +139,8 @@ namespace Jackett.Indexers
release.PublishDate = DateTime.Now;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("li.torrents_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find("li.torrents_leechers").Text()) + release.Seeders;
var sizeParts = qRow.Find("li.torrents_size").Text().Split(' ');
var sizeVal = ParseUtil.CoerceFloat(sizeParts[0]);
var sizeUnit = sizeParts[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
var sizeParts = qRow.Find("li.torrents_size").Text();
release.Size = ReleaseInfo.GetBytes(sizeParts);

releases.Add(release);
}
@@ -162,10 +162,8 @@ namespace Jackett.Indexers
release.Seeders = ParseUtil.CoerceInt(qRow.Find("td.table_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find("td.table_leechers").Text().Trim()) + release.Seeders;

var sizeCol = qRow.Find("td.table_size")[0];
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue.Trim());
var sizeUnit = sizeCol.ChildNodes[2].NodeValue.Trim();
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
var sizeStr = qRow.Find("td.table_size")[0].Cq().Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

releases.Add(release);
}
src/Jackett/Indexers/HDSpace.cs (new file, 185 lines)
@@ -0,0 +1,185 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Jackett.Models;
using Newtonsoft.Json.Linq;
using Jackett.Utils.Clients;
using Jackett.Services;
using NLog;
using Jackett.Utils;
using CsQuery;
using System.Web;
using System.Text.RegularExpressions;
using System.Globalization;

namespace Jackett.Indexers
{
    public class HDSpace : BaseIndexer, IIndexer
    {

        private readonly string LoginUrl = "";
        private readonly string SearchUrl = "";
        private string cookieHeader = "";

        private IWebClient webclient;

        public HDSpace(IIndexerManagerService i, IWebClient wc, Logger l)
            : base(name: "HD-Space",
                description: "Sharing The Universe",
                link: new Uri("https://hd-space.org"),
                caps: TorznabCapsUtil.CreateDefaultTorznabTVCaps(),
                manager: i,
                logger: l)
        {
            LoginUrl = SiteLink + "index.php?page=login";
            SearchUrl = SiteLink + "index.php?page=torrents&active=0&options=0&category=21%3B22&search={0}";
            webclient = wc;
        }

        public Task<ConfigurationData> GetConfigurationForSetup()
        {
            var config = new ConfigurationDataBasicLogin();
            return Task.FromResult<ConfigurationData>(config);
        }

        public async Task ApplyConfiguration(JToken configJson)
        {
            var config = new ConfigurationDataBasicLogin();
            config.LoadValuesFromJson(configJson);

            var loginPage = await webclient.GetString(new WebRequest()
            {
                Url = LoginUrl,
                Type = RequestType.GET
            });

            var pairs = new Dictionary<string, string> {
                { "uid", config.Username.Value },
                { "pwd", config.Password.Value }
            };

            // Send Post
            var loginPost = await webclient.GetString(new WebRequest()
            {
                Url = LoginUrl,
                PostData = pairs,
                Referer = LoginUrl,
                Type = RequestType.POST,
                Cookies = loginPage.Cookies
            });

            if (loginPost.Status == System.Net.HttpStatusCode.OK)
            {
                var errorStr = "You have {0} remaining login attempts";
                var remainingAttemptSpan = new Regex(string.Format(errorStr, "(.*?)")).Match(loginPage.Content).Groups[1].ToString();
                var attempts = Regex.Replace(remainingAttemptSpan, "<.*?>", String.Empty);
                var errorMessage = string.Format(errorStr, attempts);
                throw new ExceptionWithConfigData(errorMessage, (ConfigurationData)config);
            }

            // Get result from redirect
            var loginResult = await webclient.GetString(new WebRequest()
            {
                Url = SiteLink + loginPost.RedirectingTo,
                Type = RequestType.GET,
                Cookies = loginPost.Cookies
            });

            if (!loginResult.Content.Contains("logout.php"))
            {
                throw new ExceptionWithConfigData("Login failed", (ConfigurationData)config);
            }
            else
            {
                cookieHeader = loginPost.Cookies;
                var configSaveData = new JObject();
                configSaveData["cookies"] = cookieHeader;
                SaveConfig(configSaveData);
                IsConfigured = true;
            }

        }

        public void LoadFromSavedConfiguration(JToken jsonConfig)
        {
            cookieHeader = (string)jsonConfig["cookies"];
            IsConfigured = true;
        }

        public async Task<byte[]> Download(Uri link)
        {
            var response = await webclient.GetBytes(new WebRequest()
            {
                Url = link.ToString(),
                Cookies = cookieHeader
            });
            return response.Content;
        }

        public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
        {
            List<ReleaseInfo> releases = new List<ReleaseInfo>();

            var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
            var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));

            var response = await webclient.GetString(new WebRequest()
            {
                Url = episodeSearchUrl,
                Referer = SiteLink.ToString(),
                Cookies = cookieHeader
            });
            var results = response.Content;

            try
            {
                CQ dom = results;
                var rows = dom["table.lista > tbody > tr"];
                foreach (var row in rows)
                {
                    // this tracker has horrible markup, find the result rows by looking for the style tag before each one
                    var prev = row.PreviousElementSibling;
                    if (prev == null || prev.NodeName.ToLowerInvariant() != "style") continue;

                    CQ qRow = row.Cq();
                    var release = new ReleaseInfo();

                    release.MinimumRatio = 1;
                    release.MinimumSeedTime = 172800;

                    var qLink = row.ChildElements.ElementAt(1).FirstElementChild.Cq();
                    release.Title = qLink.Text().Trim();
                    release.Comments = new Uri(SiteLink + qLink.Attr("href"));
                    release.Guid = release.Comments;

                    var qDownload = row.ChildElements.ElementAt(3).FirstElementChild.Cq();
                    release.Link = new Uri(SiteLink + qDownload.Attr("href"));

                    //"July 11, 2015, 13:34:09", "Today at 20:04:23"
                    var dateStr = row.ChildElements.ElementAt(4).Cq().Text().Trim();
                    if (dateStr.StartsWith("Today"))
                        release.PublishDate = DateTime.Today + TimeSpan.ParseExact(dateStr.Replace("Today at ", ""), "hh\\:mm\\:ss", CultureInfo.InvariantCulture);
                    else if (dateStr.StartsWith("Yesterday"))
                        release.PublishDate = DateTime.Today - TimeSpan.FromDays(1) + TimeSpan.ParseExact(dateStr.Replace("Yesterday at ", ""), "hh\\:mm\\:ss", CultureInfo.InvariantCulture);
                    else
                        release.PublishDate = DateTime.SpecifyKind(DateTime.ParseExact(dateStr, "MMMM dd, yyyy, HH:mm:ss", CultureInfo.InvariantCulture), DateTimeKind.Local);

                    var sizeStr = row.ChildElements.ElementAt(5).Cq().Text();
                    release.Size = ReleaseInfo.GetBytes(sizeStr);

                    release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(7).Cq().Text());
                    release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text()) + release.Seeders;

                    releases.Add(release);
                }
            }
            catch (Exception ex)
            {
                OnParseError(results, ex);
            }
            return releases.ToArray();
        }
    }
}
@@ -167,8 +167,7 @@ namespace Jackett.Indexers
}

string fullSize = qRow.Find("td.mainblockcontent").Get(6).InnerText;
string[] sizeSplit = fullSize.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeSplit[1], ParseUtil.CoerceFloat(sizeSplit[0]));
release.Size = ReleaseInfo.GetBytes(fullSize);

release.Guid = new Uri(SiteLink + "/" + qRow.Find("td.mainblockcontent b a").Attr("href"));
release.Link = new Uri(SiteLink + "/" + qRow.Find("td.mainblockcontent").Get(3).FirstChild.GetAttribute("href"));
@@ -137,7 +137,7 @@ namespace Jackett.Indexers
var qTitleLink = qRow.Find("a.t_title").First();
release.Title = qTitleLink.Text().Trim();

// If we get a no results found page we still get a table but without any data
// If we search and get no results, we still get a table just with no info.
if (string.IsNullOrWhiteSpace(release.Title))
{
break;
@@ -155,10 +155,8 @@ namespace Jackett.Indexers
var qLink = row.ChildElements.ElementAt(3).Cq().Children("a");
release.Link = new Uri(SiteLink + qLink.Attr("href"));

var sizeStr = row.ChildElements.ElementAt(5).Cq().Text().Trim();
var sizeVal = ParseUtil.CoerceFloat(sizeStr.Split(' ')[0]);
var sizeUnit = sizeStr.Split(' ')[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
var sizeStr = row.ChildElements.ElementAt(5).Cq().Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

release.Seeders = ParseUtil.CoerceInt(qRow.Find(".t_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".t_leechers").Text().Trim()) + release.Seeders;
src/Jackett/Indexers/Pretome.cs (new file, 189 lines)
@@ -0,0 +1,189 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Jackett.Models;
using Newtonsoft.Json.Linq;
using Jackett.Utils.Clients;
using Jackett.Services;
using NLog;
using Jackett.Utils;
using CsQuery;
using System.Web;

namespace Jackett.Indexers
{
    public class Pretome : BaseIndexer, IIndexer
    {

        class PretomeConfiguration : ConfigurationDataBasicLogin
        {
            public StringItem Pin { get; private set; }

            public PretomeConfiguration() : base()
            {
                Pin = new StringItem { Name = "Login Pin Number" };
            }

            public override Item[] GetItems()
            {
                return new Item[] { Pin, Username, Password };
            }
        }

        private readonly string LoginUrl = "";
        private readonly string LoginReferer = "";
        private readonly string SearchUrl = "";
        private string cookieHeader = "";

        private IWebClient webclient;

        public Pretome(IIndexerManagerService i, IWebClient wc, Logger l)
            : base(name: "PrivateHD",
                description: "BitTorrent site for High Quality, High Definition (HD) movies and TV Shows",
                link: new Uri("https://pretome.info"),
                caps: TorznabCapsUtil.CreateDefaultTorznabTVCaps(),
                manager: i,
                logger: l)
        {
            LoginUrl = SiteLink + "takelogin.php";
            LoginReferer = SiteLink + "index.php?cat=1";
            SearchUrl = SiteLink + "browse.php?tags=&st=1&tf=all&cat%5B%5D=7&search={0}";
            webclient = wc;
        }

        public Task<ConfigurationData> GetConfigurationForSetup()
        {
            var config = new PretomeConfiguration();
            return Task.FromResult<ConfigurationData>(config);
        }

        public async Task ApplyConfiguration(JToken configJson)
        {
            var config = new PretomeConfiguration();
            config.LoadValuesFromJson(configJson);

            var loginPage = await webclient.GetString(new WebRequest()
            {
                Url = LoginUrl,
                Type = RequestType.GET
            });

            var pairs = new Dictionary<string, string> {
                { "returnto", "%2F" },
                { "login_pin", config.Pin.Value },
                { "username", config.Username.Value },
                { "password", config.Password.Value },
                { "login", "Login" }
            };


            // Send Post
            var loginPost = await webclient.GetString(new WebRequest()
            {
                Url = LoginUrl,
                PostData = pairs,
                Referer = LoginReferer,
                Type = RequestType.POST,
                Cookies = loginPage.Cookies
            });

            if (loginPost.RedirectingTo == null)
            {
                throw new ExceptionWithConfigData("Login failed. Did you use the PIN number that pretome emailed you?", (ConfigurationData)config);
            }

            // Get result from redirect
            var loginResult = await webclient.GetString(new WebRequest()
            {
                Url = loginPost.RedirectingTo,
                Type = RequestType.GET,
                Cookies = loginPost.Cookies
            });

            if (!loginResult.Content.Contains("logout.php"))
            {
                throw new ExceptionWithConfigData("Failed", (ConfigurationData)config);
            }
            else
            {
                cookieHeader = loginPost.Cookies;
                var configSaveData = new JObject();
                configSaveData["cookies"] = cookieHeader;
                SaveConfig(configSaveData);
                IsConfigured = true;
            }
        }

        public void LoadFromSavedConfiguration(JToken jsonConfig)
        {
            cookieHeader = (string)jsonConfig["cookies"];
            IsConfigured = true;
        }

        public async Task<byte[]> Download(Uri link)
        {
            var response = await webclient.GetBytes(new WebRequest()
            {
                Url = link.ToString(),
                Cookies = cookieHeader
            });
            return response.Content;
        }

        public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
        {
            List<ReleaseInfo> releases = new List<ReleaseInfo>();

            var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
            var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));

            var response = await webclient.GetString(new WebRequest()
            {
                Url = episodeSearchUrl,
                Referer = SiteLink.ToString(),
                Cookies = cookieHeader
            });
            var results = response.Content;

            try
            {
                CQ dom = results;
                var rows = dom["table > tbody > tr.browse"];
                foreach (var row in rows)
                {
                    CQ qRow = row.Cq();
                    var release = new ReleaseInfo();

                    release.MinimumRatio = 1;
                    release.MinimumSeedTime = 172800;

                    var qLink = row.ChildElements.ElementAt(1).Cq().Find("a").First();
                    release.Title = qLink.Text().Trim();
                    release.Comments = new Uri(SiteLink + qLink.Attr("href"));
                    release.Guid = release.Comments;

                    var qDownload = row.ChildElements.ElementAt(2).Cq().Find("a").First();
                    release.Link = new Uri(SiteLink + qDownload.Attr("href"));

                    var dateStr = row.ChildElements.ElementAt(5).Cq().Text();
                    release.PublishDate = DateTimeUtil.FromTimeAgo(dateStr);

                    var sizeStr = row.ChildElements.ElementAt(7).Cq().Text();
                    release.Size = ReleaseInfo.GetBytes(sizeStr);

                    release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).InnerText);
                    release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(10).InnerText) + release.Seeders;

                    releases.Add(release);
                }
            }
            catch (Exception ex)
            {
                OnParseError(results, ex);
            }
            return releases.ToArray();
        }
    }
}
@@ -154,9 +154,8 @@ namespace Jackett.Indexers
var dateStr = row.ChildElements.ElementAt(5).Cq().Text().Trim();
release.PublishDate = DateTimeUtil.FromTimeAgo(dateStr);

var sizeStr = row.ChildElements.ElementAt(6).Cq().Text().Trim();
var sizeParts = sizeStr.Split(new char[0], StringSplitOptions.RemoveEmptyEntries);
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var sizeStr = row.ChildElements.ElementAt(6).Cq().Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text()) + release.Seeders;
@@ -138,8 +138,7 @@ namespace Jackett.Indexers
release.Link = new Uri(SiteLink + "/" + qRow.Find(".td_dl > a").Attr("href"));

var sizeStr = qRow.Find(".ttr_size").Contents()[0].NodeValue;
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
release.Size = ReleaseInfo.GetBytes(sizeStr);

var timeStr = qRow.Find(".ttr_added").Text();
DateTime time;
@@ -137,10 +137,8 @@ namespace Jackett.Indexers
var localDate = TimeZoneInfo.ConvertTimeToUtc(euDate, TimeZoneInfo.FindSystemTimeZoneById(timezoneString)).ToLocalTime();
release.PublishDate = localDate;

var sizeNodes = row.ChildElements.ElementAt(5).ChildNodes;
var sizeVal = sizeNodes.First().NodeValue;
var sizeUnit = sizeNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
var sizeStr = row.ChildElements.ElementAt(5).Cq().Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(6).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(7).Cq().Text().Trim()) + release.Seeders;
@@ -121,8 +121,8 @@ namespace Jackett.Indexers

release.Title = Regex.Replace((string)jobj["name"], "<.*?>", String.Empty);

var sizeParts = ((string)jobj["size"]).Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var SizeStr = ((string)jobj["size"]);
release.Size = ReleaseInfo.GetBytes(SizeStr);

release.Seeders = ParseUtil.CoerceInt((string)jobj["seed"]);
release.Peers = ParseUtil.CoerceInt((string)jobj["leech"]) + release.Seeders;
@@ -152,10 +152,7 @@ namespace Jackett.Indexers
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}

var sizeParts = descParts[1].Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var sizeVal = ParseUtil.CoerceFloat(sizeParts[1]);
var sizeUnit = sizeParts[2];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Size = ReleaseInfo.GetBytes(descParts[1]);

release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(2).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(3).Cq().Text()) + release.Seeders;
@@ -132,9 +132,8 @@ namespace Jackett.Indexers
release.Comments = release.Guid;
release.Link = new Uri(SiteLink + "/" + qRow.Find(".dlLinksInfo > a").Attr("href"));

var sizeStr = qRow.Find(".sizeInfo").Text().Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var sizeStr = qRow.Find(".sizeInfo").Text();
release.Size = ReleaseInfo.GetBytes(sizeStr);

var dateStr = qRow.Find(".ulInfo").Text().Split('|').Last().Trim();
release.PublishDate = DateTimeUtil.FromTimeAgo(dateStr);
@@ -128,8 +128,8 @@ namespace Jackett.Indexers
//"yyyy-MMM-dd hh:mm:ss"
release.PublishDate = DateTime.ParseExact(dateString, "yyyy-MM-ddHH:mm:ss", CultureInfo.InvariantCulture);

var sizeStringParts = qRow.Children().ElementAt(4).InnerText.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], ParseUtil.CoerceFloat(sizeStringParts[0]));
var sizeStr = qRow.Children().ElementAt(4).InnerText;
release.Size = ReleaseInfo.GetBytes(sizeStr);

release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seeders").Text());
release.Peers = release.Seeders + ParseUtil.CoerceInt(qRow.Find(".leechers").Text());
@@ -120,9 +120,8 @@ namespace Jackett.Indexers
var dateStr = qRow.Find(".time").Text().Trim();
release.PublishDate = DateTimeUtil.FromTimeAgo(dateStr);

var sizeStr = qRow.Find(".size")[0].ChildNodes[0].NodeValue.Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var sizeStr = qRow.Find(".size")[0].ChildNodes[0].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeStr);
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(6).InnerText.Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(7).InnerText.Trim()) + release.Seeders;
@@ -158,6 +158,8 @@
<Compile Include="Indexers\BakaBT.cs" />
<Compile Include="Indexers\BaseIndexer.cs" />
<Compile Include="Indexers\BB.cs" />
<Compile Include="Indexers\HDSpace.cs" />
<Compile Include="Indexers\Pretome.cs" />
<Compile Include="Indexers\PrivateHD.cs" />
<Compile Include="Indexers\SpeedCD.cs" />
<Compile Include="Models\TorznabCapabilities.cs" />
@@ -277,7 +279,7 @@
<Content Include="Content\logos\animebytes.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="Content\logos\BakaBT.png">
<Content Include="Content\logos\bakabt.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="Content\logos\bb.png">
@@ -289,9 +291,15 @@
<Content Include="Content\logos\frenchtorrentdb.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="Content\logos\hdspace.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="Content\logos\hdtorrents.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="Content\logos\pretome.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="Content\logos\privatehd.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
@@ -1,7 +1,9 @@
using System;
using Jackett.Utils;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;

namespace Jackett.Models
@@ -53,22 +55,25 @@ namespace Jackett.Models
};
}

// ex: " 3.5 gb "
public static long GetBytes(string str)
{
var valStr = new string(str.Where(c => char.IsDigit(c) || c == '.').ToArray());
var unit = new string(str.Where(char.IsLetter).ToArray());
var val = ParseUtil.CoerceFloat(valStr);
return GetBytes(unit, val);
}

public static long GetBytes(string unit, float value)
{
switch (unit.ToLower())
{
case "kb":
case "kib":
return BytesFromKB(value);
case "mb":
case "mib":
return BytesFromMB(value);
case "gb":
case "gib":
return BytesFromGB(value);
default:
return 0;
}
unit = unit.Replace("i", "").ToLowerInvariant();
if (unit.Contains("kb"))
return BytesFromKB(value);
if (unit.Contains("mb"))
return BytesFromMB(value);
if (unit.Contains("gb"))
return BytesFromGB(value);
return 0;
}

public static long BytesFromGB(float gb)
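The new single-argument overload above is what lets the indexer diffs in this commit pass the raw size cell straight through instead of splitting it by hand. A minimal usage sketch (illustrative only, not part of the commit; it assumes only the `ReleaseInfo` and `ParseUtil` members shown in this diff):

```csharp
using Jackett.Models;
using Jackett.Utils;

class GetBytesUsageSketch
{
    static void Example()
    {
        // Raw text scraped from a tracker's size cell, e.g. " 3.5 GB "
        var sizeText = " 3.5 GB ";

        // New one-call form: the value and unit are extracted inside GetBytes(string)
        long bytes = ReleaseInfo.GetBytes(sizeText);

        // Old two-argument pattern that this commit removes from the indexers
        var parts = sizeText.Trim().Split(' ');
        long sameBytes = ReleaseInfo.GetBytes(parts[1], ParseUtil.CoerceFloat(parts[0]));
    }
}
```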