Bug fixes for number parsing; fixed the ThePirateBay indexer

Author: unknown
Date: 2015-07-11 10:24:41 -06:00
parent edb629adb4
commit 8d08279895
15 changed files with 115 additions and 77 deletions

View File

@@ -75,11 +75,11 @@ namespace Jackett.Indexers
{
var config = new ConfigurationDataBasicLoginAnimeBytes();
config.LoadValuesFromJson(configJson);
// Get the login form as we need the CSRF Token
var loginPage = await client.GetAsync(LoginUrl);
CQ loginPageDom = await loginPage.Content.ReadAsStringAsync();
var csrfToken = loginPageDom["input[name=\"csrf_token\"]"].Last();
// Build login form
@@ -133,7 +133,7 @@ namespace Jackett.Indexers
var configSaveData = new JObject();
configSaveData["cookies"] = cookieContainer.ToJson(SiteLink);
configSaveData["raws"] = AllowRaws;
if (OnSaveConfigurationRequested != null)
OnSaveConfigurationRequested(this, configSaveData);
@@ -198,9 +198,10 @@ namespace Jackett.Indexers
return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
}
var queryUrl = SearchUrl;
// Only include the query bit if it's required, as hopefully the site caches the non-query page
if(!string.IsNullOrWhiteSpace(query.SearchTerm)){
if (!string.IsNullOrWhiteSpace(query.SearchTerm))
{
queryUrl += "&action=advanced&search_type=title&sort=time_added&way=desc&anime%5Btv_series%5D=1&searchstr=" + WebUtility.UrlEncode(query.SearchTerm);
}
@@ -229,8 +230,8 @@ namespace Jackett.Indexers
var yearStr = seriesCq.Find(".group_title strong").First().Text().Trim().Replace("]", "").Trim();
int yearIndex = yearStr.LastIndexOf("[");
if (yearIndex > -1)
yearStr = yearStr.Substring(yearIndex+1);
yearStr = yearStr.Substring(yearIndex + 1);
int year = 0;
if (!int.TryParse(yearStr, out year))
year = DateTime.Now.Year;
@@ -301,7 +302,7 @@ namespace Jackett.Indexers
var downloadLink = links.Get(0);
release.Guid = new Uri(BaseUrl + "/" + downloadLink.Attributes.GetAttribute("href") + "&nh=" + Hash(title)); // Sonarr should dedupe on this url - allow a url per name.
release.Link = release.Guid; // We don't know this, so try to fake it based on the release year
release.PublishDate = new DateTime(year,1,1);
release.PublishDate = new DateTime(year, 1, 1);
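// Approximates "today's date in the release year": Jan 1 plus (day-of-year - 1), capped so a leap-year day 366 can't spill into the next year.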
release.PublishDate = release.PublishDate.AddDays(Math.Min(DateTime.Now.DayOfYear, 365) - 1);
var infoLink = links.Get(1);
@@ -347,15 +348,15 @@ namespace Jackett.Indexers
if (size.Count() > 0)
{
var sizeParts = size.First().Text().Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], float.Parse(sizeParts[0]));
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
}
// Additional 5 hours per GB
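// (18,000 seconds = 5 hours; Size is in bytes, so Size / 1,000,000,000 is whole gigabytes)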
release.MinimumSeedTime += (release.Size / 1000000000) * 18000;
// Peer info
release.Seeders = int.Parse(rowCq.Find(".torrent_seeders").Text());
release.Peers = release.Seeders + int.Parse(rowCq.Find(".torrent_leechers").Text());
release.Seeders = ParseUtil.CoerceInt(rowCq.Find(".torrent_seeders").Text());
release.Peers = release.Seeders + ParseUtil.CoerceInt(rowCq.Find(".torrent_leechers").Text());
releases.Add(release);
}
@@ -369,7 +370,7 @@ namespace Jackett.Indexers
throw ex;
}
// Add to the cache
lock (cache)
{

View File

@@ -142,10 +142,10 @@ namespace Jackett.Indexers
var sizeCol = qRow.Children().ElementAt(6);
var sizeVal = sizeCol.ChildNodes[0].NodeValue;
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, float.Parse(sizeVal));
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
release.Seeders = int.Parse(qRow.Children().ElementAt(8).Cq().Text().Trim(), NumberStyles.AllowThousands);
release.Peers = int.Parse(qRow.Children().ElementAt(9).Cq().Text().Trim(), NumberStyles.AllowThousands) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(8).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(9).Cq().Text().Trim()) + release.Seeders;
releases.Add(release);
}

View File

@@ -168,12 +168,12 @@ namespace Jackett
release.Link = new Uri(BaseUrl + "/" + row.ChildElements.ElementAt(2).Cq().Children("a.index").Attr("href"));
var sizeCol = row.ChildElements.ElementAt(6);
var sizeVal = float.Parse(sizeCol.ChildNodes[0].NodeValue);
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue);
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = int.Parse(row.ChildElements.ElementAt(8).Cq().Text(), NumberStyles.AllowThousands);
release.Peers = int.Parse(row.ChildElements.ElementAt(9).Cq().Text(), NumberStyles.AllowThousands) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text()) + release.Seeders;
//if (!release.Title.ToLower().Contains(title.ToLower()))
// continue;

View File

@@ -164,11 +164,11 @@ namespace Jackett
pubDate = DateTime.ParseExact(dateString, "d-MMM-yyyy HH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
release.PublishDate = pubDate;
release.Seeders = int.Parse(qRow.Find("td.table_seeders").Text().Trim(), NumberStyles.AllowThousands);
release.Peers = int.Parse(qRow.Find("td.table_leechers").Text().Trim(), NumberStyles.AllowThousands) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("td.table_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find("td.table_leechers").Text().Trim()) + release.Seeders;
var sizeCol = qRow.Find("td.table_size")[0];
var sizeVal = float.Parse(sizeCol.ChildNodes[0].NodeValue.Trim());
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue.Trim());
var sizeUnit = sizeCol.ChildNodes[2].NodeValue.Trim();
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);

View File

@@ -151,7 +151,7 @@ namespace Jackett.Indexers
var descString = qRow.Find(".t_ctime").Text();
var dateString = descString.Split('|').Last().Trim();
dateString = dateString.Split(new string[] { " by " }, StringSplitOptions.None)[0];
var dateValue = float.Parse(dateString.Split(' ')[0]);
var dateValue = ParseUtil.CoerceFloat(dateString.Split(' ')[0]);
var dateUnit = dateString.Split(' ')[1];
if (dateUnit.Contains("minute"))
pubDate = DateTime.Now - TimeSpan.FromMinutes(dateValue);
@@ -173,12 +173,12 @@ namespace Jackett.Indexers
release.Link = new Uri(BaseUrl + qLink.Attr("href"));
var sizeStr = row.ChildElements.ElementAt(5).Cq().Text().Trim();
var sizeVal = float.Parse(sizeStr.Split(' ')[0]);
var sizeVal = ParseUtil.CoerceFloat(sizeStr.Split(' ')[0]);
var sizeUnit = sizeStr.Split(' ')[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = int.Parse(qRow.Find(".t_seeders").Text().Trim(), NumberStyles.AllowThousands);
release.Peers = int.Parse(qRow.Find(".t_leechers").Text().Trim(), NumberStyles.AllowThousands) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".t_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".t_leechers").Text().Trim()) + release.Seeders;
releases.Add(release);
}

View File

@@ -166,7 +166,7 @@ namespace Jackett.Indexers
var sizeStr = qRow.Find(".ttr_size").Contents()[0].NodeValue;
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], float.Parse(sizeParts[0], NumberStyles.Float | NumberStyles.AllowThousands));
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var timeStr = qRow.Find(".ttr_added").Text();
DateTime time;
@@ -175,8 +175,8 @@ namespace Jackett.Indexers
release.PublishDate = time;
}
release.Seeders = int.Parse(qRow.Find(".ttr_seeders").Text(), NumberStyles.AllowThousands);
release.Peers = int.Parse(qRow.Find(".ttr_leechers").Text(), NumberStyles.AllowThousands) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".ttr_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".ttr_leechers").Text()) + release.Seeders;
releases.Add(release);
}

View File

@@ -28,10 +28,9 @@ namespace Jackett.Indexers
public bool IsConfigured { get; private set; }
const string DefaultUrl = "https://thepiratebay.gd";
const string SearchUrl = "/s/?q=\"{0}\"&category=205&page=0&orderby=99";
const string SearchUrl2 = "/s/?q=\"{0}\"&category=208&page=0&orderby=99";
const string SwitchSingleViewUrl = "/switchview.php?view=s";
const string DefaultUrl = "https://thepiratebay.mn";
const string SearchUrl = "/search/{0}/0/99/208";
const string SearchUrl2 = "/search/{0}/0/99/205";
string BaseUrl;
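The query-building code that consumes these templates isn't shown in this hunk, so here is a hypothetical sketch of how the new path-style URLs expand; the helper name and the URL-encoding call are assumptions, and 205/208 are ThePirateBay's "TV shows" and "HD - TV shows" category IDs.

using System;

static class TpbSearchUrlSketch
{
    // Hypothetical helper, not the indexer's actual method.
    static string Build(string baseUrl, string template, string searchTerm)
    {
        return baseUrl + string.Format(template, Uri.EscapeDataString(searchTerm));
    }

    static void Main()
    {
        // Prints: https://thepiratebay.mn/search/Some%20Show%20S01E01/0/99/208
        Console.WriteLine(Build("https://thepiratebay.mn", "/search/{0}/0/99/208", "Some Show S01E01"));
    }
}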
@@ -108,25 +107,19 @@ namespace Jackett.Indexers
foreach (var episodeSearchUrl in searchUrls)
{
var message = new HttpRequestMessage
{
Method = HttpMethod.Get,
RequestUri = new Uri(baseUrl + SwitchSingleViewUrl)
};
message.Headers.Referrer = new Uri(episodeSearchUrl);
string results;
if (Program.IsWindows)
{
var response = await client.SendAsync(message);
results = await response.Content.ReadAsStringAsync();
results = await client.GetStringAsync(episodeSearchUrl);
}
else
{
var response = await CurlHelper.GetAsync(baseUrl + SwitchSingleViewUrl, null, episodeSearchUrl);
var response = await CurlHelper.GetAsync(episodeSearchUrl, null, episodeSearchUrl);
results = Encoding.UTF8.GetString(response.Content);
}
try
{
CQ dom = results;
@@ -136,7 +129,8 @@ namespace Jackett.Indexers
{
var release = new ReleaseInfo();
CQ qLink = row.ChildElements.ElementAt(1).Cq().Children("a").First();
CQ qRow = row.Cq();
CQ qLink = qRow.Find(".detName > .detLink").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
@@ -145,13 +139,27 @@ namespace Jackett.Indexers
release.Comments = new Uri(baseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
var timeString = row.ChildElements.ElementAt(2).Cq().Text();
var downloadCol = row.ChildElements.ElementAt(1).Cq().Children("a");
release.MagnetUri = new Uri(downloadCol.Attr("href"));
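// A magnet link looks like "magnet:?xt=urn:btih:<infohash>&dn=<name>...": splitting on ':' takes the piece after "btih", and splitting on '&' drops the trailing parameters.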
release.InfoHash = release.MagnetUri.ToString().Split(':')[3].Split('&')[0];
var descString = qRow.Find(".detDesc").Text().Trim();
var descParts = descString.Split(',');
var timeString = descParts[0].Split(' ')[1];
if (timeString.Contains("mins ago"))
release.PublishDate = (DateTime.Now - TimeSpan.FromMinutes(int.Parse(timeString.Split(' ')[0])));
{
release.PublishDate = (DateTime.Now - TimeSpan.FromMinutes(ParseUtil.CoerceInt(timeString.Split(' ')[0])));
}
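// The "Today" / "Y-day" timestamps appear to be in the site's time zone (UTC+2 here); subtract that offset to get UTC, then convert to local time.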
else if (timeString.Contains("Today"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(2) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains("Y-day"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(26) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains(':'))
{
var utc = DateTime.ParseExact(timeString, "MM-dd HH:mm", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
@@ -163,17 +171,13 @@ namespace Jackett.Indexers
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
var downloadCol = row.ChildElements.ElementAt(3).Cq().Find("a");
release.MagnetUri = new Uri(downloadCol.Attr("href"));
release.InfoHash = release.MagnetUri.ToString().Split(':')[3].Split('&')[0];
var sizeString = row.ChildElements.ElementAt(4).Cq().Text().Split(' ');
var sizeVal = float.Parse(sizeString[0], CultureInfo.InvariantCulture);
var sizeUnit = sizeString[1];
var sizeParts = descParts[1].Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var sizeVal = ParseUtil.CoerceFloat(sizeParts[1]);
var sizeUnit = sizeParts[2];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = int.Parse(row.ChildElements.ElementAt(5).Cq().Text());
release.Peers = int.Parse(row.ChildElements.ElementAt(6).Cq().Text()) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(2).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(3).Cq().Text()) + release.Seeders;
releases.Add(release);
}

View File

@@ -150,11 +150,11 @@ namespace Jackett.Indexers
var sizeStr = qRow.Find(".sizeInfo").Text().Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], float.Parse(sizeParts[0]));
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var dateStr = qRow.Find(".ulInfo").Text().Trim();
var dateStr = qRow.Find(".ulInfo").Text().Split('|').Last().Trim();
var dateParts = dateStr.Split(' ');
var dateValue = int.Parse(dateParts[1]);
var dateValue = ParseUtil.CoerceInt(dateParts[0]);
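// dateStr is presumably of the form "3 days ago": the leading number is the value and the unit keyword is checked below.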
TimeSpan ts = TimeSpan.Zero;
if (dateStr.Contains("sec"))
ts = TimeSpan.FromSeconds(dateValue);
@@ -172,8 +172,8 @@ namespace Jackett.Indexers
ts = TimeSpan.FromDays(dateValue * 365);
release.PublishDate = DateTime.Now - ts;
release.Seeders = int.Parse(qRow.Find(".seedersInfo").Text(), NumberStyles.AllowThousands);
release.Peers = int.Parse(qRow.Find(".leechersInfo").Text(), NumberStyles.AllowThousands) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seedersInfo").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".leechersInfo").Text()) + release.Seeders;
releases.Add(release);
}

View File

@@ -146,10 +146,10 @@ namespace Jackett.Indexers
release.PublishDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
var sizeStringParts = qRow.Children().ElementAt(4).InnerText.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], float.Parse(sizeStringParts[0]));
release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], ParseUtil.CoerceFloat(sizeStringParts[0]));
release.Seeders = int.Parse(qRow.Find(".seeders").Text());
release.Peers = release.Seeders + int.Parse(qRow.Find(".leechers").Text());
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seeders").Text());
release.Peers = release.Seeders + ParseUtil.CoerceInt(qRow.Find(".leechers").Text());
releases.Add(release);
}

View File

@@ -139,7 +139,7 @@ namespace Jackett.Indexers
else
{
var dateParts = dateStr.Split(' ');
var dateValue = int.Parse(dateParts[0]);
var dateValue = ParseUtil.CoerceInt(dateParts[0]);
TimeSpan ts = TimeSpan.Zero;
if (dateStr.Contains("sec"))
ts = TimeSpan.FromSeconds(dateValue);
@@ -160,9 +160,9 @@ namespace Jackett.Indexers
var sizeStr = qRow.Find(".size")[0].ChildNodes[0].NodeValue.Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], float.Parse(sizeParts[0], NumberStyles.AllowThousands));
release.Seeders = int.Parse(qRow.Children().ElementAt(6).InnerText.Trim(), NumberStyles.AllowThousands);
release.Peers = int.Parse(qRow.Children().ElementAt(7).InnerText.Trim(), NumberStyles.AllowThousands) + release.Seeders;
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(6).InnerText.Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(7).InnerText.Trim()) + release.Seeders;
releases.Add(release);
}

View File

@@ -232,13 +232,13 @@ namespace Jackett.Indexers
switch (counter)
{
case 0:
this.Size = ReleaseInfo.BytesFromMB(long.Parse(val.Substring(0, val.IndexOf(" ") - 1)));
this.Size = ReleaseInfo.BytesFromMB(ParseUtil.CoerceLong(val.Substring(0, val.IndexOf(" ") - 1)));
break;
case 1:
this.Seeders = int.Parse(val.Contains(",") ? val.Remove(val.IndexOf(","), 1) : val);
this.Seeders = ParseUtil.CoerceInt(val.Contains(",") ? val.Remove(val.IndexOf(","), 1) : val);
break;
case 2:
this.Peers = int.Parse(val.Contains(",") ? val.Remove(val.IndexOf(","), 1) : val);
this.Peers = ParseUtil.CoerceInt(val.Contains(",") ? val.Remove(val.IndexOf(","), 1) : val);
break;
case 3:
this.hash = val;

View File

@@ -114,6 +114,7 @@
<Compile Include="Main.Designer.cs">
<DependentUpon>Main.cs</DependentUpon>
</Compile>
<Compile Include="ParseUtil.cs" />
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Properties\Resources.Designer.cs">
@@ -163,7 +164,9 @@
<Content Include="WebContent\logos\sceneaccess.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="WebContent\logos\showrss.png" />
<Content Include="WebContent\logos\showrss.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="WebContent\logos\torrentday.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
@@ -237,7 +240,9 @@
<Content Include="WebContent\logos\torrentleech.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="WebContent\logos\torrentz.png" />
<Content Include="WebContent\logos\torrentz.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="WebContent\setup_indexer.html">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>

src/Jackett/ParseUtil.cs (new file, 28 lines)
View File

@@ -0,0 +1,28 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Jackett
{
public static class ParseUtil
{
public static float CoerceFloat(string str)
{
return float.Parse(str, NumberStyles.Any, CultureInfo.InvariantCulture);
}
public static int CoerceInt(string str)
{
return int.Parse(str, NumberStyles.Any, CultureInfo.InvariantCulture);
}
public static long CoerceLong(string str)
{
return long.Parse(str, NumberStyles.Any, CultureInfo.InvariantCulture);
}
}
}
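A minimal usage sketch of the new helpers (sample inputs are illustrative): NumberStyles.Any with CultureInfo.InvariantCulture accepts signs, thousands separators, decimal points and exponents regardless of the machine's locale, which is exactly what the plain int.Parse/float.Parse calls replaced above tripped over.

using System;
using Jackett;

class ParseUtilSketch
{
    static void Main()
    {
        // int.Parse("1,234") throws unless NumberStyles.AllowThousands is passed.
        Console.WriteLine(ParseUtil.CoerceInt("1,234"));     // 1234

        // float.Parse("699.86") mis-parses on locales that use ',' as the decimal separator.
        Console.WriteLine(ParseUtil.CoerceFloat("699.86"));  // 699.86

        Console.WriteLine(ParseUtil.CoerceLong("2048"));     // 2048
    }
}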

View File

@@ -129,9 +129,9 @@ namespace Jackett
{
var config = new ConfigurationSonarr();
config.LoadValuesFromJson(configJson);
await ReloadNameMappings(config.Host.Value, int.Parse(config.Port.Value), config.ApiKey.Value);
await ReloadNameMappings(config.Host.Value, ParseUtil.CoerceInt(config.Port.Value), config.ApiKey.Value);
Host = "http://" + new Uri(config.Host.Value).Host;
Port = int.Parse(config.Port.Value);
Port = ParseUtil.CoerceInt(config.Port.Value);
ApiKey = config.ApiKey.Value;
SaveSettings();
}

View File

@@ -34,7 +34,7 @@ namespace Jackett
else if (string.IsNullOrEmpty(Episode))
episodeString = string.Format("S{0:00}", Season);
else
episodeString = string.Format("S{0:00}E{1:00}", Season, int.Parse(Episode));
episodeString = string.Format("S{0:00}E{1:00}", Season, ParseUtil.CoerceInt(Episode));
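// e.g. Season = 1, Episode = "05" produces "S01E05"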
return episodeString;
}
@@ -53,16 +53,16 @@ namespace Jackett
if (query["extended"] != null)
{
q.Extended = int.Parse(query["extended"]);
q.Extended = ParseUtil.CoerceInt(query["extended"]);
}
q.ApiKey = query["apikey"];
if (query["limit"] != null)
{
q.Limit = int.Parse(query["limit"]);
q.Limit = ParseUtil.CoerceInt(query["limit"]);
}
if (query["offset"] != null)
{
q.Offset = int.Parse(query["offset"]);
q.Offset = ParseUtil.CoerceInt(query["offset"]);
}
int temp;