Compare commits

...

71 Commits

Author SHA1 Message Date
Qstick
bc1e397ce3 Sync Indexers on app start, go to http if not sync'd yet 2022-01-31 12:10:52 -06:00
Qstick
17608cf915 Misc definition handling improvements 2022-01-30 20:57:06 -06:00
PearsonFlyer
a3de574de5 Fixed: Updated ruTorrent stopped state helptext 2022-01-26 09:53:07 -06:00
Robin Dadswell
22161e6d57 Fixed: Added missing translate for Database 2022-01-25 17:24:29 -06:00
Zippy79
c46ed33544 Fixed: Download limit check was using the query limit instead of the grab limit. 2022-01-25 08:56:54 -06:00
Qstick
7388655e6d Fixed: ToString() to AbsoluteUri for MagnetUrls as well 2022-01-23 19:41:20 -06:00
Qstick
5b5c186d0c Revert "Replace WebUtility.UrlEncode to Uri.EscapeDataString "
This reverts commit a7b1ef19f5.
2022-01-23 19:34:59 -06:00
Qstick
ae5d93d6dd Revert "Remove extra replacing (already done by Uri.EscapeDataString)"
This reverts commit 6880f38635.
2022-01-23 19:34:54 -06:00
Qstick
62f6670a21 Optimize Indexer updates 2022-01-21 21:30:17 -06:00
Qstick
c9951e7eba New: Add AppName to system status response 2022-01-18 23:29:30 -06:00
Qstick
2da22c08b0 Fixed: Log DB write connection opening 2022-01-18 21:06:12 -06:00
Qstick
e480f53f7f More mono cleaning 2022-01-18 20:17:01 -06:00
ta264
8701e67b1e Fixed: Close all database connections on shutdown 2022-01-18 20:07:33 -06:00
Qstick
97f4a2e651 Fixed: Use rolling 24 hours for indexer limits
Fixes #789
2022-01-18 19:56:17 -06:00
Servarr
3c3272cb25 Automated API Docs update 2022-01-17 21:42:48 -06:00
Qstick
fa626a53e6 Fixed: Smarter Int normalization
Fixes #787
2022-01-17 21:27:22 -06:00
Qstick
76daee3a1b New: Identify indexers that are already setup in add list 2022-01-17 19:24:52 -06:00
Qstick
1cbf61f4db New: Show definition name on add/edit screen for Cardigann 2022-01-17 19:23:08 -06:00
Qstick
34e57f27ff New: Don't close Indexer add list when adding Indexers 2022-01-17 18:45:40 -06:00
bakerboy448
de17ae9969 Fixed: (RuTracker org) Update Privacy to Semi-Private 2022-01-17 17:44:32 -06:00
bakerboy448
06913a2975 Fixed: (PornoLab) Update Privacy to Semi-Private 2022-01-17 17:44:21 -06:00
Qstick
dad16f2c57 Bump version to 0.2 2022-01-16 14:22:32 -06:00
François-Xavier Payet
2f22e7295c Changes Uri.ToString() to Uri.AbsoluteUri to prevent unescaping characters 2022-01-16 11:22:16 -06:00
François-Xavier Payet
6880f38635 Remove extra replacing (already done by Uri.EscapeDataString) 2022-01-16 11:22:16 -06:00
François-Xavier Payet
a7b1ef19f5 Replace WebUtility.UrlEncode to Uri.EscapeDataString 2022-01-16 11:22:16 -06:00
Yukine
aa59da2f22 Fixed: (MoreThanTV) use www url to fix cookie/redirect issues 2022-01-16 01:20:47 -06:00
Yukine
a62a4360e3 New: (MoreThanTV) Add MoreThanTV (#771)
* New: (MoreThanTV) Add MoreThanTV

* fix(ParseUtil): revert change since its already implemented elsewhere

* docs: clarify how to get cookies

* fix: set correct FieldDefinition order in Settings
2022-01-15 21:29:17 -06:00
Robin Dadswell
9e9e666204 Fixed: Postgres default port 2022-01-08 17:19:30 -06:00
bakerboy448
8d23cbf52b New: (DanishBytes) Add .org Alt Link
based on jackett bac28c030295bddeb2cc7934da6257dd013d8f60
2022-01-08 15:26:54 -06:00
Qstick
d925b37066 Update createSentryMiddleware.js 2022-01-08 15:12:07 -06:00
Qstick
9dadb35b98 Fixed: Don't die when definition doesn't exist
Fixes #596
2022-01-03 18:02:41 -06:00
Qstick
79e3e31028 Only dispatch search to enabled, non-failed, indexers
Fixes #712
2022-01-03 17:23:21 -06:00
Qstick
0af8e84d2d Remove unused sortByName import from indexerIndexActions 2022-01-03 16:43:22 -06:00
Qstick
573dde97e5 Default Method in HttpRequest to HttpMethod.Get 2022-01-03 14:13:29 -06:00
Qstick
08d112a96f New: Privacy custom filter option
Fixes #759
2022-01-03 12:35:05 -06:00
Qstick
76b6b0dead Use native HttpMethod 2022-01-03 00:28:20 -06:00
Qstick
e04133d34a More Mono Cleaning 2022-01-02 23:19:47 -06:00
Qstick
07575ae239 Fixed: (LazyLibrarian) Test indexer pull on setup to validate 2022-01-02 21:50:44 -06:00
Qstick
8e7acd8946 Fixed: (Mylar) Test indexer pull on setup to validate Mylar functionality 2022-01-02 21:50:44 -06:00
Qstick
3ecc926298 Fixed: (HDTorrents) Use Sanitized search string on all search types 2022-01-02 19:43:46 -06:00
Qstick
1e532624af Fixed: (Redacted) Guid and FL Parsing in line with Gazelle 2022-01-02 16:45:34 -06:00
Agneev Mukherjee
8a5194e604 Update index.ejs 2022-01-02 13:10:07 -06:00
Agneev Mukherjee
8a73cf72c2 Set login.html theme-color to color of logo 2022-01-02 13:10:07 -06:00
Qstick
76982c5988 Fixed: (Gazelle) Freeleech detection for releases 2022-01-01 20:08:59 -06:00
Qstick
b9dfe5e359 Fixed: (Gazelle) Use InfoUrl for GUID to avoid global duplicates 2022-01-01 20:01:40 -06:00
Qstick
a5e13ca776 New: Genre parameter for Music search 2022-01-01 15:04:05 -06:00
Qstick
e2ddfbff9c New: Genre parameter for Movie search 2022-01-01 14:49:01 -06:00
Qstick
66b4c7891d New: TmdbId Parameter for TV Search 2022-01-01 14:43:49 -06:00
Qstick
480a76c290 New: Support for language metadata 2022-01-01 14:23:44 -06:00
Servarr
1373ab255d Automated API Docs update 2021-12-31 19:41:04 -06:00
Qstick
1dc00eb445 More powerful label actions 2021-12-31 19:34:28 -06:00
Qstick
a366bec684 New: Reenable TV Id search for PrivateHD 2021-12-31 19:34:28 -06:00
Qstick
ecca6e9f49 New: TVDB and TMDB Search for AvistaZ
Ref #717
2021-12-31 19:34:13 -06:00
Yukine
03db7a9bbd Fixed: (SpeedApp) correct categories in query string of requests 2021-12-31 15:09:03 -06:00
Qstick
9cb04466c1 DbType param on update check 2021-12-31 13:19:22 -06:00
bakerboy448
2bae37d0c5 Fixed: (TorrentLeech) Calculating Incorrect Age
Closes #720
2021-12-31 11:04:31 -06:00
bakerboy448
0dbd23c52b Fixed: Various Translations 2021-12-30 23:34:48 -06:00
bakerboy448
66a6311dcc Fixed: SemiPublic => SemiPrivate 2021-12-30 23:00:19 -06:00
Servarr
c5b111530c Automated API Docs update 2021-12-30 22:37:20 -06:00
Qstick
77724a50a4 Really fix Github token for API PR 2021-12-30 21:42:46 -06:00
Qstick
22cbd01c57 Bump to 0.1.10 2021-12-30 19:03:04 -06:00
Yukine
fd55a624a7 Fixed: (AnimeBytes) Do not Page requests 2021-12-30 19:01:42 -06:00
Qstick
75984e954e Update LocalizationController.cs 2021-12-30 18:41:56 -06:00
Qstick
3fce120578 Fixed: (SpeedApp) Map Categories instead of building
Fixes #574
2021-12-30 18:30:58 -06:00
Qstick
6e8fb22c71 New: Additional logging for InvalidModel BadRequest API calls
[common]
2021-12-30 17:51:34 -06:00
Qstick
8ec7a4898d Maintain PrimaryKey and AutoIncrement on some schemas
[common]
2021-12-30 17:51:11 -06:00
Qstick
642848d331 API Annotations 2021-12-30 15:51:51 -06:00
Qstick
c9e6a0339e Fixed: (Cardigann) Indexer privacy tweaks, Semi-Public fixes
Fixes #744
2021-12-29 18:18:09 -06:00
Qstick
25620e8670 Fix GitHub token variable usage 2021-12-29 18:16:36 -06:00
Mouton99
5b804e8f3a New: (TorrentSeeds) Migrate to API & YML 2021-12-29 18:14:22 -06:00
Servarr
548db6a5cd Automated API Docs update 2021-12-29 17:48:53 -06:00
146 changed files with 1453 additions and 1038 deletions

.github/label-actions.yml (new file)

@@ -0,0 +1,16 @@
# Configuration for Label Actions - https://github.com/dessant/label-actions
'Type: Support':
comment: >
:wave: @{issue-author}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears
to be a support request. Please hop over onto our [Discord](https://prowlarr.com/discord)
or [Subreddit](https://reddit.com/r/prowlarr)
close: true
'Type: Indexer Request':
comment: >
:wave: @{issue-author}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears
to be an indexer request. Please use our Indexer request [site](https://requests.prowlarr.com/)
close: true

.github/workflows/label-actions.yml (new file)

@@ -0,0 +1,23 @@
name: 'Label Actions'
on:
issues:
types: [labeled, unlabeled]
pull_request:
types: [labeled, unlabeled]
discussion:
types: [labeled, unlabeled]
permissions:
contents: read
issues: write
pull-requests: write
discussions: write
jobs:
action:
runs-on: ubuntu-latest
steps:
- uses: dessant/label-actions@v2
with:
process-only: 'issues, prs'


@@ -1,21 +0,0 @@
name: 'Support requests'
on:
issues:
types: [labeled, unlabeled, reopened]
jobs:
support:
runs-on: ubuntu-latest
steps:
- uses: dessant/support-requests@v2
with:
github-token: ${{ github.token }}
support-label: 'Type: Support'
issue-comment: >
:wave: @{issue-author}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears
to be a support request. Please hop over onto our [Discord](https://prowlarr.com/discord)
or [Subreddit](https://reddit.com/r/prowlarr)
close-issue: true
lock-issue: false


@@ -7,7 +7,7 @@ variables:
outputFolder: './_output'
artifactsFolder: './_artifacts'
testsFolder: './_tests'
majorVersion: '0.1.9'
majorVersion: '0.2.0'
minorVersion: $[counter('minorVersion', 1)]
prowlarrVersion: '$(majorVersion).$(minorVersion)'
buildName: '$(Build.SourceBranchName).$(prowlarrVersion)'
@@ -857,14 +857,14 @@ stages:
then
git commit -am 'Automated API Docs update'
git push -f --set-upstream origin api-docs
curl -X POST -H 'Authorization: token $GITHUB_TOKEN' -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/prowlarr/prowlarr/pulls -d '{"head":"api-docs","base":"develop","title":"Update API docs"}'
curl -X POST -H "Authorization: token ${GITHUBTOKEN}" -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/prowlarr/prowlarr/pulls -d '{"head":"api-docs","base":"develop","title":"Update API docs"}'
else
echo "No changes since last run"
fi
displayName: Commit API Doc Change
continueOnError: true
env:
GITHUB_TOKEN: $(githubToken)
GITHUBTOKEN: $(githubToken)
- task: CopyFiles@2
displayName: 'Copy openapi.json to: $(Build.ArtifactStagingDirectory)'
inputs:


@@ -77,7 +77,9 @@ function AppUpdatedModalContent(props) {
<div>
{
!update.changes &&
<div className={styles.maintenance}>Maintenance release</div>
<div className={styles.maintenance}>
{translate('MaintenanceRelease')}
</div>
}
{


@@ -166,7 +166,9 @@ class FilterBuilderModalContent extends Component {
</div>
</div>
<div className={styles.label}>Filters</div>
<div className={styles.label}>
{translate('Filters')}
</div>
<div className={styles.rows}>
{


@@ -8,6 +8,7 @@ import BoolFilterBuilderRowValue from './BoolFilterBuilderRowValue';
import DateFilterBuilderRowValue from './DateFilterBuilderRowValue';
import FilterBuilderRowValueConnector from './FilterBuilderRowValueConnector';
import IndexerFilterBuilderRowValueConnector from './IndexerFilterBuilderRowValueConnector';
import PrivacyFilterBuilderRowValue from './PrivacyFilterBuilderRowValue';
import ProtocolFilterBuilderRowValue from './ProtocolFilterBuilderRowValue';
import TagFilterBuilderRowValueConnector from './TagFilterBuilderRowValueConnector';
import styles from './FilterBuilderRow.css';
@@ -63,6 +64,9 @@ function getRowValueConnector(selectedFilterBuilderProp) {
case filterBuilderValueTypes.PROTOCOL:
return ProtocolFilterBuilderRowValue;
case filterBuilderValueTypes.PRIVACY:
return PrivacyFilterBuilderRowValue;
case filterBuilderValueTypes.TAG:
return TagFilterBuilderRowValueConnector;


@@ -0,0 +1,20 @@
import React from 'react';
import translate from 'Utilities/String/translate';
import FilterBuilderRowValue from './FilterBuilderRowValue';
const privacyTypes = [
{ id: 'public', name: translate('Public') },
{ id: 'private', name: translate('Private') },
{ id: 'semiPrivate', name: translate('SemiPrivate') }
];
function PrivacyFilterBuilderRowValue(props) {
return (
<FilterBuilderRowValue
tagList={privacyTypes}
{...props}
/>
);
}
export default PrivacyFilterBuilderRowValue;


@@ -4,6 +4,7 @@ export const DATE = 'date';
export const DEFAULT = 'default';
export const INDEXER = 'indexer';
export const PROTOCOL = 'protocol';
export const PRIVACY = 'privacy';
export const APP_PROFILE = 'appProfile';
export const MOVIE_STATUS = 'movieStatus';
export const TAG = 'tag';


@@ -41,7 +41,7 @@ function HistoryDetails(props) {
{
!!data &&
<DescriptionListItem
title={'Query Results'}
title={translate('QueryResults')}
data={queryResults ? queryResults : '-'}
/>
}
@@ -49,7 +49,7 @@ function HistoryDetails(props) {
{
!!data &&
<DescriptionListItem
title={'Categories'}
title={translate('Categories')}
data={categories ? categories : '-'}
/>
}
@@ -57,7 +57,7 @@ function HistoryDetails(props) {
{
!!data &&
<DescriptionListItem
title={'Source'}
title={translate('Source')}
data={source}
/>
}
@@ -65,7 +65,7 @@ function HistoryDetails(props) {
{
!!data &&
<DescriptionListItem
title={'Url'}
title={translate('Url')}
data={url ? <Link to={url}>{translate('Link')}</Link> : '-'}
/>
}
@@ -93,7 +93,7 @@ function HistoryDetails(props) {
{
!!data &&
<DescriptionListItem
title={'Source'}
title={translate('Source')}
data={source ? source : '-'}
/>
}
@@ -101,7 +101,7 @@ function HistoryDetails(props) {
{
!!data &&
<DescriptionListItem
title={'Title'}
title={translate('Title')}
data={title ? title : '-'}
/>
}
@@ -109,7 +109,7 @@ function HistoryDetails(props) {
{
!!data &&
<DescriptionListItem
title={'Url'}
title={translate('Url')}
data={url ? <Link to={url}>{translate('Link')}</Link> : '-'}
/>
}


@@ -4,6 +4,7 @@ import FormGroup from 'Components/Form/FormGroup';
import FormInputGroup from 'Components/Form/FormInputGroup';
import FormLabel from 'Components/Form/FormLabel';
import { inputTypes } from 'Helpers/Props';
import translate from 'Utilities/String/translate';
class HistoryOptions extends Component {
@@ -56,14 +57,14 @@ class HistoryOptions extends Component {
return (
<Fragment>
<FormGroup>
<FormLabel>History Cleanup</FormLabel>
<FormLabel>{translate('HistoryCleanup')}</FormLabel>
<FormInputGroup
type={inputTypes.NUMBER}
name="historyCleanupDays"
value={historyCleanupDays}
helpText="Set to 0 to disable automatic cleanup"
helpTextWarning="History items older than the selected number of days will be cleaned up automatically"
helpText={translate('HistoryCleanupDaysHelpText')}
helpTextWarning={translate('HistoryCleanupDaysHelpTextWarning')}
onChange={this.onGlobalInputChange}
/>
</FormGroup>


@@ -4,7 +4,7 @@ import Modal from 'Components/Modal/Modal';
import AddIndexerModalContentConnector from './AddIndexerModalContentConnector';
import styles from './AddIndexerModal.css';
function AddIndexerModal({ isOpen, onModalClose, ...otherProps }) {
function AddIndexerModal({ isOpen, onModalClose, onSelectIndexer, ...otherProps }) {
return (
<Modal
isOpen={isOpen}
@@ -14,6 +14,7 @@ function AddIndexerModal({ isOpen, onModalClose, ...otherProps }) {
<AddIndexerModalContentConnector
{...otherProps}
onModalClose={onModalClose}
onSelectIndexer={onSelectIndexer}
/>
</Modal>
);
@@ -21,7 +22,8 @@ function AddIndexerModal({ isOpen, onModalClose, ...otherProps }) {
AddIndexerModal.propTypes = {
isOpen: PropTypes.bool.isRequired,
onModalClose: PropTypes.func.isRequired
onModalClose: PropTypes.func.isRequired,
onSelectIndexer: PropTypes.func.isRequired
};
export default AddIndexerModal;


@@ -15,7 +15,7 @@ import TableBody from 'Components/Table/TableBody';
import { kinds, scrollDirections } from 'Helpers/Props';
import getErrorMessage from 'Utilities/Object/getErrorMessage';
import translate from 'Utilities/String/translate';
import SelectIndexerRow from './SelectIndexerRow';
import SelectIndexerRowConnector from './SelectIndexerRowConnector';
import styles from './AddIndexerModalContent.css';
const columns = [
@@ -56,6 +56,21 @@ const protocols = [
}
];
const privacyLevels = [
{
key: 'private',
value: translate('Private')
},
{
key: 'semiPrivate',
value: translate('SemiPrivate')
},
{
key: 'public',
value: translate('Public')
}
];
class AddIndexerModalContent extends Component {
//
@@ -99,10 +114,6 @@ class AddIndexerModalContent extends Component {
.sort((a, b) => a.localeCompare(b))
.map((language) => ({ key: language, value: language }));
const privacyLevels = Array.from(new Set(indexers.map(({ privacy }) => privacy)))
.sort((a, b) => a.localeCompare(b))
.map((privacy) => ({ key: privacy, value: privacy }));
const filteredIndexers = indexers.filter((indexer) => {
const { filter, filterProtocols, filterLanguages, filterPrivacyLevels } = this.state;
@@ -208,7 +219,7 @@ class AddIndexerModalContent extends Component {
<TableBody>
{
filteredIndexers.map((indexer) => (
<SelectIndexerRow
<SelectIndexerRowConnector
key={indexer.name}
implementation={indexer.implementation}
{...indexer}


@@ -51,7 +51,7 @@ class AddIndexerModalContentConnector extends Component {
onIndexerSelect = ({ implementation, name }) => {
this.props.selectIndexerSchema({ implementation, name });
this.props.onModalClose({ indexerSelected: true });
this.props.onSelectIndexer();
};
onSortPress = (sortKey, sortDirection) => {
@@ -76,7 +76,8 @@ AddIndexerModalContentConnector.propTypes = {
fetchIndexerSchema: PropTypes.func.isRequired,
selectIndexerSchema: PropTypes.func.isRequired,
setIndexerSchemaSort: PropTypes.func.isRequired,
onModalClose: PropTypes.func.isRequired
onModalClose: PropTypes.func.isRequired,
onSelectIndexer: PropTypes.func.isRequired
};
export default connect(createMapStateToProps, mapDispatchToProps)(AddIndexerModalContentConnector);


@@ -3,3 +3,9 @@
width: 32px;
}
.alreadyExistsIcon {
margin-left: 10px;
color: #37bc9b;
pointer-events: all;
}


@@ -1,8 +1,12 @@
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import Icon from 'Components/Icon';
import TableRowCell from 'Components/Table/Cells/TableRowCell';
import TableRowButton from 'Components/Table/TableRowButton';
import { icons } from 'Helpers/Props';
import ProtocolLabel from 'Indexer/Index/Table/ProtocolLabel';
import firstCharToUpper from 'Utilities/String/firstCharToUpper';
import translate from 'Utilities/String/translate';
import styles from './SelectIndexerRow.css';
class SelectIndexerRow extends Component {
@@ -27,7 +31,8 @@ class SelectIndexerRow extends Component {
protocol,
privacy,
name,
language
language,
isExistingIndexer
} = this.props;
return (
@@ -40,6 +45,16 @@ class SelectIndexerRow extends Component {
<TableRowCell>
{name}
{
isExistingIndexer ?
<Icon
className={styles.alreadyExistsIcon}
name={icons.CHECK_CIRCLE}
size={15}
title={translate('IndexerAlreadySetup')}
/> :
null
}
</TableRowCell>
<TableRowCell>
@@ -47,7 +62,7 @@ class SelectIndexerRow extends Component {
</TableRowCell>
<TableRowCell>
{privacy}
{translate(firstCharToUpper(privacy))}
</TableRowCell>
</TableRowButton>
);
@@ -60,7 +75,8 @@ SelectIndexerRow.propTypes = {
privacy: PropTypes.string.isRequired,
language: PropTypes.string.isRequired,
implementation: PropTypes.string.isRequired,
onIndexerSelect: PropTypes.func.isRequired
onIndexerSelect: PropTypes.func.isRequired,
isExistingIndexer: PropTypes.bool.isRequired
};
export default SelectIndexerRow;


@@ -0,0 +1,18 @@
import { connect } from 'react-redux';
import { createSelector } from 'reselect';
import createExistingIndexerSelector from 'Store/Selectors/createExistingIndexerSelector';
import SelectIndexerRow from './SelectIndexerRow';
function createMapStateToProps() {
return createSelector(
createExistingIndexerSelector(),
(isExistingIndexer, dimensions) => {
return {
isExistingIndexer
};
}
);
}
export default connect(createMapStateToProps)(SelectIndexerRow);


@@ -39,6 +39,7 @@ function EditIndexerModalContent(props) {
const {
id,
implementationName,
definitionName,
name,
enable,
redirect,
@@ -50,10 +51,12 @@ function EditIndexerModalContent(props) {
priority
} = item;
const indexerDisplayName = implementationName === definitionName ? implementationName : `${implementationName} (${definitionName})`;
return (
<ModalContent onModalClose={onModalClose}>
<ModalHeader>
{`${id ? translate('EditIndexer') : translate('AddIndexer')} - ${implementationName}`}
{`${id ? translate('EditIndexer') : translate('AddIndexer')} - ${indexerDisplayName}`}
</ModalHeader>
<ModalBody>
@@ -159,7 +162,7 @@ function EditIndexerModalContent(props) {
<FormInputGroup
type={inputTypes.TAG}
name="tags"
helpText="Use tags to specify default clients, specify Indexer Proxies, or just to organize your indexers."
helpText={translate('IndexerTagsHelpText')}
{...tags}
onChange={onInputChange}
/>


@@ -193,11 +193,12 @@ class IndexerIndex extends Component {
this.setState({ isAddIndexerModalOpen: true });
};
onAddIndexerModalClose = ({ indexerSelected = false } = {}) => {
this.setState({
isAddIndexerModalOpen: false,
isEditIndexerModalOpen: indexerSelected
});
onAddIndexerModalClose = () => {
this.setState({ isAddIndexerModalOpen: false });
};
onAddIndexerSelectIndexer = () => {
this.setState({ isEditIndexerModalOpen: true });
};
onEditIndexerModalClose = () => {
@@ -302,14 +303,14 @@ class IndexerIndex extends Component {
<PageToolbar>
<PageToolbarSection>
<PageToolbarButton
label={'Add Indexer'}
label={translate('AddIndexer')}
iconName={icons.ADD}
spinningName={icons.ADD}
onPress={this.onAddIndexerPress}
/>
<PageToolbarButton
label={'Test All Indexers'}
label={translate('TestAllIndexers')}
iconName={icons.TEST}
isSpinning={isTestingAll}
isDisabled={hasNoIndexer}
@@ -321,13 +322,13 @@ class IndexerIndex extends Component {
{
isMovieEditorActive ?
<PageToolbarButton
label={'Indexers'}
label={translate('Indexers')}
iconName={icons.MOVIE_CONTINUING}
isDisabled={hasNoIndexer}
onPress={this.onMovieEditorTogglePress}
/> :
<PageToolbarButton
label={'Mass Editor'}
label={translate('MassEditor')}
iconName={icons.EDIT}
isDisabled={hasNoIndexer}
onPress={this.onMovieEditorTogglePress}
@@ -463,6 +464,7 @@ class IndexerIndex extends Component {
<AddIndexerModal
isOpen={isAddIndexerModalOpen}
onModalClose={this.onAddIndexerModalClose}
onSelectIndexer={this.onAddIndexerSelectIndexer}
/>
<EditIndexerModalConnector


@@ -240,16 +240,19 @@ class IndexerIndexRow extends Component {
>
<IconButton
name={icons.INFO}
title={'Indexer info'}
title={translate('IndexerInfo')}
onPress={this.onIndexerInfoPress}
/>
<IconButton
className={styles.externalLink}
name={icons.EXTERNAL_LINK}
title={'Website'}
to={indexerUrls[0].replace('api.', '')}
/>
{
indexerUrls ?
<IconButton
className={styles.externalLink}
name={icons.EXTERNAL_LINK}
title={translate('Website')}
to={indexerUrls[0].replace('api.', '')}
/> : null
}
<IconButton
name={icons.EDIT}
@@ -289,7 +292,7 @@ class IndexerIndexRow extends Component {
IndexerIndexRow.propTypes = {
id: PropTypes.number.isRequired,
indexerUrls: PropTypes.arrayOf(PropTypes.string).isRequired,
indexerUrls: PropTypes.arrayOf(PropTypes.string),
protocol: PropTypes.string.isRequired,
privacy: PropTypes.string.isRequired,
priority: PropTypes.number.isRequired,
@@ -298,7 +301,7 @@ IndexerIndexRow.propTypes = {
redirect: PropTypes.bool.isRequired,
appProfile: PropTypes.object.isRequired,
status: PropTypes.object,
capabilities: PropTypes.object.isRequired,
capabilities: PropTypes.object,
added: PropTypes.string.isRequired,
tags: PropTypes.arrayOf(PropTypes.number).isRequired,
columns: PropTypes.arrayOf(PropTypes.object).isRequired,


@@ -299,7 +299,7 @@ class SearchIndexRow extends Component {
<IconButton
className={styles.downloadLink}
name={icons.SAVE}
title={'Save'}
title={translate('Save')}
to={downloadUrl}
/>
</VirtualTableRowCell>


@@ -61,7 +61,7 @@ class Applications extends Component {
return (
<FieldSet legend={translate('Applications')}>
<PageSectionContent
errorMessage="Unable to load application list"
errorMessage={translate('UnableToLoadApplicationList')}
{...otherProps}
>
<div className={styles.applications}>


@@ -61,7 +61,7 @@ class IndexerProxies extends Component {
} = this.state;
return (
<FieldSet legend={translate('Indexer Proxies')}>
<FieldSet legend={translate('IndexerProxies')}>
<PageSectionContent
errorMessage={translate('UnableToLoadIndexerProxies')}
{...otherProps}


@@ -67,7 +67,7 @@ function TagDetailsModalContent(props) {
{
!!indexerProxies.length &&
<FieldSet legend={translate('Indexer Proxies')}>
<FieldSet legend={translate('IndexerProxies')}>
{
indexerProxies.map((item) => {
return (


@@ -118,12 +118,12 @@ export const defaultState = {
filterBuilderProps: [
{
name: 'name',
label: 'Indexer Name',
label: translate('IndexerName'),
type: filterBuilderTypes.STRING
},
{
name: 'enable',
label: 'Enabled',
label: translate('Enabled'),
type: filterBuilderTypes.EXACT,
valueType: filterBuilderValueTypes.BOOL
},
@@ -135,15 +135,21 @@ export const defaultState = {
},
{
name: 'priority',
label: 'Priority',
label: translate('Priority'),
type: filterBuilderTypes.NUMBER
},
{
name: 'protocol',
label: 'Protocol',
label: translate('Protocol'),
type: filterBuilderTypes.EXACT,
valueType: filterBuilderValueTypes.PROTOCOL
},
{
name: 'privacy',
label: translate('Privacy'),
type: filterBuilderTypes.EXACT,
valueType: filterBuilderValueTypes.PRIVACY
},
{
name: 'appProfileId',
label: translate('AppProfile'),


@@ -80,8 +80,8 @@ export default function createSentryMiddleware() {
return;
}
const dsn = isProduction ? 'https://b0fb75c38ef4487dbf742f79c4ba62d2@sentry.servarr.com/12' :
'https://da610619280249f891ec3ee306906793@sentry.servarr.com/13';
const dsn = isProduction ? 'https://b233094711fe4430a0b0c5da2e01df93@sentry.servarr.com/28' :
'https://116efebd253a4dff9df9475a31510001@sentry.servarr.com/37';
sentry.init({
dsn,


@@ -1,14 +0,0 @@
import _ from 'lodash';
import { createSelector } from 'reselect';
function createExclusionMovieSelector() {
return createSelector(
(state, { tmdbId }) => tmdbId,
(state) => state.settings.importExclusions,
(tmdbId, importExclusions) => {
return _.some(importExclusions.items, { tmdbId });
}
);
}
export default createExclusionMovieSelector;


@@ -0,0 +1,15 @@
import _ from 'lodash';
import { createSelector } from 'reselect';
import createAllIndexersSelector from './createAllIndexersSelector';
function createExistingIndexerSelector() {
return createSelector(
(state, { definitionName }) => definitionName,
createAllIndexersSelector(),
(definitionName, indexers) => {
return _.some(indexers, { definitionName });
}
);
}
export default createExistingIndexerSelector;


@@ -36,6 +36,12 @@ function selectSettings(item, pendingChanges, saveError) {
return result;
}
if (key === 'definitionName') {
result.definitionName = item[key];
return result;
}
const setting = {
value: item[key],
errors: _.map(_.remove(validationFailures, (failure) => {


@@ -199,7 +199,7 @@ class QueuedTaskRow extends Component {
</span>
{
clientUserAgent ?
<span className={styles.userAgent} title="User-Agent provided by the app that called the API">
<span className={styles.userAgent} title={translate('UserAgentProvidedByTheAppThatCalledTheAPI')}>
from: {clientUserAgent}
</span> :
null


@@ -6,8 +6,8 @@
<meta name="mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-capable" content="yes" />
<!-- Chrome, Opera, and Firefox OS -->
<meta name="theme-color" content="#3a3f51" />
<!-- Chrome, Safari, Opera, and Firefox OS -->
<meta name="theme-color" content="#e66001" />
<!-- Windows Phone -->
<meta name="msapplication-navbutton-color" content="#3a3f51" />


@@ -6,8 +6,8 @@
<meta name="mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-capable" content="yes" />
<!-- Chrome, Opera, and Firefox OS -->
<meta name="theme-color" content="#464b51" />
<!-- Chrome, Safari, Opera, and Firefox OS -->
<meta name="theme-color" content="#e66001" />
<!-- Windows Phone -->
<meta name="msapplication-navbutton-color" content="#464b51" />


@@ -229,11 +229,6 @@ namespace NzbDrone.Common.Test.Http
[Test]
public void should_follow_redirects_to_https()
{
if (typeof(TDispatcher) == typeof(ManagedHttpDispatcher) && PlatformInfo.IsMono)
{
Assert.Ignore("Will fail on tls1.2 via managed dispatcher, ignore.");
}
var request = new HttpRequestBuilder($"https://{_httpBinHost}/redirect-to")
.AddQueryParam("url", $"https://radarr.video/")
.Build();


@@ -34,19 +34,10 @@ namespace NzbDrone.Common.Http.Dispatchers
{
var webRequest = (HttpWebRequest)WebRequest.Create((Uri)request.Url);
if (PlatformInfo.IsMono)
{
// On Mono GZipStream/DeflateStream leaks memory if an exception is thrown, use an intermediate buffer in that case.
webRequest.AutomaticDecompression = DecompressionMethods.None;
webRequest.Headers.Add("Accept-Encoding", "gzip");
}
else
{
// Deflate is not a standard and could break depending on implementation.
// we should just stick with the more compatible Gzip
//http://stackoverflow.com/questions/8490718/how-to-decompress-stream-deflated-with-java-util-zip-deflater-in-net
webRequest.AutomaticDecompression = DecompressionMethods.GZip;
}
// Deflate is not a standard and could break depending on implementation.
// we should just stick with the more compatible Gzip
//http://stackoverflow.com/questions/8490718/how-to-decompress-stream-deflated-with-java-util-zip-deflater-in-net
webRequest.AutomaticDecompression = DecompressionMethods.GZip;
webRequest.Method = request.Method.ToString();
webRequest.UserAgent = _userAgentBuilder.GetUserAgent(request.UseSimplifiedUserAgent);
@@ -91,9 +82,6 @@ namespace NzbDrone.Common.Http.Dispatchers
if (httpWebResponse == null)
{
// Workaround for mono not closing connections properly in certain situations.
AbortWebRequest(webRequest);
// The default messages for WebException on mono are pretty horrible.
if (e.Status == WebExceptionStatus.NameResolutionFailure)
{
@@ -127,19 +115,6 @@ namespace NzbDrone.Common.Http.Dispatchers
try
{
data = await responseStream.ToBytes();
if (PlatformInfo.IsMono && httpWebResponse.ContentEncoding == "gzip")
{
using (var compressedStream = new MemoryStream(data))
using (var gzip = new GZipStream(compressedStream, CompressionMode.Decompress))
using (var decompressedStream = new MemoryStream())
{
gzip.CopyTo(decompressedStream);
data = decompressedStream.ToArray();
}
httpWebResponse.Headers.Remove("Content-Encoding");
}
}
catch (Exception ex)
{
@@ -264,36 +239,5 @@ namespace NzbDrone.Common.Http.Dispatchers
}
}
}
// Workaround for mono not closing connections properly on timeouts
private void AbortWebRequest(HttpWebRequest webRequest)
{
// First affected version was mono 5.16
if (OsInfo.IsNotWindows && _platformInfo.Version >= new Version(5, 16))
{
try
{
var currentOperationInfo = webRequest.GetType().GetField("currentOperation", BindingFlags.NonPublic | BindingFlags.Instance);
var currentOperation = currentOperationInfo.GetValue(webRequest);
if (currentOperation != null)
{
var responseStreamInfo = currentOperation.GetType().GetField("responseStream", BindingFlags.NonPublic | BindingFlags.Instance);
var responseStream = responseStreamInfo.GetValue(currentOperation) as Stream;
// Note that responseStream will likely be null once mono fixes it.
responseStream?.Dispose();
}
}
catch (Exception ex)
{
// This can fail randomly on future mono versions that have been changed/fixed. Log to sentry and ignore.
_logger.Trace()
.Exception(ex)
.Message("Unable to dispose responseStream on mono {0}", _platformInfo.Version)
.Write();
}
}
}
}
}


@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using NLog;
using NzbDrone.Common.Cache;
@@ -87,7 +88,7 @@ namespace NzbDrone.Common.Http
// 302 or 303 should default to GET on redirect even if POST on original
if (response.StatusCode == HttpStatusCode.Redirect || response.StatusCode == HttpStatusCode.RedirectMethod)
{
request.Method = HttpMethod.GET;
request.Method = HttpMethod.Get;
request.ContentData = null;
}
@@ -263,7 +264,7 @@ namespace NzbDrone.Common.Http
public Task<HttpResponse> GetAsync(HttpRequest request)
{
request.Method = HttpMethod.GET;
request.Method = HttpMethod.Get;
return ExecuteAsync(request);
}
@@ -288,7 +289,7 @@ namespace NzbDrone.Common.Http
public Task<HttpResponse> HeadAsync(HttpRequest request)
{
request.Method = HttpMethod.HEAD;
request.Method = HttpMethod.Head;
return ExecuteAsync(request);
}
@@ -299,7 +300,7 @@ namespace NzbDrone.Common.Http
public Task<HttpResponse> PostAsync(HttpRequest request)
{
request.Method = HttpMethod.POST;
request.Method = HttpMethod.Post;
return ExecuteAsync(request);
}


@@ -1,14 +0,0 @@
namespace NzbDrone.Common.Http
{
public enum HttpMethod
{
GET,
POST,
PUT,
DELETE,
HEAD,
OPTIONS,
PATCH,
MERGE
}
}


@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Text;
using NzbDrone.Common.EnvironmentInfo;
using NzbDrone.Common.Extensions;
@@ -13,6 +14,7 @@ namespace NzbDrone.Common.Http
{
Url = new HttpUri(url);
Headers = new HttpHeader();
Method = HttpMethod.Get;
ConnectionKeepAlive = true;
AllowAutoRedirect = true;
Cookies = new Dictionary<string, string>();


@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using NzbDrone.Common.Extensions;
@@ -37,7 +38,7 @@ namespace NzbDrone.Common.Http
{
BaseUrl = new HttpUri(baseUrl);
ResourceUrl = string.Empty;
Method = HttpMethod.GET;
Method = HttpMethod.Get;
Encoding = Encoding.UTF8;
QueryParams = new List<KeyValuePair<string, string>>();
SuffixQueryParams = new List<KeyValuePair<string, string>>();
@@ -275,7 +276,7 @@ namespace NzbDrone.Common.Http
public virtual HttpRequestBuilder Post()
{
Method = HttpMethod.POST;
Method = HttpMethod.Post;
return this;
}
@@ -397,7 +398,7 @@ namespace NzbDrone.Common.Http
public virtual HttpRequestBuilder AddFormParameter(string key, object value)
{
if (Method != HttpMethod.POST)
if (Method != HttpMethod.Post)
{
throw new NotSupportedException("HttpRequest Method must be POST to add FormParameter.");
}
@@ -413,7 +414,7 @@ namespace NzbDrone.Common.Http
public virtual HttpRequestBuilder AddFormUpload(string name, string fileName, byte[] data, string contentType = "application/octet-stream")
{
if (Method != HttpMethod.POST)
if (Method != HttpMethod.Post)
{
throw new NotSupportedException("HttpRequest Method must be POST to add FormUpload.");
}


@@ -1,6 +1,7 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using Newtonsoft.Json;
using NzbDrone.Common.Serializer;
@@ -17,14 +18,14 @@ namespace NzbDrone.Common.Http
public JsonRpcRequestBuilder(string baseUrl)
: base(baseUrl)
{
Method = HttpMethod.POST;
Method = HttpMethod.Post;
JsonParameters = new List<object>();
}
public JsonRpcRequestBuilder(string baseUrl, string method, IEnumerable<object> parameters)
: base(baseUrl)
{
Method = HttpMethod.POST;
Method = HttpMethod.Post;
JsonMethod = method;
JsonParameters = parameters.ToList();
}


@@ -38,16 +38,6 @@ namespace NzbDrone.Common.Instrumentation
return;
}
if (PlatformInfo.IsMono)
{
if ((exception is TypeInitializationException && exception.InnerException is DllNotFoundException) ||
exception is DllNotFoundException)
{
Logger.Debug(exception, "Minor Fail: " + exception.Message);
return;
}
}
Console.WriteLine("EPIC FAIL: {0}", exception);
Logger.Fatal(exception, "EPIC FAIL.");
}


@@ -106,13 +106,6 @@ namespace NzbDrone.Common.Instrumentation.Sentry
o.Debug = false;
o.DiagnosticLevel = SentryLevel.Debug;
o.Release = BuildInfo.Release;
if (PlatformInfo.IsMono)
{
// Mono 6.0 broke GzipStream.WriteAsync
// TODO: Check specific version
o.RequestBodyCompressionLevel = System.IO.Compression.CompressionLevel.NoCompression;
}
o.BeforeSend = x => SentryCleanser.CleanseEvent(x);
o.BeforeBreadcrumb = x => SentryCleanser.CleanseBreadcrumb(x);
o.Environment = BuildInfo.Branch;
@@ -155,7 +148,7 @@ namespace NzbDrone.Common.Instrumentation.Sentry
{
scope.SetTag("is_docker", $"{osInfo.IsDocker}");
if (osInfo.Name != null && PlatformInfo.IsMono)
if (osInfo.Name != null && !OsInfo.IsWindows)
{
// Sentry auto-detection of non-Windows platforms isn't that accurate on certain devices.
scope.Contexts.OperatingSystem.Name = osInfo.Name.FirstCharToUpper();


@@ -366,11 +366,6 @@ namespace NzbDrone.Common.Processes
private (string Path, string Args) GetPathAndArgs(string path, string args)
{
if (PlatformInfo.IsMono && path.EndsWith(".exe", StringComparison.InvariantCultureIgnoreCase))
{
return ("mono", $"--debug {path} {args}");
}
if (OsInfo.IsWindows && path.EndsWith(".bat", StringComparison.InvariantCultureIgnoreCase))
{
return ("cmd.exe", $"/c {path} {args}");


@@ -1,4 +1,4 @@
using System.Data;
using System.Data;
using System.Linq;
using FluentAssertions;
using NUnit.Framework;
@@ -38,6 +38,7 @@ namespace NzbDrone.Core.Test.Datastore.SqliteSchemaDumperTests
result.Name.Should().Be(tableName);
result.Columns.Count.Should().Be(1);
result.Columns.First().Name.Should().Be(columnName);
result.Columns.First().IsIdentity.Should().BeTrue();
}
[TestCase(@"CREATE INDEX TestIndex ON TestTable (MyId)", "TestIndex", "TestTable", "MyId")]


@@ -1,6 +1,7 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using Moq;
@@ -34,7 +35,7 @@ namespace NzbDrone.Core.Test.IndexerTests.AvistazTests
var recentFeed = ReadAllText(@"Files/Indexers/Avistaz/recentfeed.json");
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader { { "Content-Type", "application/json" } }, new CookieCollection(), recentFeed)));
var releases = (await Subject.Fetch(new MovieSearchCriteria { Categories = new int[] { 2000 } })).Releases;


@@ -1,6 +1,7 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using Moq;
@@ -34,7 +35,7 @@ namespace NzbDrone.Core.Test.IndexerTests.AvistazTests
var recentFeed = ReadAllText(@"Files/Indexers/PrivateHD/recentfeed.json");
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader { { "Content-Type", "application/json" } }, new CookieCollection(), recentFeed)));
var releases = (await Subject.Fetch(new MovieSearchCriteria { Categories = new int[] { 2000 } })).Releases;


@@ -1,6 +1,7 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using Moq;
@@ -33,7 +34,7 @@ namespace NzbDrone.Core.Test.IndexerTests.FileListTests
var recentFeed = ReadAllText(@"Files/Indexers/FileList/recentfeed.json");
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), recentFeed)));
var releases = (await Subject.Fetch(new MovieSearchCriteria { Categories = new int[] { 2000 } })).Releases;


@@ -1,6 +1,7 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using FluentAssertions;
@@ -45,7 +46,7 @@ namespace NzbDrone.Core.Test.IndexerTests.HDBitsTests
var responseJson = ReadAllText(fileName);
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.POST), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Post), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), responseJson)));
var torrents = (await Subject.Fetch(_movieSearchCriteria)).Releases;


@@ -1,6 +1,7 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using Moq;
@@ -43,7 +44,7 @@ namespace NzbDrone.Core.Test.IndexerTests.NewznabTests
var recentFeed = ReadAllText(@"Files/Indexers/Newznab/newznab_nzb_su.xml");
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), recentFeed)));
var releases = (await Subject.Fetch(new MovieSearchCriteria { Categories = new int[] { 2000 }, Limit = 100, Offset = 0 })).Releases;


@@ -1,5 +1,6 @@
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using Moq;
@@ -37,11 +38,11 @@ namespace NzbDrone.Core.Test.IndexerTests.PTPTests
var responseJson = ReadAllText(fileName);
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.POST), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Post), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), authStream.ToString())));
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader { ContentType = HttpAccept.Json.Value }, new CookieCollection(), responseJson)));
var torrents = (await Subject.Fetch(new MovieSearchCriteria())).Releases;


@@ -1,6 +1,7 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using Moq;
@@ -39,7 +40,7 @@ namespace NzbDrone.Core.Test.IndexerTests.RarbgTests
var recentFeed = ReadAllText(@"Files/Indexers/Rarbg/RecentFeed_v2.json");
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), recentFeed)));
var releases = (await Subject.Fetch(new MovieSearchCriteria { Categories = new int[] { 2000 } })).Releases;
@@ -66,7 +67,7 @@ namespace NzbDrone.Core.Test.IndexerTests.RarbgTests
public async Task should_parse_error_20_as_empty_results()
{
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), "{ error_code: 20, error: \"some message\" }")));
var releases = (await Subject.Fetch(new MovieSearchCriteria { Categories = new int[] { 2000 } })).Releases;
@@ -78,7 +79,7 @@ namespace NzbDrone.Core.Test.IndexerTests.RarbgTests
public async Task should_warn_on_unknown_error()
{
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), "{ error_code: 25, error: \"some message\" }")));
var releases = (await Subject.Fetch(new MovieSearchCriteria { Categories = new int[] { 2000 } })).Releases;


@@ -1,6 +1,7 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using Moq;
@@ -44,7 +45,7 @@ namespace NzbDrone.Core.Test.IndexerTests.TorznabTests
var recentFeed = ReadAllText(@"Files/Indexers/Torznab/torznab_hdaccess_net.xml");
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), recentFeed)));
var releases = (await Subject.Fetch(new MovieSearchCriteria())).Releases;
@@ -73,7 +74,7 @@ namespace NzbDrone.Core.Test.IndexerTests.TorznabTests
var recentFeed = ReadAllText(@"Files/Indexers/Torznab/torznab_tpb.xml");
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), recentFeed)));
var releases = (await Subject.Fetch(new MovieSearchCriteria())).Releases;
@@ -103,7 +104,7 @@ namespace NzbDrone.Core.Test.IndexerTests.TorznabTests
var recentFeed = ReadAllText(@"Files/Indexers/Torznab/torznab_animetosho.xml");
Mocker.GetMock<IIndexerHttpClient>()
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET), Subject.Definition))
.Setup(o => o.ExecuteProxiedAsync(It.Is<HttpRequest>(v => v.Method == HttpMethod.Get), Subject.Definition))
.Returns<HttpRequest, IndexerDefinition>((r, d) => Task.FromResult(new HttpResponse(r, new HttpHeader(), new CookieCollection(), recentFeed)));
var releases = (await Subject.Fetch(new MovieSearchCriteria())).Releases;


@@ -30,9 +30,27 @@ namespace NzbDrone.Core.Test.ParserTests
[TestCase("1", 1)]
[TestCase("11", 11)]
[TestCase("1000 grabs", 1000)]
[TestCase("2.222", 2222)]
[TestCase("2,222", 2222)]
[TestCase("2 222", 2222)]
[TestCase("2,22", 222)]
public void should_parse_int_from_string(string original, int parsedInt)
{
ParseUtil.CoerceInt(original).Should().Be(parsedInt);
}
[TestCase("1.0", 1.0)]
[TestCase("1.1", 1.1)]
[TestCase("1000 grabs", 1000.0)]
[TestCase("2.222", 2.222)]
[TestCase("2,222", 2.222)]
[TestCase("2.222,22", 2222.22)]
[TestCase("2,222.22", 2222.22)]
[TestCase("2 222", 2222.0)]
[TestCase("2,22", 2.22)]
public void should_parse_double_from_string(string original, double parsedInt)
{
ParseUtil.CoerceDouble(original).Should().Be(parsedInt);
}
}
}


@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using FluentValidation.Results;
using Newtonsoft.Json;
using NLog;
@@ -31,13 +32,13 @@ namespace NzbDrone.Core.Applications.LazyLibrarian
public LazyLibrarianStatus GetStatus(LazyLibrarianSettings settings)
{
var request = BuildRequest(settings, "/api", "getVersion", HttpMethod.GET);
var request = BuildRequest(settings, "/api", "getVersion", HttpMethod.Get);
return Execute<LazyLibrarianStatus>(request);
}
public List<LazyLibrarianIndexer> GetIndexers(LazyLibrarianSettings settings)
{
var request = BuildRequest(settings, "/api", "listNabProviders", HttpMethod.GET);
var request = BuildRequest(settings, "/api", "listNabProviders", HttpMethod.Get);
var response = Execute<LazyLibrarianIndexerResponse>(request);
@@ -76,7 +77,7 @@ namespace NzbDrone.Core.Applications.LazyLibrarian
{ "providertype", indexerType.ToString().ToLower() }
};
var request = BuildRequest(settings, "/api", "delProvider", HttpMethod.GET, parameters);
var request = BuildRequest(settings, "/api", "delProvider", HttpMethod.Get, parameters);
CheckForError(Execute<LazyLibrarianStatus>(request));
}
@@ -92,7 +93,7 @@ namespace NzbDrone.Core.Applications.LazyLibrarian
{ "categories", indexer.Categories }
};
var request = BuildRequest(settings, "/api", "addProvider", HttpMethod.GET, parameters);
var request = BuildRequest(settings, "/api", "addProvider", HttpMethod.Get, parameters);
CheckForError(Execute<LazyLibrarianStatus>(request));
return indexer;
}
@@ -110,7 +111,7 @@ namespace NzbDrone.Core.Applications.LazyLibrarian
{ "altername", indexer.Altername }
};
var request = BuildRequest(settings, "/api", "changeProvider", HttpMethod.GET, parameters);
var request = BuildRequest(settings, "/api", "changeProvider", HttpMethod.Get, parameters);
CheckForError(Execute<LazyLibrarianStatus>(request));
return indexer;
}
@@ -133,12 +134,19 @@ namespace NzbDrone.Core.Applications.LazyLibrarian
{
return new ValidationFailure("ApiKey", status.Error.Message);
}
var indexers = GetIndexers(settings);
}
catch (HttpException ex)
{
_logger.Error(ex, "Unable to send test message");
return new ValidationFailure("BaseUrl", "Unable to complete application test");
}
catch (LazyLibrarianException ex)
{
_logger.Error(ex, "Connection test failed");
return new ValidationFailure("", ex.Message);
}
catch (Exception ex)
{
_logger.Error(ex, "Unable to send test message");


@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using FluentValidation.Results;
using Newtonsoft.Json;
using NLog;
@@ -33,13 +34,13 @@ namespace NzbDrone.Core.Applications.Lidarr
public LidarrStatus GetStatus(LidarrSettings settings)
{
var request = BuildRequest(settings, "/api/v1/system/status", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v1/system/status", HttpMethod.Get);
return Execute<LidarrStatus>(request);
}
public List<LidarrIndexer> GetIndexers(LidarrSettings settings)
{
var request = BuildRequest(settings, "/api/v1/indexer", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v1/indexer", HttpMethod.Get);
return Execute<List<LidarrIndexer>>(request);
}
@@ -47,7 +48,7 @@ namespace NzbDrone.Core.Applications.Lidarr
{
try
{
var request = BuildRequest(settings, $"/api/v1/indexer/{indexerId}", HttpMethod.GET);
var request = BuildRequest(settings, $"/api/v1/indexer/{indexerId}", HttpMethod.Get);
return Execute<LidarrIndexer>(request);
}
catch (HttpException ex)
@@ -63,19 +64,19 @@ namespace NzbDrone.Core.Applications.Lidarr
public void RemoveIndexer(int indexerId, LidarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v1/indexer/{indexerId}", HttpMethod.DELETE);
var request = BuildRequest(settings, $"/api/v1/indexer/{indexerId}", HttpMethod.Delete);
_httpClient.Execute(request);
}
public List<LidarrIndexer> GetIndexerSchema(LidarrSettings settings)
{
var request = BuildRequest(settings, "/api/v1/indexer/schema", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v1/indexer/schema", HttpMethod.Get);
return Execute<List<LidarrIndexer>>(request);
}
public LidarrIndexer AddIndexer(LidarrIndexer indexer, LidarrSettings settings)
{
var request = BuildRequest(settings, "/api/v1/indexer", HttpMethod.POST);
var request = BuildRequest(settings, "/api/v1/indexer", HttpMethod.Post);
request.SetContent(indexer.ToJson());
@@ -84,7 +85,7 @@ namespace NzbDrone.Core.Applications.Lidarr
public LidarrIndexer UpdateIndexer(LidarrIndexer indexer, LidarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v1/indexer/{indexer.Id}", HttpMethod.PUT);
var request = BuildRequest(settings, $"/api/v1/indexer/{indexer.Id}", HttpMethod.Put);
request.SetContent(indexer.ToJson());
@@ -93,7 +94,7 @@ namespace NzbDrone.Core.Applications.Lidarr
public ValidationFailure TestConnection(LidarrIndexer indexer, LidarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v1/indexer/test", HttpMethod.POST);
var request = BuildRequest(settings, $"/api/v1/indexer/test", HttpMethod.Post);
request.SetContent(indexer.ToJson());


@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using FluentValidation.Results;
using Newtonsoft.Json;
using NLog;
@@ -31,13 +32,13 @@ namespace NzbDrone.Core.Applications.Mylar
public MylarStatus GetStatus(MylarSettings settings)
{
var request = BuildRequest(settings, "/api", "getVersion", HttpMethod.GET);
var request = BuildRequest(settings, "/api", "getVersion", HttpMethod.Get);
return Execute<MylarStatus>(request);
}
public List<MylarIndexer> GetIndexers(MylarSettings settings)
{
var request = BuildRequest(settings, "/api", "listProviders", HttpMethod.GET);
var request = BuildRequest(settings, "/api", "listProviders", HttpMethod.Get);
var response = Execute<MylarIndexerResponse>(request);
@@ -76,7 +77,7 @@ namespace NzbDrone.Core.Applications.Mylar
{ "providertype", indexerType.ToString().ToLower() }
};
var request = BuildRequest(settings, "/api", "delProvider", HttpMethod.GET, parameters);
var request = BuildRequest(settings, "/api", "delProvider", HttpMethod.Get, parameters);
CheckForError(Execute<MylarStatus>(request));
}
@@ -92,7 +93,7 @@ namespace NzbDrone.Core.Applications.Mylar
{ "categories", indexer.Categories }
};
var request = BuildRequest(settings, "/api", "addProvider", HttpMethod.GET, parameters);
var request = BuildRequest(settings, "/api", "addProvider", HttpMethod.Get, parameters);
CheckForError(Execute<MylarStatus>(request));
return indexer;
}
@@ -110,7 +111,7 @@ namespace NzbDrone.Core.Applications.Mylar
{ "altername", indexer.Altername }
};
var request = BuildRequest(settings, "/api", "changeProvider", HttpMethod.GET, parameters);
var request = BuildRequest(settings, "/api", "changeProvider", HttpMethod.Get, parameters);
CheckForError(Execute<MylarStatus>(request));
return indexer;
}
@@ -133,12 +134,19 @@ namespace NzbDrone.Core.Applications.Mylar
{
return new ValidationFailure("ApiKey", status.Error.Message);
}
var indexers = GetIndexers(settings);
}
catch (HttpException ex)
{
_logger.Error(ex, "Unable to send test message");
return new ValidationFailure("BaseUrl", "Unable to complete application test");
}
catch (MylarException ex)
{
_logger.Error(ex, "Connection test failed");
return new ValidationFailure("", ex.Message);
}
catch (Exception ex)
{
_logger.Error(ex, "Unable to send test message");


@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using FluentValidation.Results;
using Newtonsoft.Json;
using NLog;
@@ -33,13 +34,13 @@ namespace NzbDrone.Core.Applications.Radarr
public RadarrStatus GetStatus(RadarrSettings settings)
{
var request = BuildRequest(settings, "/api/v3/system/status", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v3/system/status", HttpMethod.Get);
return Execute<RadarrStatus>(request);
}
public List<RadarrIndexer> GetIndexers(RadarrSettings settings)
{
var request = BuildRequest(settings, "/api/v3/indexer", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v3/indexer", HttpMethod.Get);
return Execute<List<RadarrIndexer>>(request);
}
@@ -47,7 +48,7 @@ namespace NzbDrone.Core.Applications.Radarr
{
try
{
var request = BuildRequest(settings, $"/api/v3/indexer/{indexerId}", HttpMethod.GET);
var request = BuildRequest(settings, $"/api/v3/indexer/{indexerId}", HttpMethod.Get);
return Execute<RadarrIndexer>(request);
}
catch (HttpException ex)
@@ -63,19 +64,19 @@ namespace NzbDrone.Core.Applications.Radarr
public void RemoveIndexer(int indexerId, RadarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v3/indexer/{indexerId}", HttpMethod.DELETE);
var request = BuildRequest(settings, $"/api/v3/indexer/{indexerId}", HttpMethod.Delete);
_httpClient.Execute(request);
}
public List<RadarrIndexer> GetIndexerSchema(RadarrSettings settings)
{
var request = BuildRequest(settings, "/api/v3/indexer/schema", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v3/indexer/schema", HttpMethod.Get);
return Execute<List<RadarrIndexer>>(request);
}
public RadarrIndexer AddIndexer(RadarrIndexer indexer, RadarrSettings settings)
{
var request = BuildRequest(settings, "/api/v3/indexer", HttpMethod.POST);
var request = BuildRequest(settings, "/api/v3/indexer", HttpMethod.Post);
request.SetContent(indexer.ToJson());
@@ -84,7 +85,7 @@ namespace NzbDrone.Core.Applications.Radarr
public RadarrIndexer UpdateIndexer(RadarrIndexer indexer, RadarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v3/indexer/{indexer.Id}", HttpMethod.PUT);
var request = BuildRequest(settings, $"/api/v3/indexer/{indexer.Id}", HttpMethod.Put);
request.SetContent(indexer.ToJson());
@@ -93,7 +94,7 @@ namespace NzbDrone.Core.Applications.Radarr
public ValidationFailure TestConnection(RadarrIndexer indexer, RadarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v3/indexer/test", HttpMethod.POST);
var request = BuildRequest(settings, $"/api/v3/indexer/test", HttpMethod.Post);
request.SetContent(indexer.ToJson());

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using FluentValidation.Results;
using Newtonsoft.Json;
using NLog;
@@ -33,13 +34,13 @@ namespace NzbDrone.Core.Applications.Readarr
public ReadarrStatus GetStatus(ReadarrSettings settings)
{
var request = BuildRequest(settings, "/api/v1/system/status", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v1/system/status", HttpMethod.Get);
return Execute<ReadarrStatus>(request);
}
public List<ReadarrIndexer> GetIndexers(ReadarrSettings settings)
{
var request = BuildRequest(settings, "/api/v1/indexer", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v1/indexer", HttpMethod.Get);
return Execute<List<ReadarrIndexer>>(request);
}
@@ -47,7 +48,7 @@ namespace NzbDrone.Core.Applications.Readarr
{
try
{
var request = BuildRequest(settings, $"/api/v1/indexer/{indexerId}", HttpMethod.GET);
var request = BuildRequest(settings, $"/api/v1/indexer/{indexerId}", HttpMethod.Get);
return Execute<ReadarrIndexer>(request);
}
catch (HttpException ex)
@@ -63,19 +64,19 @@ namespace NzbDrone.Core.Applications.Readarr
public void RemoveIndexer(int indexerId, ReadarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v1/indexer/{indexerId}", HttpMethod.DELETE);
var request = BuildRequest(settings, $"/api/v1/indexer/{indexerId}", HttpMethod.Delete);
_httpClient.Execute(request);
}
public List<ReadarrIndexer> GetIndexerSchema(ReadarrSettings settings)
{
var request = BuildRequest(settings, "/api/v1/indexer/schema", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v1/indexer/schema", HttpMethod.Get);
return Execute<List<ReadarrIndexer>>(request);
}
public ReadarrIndexer AddIndexer(ReadarrIndexer indexer, ReadarrSettings settings)
{
var request = BuildRequest(settings, "/api/v1/indexer", HttpMethod.POST);
var request = BuildRequest(settings, "/api/v1/indexer", HttpMethod.Post);
request.SetContent(indexer.ToJson());
@@ -84,7 +85,7 @@ namespace NzbDrone.Core.Applications.Readarr
public ReadarrIndexer UpdateIndexer(ReadarrIndexer indexer, ReadarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v1/indexer/{indexer.Id}", HttpMethod.PUT);
var request = BuildRequest(settings, $"/api/v1/indexer/{indexer.Id}", HttpMethod.Put);
request.SetContent(indexer.ToJson());
@@ -93,7 +94,7 @@ namespace NzbDrone.Core.Applications.Readarr
public ValidationFailure TestConnection(ReadarrIndexer indexer, ReadarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v1/indexer/test", HttpMethod.POST);
var request = BuildRequest(settings, $"/api/v1/indexer/test", HttpMethod.Post);
request.SetContent(indexer.ToJson());

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using FluentValidation.Results;
using Newtonsoft.Json;
using NLog;
@@ -33,13 +34,13 @@ namespace NzbDrone.Core.Applications.Sonarr
public SonarrStatus GetStatus(SonarrSettings settings)
{
var request = BuildRequest(settings, "/api/v3/system/status", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v3/system/status", HttpMethod.Get);
return Execute<SonarrStatus>(request);
}
public List<SonarrIndexer> GetIndexers(SonarrSettings settings)
{
var request = BuildRequest(settings, "/api/v3/indexer", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v3/indexer", HttpMethod.Get);
return Execute<List<SonarrIndexer>>(request);
}
@@ -47,7 +48,7 @@ namespace NzbDrone.Core.Applications.Sonarr
{
try
{
var request = BuildRequest(settings, $"/api/v3/indexer/{indexerId}", HttpMethod.GET);
var request = BuildRequest(settings, $"/api/v3/indexer/{indexerId}", HttpMethod.Get);
return Execute<SonarrIndexer>(request);
}
catch (HttpException ex)
@@ -63,19 +64,19 @@ namespace NzbDrone.Core.Applications.Sonarr
public void RemoveIndexer(int indexerId, SonarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v3/indexer/{indexerId}", HttpMethod.DELETE);
var request = BuildRequest(settings, $"/api/v3/indexer/{indexerId}", HttpMethod.Delete);
_httpClient.Execute(request);
}
public List<SonarrIndexer> GetIndexerSchema(SonarrSettings settings)
{
var request = BuildRequest(settings, "/api/v3/indexer/schema", HttpMethod.GET);
var request = BuildRequest(settings, "/api/v3/indexer/schema", HttpMethod.Get);
return Execute<List<SonarrIndexer>>(request);
}
public SonarrIndexer AddIndexer(SonarrIndexer indexer, SonarrSettings settings)
{
var request = BuildRequest(settings, "/api/v3/indexer", HttpMethod.POST);
var request = BuildRequest(settings, "/api/v3/indexer", HttpMethod.Post);
request.SetContent(indexer.ToJson());
@@ -84,7 +85,7 @@ namespace NzbDrone.Core.Applications.Sonarr
public SonarrIndexer UpdateIndexer(SonarrIndexer indexer, SonarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v3/indexer/{indexer.Id}", HttpMethod.PUT);
var request = BuildRequest(settings, $"/api/v3/indexer/{indexer.Id}", HttpMethod.Put);
request.SetContent(indexer.ToJson());
@@ -93,7 +94,7 @@ namespace NzbDrone.Core.Applications.Sonarr
public ValidationFailure TestConnection(SonarrIndexer indexer, SonarrSettings settings)
{
var request = BuildRequest(settings, $"/api/v3/indexer/test", HttpMethod.POST);
var request = BuildRequest(settings, $"/api/v3/indexer/test", HttpMethod.Post);
request.SetContent(indexer.ToJson());

View File

@@ -197,7 +197,7 @@ namespace NzbDrone.Core.Configuration
public string PostgresPassword => GetValue("PostgresPassword", string.Empty, persist: false);
public string PostgresMainDb => GetValue("PostgresMainDb", "prowlarr-main", persist: false);
public string PostgresLogDb => GetValue("PostgresLogDb", "prowlarr-log", persist: false);
public int PostgresPort => GetValueInt("PostgresPort", 5436, persist: false);
public int PostgresPort => GetValueInt("PostgresPort", 5432, persist: false);
public bool LogSql => GetValueBoolean("LogSql", false, persist: false);
public int LogRotate => GetValueInt("LogRotate", 50, persist: false);
public bool FilterSentryEvents => GetValueBoolean("FilterSentryEvents", true, persist: false);

View File

@@ -0,0 +1,18 @@
using FluentMigrator;
using NzbDrone.Core.Datastore.Migration.Framework;
namespace NzbDrone.Core.Datastore.Migration
{
[Migration(15)]
public class IndexerVersions : NzbDroneMigrationBase
{
protected override void MainDbUpgrade()
{
Create.TableForModel("IndexerDefinitionVersions")
.WithColumn("DefinitionId").AsString().NotNullable().Unique()
.WithColumn("File").AsString().NotNullable().Unique()
.WithColumn("Sha").AsString().Nullable()
.WithColumn("LastUpdated").AsDateTime().Nullable();
}
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.Data;
using System.Linq;
using FluentMigrator.Model;
using FluentMigrator.Runner.Processors.SQLite;
@@ -66,6 +67,24 @@ namespace NzbDrone.Core.Datastore.Migration.Framework
if (columnReader.Read() == SqliteSyntaxReader.TokenType.StringToken)
{
if (columnReader.ValueToUpper == "PRIMARY")
{
columnReader.SkipTillToken(SqliteSyntaxReader.TokenType.ListStart);
if (columnReader.Read() == SqliteSyntaxReader.TokenType.Identifier)
{
var pk = table.Columns.First(v => v.Name == columnReader.Value);
pk.IsPrimaryKey = true;
pk.IsNullable = true;
pk.IsUnique = true;
if (columnReader.Buffer.ToUpperInvariant().Contains("AUTOINCREMENT"))
{
pk.IsIdentity = true;
}
continue;
}
}
if (columnReader.ValueToUpper == "CONSTRAINT" ||
columnReader.ValueToUpper == "PRIMARY" || columnReader.ValueToUpper == "UNIQUE" ||
columnReader.ValueToUpper == "CHECK" || columnReader.ValueToUpper == "FOREIGN")

View File
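The extra branch above lets the schema dumper recognise a table-level PRIMARY KEY constraint, mark the referenced column as the primary key, and flag it as an identity column when AUTOINCREMENT appears in the definition. A hedged example of the kind of DDL this targets; the exact statements produced by the migrator may differ:

// Illustrative table definition with the primary key declared as a table constraint
var createTable =
    "CREATE TABLE \"IndexerDefinitionVersions\" (" +
    "\"Id\" INTEGER NOT NULL, " +
    "\"DefinitionId\" TEXT NOT NULL, " +
    "PRIMARY KEY (\"Id\" AUTOINCREMENT))";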

@@ -11,6 +11,7 @@ using NzbDrone.Core.Datastore.Converters;
using NzbDrone.Core.Download;
using NzbDrone.Core.IndexerProxies;
using NzbDrone.Core.Indexers;
using NzbDrone.Core.IndexerVersions;
using NzbDrone.Core.Instrumentation;
using NzbDrone.Core.Jobs;
using NzbDrone.Core.Languages;
@@ -94,6 +95,7 @@ namespace NzbDrone.Core.Datastore
Mapper.Entity<UpdateHistory>("UpdateHistory").RegisterModel();
Mapper.Entity<AppSyncProfile>("AppSyncProfiles").RegisterModel();
Mapper.Entity<IndexerDefinitionVersion>("IndexerDefinitionVersions").RegisterModel();
}
private static void RegisterMappers()

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using NLog;
using NzbDrone.Common.Cache;
using NzbDrone.Common.Http;
@@ -142,15 +143,19 @@ namespace NzbDrone.Core.Download.Clients.DownloadStation.Proxies
return authResponse.Data.SId;
}
protected HttpRequestBuilder BuildRequest(DownloadStationSettings settings, string methodName, int apiVersion, HttpMethod httpVerb = HttpMethod.GET)
protected HttpRequestBuilder BuildRequest(DownloadStationSettings settings, string methodName, int apiVersion, HttpMethod httpVerb = null)
{
httpVerb ??= HttpMethod.Get;
var info = GetApiInfo(_apiType, settings);
return BuildRequest(settings, info, methodName, apiVersion, httpVerb);
}
private HttpRequestBuilder BuildRequest(DownloadStationSettings settings, DiskStationApiInfo apiInfo, string methodName, int apiVersion, HttpMethod httpVerb = HttpMethod.GET)
private HttpRequestBuilder BuildRequest(DownloadStationSettings settings, DiskStationApiInfo apiInfo, string methodName, int apiVersion, HttpMethod httpVerb = null)
{
httpVerb ??= HttpMethod.Get;
var requestBuilder = new HttpRequestBuilder(settings.UseSsl, settings.Host, settings.Port).Resource($"webapi/{apiInfo.Path}");
requestBuilder.Method = httpVerb;
requestBuilder.LogResponseContent = true;
@@ -163,7 +168,7 @@ namespace NzbDrone.Core.Download.Clients.DownloadStation.Proxies
throw new ArgumentOutOfRangeException(nameof(apiVersion));
}
if (httpVerb == HttpMethod.POST)
if (httpVerb == HttpMethod.Post)
{
if (apiInfo.NeedsAuthentication)
{

View File
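The signature change above exists because System.Net.Http.HttpMethod.Get is a static property of a class rather than a compile-time constant, so it cannot appear as a default parameter value; the methods now accept null and fall back with ??=. A small illustrative sketch of the pattern, with made-up names:

using System.Net.Http;

public static class DefaultVerbSketch
{
    // null means "caller did not pick a verb"; the default is applied at run time
    public static HttpRequestMessage Build(string url, HttpMethod verb = null)
    {
        verb ??= HttpMethod.Get;
        return new HttpRequestMessage(verb, url);
    }
}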

@@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.Net.Http;
using NLog;
using NzbDrone.Common.Cache;
using NzbDrone.Common.Extensions;
@@ -24,7 +25,7 @@ namespace NzbDrone.Core.Download.Clients.DownloadStation.Proxies
public void AddTaskFromData(byte[] data, string filename, string downloadDirectory, DownloadStationSettings settings)
{
var requestBuilder = BuildRequest(settings, "create", 2, HttpMethod.POST);
var requestBuilder = BuildRequest(settings, "create", 2, HttpMethod.Post);
if (downloadDirectory.IsNotNullOrWhiteSpace())
{

View File

@@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using NLog;
using NzbDrone.Common.Cache;
using NzbDrone.Common.Http;
@@ -107,7 +108,7 @@ namespace NzbDrone.Core.Download.Clients.Flood
{
var verifyRequest = BuildRequest(settings).Resource("/auth/verify").Build();
verifyRequest.Method = HttpMethod.GET;
verifyRequest.Method = HttpMethod.Get;
HandleRequest(verifyRequest, settings);
}
@@ -180,7 +181,7 @@ namespace NzbDrone.Core.Download.Clients.Flood
{
var getTorrentsRequest = BuildRequest(settings).Resource("/torrents").Build();
getTorrentsRequest.Method = HttpMethod.GET;
getTorrentsRequest.Method = HttpMethod.Get;
return Json.Deserialize<TorrentListSummary>(HandleRequest(getTorrentsRequest, settings).Content).Torrents;
}
@@ -189,7 +190,7 @@ namespace NzbDrone.Core.Download.Clients.Flood
{
var contentsRequest = BuildRequest(settings).Resource($"/torrents/{hash}/contents").Build();
contentsRequest.Method = HttpMethod.GET;
contentsRequest.Method = HttpMethod.Get;
return Json.Deserialize<List<TorrentContent>>(HandleRequest(contentsRequest, settings).Content).ConvertAll(content => content.Path);
}
@@ -198,7 +199,7 @@ namespace NzbDrone.Core.Download.Clients.Flood
{
var tagsRequest = BuildRequest(settings).Resource("/torrents/tags").Build();
tagsRequest.Method = HttpMethod.PATCH;
tagsRequest.Method = HttpMethod.Patch;
var body = new Dictionary<string, object>
{

View File

@@ -57,7 +57,7 @@ namespace NzbDrone.Core.Download.Clients.RTorrent
[FieldDefinition(8, Label = "Priority", Type = FieldType.Select, SelectOptions = typeof(RTorrentPriority), HelpText = "Priority to use when grabbing items")]
public int Priority { get; set; }
[FieldDefinition(9, Label = "Add Stopped", Type = FieldType.Checkbox, HelpText = "Enabling will prevent magnets from downloading before downloading")]
[FieldDefinition(9, Label = "Add Stopped", Type = FieldType.Checkbox, HelpText = "Enabling will add torrents and magnets to ruTorrent in a stopped state")]
public bool AddStopped { get; set; }
public NzbDroneValidationResult Validate()

View File

@@ -0,0 +1,48 @@
using System.Linq;
using NLog;
using NzbDrone.Core.Indexers;
using NzbDrone.Core.Indexers.Cardigann;
using NzbDrone.Core.IndexerVersions;
using NzbDrone.Core.Localization;
using NzbDrone.Core.ThingiProvider.Events;
namespace NzbDrone.Core.HealthCheck.Checks
{
[CheckOn(typeof(ProviderDeletedEvent<IIndexer>))]
public class NoDefinitionCheck : HealthCheckBase
{
private readonly IIndexerDefinitionUpdateService _indexerDefinitionUpdateService;
private readonly IIndexerFactory _indexerFactory;
public NoDefinitionCheck(IIndexerDefinitionUpdateService indexerDefinitionUpdateService, IIndexerFactory indexerFactory, ILocalizationService localizationService)
: base(localizationService)
{
_indexerDefinitionUpdateService = indexerDefinitionUpdateService;
_indexerFactory = indexerFactory;
}
public override HealthCheck Check()
{
var currentDefs = _indexerDefinitionUpdateService.All();
var noDefIndexers = _indexerFactory.AllProviders(false)
.Where(i => i.Definition.Implementation == "Cardigann" && !currentDefs.Any(d => d.File == ((CardigannSettings)i.Definition.Settings).DefinitionFile)).ToList();
if (noDefIndexers.Count == 0)
{
return new HealthCheck(GetType());
}
var healthType = HealthCheckResult.Error;
var healthMessage = string.Format(_localizationService.GetLocalizedString("IndexerNoDefCheckMessage"),
string.Join(", ", noDefIndexers.Select(v => v.Definition.Name)));
return new HealthCheck(GetType(),
healthType,
healthMessage,
"#indexers-have-no-definition");
}
public override bool CheckOnSchedule => false;
}
}

View File

@@ -35,10 +35,13 @@ namespace NzbDrone.Core.HealthCheck.Checks
return new HealthCheck(GetType());
}
var healthType = HealthCheckResult.Warning;
var healthMessage = string.Format(_localizationService.GetLocalizedString("IndexerObsoleteCheckMessage"),
string.Join(", ", oldIndexers.Select(v => v.Definition.Name)));
return new HealthCheck(GetType(),
HealthCheckResult.Warning,
string.Format(_localizationService.GetLocalizedString("IndexerObsoleteCheckMessage"),
string.Join(", ", oldIndexers.Select(v => v.Definition.Name))),
healthType,
healthMessage,
"#indexers-are-obsolete");
}

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using FluentValidation.Results;
using Newtonsoft.Json;
using NLog;
@@ -115,7 +116,7 @@ namespace NzbDrone.Core.IndexerProxies.FlareSolverr
var userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36";
var maxTimeout = Settings.RequestTimeout * 1000;
if (request.Method == HttpMethod.GET)
if (request.Method == HttpMethod.Get)
{
req = new FlareSolverrRequestGet
{
@@ -125,7 +126,7 @@ namespace NzbDrone.Core.IndexerProxies.FlareSolverr
UserAgent = userAgent
};
}
else if (request.Method == HttpMethod.POST)
else if (request.Method == HttpMethod.Post)
{
var contentTypeType = request.Headers.ContentType;
@@ -167,7 +168,7 @@ namespace NzbDrone.Core.IndexerProxies.FlareSolverr
var newRequest = new HttpRequest(apiUrl, HttpAccept.Json);
newRequest.Headers.ContentType = "application/json";
newRequest.Method = HttpMethod.POST;
newRequest.Method = HttpMethod.Post;
newRequest.SetContent(req.ToJson());
_logger.Debug("Applying FlareSolverr Proxy {0} to request {1}", Name, request.Url);

View File

@@ -10,6 +10,7 @@ namespace NzbDrone.Core.IndexerSearch.Definitions
public int? TmdbId { get; set; }
public int? TraktId { get; set; }
public int? Year { get; set; }
public string Genre { get; set; }
public override bool RssSearch
{
@@ -64,6 +65,11 @@ namespace NzbDrone.Core.IndexerSearch.Definitions
builder = builder.Append($" TraktId:[{TraktId}]");
}
if (Genre.IsNotNullOrWhiteSpace())
{
builder = builder.Append($" Genre:[{Genre}]");
}
return builder.ToString().Trim();
}
}

View File

@@ -7,6 +7,8 @@ namespace NzbDrone.Core.IndexerSearch.Definitions
public string Album { get; set; }
public string Artist { get; set; }
public string Label { get; set; }
public string Genre { get; set; }
public int? Year { get; set; }
public override bool RssSearch
{

View File

@@ -16,6 +16,7 @@ namespace NzbDrone.Core.IndexerSearch.Definitions
public int? RId { get; set; }
public int? TvMazeId { get; set; }
public int? TraktId { get; set; }
public int? TmdbId { get; set; }
public string SanitizedTvSearchString => (SanitizedSearchTerm + " " + EpisodeSearchString).Trim();
public string EpisodeSearchString => GetEpisodeSearchString();
@@ -74,6 +75,11 @@ namespace NzbDrone.Core.IndexerSearch.Definitions
builder.Append($" TraktId:[{TraktId}]");
}
if (TmdbId.HasValue)
{
builder.Append($" TmdbId:[{TmdbId}]");
}
builder = builder.Append(searchEpisodeTerm);
return builder.ToString().Trim();
}

View File

@@ -89,6 +89,8 @@ namespace NzbDrone.Core.IndexerSearch
new XAttribute("type", protocol == DownloadProtocol.Torrent ? "application/x-bittorrent" : "application/x-nzb")),
r.Categories == null ? null : from c in r.Categories select GetNabElement("category", c.Id, protocol),
r.IndexerFlags == null ? null : from f in r.IndexerFlags select GetNabElement("tag", f.Name, protocol),
r.Languages == null ? null : from c in r.Languages select GetNabElement("language", c.Id, protocol),
r.Subs == null ? null : from c in r.Subs select GetNabElement("subs", c.Id, protocol),
GetNabElement("rageid", r.TvRageId, protocol),
GetNabElement("tvdbid", r.TvdbId, protocol),
GetNabElement("imdb", r.ImdbId.ToString("D7"), protocol),

View File

@@ -60,6 +60,7 @@ namespace NzbDrone.Core.IndexerSearch
searchSpec.TmdbId = request.tmdbid;
searchSpec.TraktId = request.traktid;
searchSpec.Year = request.year;
searchSpec.Genre = request.genre;
return new NewznabResults { Releases = await Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec) };
}
@@ -71,6 +72,8 @@ namespace NzbDrone.Core.IndexerSearch
searchSpec.Artist = request.artist;
searchSpec.Album = request.album;
searchSpec.Label = request.label;
searchSpec.Genre = request.genre;
searchSpec.Year = request.year;
return new NewznabResults { Releases = await Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec) };
}
@@ -84,6 +87,7 @@ namespace NzbDrone.Core.IndexerSearch
searchSpec.TvdbId = request.tvdbid;
searchSpec.ImdbId = request.imdbid;
searchSpec.TraktId = request.traktid;
searchSpec.TmdbId = request.tmdbid;
searchSpec.RId = request.rid;
searchSpec.TvMazeId = request.tvmazeid;
@@ -138,7 +142,7 @@ namespace NzbDrone.Core.IndexerSearch
private async Task<List<ReleaseInfo>> Dispatch(Func<IIndexer, Task<IndexerPageableQueryResult>> searchAction, SearchCriteriaBase criteriaBase)
{
var indexers = _indexerFactory.GetAvailableProviders();
var indexers = _indexerFactory.Enabled();
if (criteriaBase.IndexerIds != null && criteriaBase.IndexerIds.Count > 0)
{

View File
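The controller changes above pass the newznab genre parameter through to movie and music searches, add tmdbid to TV searches, and dispatch only to enabled indexers. Purely as an illustration of the query strings these fields come from; the values and t= modes below are examples, only the parameter names are taken from the code:

// Hypothetical newznab-style query strings
var movieQuery = "?t=movie&q=dune&genre=sci-fi&year=2021";
var musicQuery = "?t=music&artist=Air&album=Moon+Safari&genre=electronic";
var tvQuery    = "?t=tvsearch&q=heroes&tmdbid=1399&season=1&ep=2";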

@@ -6,10 +6,11 @@ using NLog;
using NzbDrone.Common.Cache;
using NzbDrone.Common.Disk;
using NzbDrone.Common.EnvironmentInfo;
using NzbDrone.Common.Extensions;
using NzbDrone.Common.Http;
using NzbDrone.Core.Indexers.Cardigann;
using NzbDrone.Core.Lifecycle;
using NzbDrone.Core.Messaging.Commands;
using NzbDrone.Core.Messaging.Events;
using YamlDotNet.Serialization;
using YamlDotNet.Serialization.NamingConventions;
@@ -18,15 +19,18 @@ namespace NzbDrone.Core.IndexerVersions
public interface IIndexerDefinitionUpdateService
{
List<CardigannMetaDefinition> All();
CardigannDefinition GetDefinition(string fileKey);
CardigannDefinition GetCachedDefinition(string fileKey);
List<string> GetBlocklist();
}
public class IndexerDefinitionUpdateService : IIndexerDefinitionUpdateService, IExecute<IndexerDefinitionUpdateCommand>
public class IndexerDefinitionUpdateService : IIndexerDefinitionUpdateService, IExecute<IndexerDefinitionUpdateCommand>, IHandle<ApplicationStartedEvent>
{
/* Update Service will fall back if version # does not exist for an indexer per Ta */
private const string DEFINITION_BRANCH = "master";
private const int DEFINITION_VERSION = 3;
//Used when moving yml to C#
private readonly List<string> _defintionBlocklist = new List<string>()
{
"aither",
@@ -49,6 +53,7 @@ namespace NzbDrone.Core.IndexerVersions
private readonly IHttpClient _httpClient;
private readonly IAppFolderInfo _appFolderInfo;
private readonly IDiskProvider _diskProvider;
private readonly IIndexerDefinitionVersionService _versionService;
private readonly ICached<CardigannDefinition> _cache;
private readonly Logger _logger;
@@ -60,11 +65,13 @@ namespace NzbDrone.Core.IndexerVersions
public IndexerDefinitionUpdateService(IHttpClient httpClient,
IAppFolderInfo appFolderInfo,
IDiskProvider diskProvider,
IIndexerDefinitionVersionService versionService,
ICacheManager cacheManager,
Logger logger)
{
_appFolderInfo = appFolderInfo;
_diskProvider = diskProvider;
_versionService = versionService;
_cache = cacheManager.GetCache<CardigannDefinition>(typeof(CardigannDefinition), "definitions");
_httpClient = httpClient;
_logger = logger;
@@ -76,43 +83,24 @@ namespace NzbDrone.Core.IndexerVersions
try
{
var request = new HttpRequest($"https://indexers.prowlarr.com/master/{DEFINITION_VERSION}");
var response = _httpClient.Get<List<CardigannMetaDefinition>>(request);
indexerList = response.Resource.Where(i => !_defintionBlocklist.Contains(i.File)).ToList();
var definitionFolder = Path.Combine(_appFolderInfo.AppDataFolder, "Definitions", "Custom");
var directoryInfo = new DirectoryInfo(definitionFolder);
if (directoryInfo.Exists)
// Grab the latest def list from the server or fall back to disk
try
{
var files = directoryInfo.GetFiles($"*.yml");
foreach (var file in files)
{
_logger.Debug("Loading Custom Cardigann definition " + file.FullName);
try
{
var definitionString = File.ReadAllText(file.FullName);
var definition = _deserializer.Deserialize<CardigannMetaDefinition>(definitionString);
definition.File = Path.GetFileNameWithoutExtension(file.Name);
if (indexerList.Any(i => i.File == definition.File || i.Name == definition.Name))
{
_logger.Warn("Custom Cardigann definition {0} does not have unique file name or Indexer name", file.FullName);
continue;
}
indexerList.Add(definition);
}
catch (Exception e)
{
_logger.Error($"Error while parsing custom Cardigann definition {file.FullName}\n{e}");
}
}
var request = new HttpRequest($"https://indexers.prowlarr.com/{DEFINITION_BRANCH}/{DEFINITION_VERSION}");
var response = _httpClient.Get<List<CardigannMetaDefinition>>(request);
indexerList = response.Resource.Where(i => !_defintionBlocklist.Contains(i.File)).ToList();
}
catch
{
var definitionFolder = Path.Combine(_appFolderInfo.AppDataFolder, "Definitions");
indexerList = ReadDefinitionsFromDisk(indexerList, definitionFolder);
}
//Check for custom definitions
var customDefinitionFolder = Path.Combine(_appFolderInfo.AppDataFolder, "Definitions", "Custom");
indexerList = ReadDefinitionsFromDisk(indexerList, customDefinitionFolder);
}
catch
{
@@ -122,14 +110,14 @@ namespace NzbDrone.Core.IndexerVersions
return indexerList;
}
public CardigannDefinition GetDefinition(string file)
public CardigannDefinition GetCachedDefinition(string fileKey)
{
if (string.IsNullOrEmpty(file))
if (string.IsNullOrEmpty(fileKey))
{
throw new ArgumentNullException(nameof(file));
throw new ArgumentNullException(nameof(fileKey));
}
var definition = _cache.Get(file, () => LoadIndexerDef(file));
var definition = _cache.Get(fileKey, () => GetUncachedDefinition(fileKey));
return definition;
}
@@ -139,15 +127,46 @@ namespace NzbDrone.Core.IndexerVersions
return _defintionBlocklist;
}
private CardigannDefinition GetHttpDefinition(string id)
private List<CardigannMetaDefinition> ReadDefinitionsFromDisk(List<CardigannMetaDefinition> defs, string path, SearchOption options = SearchOption.TopDirectoryOnly)
{
var req = new HttpRequest($"https://indexers.prowlarr.com/master/{DEFINITION_VERSION}/{id}");
var response = _httpClient.Get(req);
var definition = _deserializer.Deserialize<CardigannDefinition>(response.Content);
return CleanIndexerDefinition(definition);
var indexerList = defs;
var directoryInfo = new DirectoryInfo(path);
if (directoryInfo.Exists)
{
var files = directoryInfo.GetFiles($"*.yml", options);
foreach (var file in files)
{
_logger.Debug("Loading definition " + file.FullName);
try
{
var definitionString = File.ReadAllText(file.FullName);
var definition = _deserializer.Deserialize<CardigannMetaDefinition>(definitionString);
definition.File = Path.GetFileNameWithoutExtension(file.Name);
if (indexerList.Any(i => i.File == definition.File || i.Name == definition.Name))
{
_logger.Warn("Definition {0} does not have unique file name or Indexer name", file.FullName);
continue;
}
indexerList.Add(definition);
}
catch (Exception e)
{
_logger.Error($"Error while parsing Cardigann definition {file.FullName}\n{e}");
}
}
}
return indexerList;
}
private CardigannDefinition LoadIndexerDef(string fileKey)
private CardigannDefinition GetUncachedDefinition(string fileKey)
{
if (string.IsNullOrEmpty(fileKey))
{
@@ -182,9 +201,26 @@ namespace NzbDrone.Core.IndexerVersions
}
}
var dbDefs = _versionService.All();
//Check to ensure it's in versioned defs before we go to web
if (dbDefs.Count > 0 && dbDefs.Any(x => x.File == fileKey))
{
throw new ArgumentNullException(nameof(fileKey));
}
//No definition was returned locally, go to the web
return GetHttpDefinition(fileKey);
}
private CardigannDefinition GetHttpDefinition(string id)
{
var req = new HttpRequest($"https://indexers.prowlarr.com/{DEFINITION_BRANCH}/{DEFINITION_VERSION}/{id}");
var response = _httpClient.Get(req);
var definition = _deserializer.Deserialize<CardigannDefinition>(response.Content);
return CleanIndexerDefinition(definition);
}
private CardigannDefinition CleanIndexerDefinition(CardigannDefinition definition)
{
if (definition.Settings == null)
@@ -224,6 +260,12 @@ namespace NzbDrone.Core.IndexerVersions
return definition;
}
public void Handle(ApplicationStartedEvent message)
{
// Sync indexers on app start
UpdateLocalDefinitions();
}
public void Execute(IndexerDefinitionUpdateCommand message)
{
UpdateLocalDefinitions();
@@ -238,30 +280,47 @@ namespace NzbDrone.Core.IndexerVersions
private void UpdateLocalDefinitions()
{
var request = new HttpRequest($"https://indexers.prowlarr.com/master/{DEFINITION_VERSION}");
var startupFolder = _appFolderInfo.AppDataFolder;
var request = new HttpRequest($"https://indexers.prowlarr.com/{DEFINITION_BRANCH}/{DEFINITION_VERSION}");
var response = _httpClient.Get<List<CardigannMetaDefinition>>(request);
foreach (var def in response.Resource)
var currentDefs = _versionService.All().ToDictionary(x => x.DefinitionId, x => x.Sha);
try
{
try
EnsureDefinitionsFolder();
foreach (var def in response.Resource)
{
var startupFolder = _appFolderInfo.AppDataFolder;
try
{
var saveFile = Path.Combine(startupFolder, "Definitions", $"{def.File}.yml");
EnsureDefinitionsFolder();
if (currentDefs.TryGetValue(def.Id, out var defSha) && defSha == def.Sha)
{
_logger.Trace("Indexer already up to date: {0}", def.File);
var saveFile = Path.Combine(startupFolder, "Definitions", $"{def.File}.yml");
continue;
}
_httpClient.DownloadFile($"https://indexers.prowlarr.com/master/{DEFINITION_VERSION}/{def.File}", saveFile);
_httpClient.DownloadFile($"https://indexers.prowlarr.com/{DEFINITION_BRANCH}/{DEFINITION_VERSION}/{def.File}", saveFile);
_cache.Remove(def.File);
_versionService.Upsert(new IndexerDefinitionVersion { Sha = def.Sha, DefinitionId = def.Id, File = def.File, LastUpdated = DateTime.UtcNow });
_logger.Debug("Updated definition: {0}", def.File);
}
catch (Exception ex)
{
_logger.Error("Definition download failed: {0}, {1}", def.File, ex.Message);
_cache.Remove(def.File);
_logger.Debug("Updated definition: {0}", def.File);
}
catch (Exception ex)
{
_logger.Error("Definition download failed: {0}, {1}", def.File, ex.Message);
}
}
}
catch (Exception ex)
{
_logger.Error(ex, "Definition download failed, error creating definitions folder in {0}", startupFolder);
}
}
}
}

View File

@@ -0,0 +1,13 @@
using System;
using NzbDrone.Core.Datastore;
namespace NzbDrone.Core.IndexerVersions
{
public class IndexerDefinitionVersion : ModelBase
{
public string File { get; set; }
public string Sha { get; set; }
public DateTime LastUpdated { get; set; }
public string DefinitionId { get; set; }
}
}

View File

@@ -0,0 +1,25 @@
using System;
using System.Linq;
using NzbDrone.Core.Datastore;
using NzbDrone.Core.Messaging.Events;
namespace NzbDrone.Core.IndexerVersions
{
public interface IIndexerDefinitionVersionRepository : IBasicRepository<IndexerDefinitionVersion>
{
public IndexerDefinitionVersion GetByDefId(string defId);
}
public class IndexerDefinitionVersionRepository : BasicRepository<IndexerDefinitionVersion>, IIndexerDefinitionVersionRepository
{
public IndexerDefinitionVersionRepository(IMainDatabase database, IEventAggregator eventAggregator)
: base(database, eventAggregator)
{
}
public IndexerDefinitionVersion GetByDefId(string defId)
{
return Query(x => x.DefinitionId == defId).SingleOrDefault();
}
}
}

View File

@@ -0,0 +1,66 @@
using System.Collections.Generic;
using System.Linq;
namespace NzbDrone.Core.IndexerVersions
{
public interface IIndexerDefinitionVersionService
{
IndexerDefinitionVersion Get(int indexerVersionId);
IndexerDefinitionVersion GetByDefId(string defId);
List<IndexerDefinitionVersion> All();
IndexerDefinitionVersion Add(IndexerDefinitionVersion defVersion);
IndexerDefinitionVersion Upsert(IndexerDefinitionVersion defVersion);
void Delete(int indexerVersionId);
}
public class IndexerDefinitionVersionService : IIndexerDefinitionVersionService
{
private readonly IIndexerDefinitionVersionRepository _repo;
public IndexerDefinitionVersionService(IIndexerDefinitionVersionRepository repo)
{
_repo = repo;
}
public IndexerDefinitionVersion Get(int indexerVersionId)
{
return _repo.Get(indexerVersionId);
}
public IndexerDefinitionVersion GetByDefId(string defId)
{
return _repo.GetByDefId(defId);
}
public List<IndexerDefinitionVersion> All()
{
return _repo.All().ToList();
}
public IndexerDefinitionVersion Add(IndexerDefinitionVersion defVersion)
{
_repo.Insert(defVersion);
return defVersion;
}
public IndexerDefinitionVersion Upsert(IndexerDefinitionVersion defVersion)
{
var existing = _repo.GetByDefId(defVersion.DefinitionId);
if (existing != null)
{
defVersion.Id = existing.Id;
}
defVersion = _repo.Upsert(defVersion);
return defVersion;
}
public void Delete(int indexerVersionId)
{
_repo.Delete(indexerVersionId);
}
}
}

View File

@@ -30,7 +30,7 @@ namespace NzbDrone.Core.Indexers.Definitions
public override string Language => "ru-RU";
public override Encoding Encoding => Encoding.UTF8;
public override DownloadProtocol Protocol => DownloadProtocol.Torrent;
public override IndexerPrivacy Privacy => IndexerPrivacy.SemiPublic;
public override IndexerPrivacy Privacy => IndexerPrivacy.SemiPrivate;
public override IndexerCapabilities Capabilities => SetCapabilities();
public Anidub(IIndexerHttpClient httpClient, IEventAggregator eventAggregator, IIndexerStatusService indexerStatusService, IConfigService configService, Logger logger)
@@ -60,7 +60,7 @@ namespace NzbDrone.Core.Indexers.Definitions
var mainPage = await ExecuteAuth(new HttpRequest(Settings.BaseUrl));
requestBuilder.Method = Common.Http.HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
requestBuilder.SetCookies(mainPage.GetCookies());
@@ -167,7 +167,7 @@ namespace NzbDrone.Core.Indexers.Definitions
if (isSearch)
{
request.HttpRequest.Method = NzbDrone.Common.Http.HttpMethod.POST;
request.HttpRequest.Method = HttpMethod.Post;
var postData = new NameValueCollection
{
{ "do", "search" },

View File

@@ -106,7 +106,37 @@ namespace NzbDrone.Core.Indexers.Definitions
{
}
private IEnumerable<IndexerRequest> GetPagedRequests(string searchType, string term, int[] categories)
public IndexerPageableRequestChain GetSearchRequests(MovieSearchCriteria searchCriteria)
=> GetRequestWithSearchType(searchCriteria, "anime");
public IndexerPageableRequestChain GetSearchRequests(MusicSearchCriteria searchCriteria)
=> GetRequestWithSearchType(searchCriteria, "music");
public IndexerPageableRequestChain GetSearchRequests(TvSearchCriteria searchCriteria)
=> GetRequestWithSearchType(searchCriteria, "anime");
public IndexerPageableRequestChain GetSearchRequests(BookSearchCriteria searchCriteria)
=> GetRequestWithSearchType(searchCriteria, "anime");
public IndexerPageableRequestChain GetSearchRequests(BasicSearchCriteria searchCriteria)
=> GetRequestWithSearchType(searchCriteria, "anime");
private IndexerPageableRequestChain GetRequestWithSearchType(SearchCriteriaBase searchCriteria, string searchType)
{
var pageableRequests = new IndexerPageableRequestChain();
// TODO: Remove this once Prowlarr has proper support for non-pageable indexers and can tell Sonarr that an indexer doesn't support pagination in a proper way; for now just return an empty release list for all requests containing an offset
if (searchCriteria.Offset is > 0)
{
return pageableRequests;
}
pageableRequests.Add(GetRequest(searchType, searchCriteria.SanitizedSearchTerm, searchCriteria.Categories));
return pageableRequests;
}
private IEnumerable<IndexerRequest> GetRequest(string searchType, string term, int[] categories)
{
var searchUrl = string.Format("{0}/scrape.php", Settings.BaseUrl.TrimEnd('/'));
@@ -135,51 +165,6 @@ namespace NzbDrone.Core.Indexers.Definitions
yield return request;
}
public IndexerPageableRequestChain GetSearchRequests(MovieSearchCriteria searchCriteria)
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests("anime", searchCriteria.SanitizedSearchTerm, searchCriteria.Categories));
return pageableRequests;
}
public IndexerPageableRequestChain GetSearchRequests(MusicSearchCriteria searchCriteria)
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests("music", searchCriteria.SanitizedSearchTerm, searchCriteria.Categories));
return pageableRequests;
}
public IndexerPageableRequestChain GetSearchRequests(TvSearchCriteria searchCriteria)
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests("anime", searchCriteria.SanitizedSearchTerm, searchCriteria.Categories));
return pageableRequests;
}
public IndexerPageableRequestChain GetSearchRequests(BookSearchCriteria searchCriteria)
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests("anime", searchCriteria.SanitizedSearchTerm, searchCriteria.Categories));
return pageableRequests;
}
public IndexerPageableRequestChain GetSearchRequests(BasicSearchCriteria searchCriteria)
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests("anime", searchCriteria.SanitizedSearchTerm, searchCriteria.Categories));
return pageableRequests;
}
public Func<IDictionary<string, string>> GetCookies { get; set; }
public Action<IDictionary<string, string>, DateTime?> CookiesUpdater { get; set; }

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using AngleSharp.Html.Parser;
@@ -57,7 +58,7 @@ namespace NzbDrone.Core.Indexers.Definitions
};
var loginPage = await ExecuteAuth(new HttpRequest(LoginUrl));
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
requestBuilder.SetCookies(loginPage.GetCookies());

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using AngleSharp.Html.Parser;
@@ -56,7 +57,7 @@ namespace NzbDrone.Core.Indexers.Definitions
AllowAutoRedirect = true
};
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
var cookies = Cookies;

View File

@@ -36,11 +36,11 @@ namespace NzbDrone.Core.Indexers.Definitions
{
TvSearchParams = new List<TvSearchParam>
{
TvSearchParam.Q, TvSearchParam.Season, TvSearchParam.Ep, TvSearchParam.ImdbId
TvSearchParam.Q, TvSearchParam.Season, TvSearchParam.Ep, TvSearchParam.ImdbId, TvSearchParam.TvdbId
},
MovieSearchParams = new List<MovieSearchParam>
{
MovieSearchParam.Q, MovieSearchParam.ImdbId
MovieSearchParam.Q, MovieSearchParam.ImdbId, MovieSearchParam.TmdbId
}
};

View File

@@ -1,5 +1,6 @@
using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentValidation.Results;
using NLog;
@@ -105,7 +106,7 @@ namespace NzbDrone.Core.Indexers.Definitions.Avistaz
LogResponseContent = true
};
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
var authLoginRequest = requestBuilder

View File

@@ -77,6 +77,10 @@ namespace NzbDrone.Core.Indexers.Definitions.Avistaz
{
parameters.Add("imdb", searchCriteria.FullImdbId);
}
else if (searchCriteria.TmdbId.HasValue)
{
parameters.Add("tmdb", searchCriteria.TmdbId.Value.ToString());
}
else
{
parameters.Add("search", GetSearchTerm(searchCriteria.SanitizedSearchTerm).Trim());
@@ -105,6 +109,12 @@ namespace NzbDrone.Core.Indexers.Definitions.Avistaz
if (searchCriteria.ImdbId.IsNotNullOrWhiteSpace())
{
parameters.Add("imdb", searchCriteria.FullImdbId);
parameters.Add("search", GetSearchTerm(searchCriteria.EpisodeSearchString).Trim());
}
else if (searchCriteria.TvdbId.HasValue)
{
parameters.Add("tvdb", searchCriteria.TvdbId.Value.ToString());
parameters.Add("search", GetSearchTerm(searchCriteria.EpisodeSearchString).Trim());
}
else
{

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@@ -55,7 +56,7 @@ namespace NzbDrone.Core.Indexers.Definitions
AllowAutoRedirect = true
};
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
var cookies = Cookies;

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using AngleSharp.Dom;
@@ -79,7 +80,7 @@ namespace NzbDrone.Core.Indexers.Definitions
var loginPage = await ExecuteAuth(new HttpRequest(LoginUrl));
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
requestBuilder.SetCookies(loginPage.GetCookies());

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using FluentValidation;
using Newtonsoft.Json;
using NLog;
@@ -110,7 +111,7 @@ namespace NzbDrone.Core.Indexers.Definitions
var request = new HttpRequest(searchUrl, HttpAccept.Json);
request.Headers.Add("Content-type", "application/json");
request.Method = HttpMethod.POST;
request.Method = HttpMethod.Post;
request.SetContent(body.ToJson());
var indexerRequest = new IndexerRequest(request);

View File

@@ -37,7 +37,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
{
var generator = _generatorCache.Get(Settings.DefinitionFile, () =>
new CardigannRequestGenerator(_configService,
_definitionService.GetDefinition(Settings.DefinitionFile),
_definitionService.GetCachedDefinition(Settings.DefinitionFile),
_logger)
{
HttpClient = _httpClient,
@@ -57,7 +57,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
public override IParseIndexerResponse GetParser()
{
return new CardigannParser(_configService,
_definitionService.GetDefinition(Settings.DefinitionFile),
_definitionService.GetCachedDefinition(Settings.DefinitionFile),
_logger)
{
Settings = Settings
@@ -128,7 +128,12 @@ namespace NzbDrone.Core.Indexers.Cardigann
IndexerUrls = definition.Links.ToArray(),
Settings = new CardigannSettings { DefinitionFile = definition.File },
Protocol = DownloadProtocol.Torrent,
Privacy = definition.Type == "private" ? IndexerPrivacy.Private : IndexerPrivacy.Public,
Privacy = definition.Type switch
{
"private" => IndexerPrivacy.Private,
"public" => IndexerPrivacy.Public,
_ => IndexerPrivacy.SemiPrivate
},
SupportsRss = SupportsRss,
SupportsSearch = SupportsSearch,
SupportsRedirect = SupportsRedirect,

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using AngleSharp.Html.Dom;
@@ -184,7 +185,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
var requestBuilder = new HttpRequestBuilder(loginUrl)
{
LogResponseContent = true,
Method = HttpMethod.POST,
Method = HttpMethod.Post,
AllowAutoRedirect = true,
SuppressHttpError = true,
Encoding = _encoding
@@ -329,7 +330,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
var requestBuilder = new HttpRequestBuilder(captchaUrl.ToString())
{
LogResponseContent = true,
Method = HttpMethod.GET,
Method = HttpMethod.Get,
Encoding = _encoding
};
@@ -394,7 +395,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
var requestBuilder = new HttpRequestBuilder(submitUrl.ToString())
{
LogResponseContent = true,
Method = HttpMethod.POST,
Method = HttpMethod.Post,
AllowAutoRedirect = true,
Encoding = _encoding
};
@@ -423,7 +424,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
var requestBuilder = new HttpRequestBuilder(submitUrl.ToString())
{
LogResponseContent = true,
Method = HttpMethod.POST,
Method = HttpMethod.Post,
AllowAutoRedirect = true,
SuppressHttpError = true,
Encoding = _encoding
@@ -466,7 +467,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
var requestBuilder = new HttpRequestBuilder(loginUrl)
{
LogResponseContent = true,
Method = HttpMethod.GET,
Method = HttpMethod.Get,
SuppressHttpError = true,
Encoding = _encoding
};
@@ -491,7 +492,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
var requestBuilder = new HttpRequestBuilder(loginUrl)
{
LogResponseContent = true,
Method = HttpMethod.GET,
Method = HttpMethod.Get,
SuppressHttpError = true,
Encoding = _encoding
};
@@ -565,7 +566,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
var requestBuilder = new HttpRequestBuilder(loginUrl.AbsoluteUri)
{
LogResponseContent = true,
Method = HttpMethod.GET,
Method = HttpMethod.Get,
Encoding = _encoding
};
@@ -666,21 +667,21 @@ namespace NzbDrone.Core.Indexers.Cardigann
Dictionary<string, string> pairs = null;
var queryCollection = new NameValueCollection();
var method = HttpMethod.GET;
var method = HttpMethod.Get;
if (string.Equals(request.Method, "post", StringComparison.OrdinalIgnoreCase))
{
method = HttpMethod.POST;
method = HttpMethod.Post;
pairs = new Dictionary<string, string>();
}
foreach (var input in request.Inputs)
{
var value = ApplyGoTemplateText(input.Value, variables);
if (method == HttpMethod.GET)
if (method == HttpMethod.Get)
{
queryCollection.Add(input.Key, value);
}
else if (method == HttpMethod.POST)
else if (method == HttpMethod.Post)
{
pairs.Add(input.Key, value);
}
@@ -707,7 +708,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
httpRequest.Method = method;
// Add form data for POST requests
if (method == HttpMethod.POST)
if (method == HttpMethod.Post)
{
foreach (var param in pairs)
{
@@ -724,7 +725,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
public async Task<HttpRequest> DownloadRequest(Uri link)
{
Cookies = GetCookies();
var method = HttpMethod.GET;
var method = HttpMethod.Get;
var headers = new Dictionary<string, string>();
var variables = GetBaseTemplateVariables();
@@ -759,7 +760,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
if (download.Method == "post")
{
method = HttpMethod.POST;
method = HttpMethod.Post;
}
if (download.Infohash != null)
@@ -1014,11 +1015,11 @@ namespace NzbDrone.Core.Indexers.Cardigann
// HttpUtility.UrlPathEncode seems to only encode spaces, we use UrlEncode and replace + with %20 as a workaround
var searchUrl = ResolvePath(ApplyGoTemplateText(searchPath.Path, variables, WebUtility.UrlEncode).Replace("+", "%20")).AbsoluteUri;
var queryCollection = new List<KeyValuePair<string, string>>();
var method = HttpMethod.GET;
var method = HttpMethod.Get;
if (string.Equals(searchPath.Method, "post", StringComparison.OrdinalIgnoreCase))
{
method = HttpMethod.POST;
method = HttpMethod.Post;
}
var inputsList = new List<Dictionary<string, string>>();
@@ -1064,7 +1065,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
}
}
if (method == HttpMethod.GET)
if (method == HttpMethod.Get)
{
if (queryCollection.Count > 0)
{
@@ -1079,7 +1080,7 @@ namespace NzbDrone.Core.Indexers.Cardigann
requestbuilder.Method = method;
// Add FormData for searches that POST
if (method == HttpMethod.POST)
if (method == HttpMethod.Post)
{
foreach (var param in queryCollection)
{

View File

@@ -22,7 +22,7 @@ namespace NzbDrone.Core.Indexers.Definitions
public class DanishBytes : TorrentIndexerBase<DanishBytesSettings>
{
public override string Name => "DanishBytes";
public override string[] IndexerUrls => new string[] { "https://danishbytes.club/" };
public override string[] IndexerUrls => new string[] { "https://danishbytes.club/", "https://danishbytes2.org/" };
public override string Description => "DanishBytes is a Private Danish Tracker";
public override DownloadProtocol Protocol => DownloadProtocol.Torrent;
public override IndexerPrivacy Privacy => IndexerPrivacy.Private;

View File

@@ -1,4 +1,5 @@
using System;
using System.Net.Http;
using System.Threading.Tasks;
using NLog;
using NzbDrone.Common.Http;
@@ -56,7 +57,7 @@ namespace NzbDrone.Core.Indexers.Gazelle
LogResponseContent = true
};
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
var cookies = Cookies;

View File

@@ -52,6 +52,8 @@ namespace NzbDrone.Core.Indexers.Gazelle
foreach (var result in jsonResponse.Resource.Response.Results)
{
var posterUrl = GetPosterUrl(result.Cover);
if (result.Torrents != null)
{
foreach (var torrent in result.Torrents)
@@ -66,9 +68,11 @@ namespace NzbDrone.Core.Indexers.Gazelle
title += " [Cue]";
}
var infoUrl = GetInfoUrl(result.GroupId, id);
var release = new GazelleInfo()
{
Guid = string.Format("Gazelle-{0}", id),
Guid = infoUrl,
Title = WebUtility.HtmlDecode(title),
Container = torrent.Encoding,
Files = torrent.FileCount,
@@ -76,11 +80,14 @@ namespace NzbDrone.Core.Indexers.Gazelle
Codec = torrent.Format,
Size = long.Parse(torrent.Size),
DownloadUrl = GetDownloadUrl(id),
InfoUrl = GetInfoUrl(result.GroupId, id),
InfoUrl = infoUrl,
Seeders = int.Parse(torrent.Seeders),
Peers = int.Parse(torrent.Leechers) + int.Parse(torrent.Seeders),
PublishDate = torrent.Time.ToUniversalTime(),
Scene = torrent.Scene,
PosterUrl = posterUrl,
DownloadVolumeFactor = torrent.IsFreeLeech || torrent.IsNeutralLeech || torrent.IsPersonalFreeLeech ? 0 : 1,
UploadVolumeFactor = torrent.IsNeutralLeech ? 0 : 1
};
var category = torrent.Category;
@@ -100,19 +107,23 @@ namespace NzbDrone.Core.Indexers.Gazelle
{
var id = result.TorrentId;
var groupName = WebUtility.HtmlDecode(result.GroupName);
var infoUrl = GetInfoUrl(result.GroupId, id);
var release = new GazelleInfo()
{
Guid = string.Format("Gazelle-{0}", id),
Guid = infoUrl,
Title = groupName,
Size = long.Parse(result.Size),
DownloadUrl = GetDownloadUrl(id),
InfoUrl = GetInfoUrl(result.GroupId, id),
InfoUrl = infoUrl,
Seeders = int.Parse(result.Seeders),
Peers = int.Parse(result.Leechers) + int.Parse(result.Seeders),
Files = result.FileCount,
Grabs = result.Snatches,
PublishDate = DateTimeOffset.FromUnixTimeSeconds(result.GroupTime).UtcDateTime,
PosterUrl = posterUrl,
DownloadVolumeFactor = result.IsFreeLeech || result.IsNeutralLeech || result.IsPersonalFreeLeech ? 0 : 1,
UploadVolumeFactor = result.IsNeutralLeech ? 0 : 1
};
var category = result.Category;
@@ -147,7 +158,19 @@ namespace NzbDrone.Core.Indexers.Gazelle
return url.FullUri;
}
private string GetInfoUrl(string groupId, int torrentId)
protected virtual string GetPosterUrl(string cover)
{
if (!string.IsNullOrEmpty(cover))
{
return cover.StartsWith("http") ?
new HttpUri(cover).FullUri :
new HttpUri(_settings.BaseUrl).CombinePath(cover).FullUri;
}
return null;
}
protected virtual string GetInfoUrl(string groupId, int torrentId)
{
var url = new HttpUri(_settings.BaseUrl)
.CombinePath("/torrents.php")

View File
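The parser above now fills PosterUrl from the result's cover field, accepting both absolute URLs and paths relative to the tracker's base URL, and reuses a single infoUrl value for both Guid and InfoUrl. A self-contained sketch of the cover handling, using System.Uri in place of the project's HttpUri helper:

using System;

public static class PosterUrlSketch
{
    // Absolute cover URLs pass through; relative paths are combined with the base URL.
    public static string Resolve(string baseUrl, string cover)
    {
        if (string.IsNullOrEmpty(cover))
        {
            return null;
        }

        return cover.StartsWith("http", StringComparison.OrdinalIgnoreCase)
            ? cover
            : new Uri(new Uri(baseUrl), cover).AbsoluteUri;
    }
}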

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using NzbDrone.Common.Extensions;
using NzbDrone.Common.Http;
using NzbDrone.Common.Serializer;
@@ -50,7 +51,7 @@ namespace NzbDrone.Core.Indexers.HDBits
.Resource("/api/torrents")
.Build();
request.Method = HttpMethod.POST;
request.Method = HttpMethod.Post;
const string appJson = "application/json";
request.Headers.Accept = appJson;
request.Headers.ContentType = appJson;

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@@ -57,7 +58,7 @@ namespace NzbDrone.Core.Indexers.Definitions
AllowAutoRedirect = true
};
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
var cookies = Cookies;

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using AngleSharp.Html.Parser;
@@ -52,7 +53,7 @@ namespace NzbDrone.Core.Indexers.Definitions
LogResponseContent = true
};
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
var cookies = Cookies;
@@ -178,7 +179,7 @@ namespace NzbDrone.Core.Indexers.Definitions
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests(string.Format("{0}", searchCriteria.SearchTerm), searchCriteria.Categories));
pageableRequests.Add(GetPagedRequests(string.Format("{0}", searchCriteria.SanitizedSearchTerm), searchCriteria.Categories));
return pageableRequests;
}
@@ -187,7 +188,7 @@ namespace NzbDrone.Core.Indexers.Definitions
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests(string.Format("{0}", searchCriteria.SearchTerm), searchCriteria.Categories, searchCriteria.FullImdbId));
pageableRequests.Add(GetPagedRequests(string.Format("{0}", searchCriteria.SanitizedSearchTerm), searchCriteria.Categories, searchCriteria.FullImdbId));
return pageableRequests;
}
@@ -196,7 +197,7 @@ namespace NzbDrone.Core.Indexers.Definitions
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests(string.Format("{0}", searchCriteria.SearchTerm), searchCriteria.Categories));
pageableRequests.Add(GetPagedRequests(string.Format("{0}", searchCriteria.SanitizedSearchTerm), searchCriteria.Categories));
return pageableRequests;
}
@@ -205,7 +206,7 @@ namespace NzbDrone.Core.Indexers.Definitions
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests(string.Format("{0}", searchCriteria.SearchTerm), searchCriteria.Categories));
pageableRequests.Add(GetPagedRequests(string.Format("{0}", searchCriteria.SanitizedSearchTerm), searchCriteria.Categories));
return pageableRequests;
}

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using AngleSharp.Html.Parser;
using FluentValidation;
@@ -54,7 +55,7 @@ namespace NzbDrone.Core.Indexers.Definitions
LogResponseContent = true
};
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
var cookies = Cookies;

View File

@@ -0,0 +1,341 @@
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Web;
using AngleSharp.Dom;
using AngleSharp.Html.Dom;
using AngleSharp.Html.Parser;
using FluentValidation;
using NLog;
using NzbDrone.Common.Http;
using NzbDrone.Core.Annotations;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Messaging.Events;
using NzbDrone.Core.Parser;
using NzbDrone.Core.Parser.Model;
using NzbDrone.Core.Validation;
namespace NzbDrone.Core.Indexers.Definitions;
public class MoreThanTV : TorrentIndexerBase<MoreThanTVSettings>
{
public override string Name => "MoreThanTV";
public override string[] IndexerUrls => new[] { "https://www.morethantv.me/" };
public override string Description => "Private torrent tracker for TV / MOVIES";
public override DownloadProtocol Protocol => DownloadProtocol.Torrent;
public override IndexerPrivacy Privacy => IndexerPrivacy.Private;
public override IndexerCapabilities Capabilities => SetCapabilities();
public override bool FollowRedirect => true;
public MoreThanTV(IIndexerHttpClient httpClient, IEventAggregator eventAggregator, IIndexerStatusService indexerStatusService, IConfigService configService, Logger logger)
: base(httpClient, eventAggregator, indexerStatusService, configService, logger)
{
}
public override IIndexerRequestGenerator GetRequestGenerator()
=> new MoreThanTVRequestGenerator(Settings, Capabilities);
public override IParseIndexerResponse GetParser()
=> new MoreThanTVParser
{
Settings = Settings
};
private IndexerCapabilities SetCapabilities()
{
var caps = new IndexerCapabilities
{
TvSearchParams = new List<TvSearchParam>
{
TvSearchParam.Q, TvSearchParam.Season, TvSearchParam.Ep
},
MovieSearchParams = new List<MovieSearchParam>
{
MovieSearchParam.Q
}
};
caps.Categories.AddCategoryMapping(1, NewznabStandardCategory.Movies, "Movies");
caps.Categories.AddCategoryMapping(2, NewznabStandardCategory.TV, "TV");
return caps;
}
protected override IDictionary<string, string> GetCookies()
{
return CookieUtil.CookieHeaderToDictionary(Settings.Cookie);
}
}
public class MoreThanTVRequestGenerator : IIndexerRequestGenerator
{
private MoreThanTVSettings Settings { get; }
private IndexerCapabilities Capabilities { get; }
private NameValueCollection BrowserHeaders { get; }
public MoreThanTVRequestGenerator(MoreThanTVSettings settings, IndexerCapabilities capabilities)
{
Settings = settings;
Capabilities = capabilities;
BrowserHeaders = new NameValueCollection()
{
{ "referer", settings.BaseUrl },
{ "Upgrade-Insecure-Requests", "1" },
{ "User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.72 Safari/537.36" }
};
}
public IndexerPageableRequestChain GetSearchRequests(MovieSearchCriteria searchCriteria)
=> PerformRequest(searchCriteria);
public IndexerPageableRequestChain GetSearchRequests(MusicSearchCriteria searchCriteria)
=> PerformRequest(searchCriteria);
public IndexerPageableRequestChain GetSearchRequests(TvSearchCriteria searchCriteria)
=> PerformRequest(searchCriteria);
public IndexerPageableRequestChain GetSearchRequests(BookSearchCriteria searchCriteria)
=> PerformRequest(searchCriteria);
public IndexerPageableRequestChain GetSearchRequests(BasicSearchCriteria searchCriteria)
=> PerformRequest(searchCriteria);
public Func<IDictionary<string, string>> GetCookies { get; set; }
public Action<IDictionary<string, string>, DateTime?> CookiesUpdater { get; set; }
private IndexerPageableRequestChain PerformRequest(SearchCriteriaBase query)
{
var chain = new IndexerPageableRequestChain();
var requests = new List<IndexerRequest> { new (new HttpRequest(GetTorrentSearchUrl(query)) { Headers = new HttpHeader(BrowserHeaders), AllowAutoRedirect = true }) };
if (query is TvSearchCriteria tvSearchCriteria)
{
// Always search for torrent groups (complete seasons) too
var seasonRegex = new Regex(@".*\s[Ss]{1}\d{2}([Ee]{1}\d{2,3})?$", RegexOptions.Compiled);
var seasonMatch = seasonRegex.Match(query.SanitizedSearchTerm);
if (seasonMatch.Success)
{
var seasonReplaceRegex = new Regex(@"[Ss]{1}\d{2}([Ee]{1}\d{2,3})?", RegexOptions.Compiled);
var newSearchQuery = seasonReplaceRegex.Replace(query.SanitizedSearchTerm, $"Season {tvSearchCriteria.Season}");
requests.Add(new IndexerRequest(new HttpRequest(GetTorrentSearchUrl(query, newSearchQuery)) { Headers = new HttpHeader(BrowserHeaders), AllowAutoRedirect = true }));
}
}
chain.Add(requests);
return chain;
}
private string GetTorrentSearchUrl(SearchCriteriaBase query, string overrideSearchTerm = null)
{
var qc = new NameValueCollection
{
{ "action", "advanced" },
{ "sizetype", "gb" },
{ "sizerange", "0.01" },
{ "title", overrideSearchTerm ?? GetSearchString(query.SanitizedSearchTerm) }
};
switch (query)
{
case MovieSearchCriteria:
qc.Add("filter_cat[1]", "1"); // HD Movies
qc.Add("filter_cat[2]", "1"); // SD Movies
break;
case TvSearchCriteria:
qc.Add("filter_cat[3]", "1"); // HD EPISODE
qc.Add("filter_cat[4]", "1"); // SD Episode
qc.Add("filter_cat[5]", "1"); // HD Season
qc.Add("filter_cat[6]", "1"); // SD Season
break;
}
return $"{Settings.BaseUrl}torrents.php?{qc.GetQueryString()}";
}
private string GetSearchString(string input)
{
input = input.Replace("Marvels", "Marvel"); // strip 's for better results
var regex = new Regex(@"(S\d{2})$", RegexOptions.Compiled);
return regex.Replace(input, "$1*"); // If we're just searching for a season (no episode), append an * to include all episodes of that season.
}
}
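// Illustration: for a TV query whose sanitized term ends in an SxxEyy token, PerformRequest above
// adds a second request with that token rewritten, e.g. with tvSearchCriteria.Season == 5:
//   "The Expanse S05"    -> extra search for "The Expanse Season 5"
//   "The Expanse S05E03" -> extra search for "The Expanse Season 5"
// so complete-season torrent groups match alongside single-episode releases.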
public class MoreThanTVParser : IParseIndexerResponse
{
public MoreThanTVSettings Settings { get; init; }
public IList<ReleaseInfo> ParseResponse(IndexerResponse indexerResponse)
{
var releases = new List<ReleaseInfo>();
try
{
var parser = new HtmlParser();
var document = parser.ParseDocument(indexerResponse.Content);
var torrents = document.QuerySelectorAll("#torrent_table > tbody > tr.torrent");
var movies = new[] { "movie" };
var tv = new[] { "season", "episode" };
// Loop through all torrents checking for groups
foreach (var torrent in torrents)
{
// Parse required data
var torrentGroup = torrent.QuerySelectorAll("table a[href^=\"/torrents.php?action=download\"]");
foreach (var downloadAnchor in torrentGroup)
{
var title = downloadAnchor.ParentElement.ParentElement.ParentElement.TextContent.Trim();
title = CleanUpTitle(title);
var category = torrent.QuerySelector(".cats_col div").GetAttribute("title");
// default to Other
var indexerCategory = NewznabStandardCategory.Other;
if (movies.Any(category.Contains))
{
indexerCategory = NewznabStandardCategory.Movies;
}
else if (tv.Any(category.Contains))
{
indexerCategory = NewznabStandardCategory.TV;
}
releases.Add(GetReleaseInfo(torrent, downloadAnchor, title, indexerCategory));
}
}
return releases;
}
catch (Exception ex)
{
throw new Exception("Error while parsing torrent response", ex);
}
}
/// <summary>
/// Gather release info from a torrent table row, targeting cells with CSS selectors.
/// </summary>
/// <param name="row"></param>
/// <param name="downloadAnchor"></param>
/// <param name="title"></param>
/// <param name="category"></param>
/// <returns></returns>
private ReleaseInfo GetReleaseInfo(IElement row, IElement downloadAnchor, string title, IndexerCategory category)
{
// count from bottom
const int FILES_COL = 8;
/*const int COMMENTS_COL = 7;*/
const int DATE_COL = 6;
const int FILESIZE_COL = 5;
const int SNATCHED_COL = 4;
const int SEEDS_COL = 3;
const int LEECHERS_COL = 2;
/*const int USER_COL = 1;*/
var downloadAnchorHref = (downloadAnchor as IHtmlAnchorElement).Href;
var queryParams = HttpUtility.ParseQueryString(downloadAnchorHref, Encoding.UTF8);
var torrentId = queryParams["id"];
var qFiles = row.QuerySelector("td:nth-last-child(" + FILES_COL + ")").TextContent;
var fileCount = ParseUtil.CoerceInt(qFiles);
var qPublishDate = row.QuerySelector("td:nth-last-child(" + DATE_COL + ") .time").Attributes["title"].Value;
var publishDate = DateTime.ParseExact(qPublishDate, "MMM dd yyyy, HH:mm", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
var qPoster = row.QuerySelector("div.tp-banner img")?.GetAttribute("src");
var poster = (qPoster != null && !qPoster.Contains("caticons")) ? qPoster : null;
var description = row.QuerySelector("div.tags")?.TextContent.Trim();
var fileSize = row.QuerySelector("td:nth-last-child(" + FILESIZE_COL + ")").TextContent.Trim();
var snatched = row.QuerySelector("td:nth-last-child(" + SNATCHED_COL + ")").TextContent.Trim();
var seeds = row.QuerySelector("td:nth-last-child(" + SEEDS_COL + ")").TextContent.Trim();
var leechs = row.QuerySelector("td:nth-last-child(" + LEECHERS_COL + ")").TextContent.Trim();
if (fileSize.Length <= 0 || snatched.Length <= 0 || seeds.Length <= 0 || leechs.Length <= 0)
{
// Size (xx.xx GB[ (Max)]) Snatches (xx) Seeders (xx) Leechers (xx)
throw new Exception($"We expected 4 torrent datas.");
}
var detailUrl = $"{Settings.BaseUrl}details.php";
var size = ParseUtil.GetBytes(fileSize);
var grabs = int.Parse(snatched, NumberStyles.AllowThousands, CultureInfo.InvariantCulture);
var seeders = int.Parse(seeds, NumberStyles.AllowThousands, CultureInfo.InvariantCulture);
var leechers = int.Parse(leechs, NumberStyles.AllowThousands, CultureInfo.InvariantCulture);
var detailsUrl = $"{detailUrl}?torrentid={torrentId}";
var downloadUrl = $"{detailUrl}?action=download&id={torrentId}";
var categories = new List<IndexerCategory> { category };
return new TorrentInfo
{
Title = title,
Categories = categories,
DownloadUrl = downloadUrl,
PublishDate = publishDate,
PosterUrl = poster,
Description = description,
Seeders = seeders,
Peers = seeders + leechers,
Files = fileCount,
Size = size,
Grabs = grabs,
Guid = downloadUrl,
InfoUrl = detailsUrl,
DownloadVolumeFactor = 0, // ratioless tracker
UploadVolumeFactor = 1
};
}
/// <summary>
/// Clean up the release title: replace dots with spaces and normalize 4K to 2160p.
/// </summary>
/// <param name="title"></param>
/// <returns></returns>
private string CleanUpTitle(string title)
{
return title
.Replace(".", " ")
.Replace("4K", "2160p"); // sonarr cleanup
}
public Action<IDictionary<string, string>, DateTime?> CookiesUpdater { get; set; }
}
public class MoreThanTVSettingsValidator : AbstractValidator<MoreThanTVSettings>
{
public MoreThanTVSettingsValidator()
{
RuleFor(c => c.Cookie).NotEmpty();
}
}
public class MoreThanTVSettings : IIndexerSettings
{
private static readonly MoreThanTVSettingsValidator Validator = new ();
public MoreThanTVSettings()
{
Cookie = "";
}
[FieldDefinition(1, Label = "Base Url", Type = FieldType.Select, SelectOptionsProviderAction = "getUrls", HelpText = "Select which baseurl Prowlarr will use for requests to the site")]
public string BaseUrl { get; set; }
[FieldDefinition(2, Label = "Cookie", HelpText = "Enter the cookies for the site, just copy everything after 'cookie:' from the request headers to the site", HelpLink = "https://wiki.servarr.com/prowlarr/faq#finding-cookies")]
public string Cookie { get; set; }
[FieldDefinition(3)]
public IndexerBaseSettings BaseSettings { get; set; } = new IndexerBaseSettings();
public NzbDroneValidationResult Validate()
{
return new NzbDroneValidationResult(Validator.Validate(this));
}
}
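A minimal, self-contained sketch of the advanced-search URL that GetTorrentSearchUrl builds above; for illustration it uses System.Web.HttpUtility.UrlEncode in place of Prowlarr's GetQueryString extension, so the exact encoding may differ slightly:

using System.Collections.Generic;
using System.Linq;
using System.Web;

public static class MoreThanTvSearchUrlSketch
{
    // Builds the same style of torrents.php query as GetTorrentSearchUrl:
    // action=advanced, a minimum size filter, the search title, and the
    // category flags for either movies or TV.
    public static string Build(string baseUrl, string title, bool tv)
    {
        var pairs = new List<KeyValuePair<string, string>>
        {
            new ("action", "advanced"),
            new ("sizetype", "gb"),
            new ("sizerange", "0.01"),
            new ("title", title)
        };

        var categories = tv
            ? new[] { "filter_cat[3]", "filter_cat[4]", "filter_cat[5]", "filter_cat[6]" } // HD/SD episodes and seasons
            : new[] { "filter_cat[1]", "filter_cat[2]" };                                  // HD/SD movies

        pairs.AddRange(categories.Select(c => new KeyValuePair<string, string>(c, "1")));

        var query = string.Join("&", pairs.Select(p => $"{HttpUtility.UrlEncode(p.Key)}={HttpUtility.UrlEncode(p.Value)}"));
        return $"{baseUrl}torrents.php?{query}";
    }
}

// Example: Build("https://www.morethantv.me/", "The Expanse Season 5*", tv: true)
// yields https://www.morethantv.me/torrents.php?action=advanced&sizetype=gb&sizerange=0.01&title=The+Expanse+Season+5*&filter_cat%5b3%5d=1&...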

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@@ -53,7 +54,7 @@ namespace NzbDrone.Core.Indexers.Definitions
LogResponseContent = true
};
requestBuilder.Method = HttpMethod.POST;
requestBuilder.Method = HttpMethod.Post;
requestBuilder.PostProcess += r => r.RequestTimeout = TimeSpan.FromSeconds(15);
var cookies = Cookies;

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Xml;
using System.Xml.Linq;
using NLog;
@@ -50,7 +51,7 @@ namespace NzbDrone.Core.Indexers.Newznab
var request = new HttpRequest(url, HttpAccept.Rss);
request.AllowAutoRedirect = true;
request.Method = HttpMethod.GET;
request.Method = HttpMethod.Get;
HttpResponse response;
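The two hunks above appear to move these request builders onto the BCL System.Net.Http.HttpMethod type (matching the new using System.Net.Http directives), whose static properties are PascalCase. A minimal sketch of the equivalence:

using System;
using System.Net.Http;

class HttpMethodSketch
{
    static void Main()
    {
        // The BCL type exposes PascalCase static instances; the underlying
        // verb strings are unchanged, so requests still go out as GET/POST.
        Console.WriteLine(HttpMethod.Get.Method);   // prints "GET"
        Console.WriteLine(HttpMethod.Post.Method);  // prints "POST"
    }
}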

Some files were not shown because too many files have changed in this diff.