diff --git a/README.md b/README.md
index bf9df4dd..c3931153 100644
--- a/README.md
+++ b/README.md
@@ -492,6 +492,15 @@ If github is not your cup of tea;
 > - You can find a full list of all possible parameters and their respective values at "https://panel.orionoid.com/" in the "Developers" menu, section "API Docs" under "Stream API".
 >
 > </details>
+>
+> <details>
+> <summary><b>zilean:</b></summary>
+>
+> - Zilean is a service that lets you search arr-less content sourced from [DebridMediaManager](https://github.com/debridmediamanager/debrid-media-manager).
+> - You can integrate zilean into plex_debrid by navigating to '/Settings/Scraper/Sources/Edit/Add source/zilean'.
+> - Details of this project can be found at https://github.com/iPromKnight/zilean
+>
+> </details>
 
 ### :arrow_down_small: Debrid Services:
 
diff --git a/content/classes.py b/content/classes.py
index 2349d2b5..f17e1bc7 100644
--- a/content/classes.py
+++ b/content/classes.py
@@ -926,17 +926,19 @@ def watch(self):
             retries = int(float(trigger[2]))
             if retries == 0:
                 return
+
+            message = 'retrying download in ' + str(
+                round(int(ui_settings.loop_interval_seconds) / 60)) + 'min for item: ' + self.query() + ' - version/s [' + '],['.join(
+                names) + ']'
             if not self in media.ignore_queue:
                 self.ignored_count = 1
                 media.ignore_queue += [self]
-                ui_print('retrying download in 30min for item: ' + self.query() + ' - version/s [' + '],['.join(
-                    names) + '] - attempt ' + str(self.ignored_count) + '/' + str(retries))
+                ui_print(message + ' - attempt ' + str(self.ignored_count) + '/' + str(retries))
             else:
                 match = next((x for x in media.ignore_queue if self == x), None)
                 if match.ignored_count < retries:
                     match.ignored_count += 1
-                    ui_print('retrying download in 30min for item: ' + self.query() + ' - version/s [' + '],['.join(
-                        names) + '] - attempt ' + str(match.ignored_count) + '/' + str(retries))
+                    ui_print(message + ' - attempt ' + str(match.ignored_count) + '/' + str(retries))
                 else:
                     media.ignore_queue.remove(match)
                     ignore.add(self)
@@ -1366,7 +1368,7 @@ def download(self, retries=0, library=[], parentReleases=[]):
                 if len(self.Episodes) > 2:
                     if self.season_pack(scraped_releases):
                         debrid_downloaded, retry = self.debrid_download()
-                    # if scraper.traditional() or debrid_downloaded:
+                    if scraper.traditional() or debrid_downloaded:
                         for episode in self.Episodes:
                             episode.skip_scraping = True
                 # If there was nothing downloaded, scrape specifically for this season
diff --git a/scraper/services/__init__.py b/scraper/services/__init__.py
index fe45e7e3..bd609bf7 100644
--- a/scraper/services/__init__.py
+++ b/scraper/services/__init__.py
@@ -7,10 +7,11 @@
 from scraper.services import orionoid
 from scraper.services import nyaa
 from scraper.services import torrentio
+from scraper.services import zilean
 
 #define subclass method
 def __subclasses__():
-    return [rarbg,x1337,jackett,prowlarr,orionoid,nyaa,torrentio]
+    return [rarbg,x1337,jackett,prowlarr,orionoid,nyaa,torrentio,zilean]
 
 active = ['torrentio']
 overwrite = []
diff --git a/scraper/services/zilean.py b/scraper/services/zilean.py
new file mode 100644
index 00000000..5331c23c
--- /dev/null
+++ b/scraper/services/zilean.py
@@ -0,0 +1,130 @@
+# import modules
+from base import *
+from ui.ui_print import *
+import urllib.parse
+import releases
+import re
+
+base_url = "http://localhost:8181"
+name = "zilean"
+timeout_sec = 10
+session = requests.Session()
+
+
+def setup(cls, new=False):
+    from settings import settings_list
+    from scraper.services import active
+    settings = []
+    for category, allsettings in settings_list:
+        for setting in allsettings:
+            if setting.cls == cls:
+                settings += [setting]
+    if settings == []:
+        if not cls.name in active:
+            active += [cls.name]
+    back = False
+    if not new:
+        while not back:
+            print("0) Back")
+            indices = []
+            for index, setting in enumerate(settings):
+                print(str(index + 1) + ') ' + setting.name)
+                indices += [str(index + 1)]
+            print()
+            if settings == []:
+                print("Nothing to edit!")
+                print()
+                time.sleep(3)
+                return
+            choice = input("Choose an action: ")
+            if choice in indices:
+                settings[int(choice) - 1].input()
+                if not cls.name in active:
+                    active += [cls.name]
+                back = True
+            elif choice == '0':
+                back = True
+    else:
+        print()
+        indices = []
+        for setting in settings:
+            if setting.name == "Zilean Base URL":
+                setting.setup()
+        if not cls.name in active:
+            active += [cls.name]
+
+
+def scrape(query, altquery):
+    from scraper.services import active
+    ui_print("[zilean] searching for " + query + " accepting titles that regex match " + altquery)
+    global base_url
+    scraped_releases = []
+    if not 'zilean' in active:
+        return scraped_releases
+
+    matches_regex = altquery
+    if altquery == "(.*)":
+        matches_regex = query
+    media_type = "show" if regex.search(r'(S[0-9]|complete|S\?[0-9])', matches_regex, regex.I) else "movie"
+
+    opts = []
+    title = query
+    if media_type == "show":
+        s = (regex.search(r'(?<=S)([0-9]+)', matches_regex, regex.I).group()
+             if regex.search(r'(?<=S)([0-9]+)', matches_regex, regex.I) else None)
+        e = (regex.search(r'(?<=E)([0-9]+)', matches_regex, regex.I).group()
+             if regex.search(r'(?<=E)([0-9]+)', matches_regex, regex.I) else None)
+        if s is not None and int(s) != 0:
+            opts.append('season=' + str(int(s)))
+        if e is not None and int(e) != 0:
+            opts.append('episode=' + str(int(e)))
+        title = re.sub(r'S[0-9]+', '', title, flags=re.IGNORECASE).strip()
+        title = re.sub(r'E[0-9]+', '', title, flags=re.IGNORECASE).strip()
+    else:
+        # find year match at the end of the query string
+        year_regex = regex.search(r'(.*)\.([12][0-9]{3})$', query, regex.I)
+        if year_regex:
+            opts.append('year=' + year_regex.group(2))
+            title = year_regex.group(1)
+
+    title = title.replace('.', ' ').replace('?', ' ').strip()
+    opts.append('query=' + urllib.parse.quote(title))
+
+    if base_url.endswith('/'):
+        base_url = base_url[:-1]
+    search_url = base_url + "/dmm/filtered?" + '&'.join(opts)
+
+    try:
+        ui_print("[zilean] using search URL: " + search_url)
+        response = session.get(search_url, timeout=timeout_sec)
+
+        if not response.status_code == 200:
+            ui_print('[zilean] error ' + str(
+                response.status_code) + ': failed response from zilean. ' + response.text)
+            return []
+
+    except requests.exceptions.Timeout:
+        ui_print('[zilean] error: zilean request timed out.')
+        return []
+    except:
+        ui_print(
+            '[zilean] error: zilean couldn\'t be reached. Make sure your zilean base url [' + base_url + '] is correctly formatted.')
+        return []
+
+    try:
+        response = json.loads(response.content, object_hook=lambda d: SimpleNamespace(**d))
+    except:
+        ui_print('[zilean] error: unable to parse response: ' + response.text)
+        return []
+
+    ui_print('[zilean] ' + str(len(response)) + ' results found.')
+    for result in response[:]:
+        if regex.match(r'(' + altquery + ')', result.rawTitle, regex.I):
+            links = ['magnet:?xt=urn:btih:' + result.infoHash + '&dn=&tr=']
+            seeders = 0  # not available
+            scraped_releases += [releases.release(
+                '[zilean]', 'torrent', result.rawTitle, [], float(result.size) / 1000000000, links, seeders)]
+        else:
+            ui_print('[zilean] skipping ' + result.rawTitle + ' because it does not match deviation ' + altquery)
+
+    return scraped_releases
diff --git a/settings/__init__.py b/settings/__init__.py
index ed619d83..61ee39ec 100644
--- a/settings/__init__.py
+++ b/settings/__init__.py
@@ -380,6 +380,7 @@ def get(self):
         setting('Nyaa sleep time', 'Enter a time in seconds to sleep between requests (default: "5"): ',scraper.services.nyaa, 'sleep', hidden=True),
         setting('Nyaa proxy', 'Enter a proxy to use for nyaa (default: "nyaa.si"): ',scraper.services.nyaa, 'proxy', hidden=True),
         setting('Torrentio Scraper Parameters','Please enter a valid torrentio manifest url: ',scraper.services.torrentio, 'default_opts', entry="parameter", help='This settings lets you control the torrentio scraping parameters. Visit "https://torrentio.strem.fun/configure" and configure your settings. Dont choose a debrid service. The "manifest url" will be copied to your clipboard.', hidden=True),
+        setting('Zilean Base URL', 'Please specify your Zilean base URL: ', scraper.services.zilean, 'base_url', hidden=True),
     ]
     ],
     ['Debrid Services', [
@@ -412,6 +413,7 @@ def get(self):
         setting('Show Menu on Startup', 'Please enter "true" or "false": ', ui_settings, 'run_directly'),
         setting('Debug printing', 'Please enter "true" or "false": ', ui_settings, 'debug'),
         setting('Log to file', 'Please enter "true" or "false": ', ui_settings, 'log'),
+        setting('Watchlist loop interval (sec)', 'Please enter an integer value in seconds: ', ui_settings, 'loop_interval_seconds'),
         setting('version', 'No snooping around! :D This is for compatability reasons.', ui_settings, 'version', hidden=True),
     ]
 
diff --git a/ui/__init__.py b/ui/__init__.py
index 51bfe927..c115feb6 100644
--- a/ui/__init__.py
+++ b/ui/__init__.py
@@ -403,7 +403,7 @@ def threaded(stop):
     else:
         print("Type 'exit' to return to the main menu.")
     timeout = 5
-    regular_check = 1800
+    regular_check = int(ui_settings.loop_interval_seconds)
     timeout_counter = 0
     library = content.classes.library()[0]()
     # get entire plex_watchlist
diff --git a/ui/ui_settings.py b/ui/ui_settings.py
index f46f6bb1..94102449 100644
--- a/ui/ui_settings.py
+++ b/ui/ui_settings.py
@@ -2,3 +2,4 @@
 run_directly = "true"
 debug = "false"
 log = "false"
+loop_interval_seconds = 1800
\ No newline at end of file