def checkLibrary(self, result):
    """Merge the default info into *result*; when an IMDB id is present,
    also merge in the library tags for that id."""
    merged = merge_dictionaries(copy.deepcopy(self.default_info), copy.deepcopy(result))

    imdb_id = merged.get('imdb') if merged else None
    if imdb_id:
        return merge_dictionaries(merged, self.getLibraryTags(imdb_id))

    return merged
def add_options(self, section_name, options):
    """Register *options* under *section_name*, merging into any existing
    entry for that section.

    No additional actions (related to ro-rw options) are required here.
    """
    existing = self.options.get(section_name)
    if existing:
        self.options[section_name] = merge_dictionaries(existing, options)
    else:
        self.options[section_name] = options
def search(self, group):
    """Fetch trailer links for a movie group, grouped by resolution.

    Falls back to the alternative lookup (at most once) when a provider
    page reports a 404.
    """
    movie_name = get_title(group)
    url = self.urls['api'] % self.movieUrlName(movie_name)

    try:
        data = self.getCache('hdtrailers.%s' % get_identifier(group), url, show_error=False)
    except HTTPError:
        log.debug('No page found for: %s', movie_name)
        data = None

    result_data = {'480p': [], '720p': [], '1080p': []}
    if not data:
        return result_data

    tried_alternative = False
    for provider in self.providers:
        provider_results = self.findByProvider(data, provider)

        # Find alternative, but only once per search.
        if provider_results.get('404') and not tried_alternative:
            provider_results = self.findViaAlternative(group)
            tried_alternative = True

        result_data = merge_dictionaries(result_data, provider_results)

    return result_data
def search(self, q = '', types = None, **kwargs):
    """Search for media matching *q*, optionally limited to *types*.

    *types* may be a single string or an iterable of type names.  When the
    query contains an IMDB identifier, a direct info lookup is done
    instead of a free-text search.
    """
    # Make sure types is the correct instance
    if isinstance(types, str):
        types = [types]
    elif isinstance(types, (list, tuple, set)):
        types = list(types)

    imdb_identifier = get_imdb(q)

    if types:
        result = {}
        for media_type in types:
            if imdb_identifier:
                result[media_type] = fire_event('%s.info' % media_type, identifier = imdb_identifier)
            else:
                result[media_type] = fire_event('%s.search' % media_type, q = q)
    elif imdb_identifier:
        info = fire_event('movie.info', identifier = imdb_identifier, merge = True)
        result = {info['type']: [info]}
    else:
        result = fire_event('info.search', q = q, merge = True)

    return merge_dictionaries({'success': True}, result)
def fill_result(self, result):
    """Return *result* with the download-status defaults filled in."""
    return merge_dictionaries({
        'id': 0,
        'status': 'busy',
        'downloader': self.provider.getName(),
        'folder': '',
        'files': [],
    }, result)
def single(self, identifier=''):
    """Return the quality for *identifier*, merged with its stored DB doc.

    Returns an empty dict when the identifier is not found.
    """
    db = get_db()

    quality_dict = {}
    try:
        quality = db.get('quality', identifier, with_doc=True)['doc']
    except RecordNotFound:
        # Bug fix: the log call referenced the misspelled name
        # `indentifier`, which raised a NameError inside this handler
        # instead of logging the lookup failure.
        log.error("Unable to find '%s' in the quality DB", identifier)
        quality = None

    if quality:
        quality_dict = merge_dictionaries(self.getQuality(quality['identifier']), quality)

    return quality_dict
def all(self):
    """Return every known quality merged with its stored DB document.

    The merged list is cached, but only once every quality resolved
    successfully.
    """
    if self.cached_qualities:
        return self.cached_qualities

    db = get_db()

    merged_qualities = []
    for quality in self.qualities:
        stored_doc = db.get('quality', quality.get('identifier'), with_doc=True)['doc']
        merged_qualities.append(merge_dictionaries(quality, stored_doc))

    # Only cache a complete set; a partial list would mask DB misses.
    if len(merged_qualities) == len(self.qualities):
        self.cached_qualities = merged_qualities

    return merged_qualities
def combineOnIMDB(self, results):
    """Combine result items that share an IMDB id, preserving the order in
    which each id was first seen.

    Items without an IMDB id get a unique random key so they are never
    merged with each other.
    """
    temp = {}
    order = []

    # Combine on imdb id
    for item in results:
        # Bug fix: the original did `random_string = random_string()`,
        # shadowing the module-level helper with a local name and raising
        # UnboundLocalError on the first call.
        fallback_key = random_string()
        imdb = item.get('imdb', fallback_key)
        imdb = imdb if imdb else fallback_key

        if not temp.get(imdb):
            temp[imdb] = self.getLibraryTags(imdb)
            order.append(imdb)

        # Merge dicts
        temp[imdb] = merge_dictionaries(temp[imdb], item)

    # Make it a list again
    return [temp[x] for x in order]
def fillResult(self, result):
    """Return *result* with provider/release defaults filled in."""
    provider = self.provider
    provider_section = provider.getName().lower()

    defaults = {
        'id': 0,
        'protocol': provider.protocol,
        'type': provider.type,
        'provider': provider.getName(),
        'download': provider.loginDownload if provider.urls.get('login') else provider.download,
        'seed_ratio': Env.setting('seed_ratio', section=provider_section, default=''),
        'seed_time': Env.setting('seed_time', section=provider_section, default=''),
        'url': '',
        'name': '',
        'age': 0,
        'size': 0,
        'description': '',
        'score': 0,
    }

    return merge_dictionaries(defaults, result)
def call(self, request_params, use_json=True, **kwargs):
    """Perform a SABnzbd API call and return the response.

    With *use_json*, returns the value stored under the request's ``mode``
    key (falling back to the full decoded dict), or ``{}`` when SABnzbd
    reports an error; otherwise returns the raw response body.
    """
    query = try_url_encode(merge_dictionaries(request_params, {
        'apikey': self.conf('api_key'),
        'output': 'json'
    }))
    url = clean_host(self.conf('host'), ssl=self.conf('ssl')) + 'api?' + query

    data = self.urlopen(url, timeout=60, show_error=False,
                        headers={'User-Agent': Env.getIdentifier()}, **kwargs)

    if not use_json:
        return data

    decoded = json.loads(data)
    if decoded.get('error'):
        log.error('Error getting data from SABNZBd: %s', decoded.get('error'))
        return {}

    return decoded.get(request_params['mode']) or decoded
def _search(self, media, quality, results):
    """Search the PTP JSON API for *media* at *quality* and append every
    matching torrent (with a per-preference score) to *results*.
    """
    # NOTE(review): movie_title appears unused below — presumably kept for
    # symmetry with other providers; confirm before removing.
    movie_title = get_title(media)
    quality_id = quality['identifier']

    # Quality-specific search params, plus the media identifier as the
    # search string.
    params = merge_dictionaries(self.quality_search_params[quality_id].copy(), {
        'order_by': 'relevance',
        'order_way': 'descending',
        'searchstr': get_identifier(media)
    })

    url = '%s?json=noredirect&%s' % (self.urls['torrent'], try_url_encode(params))
    res = self.getJsonData(url)

    try:
        if not 'Movies' in res:
            return

        # Auth tokens needed to build the download URLs below.
        authkey = res['AuthKey']
        passkey = res['PassKey']

        for ptpmovie in res['Movies']:
            if not 'Torrents' in ptpmovie:
                log.debug('Movie %s (%s) has NO torrents', (ptpmovie['Title'], ptpmovie['Year']))
                continue

            log.debug('Movie %s (%s) has %d torrents', (ptpmovie['Title'], ptpmovie['Year'], len(ptpmovie['Torrents'])))
            for torrent in ptpmovie['Torrents']:
                torrent_id = try_int(torrent['Id'])
                torrentdesc = ''
                torrentscore = 0

                # Score adjustments driven by user preferences.
                if 'GoldenPopcorn' in torrent and torrent['GoldenPopcorn']:
                    torrentdesc += ' HQ'
                    if self.conf('prefer_golden'):
                        torrentscore += 5000
                if 'FreeleechType' in torrent:
                    torrentdesc += ' Freeleech'
                    if self.conf('prefer_freeleech'):
                        torrentscore += 7000
                if 'Scene' in torrent and torrent['Scene']:
                    torrentdesc += ' Scene'
                    if self.conf('prefer_scene'):
                        torrentscore += 2000
                    if self.conf('no_scene'):
                        torrentscore -= 2000
                if 'RemasterTitle' in torrent and torrent['RemasterTitle']:
                    torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle'])

                torrent_name = torrent['ReleaseName'] + ' - %s' % torrentdesc

                # Deferred quality check, evaluated later per result item.
                def extra_check(item):
                    return self.torrentMeetsQualitySpec(item, quality_id)

                results.append({
                    'id': torrent_id,
                    'name': torrent_name,
                    'Source': torrent['Source'],
                    'Checked': 'true' if torrent['Checked'] else 'false',
                    'Resolution': torrent['Resolution'],
                    'url': '%s?action=download&id=%d&authkey=%s&torrent_pass=%s' % (self.urls['torrent'], torrent_id, authkey, passkey),
                    'detail_url': self.urls['detail'] % torrent_id,
                    'date': try_int(time.mktime(parse(torrent['UploadTime']).timetuple())),
                    'size': try_int(torrent['Size']) / (1024 * 1024),  # bytes -> MB
                    'seeders': try_int(torrent['Seeders']),
                    'leechers': try_int(torrent['Leechers']),
                    'score': torrentscore,
                    'extra_check': extra_check,
                })
    except:
        log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
def fire_event(name, *args, **kwargs):
    """Fire the event *name*, dispatching to all registered handlers.

    Recognised option kwargs (popped before dispatch): ``is_after_event``,
    ``on_complete``, ``single`` (return first non-None handler result),
    ``merge`` (merge list/dict results), ``in_order`` (serialize handlers).
    Returns the (possibly merged/modified) handler results, or None when
    no handler is registered or an error occurs.
    """
    if name not in events: return

    #log.debug('Firing event %s', name)
    try:
        options = {
            'is_after_event': False,  # Fire after event
            'on_complete': False,  # onComplete event
            'single': False,  # Return single handler
            'merge': False,  # Merge items
            'in_order': False,  # Fire them in specific order, waits for the other to finish
        }

        # Do options: pop each recognised option out of kwargs so handlers
        # never see them.
        for x in options:
            try:
                val = kwargs[x]
                del kwargs[x]
                options[x] = val
            except:
                pass

        if len(events[name]) == 1:
            single = None
            try:
                single = events[name][0]['handler'](*args, **kwargs)
            except:
                log.error('Failed running single event: %s', traceback.format_exc())

            # Don't load thread for single event
            result = {
                'single': (single is not None, single),
            }
        else:
            # Multiple handlers: dispatch through the threaded Event bus.
            e = Event(name=name, threads=10, exc_info=True, traceback=True)

            for event in events[name]:
                e.handle(event['handler'], priority=event['priority'])

            # Make sure only 1 event is fired at a time when order is wanted
            kwargs['event_order_lock'] = threading.RLock() if options['in_order'] or options['single'] else None
            kwargs['event_return_on_result'] = options['single']

            # Fire
            result = e(*args, **kwargs)

        # Process results in natural-sort key order for deterministic output.
        result_keys = list(result.keys())
        result_keys.sort(key=nat_sort_key)

        if options['single'] and not options['merge']:
            results = None

            # Loop over results, stop when first not None result is found.
            for r_key in result_keys:
                r = result[r_key]
                if r[0] is True and r[1] is not None:
                    results = r[1]
                    break
                elif r[1]:
                    # r[1] holds the exception info when the handler failed.
                    error_handler(r[1])
                else:
                    log.debug('Assume disabled eventhandler for: %s', name)
        else:
            results = []
            for r_key in result_keys:
                r = result[r_key]
                if r[0] == True and r[1]:
                    results.append(r[1])
                elif r[1]:
                    error_handler(r[1])

            # Merge
            if options['merge'] and len(results) > 0:
                # Dict: reverse so earlier handlers take precedence in the merge.
                if isinstance(results[0], dict):
                    results.reverse()

                    merged = {}
                    for result in results:
                        merged = merge_dictionaries(merged, result, prepend_list=True)

                    results = merged
                # Lists: concatenate, skipping duplicate sublists.
                elif isinstance(results[0], list):
                    merged = []
                    for result in results:
                        if result not in merged:
                            merged += result

                    results = merged

        # Give listeners a chance to rewrite the combined results.
        modified_results = fire_event('result.modify.%s' % name, results, single=True)
        if modified_results:
            log.debug('Return modified results for %s', name)
            results = modified_results

        # Fire the companion ".after" event (guarded against recursion).
        if not options['is_after_event']:
            fire_event('%s.after' % name, is_after_event=True)

        if options['on_complete']:
            options['on_complete']()

        return results
    except Exception:
        log.error('%s: %s', (name, traceback.format_exc()))