def checkLibrary(self, result):

    result = mergeDicts(copy.deepcopy(self.default_info), copy.deepcopy(result))

    if result and result.get('imdb'):
        return mergeDicts(result, self.getLibraryTags(result['imdb']))

    return result
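# Rough sketch of the merge behaviour the helpers in this file rely on. This is an
# assumption for illustration, not the actual CouchPotato mergeDicts (which also takes
# a prepend_list flag): the second dict is folded recursively into a copy of the first,
# and its values win on conflicts, so defaults passed first never overwrite real data.
def merge_dicts_sketch(a, b):
    merged = dict(a)
    for key, value in b.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = merge_dicts_sketch(merged[key], value)  # merge nested dicts
        else:
            merged[key] = value  # value from the second argument takes precedence
    return merged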
def search(self, q = '', types = None, **kwargs):

    # Make sure types is the correct instance
    if isinstance(types, (str, unicode)):
        types = [types]
    elif isinstance(types, (list, tuple, set)):
        types = list(types)

    imdb_identifier = getImdb(q)

    if not types:
        if imdb_identifier:
            result = fireEvent('movie.info', identifier = imdb_identifier, merge = True)
            result = {result['type']: [result]}
        else:
            result = fireEvent('info.search', q = q, merge = True)
    else:
        result = {}
        for media_type in types:
            if imdb_identifier:
                result[media_type] = fireEvent('%s.info' % media_type, identifier = imdb_identifier)
            else:
                result[media_type] = fireEvent('%s.search' % media_type, q = q)

    return mergeDicts({
        'success': True,
    }, result)
def search(self, group):

    movie_name = getTitle(group)

    url = self.urls["api"] % self.movieUrlName(movie_name)
    try:
        data = self.getCache("hdtrailers.%s" % getIdentifier(group), url, show_error=False)
    except HTTPError:
        log.debug("No page found for: %s", movie_name)
        data = None

    result_data = {"480p": [], "720p": [], "1080p": []}

    if not data:
        return result_data

    did_alternative = False
    for provider in self.providers:
        results = self.findByProvider(data, provider)

        # Find alternative
        if results.get("404") and not did_alternative:
            results = self.findViaAlternative(group)
            did_alternative = True

        result_data = mergeDicts(result_data, results)

    return result_data
def single(self, identifier = ''):

    db = get_db()

    quality_dict = {}
    quality = db.get('quality', identifier, with_doc = True)['doc']
    if quality:
        quality_dict = mergeDicts(self.getQuality(quality['identifier']), quality)

    return quality_dict
def fillResult(self, result):

    defaults = {
        'id': 0,
        'status': 'busy',
        'downloader': self.provider.getName(),
        'folder': '',
        'files': [],
    }

    return mergeDicts(defaults, result)
def all(self):

    if self.cached_qualities:
        return self.cached_qualities

    db = get_db()

    temp = []
    for quality in self.qualities:
        quality_doc = db.get('quality', quality.get('identifier'), with_doc = True)['doc']
        q = mergeDicts(quality, quality_doc)
        temp.append(q)

    if len(temp) == len(self.qualities):
        self.cached_qualities = temp

    return temp
def fillResult(self, result):

    defaults = {
        'id': 0,
        'protocol': self.provider.protocol,
        'type': self.provider.type,
        'provider': self.provider.getName(),
        'download': self.provider.loginDownload if self.provider.urls.get('login') else self.provider.download,
        'seed_ratio': Env.setting('seed_ratio', section = self.provider.getName().lower(), default = ''),
        'seed_time': Env.setting('seed_time', section = self.provider.getName().lower(), default = ''),
        'url': '',
        'name': '',
        'age': 0,
        'size': 0,
        'description': '',
        'score': 0
    }

    return mergeDicts(defaults, result)
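# Illustration of the defaults pattern used by both fillResult variants above (the
# values here are made up for the example): because the incoming result is merged on
# top of the defaults, a provider only needs to set the fields it actually knows.
example_defaults = {'id': 0, 'url': '', 'name': '', 'age': 0, 'size': 0, 'score': 0}
example_result = {'name': 'Movie.2014.720p.BluRay', 'size': 4480, 'score': 20}
filled = merge_dicts_sketch(example_defaults, example_result)
# filled == {'id': 0, 'url': '', 'name': 'Movie.2014.720p.BluRay', 'age': 0, 'size': 4480, 'score': 20}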
def combineOnIMDB(self, results):

    temp = {}
    order = []

    # Combine on imdb id
    for item in results:
        random_string = randomString()
        imdb = item.get('imdb', random_string)
        imdb = imdb if imdb else random_string

        if not temp.get(imdb):
            temp[imdb] = self.getLibraryTags(imdb)
            order.append(imdb)

        # Merge dicts
        temp[imdb] = mergeDicts(temp[imdb], item)

    # Make it a list again
    temp_list = [temp[x] for x in order]

    return temp_list
def _search(self, media, quality, results):

    movie_title = getTitle(media)
    quality_id = quality['identifier']

    params = mergeDicts(self.quality_search_params[quality_id].copy(), {
        'order_by': 'relevance',
        'order_way': 'descending',
        'searchstr': getIdentifier(media)
    })

    url = '%s?json=noredirect&%s' % (self.urls['torrent'], tryUrlencode(params))
    res = self.getJsonData(url)

    try:
        if not 'Movies' in res:
            return

        authkey = res['AuthKey']
        passkey = res['PassKey']

        for ptpmovie in res['Movies']:
            if not 'Torrents' in ptpmovie:
                log.debug('Movie %s (%s) has NO torrents', (ptpmovie['Title'], ptpmovie['Year']))
                continue

            log.debug('Movie %s (%s) has %d torrents', (ptpmovie['Title'], ptpmovie['Year'], len(ptpmovie['Torrents'])))
            for torrent in ptpmovie['Torrents']:
                torrent_id = tryInt(torrent['Id'])
                torrentdesc = '%s %s %s' % (torrent['Resolution'], torrent['Source'], torrent['Codec'])
                torrentscore = 0

                if 'GoldenPopcorn' in torrent and torrent['GoldenPopcorn']:
                    torrentdesc += ' HQ'
                    if self.conf('prefer_golden'):
                        torrentscore += 5000
                if 'FreeleechType' in torrent:
                    torrentdesc += ' Freeleech'
                    if self.conf('prefer_freeleech'):
                        torrentscore += 7000
                if 'Scene' in torrent and torrent['Scene']:
                    torrentdesc += ' Scene'
                    if self.conf('prefer_scene'):
                        torrentscore += 2000
                if 'RemasterTitle' in torrent and torrent['RemasterTitle']:
                    torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle'])

                torrentdesc += ' (%s)' % quality_id
                torrent_name = re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) - %s' % (movie_title, ptpmovie['Year'], torrentdesc))

                def extra_check(item):
                    return self.torrentMeetsQualitySpec(item, quality_id)

                results.append({
                    'id': torrent_id,
                    'name': torrent_name,
                    'Source': torrent['Source'],
                    'Checked': 'true' if torrent['Checked'] else 'false',
                    'Resolution': torrent['Resolution'],
                    'url': '%s?action=download&id=%d&authkey=%s&torrent_pass=%s' % (self.urls['torrent'], torrent_id, authkey, passkey),
                    'detail_url': self.urls['detail'] % torrent_id,
                    'date': tryInt(time.mktime(parse(torrent['UploadTime']).timetuple())),
                    'size': tryInt(torrent['Size']) / (1024 * 1024),
                    'seeders': tryInt(torrent['Seeders']),
                    'leechers': tryInt(torrent['Leechers']),
                    'score': torrentscore,
                    'extra_check': extra_check,
                })

    except:
        log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
def addOptions(self, section_name, options):

    if not self.options.get(section_name):
        self.options[section_name] = options
    else:
        self.options[section_name] = mergeDicts(self.options[section_name], options)
def call(self, request_params, use_json = True, **kwargs):

    url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
        'apikey': self.conf('api_key'),
        'output': 'json'
    }))

    data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
    if use_json:
        d = json.loads(data)
        if d.get('error'):
            log.error('Error getting data from SABNZBd: %s', d.get('error'))
            return {}

        return d.get(request_params['mode']) or d
    else:
        return data
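# Hypothetical shape of the request built by call() above; the host, api key and the
# query-string join are stand-ins for the real conf()/tryUrlencode helpers. Because the
# apikey/output dict is the second argument to the merge, it overrides anything the
# caller placed in request_params.
request_params = {'mode': 'queue', 'limit': 10}
params = merge_dicts_sketch(request_params, {'apikey': 'MY_API_KEY', 'output': 'json'})
url = 'http://localhost:8080/sabnzbd/api?' + '&'.join('%s=%s' % (k, v) for k, v in params.items())
# e.g. http://localhost:8080/sabnzbd/api?mode=queue&limit=10&apikey=MY_API_KEY&output=json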
def fireEvent(name, *args, **kwargs):
    if name not in events: return

    #log.debug('Firing event %s', name)
    try:

        options = {
            'is_after_event': False,  # Fire after event
            'on_complete': False,  # onComplete event
            'single': False,  # Return single handler
            'merge': False,  # Merge items
            'in_order': False,  # Fire them in specific order, waits for the other to finish
        }

        # Do options
        for x in options:
            try:
                val = kwargs[x]
                del kwargs[x]
                options[x] = val
            except:
                pass

        if len(events[name]) == 1:

            single = None
            try:
                single = events[name][0]['handler'](*args, **kwargs)
            except:
                log.error('Failed running single event: %s', traceback.format_exc())

            # Don't load thread for single event
            result = {
                'single': (single is not None, single),
            }

        else:

            e = Event(name = name, threads = 10, exc_info = True, traceback = True)

            for event in events[name]:
                e.handle(event['handler'], priority = event['priority'])

            # Make sure only 1 event is fired at a time when order is wanted
            kwargs['event_order_lock'] = threading.RLock() if options['in_order'] or options['single'] else None
            kwargs['event_return_on_result'] = options['single']

            # Fire
            result = e(*args, **kwargs)

        result_keys = result.keys()
        result_keys.sort(key = natsortKey)

        if options['single'] and not options['merge']:
            results = None

            # Loop over results, stop when first not None result is found.
            for r_key in result_keys:
                r = result[r_key]
                if r[0] is True and r[1] is not None:
                    results = r[1]
                    break
                elif r[1]:
                    errorHandler(r[1])
                else:
                    log.debug('Assume disabled eventhandler for: %s', name)

        else:
            results = []
            for r_key in result_keys:
                r = result[r_key]
                if r[0] == True and r[1]:
                    results.append(r[1])
                elif r[1]:
                    errorHandler(r[1])

            # Merge
            if options['merge'] and len(results) > 0:

                # Dict
                if isinstance(results[0], dict):
                    results.reverse()

                    merged = {}
                    for result in results:
                        merged = mergeDicts(merged, result, prepend_list = True)

                    results = merged
                # Lists
                elif isinstance(results[0], list):
                    merged = []
                    for result in results:
                        if result not in merged:
                            merged += result

                    results = merged

        modified_results = fireEvent('result.modify.%s' % name, results, single = True)
        if modified_results:
            log.debug('Return modified results for %s', name)
            results = modified_results

        if not options['is_after_event']:
            fireEvent('%s.after' % name, is_after_event = True)

        if options['on_complete']:
            options['on_complete']()

        return results
    except Exception:
        log.error('%s: %s', (name, traceback.format_exc()))
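# Standalone illustration of the dict-merge branch above, with made-up handler results:
# the result list is reversed and folded together one dict at a time, so on conflicting
# keys the first handler in the original order wins because its dict is merged last.
handler_results = [
    {'titles': ['The Shawshank Redemption'], 'imdb': 'tt0111161'},
    {'titles': ['Shawshank'], 'year': 1994},
]
handler_results.reverse()
merged = {}
for r in handler_results:
    merged = merge_dicts_sketch(merged, r)
# merged keeps keys from both handlers ('imdb' and 'year'); for the conflicting
# 'titles' key the first handler's value survives. The real mergeDicts additionally
# combines list values via its prepend_list flag, which this sketch does not model.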