def results(self, info):
    """Scrape the Premiumize cloud for files matching *info* and build source dicts.

    Stores query state on ``self``, runs ``self._scrape_cloud()``, converts each
    scrape result into a source dict appended to ``self.sources``, publishes the
    list as the ``pm-cloud_source_results`` window property, and returns
    ``self.sources`` (also returned early/on failure).

    :param info: dict with at least ``db_type``, ``title``, ``year``, ``season``,
        ``episode`` and ``scraper_settings`` keys.
    :return: list of source dicts accumulated in ``self.sources``.
    """
    try:
        self.info = info
        self.db_type = self.info.get("db_type")
        self.title = self.info.get("title")
        self.year = self.info.get("year")
        # rootname is "Title (Year)" when a year is known, else just the title.
        if self.year:
            self.rootname = '%s (%s)' % (self.title, self.year)
        else:
            self.rootname = self.title
        self.season = self.info.get("season")
        self.episode = self.info.get("episode")
        self.query = clean_title(self.title)
        # Episode content matches per-episode patterns; movies match per-year.
        self.file_query = self._episode_query_list() if self.db_type == 'episode' else self._year_query_list()
        self.extensions = supported_video_extensions()
        self._scrape_cloud()
        if not self.scrape_results:
            return self.sources
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        for item in self.scrape_results:
            try:
                file_name = normalize(item['name'])
                path = item['path']
                file_dl = item['id']
                size = float(item['size']) / 1073741824  # bytes -> GiB
                video_quality = get_release_quality(file_name, path)
                details = get_file_info(file_name)
                # Fall back to parsing the path when the name yields no details.
                if not details:
                    details = get_file_info(path)
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality)
                self.sources.append({
                    'name': file_name,
                    'label': label,
                    'multiline_label': multiline_label,
                    'title': file_name,
                    'quality': video_quality,
                    'size': size,
                    'url_dl': file_dl,
                    'id': file_dl,
                    'downloads': False,
                    'direct': True,
                    'source': self.scrape_provider,
                    'scrape_provider': self.scrape_provider
                })
            except Exception:
                # Best-effort: skip malformed items, keep the rest.
                # (Was a bare except; narrowed so SystemExit/KeyboardInterrupt propagate.)
                pass
        window.setProperty('pm-cloud_source_results', json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN premiumize scraper Exception', e)
    return self.sources
def results(self, info):
    """Scrape the local downloads directory for files matching *info*.

    Stores query state on ``self``, walks ``self.download_path`` via
    ``self._scrape_directory()``, converts each ``(name, path)`` result into a
    source dict appended to ``self.sources``, publishes the list as the
    ``downloads_source_results`` window property, and returns ``self.sources``.

    :param info: dict with at least ``db_type``, ``title``, ``year``, ``season``,
        ``episode`` and ``scraper_settings`` keys.
    :return: list of source dicts accumulated in ``self.sources``.
    """
    try:
        self.info = info
        self.db_type = self.info.get("db_type")
        self.download_path = settings.download_directory(self.db_type)
        self.title = self.info.get("title")
        self.year = self.info.get("year")
        self.season = self.info.get("season")
        self.episode = self.info.get("episode")
        self.title_query = clean_title(self.title)
        # Episode content matches per-season folders / per-episode files;
        # movies match per-year for both.
        self.folder_query = self._season_query_list() if self.db_type == 'episode' else self._year_query_list()
        self.file_query = self._episode_query_list() if self.db_type == 'episode' else self._year_query_list()
        self._scrape_directory(self.download_path)
        if not self.scrape_results:
            return self.sources
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        for item in self.scrape_results:
            try:
                file_name = item[0]
                file_dl = item[1]
                size = self._get_size(file_dl)
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name, file_dl)
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality)
                self.sources.append({
                    'name': file_name,
                    'label': label,
                    'multiline_label': multiline_label,
                    'title': file_name,
                    'quality': video_quality,
                    'size': size,
                    'url_dl': file_dl,
                    'id': file_dl,
                    'downloads': True,
                    'direct': True,
                    'source': self.scrape_provider,
                    'scrape_provider': self.scrape_provider
                })
            except Exception:
                # Best-effort: skip unreadable entries, keep the rest.
                # (Was a bare except; narrowed so SystemExit/KeyboardInterrupt propagate.)
                pass
        window.setProperty('downloads_source_results', json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN downloads scraper Exception', e)
    return self.sources
def results(self, info):
    """Search EasyNews for *info* and build source dicts from the hits.

    Queries ``EasyNews.search()``, truncates to ``self.max_results``, optionally
    drops items larger than ``self.max_bytes``, converts the rest into source
    dicts appended to ``self.sources``, publishes the list as the
    ``easynews_source_results`` window property, and returns ``self.sources``.

    :param info: dict with at least a ``scraper_settings`` key.
    :return: list of source dicts accumulated in ``self.sources``.
    """
    try:
        self.info = info
        search_name = self._search_name()
        files = EasyNews.search(search_name)
        files = files[0:self.max_results]
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        for item in files:
            try:
                if self.max_bytes:
                    # item['size'] is a display string like "1.4 GB";
                    # parse value + unit and convert before comparing.
                    # (raw string fixes the invalid \d/\s escape warning)
                    match = re.search(r'([\d.]+)\s+(.*)', item['size'])
                    if match:
                        size_bytes = self.to_bytes(*match.groups())
                        if size_bytes > self.max_bytes:
                            continue
                file_name = normalize(item['name'])
                file_dl = item['url_dl']
                size = float(int(item['rawSize'])) / 1073741824  # bytes -> GiB
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name, file_dl)
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality)
                self.sources.append({
                    'name': file_name,
                    'label': label,
                    'multiline_label': multiline_label,
                    'quality': video_quality,
                    'size': size,
                    'url_dl': file_dl,
                    'id': file_dl,
                    'local': False,
                    'direct': True,
                    'source': self.scrape_provider,
                    'scrape_provider': self.scrape_provider
                })
            except Exception:
                # Best-effort: skip malformed items, keep the rest.
                # (Was a bare except; narrowed so SystemExit/KeyboardInterrupt propagate.)
                pass
        window.setProperty('easynews_source_results', json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN easynews scraper Exception', e)
    return self.sources
def results(self, info):
    """Look up *info* in the local video library and build a single source dict.

    Resolves the library entry via ``self._get_library_video()``; when found,
    appends one source dict to ``self.sources``, publishes the list as the
    ``local_source_results`` window property, and returns ``self.sources``
    (returned unchanged when nothing matches or on failure).

    :param info: dict with at least ``db_type``, ``title``, ``year``, ``season``
        and ``episode`` keys, plus ``scraper_settings``.
    :return: list of source dicts accumulated in ``self.sources``.
    """
    try:
        self.info = info
        self.db_type = self.info.get("db_type")
        self.title = self.info.get("title")
        self.year = self.info.get("year")
        self.season = self.info.get("season")
        self.episode = self.info.get("episode")
        self.db_info = self._get_library_video(
            self.db_type, self.title, self.year, self.season, self.episode)
        # No library match -> nothing to add.
        if not self.db_info:
            return self.sources
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        display_name = self.db_info.get("name")
        library_id = self.db_info.get("file_id")
        # The library file id doubles as the playable path/url.
        playable = self.db_info.get("file_id")
        gb_size = self._get_size(playable)
        parsed_details = get_file_info(display_name)
        quality = get_release_quality(display_name, playable)
        label, multiline_label = build_internal_scrapers_label(
            self.label_settings, display_name, parsed_details, gb_size, quality)
        source = {
            'name': display_name,
            'label': label,
            'multiline_label': multiline_label,
            'quality': quality,
            'size': gb_size,
            'url_dl': playable,
            'url': playable,
            'id': library_id,
            'local': True,
            'direct': True,
            'source': self.scrape_provider,
            'scrape_provider': self.scrape_provider
        }
        self.sources.append(source)
        window.setProperty('local_source_results', json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN local scraper Exception', e)
    return self.sources
def results(self, info):
    """Search Furk for *info* and build source dicts for cached and uncached hits.

    Splits ``Furk.search()`` results into ready ("cached", capped at
    ``self.furk_limit``) and not-ready ("uncached") files, converts both groups
    into source dicts appended to ``self.sources``, publishes only the cached
    entries as the ``furk_source_results`` window property, and returns
    ``self.sources``.

    :param info: dict with at least a ``scraper_settings`` key.
    :return: list of source dicts accumulated in ``self.sources``.
    """
    try:
        self.info = info
        search_name = self._search_name()
        files = Furk.search(search_name)
        if not files:
            return self.sources
        active_downloads = self.get_active_downloads()
        # Keep video results only; 'is_ready' == '1' means instantly playable.
        cached_files = [
            i for i in files
            if i.get('type') not in ('default', 'audio', '') and i.get('is_ready') == '1'
        ][:self.furk_limit]
        uncached_files = [
            i for i in files
            if i.get('type') not in ('default', 'audio', '') and i not in cached_files
        ]
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        for i in cached_files:
            try:
                file_name = normalize(i['name'])
                file_id = i['id']
                files_num_video = i['files_num_video']
                size = float(int(i['size'])) / 1073741824  # bytes -> GiB
                # Size cap applies only to items that are not multi-video packs
                # (packs with more than 3 videos bypass the max_gb filter).
                if int(files_num_video) <= 3 and size > self.max_gb:
                    continue
                file_dl = i['url_dl']
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name, file_dl)
                furk_settings = {
                    'files_num_video': files_num_video,
                    'uncached': False,
                    'active_download': False
                }
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality,
                    **furk_settings)
                self.sources.append({
                    'name': file_name,
                    'label': label,
                    'multiline_label': multiline_label,
                    'title': file_name,
                    'quality': video_quality,
                    'size': size,
                    'url_dl': file_dl,
                    'id': file_id,
                    'local': False,
                    'direct': True,
                    'source': self.scrape_provider,
                    'scrape_provider': self.scrape_provider
                })
            except Exception as e:
                from modules.utils import logger
                logger('FURK ERROR - 65', e)
        for i in uncached_files:
            try:
                file_name = i['name']
                info_hash = i['info_hash']
                # Metadata may be missing on not-ready items; fall back safely.
                try:
                    files_num_video = i['files_num_video']
                except Exception:
                    files_num_video = 1
                try:
                    size = float(int(i['size'])) / 1073741824  # bytes -> GiB
                except Exception:
                    size = 0
                active_download = info_hash in active_downloads
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name)
                furk_settings = {
                    'files_num_video': files_num_video,
                    'uncached': True,
                    'active_download': active_download
                }
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality,
                    **furk_settings)
                self.sources.append({
                    'name': file_name,
                    'label': label,
                    'multiline_label': multiline_label,
                    'title': file_name,
                    'quality': video_quality,
                    'size': size,
                    'url_dl': info_hash,
                    'id': info_hash,
                    'local': False,
                    'direct': True,
                    'uncached': True,
                    'source': self.scrape_provider,
                    'scrape_provider': self.scrape_provider
                })
            except Exception as e:
                from modules.utils import logger
                logger('FURK ERROR - 96', e)
        # Only cached entries (no 'uncached' key) are published to the window.
        window.setProperty(
            'furk_source_results',
            json.dumps([i for i in self.sources if 'uncached' not in i]))
    except Exception as e:
        from modules.utils import logger
        logger('FEN furk scraper Exception', e)
    return self.sources
def results(self, info):
    """Scrape a configured source folder for files matching *info*.

    Stores query state on ``self``, reuses cached scrape results when present
    (else walks ``self.folder_path`` and caches for 2 hours), converts each
    ``(name, path)`` result into a source dict appended to ``self.sources``,
    publishes the list as the ``<provider>_source_results`` window property,
    and returns ``self.sources``.

    :param info: dict with at least ``db_type``, ``title``, ``year``, ``season``,
        ``episode`` and ``scraper_settings`` keys.
    :return: list of source dicts accumulated in ``self.sources``.
    """
    try:
        self.info = info
        self.db_type = self.info.get("db_type")
        self.folder_path = settings.source_folders_directory(
            self.db_type, self.scrape_provider)
        if not self.folder_path:
            return self.sources
        self.title = self.info.get("title")
        self.year = self.info.get("year")
        self.season = self.info.get("season")
        self.episode = self.info.get("episode")
        self.title_query = clean_title(self.title)
        # Episode content matches per-season folders / per-episode files;
        # movies match per-year for both.
        self.folder_query = self._season_query_list() if self.db_type == 'episode' else self._year_query_list()
        self.file_query = self._episode_query_list() if self.db_type == 'episode' else self._year_query_list()
        cache_name = 'fen_%s_SCRAPER_%s_%s_%s_%s' % (
            self.scrape_provider, self.title, self.year, self.season, self.episode)
        cache = _cache.get(cache_name)
        if cache:
            self.scrape_results = cache
        else:
            self._scrape_directory(self.folder_path)
            _cache.set(cache_name, self.scrape_results,
                       expiration=datetime.timedelta(hours=2))
        if not self.scrape_results:
            return self.sources
        self.label_settings = label_settings(
            self.info['scraper_settings'], 'folders', self.scraper_name)
        for item in self.scrape_results:
            try:
                file_name = item[0]
                file_dl = item[1]
                # .strm files have no meaningful byte size; tag them instead.
                size = self._get_size(file_dl) if not file_dl.endswith('.strm') else 'strm'
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name, file_dl)
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality)
                self.sources.append({
                    'name': file_name,
                    'label': label,
                    'multiline_label': multiline_label,
                    'title': file_name,
                    'quality': video_quality,
                    'size': size,
                    'url_dl': file_dl,
                    'id': file_dl,
                    self.scrape_provider: True,  # dynamic flag keyed by provider name
                    'direct': True,
                    'source': self.scrape_provider,
                    'scrape_provider': self.scrape_provider
                })
            except Exception:
                # Best-effort: skip unreadable entries, keep the rest.
                # (Was a bare except; narrowed so SystemExit/KeyboardInterrupt propagate.)
                pass
        window.setProperty('%s_source_results' % self.scrape_provider,
                           json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN folders scraper Exception', e)
    return self.sources