def results(self, info):
	"""Scrape the user's AllDebrid cloud for sources matching *info*.

	Populates self.sources with source dicts and returns it; also hands the
	results to internal_results() for provider bookkeeping. Returns early
	(via internal_results) when the 'ad' debrid is disabled or the cloud
	scrape found nothing.
	"""
	try:
		if not enabled_debrids_check('ad'):
			return internal_results(self.scrape_provider, self.sources)
		self.title_filter = filter_by_name(self.scrape_provider)
		self.sources, self.folder_results, self.scrape_results = [], [], []
		self.db_type = info.get('db_type')
		self.title = info.get('title')
		self.year = info.get('year')
		self.rootname = '%s (%s)' % (self.title, self.year) if self.year else self.title
		self.season = info.get('season')
		self.episode = info.get('episode')
		if self.db_type == 'episode':
			self.seas_ep_query_list = seas_ep_query_list(self.season, self.episode)
		self.extensions = supported_video_extensions()
		self.folder_query = clean_title(normalize(self.title))
		self._scrape_cloud()
		if not self.scrape_results:
			return internal_results(self.scrape_provider, self.sources)
		self.aliases = get_aliases_titles(info.get('aliases', []))
		built = []
		for item in self.scrape_results:
			try:
				file_name = normalize(item['filename'])
				# Files coming from an assigned folder bypass the title filter.
				if self.title_filter and 'assigned_folder' not in item:
					if not check_title(self.title, file_name, self.aliases, self.year, self.season, self.episode):
						continue
				file_dl = item['link']
				URLName = clean_file_name(file_name).replace('html', ' ').replace('+', ' ').replace('-', ' ')
				size = round(float(int(item['size'])) / 1073741824, 2)  # bytes -> GB (2^30)
				video_quality, details = get_file_info(name_info=release_info_format(file_name))
				built.append({
					'name': file_name, 'title': file_name, 'URLName': URLName,
					'quality': video_quality, 'size': size, 'size_label': '%.2f GB' % size,
					'extraInfo': details, 'url_dl': file_dl, 'id': file_dl,
					'downloads': False, 'direct': True,
					'source': self.scrape_provider, 'scrape_provider': self.scrape_provider})
			except:
				# Best-effort: a malformed cloud item is skipped, not fatal.
				pass
		self.sources = built
	except Exception as e:
		from modules.kodi_utils import logger
		logger('FEN alldebrid scraper Exception', e)
	internal_results(self.scrape_provider, self.sources)
	return self.sources
def results(self, info):
	"""Scrape the user's configured source folders for files matching *info*.

	Walks the folder tree via self._scrape_directory, filters filenames,
	builds source dicts into self.sources, reports them through
	internal_results() and returns the list.
	"""
	try:
		self.info = info
		self.db_type = self.info.get('db_type')
		self.folder_path = source_folders_directory(self.db_type, self.scrape_provider)
		if not self.folder_path:
			return internal_results(self.scraper_name, self.sources)
		self.title = self.info.get('title')
		self.year = self.info.get('year')
		self.rootname = '%s (%s)' % (self.title, self.year) if self.year else self.title
		self.season = self.info.get('season')
		self.episode = self.info.get('episode')
		self.title_query = clean_title(normalize(self.title))
		# Episodes match on season queries, movies on year queries.
		self.folder_query = self._season_query_list() if self.db_type == 'episode' else self._year_query_list()
		self._scrape_directory((self.folder_path, False))
		if not self.scrape_results:
			return internal_results(self.scraper_name, self.sources)
		self.aliases = get_aliases_titles(info.get('aliases', []))
		collected = []
		for entry in self.scrape_results:
			try:
				# entry: (raw_name, download_url[, size])
				file_name = normalize(entry[0])
				if self.title_filter:
					if not check_title(self.title, file_name, self.aliases, self.year, self.season, self.episode):
						continue
				file_dl = entry[1]
				URLName = clean_file_name(file_name).replace('html', ' ').replace('+', ' ').replace('-', ' ')
				try:
					size = entry[2]
				except:
					# No size collected during the walk; fetch it lazily.
					size = self._get_size(file_dl)
				video_quality, details = get_file_info(name_info=release_info_format(file_name))
				collected.append({
					'name': file_name, 'title': file_name, 'URLName': URLName,
					'quality': video_quality, 'size': size, 'size_label': '%.2f GB' % size,
					'extraInfo': details, 'url_dl': file_dl, 'id': file_dl,
					self.scrape_provider: True, 'direct': True,
					'source': self.scraper_name, 'scrape_provider': 'folders'})
			except:
				pass
		self.sources = collected
	except Exception as e:
		from modules.kodi_utils import logger
		logger('FEN folders scraper Exception', e)
	internal_results(self.scraper_name, self.sources)
	return self.sources
def _scrape_cloud(self):
	"""Collect candidate magnets from the AllDebrid cloud and scan each one.

	Only magnets with statusCode == 4 are considered. A magnet is kept when
	its assigned content equals the requested rootname, or (when unassigned)
	when its cleaned folder name contains the folder query or is empty.
	Each kept folder is scanned by self._scrape_folders on its own thread.
	"""
	try:
		try:
			cloud_items = AllDebrid.user_cloud()['magnets']
		except:
			# Cloud fetch failed (network/auth); nothing to scrape.
			return self.sources
		add_folder = self.folder_results.append
		for magnet in cloud_items:
			# statusCode 4 is the only state scraped here — presumably "ready".
			if magnet['statusCode'] != 4:
				continue
			display_name = normalize(magnet['filename'])
			cleaned = clean_title(display_name)
			assigned = self._assigned_content(display_name)
			if assigned:
				if assigned == self.rootname:
					add_folder((display_name, magnet, True))
			elif self.folder_query in cleaned or not cleaned:
				add_folder((display_name, magnet, False))
		if not self.folder_results:
			return self.sources
		workers = [Thread(target=self._scrape_folders, args=(entry,)) for entry in self.folder_results]
		for worker in workers:
			worker.start()
		for worker in workers:
			worker.join()
	except:
		pass
def _process(item):
	# Classify one directory entry as either a playable video file (queued into
	# scrape results) or a sub-folder worth recursing into (queued into folder
	# results). item is a (name, type) pair where type is 'file' or 'folder'.
	# NOTE(review): folder_name, assigned_folder, scrape_results_append and
	# foler_results_append are free variables from the enclosing scraper scope
	# (not visible in this chunk); 'foler_results_append' looks misspelled but
	# is the name bound there — do not rename here without changing the binding.
	file_type = item[1]
	normalized = normalize(item[0])
	item_name = clean_title(normalized)
	if file_type == 'file':
		# Only consider files with a recognised video extension.
		ext = os.path.splitext(urlparse(item[0]).path)[-1].lower()
		if ext in self.extensions:
			if self.db_type == 'movie':
				# Assigned content matches unconditionally; otherwise the
				# cleaned title must appear in the file name.
				if self.assigned_content or self.title_query in item_name:
					url_path = self.url_path(folder_name, item[0])
					size = self._get_size(url_path)
					scrape_results_append((item[0], url_path, size))
			else:
				# Episode: prefer a season/episode pattern match...
				if seas_ep_filter(self.season, self.episode, normalized):
					if self.assigned_content or not folder_name in self.folder_path:
						url_path = self.url_path(folder_name, item[0])
						size = self._get_size(url_path)
						scrape_results_append((item[0], url_path, size))
				# ...falling back to a plain title match.
				elif self.title_query in item_name:
					url_path = self.url_path(folder_name, item[0])
					size = self._get_size(url_path)
					scrape_results_append((item[0], url_path, size))
	elif file_type == 'folder':
		if not assigned_folder:
			# Top-level (unassigned) folder: check the assignment mapping first.
			self.assigned_content = self._assigned_content(normalize(item[0]))
			if self.assigned_content:
				if self.assigned_content == self.rootname:
					new_folder = os.path.join(folder_name, item[0])
					foler_results_append((new_folder, True))
			elif self.title_query in item_name or any(x in item_name for x in self.folder_query):
				new_folder = os.path.join(folder_name, item[0])
				foler_results_append((new_folder, self.assigned_content))
		elif assigned_folder:
			# Already inside an assigned folder: season/year query match keeps
			# the assignment; otherwise fall back to the title/query test.
			if any(x in item_name for x in self.folder_query):
				new_folder = os.path.join(folder_name, item[0])
				foler_results_append((new_folder, True))
			elif self.title_query in item_name or any(x in item_name for x in self.folder_query):
				new_folder = os.path.join(folder_name, item[0])
				foler_results_append((new_folder, self.assigned_content))
def _scrape_folders(self, folder_info):
	"""Scan one cloud magnet's links for video files matching the request.

	folder_info is the (display_name, magnet_item, assigned_flag) tuple built
	by _scrape_cloud. Matching links are appended to self.scrape_results;
	links from an assigned folder are tagged with 'assigned_folder' so the
	title filter is skipped later. Runs on a worker thread; all errors are
	swallowed.
	"""
	try:
		_, torrent_folder, assigned_folder = folder_info
		video_exts = tuple(self.extensions)
		add_result = self.scrape_results.append
		for link in torrent_folder['links']:
			# Skip non-video files up front.
			if not link['filename'].lower().endswith(video_exts):
				continue
			normalized = normalize(link['filename'])
			cleaned = clean_title(normalized)
			if assigned_folder and self.db_type == 'movie':
				matched = True
			elif self.db_type == 'movie':
				matched = any(x in cleaned for x in self._year_query_list()) and self.folder_query in cleaned
			elif assigned_folder:
				matched = any(x in normalized.lower() for x in self.seas_ep_query_list)
			else:
				matched = seas_ep_filter(self.season, self.episode, normalized)
			if matched:
				if assigned_folder:
					link['assigned_folder'] = True
				add_result(link)
	except:
		return