Example no. 1
0
    def results(self, info):
        """Scrape EasyNews for sources matching *info* and return them.

        Populates ``self.sources`` with one dict per accepted file, passes
        the list to ``internal_results`` for display, and returns it.
        Any failure is logged and an empty/partial result set is returned
        rather than raising.
        """
        try:
            self.title = info.get('title')
            self.search_title = clean_file_name(self.title).replace('&', 'and')
            self.db_type = info.get('db_type')
            self.year = info.get('year')
            self.season = info.get('season')
            self.episode = info.get('episode')
            found = EasyNews.search(self._search_name())
            if not found:
                return internal_results(self.scrape_provider, self.sources)
            self.aliases = get_aliases_titles(info.get('aliases', []))
            collected = []
            for entry in found:
                try:
                    name = normalize(entry['name'])
                    # Optional title sanity-check against known aliases.
                    if self.title_filter:
                        if not check_title(self.title, name, self.aliases,
                                           self.year, self.season,
                                           self.episode):
                            continue
                    # Optional audio-language filter.
                    if self.filter_lang:
                        if not any(lang in self.lang_filters
                                   for lang in entry['language']):
                            continue
                    display = clean_file_name(name)
                    for token in ('html', '+', '-'):
                        display = display.replace(token, ' ')
                    link = entry['url_dl']
                    # rawSize is bytes; convert to GB rounded to 2 places.
                    gb = round(float(int(entry['rawSize'])) / 1073741824, 2)
                    quality, extra = get_file_info(
                        name_info=release_info_format(name))
                    collected.append({
                        'name': name,
                        'title': name,
                        'URLName': display,
                        'quality': quality,
                        'size': gb,
                        'size_label': '%.2f GB' % gb,
                        'extraInfo': extra,
                        'url_dl': link,
                        'id': link,
                        'local': False,
                        'direct': True,
                        'source': self.scrape_provider,
                        'scrape_provider': self.scrape_provider
                    })
                except Exception as e:
                    # Skip the bad item but record why it was dropped.
                    from modules.kodi_utils import logger
                    logger('FEN easynews scraper yield source error',
                           str(e))
            self.sources = collected
        except Exception as e:
            from modules.kodi_utils import logger
            logger('FEN easynews scraper Exception', str(e))
        internal_results(self.scrape_provider, self.sources)
        return self.sources
Example no. 2
0
	def results(self, info):
		"""Scrape the user's AllDebrid cloud for sources matching *info*.

		Populates ``self.sources`` with one dict per accepted file, hands the
		list to ``internal_results`` for display, and returns it. All failures
		are logged rather than raised.
		"""
		try:
			# Bail out immediately when the AllDebrid service is not enabled.
			if not enabled_debrids_check('ad'): return internal_results(self.scrape_provider, self.sources)
			self.title_filter = filter_by_name(self.scrape_provider)
			self.sources, self.folder_results, self.scrape_results = [], [], []
			self.db_type = info.get('db_type')
			self.title = info.get('title')
			self.year = info.get('year')
			if self.year: self.rootname = '%s (%s)' % (self.title, self.year)
			else: self.rootname = self.title
			self.season = info.get('season')
			self.episode = info.get('episode')
			if self.db_type == 'episode': self.seas_ep_query_list = seas_ep_query_list(self.season, self.episode)
			self.extensions = supported_video_extensions()
			self.folder_query = clean_title(normalize(self.title))
			self._scrape_cloud()
			if not self.scrape_results: return internal_results(self.scrape_provider, self.sources)
			self.aliases = get_aliases_titles(info.get('aliases', []))
			def _process():
				# Yield a source dict for every cloud file that passes the filters.
				for item in self.scrape_results:
					try:
						file_name = normalize(item['filename'])
						# Items matched via an assigned folder skip the title check.
						if self.title_filter and 'assigned_folder' not in item:
							if not check_title(self.title, file_name, self.aliases, self.year, self.season, self.episode): continue
						file_dl = item['link']
						URLName = clean_file_name(file_name).replace('html', ' ').replace('+', ' ').replace('-', ' ')
						size = round(float(int(item['size']))/1073741824, 2)  # bytes -> GB
						video_quality, details = get_file_info(name_info=release_info_format(file_name))
						source_item = {'name': file_name,
										'title': file_name,
										'URLName': URLName,
										'quality': video_quality,
										'size': size,
										'size_label': '%.2f GB' % size,
										'extraInfo': details,
										'url_dl': file_dl,
										'id': file_dl,
										'downloads': False,
										'direct': True,
										'source': self.scrape_provider,
										'scrape_provider': self.scrape_provider}
						yield source_item
					except Exception as e:
						# Log and skip the bad item instead of silently swallowing it
						# (matches the other FEN scrapers' per-item error handling).
						from modules.kodi_utils import logger
						logger('FEN alldebrid scraper yield source error', str(e))
			self.sources = list(_process())
		except Exception as e:
			from modules.kodi_utils import logger
			logger('FEN alldebrid scraper Exception', str(e))
		internal_results(self.scrape_provider, self.sources)
		return self.sources
Example no. 3
0
	def results(self, info):
		"""Scan the configured local source folders for files matching *info*.

		Populates ``self.sources`` with one dict per accepted file, hands the
		list to ``internal_results`` for display, and returns it. All failures
		are logged rather than raised.
		"""
		try:
			self.info = info
			self.db_type = self.info.get('db_type')
			self.folder_path = source_folders_directory(self.db_type, self.scrape_provider)
			# No folder configured for this media type -> nothing to scan.
			if not self.folder_path: return internal_results(self.scraper_name, self.sources)
			self.title = self.info.get('title')
			self.year = self.info.get('year')
			if self.year: self.rootname = '%s (%s)' % (self.title, self.year)
			else: self.rootname = self.title
			self.season = self.info.get('season')
			self.episode = self.info.get('episode')
			self.title_query = clean_title(normalize(self.title))
			self.folder_query = self._season_query_list() if self.db_type == 'episode' else self._year_query_list()
			self._scrape_directory((self.folder_path, False))
			if not self.scrape_results: return internal_results(self.scraper_name, self.sources)
			self.aliases = get_aliases_titles(info.get('aliases', []))
			def _process():
				# Yield a source dict for every scanned file that passes the filters.
				# Each item is a (name, link[, size]) sequence from _scrape_directory.
				for item in self.scrape_results:
					try:
						file_name = normalize(item[0])
						if self.title_filter:
							if not check_title(self.title, file_name, self.aliases, self.year, self.season, self.episode): continue
						file_dl = item[1]
						URLName = clean_file_name(file_name).replace('html', ' ').replace('+', ' ').replace('-', ' ')
						# Some results omit the size element; probe the file instead.
						try: size = item[2]
						except IndexError: size = self._get_size(file_dl)
						video_quality, details = get_file_info(name_info=release_info_format(file_name))
						source_item = {'name': file_name,
										'title': file_name,
										'URLName': URLName,
										'quality': video_quality,
										'size': size,
										'size_label': '%.2f GB' % size,
										'extraInfo': details,
										'url_dl': file_dl,
										'id': file_dl,
										self.scrape_provider : True,
										'direct': True,
										'source': self.scraper_name,
										'scrape_provider': 'folders'}
						yield source_item
					except Exception as e:
						# Log and skip the bad item instead of silently swallowing it
						# (matches the other FEN scrapers' per-item error handling).
						from modules.kodi_utils import logger
						logger('FEN folders scraper yield source error', str(e))
			self.sources = list(_process())
		except Exception as e:
			from modules.kodi_utils import logger
			logger('FEN folders scraper Exception', str(e))
		internal_results(self.scraper_name, self.sources)
		return self.sources
Example no. 4
0
    def results(self, info):
        """Scrape Furk for cached sources matching *info* and return them.

        Populates ``self.sources`` with one dict per ready, non-audio file,
        passes the list to ``internal_results`` for display, and returns it.
        Any failure is logged and an empty/partial result set is returned
        rather than raising.
        """
        try:
            self.info = info
            self.title = self.info.get('title')
            self.search_title = clean_file_name(self.title).replace(
                ' ', '+').replace('&', 'and')
            self.db_type = self.info.get('db_type')
            self.year = self.info.get('year')
            self.season = self.info.get('season')
            self.episode = self.info.get('episode')
            matches = Furk.search(self._search_name())
            if not matches:
                return internal_results(self.scrape_provider, self.sources)
            # Keep only items that are fully cached and of a usable type.
            usable = [
                f for f in matches
                if f.get('type') not in ('default', 'audio', '')
                and f.get('is_ready') == '1'
            ]
            self.aliases = get_aliases_titles(info.get('aliases', []))
            collected = []
            for entry in usable:
                try:
                    # size is bytes; convert to GB rounded to 2 places.
                    gb = round(float(int(entry['size'])) / 1073741824, 2)
                    if self.info.get('db_type') == 'movie':
                        video_count = 1
                    else:
                        video_count = int(entry['files_num_video'])
                    # More than 3 video files -> treat as a multi-file
                    # package and report the average per-file size.
                    is_package = video_count > 3
                    if is_package:
                        gb = float(gb) / video_count
                    package = 'true' if is_package else 'false'
                    name = normalize(entry['name'])
                    # Title check only applies to single-file results.
                    if self.title_filter and package == 'false' \
                            and not check_title(self.title, name,
                                                self.aliases, self.year,
                                                self.season, self.episode):
                        continue
                    display = clean_file_name(name)
                    for token in ('html', '+', '-'):
                        display = display.replace(token, ' ')
                    quality, extra = get_file_info(
                        name_info=release_info_format(name))
                    collected.append({
                        'name': name,
                        'title': name,
                        'URLName': display,
                        'quality': quality,
                        'size': gb,
                        'size_label': '%.2f GB' % gb,
                        'extraInfo': extra,
                        'url_dl': entry['url_dl'],
                        'id': entry['id'],
                        'local': False,
                        'direct': True,
                        'package': package,
                        'source': self.scrape_provider,
                        'scrape_provider': self.scrape_provider
                    })
                except Exception as e:
                    from modules.kodi_utils import logger
                    logger('FURK ERROR - 65', e)
            self.sources = collected
        except Exception as e:
            from modules.kodi_utils import logger
            logger('FEN furk scraper Exception', e)
        internal_results(self.scrape_provider, self.sources)
        return self.sources