def results(self, info):
    try:
        self.info = info
        self.db_type = self.info.get("db_type")
        self.title = self.info.get("title")
        self.year = self.info.get("year")
        if self.year:
            self.rootname = '%s (%s)' % (self.title, self.year)
        else:
            self.rootname = self.title
        self.season = self.info.get("season")
        self.episode = self.info.get("episode")
        self.query = clean_title(self.title)
        self.file_query = self._episode_query_list() if self.db_type == 'episode' else self._year_query_list()
        self.extensions = supported_video_extensions()
        self._scrape_cloud()
        if not self.scrape_results:
            return self.sources
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        for item in self.scrape_results:
            try:
                file_name = normalize(item['name'])
                path = item['path']
                file_dl = item['id']
                size = float(item['size']) / 1073741824  # bytes -> GB
                video_quality = get_release_quality(file_name, path)
                details = get_file_info(file_name)
                if not details:
                    details = get_file_info(path)  # fall back to the full path
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality)
                self.sources.append(
                    {'name': file_name, 'label': label, 'multiline_label': multiline_label,
                     'title': file_name, 'quality': video_quality, 'size': size,
                     'url_dl': file_dl, 'id': file_dl, 'downloads': False, 'direct': True,
                     'source': self.scrape_provider, 'scrape_provider': self.scrape_provider})
            except Exception:
                pass  # skip malformed cloud items
        window.setProperty('pm-cloud_source_results', json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN premiumize scraper Exception', e)
    return self.sources
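# A minimal sketch of the `info` payload these results() methods consume,
# reconstructed from the .get() calls above. The key names match the code;
# the build_info() helper and its defaults are illustrative assumptions,
# not part of FEN.
def build_info(title, year, db_type='movie', season=None, episode=None, scraper_settings=None):
    # 'scraper_settings' feeds label_settings(); the rest drive the query lists.
    return {'db_type': db_type, 'title': title, 'year': year,
            'season': season, 'episode': episode,
            'scraper_settings': scraper_settings or {}}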
def results(self, info):
    try:
        self.info = info
        self.db_type = self.info.get("db_type")
        self.download_path = settings.download_directory(self.db_type)
        self.title = self.info.get("title")
        self.year = self.info.get("year")
        self.season = self.info.get("season")
        self.episode = self.info.get("episode")
        self.title_query = clean_title(self.title)
        self.folder_query = self._season_query_list() if self.db_type == 'episode' else self._year_query_list()
        self.file_query = self._episode_query_list() if self.db_type == 'episode' else self._year_query_list()
        self._scrape_directory(self.download_path)
        if not self.scrape_results:
            return self.sources
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        for item in self.scrape_results:
            try:
                file_name = item[0]
                file_dl = item[1]
                size = self._get_size(file_dl)
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name, file_dl)
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality)
                self.sources.append(
                    {'name': file_name, 'label': label, 'multiline_label': multiline_label,
                     'title': file_name, 'quality': video_quality, 'size': size,
                     'url_dl': file_dl, 'id': file_dl, 'downloads': True, 'direct': True,
                     'source': self.scrape_provider, 'scrape_provider': self.scrape_provider})
            except Exception:
                pass  # skip entries that fail to parse
        window.setProperty('downloads_source_results', json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN downloads scraper Exception', e)
    return self.sources
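# _get_size() is called above but not defined in this excerpt. A plausible
# stand-in, assuming it returns the size of a local file in gigabytes using
# the same bytes -> GB divisor the other scrapers apply inline:
import os

def _get_size(file_path):
    return float(os.path.getsize(file_path)) / 1073741824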
def results(self, info):
    try:
        self.info = info
        search_name = self._search_name()
        files = EasyNews.search(search_name)
        files = files[0:self.max_results]
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        for item in files:
            try:
                if self.max_bytes:
                    match = re.search(r'([\d.]+)\s+(.*)', item['size'])
                    if match:
                        size_bytes = self.to_bytes(*match.groups())
                        if size_bytes > self.max_bytes:
                            continue  # over the user's size cap
                file_name = normalize(item['name'])
                file_dl = item['url_dl']
                size = float(int(item['rawSize'])) / 1073741824  # bytes -> GB
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name, file_dl)
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality)
                self.sources.append(
                    {'name': file_name, 'label': label, 'multiline_label': multiline_label,
                     'quality': video_quality, 'size': size, 'url_dl': file_dl, 'id': file_dl,
                     'local': False, 'direct': True, 'source': self.scrape_provider,
                     'scrape_provider': self.scrape_provider})
            except Exception:
                pass
        window.setProperty('easynews_source_results', json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN easynews scraper Exception', e)
    return self.sources
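# to_bytes() is called above but not defined in this excerpt. A plausible
# stand-in that pairs with the r'([\d.]+)\s+(.*)' match, assuming EasyNews
# reports human-readable sizes such as '1.4 GB' or '350 MB':
def to_bytes(num, unit):
    units = {'B': 1, 'KB': 2 ** 10, 'MB': 2 ** 20, 'GB': 2 ** 30, 'TB': 2 ** 40}
    return int(float(num) * units.get(unit.strip().upper(), 1))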
def main():
    configuration = Configuration().load()
    path = configuration.get('upload_folder')
    if path:
        for file_name in get_file_names(path):
            file_changed = False
            result = None
            f = os.path.basename(file_name)
            file_object = get_file_object_or_create(f)
            current_file_info = get_file_info(file_name)
            current_file_timestamp = current_file_info[0].replace('-', '').replace(':', '').replace(' ', '')
            if file_object.timestamp:
                # file already known: re-upload only if it changed on disk
                if file_object.timestamp < current_file_timestamp:
                    file_changed = True
                    # TODO: do POST, set result
                    result = 200
                    # update model
                    file_object.updated_at = datetime.utcnow()
                    file_object.timestamp = current_file_timestamp
            else:
                # new file: always upload
                # TODO: do POST, set result
                result = 200
                file_object.updated_at = datetime.utcnow()
                file_object.timestamp = current_file_timestamp
                file_changed = True
            db.session.add(file_object)
            db.session.commit()
            if file_changed:
                event_type_file_changed = EventType.query.get(4)
                event_file_changed = Event(name='%s changed' % f,
                                           event_type=event_type_file_changed,
                                           file=file_object,
                                           date=None,
                                           file_attrs=str(current_file_info),
                                           result_code=None)
                db.session.add(event_file_changed)
                db.session.commit()
            if result:
                event_type_uploading = EventType.query.get(3)
                event_file_uploading = Event(name='%s uploading' % f,
                                             event_type=event_type_uploading,
                                             file=file_object,
                                             date=None,
                                             file_attrs=str(current_file_info),
                                             result_code=result)
                db.session.add(event_file_uploading)
                db.session.commit()
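# The timestamp handling above, in isolation: get_file_info()[0] is treated
# as a 'YYYY-MM-DD HH:MM:SS' string, and stripping '-', ':' and ' ' leaves a
# 14-digit stamp that orders correctly as a string or an int. The helper name
# and the sample value are illustrative only.
def normalize_timestamp(ts):
    return ts.replace('-', '').replace(':', '').replace(' ', '')

# normalize_timestamp('2021-03-05 14:27:09') == '20210305142709'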
def results(self, info):
    try:
        self.info = info
        self.db_type = self.info.get("db_type")
        self.title = self.info.get("title")
        self.year = self.info.get("year")
        self.season = self.info.get("season")
        self.episode = self.info.get("episode")
        self.db_info = self._get_library_video(self.db_type, self.title, self.year, self.season, self.episode)
        if not self.db_info:
            return self.sources
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        file_name = self.db_info.get("name")
        file_id = self.db_info.get("file_id")
        file_dl = self.db_info.get("file_id")
        size = self._get_size(file_dl)
        details = get_file_info(file_name)
        video_quality = get_release_quality(file_name, file_dl)
        label, multiline_label = build_internal_scrapers_label(
            self.label_settings, file_name, details, size, video_quality)
        self.sources.append(
            {'name': file_name, 'label': label, 'multiline_label': multiline_label,
             'quality': video_quality, 'size': size, 'url_dl': file_dl, 'url': file_dl,
             'id': file_id, 'local': True, 'direct': True,
             'source': self.scrape_provider, 'scrape_provider': self.scrape_provider})
        window.setProperty('local_source_results', json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN local scraper Exception', e)
    return self.sources
def run(self):
    configuration = Configuration()
    configuration.load()
    path = configuration.upload_folder
    tick = configuration.tick
    while True:
        if path:
            for file_name in get_file_names(path):
                file_changed = False
                result = None
                f = os.path.basename(file_name)
                file_object = get_file_object_or_create(f)
                current_file_info = get_file_info(file_name)
                current_file_timestamp = current_file_info[0].replace('-', '').replace(':', '').replace(' ', '')
                if file_object.timestamp:
                    # file already known: upload only if newer on disk
                    if int(file_object.timestamp) < int(current_file_timestamp):
                        file_changed = True
                        upload_file(file_name, file_object, current_file_timestamp)
                else:
                    # new file: always upload
                    file_changed = True
                    upload_file(file_name, file_object, current_file_timestamp)
        time.sleep(float(tick))
def results(self, info):
    try:
        self.info = info
        search_name = self._search_name()
        files = Furk.search(search_name)
        if not files:
            return self.sources
        active_downloads = self.get_active_downloads()
        # instantly playable items first, capped at furk_limit
        cached_files = [i for i in files
                        if i.get('type') not in ('default', 'audio', '') and i.get('is_ready') == '1'][0:self.furk_limit]
        uncached_files = [i for i in files
                          if i.get('type') not in ('default', 'audio', '') and i not in cached_files]
        self.label_settings = label_settings(self.info['scraper_settings'], self.scrape_provider)
        for i in cached_files:
            try:
                file_name = normalize(i['name'])
                file_id = i['id']
                files_num_video = i['files_num_video']
                size = float(int(i['size'])) / 1073741824  # bytes -> GB
                # only apply the size cap to single releases, not multi-file packs
                if not int(files_num_video) > 3:
                    if size > self.max_gb:
                        continue
                file_dl = i['url_dl']
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name, file_dl)
                furk_settings = {'files_num_video': files_num_video, 'uncached': False, 'active_download': False}
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality, **furk_settings)
                self.sources.append(
                    {'name': file_name, 'label': label, 'multiline_label': multiline_label,
                     'title': file_name, 'quality': video_quality, 'size': size,
                     'url_dl': file_dl, 'id': file_id, 'local': False, 'direct': True,
                     'source': self.scrape_provider, 'scrape_provider': self.scrape_provider})
            except Exception as e:
                from modules.utils import logger
                logger('FURK ERROR - 65', e)
        for i in uncached_files:
            try:
                file_name = i['name']
                info_hash = i['info_hash']
                try:
                    files_num_video = i['files_num_video']
                except Exception:
                    files_num_video = 1
                try:
                    size = float(int(i['size'])) / 1073741824
                except Exception:
                    size = 0
                active_download = info_hash in active_downloads
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name)
                furk_settings = {'files_num_video': files_num_video, 'uncached': True, 'active_download': active_download}
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality, **furk_settings)
                self.sources.append(
                    {'name': file_name, 'label': label, 'multiline_label': multiline_label,
                     'title': file_name, 'quality': video_quality, 'size': size,
                     'url_dl': info_hash, 'id': info_hash, 'local': False, 'direct': True,
                     'uncached': True, 'source': self.scrape_provider,
                     'scrape_provider': self.scrape_provider})
            except Exception as e:
                from modules.utils import logger
                logger('FURK ERROR - 96', e)
        # only cached sources are persisted for re-use
        window.setProperty('furk_source_results',
                           json.dumps([i for i in self.sources if 'uncached' not in i]))
    except Exception as e:
        from modules.utils import logger
        logger('FEN furk scraper Exception', e)
    return self.sources
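# The uncached_files comprehension above re-scans cached_files for every
# candidate (an O(n^2) membership test on dicts). An equivalent single-pass
# partition keyed on 'id', assuming Furk ids are unique per result set:
def partition_by_cache(files, limit):
    def is_video(i):
        return i.get('type') not in ('default', 'audio', '')
    cached = [i for i in files if is_video(i) and i.get('is_ready') == '1'][:limit]
    cached_ids = set(i['id'] for i in cached)
    uncached = [i for i in files if is_video(i) and i.get('id') not in cached_ids]
    return cached, uncached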
def results(self, info):
    try:
        self.info = info
        self.db_type = self.info.get("db_type")
        self.folder_path = settings.source_folders_directory(self.db_type, self.scrape_provider)
        if not self.folder_path:
            return self.sources
        self.title = self.info.get("title")
        self.year = self.info.get("year")
        self.season = self.info.get("season")
        self.episode = self.info.get("episode")
        self.title_query = clean_title(self.title)
        self.folder_query = self._season_query_list() if self.db_type == 'episode' else self._year_query_list()
        self.file_query = self._episode_query_list() if self.db_type == 'episode' else self._year_query_list()
        cache_name = 'fen_%s_SCRAPER_%s_%s_%s_%s' % (self.scrape_provider, self.title, self.year, self.season, self.episode)
        cache = _cache.get(cache_name)
        if cache:
            self.scrape_results = cache
        else:
            self._scrape_directory(self.folder_path)
            _cache.set(cache_name, self.scrape_results, expiration=datetime.timedelta(hours=2))
        if not self.scrape_results:
            return self.sources
        self.label_settings = label_settings(self.info['scraper_settings'], 'folders', self.scraper_name)
        for item in self.scrape_results:
            try:
                file_name = item[0]
                file_dl = item[1]
                # .strm files are pointers, so they carry no meaningful size
                size = self._get_size(file_dl) if not file_dl.endswith('.strm') else 'strm'
                details = get_file_info(file_name)
                video_quality = get_release_quality(file_name, file_dl)
                label, multiline_label = build_internal_scrapers_label(
                    self.label_settings, file_name, details, size, video_quality)
                self.sources.append(
                    {'name': file_name, 'label': label, 'multiline_label': multiline_label,
                     'title': file_name, 'quality': video_quality, 'size': size,
                     'url_dl': file_dl, 'id': file_dl, self.scrape_provider: True, 'direct': True,
                     'source': self.scrape_provider, 'scrape_provider': self.scrape_provider})
            except Exception:
                pass
        window.setProperty('%s_source_results' % self.scrape_provider, json.dumps(self.sources))
    except Exception as e:
        from modules.utils import logger
        logger('FEN folders scraper Exception', e)
    return self.sources
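# The cache handshake above, in isolation: key the request, serve a hit,
# otherwise scrape and store for two hours. _cache's get/set signature is
# taken from the call sites above; fetch is a hypothetical stand-in for
# _scrape_directory().
import datetime

def cached_scrape(_cache, cache_name, fetch):
    results = _cache.get(cache_name)
    if not results:
        results = fetch()
        _cache.set(cache_name, results, expiration=datetime.timedelta(hours=2))
    return results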
def upload_file(filename, file_object, current_file_timestamp):
    """Upload a file to the server and poll the upload task until it settles."""
    base_name = os.path.basename(filename)
    file_attrs_str = str(get_file_info(filename))
    try:
        configuration = Configuration()
        configuration.load()
        api_key = configuration.api_key
        url = configuration.url
        payload = {'api_key': api_key, 'hide_old': 0}
        code, response = post_multipart(url=url, payload=payload, file_name=filename)
        print(response)
        event_type_uploading = EventType.query.get(3)
        event_post_results = Event(name='%s post results: %s' % (base_name, str({'code': code, 'response': response})),
                                   event_type=event_type_uploading,
                                   file=None,
                                   date=None,
                                   file_attrs=file_attrs_str,
                                   result_code=code)
        db.session.add(event_post_results)
        db.session.commit()
        if code == 200:
            task_id = json.loads(response).get('task_id')
            if task_id:
                ready = False
                t0 = time.time()  # time polling started
                expected_waiting_time = 35  # initial guess, refined each loop
                loop_num = 0
                while not ready:
                    try:
                        r = requests.get(url, params={'task_id': task_id, 'api_key': api_key}, verify=False)
                        print(r.text)
                        if r.status_code == 200:
                            if json.loads(r.text).get('ready') is True:
                                # task finished: persist the new timestamp and log success
                                available = json.loads(r.text).get('available')
                                updated = json.loads(r.text).get('updated')
                                file_object.updated_at = datetime.utcnow()
                                file_object.timestamp = current_file_timestamp
                                db.session.add(file_object)
                                db.session.commit()
                                event_results = Event(name='%s upload successful!!! results: available: %d, updated %d' % (base_name, available, updated),
                                                      event_type=event_type_uploading,
                                                      file=file_object,
                                                      date=None,
                                                      file_attrs=file_attrs_str,
                                                      result_code=r.status_code)
                                db.session.add(event_results)
                                db.session.commit()
                                break
                        else:
                            reason = 'unknown'  # the API gives no failure reason here
                            event_results = Event(name='%s results: file not uploaded, reason %s' % (base_name, reason),
                                                  event_type=event_type_uploading,
                                                  file=None,
                                                  date=None,
                                                  file_attrs=file_attrs_str,
                                                  result_code=r.status_code)
                            db.session.add(event_results)
                            db.session.commit()
                            break
                    except Exception as e:
                        event_results = Event(name='%s: system error %s' % (base_name, e.args[0]),
                                              event_type=event_type_uploading,
                                              file=None,
                                              date=None,
                                              file_attrs=file_attrs_str,
                                              result_code=None)
                        db.session.add(event_results)
                        db.session.commit()
                        break
                    time.sleep(expected_waiting_time)
                    current, total = 0, 0
                    try:
                        current = json.loads(r.text).get('state').get('current')
                        total = json.loads(r.text).get('state').get('total')
                    except Exception:
                        pass
                    t = math.floor(time.time() - t0)
                    # estimate the remaining wait from the observed progress rate
                    if total != 0 and current != 0 and loop_num != 0:
                        expected_waiting_time = math.floor((total - current) * t / current)
                    loop_num += 1
    except ValueError as ve:
        event_type_error = EventType.query.get(2)
        event_configuration_error = Event(name='%s config error %s' % (base_name, ve.args[0]),
                                          event_type=event_type_error,
                                          file=None,
                                          date=None,
                                          file_attrs=file_attrs_str,
                                          result_code=None)
        db.session.add(event_configuration_error)
        db.session.commit()
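# The adaptive poll interval above, in isolation: if `current` of `total`
# work units are done after `elapsed` seconds, the observed rate is
# current / elapsed, so the remaining units should take about
# (total - current) * elapsed / current seconds. The helper name and the
# example values are illustrative only.
import math

def expected_remaining(current, total, elapsed):
    return math.floor((total - current) * elapsed / current)

# expected_remaining(40, 100, 20) == 30  -> 60 units left at 2 units/s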