def install():
    """Download and install the xunlei-lixian CLI under BGMI_PATH/tools.

    Streams the upstream GitHub tarball to a temp file, extracts it, and
    symlinks the CLI entry point at XUNLEI_LX_PATH.  Network and
    filesystem side effects only; no return value.
    """
    # install xunlei-lixian
    import tarfile
    import requests
    print_info('Downloading xunlei-lixian from https://github.com/iambus/xunlei-lixian/')
    r = requests.get('https://github.com/iambus/xunlei-lixian/tarball/master', stream=True,
                     headers={'Accept-Encoding': ''})
    # delete=False keeps the temp file on disk so tarfile can reopen it by name
    f = NamedTemporaryFile(delete=False)
    with f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
        f.close()
    print_success('Download successfully, save at %s, extracting ...' % f.name)
    zip_file = tarfile.open(f.name, 'r:gz')
    zip_file.extractall(os.path.join(BGMI_PATH, 'tools/xunlei-lixian'))
    # the first member of the tarball is its top-level directory name
    dir_name = zip_file.getnames()[0]
    print_info('Create link file ...')
    if not os.path.exists(XUNLEI_LX_PATH):
        os.symlink(os.path.join(BGMI_PATH,
                                'tools/xunlei-lixian/{0}/lixian_cli.py'.format(dir_name)),
                   XUNLEI_LX_PATH)
    else:
        print_warning('{0} already exists'.format(XUNLEI_LX_PATH))
    print_success('All done')
    print_info('Please run command \'{0} config\' to configure your lixian-xunlei '
               '(Notice: only for Thunder VIP)'.format(XUNLEI_LX_PATH))
def __new__(cls, *args, **kwargs):
    """Singleton constructor.

    The first call scans SCRIPT_PATH for ``*.py`` files, instantiates each
    file's ``Script`` class and registers the instances accepted by
    ``cls.check``; later calls return the cached instance.
    """
    if cls._defined is None:
        script_files = glob.glob('{}{}*.py'.format(SCRIPT_PATH, os.path.sep))
        for i in script_files:
            try:
                s = imp.load_source('script', os.path.join(SCRIPT_PATH, i))
                script_class = getattr(s, 'Script')()
                if cls.check(script_class):
                    cls.scripts.append(script_class)
                    print_info('Load script {} successfully.'.format(i))
            except Exception:
                # A broken user script must not break startup.  Was a bare
                # ``except:``, which also swallowed SystemExit and
                # KeyboardInterrupt.
                print_warning('Load script {} failed, ignored'.format(i))
                if os.getenv('DEBUG_SCRIPT'):  # pragma: no cover
                    traceback.print_exc()
        cls._defined = super(ScriptRunner, cls).__new__(cls, *args, **kwargs)
    return cls._defined
def search_by_keyword(self, keyword, count=None):
    """Search torrents for ``keyword`` over the first ``count`` result
    pages (default 3) and return a list of episode dicts, oldest first.

    Returns an empty list when a response carries no ``torrents`` key.
    """
    if not count:
        count = 3
    rows = []
    result = []
    for i in range(count):
        data = get_response(SEARCH_URL, 'POST', json={'query': keyword, 'p': i + 1})
        # idiomatic membership test (was ``not 'torrents' in data``)
        if 'torrents' not in data:
            print_warning('No torrents in response data, please re-run')
            return []
        rows.extend(data['torrents'])
    # the original guarded this append with a vacuous ``if True:``
    for info in rows:
        result.append({
            'download': info['magnet'],
            'name': keyword,
            'subtitle_group': info['team_id'],
            'title': info['title'],
            'episode': self.parse_episode(info['title']),
            # publish_time is ISO-8601 with fractional seconds; strip the
            # fraction and convert to a unix timestamp
            'time': int(time.mktime(datetime.datetime.strptime(
                info['publish_time'].split('.')[0],
                "%Y-%m-%dT%H:%M:%S").timetuple())),
        })
    # Avoid bangumi collection. It's ok but it will waste your traffic and bandwidth.
    result = result[::-1]
    return result
def create_dir():
    """Create every BGmi runtime directory and, on first install, record
    the running version in the ``old`` marker file under BGMI_PATH."""
    required_dirs = (
        BGMI_PATH,
        SAVE_PATH,
        TMP_PATH,
        SCRIPT_PATH,
        TOOLS_PATH,
        FRONT_STATIC_PATH,
    )
    home = os.environ.get("HOME", os.environ.get("USERPROFILE", ""))
    if not home:
        print_warning("$HOME not set, use '/tmp/'")
    # bgmi home dir
    try:
        for directory in required_dirs:
            if os.path.exists(directory):
                continue
            os.makedirs(directory)
            print_success("%s created successfully" % directory)
        version_marker = os.path.join(BGMI_PATH, "old")
        # create OLD if not exist on install
        if not os.path.exists(version_marker):
            with open(version_marker, "w") as marker:
                marker.write(__version__)
    except OSError as e:
        print_error("Error: {}".format(str(e)))
def __new__(cls, *args, **kwargs):  # type: ignore
    """Singleton constructor.

    The first call scans SCRIPT_PATH for ``*.py`` files, executes each as
    a fresh module via SourceFileLoader, and registers its ``Script``
    instance when ``cls.check`` accepts it; later calls return the cached
    instance.
    """
    if cls._defined is None:
        script_files = glob.glob(f"{SCRIPT_PATH}{os.path.sep}*.py")
        for i in script_files:
            try:
                loader = SourceFileLoader("script", os.path.join(SCRIPT_PATH, i))
                mod = types.ModuleType(loader.name)
                loader.exec_module(mod)
                script_class = mod.Script()
                if cls.check(script_class):
                    cls.scripts.append(script_class)
                    print_info(f"Load script {i} successfully.")
            except Exception:
                # a faulty user script is skipped rather than aborting start-up
                print_warning(f"Load script {i} failed, ignored")
                if os.getenv("DEBUG_SCRIPT"):  # pragma: no cover
                    traceback.print_exc()
        cls._defined = super().__new__(cls, *args, **kwargs)
    return cls._defined
def bangumi_calendar(self, force_update=False, save=True, cover=None):
    """Return the weekly bangumi schedule as a dict keyed by weekday.

    :param force_update: re-fetch from the data source; silently downgraded
        when the network is unreachable
    :param save: persist fetched data to the database
    :param cover: list of cover URLs to download in addition to missing
        local covers; NOTE(review): the passed-in list is mutated
        (missing covers are appended) — confirm callers expect this
    """
    if force_update and not test_connection():
        force_update = False
        print_warning('Network is unreachable')
    if force_update:
        print_info('Fetching bangumi info ...')
        Bangumi.delete_all()
        weekly_list = self.fetch(save=save)
    else:
        weekly_list = Bangumi.get_updating_bangumi()
    # fall back to a fetch when the local database is empty
    if not weekly_list:
        print_warning('Warning: no bangumi schedule, fetching ...')
        weekly_list = self.fetch(save=save)
    if cover is not None:
        # download cover to local
        cover_to_be_download = cover
        for daily_bangumi in weekly_list.values():
            for bangumi in daily_bangumi:
                _, file_path = convert_cover_to_path(bangumi['cover'])
                # queue covers that are not on disk yet
                if not glob.glob(file_path):
                    cover_to_be_download.append(bangumi['cover'])
        if cover_to_be_download:
            print_info('Updating cover ...')
            download_cover(cover_to_be_download)
    return weekly_list
def search_by_keyword(self, keyword, count=None):
    """Search torrents for ``keyword`` over the first ``count`` result
    pages (default 3) and return a list of episode dicts, oldest first.

    Returns an empty list when a response carries no ``torrents`` key.
    """
    if not count:
        count = 3
    rows = []
    result = []
    for i in range(count):
        data = get_response(SEARCH_URL, 'POST', json={'query': keyword, 'p': i + 1})
        # idiomatic membership test (was ``not 'torrents' in data``)
        if 'torrents' not in data:
            print_warning('No torrents in response data, please re-run')
            return []
        rows.extend(data['torrents'])
    # the original guarded this append with a vacuous ``if True:``
    for info in rows:
        result.append({
            'download': TORRENT_URL + info['_id'] + '/download.torrent',
            'name': keyword,
            'subtitle_group': info['team_id'],
            'title': info['title'],
            'episode': self.parse_episode(info['title']),
            # publish_time is ISO-8601 with fractional seconds; strip the
            # fraction and convert to a unix timestamp
            'time': int(time.mktime(datetime.datetime.strptime(
                info['publish_time'].split('.')[0],
                "%Y-%m-%dT%H:%M:%S").timetuple())),
        })
    # Avoid bangumi collection. It's ok but it will waste your traffic and bandwidth.
    result = result[::-1]
    return result
def setup():
    """One-shot environment setup: create the BGmi directory tree, install
    the crontab job, and initialise the database."""
    if not os.path.exists(BGMI_PATH):
        print_warning('BGMI_PATH %s does not exist, installing' % BGMI_PATH)
    create_dir()
    install_crontab()
    init_db()
def cal(force_update: bool = False, save: bool = False, cover: Optional[List[str]] = None) -> Dict[str, List[Dict[str, Any]]]:
    """Return the full weekly schedule for the web API, keyed by weekday.

    Fetches fresh data when ``force_update`` is set (or when the local
    database is empty), optionally downloads missing/corrupt cover images,
    merges in script-provided bangumi, and expands every
    ``subtitle_group`` field into a list of ``{"name", "id"}`` dicts.
    """
    logger.debug("cal force_update: %r save: %r", force_update, save)
    weekly_list = Bangumi.get_updating_bangumi()
    if not weekly_list:
        print_warning("Warning: no bangumi schedule, fetching ...")
        force_update = True
    if force_update:
        print_info("Fetching bangumi info ...")
        website.fetch(save=save)
        weekly_list = Bangumi.get_updating_bangumi()
    if cover is not None:
        # download cover to local
        cover_to_be_download = cover
        for daily_bangumi in weekly_list.values():
            for bangumi in daily_bangumi:
                _, file_path = convert_cover_url_to_path(bangumi["cover"])
                # re-download when the file is missing or not a valid image
                if not (os.path.exists(file_path) and bool(imghdr.what(file_path))):
                    cover_to_be_download.append(bangumi["cover"])
        if cover_to_be_download:
            print_info("Updating cover ...")
            download_cover(cover_to_be_download)
    # merge bangumi provided by user scripts into the weekday buckets
    runner = ScriptRunner()
    patch_list = runner.get_models_dict()
    for i in patch_list:
        weekly_list[i["update_time"].lower()].append(i)
    logger.debug(weekly_list)
    # for web api, return all subtitle group info
    r = weekly_list  # type: Dict[str, List[Dict[str, Any]]]
    for day, value in weekly_list.items():
        for index, bangumi in enumerate(value):
            bangumi["cover"] = normalize_path(bangumi["cover"])
            # NOTE(review): the adjacent literals ", " "" concatenate to
            # just ", "; the empty literal is a no-op kept as-is
            subtitle_group = list(
                map(
                    lambda x: {
                        "name": x["name"],
                        "id": x["id"]
                    },
                    Subtitle.get_subtitle_by_id(
                        bangumi["subtitle_group"].split(", "
                                                        "")),
                ))
            r[day][index]["subtitle_group"] = subtitle_group
    logger.debug(r)
    return r
def setup():
    """Initialise the BGmi environment.

    The crontab job is installed only on a fresh install, i.e. when
    BGMI_PATH did not exist before this call.
    """
    need_to_init = False
    if not os.path.exists(BGMI_PATH):
        need_to_init = True
        print_warning('BGMI_PATH %s does not exist, installing' % BGMI_PATH)
    create_dir()
    init_db()
    if need_to_init:
        install_crontab()
def check_aria2c_version():
    """Query the aria2 RPC endpoint for its version and record on
    Aria2DownloadRPC whether it predates 1.18.4 (older releases use a
    token-less RPC calling convention)."""
    url = ARIA2_RPC_URL.split('/')
    # embed the secret token as userinfo in the URL
    url[2] = ARIA2_RPC_TOKEN + '@' + url[2]
    url = '/'.join(url)
    s = ServerProxy(url)
    r = s.aria2.getVersion(ARIA2_RPC_TOKEN, )
    version = r['version']
    if version:
        # Compare numerically: the original lexicographic string compare
        # called e.g. '1.9.0' newer than '1.18.4'.
        Aria2DownloadRPC.old_version = [int(x) for x in version.split('.')] < [1, 18, 4]
    else:
        print_warning('Get aria2c version failed')
def check_aria2c_version():
    """Query the aria2 RPC endpoint for its version and record on
    Aria2DownloadRPC whether it predates 1.18.4 (older releases use a
    token-less RPC calling convention)."""
    url = ARIA2_RPC_URL.split('/')
    # embed the secret token as userinfo in the URL
    url[2] = ARIA2_RPC_TOKEN + '@' + url[2]
    url = '/'.join(url)
    s = ServerProxy(url)
    r = s.aria2.getVersion(ARIA2_RPC_TOKEN, )
    version = r['version']
    if version:
        # Compare numerically: the original lexicographic string compare
        # called e.g. '1.9.0' newer than '1.18.4'.
        Aria2DownloadRPC.old_version = [int(x) for x in version.split('.')] < [1, 18, 4]
    else:
        print_warning('Get aria2c version failed')
def check_aria2c_version():
    """Query the aria2 RPC endpoint for its version and record on
    Aria2DownloadRPC whether it predates 1.18.4 (older releases use a
    token-less RPC calling convention)."""
    url = config.ARIA2_RPC_URL.split("/")
    # embed the secret token as userinfo in the URL
    url[2] = config.ARIA2_RPC_TOKEN + "@" + url[2]
    url = "/".join(url)
    s = xmlrpc.client.ServerProxy(url)
    r = s.aria2.getVersion(config.ARIA2_RPC_TOKEN)
    version = r["version"]
    if version:
        # Compare numerically: the original lexicographic string compare
        # called e.g. '1.9.0' newer than '1.18.4'.
        Aria2DownloadRPC.old_version = [int(x) for x in version.split(".")] < [1, 18, 4]
    else:
        print_warning("Get aria2c version failed")
def download(self):
    """Invoke the deprecated xunlei-lixian CLI to fetch ``self.torrent``
    into ``self.save_path``."""
    print_warning('XunleiLixian is deprecated, please choose aria2-rpc or transmission-rpc.')
    vcode_path = os.path.join(TMP_PATH, 'vcode.jpg')
    command = [
        XUNLEI_LX_PATH,
        'download',
        '--torrent',
        '--overwrite' if self.overwrite else '',
        '--output-dir={0}'.format(self.save_path),
        self.torrent,
        '--verification-code-path={0}'.format(vcode_path),
    ]
    print_info('Run command {0}'.format(' '.join(command)))
    print_warning('Verification code path: {0}'.format(vcode_path))
    self.call(command)
def setup() -> None:
    """Initialise the BGmi environment: create directories, init the
    database, ensure the default config, and install the crontab job only
    on a fresh install (when BGMI_PATH did not exist yet)."""
    need_to_init = False
    if not os.path.exists(BGMI_PATH):
        need_to_init = True
        print_warning(f"BGMI_PATH {BGMI_PATH} does not exist, installing")
    create_dir()
    init_db()
    if need_to_init:
        install_crontab()
    write_default_config()
def install_crontab():
    """Register the periodic update job.

    On Windows, copies cron.vbs into BGMI_PATH and tells the user to
    finish the setup manually; elsewhere, runs the bundled crontab.sh.
    """
    if IS_WINDOWS:
        copy(os.path.join(os.path.dirname(__file__), 'cron.vbs'), BGMI_PATH)
        print_info('cron.vbs is located as {}'.format(
            os.path.join(BGMI_PATH, 'cron.vbs')))
        print_warning(
            'if you want to enable bgmi autoupdate, see https://github.com/BGmi/BGmi/blob/master/README.windows.md for next step'
        )
    else:
        print_info('Installing crontab job')
        path = os.path.join(os.path.dirname(__file__), 'crontab.sh')
        # NOTE(review): path is interpolated into a shell command line;
        # safe for package-install paths without quotes, but
        # subprocess.run([...]) would be more robust — confirm
        os.system('sh \'%s\'' % path)
def download(self):
    """Run the xunlei-lixian CLI to fetch ``self.torrent`` into
    ``self.save_path``."""
    vcode_path = os.path.join(TMP_PATH, 'vcode.jpg')
    command = [
        XUNLEI_LX_PATH,
        'download',
        '--torrent',
        '--overwrite' if self.overwrite else '',
        '--output-dir={0}'.format(self.save_path),
        self.torrent,
        '--verification-code-path={0}'.format(vcode_path),
    ]
    print_info('Run command {0}'.format(' '.join(command)))
    print_warning('Verification code path: {0}'.format(vcode_path))
    self.call(command)
def check_aria2c_version(self):
    """Ask the aria2 RPC server for its version and print an error when
    it is older than 1.18.4."""
    parts = config.ARIA2_RPC_URL.split("/")
    # embed the secret token as userinfo in the URL
    parts[2] = config.ARIA2_RPC_TOKEN + "@" + parts[2]
    rpc_url = "/".join(parts)
    proxy = xmlrpc.client.ServerProxy(rpc_url)
    response = proxy.aria2.getVersion(config.ARIA2_RPC_TOKEN)
    version = response["version"]
    if not version:
        print_warning("Get aria2c version failed")
        return
    # numeric, component-wise comparison
    if [int(x) for x in version.split(".")] < [1, 18, 4]:
        print_error(
            "you are using old aria2 version, please upgrade to it >1.18.4"
        )
def create_dir():
    """Create every directory BGmi needs at runtime, warning when no home
    directory is configured."""
    required = (BGMI_PATH, SAVE_PATH, TMP_PATH, SCRIPT_PATH, TOOLS_PATH,
                FRONT_STATIC_PATH)
    if not os.environ.get('HOME', os.environ.get('USERPROFILE', '')):
        print_warning('$HOME not set, use \'/tmp/\'')
    # bgmi home dir
    try:
        for directory in required:
            if os.path.exists(directory):
                continue
            os.makedirs(directory)
            print_success('%s created successfully' % directory)
    except OSError as e:
        print_error('Error: {0}'.format(str(e)))
def download(self):
    """Invoke the deprecated xunlei-lixian CLI to fetch ``self.torrent``
    into ``self.save_path``."""
    print_warning(
        'XunleiLixian is deprecated, please choose aria2-rpc or transmission-rpc.'
    )
    vcode_path = os.path.join(TMP_PATH, 'vcode.jpg')
    overwrite_flag = '--overwrite' if self.overwrite else ''
    command = [
        XUNLEI_LX_PATH,
        'download',
        '--torrent',
        overwrite_flag,
        '--output-dir={0}'.format(self.save_path),
        self.torrent,
        '--verification-code-path={0}'.format(vcode_path),
    ]
    print_info('Run command {0}'.format(' '.join(command)))
    print_warning('Verification code path: {0}'.format(vcode_path))
    self.call(command)
def create_dir():
    """Create the BGmi runtime directories, reporting each one as created
    or already present."""
    if not os.environ.get('HOME', ''):
        print_warning('$HOME not set, use \'/tmp/\'')
    tools_path = os.path.join(BGMI_PATH, 'tools')
    # bgmi home dir
    path_to_create = (BGMI_PATH, SAVE_PATH, TMP_PATH, SCRIPT_PATH, tools_path,
                      FRONT_STATIC_PATH)
    try:
        for path in path_to_create:
            if not os.path.exists(path):
                # create first, report after: the original printed success
                # *before* os.mkdir, so a failing mkdir still claimed success
                os.mkdir(path)
                print_success('%s created successfully' % path)
            else:
                print_warning('%s already exists' % path)
    except OSError as e:
        print_error('Error: {0}'.format(str(e)))
def install():
    """Download and install the xunlei-lixian CLI under BGMI_PATH/tools,
    then symlink its entry point at XUNLEI_LX_PATH."""
    # install xunlei-lixian
    import tarfile
    import requests

    print_info(
        "Downloading xunlei-lixian from https://github.com/iambus/xunlei-lixian/"
    )
    r = requests.get(
        "https://github.com/iambus/xunlei-lixian/tarball/master",
        stream=True,
        headers={"Accept-Encoding": ""},
    )
    # delete=False keeps the temp file on disk so tarfile can reopen it by name
    f = NamedTemporaryFile(delete=False)
    with f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
        f.close()
    print_success("Download successfully, save at %s, extracting ..." % f.name)
    zip_file = tarfile.open(f.name, "r:gz")
    zip_file.extractall(os.path.join(BGMI_PATH, "tools/xunlei-lixian"))
    # the first member of the tarball is its top-level directory name
    dir_name = zip_file.getnames()[0]
    print_info("Create link file ...")
    if not os.path.exists(XUNLEI_LX_PATH):
        os.symlink(
            os.path.join(
                BGMI_PATH, "tools/xunlei-lixian/{}/lixian_cli.py".format(dir_name)
            ),
            XUNLEI_LX_PATH,
        )
    else:
        print_warning("{} already exists".format(XUNLEI_LX_PATH))
    print_success("All done")
    print_info(
        "Please run command '{} config' to configure your lixian-xunlei "
        "(Notice: only for Thunder VIP)".format(XUNLEI_LX_PATH)
    )
def create_dir():
    """Create the BGmi runtime directory tree and, on first install,
    write the running version into the ``old`` marker file."""
    required = (BGMI_PATH, SAVE_PATH, TMP_PATH, SCRIPT_PATH, TOOLS_PATH,
                FRONT_STATIC_PATH)
    if not os.environ.get('HOME', os.environ.get('USERPROFILE', '')):
        print_warning('$HOME not set, use \'/tmp/\'')
    # bgmi home dir
    try:
        for directory in required:
            if os.path.exists(directory):
                continue
            os.makedirs(directory)
            print_success('%s created successfully' % directory)
        version_marker = os.path.join(BGMI_PATH, 'old')
        # create OLD if not exist on install
        if not os.path.exists(version_marker):
            with open(version_marker, 'w') as marker:
                marker.write(__version__)
    except OSError as e:
        print_error('Error: {0}'.format(str(e)))
def create_dir():
    """Ensure every BGmi runtime directory exists; on a fresh install also
    persist the current version to the ``old`` marker file."""
    if not os.environ.get('HOME', os.environ.get('USERPROFILE', '')):
        print_warning('$HOME not set, use \'/tmp/\'')
    # bgmi home dir
    targets = (BGMI_PATH, SAVE_PATH, TMP_PATH, SCRIPT_PATH, TOOLS_PATH,
               FRONT_STATIC_PATH)
    try:
        missing = [p for p in targets if not os.path.exists(p)]
        for path in missing:
            os.makedirs(path)
            print_success('%s created successfully' % path)
        old_marker = os.path.join(BGMI_PATH, 'old')
        # create OLD if not exist on install
        if not os.path.exists(old_marker):
            with open(old_marker, 'w') as fh:
                fh.write(__version__)
    except OSError as e:
        print_error('Error: {0}'.format(str(e)))
def bangumi_calendar(self, force_update=False, save=True, cover=None):
    """Return the weekly bangumi schedule as a dict keyed by weekday.

    :param force_update: re-fetch from the data source; silently
        downgraded when the network is unreachable
    :type force_update: bool
    :param save: set true to enable save bangumi data to database
    :type save: bool
    :param cover: list of cover url (of scripts) want to download;
        NOTE(review): the passed-in list is mutated (missing covers are
        appended) — confirm callers expect this
    :type cover: list[str]
    """
    if force_update and not test_connection():
        force_update = False
        print_warning('Network is unreachable')
    if force_update:
        print_info('Fetching bangumi info ...')
        weekly_list = self.fetch(save=save)
    else:
        weekly_list = Bangumi.get_updating_bangumi()
    # fall back to a fetch when the local database is empty
    if not weekly_list:
        print_warning('Warning: no bangumi schedule, fetching ...')
        weekly_list = self.fetch(save=save)
    if cover is not None:
        # download cover to local
        cover_to_be_download = cover
        for daily_bangumi in weekly_list.values():
            for bangumi in daily_bangumi:
                _, file_path = convert_cover_url_to_path(bangumi['cover'])
                # re-download when the file is missing or not a valid image
                if not (os.path.exists(file_path) and imghdr.what(file_path)):
                    cover_to_be_download.append(bangumi['cover'])
        if cover_to_be_download:
            print_info('Updating cover ...')
            download_cover(cover_to_be_download)
    return weekly_list
def fetch_(ret):
    """CLI handler: print the filter configured for bangumi ``ret.name``
    and the episodes the data source currently offers for it.

    Uses the legacy model API where ``select_obj`` populates the object
    in place; presumably the object is truthy only when the row exists —
    TODO confirm against the model implementation.
    """
    bangumi_obj = Bangumi(name=ret.name)
    bangumi_obj.select_obj()
    followed_obj = Followed(bangumi_name=bangumi_obj.name)
    followed_obj.select_obj()
    followed_filter_obj = Filter(bangumi_name=ret.name)
    followed_filter_obj.select_obj()
    print_filter(followed_filter_obj)
    if bangumi_obj:
        print_info('Fetch bangumi {0} ...'.format(bangumi_obj.name))
        # --not-ignore keeps old rows in the result set
        _, data = website.get_maximum_episode(
            bangumi_obj, ignore_old_row=False if ret.not_ignore else True)
        if not data:
            print_warning('Nothing.')
        for i in data:
            print_success(i['title'])
    else:
        print_error('Bangumi {0} not exist'.format(ret.name))
def download_status(status=None):
    """Print every download record grouped by status, emitting a heading
    line whenever the status value changes (rows arrive ordered by
    status)."""
    headings = {
        STATUS_DOWNLOADING: 'Downloading items:',
        STATUS_NOT_DOWNLOAD: 'Not downloaded items:',
        STATUS_DOWNLOADED: 'Downloaded items:',
    }
    printers = {
        STATUS_NOT_DOWNLOAD: print_info,
        STATUS_DOWNLOADING: print_warning,
        STATUS_DOWNLOADED: print_success,
    }
    last_status = -1
    for download_data in Download.get_all_downloads(status=status):
        latest_status = download_data['status']
        name = ' {0}. <{1}: {2}>'.format(download_data['id'],
                                         download_data['name'],
                                         download_data['episode'])
        if latest_status != last_status and latest_status in headings:
            print(headings[latest_status])
        printer = printers.get(latest_status)
        if printer is not None:
            printer(name, indicator=False)
        last_status = latest_status
def download(self):
    """Invoke the deprecated xunlei-lixian CLI to fetch ``self.torrent``
    into ``self.save_path``."""
    print_warning(
        "XunleiLixian is deprecated, please choose aria2-rpc or transmission-rpc."
    )
    vcode_path = os.path.join(TMP_PATH, "vcode.jpg")
    overwrite_flag = "--overwrite" if self.overwrite else ""
    command = [
        XUNLEI_LX_PATH,
        "download",
        "--torrent",
        overwrite_flag,
        "--output-dir={}".format(self.save_path),
        self.torrent,
        "--verification-code-path={}".format(vcode_path),
    ]
    print_info("Run command {}".format(" ".join(command)))
    print_warning("Verification code path: {}".format(vcode_path))
    self.call(command)
def bangumi_calendar(self, force_update=False, save=True, cover=False):
    """Return the weekly bangumi schedule (dict keyed by weekday).

    :param force_update: re-fetch from the source unless the network is down
    :param save: persist fetched data to the database
    :param cover: when truthy, download covers that are missing locally
    """
    if force_update and not test_connection():
        force_update = False
        print_warning('network is unreachable')
    if force_update:
        print_info('fetching bangumi info ...')
        Bangumi.delete_all()
        weekly_list = self.fetch(save=save)
    else:
        weekly_list = Bangumi.get_all_bangumi()
    if not weekly_list:
        print_warning('warning: no bangumi schedule, fetching ...')
        weekly_list = self.fetch(save=save)
    # merge bangumi provided by user scripts into the weekday buckets
    runner = ScriptRunner()
    patch_list = runner.get_models_dict()
    for i in patch_list:
        weekly_list[i['update_time'].lower()].append(i)
    if cover:
        # download cover to local
        cover_to_be_download = []
        for daily_bangumi in weekly_list.values():
            for bangumi in daily_bangumi:
                followed_obj = Followed(bangumi_name=bangumi['name'])
                if followed_obj:
                    bangumi['status'] = followed_obj.status
                _, file_path, _ = self.convert_cover_to_path(bangumi['cover'])
                if not glob.glob(file_path):
                    cover_to_be_download.append(bangumi['cover'])
        if cover_to_be_download:
            print_info('updating cover')
            # use a distinct loop name: the original iterated ``for cover
            # in ...``, shadowing the ``cover`` parameter
            for cover_url in tqdm.tqdm(cover_to_be_download):
                self.download_cover(cover_url)
    return weekly_list
def search_by_keyword(self, keyword: str, count: Optional[int] = None) -> list:
    """Search the site for ``keyword`` across ``count`` pages (default 3)
    and return Episode results ordered oldest-first."""
    pages = count or 3
    rows = []
    for page in range(1, pages + 1):
        data = get_response(SEARCH_URL, "POST", json={
            "query": keyword,
            "p": page
        })
        if "torrents" not in data:
            print_warning("No torrents in response data, please re-run")
            return []
        rows.extend(data["torrents"])
    result = []
    for info in rows:
        # publish_time carries fractional seconds; drop them before parsing
        published = datetime.datetime.strptime(
            info["publish_time"].split(".")[0], "%Y-%m-%dT%H:%M:%S")
        result.append(
            Episode(
                download=TORRENT_URL + info["_id"] + "/download.torrent",
                name=keyword,
                subtitle_group=info["team_id"],
                title=info["title"],
                episode=self.parse_episode(info["title"]),
                time=int(time.mktime(published.timetuple())),
            ))
    # Avoid bangumi collection.
    # It's ok but it will waste your traffic and bandwidth.
    result.reverse()
    return result
def download_status(status=None):
    """List download records grouped by status with one heading per group
    (records arrive sorted by status, so a status change marks a new
    group)."""
    titles = {
        STATUS_DOWNLOADING: 'Downloading items:',
        STATUS_NOT_DOWNLOAD: 'Not downloaded items:',
        STATUS_DOWNLOADED: 'Downloaded items:',
    }
    line_printers = {
        STATUS_NOT_DOWNLOAD: print_info,
        STATUS_DOWNLOADING: print_warning,
        STATUS_DOWNLOADED: print_success,
    }
    last_status = -1
    for download_data in Download.get_all_downloads(status=status):
        latest_status = download_data['status']
        name = ' {0}. <{1}: {2}>'.format(download_data['id'],
                                         download_data['name'],
                                         download_data['episode'])
        if latest_status != last_status and latest_status in titles:
            print(titles[latest_status])
        emit = line_printers.get(latest_status)
        if emit is not None:
            emit(name, indicator=False)
        last_status = latest_status
def install():
    """Download and install the xunlei-lixian CLI under BGMI_PATH/tools,
    then symlink its entry point at XUNLEI_LX_PATH."""
    # install xunlei-lixian
    import tarfile
    import requests
    print_info(
        'Downloading xunlei-lixian from https://github.com/iambus/xunlei-lixian/'
    )
    r = requests.get(
        'https://github.com/iambus/xunlei-lixian/tarball/master',
        stream=True,
        headers={'Accept-Encoding': ''})
    # delete=False keeps the temp file on disk so tarfile can reopen it by name
    f = NamedTemporaryFile(delete=False)
    with f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
        f.close()
    print_success('Download successfully, save at %s, extracting ...' % f.name)
    zip_file = tarfile.open(f.name, 'r:gz')
    zip_file.extractall(os.path.join(BGMI_PATH, 'tools/xunlei-lixian'))
    # the first member of the tarball is its top-level directory name
    dir_name = zip_file.getnames()[0]
    print_info('Create link file ...')
    if not os.path.exists(XUNLEI_LX_PATH):
        os.symlink(
            os.path.join(
                BGMI_PATH,
                'tools/xunlei-lixian/{0}/lixian_cli.py'.format(dir_name)),
            XUNLEI_LX_PATH)
    else:
        print_warning('{0} already exists'.format(XUNLEI_LX_PATH))
    print_success('All done')
    print_info(
        'Please run command \'{0} config\' to configure your lixian-xunlei '
        '(Notice: only for Thunder VIP)'.format(XUNLEI_LX_PATH))
def download_status(status=None):
    """Print download records grouped by status; a heading is emitted each
    time the status value changes (rows arrive ordered by status)."""
    section_titles = {
        STATUS_DOWNLOADING: "Downloading items:",
        STATUS_NOT_DOWNLOAD: "Not downloaded items:",
        STATUS_DOWNLOADED: "Downloaded items:",
    }
    row_printers = {
        STATUS_NOT_DOWNLOAD: print_info,
        STATUS_DOWNLOADING: print_warning,
        STATUS_DOWNLOADED: print_success,
    }
    last_status = -1
    for download_data in Download.get_all_downloads(status=status):
        latest_status = download_data["status"]
        name = " {}. <{}: {}>".format(
            download_data["id"], download_data["name"], download_data["episode"]
        )
        if latest_status != last_status and latest_status in section_titles:
            print(section_titles[latest_status])
        printer = row_printers.get(latest_status)
        if printer is not None:
            printer(name, indicator=False)
        last_status = latest_status
def download_status(status=None):
    """Print download status from the local database, then query the
    aria2c RPC server and print its task lists.

    :param status: one of the STATUS_* constants, or None for all states
    """
    Aria2DownloadRPC.check_aria2c_version()
    print_info("Print download status in database")
    BaseDownloadService.download_status(status=status)
    print()
    print_info("Print download status in aria2c-rpc")
    try:
        server = xmlrpc.client.ServerProxy(config.ARIA2_RPC_URL)
        # self.server.aria2
        # map each local status to the RPC method(s) listing tasks in it
        status_dict = {
            STATUS_DOWNLOADING: ["tellActive"],
            STATUS_NOT_DOWNLOAD: ["tellWaiting"],
            STATUS_DOWNLOADED: ["tellStopped"],
            None: ["tellStopped", "tellWaiting", "tellActive"],
        }
        for method in status_dict.get(status):
            # tellWaiting/tellStopped need an (offset, num) window;
            # tellActive takes no positional window
            if method not in ("tellActive",):
                params = (0, 1000)
            else:
                params = ()
            # aria2 >= 1.18.4 expects the secret token as first argument
            if Aria2DownloadRPC.old_version:
                data = getattr(server.aria2, method)(*params)
            else:
                data = getattr(server.aria2, method)(
                    config.ARIA2_RPC_TOKEN, *params
                )
            if data:
                print_warning(f"RPC {method}:", indicator=False)
                for row in data:
                    print_success("- {}".format(row["dir"]), indicator=False)
                    for file_ in row["files"]:
                        print_info(" * {}".format(file_["path"]), indicator=False)
    except Exception:
        print_error("Cannot connect to aria2-rpc server")
def __new__(cls, *args, **kwargs):
    """Singleton constructor.

    The first call scans SCRIPT_PATH for ``*.py`` files, instantiates each
    file's ``Script`` class and registers the instances accepted by
    ``cls.check``; later calls return the cached instance.
    """
    if cls._defined is None:
        script_files = glob.glob('{}{}*.py'.format(SCRIPT_PATH, os.path.sep))
        for i in script_files:
            try:
                s = imp.load_source('script', os.path.join(SCRIPT_PATH, i))
                script_class = getattr(s, 'Script')()
                if cls.check(script_class):
                    cls.scripts.append(script_class)
                    print_info('Load script {} successfully.'.format(i))
            except Exception:
                # A broken user script must not break startup.  Was a bare
                # ``except:``, which also swallowed SystemExit and
                # KeyboardInterrupt.
                print_warning('Load script {} failed, ignored'.format(i))
                if os.getenv('DEBUG_SCRIPT'):  # pragma: no cover
                    traceback.print_exc()
        cls._defined = super(ScriptRunner, cls).__new__(cls, *args, **kwargs)
    return cls._defined
def fetch_(ret):
    """CLI handler: show the filter for bangumi ``ret.name`` and list the
    episodes the data source currently provides for it.

    Prints an error and returns early when the bangumi is unknown or not
    followed.
    """
    try:
        bangumi_obj = Bangumi.get(name=ret.name)
    except Bangumi.DoesNotExist:
        print_error('Bangumi {0} not exist'.format(ret.name))
        return
    try:
        Followed.get(bangumi_name=bangumi_obj.name)
    except Followed.DoesNotExist:
        print_error('Bangumi {0} is not followed'.format(ret.name))
        return
    followed_filter_obj = Filter.get(bangumi_name=ret.name)
    print_filter(followed_filter_obj)
    print_info('Fetch bangumi {0} ...'.format(bangumi_obj.name))
    # --not-ignore keeps old rows in the result set
    _, data = website.get_maximum_episode(bangumi_obj,
                                          ignore_old_row=False if ret.not_ignore else True)
    if not data:
        print_warning('Nothing.')
    for i in data:
        print_success(i['title'])
def download_status(status=None):
    """Print download status from the local database, then from the
    aria2c RPC server.

    :param status: a STATUS_* constant, or None to list every state
    """
    Aria2DownloadRPC.check_aria2c_version()
    print_info('Print download status in database')
    DownloadService.download_status(status=status)
    print()
    print_info('Print download status in aria2c-rpc')
    try:
        server = PatchedServerProxy(ARIA2_RPC_URL)
        # self.server.aria2
        # map each local status to the RPC method(s) listing tasks in it
        status_dict = {
            STATUS_DOWNLOADING: ['tellActive'],
            STATUS_NOT_DOWNLOAD: ['tellWaiting'],
            STATUS_DOWNLOADED: ['tellStopped'],
            None: ['tellStopped', 'tellWaiting', 'tellActive'],
        }
        for method in status_dict.get(status):
            # tellWaiting/tellStopped need an (offset, num) window
            if method not in ('tellActive', ):
                params = (0, 1000)
            else:
                params = ()
            # aria2 >= 1.18.4 expects the secret token as first argument
            if Aria2DownloadRPC.old_version:
                data = server.aria2[method](*params)
            else:
                data = server.aria2[method](ARIA2_RPC_TOKEN, *params)
            if data:
                print_warning('RPC {0}:'.format(method), indicator=False)
                for row in data:
                    print_success('- {0}'.format(row['dir']), indicator=False)
                    for file_ in row['files']:
                        print_info(' * {0}'.format(file_['path']), indicator=False)
    except Exception:
        # dropped the unused ``as e`` binding; any RPC/connection failure
        # degrades to a single error message
        print_error('Cannot connect to aria2-rpc server')
def download_status(status=None):
    """Print download status from the local database, then from the
    aria2c RPC server.

    :param status: a STATUS_* constant, or None to list every state
    """
    Aria2DownloadRPC.check_aria2c_version()
    print_info('Print download status in database')
    BaseDownloadService.download_status(status=status)
    print()
    print_info('Print download status in aria2c-rpc')
    try:
        server = PatchedServerProxy(ARIA2_RPC_URL)
        # self.server.aria2
        # map each local status to the RPC method(s) listing tasks in it
        status_dict = {
            STATUS_DOWNLOADING: ['tellActive'],
            STATUS_NOT_DOWNLOAD: ['tellWaiting'],
            STATUS_DOWNLOADED: ['tellStopped'],
            None: ['tellStopped', 'tellWaiting', 'tellActive'],
        }
        for method in status_dict.get(status):
            # tellWaiting/tellStopped need an (offset, num) window
            if method not in ('tellActive', ):
                params = (0, 1000)
            else:
                params = ()
            # aria2 >= 1.18.4 expects the secret token as first argument
            if Aria2DownloadRPC.old_version:
                data = server.aria2[method](*params)
            else:
                data = server.aria2[method](ARIA2_RPC_TOKEN, *params)
            if data:
                print_warning('RPC {0}:'.format(method), indicator=False)
                for row in data:
                    print_success('- {0}'.format(row['dir']), indicator=False)
                    for file_ in row['files']:
                        print_info(' * {0}'.format(file_['path']), indicator=False)
    except Exception:
        # dropped the unused ``as e`` binding; any RPC/connection failure
        # degrades to a single error message
        print_error('Cannot connect to aria2-rpc server')
def fetch_(ret):
    """CLI handler: show the filter for bangumi ``ret.name`` and list the
    episodes the source currently offers; errors out early when the
    bangumi is unknown or not followed."""
    try:
        bangumi_obj = Bangumi.get(name=ret.name)
    except Bangumi.DoesNotExist:
        print_error('Bangumi {0} not exist'.format(ret.name))
        return
    try:
        Followed.get(bangumi_name=bangumi_obj.name)
    except Followed.DoesNotExist:
        # was ``except Bangumi.DoesNotExist``: Followed.get raises
        # Followed.DoesNotExist, so a non-followed bangumi crashed instead
        # of printing this message
        print_error('Bangumi {0} is not followed'.format(ret.name))
        return
    followed_filter_obj = Filter.get(bangumi_name=ret.name)
    print_filter(followed_filter_obj)
    print_info('Fetch bangumi {0} ...'.format(bangumi_obj.name))
    # --not-ignore keeps old rows in the result set
    _, data = website.get_maximum_episode(
        bangumi_obj, ignore_old_row=False if ret.not_ignore else True)
    if not data:
        print_warning('Nothing.')
    for i in data:
        print_success(i['title'])
def install():
    """aria2 has no automated installer here; just tell the user to
    install it themselves."""
    print_warning('Please install aria2 by yourself')
def check_path(self):
    """Ensure the download target directory exists, creating it (and any
    missing parents) when absent."""
    if os.path.exists(self.save_path):
        return
    print_warning('Create dir {0}'.format(self.save_path))
    os.makedirs(self.save_path)
def install():
    """The transmission backend depends on the third-party
    ``transmissionrpc`` package; instruct the user to pip-install it."""
    print_warning('Please run `pip install transmissionrpc`')
def cal_wrapper(ret):
    """CLI handler for ``bgmi cal``: print the weekly schedule as a
    colored, column-aligned table in the terminal.

    Reads flags from ``ret``: ``force_update``, ``today``, ``no_save``,
    ``download_cover``.
    """
    force_update = ret.force_update
    today = ret.today
    save = not ret.no_save
    runner = ScriptRunner()
    if ret.download_cover:
        cover = runner.get_download_cover()
    else:
        cover = None
    weekly_list = website.bangumi_calendar(
        force_update=force_update, save=save, cover=cover)
    # merge bangumi provided by user scripts into the weekday buckets
    patch_list = runner.get_models_dict()
    for i in patch_list:
        weekly_list[i['update_time'].lower()].append(i)

    def shift(seq, n):
        # rotate seq left by n so today's weekday is listed first
        n %= len(seq)
        return seq[n:] + seq[:n]

    if today:
        weekday_order = (Bangumi.week[datetime.datetime.today().weekday()],)
    else:
        weekday_order = shift(Bangumi.week,
                              datetime.datetime.today().weekday())
    # fixed width under CI where the real terminal size is unavailable
    env_columns = 42 if os.environ.get('TRAVIS_CI', False) else get_terminal_col()
    col = 42
    if env_columns < col:
        print_warning('terminal window is too small.')
        env_columns = col
    # number of bangumi printed side by side, capped at 3
    row = int(env_columns / col if env_columns / col <= 3 else 3)

    def print_line():
        num = col - 3
        split = '-' * num + ' '
        print(split * row)

    for index, weekday in enumerate(weekday_order):
        if weekly_list[weekday.lower()]:
            print(
                '%s%s. %s' % (
                    GREEN,
                    weekday if not today else 'Bangumi Schedule for Today (%s)' % weekday,
                    COLOR_END),
                end='')
            print()
            print_line()
            for i, bangumi in enumerate(weekly_list[weekday.lower()]):
                if bangumi['status'] in (STATUS_UPDATED, STATUS_FOLLOWED) and 'episode' in bangumi:
                    bangumi['name'] = '%s(%d)' % (
                        bangumi['name'], bangumi['episode'])
                # count half-width (ASCII printable) vs full-width chars so
                # mixed CJK/ASCII names align in fixed-width columns
                half = len(re.findall('[%s]' % string.printable, bangumi['name']))
                full = (len(bangumi['name']) - half)
                space_count = col - 2 - (full * 2 + half)
                # some characters render narrower/wider than the rule above;
                # correct the padding for them
                for s in SPACIAL_APPEND_CHARS:
                    if s in bangumi['name']:
                        space_count += bangumi['name'].count(s)
                for s in SPACIAL_REMOVE_CHARS:
                    if s in bangumi['name']:
                        space_count -= bangumi['name'].count(s)
                if bangumi['status'] == STATUS_FOLLOWED:
                    bangumi['name'] = '%s%s%s' % (
                        YELLOW, bangumi['name'], COLOR_END)
                if bangumi['status'] == STATUS_UPDATED:
                    bangumi['name'] = '%s%s%s' % (
                        GREEN, bangumi['name'], COLOR_END)
                try:
                    print(' ' + bangumi['name'], ' ' * space_count, end='')
                except UnicodeEncodeError:
                    # terminal encoding cannot represent the name; skip it
                    continue
                if (i + 1) % row == 0 or i + 1 == len(weekly_list[weekday.lower()]):
                    print()
            print()
def update(name, download=None, not_ignore=False):
    """Check followed (or the named) bangumi for new episodes, refresh
    their statuses, and queue downloads.

    :param name: list of bangumi names; empty means all followed
    :param download: episode selection to force-download; only honoured
        when exactly one name is given
    :param not_ignore: also consider old rows from the data source
    :return: dict with 'status', 'message' and updated/downloaded data
    """
    logger.debug('updating bangumi info with args: download: {}'.format(download))
    result = {'status': 'info', 'message': '', 'data': {'updated': [], 'downloaded': []}}
    ignore = not bool(not_ignore)
    print_info('marking bangumi status ...')
    now = int(time.time())
    # demote anything not updated within the last 24h back to FOLLOWED
    for i in Followed.get_all_followed():
        if i['updated_time'] and int(i['updated_time'] + 60 * 60 * 24) < now:
            followed_obj = Followed.get(bangumi_name=i['bangumi_name'])
            followed_obj.status = STATUS_FOLLOWED
            followed_obj.save()
    # same demotion for script-provided bangumi
    for script in ScriptRunner().scripts:
        obj = script.Model().obj
        if obj.updated_time and int(obj.updated_time + 60 * 60 * 24) < now:
            obj.status = STATUS_FOLLOWED
            obj.save()
    print_info('updating subscriptions ...')
    download_queue = []
    if download:
        # --download only makes sense with exactly one named bangumi
        if not name:
            print_warning('No specified bangumi, ignore `--download` option')
        if len(name) > 1:
            print_warning('Multiple specified bangumi, ignore `--download` option')
    if not name:
        updated_bangumi_obj = Followed.get_all_followed()
    else:
        updated_bangumi_obj = []
        for i in name:
            try:
                f = Followed.get(bangumi_name=i)
                f = model_to_dict(f)
                updated_bangumi_obj.append(f)
            except DoesNotExist:
                # silently skip unknown names
                pass
    runner = ScriptRunner()
    script_download_queue = runner.run()
    for subscribe in updated_bangumi_obj:
        print_info('fetching %s ...' % subscribe['bangumi_name'])
        try:
            bangumi_obj = Bangumi.get(name=subscribe['bangumi_name'])
        except Bangumi.DoesNotExist:
            print_error('Bangumi<{0}> does not exists.'.format(subscribe['bangumi_name']),
                        exit_=False)
            continue
        try:
            followed_obj = Followed.get(bangumi_name=subscribe['bangumi_name'])
        except Followed.DoesNotExist:
            print_error('Bangumi<{0}> is not followed.'.format(subscribe['bangumi_name']),
                        exit_=False)
            continue
        episode, all_episode_data = website.get_maximum_episode(bangumi=bangumi_obj,
                                                                ignore_old_row=ignore,
                                                                max_page=1)
        if (episode.get('episode') > subscribe['episode']) or (len(name) == 1 and download):
            if len(name) == 1 and download:
                # explicit episode selection from the command line
                episode_range = download
            else:
                episode_range = range(
                    subscribe['episode'] + 1, episode.get('episode', 0) + 1)
            print_success('%s updated, episode: %d' %
                          (subscribe['bangumi_name'], episode['episode']))
            followed_obj.episode = episode['episode']
            followed_obj.status = STATUS_UPDATED
            followed_obj.updated_time = int(time.time())
            followed_obj.save()
            result['data']['updated'].append({'bangumi': subscribe['bangumi_name'],
                                              'episode': episode['episode']})
            # queue the first matching row for every episode in range
            for i in episode_range:
                for epi in all_episode_data:
                    if epi['episode'] == i:
                        download_queue.append(epi)
                        break
    if download is not None:
        result['data']['downloaded'] = download_queue
        download_prepare(download_queue)
        download_prepare(script_download_queue)
        print_info('Re-downloading ...')
        download_prepare(Download.get_all_downloads(
            status=STATUS_NOT_DOWNLOAD))
    return result