def run(self) -> None:
    """Poll the video-check API until the submitted video is open for viewing,
    then delete the local source files and exit.

    BUG FIX: the original only slept inside the "not yet open" branch, so a
    KeyError (malformed API payload) looped back instantly and hammered the
    API. The sleep now runs after every non-terminating iteration.
    """
    logging.basicConfig(
        level=utils.get_log_level(self.config),
        format=
        '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        handlers=[
            logging.FileHandler(os.path.join(
                self.config.get('root', {}).get('logger', {}).get('log_path', "./log"),
                "VideoChecker_" +
                datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') +
                '.log'),
                "a",
                encoding="utf-8")
        ])
    while True:
        video_info = self.common_request("GET", self.check_url, {
            'bvid': self.bvid
        }).json()
        try:
            # code == 0 and state == 0 means the video passed review and is public.
            if video_info['code'] == 0 and video_info['data']['state'] == 0:
                logging.info("稿件%s 已开放浏览,准备删除 %s", self.bvid, self.path)
                utils.del_files_and_dir(self.path)
                return
            logging.info("稿件%s 未开放浏览", self.bvid)
        except KeyError:
            # Unexpected payload shape: log it instead of silently spinning.
            logging.warning("Unexpected response while checking %s: %s",
                            self.bvid, video_info)
        time.sleep(self.check_interval)
def run(self) -> None:
    """Keep recording the live stream until the room goes offline."""
    log_path = os.path.join(
        self.config['root']['logger']['log_path'],
        "LiveRecoder_" +
        datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log')
    logging.basicConfig(
        level=utils.get_log_level(self.config),
        format=
        '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        handlers=[logging.FileHandler(log_path, "a", encoding="utf-8")])
    while True:
        try:
            # Guard clause: once the stream ends, log and stop looping.
            if not self.live_status:
                logging.info(self.generate_log('下播了'))
                break
            stream_urls = self.get_live_urls()
            record_name = utils.generate_filename(self.room_id)
            record_path = os.path.join(self.record_dir, record_name)
            self.record(stream_urls[0], record_path)
            logging.info(self.generate_log('录制完成' + record_path))
        except Exception as err:
            logging.error(
                self.generate_log('Error while checking or recording:' +
                                  str(err) + traceback.format_exc()))
def __init__(self, config: Dict, record_dir: str, danmu_path: str):
    """Prepare paths and timing state for processing one recorded session.

    Args:
        config: merged configuration dict with 'root' and 'spec' sections.
        record_dir: directory containing the raw record files.
        danmu_path: path of the danmu (chat) log for this session.
    """
    super().__init__(config)
    self.record_dir = record_dir
    self.danmu_path = danmu_path
    # The session start time is recovered from the record filenames themselves.
    self.global_start = utils.get_global_start_from_records(
        self.record_dir)
    self.merge_conf_path = utils.get_merge_conf_path(
        self.room_id, self.global_start, config['root']['data_path'])
    # NOTE(review): 'get_mergd_filename' looks misspelled — confirm utils
    # actually defines this exact name (a sibling chunk calls
    # 'get_merged_filename').
    self.merged_file_path = utils.get_mergd_filename(
        self.room_id, self.global_start, config['root']['data_path'])
    self.outputs_dir = utils.init_outputs_dir(self.room_id,
                                              self.global_start,
                                              config['root']['data_path'])
    self.splits_dir = utils.init_splits_dir(
        self.room_id, self.global_start, self.config['root']['data_path'])
    self.times = []  # filled in later by the processing steps (not visible in this chunk)
    self.live_start = self.global_start
    self.live_duration = 0
    # Per-stage log file; basicConfig is a no-op if the root logger was
    # already configured earlier in this process.
    logging.basicConfig(
        level=utils.get_log_level(config),
        format=
        '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        filename=os.path.join(
            config['root']['logger']['log_path'], "Processor_" +
            datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') +
            '.log'),
        filemode='a')
def run(self):
    """Run the danmu recorder's asyncio loop until it finishes or the user interrupts."""
    log_dir = self.config['root']['logger']['log_path']
    log_name = ("DanmuRecoder_" +
                datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') +
                '.log')
    file_handler = logging.FileHandler(
        os.path.join(log_dir, log_name), "a", encoding="utf-8")
    logging.basicConfig(
        level=utils.get_log_level(self.config),
        format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        handlers=[file_handler])
    try:
        asyncio.run(self.__startup())
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop recording; log it rather than crash.
        logging.info(self.generate_log("键盘指令退出"))
def main():
    """Create a CloudFormation stack from CLI arguments.

    BUG FIXES:
    - the "Unexpected error" log string contained a literal newline that split
      the string across lines (a syntax error); it is rejoined here.
    - ``boto3.Session(...)`` has no ``create_stack`` method; a CloudFormation
      client is now created from the session.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--name', type=str, required=True,
                        help='the name of the stack to create.')
    parser.add_argument('--templateurl', type=str, required=True,
                        help='the url where the stack template can be fetched.')
    parser.add_argument('--params', type=str, required=True,
                        help='the key value pairs for the parameters of the stack.')
    parser.add_argument('--topicarn', type=str, required=True,
                        help='the SNS topic arn for notifications to be sent to.')
    parser.add_argument('--log', type=str, default="INFO", required=False,
                        help='which log level. DEBUG, INFO, WARNING, CRITICAL')
    parser.add_argument('--tags', type=str, required=False,
                        help='the tags to attach to the stack.')
    parser.add_argument('--config', type=str, required=False,
                        help='the config file used for the application.')
    args = parser.parse_args()
    # init LOGGER
    logging.basicConfig(level=get_log_level(args.log), format=LOG_FORMAT)
    # load the client using app config or default
    client = boto3.Session(
        profile_name='pythonAutomation').client('cloudformation')
    try:
        # setup the model
        template_object = get_json(args.templateurl)
        params = make_kv_from_args(args.params, "Parameter", False)
        tags = make_kv_from_args(args.tags)
        response = client.create_stack(
            StackName=args.name,
            TemplateBody=json.dumps(template_object),
            Parameters=params,
            DisableRollback=False,
            TimeoutInMinutes=2,
            NotificationARNs=[args.topicarn],
            Tags=tags
        )
        # we expect a response, if its missing on non 200 then show response
        if 'ResponseMetadata' in response and \
                response['ResponseMetadata']['HTTPStatusCode'] < 300:
            logging.info("succeed. response: {0}".format(json.dumps(response)))
        else:
            logging.critical(
                "There was an Unexpected error. response: {0}".format(
                    json.dumps(response)))
    except ValueError as e:
        logging.critical("Value error caught: {0}".format(e))
    except botocore.exceptions.ClientError as e:
        logging.critical("Boto client error caught: {0}".format(e))
    except:
        # catch any failure
        logging.critical("Unexpected error: {0}".format(sys.exc_info()[0]))
def __init__(self, config: dict, global_start: datetime.datetime):
    """Set up the danmu (chat) recorder for one live session.

    Args:
        config: merged configuration dict with 'root' and 'spec' sections.
        global_start: session start time, used to name the danmu log file.
    """
    super().__init__(config)
    self.log_filename = utils.init_danmu_log_file(
        self.room_id, global_start,
        config['root']['global_path']['data_path'])
    # Bilibili danmu broadcast websocket endpoint.
    self.room_server_api = 'wss://broadcastlv.chat.bilibili.com/sub'
    # Timestamped log file; basicConfig is a no-op if the root logger was
    # already configured elsewhere in this process.
    logging.basicConfig(
        level=utils.get_log_level(config),
        format=
        '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        filename=os.path.join(
            config['root']['logger']['log_path'],
            datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') +
            '.log'),
        filemode='a')
def __init__(self, config: Dict, record_dir: str, danmu_path: str):
    """Prepare paths, timing state and log files for processing one session.

    Args:
        config: merged configuration dict with 'root' and 'spec' sections.
        record_dir: directory containing the raw record files.
        danmu_path: path of the danmu (chat) log for this session.
    """
    super().__init__(config)
    self.record_dir = record_dir
    self.danmu_path = danmu_path
    # The session start time is recovered from the record filenames.
    self.global_start = utils.get_global_start_from_records(
        self.record_dir)
    self.merge_conf_path = utils.get_merge_conf_path(
        self.room_id, self.global_start,
        config.get('root', {}).get('data_path', "./"))
    self.merged_file_path = utils.get_merged_filename(
        self.room_id, self.global_start,
        config.get('root', {}).get('data_path', "./"))
    self.outputs_dir = utils.init_outputs_dir(
        self.room_id, self.global_start,
        config.get('root', {}).get('data_path', "./"))
    self.splits_dir = utils.init_splits_dir(
        self.room_id, self.global_start,
        self.config.get('root', {}).get('data_path', "./"))
    self.times = []  # filled in later during processing (not visible in this chunk)
    self.live_start = self.global_start
    self.live_duration = 0
    # force=True (Python 3.8+) replaces any previously installed handlers so
    # this stage logs to its own timestamped file.
    logging.basicConfig(
        level=utils.get_log_level(config),
        format=
        '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        handlers=[
            logging.FileHandler(
                os.path.join(
                    config.get('root', {}).get('logger', {}).get('log_path', "./log"),
                    "Processor_" +
                    datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') +
                    '.log',
                ),
                mode='a',
                encoding='utf-8',
            )
        ],
        force=True)
    # Separate handle, presumably for capturing FFmpeg output (name suggests;
    # confirm at call sites). NOTE(review): it is never closed in this chunk.
    self.ffmpeg_logfile_hander = open(os.path.join(
        config.get('root', {}).get('logger', {}).get('log_path', "./log"),
        "FFMpeg_" +
        datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log'),
        mode="a",
        encoding="utf-8")
def main():
    """Delete a CloudFormation stack, optionally retaining named resources.

    BUG FIX: the retained-resource list was assigned to a misspelled variable
    ('retained_respources'), so ``RetainResources`` was always empty and
    ``--retain`` silently did nothing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--name', type=str, required=True,
                        help='the name of the stack to create.')
    parser.add_argument('--retain', type=str, required=False,
                        help='the names (comma separated) of the resources to retain.')
    parser.add_argument('--log', type=str, default="INFO", required=False,
                        help='which log level. DEBUG, INFO, WARNING, CRITICAL')
    parser.add_argument('--config', type=str, required=False,
                        help='the config file used for the application.')
    args = parser.parse_args()
    # init LOGGER
    logging.basicConfig(level=get_log_level(args.log), format=LOG_FORMAT)
    # load the client using app config or default
    client = make_cloudformation_client(args.config)
    try:
        retained_resources = []
        if args.retain:
            retained_resources = args.retain.split(",")
        response = client.delete_stack(
            StackName=args.name,
            RetainResources=retained_resources
        )
        # we expect a response, if its missing on non 200 then show response
        if 'ResponseMetadata' in response and \
                response['ResponseMetadata']['HTTPStatusCode'] < 300:
            logging.info("succeed. response: {0}".format(json.dumps(response)))
        else:
            logging.critical(
                "There was an Unexpected error. response: {0}".format(
                    json.dumps(response)))
    except ValueError as e:
        logging.critical("Value error caught: {0}".format(e))
    except:
        # catch any failure
        logging.critical("Unexpected error: {0}".format(sys.exc_info()[0]))
def main(arguments):
    """Pacemaker Deploy.

    Usage:
        deploy.py create DEPLOYMENT_FILE [-q] [-f LOG_FILE] [-l LOG_LEVEL]
        deploy.py destroy DEPLOYMENT_NAME [-q] [-f LOG_FILE] [-l LOG_LEVEL]
        deploy.py provision HOST (--only=PROVISION_PHASE | --from=PROVISION_PHASE) [-q] [-f LOG_FILE] [-l LOG_LEVEL]
        deploy.py (-h | --help)
        deploy.py (-v | --version)

    Arguments:
        DEPLOYMENT_FILE                      File containing deployment specification
        DEPLOYMENT_NAME                      Name of the deployment (specified on creation inside specification file)
        HOST                                 Host IP to provision

    Options:
        -h, --help                           Show this screen.
        -v, --version                        Show version.
        -q, --quiet                          Do not log to stdout
        -f LOG_FILE, --logfile=LOG_FILE      Send logging to file
        -l LOG_LEVEL, --loglevel=LOG_LEVEL   Logging level (one of DEBUG, INFO, WARNING, ERROR, CRITICAL) [default: INFO]
        --only=PROVISION_PHASE               Executes only the provision phase specified (one of INSTALL, CONFIG or START)
        --from=PROVISION_PHASE               Executes all provision phases starting from specified

    Examples:
        deploy.py create three_node_cluster.json -q --logfile=output.log
        deploy.py destroy cluster1 --loglevel=WARN
        deploy.py provision --host=10.162.30.40 --from=CONFIG -l CRITICAL
        deploy.py -h
        deploy.py --version
    """
    # NOTE(review): the argument-dict keys suggest the docstring above is a
    # docopt usage spec — if so, its wording defines the CLI; do not edit it
    # casually. Confirm against the module entry point.
    # log handlers
    handlers = []
    if not arguments["--quiet"]:
        handlers.append(logging.StreamHandler())
    if arguments["--logfile"]:
        logfile = arguments["--logfile"]
        handlers.append(logging.FileHandler(logfile, mode="w"))
    loglevel = utils.get_log_level(arguments["--loglevel"], logging.INFO)
    logging.basicConfig(
        level=loglevel,
        format=
        "[%(asctime)s] %(levelname)s - %(module)s[%(lineno)d] - %(message)s",
        datefmt="%m/%d/%Y %I:%M:%S %p",
        handlers=handlers)
    # if no handlers, full disable logging
    if len(handlers) == 0:
        logging.disable(1024)  # 1024 > CRITICAL(50), so every level is disabled
    # execute actions
    if arguments["create"]:
        deployment_file = arguments["DEPLOYMENT_FILE"]
        res = create(deployment_file)
        return
    if arguments["destroy"]:
        deployment_name = arguments["DEPLOYMENT_NAME"]
        res = destroy(deployment_name)
        return
    if arguments["provision"]:
        host = arguments["HOST"]
        # --from wins when both are present (the usage spec makes them exclusive)
        phase = (arguments["--from"] or arguments["--only"]).upper()
        only = arguments["--only"] != None
        # phase name -> tuple consumed by provision_task; tuple semantics are
        # defined by provision_task — TODO confirm.
        phases = {
            "INSTALL": ("tmp", "i", "install"),
            "CONFIG": ("root", "c", "config"),
            "START": ("root", "s", "start")
        }
        if not phase in phases:
            print(
                f"Used PROVISION_PHASE ({phase}) not in [INSTALL, CONFIG, START]"
            )
            print(f"Use deploy.py --help to show usage")
            return
        if only:
            res = provision_task(host, [phases[phase]])
        else:
            # run the chosen phase plus every later phase, in order
            provision_phases = [phases[phase]]
            if phase == "INSTALL":
                provision_phases.append(phases["CONFIG"])
                provision_phases.append(phases["START"])
            if phase == "CONFIG":
                provision_phases.append(phases["START"])
            res = provision_task(host, provision_phases)
        return
def upload(self, global_start: datetime.datetime) -> dict:
    """Upload clip videos and/or the full record for one live session.

    Args:
        global_start: session start time, used to expand the title/desc
            templates from the config.

    Returns:
        A dict with optional "clips"/"record" entries, each holding the
        submitted video's avid/bvid. Returns None if the upload raised.
    """
    logging.basicConfig(
        level=utils.get_log_level(self.config),
        format=
        '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        filename=os.path.join(
            self.config.get('root', {}).get('logger', {}).get('log_path', "./log"),
            "Uploader_" +
            datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') +
            '.log'),
        filemode='a')
    return_dict = {}
    # e.g. "2021年01月01日"
    datestr = global_start.strftime('%Y{y}%m{m}%d{d}').format(y='年',
                                                              m='月',
                                                              d='日')
    # Substitution values available to the title/desc templates in the config.
    format_dict = {
        "date": datestr,
        "year": global_start.year,
        "month": global_start.month,
        "day": global_start.day,
        "hour": global_start.hour,
        "minute": global_start.minute,
        "second": global_start.second,
        "rough_time": utils.get_rough_time(global_start.hour),
        "room_name": self.room_info['room_name']
    }
    try:
        if self.config.get('spec', {}).get('uploader', {}).get('clips', {}).get('upload_clips', False):
            clips_video_data = Data()
            clips_video_data.copyright = self.config.get('spec', {}).get(
                'uploader', {}).get('copyright', 2)
            clips_video_data.title = self.config.get('spec', {}).get(
                'uploader', {}).get('clips', {}).get('title', "").format(**format_dict)
            clips_video_data.desc = self.config.get('spec', {}).get(
                'uploader', {}).get('clips', {}).get('desc', "").format(**format_dict)
            clips_video_data.source = "https://live.bilibili.com/" + self.room_id
            clips_video_data.tid = self.config.get('spec', {}).get(
                'uploader', {}).get('clips', {}).get('tid', 27)
            clips_video_data.set_tag(
                self.config.get('spec', {}).get('uploader', {}).get('clips', {}).get('tags', []))
            self.uploader.video = clips_video_data
            filelists = os.listdir(self.output_dir)
            for filename in filelists:
                # Skip files under 1 MiB.
                if os.path.getsize(os.path.join(self.output_dir,
                                                filename)) < 1024 * 1024:
                    continue
                file_path = os.path.join(self.output_dir, filename)
                video_part = self.uploader.upload_file(file_path)
                # Part title: the last "_"-separated token of the stem.
                video_part['title'] = os.path.splitext(filename)[0].split(
                    "_")[-1]
                video_part['desc'] = self.config.get('spec', {}).get(
                    'uploader', {}).get('clips', {}).get('desc', "").format(**format_dict)
                clips_video_data.append(video_part)
            if os.path.exists(
                    self.config.get('spec', {}).get('uploader', {}).get('clips', {}).get('cover', "")):
                clips_video_data.cover = self.uploader.cover_up(
                    self.config.get('spec', {}).get('uploader', {}).get('clips', {}).get('cover', ""))
            clips_video_ret = self.uploader.submit()
            if clips_video_ret['code'] == 0 and clips_video_ret[
                    'data'] is not None:
                return_dict["clips"] = {
                    "avid": clips_video_ret['data']['aid'],
                    "bvid": clips_video_ret['data']['bvid']
                }
        if self.config.get('spec', {}).get('uploader', {}).get('record', {}).get('upload_record', False):
            record_video_data = Data()
            record_video_data.copyright = self.config.get('spec', {}).get(
                'uploader', {}).get('copyright', 2)
            record_video_data.title = self.config.get('spec', {}).get(
                'uploader', {}).get('record', {}).get('title', "").format(**format_dict)
            record_video_data.desc = self.config.get('spec', {}).get(
                'uploader', {}).get('record', {}).get('desc', "").format(**format_dict)
            record_video_data.source = "https://live.bilibili.com/" + self.room_id
            record_video_data.tid = self.config.get('spec', {}).get(
                'uploader', {}).get('record', {}).get('tid', 27)
            record_video_data.set_tag(
                self.config.get('spec', {}).get('uploader', {}).get('record', {}).get('tags', []))
            self.uploader.video = record_video_data
            filelists = os.listdir(self.splits_dir)
            for filename in filelists:
                # Skip files under 1 MiB.
                if os.path.getsize(os.path.join(self.splits_dir,
                                                filename)) < 1024 * 1024:
                    continue
                file_path = os.path.join(self.splits_dir, filename)
                video_part = self.uploader.upload_file(file_path)
                video_part['title'] = os.path.splitext(filename)[0].split(
                    "_")[-1]
                video_part['desc'] = self.config.get('spec', {}).get(
                    'uploader', {}).get('record', {}).get('desc', "").format(**format_dict)
                record_video_data.append(video_part)
            if os.path.exists(
                    self.config.get('spec', {}).get('uploader', {}).get('record', {}).get('cover', "")):
                record_video_data.cover = self.uploader.cover_up(
                    self.config.get('spec', {}).get('uploader', {}).get('record', {}).get('cover', ""))
            record_video_ret = self.uploader.submit()
            if record_video_ret['code'] == 0 and record_video_ret[
                    'data'] is not None:
                return_dict["record"] = {
                    "avid": record_video_ret['data']['aid'],
                    "bvid": record_video_ret['data']['bvid']
                }
    except Exception as e:
        logging.error(
            self.generate_log('Error while uploading:' + str(e) +
                              traceback.format_exc()))
        return None
    finally:
        # Always release the uploader session, success or failure.
        self.uploader.close()
    return return_dict
if __name__ == "__main__": root_config_filename = sys.argv[1] spec_config_filename = sys.argv[2] with open(root_config_filename, "r") as f: root_config = json.load(f) with open(spec_config_filename, "r") as f: spec_config = json.load(f) config = { 'root': root_config, 'spec': spec_config } utils.check_and_create_dir(config['root']['global_path']['data_path']) utils.check_and_create_dir(config['root']['logger']['log_path']) logging.basicConfig(level=utils.get_log_level(config), format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S', filename=os.path.join(config['root']['logger']['log_path'], datetime.datetime.now( ).strftime('%Y-%m-%d_%H-%M-%S')+'.log'), filemode='a') utils.init_data_dirs(config['root']['global_path']['data_path']) bl = BiliLive(config) prev_live_status = False while True: if not prev_live_status and bl.live_status: print("开播啦~") prev_live_status = bl.live_status start = datetime.datetime.now() blr = BiliLiveRecorder(config, start) bdr = BiliDanmuRecorder(config, start)
def main():
    """Create a CloudFormation stack (test-harness variant with hardcoded params).

    BUG FIX: in the ResourceTypes list a missing comma made Python implicitly
    concatenate "AWS::EC2::RouteTable" and "AWS::EC2::Route" into one invalid
    type string — the likely cause of the original "this is not working" note.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--name', type=str, required=True,
                        help='the name of the stack to create.')
    parser.add_argument(
        '--templateurl', type=str, required=True,
        help='the url where the stack template can be fetched.')
    parser.add_argument(
        '--params', type=str, required=True,
        help='the key value pairs for the parameters of the stack.')
    parser.add_argument(
        '--topicarn', type=str, required=True,
        help='the SNS topic arn for notifications to be sent to.')
    parser.add_argument('--log', type=str, default="INFO", required=False,
                        help='which log level. DEBUG, INFO, WARNING, CRITICAL')
    parser.add_argument('--tags', type=str, required=False,
                        help='the tags to attach to the stack.')
    parser.add_argument('--config', type=str, required=False,
                        help='the config file used for the application.')
    args = parser.parse_args()
    # init LOGGER
    logging.basicConfig(level=get_log_level(args.log), format=LOG_FORMAT)
    #load the client using app config or default
    client = make_cloudformation_client(args.config)
    try:
        # setup the model
        #template_object = get_json_from_url(args.templateurl)
        template_object = get_template_json()
        params = make_kv_from_args(args.params, "Parameter", False)
        tags = make_kv_from_args(args.tags)
        logging.info("params: " + str(params))
        # params hardcoded for testing
        params = [{
            'ParameterValue': 'rj-tokyo-dev',
            'UsePreviousValue': False,
            'ParameterKey': 'KeyName'
        }, {
            'ParameterValue': 'rjcluster',
            'UsePreviousValue': False,
            'ParameterKey': 'EcsClusterName'
        }, {
            'ParameterValue': 'rjinstance',
            'UsePreviousValue': False,
            'ParameterKey': 'EcsInstanceType'
        }]
        # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudformation.html#CloudFormation.Client.create_stack
        response = client.create_stack(
            StackName=args.name,
            TemplateBody=json.dumps(template_object),
            Parameters=params,
            DisableRollback=False,
            TimeoutInMinutes=2,
            NotificationARNs=[args.topicarn],
            Tags=tags,
            ResourceTypes=[
                "AWS::EC2::Instance",
                "AWS::EC2::VPC",
                "AWS::EC2::RouteTable",
                "AWS::EC2::Route",
                "AWS::EC2::Subnet",
                "AWS::EC2::SubnetRouteTableAssociation"
            ],
        )
        # we expect a response, if its missing on non 200 then show response
        if 'ResponseMetadata' in response and \
                response['ResponseMetadata']['HTTPStatusCode'] < 300:
            logging.info("succeed. response: {0}".format(json.dumps(response)))
        else:
            logging.critical(
                "There was an Unexpected error. response: {0}".format(
                    json.dumps(response)))
    except ValueError as e:
        logging.critical("Value error caught: {0}".format(e))
    except botocore.exceptions.ClientError as e:
        logging.critical("Boto client error caught: {0}".format(e))
    except:
        # catch any failure
        logging.critical("Unexpected error: {0}".format(sys.exc_info()[0]))
def upload(self, global_start: datetime.datetime) -> dict:
    """Upload clip videos and/or the full record for one live session.

    Args:
        global_start: session start time, used to expand the title/desc/tag
            templates from the config.

    Returns:
        A dict with optional "clips"/"record" entries, each holding the
        submitted video's avid/bvid; partial results are returned if an
        upload raises (the error is logged).

    BUG FIX: tags were read as a *list* and then ``.format(...)`` was called
    on the list object itself, which raises AttributeError and aborted the
    upload. Each tag string is now formatted individually.
    """
    logging.basicConfig(
        level=utils.get_log_level(self.config),
        format=
        '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        filename=os.path.join(
            self.config.get('root', {}).get('logger', {}).get('log_path', "./log"),
            "Uploader_" +
            datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') +
            '.log'),
        filemode='a')
    return_dict = {}
    # e.g. "2021年01月01日"
    datestr = global_start.strftime('%Y{y}%m{m}%d{d}').format(y='年',
                                                              m='月',
                                                              d='日')
    year = global_start.strftime('%Y')
    month = global_start.strftime('%m')
    day = global_start.strftime('%d')
    # Quarter-of-day label: 0-5h, 6-11h, 12-17h, 18-23h.
    rough_time = {
        0: '凌晨',
        1: '上午',
        2: '下午',
        3: '晚上'
    }[int(global_start.hour / 6)]
    room_info = self.get_room_info()  # fetched once instead of three times
    room_name = room_info['roomname']
    area_name = room_info['area_name']
    parent_area_name = room_info['parent_area_name']
    # One shared substitution mapping for every template below;
    # str.format ignores keys a template does not use.
    fmt = {
        "date": datestr,
        "year": year,
        "month": month,
        "day": day,
        "rough_time": rough_time,
        "room_name": room_name,
        "area_name": area_name,
        "parent_area_name": parent_area_name,
    }
    uploader_cfg = self.config.get('spec', {}).get('uploader', {})
    clips_cfg = uploader_cfg.get('clips', {})
    record_cfg = uploader_cfg.get('record', {})
    root_uploader_cfg = self.config.get('root', {}).get('uploader', {})
    try:
        if clips_cfg.get('upload_clips', False):
            output_parts = []
            for filename in os.listdir(self.output_dir):
                # Skip files under 1 MiB (likely broken fragments).
                if os.path.getsize(os.path.join(self.output_dir,
                                                filename)) < 1024 * 1024:
                    continue
                output_parts.append(
                    VideoPart(
                        path=os.path.join(self.output_dir, filename),
                        # Part title: last "_"-separated token of the stem.
                        title=os.path.splitext(filename)[0].split("_")[-1],
                        desc=clips_cfg.get('desc', "").format(**fmt),
                    ))
            avid, bvid = upload(
                self.uploader,
                output_parts,
                cr=uploader_cfg.get('copyright', 2),
                title=clips_cfg.get('title', "").format(**fmt),
                tid=clips_cfg.get('tid', 27),
                tags=[t.format(**fmt) for t in clips_cfg.get('tags', [])],
                desc=clips_cfg.get('desc', "").format(**fmt),
                source="https://live.bilibili.com/" + self.room_id,
                thread_pool_workers=root_uploader_cfg.get('thread_pool_workers', 1),
                max_retry=root_uploader_cfg.get('max_retry', 10),
                upload_by_edit=root_uploader_cfg.get('upload_by_edit', False))
            return_dict["clips"] = {"avid": avid, "bvid": bvid}
        if record_cfg.get('upload_record', False):
            splits_parts = []
            for filename in os.listdir(self.splits_dir):
                if os.path.getsize(os.path.join(self.splits_dir,
                                                filename)) < 1024 * 1024:
                    continue
                splits_parts.append(
                    VideoPart(
                        path=os.path.join(self.splits_dir, filename),
                        # Full-record parts keep the raw filename as title.
                        title=filename,
                        desc=record_cfg.get('desc', "").format(**fmt),
                    ))
            avid, bvid = upload(
                self.uploader,
                splits_parts,
                cr=uploader_cfg.get('copyright', 2),
                title=record_cfg.get('title', "").format(**fmt),
                tid=record_cfg.get('tid', 27),
                tags=[t.format(**fmt) for t in record_cfg.get('tags', [])],
                desc=record_cfg.get('desc', "").format(**fmt),
                source="https://live.bilibili.com/" + self.room_id,
                thread_pool_workers=root_uploader_cfg.get('thread_pool_workers', 1),
                max_retry=root_uploader_cfg.get('max_retry', 10),
                upload_by_edit=root_uploader_cfg.get('upload_by_edit', False))
            return_dict["record"] = {"avid": avid, "bvid": bvid}
    except Exception as e:
        logging.error(
            self.generate_log('Error while uploading:' + str(e) +
                              traceback.format_exc()))
    return return_dict
formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True, ) parser.add_argument("-d", "--device", required=True, type=str, help="Device Codename") parser.add_argument("-l", "--list", required=True, type=str, help="Json list mapping") parser.add_argument( "-q", "--quiet", action="count", help="Decrease output verbosity \ Default level is INFO", ) parser.add_argument( "-t", "--test-mode", required=False, action="store_true", help="Use Test Mode without mounting img", ) args = parser.parse_args() log.setLevel(get_log_level(args.quiet)) main(args.device, args.list, args.test_mode)
def upload(self, global_start: datetime.datetime) -> dict:
    """Upload clip videos and/or the full record for one live session.

    Args:
        global_start: session start time, used to expand the title/desc
            templates from the config.

    Returns:
        A dict with optional "clips"/"record" entries, each holding the
        submitted video's avid/bvid; partial results are returned if an
        upload raises (the error is logged).
    """
    logging.basicConfig(
        level=utils.get_log_level(self.config),
        format=
        '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        filename=os.path.join(
            self.config['root']['logger']['log_path'], "Uploader_" +
            datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') +
            '.log'),
        filemode='a')
    return_dict = {}
    try:
        if self.config['spec']['uploader']['clips']['upload_clips']:
            output_parts = []
            # e.g. "2021年01月01日"
            datestr = global_start.strftime('%Y{y}%m{m}%d{d}').format(
                y='年', m='月', d='日')
            filelists = os.listdir(self.output_dir)
            for filename in filelists:
                # Skip files under 1 MiB.
                if os.path.getsize(os.path.join(self.output_dir,
                                                filename)) < 1024 * 1024:
                    continue
                # Part title: the last "_"-separated token of the stem.
                title = os.path.splitext(filename)[0].split("_")[-1]
                output_parts.append(
                    VideoPart(
                        path=os.path.join(self.output_dir, filename),
                        title=title,
                        desc=self.config['spec']['uploader']['clips']
                        ['desc'].format(date=datestr),
                    ))
            avid, bvid = upload(
                self.uploader,
                output_parts,
                title=self.config['spec']['uploader']['clips']
                ['title'].format(date=datestr),
                tid=self.config['spec']['uploader']['clips']['tid'],
                tags=self.config['spec']['uploader']['clips']['tags'],
                desc=self.config['spec']['uploader']['clips']
                ['desc'].format(date=datestr),
                source="https://live.bilibili.com/" + self.room_id,
                thread_pool_workers=self.config['root']['uploader']
                ['thread_pool_workers'],
                max_retry=self.config['root']['uploader']['max_retry'],
                upload_by_edit=self.config['root']['uploader']
                ['upload_by_edit'])
            return_dict["clips"] = {"avid": avid, "bvid": bvid}
        if self.config['spec']['uploader']['record']['upload_record']:
            splits_parts = []
            datestr = global_start.strftime('%Y{y}%m{m}%d{d}').format(
                y='年', m='月', d='日')
            filelists = os.listdir(self.splits_dir)
            for filename in filelists:
                # Skip files under 1 MiB.
                if os.path.getsize(os.path.join(self.splits_dir,
                                                filename)) < 1024 * 1024:
                    continue
                # Full-record parts keep the raw filename as title.
                title = filename
                splits_parts.append(
                    VideoPart(
                        path=os.path.join(self.splits_dir, filename),
                        title=title,
                        desc=self.config['spec']['uploader']['record']
                        ['desc'].format(date=datestr),
                    ))
            avid, bvid = upload(
                self.uploader,
                splits_parts,
                title=self.config['spec']['uploader']['record']
                ['title'].format(date=datestr),
                tid=self.config['spec']['uploader']['record']['tid'],
                tags=self.config['spec']['uploader']['record']['tags'],
                desc=self.config['spec']['uploader']['record']
                ['desc'].format(date=datestr),
                source="https://live.bilibili.com/" + self.room_id,
                thread_pool_workers=self.config['root']['uploader']
                ['thread_pool_workers'],
                max_retry=self.config['root']['uploader']['max_retry'],
                upload_by_edit=self.config['root']['uploader']
                ['upload_by_edit'])
            return_dict["record"] = {"avid": avid, "bvid": bvid}
    except Exception as e:
        logging.error(
            self.generate_log('Error while uploading:' + str(e) +
                              traceback.format_exc()))
    return return_dict
bp = ByPy() bp.upload(p.merged_file_path) if __name__ == "__main__": root_config_filename = sys.argv[1] spec_config_filename = sys.argv[2] with open(root_config_filename, "r", encoding="UTF-8") as f: root_config = json.load(f) with open(spec_config_filename, "r", encoding="UTF-8") as f: spec_config = json.load(f) config = {'root': root_config, 'spec': spec_config} utils.check_and_create_dir(config['root']['global_path']['data_path']) utils.check_and_create_dir(config['root']['logger']['log_path']) logging.basicConfig( level=utils.get_log_level(config), format= '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S', handlers=[ logging.FileHandler(os.path.join( config['root']['logger']['log_path'], "Main_" + datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log'), "a", encoding="utf-8") ]) utils.init_data_dirs(config['root']['global_path']['data_path']) if config['root']['enable_baiduyun']: from bypy import ByPy bp = ByPy()
def main():
    """Create the 'my-stack' CloudFormation stack from a local EC2 template."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-r',
        '--region',
        type=str,
        required=False,
        help=
        'AWS Region in which the ec2 instance is being created (default: us-east-1)',
        default='us-east-1')
    parser.add_argument('-i',
                        '--instance-type',
                        type=str,
                        required=False,
                        help='Instance type.(default: t2.micro)',
                        default='t2.micro')
    parser.add_argument('-p',
                        '--pem-name',
                        type=str,
                        required=True,
                        help='The key pair to be used for the instance.')
    parser.add_argument('-l',
                        '--log-level',
                        type=str,
                        required=False,
                        help='DEBUG, INFO, WARNING, CRITICAL',
                        default='INFO')
    args = parser.parse_args()
    client = boto3.client('cloudformation', region_name=args.region)
    # Local template file; parsed YAML is re-dumped as the stack body below.
    fileData = get_yaml('templates/ec2Template.yml')
    logging.basicConfig(level=get_log_level(args.log_level), format=LOG_FORMAT)
    try:
        response = client.create_stack(StackName='my-stack',
                                       TemplateBody=yaml.dump(fileData),
                                       Parameters=[{
                                           'ParameterKey': 'KeyName',
                                           'ParameterValue': args.pem_name
                                       }, {
                                           'ParameterKey': 'InstanceType',
                                           'ParameterValue': args.instance_type
                                       }],
                                       TimeoutInMinutes=3,
                                       OnFailure='ROLLBACK')
        # Any HTTP 2xx status counts as success.
        if 'ResponseMetadata' in response and response['ResponseMetadata'][
                'HTTPStatusCode'] < 300:
            logging.info("succeed. response: {0}".format(json.dumps(response)))
        else:
            logging.critical(
                "There was an Unexpected error. response: {0}".format(
                    json.dumps(response)))
    except botocore.exceptions.ClientError as e:
        logging.critical("Boto error caught: {0}".format(e))
def cross_validate_model_fold(chunk_input: WorkerInput) -> ModelResult:
    """Fit the fold's classifier and evaluate it on train and test splits.

    Args:
        chunk_input: fold payload with 'classifier' (a pipeline — the final
            estimator is accessed as classifier[-1]), 'X_train', 'y_train',
            'X_test', 'fit_kwargs', 'return_model' and optionally
            'feature_names'.

    Returns:
        ModelResult with predictions, scores (probabilities when the
        estimator supports predict_proba, otherwise the hard predictions),
        feature importances when available, the fitted model if requested,
        and elapsed CPU time.

    BUG FIX: ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24,
    so the float-dtype check raised AttributeError on modern NumPy; it now
    uses ``np.issubdtype(..., np.floating)``.
    """
    log("Execution fold", level=2)
    timer = Timer()
    classifier = chunk_input['classifier']
    X_train = chunk_input['X_train']
    y_train = chunk_input['y_train']
    X_test = chunk_input['X_test']
    return_model = chunk_input['return_model']
    if get_log_level() == 1:
        print(".")
    # Fall back to the training frame's columns when names are not supplied.
    feature_names = \
        chunk_input['feature_names'] if \
        ('feature_names' in chunk_input and chunk_input['feature_names'] is not None) \
        else list(X_train.columns)
    classifier.fit(X_train, y_train, **chunk_input['fit_kwargs'])
    y_predict = Series(classifier.predict(X_test), index=X_test.index)
    y_train_predict = Series(classifier.predict(X_train), index=X_train.index)
    try:
        y_predict_probabilities_raw = classifier.predict_proba(X_test)
        y_train_predict_probabilities_raw = classifier.predict_proba(X_train)
    except AttributeError:
        # No predict_proba on this estimator: use hard predictions as scores.
        y_predict_probabilities = y_predict
        y_train_predict_probabilities = y_train_predict
    else:
        probability_columns = [
            f'y_predict_probabilities_{i}'
            for i in range(y_predict_probabilities_raw.shape[1])
        ]
        y_predict_probabilities = DataFrame(y_predict_probabilities_raw,
                                            index=X_test.index,
                                            columns=probability_columns)
        y_train_predict_probabilities = DataFrame(
            y_train_predict_probabilities_raw,
            index=X_train.index,
            columns=probability_columns)
    if np.issubdtype(y_predict.dtype, np.floating):
        # Regressor-style float output: clamp to [0, 1], then round to a label.
        y_predict = y_predict.clip(lower=0, upper=1).map(round)
    try:
        feature_importance = Series(
            classifier[-1].feature_importances_,
            index=feature_names,
        )
    except (TypeError, AttributeError):
        try:
            classifier[-1].coef_
        except AttributeError:
            feature_importance = None
            logging.debug("No feature importance in the result")
        else:
            # Linear models expose coef_ but importance extraction is
            # intentionally disabled here (see commented line below).
            feature_importance = None
            # feature_importance = Series(classifier[-1].coef_[0], index=feature_names)
    if not return_model:
        try:
            # Release xgboost's native booster memory when the model is
            # not returned to the caller.
            classifier[-1].get_booster().__del__()
        except AttributeError:
            pass
    return ModelResult(y_test_score=y_predict_probabilities,
                       y_test_predict=y_predict,
                       y_train_predict=y_train_predict,
                       y_train_score=y_train_predict_probabilities,
                       feature_importance=feature_importance,
                       model=classifier[-1] if return_model else None,
                       elapsed=timer.elapsed_cpu())
try: if len(sys.argv) > 1: all_config_filename = sys.argv[1] with open(all_config_filename, "r", encoding="UTF-8") as f: all_config = json.load(f) else: with open("config.json", "r", encoding="UTF-8") as f: all_config = json.load(f) except Exception as e: print("解析配置文件时出现错误,请检查配置文件!") print("错误详情:" + str(e)) os.system('pause') utils.check_and_create_dir(all_config['root']['logger']['log_path']) logging.basicConfig( level=utils.get_log_level(all_config), format= '%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S', handlers=[ logging.FileHandler(os.path.join( all_config['root']['logger']['log_path'], "Main_" + datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log'), "a", encoding="utf-8") ]) runner_list = [] for room_id in all_config['room_ids']: config = {'root': all_config['root'], 'spec': {"room_id": room_id}}
# Gunicorn-style configuration module: the server reads the module-level
# names defined below (bind, workers, loglevel, certfile, keyfile, on_exit).
import remote_control
from utils import get_config, get_log_level

_config = get_config()
_api = _config["api"]
_ssl_dir = _api["ssl_path"]

bind = _api["listen"]
workers = 4
loglevel = get_log_level(_config["logging"]["log_level"].lower())
certfile = f"{_ssl_dir}/ssl.crt"
keyfile = f"{_ssl_dir}/ssl.key"

# Server Hooks
on_exit = remote_control.on_exit