def RB_RENDER(self):  #5
    """Analyze-action render step.

    Verifies the network mounts first (exits with code 101 when they are
    not ready), runs AnalyzeCallBack(), then best-effort kills any
    leftover applications. When the callback reported a failure, aborts
    via error_exit_log with code 456.
    """
    if self.CheckNetuse():
        print("Net use finished!")
    else:
        print("Net use not finished yet.")
        sys.exit(101)
    self.format_log('渲染', 'start')
    self.G_DEBUG_LOG.info('[BASE.RB_RENDER.start.....]')
    self.AnalyzeCallBack()
    if not self._run_code_result:
        self.LogsCreat("Errors calls,try to kill apps...")
    else:
        self.LogsCreat("Job finished,try to kill apps...")
    if self._Killapps:  # idiom fix: truthiness instead of len()
        # placeholder kept for symmetry with the render variant, which
        # appends main applications (e.g. hython.exe) here
        mainapp = []
        self._Killapps.extend(mainapp)
        try:
            CLASS_COMMON_UTIL.kill_app_list(self._Killapps)
        except Exception:
            # best-effort cleanup; was a bare `except:` which would also
            # swallow SystemExit/KeyboardInterrupt
            pass
        self.LogsCreat("[kill apps done]")
    # if errors
    if not self._run_code_result:
        CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG, "AnalyzeCallBack()", 456)
    self.G_DEBUG_LOG.info('[BASE.RB_RENDER.end.....]')
    self.format_log('done', 'end')
def RB_HAN_RESULT(self):  #7
    """Result-handling step.

    For an 'Analyze' action, pushes the generated cfg json files
    (task/asset/tips) back to the server; any other action runs the
    normal result handling. Aborts via error_exit_log when task.json
    is missing from the node cfg directory.
    """
    self.format_log('.结果处理...', 'start')
    self.G_DEBUG_LOG.info('[BASE.RB_HAN_RESULT.start.....]')
    if self.G_ACTION in ['Analyze']:
        cfg_dir = self.G_WORK_RENDER_TASK_CFG
        node_task_json, node_asset_json, node_tips_json = (
            os.path.join(cfg_dir, json_name)
            for json_name in ('task.json', 'asset.json', 'tips.json'))
        if not os.path.exists(node_task_json):
            CLASS_COMMON_UTIL.error_exit_log(
                self.G_DEBUG_LOG,
                'Analyze file failed . task.json not exists')
        self.copy_cfg_to_server(node_task_json, node_asset_json,
                                node_tips_json)
    else:
        self.result_action()
    self.G_DEBUG_LOG.info('[BASE.RB_HAN_RESULT.end.....]')
    self.format_log('done', 'end')
def RB_FEE(self):
    """Tear down network mounts/substs and persist the fee (billing) file.

    The fee file is only written for real render actions (skipped for
    'Analyze' and 'Pre'). A write failure is logged but does not abort
    the job (is_exit=False).
    """
    # if not self.G_RENDER_CORE_TYPE=="gpu":
    # G_RENDER_OS '0' is Linux (see the py3 __init__); net use / subst
    # only exist on Windows nodes.
    if self.G_RENDER_OS != '0':
        CLASS_COMMON_UTIL.del_net_use()
        CLASS_COMMON_UTIL.del_subst()
    if self.G_ACTION not in ['Analyze', 'Pre']:
        self.format_log('write fee', 'start')
        self.G_DEBUG_LOG.info('[BASE.RB_FEE.start.....]')
        self.G_FEE_PARSER.set('render', 'type', self.G_RENDER_CORE_TYPE)
        # self.G_FEE_PARSER.set('render','start_time','')
        # self.G_FEE_PARSER.set('render','end_time','')
        # self.G_FEE_PARSER.set('render','big_pic','')
        # self.G_FEE_PARSER.set('render','small_pic','')
        try:
            with codecs.open(self.G_FEE_FILE, 'w', 'utf-8') as fee_file:
                self.G_FEE_PARSER.write(fee_file)
        except Exception as e:
            # best-effort: a failed fee write must not fail the job
            CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG,
                                             'Write Fee File Failed',
                                             is_exit=False)
        self.G_DEBUG_LOG.info('[BASE.RB_FEE.end.....]')
        self.format_log('done', 'end')
def RB_RENDER(self):  #5
    """Render step (Houdini).

    Runs RenderCallBack(), then kills any leftover applications
    (including hython.exe, Houdini's batch interpreter). When the
    callback reported a failure, aborts via error_exit_log (code 456).
    """
    self.format_log('渲染', 'start')
    self.G_DEBUG_LOG.info('[BASE.RB_RENDER.start.....]')
    self.RenderCallBack()
    if self._run_code_result:
        self.LogsCreat("Job finished,try to kill apps...")
    else:
        self.LogsCreat("Errors calls,try to kill apps...")
    if self._Killapps:
        # make sure the Houdini batch interpreter is terminated too
        self._Killapps.extend(["hython.exe"])
        try:
            CLASS_COMMON_UTIL.kill_app_list(self._Killapps)
        except:
            pass
        self.LogsCreat("[kill apps done]")
    # if errors
    if not self._run_code_result:
        CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG,
                                         "RenderCallBack()", 456)
    self.G_DEBUG_LOG.info('[BASE.RB_RENDER.end.....]')
    self.format_log('done', 'end')
def RB_CONFIG(self):  #4
    """Configuration step.

    Verifies the network mounts (exits when not ready), then sets up the
    Houdini software/plugins needed for analysis via Execute_Hfs(). On
    failure, best-effort kills leftover applications and aborts via
    error_exit_log with code 123.
    """
    if self.CheckNetuse():
        print("Net use finished!")
    else:
        print("Net use not finish yet.")
        sys.exit()
    self.format_log('渲染配置', 'start')
    self.G_DEBUG_LOG.info('[BASE.RB_CONFIG.start.....]')
    ## setup Houdini software and plugins for analysis
    self.Execute_Hfs()
    if not self._run_code_result:
        self.LogsCreat("Errors calls,try to kill apps...")
        print(self._Killapps)
        if self._Killapps:
            # placeholder for extra main applications to terminate
            mainapp = []
            self._Killapps.extend(mainapp)
            try:
                CLASS_COMMON_UTIL.kill_app_list(self._Killapps)
            except Exception:
                # best-effort kill; was a bare `except:` which would also
                # swallow SystemExit/KeyboardInterrupt
                pass
        # BUG FIX: this previously logged '[BASE.RB_RENDER.end.....]'
        # (copy-paste from RB_RENDER); log this method's own tag before
        # the error exit.
        self.G_DEBUG_LOG.info('[BASE.RB_CONFIG.end.....]')
        CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG,
                                         "Execute_Hfs()", 123)
    self.G_DEBUG_LOG.info('[BASE.RB_CONFIG.end.....]')
    self.format_log('done', 'end')
def main(self, local_path, server_path, my_log=None):
    """Validate rendered output and, on non-Linux nodes with Nuke
    installed, run the texture check over the output files.

    :param local_path: node-side output directory
    :param server_path: server-side output directory
    :param my_log: optional logger forwarded to the helpers
    """
    nuke_path = r'C:/Program Files/Nuke10.0v4'
    check_file = self.get_file_size(local_path, server_path, my_log)
    if not check_file:
        CLASS_COMMON_UTIL.error_exit_log(my_log, 'output have no file!')
    # the Nuke-based texture check is unavailable on Linux nodes
    if platform.system() != 'Linux' and os.path.exists(nuke_path):
        self.check_texture(nuke_path, check_file, my_log)
def RB_KAFKA_NOTIFY(self):  #9
    """Notify the platform over kafka that this action finished.

    Skipped for 'Analyze' and 'Pre' actions. Fills G_KAFKA_MESSAGE_DICT
    (snake_case schema), persists it as a json file under the task work
    dir, copies that file to the config path, then produces the message
    to kafka. A kafka failure is logged but does not abort (is_exit=False).
    """
    if self.G_ACTION not in ['Analyze', 'Pre']:
        self.format_log('kafka发送消息给平台', 'start')
        self.G_DEBUG_LOG.info('[BASE.RB_KAFKA_NOTIFY.start.....]')
        send_time = str(int(time.time()))
        self.G_KAFKA_MESSAGE_DICT["munu_task_id"] = self.G_MUNU_ID
        self.G_KAFKA_MESSAGE_DICT["munu_job_id"] = self.G_JOB_ID
        self.G_KAFKA_MESSAGE_DICT["recommit_flag"] = self.G_RECOMMIT_FLAG
        self.G_KAFKA_MESSAGE_DICT["action"] = self.G_ACTION
        self.G_KAFKA_MESSAGE_DICT['platform'] = self.G_PLATFORM
        self.G_KAFKA_MESSAGE_DICT['send_time'] = send_time
        self.G_KAFKA_MESSAGE_DICT['zone'] = self.G_ZONE
        self.G_KAFKA_MESSAGE_DICT['node_name'] = self.G_NODE_ID
        self.G_KAFKA_MESSAGE_DICT['task_id'] = self.G_TASK_ID
        self.G_KAFKA_MESSAGE_DICT['render_type'] = self.G_RENDER_CORE_TYPE
        #self.G_KAFKA_MESSAGE_DICT['start_time']=self.G_START_TIME
        #self.G_KAFKA_MESSAGE_DICT['big_pic']=[]
        #self.G_KAFKA_MESSAGE_DICT['small_pic']=[]
        #self.G_KAFKA_MESSAGE_DICT['end_time']=self.G_END_TIME
        #self.G_KAFKA_MESSAGE_DICT['distribute_node'] = '1'
        self.G_DEBUG_LOG.info('G_KAFKA_MESSAGE_DICT=' +
                              str(self.G_KAFKA_MESSAGE_DICT))
        # write kafka message json file(e.g. C:\work\render\10002736\2017120500004_0_1.json)
        kafka_message_filename = self.G_MUNU_ID + '_' + self.G_JOB_ID + '.json'
        if self.G_RECOMMIT_FLAG != '':
            # recommitted jobs carry the recommit flag so the filename is unique
            kafka_message_filename = self.G_MUNU_ID + '_' + self.G_JOB_ID + '_' + self.G_RECOMMIT_FLAG + '.json'
        kafka_message_json = os.path.join(self.G_WORK_RENDER_TASK,
                                          kafka_message_filename)
        kafka_message_json_str = json.dumps(self.G_KAFKA_MESSAGE_DICT,
                                            ensure_ascii=False)
        CLASS_COMMON_UTIL.write_file(kafka_message_json_str, kafka_message_json)
        CLASS_COMMON_UTIL.python_copy(kafka_message_json, self.G_CONFIG_PATH)
        try:
            kafka_result = CLASS_KAFKA.produce(self.G_KAFKA_MESSAGE_DICT,
                                               self.G_KAFKA_SERVER,
                                               self.G_KAFKA_TOPIC)
            self.G_DEBUG_LOG.info('kafka_result=' + str(kafka_result))
        except:
            # best-effort notify: a kafka outage must not fail the job
            CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG,
                                             'Send Kafka Message Failed',
                                             is_exit=False)
        self.G_DEBUG_LOG.info('[BASE.RB_KAFKA_NOTIFY.end.....]')
        self.format_log('done', 'end')
def check_result(self): self.G_DEBUG_LOG.info('================[check_result]===============') # server_output=self.G_OUTPUT_USER_PATH.encode(sys.getfilesystemencoding()) server_output = self.G_OUTPUT_USER_PATH node_output = self.G_WORK_RENDER_TASK_OUTPUT.replace('\\', '/') self.G_DEBUG_LOG.info('') self.G_DEBUG_LOG.info(node_output) self.G_DEBUG_LOG.info(server_output) self.G_DEBUG_LOG.info('') node_img_dict = {} server_img_dict = {} output_list = os.listdir(node_output) if not output_list: CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG, 'output is empty!') for root, dirs, files in os.walk(node_output): for name in files: #------------node output file---------------- node_img_file = os.path.join(root, name).replace('\\', '/') img_file = node_img_file.replace(node_output, '') img_file_stat = os.stat(node_img_file) img_file_size = str(os.path.getsize(node_img_file)) node_img_dict[img_file] = img_file_size date = datetime.datetime.fromtimestamp(img_file_stat.st_ctime) img_file_ctime = date.strftime('%Y%m%d%H%M%S') #img_file_ctime=str(img_file_stat.st_ctime) img_file_info = '[node]' + img_file + ' [' + img_file_size + '] [' + img_file_ctime + ']' self.G_DEBUG_LOG.info(img_file_info) #------------server output file---------------- server_img_file = (server_output + img_file).replace('/', '\\') server_img_file_info = '' if os.path.exists(server_img_file): img_file_stat = os.stat(server_img_file) img_file_size = str(os.path.getsize(server_img_file)) date = datetime.datetime.fromtimestamp( img_file_stat.st_ctime) img_file_ctime = date.strftime('%Y%m%d%H%M%S') server_img_file_info = '[server]' + img_file + ' [' + img_file_size + '] [' + img_file_ctime + ']' server_img_dict[img_file] = img_file_size else: server_img_file_info = '[server]' + img_file + ' [missing]' self.G_DEBUG_LOG.info(server_img_file_info) self.G_DEBUG_LOG.info('') self.G_DEBUG_LOG.info('')
def RB_KAFKA_NOTIFY(self):  #9
    """Notify the platform over kafka (camelCase message schema).

    Skipped for 'Analyze' and 'Pre' actions. The message is also written
    to <jobId>_<action>_<sendTime>.json under the task work dir and
    copied to the config path. Kafka failures are logged without
    aborting (is_exit=False).
    """
    if self.G_ACTION not in ['Analyze', 'Pre']:
        self.format_log('kafka发送消息给平台', 'start')
        self.G_DEBUG_LOG.info('[BASE.RB_KAFKA_NOTIFY.start.....]')
        send_time = str(int(time.time()))
        self.G_KAFKA_MESSAGE_DICT["action"] = self.G_ACTION
        self.G_KAFKA_MESSAGE_DICT['platform'] = self.G_PLATFORM
        self.G_KAFKA_MESSAGE_DICT['sendTime'] = send_time
        self.G_KAFKA_MESSAGE_DICT['zone'] = self.G_ZONE
        self.G_KAFKA_MESSAGE_DICT['nodeName'] = self.G_NODE_ID
        self.G_KAFKA_MESSAGE_DICT['jobId'] = self.G_JOB_ID
        self.G_KAFKA_MESSAGE_DICT['taskId'] = self.G_TASK_ID
        self.G_KAFKA_MESSAGE_DICT['renderType'] = self.G_RENDER_CORE_TYPE
        #self.G_KAFKA_MESSAGE_DICT['startTime']=self.G_START_TIME
        #self.G_KAFKA_MESSAGE_DICT['bigPic']=[]
        #self.G_KAFKA_MESSAGE_DICT['smallPic']=[]
        #self.G_KAFKA_MESSAGE_DICT['endTime']=self.G_END_TIME
        #self.G_KAFKA_MESSAGE_DICT['distributeNode'] = '1'
        self.G_DEBUG_LOG.info('G_KAFKA_MESSAGE_DICT=' +
                              str(self.G_KAFKA_MESSAGE_DICT))
        # write kafka message json file(e.g. C:\work\render\10002736\1849754-10002736-frame0001.json)
        # kafka_message_filename = self.G_USER_ID+'-'+self.G_TASK_ID+'-'+self.G_JOB_NAME+'.json'
        kafka_message_filename = self.G_JOB_ID + '_' + self.G_ACTION + '_' + send_time + '.json'
        kafka_message_json = os.path.join(self.G_WORK_RENDER_TASK,
                                          kafka_message_filename)
        kafka_message_json_str = json.dumps(self.G_KAFKA_MESSAGE_DICT,
                                            ensure_ascii=False)
        CLASS_COMMON_UTIL.write_file(kafka_message_json_str, kafka_message_json)
        CLASS_COMMON_UTIL.python_copy(kafka_message_json, self.G_CONFIG_PATH)
        try:
            kafka_result = CLASS_KAFKA.produce(self.G_KAFKA_MESSAGE_DICT,
                                               self.G_KAFKA_SERVER,
                                               self.G_KAFKA_TOPIC)
            self.G_DEBUG_LOG.info('kafka_result=' + str(kafka_result))
        except:
            # best-effort notify: a kafka outage must not fail the job
            CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG,
                                             'Send Kafka Message Failed',
                                             is_exit=False)
        self.G_DEBUG_LOG.info('[BASE.RB_KAFKA_NOTIFY.end.....]')
        self.format_log('done', 'end')
def RB_RENDER(self):  #5
    """Render step, one-machine-multiframe aware.

    Records the render start/end time into G_FEE_PARSER, runs
    RenderCallBack(), kills leftover applications (including hython.exe)
    and aborts via error_exit_log (code 456) when the callback failed.
    """
    self.format_log('渲染', 'start')
    self.G_DEBUG_LOG.info('[BASE.RB_RENDER.start.....]')
    start_time = int(time.time())
    ## multiframes
    if self.g_one_machine_multiframe is True:
        # self.monitor_complete_thread.start()
        render_list = CLASS_COMMON_UTIL.need_render_from_frame(self.G_CG_FRAMES)
        ## render_list => [10,11,12,........]
        # record the first frame's timing; end_time filled in later
        self.render_record.update(
            {render_list[0]: {'start_time': start_time, 'end_time': 0}})
        ## {"10":{'start_time': int(), 'end_time': int()}, "11"}
    self.G_FEE_PARSER.set('render', 'start_time', str(start_time))
    self.RenderCallBack()
    if not self._run_code_result:
        self.LogsCreat("Errors calls,try to kill apps...")
        self.LogsCreat('')
        # NOTE(review): "Eroor" typos are in the emitted log strings;
        # kept as-is since downstream tooling may grep for them
        self.LogsCreat("Eroor Information: %s" % self._erorr_code_info)
        self.LogsCreat("Eroor Called Code: %s" % self._erorr_code)
        self.LogsCreat('')
        self.LogsCreat('')
    else:
        self.LogsCreat("Job finished,try to kill apps...")
    if len(self._Killapps):
        mainapp = ["hython.exe"]
        self._Killapps.extend(mainapp)
        try:
            CLASS_COMMON_UTIL.kill_app_list(self._Killapps)
        except:
            pass
        self.LogsCreat("[kill apps done]")
    # if errors
    if not self._run_code_result:
        CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG, "RenderCallBack()", 456)
    end_time = int(time.time())
    self.G_DEBUG_LOG.info('Render time in G_FEE_PARSER: %s ' % str(end_time - start_time))
    self.G_FEE_PARSER.set('render', 'end_time', str(end_time))
    self.G_DEBUG_LOG.info('[BASE.RB_RENDER.end.....]')
    self.format_log('done', 'end')
def RB_KAFKA_NOTIFY(self):  #9
    """Send the job status message to the platform kafka topic.

    This variant wraps G_KAFKA_MESSAGE_BODY_DICT in a params envelope
    (messageKey / messageId / messageTime / messageBody), writes it to
    <userId>-<taskId>-<jobName>.json under the task work dir and copies
    that file to the config path before producing to kafka. Kafka
    failures are logged without aborting (is_exit=False).
    """
    self.format_log('kafka发送消息给平台', 'start')
    self.G_DEBUG_LOG.info('[BASE.RB_KAFKA_NOTIFY.start.....]')
    start_time = time.time()
    params = {}
    params["messageKey"] = self.G_ACTION
    params['platform'] = self.G_PLATFORM
    params['messageTime'] = str(int(start_time))
    # unique per task/action/job
    params['messageId'] = self.G_TASK_ID + '_' + params[
        "messageKey"] + '_' + self.G_JOB_ID
    self.G_KAFKA_MESSAGE_BODY_DICT['zone'] = self.G_ZONE
    self.G_KAFKA_MESSAGE_BODY_DICT['nodeName'] = self.G_NODE_ID
    self.G_KAFKA_MESSAGE_BODY_DICT['jobId'] = self.G_JOB_ID
    self.G_KAFKA_MESSAGE_BODY_DICT['taskId'] = self.G_TASK_ID
    self.G_KAFKA_MESSAGE_BODY_DICT['renderType'] = self.G_RENDER_CORE_TYPE
    #self.G_KAFKA_MESSAGE_BODY_DICT['startTime']=self.G_START_TIME
    #self.G_KAFKA_MESSAGE_BODY_DICT['smallPic']=self.G_SMALL_PIC
    #self.G_KAFKA_MESSAGE_BODY_DICT['endTime']=self.G_END_TIME
    params['messageBody'] = self.G_KAFKA_MESSAGE_BODY_DICT
    self.G_DEBUG_LOG.info('params=' + str(params))
    # write kafka message json file(e.g. C:\work\render\10002736\1849754-10002736-frame0001.json)
    kafka_message_filename = self.G_USER_ID + '-' + self.G_TASK_ID + '-' + self.G_JOB_NAME + '.json'
    kafka_message_json = os.path.join(self.G_WORK_RENDER_TASK,
                                      kafka_message_filename)
    kafka_message_json_str = json.dumps(params, ensure_ascii=False)
    CLASS_COMMON_UTIL.write_file(kafka_message_json_str, kafka_message_json)
    CLASS_COMMON_UTIL.python_copy(kafka_message_json, self.G_CONFIG_PATH)
    try:
        kafka_result = CLASS_KAFKA.produce(params, self.G_KAFKA_SERVER,
                                           self.G_KAFKA_TOPIC)
        self.G_DEBUG_LOG.info('kafka_result=' + str(kafka_result))
    except:
        # best-effort notify: a kafka outage must not fail the job
        CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG,
                                         'Send Kafka Message Failed',
                                         is_exit=False)
    self.G_DEBUG_LOG.info('[BASE.RB_KAFKA_NOTIFY.end.....]')
    self.format_log('done', 'end')
def RB_HAN_FILE(self):  #3 copy max.7z and so on
    """Stage job files on the render node (3ds Max / V-Ray jobs).

    Moves any previous output aside, then either:
    - final tile of a tiled job: copies the rendered 'block' tiles from
      the temp folder to the node for merging, or
    - normal job: copies max.7z to the node with FastCopy, triggers the
      V-Ray distributed-render node setup, unpacks the archive with 7z
      and copies photon files.
    """
    self.format_log('拷贝脚本文件', 'start')
    self.G_DEBUG_LOG.info('[RenderMax.RB_HAN_FILE.start.....]' + self.G_RENDER_CORE_TYPE)
    # keep the previous output out of the way before rendering anew
    CLASS_COMMON_UTIL.python_move(self.G_WORK_RENDER_TASK_OUTPUT, self.G_WORK_RENDER_TASK_OUTPUTBAK)
    if int(self.G_CG_TILE_COUNT) > 1 and self.G_CG_TILE_COUNT == self.G_CG_TILE:  #merge Pic
        # last tile of a tiled render: gather all tile blocks for merging
        self.G_RENDER_WORK_TASK_BLOCK = os.path.join(self.G_RENDER_WORK_TASK, 'block').replace('/', '\\')
        block_path1 = os.path.join(self.G_TEMP_PATH, self.G_TASK_ID, 'block').replace('/', '\\')
        self.G_DEBUG_LOG.info(block_path1)
        if not os.path.exists(block_path1):
            CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG, 'block not exists in temp folder')
        copy_block_cmd = 'c:\\fcopy\\FastCopy.exe /speed=full /force_close /no_confirm_stop /force_start "' + block_path1 + '\\*.*" /to="' + self.G_RENDER_WORK_TASK_BLOCK.replace('/', '\\') + '"'
        # copy_block_cmd=copy_block_cmd.encode(sys.getfilesystemencoding())
        CLASS_COMMON_UTIL.cmd(copy_block_cmd, my_log=self.G_DEBUG_LOG)
    else:
        #----------------copy max 7z-------------------
        max_7z = os.path.join(self.G_TEMP_PATH, 'max.7z')
        if not os.path.exists(max_7z):
            CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG, 'max.7z not exists in temp folder')
        # /cmd=diff copies only changed files
        copy_max_7z_cmd = 'c:\\fcopy\\FastCopy.exe /cmd=diff /speed=full /force_close /no_confirm_stop /force_start "' + max_7z.replace('/', '\\') + '" /to="' + self.G_WORK_RENDER_TASK_MAX.replace('/', '\\') + '"'
        self.G_DEBUG_LOG.info(copy_max_7z_cmd)
        # CLASS_COMMON_UTIL.cmd(copy_max_7z_cmd.encode(sys.getfilesystemencoding()),my_log=self.G_DEBUG_LOG,my_shell=True)
        CLASS_COMMON_UTIL.cmd(copy_max_7z_cmd, my_log=self.G_DEBUG_LOG, my_shell=True)
        node_max_7z = os.path.join(self.G_WORK_RENDER_TASK_MAX, 'max.7z')
        if not os.path.exists(node_max_7z):
            CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG, ('max.7z not exists in ' + self.G_WORK_RENDER_TASK_MAX))
        #----------------send cmd to node-------------------
        self.vray_distribute_node()
        #------------------unpack max.7z----------------
        self.G_DEBUG_LOG.info('unpack 7z...')
        # -aos: skip existing files; -y: assume yes on all prompts
        unpack_cmd = self.G_DRIVERC_7Z + ' x "' + node_max_7z + '" -y -aos -o"' + self.G_WORK_RENDER_TASK_MAX + '"'
        self.G_DEBUG_LOG.info(unpack_cmd)
        # CLASS_COMMON_UTIL.cmd(unpack_cmd.encode(sys.getfilesystemencoding()),my_log=self.G_DEBUG_LOG,my_shell=True)
        CLASS_COMMON_UTIL.cmd(unpack_cmd, my_log=self.G_DEBUG_LOG, my_shell=True)
        #----------------copy photon-------------------
        self.copy_photon()
    self.G_DEBUG_LOG.info('[RenderMax.RB_HAN_FILE.end.....]')
    self.format_log('done', 'end')
def __init__(self, **param_dict):
    """Base initializer (Python 2 variant, exec-based binding).

    Binds every 'G*' keyword from param_dict onto self, derives ids from
    G_SYS_ARGVS, prepares logging and work directories, parses the frame
    range and loads task.json / asset.json from the server config path.
    """
    print '[BASE.init.start.....]'
    self.G_RENDER_CORE_TYPE = 'cpu'
    print param_dict
    # define global variables
    # G_JOB_ID,G_USER_ID,G_USER_ID_PARENT,G_TASK_ID,G_SCRIPT_POOL,G_ACTION,G_RENDER_OS,G_SYS_ARGVS,G_TASK_JSON && G_CG_TILE,G_CG_TILE_COUNT,G_CG_FRAMES,G_CG_LAYER_NAME,G_SCHEDULER_CLUSTER_ID,G_SCHEDULER_CLUSTER_NODES
    #G_NODE_PY
    for key in param_dict.keys():
        if key.startswith('G'):
            # NOTE(review): exec-based attribute binding on caller-supplied
            # keys; setattr(self, key, param_dict[key]) would be safer
            exec('self.' + key + '=param_dict["' + key + '"]')
    self.G_ACTION_ID = self.G_ACTION + '_' + self.G_JOB_ID
    self.G_MUNU_ID = self.G_SYS_ARGVS[1]
    self.G_JOB_NAME = self.G_SYS_ARGVS[3]
    self.G_NODE_ID = self.G_SYS_ARGVS[4]  #11338764789520
    self.G_NODE_NAME = self.G_SYS_ARGVS[5]
    self.G_WORK = 'c:/work'
    self.G_LOG_WORK = 'C:/log/render'
    if self.G_RENDER_OS == 'Linux':
        self.G_LOG_WORK = '/tmp/nzs-data/log/render'
        self.G_WORK = '/tmp/nzs-data/work'
    #-----------------------------------------log-----------------------------------------------
    self.G_DEBUG_LOG = logging.getLogger('debug_log')
    self.G_RENDER_LOG = logging.getLogger('render_log')
    self.init_log()
    #-----------------------------------------analyse frame-----------------------------------------------
    if 'G_CG_FRAMES' in param_dict:
        self.G_CG_START_FRAME = None
        self.G_CG_END_FRAME = None
        self.G_CG_BY_FRAME = None
        # frame spec: start[-end[[by]]], e.g. '1-100[2]'; negatives allowed
        patt = '(-?\d+)(?:-?(-?\d+)(?:\[(-?\d+)\])?)?'
        m = re.match(patt, self.G_CG_FRAMES)
        if m != None:
            self.G_CG_START_FRAME = m.group(1)
            self.G_CG_END_FRAME = m.group(2)
            self.G_CG_BY_FRAME = m.group(3)
            if self.G_CG_END_FRAME == None:
                # single-frame spec: end defaults to start
                self.G_CG_END_FRAME = self.G_CG_START_FRAME
            if self.G_CG_BY_FRAME == None:
                self.G_CG_BY_FRAME = '1'
        else:
            print 'frames is not match'
    if 'G_CG_TILE' not in param_dict or ('G_CG_TILE' in param_dict and (self.G_CG_TILE == None or self.G_CG_TILE == '')):
        self.G_CG_TILE = '0'
    if 'G_CG_TILE_COUNT' not in param_dict or ('G_CG_TILE_COUNT' in param_dict and (self.G_CG_TILE_COUNT == None or self.G_CG_TILE_COUNT == '')):
        self.G_CG_TILE_COUNT = '1'
    #-----------------------------------------work directory-----------------------------------------------
    self.G_WORK_RENDER = os.path.normpath(os.path.join(self.G_WORK, 'render'))
    self.G_WORK_RENDER_TASK = os.path.normpath(os.path.join(self.G_WORK_RENDER, self.G_TASK_ID))
    self.G_WORK_RENDER_TASK_CFG = os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK, 'cfg'))
    self.G_WORK_RENDER_TASK_OUTPUT = os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK, 'output'))
    self.G_WORK_RENDER_TASK_OUTPUTBAK = os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK, 'outputbak'))
    self.G_WORK_RENDER_TASK_SMALL = os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK, 'small'))
    self.make_dir()
    #-----------------------------------------kafka-----------------------------------------------
    # self.G_KAFKA_HOST='10.60.96.143'
    # self.G_KAFKA_PORT=9091
    self.G_KAFKA_SERVER = ["10.60.96.143:9091"]
    self.G_KAFKA_TOPIC = 'dev-munu-topic'
    self.G_KAFKA_MESSAGE_BODY_DICT = {}
    self.G_KAFKA_MESSAGE_BODY_DICT['startTime'] = str(int(time.time()))
    self.G_KAFKA_MESSAGE_BODY_DICT['endTime'] = str(int(time.time()))
    #self.G_START_TIME=''
    #self.G_SMALL_PIC=''
    #self.G_END_TIME=''
    #-----------------------------------------task.json-----------------------------------------------
    self.G_DEBUG_LOG.info(self.G_TASK_JSON)
    if not os.path.exists(self.G_TASK_JSON):
        CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG, 'task.json not exists')
    CLASS_COMMON_UTIL.python_copy(os.path.normpath(self.G_TASK_JSON), os.path.normpath(self.G_WORK_RENDER_TASK_CFG))
    # from here on G_TASK_JSON points at the node-local copy
    self.G_TASK_JSON = os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK_CFG, 'task.json'))
    # NOTE(review): eval on task.json content; json parsing would be safer
    self.G_TASK_JSON_DICT = eval(open(self.G_TASK_JSON, 'r').read())
    self.G_DEBUG_LOG.info(str(self.G_TASK_JSON_DICT))
    self.G_CG_CONFIG_DICT = self.G_TASK_JSON_DICT['software_config']
    self.G_CG_VERSION = self.G_TASK_JSON_DICT['software_config']['cg_name'] + ' ' + self.G_TASK_JSON_DICT['software_config']['cg_version']
    self.G_ZONE = self.G_TASK_JSON_DICT['system_info']['common']['zone']
    self.G_PLATFORM = self.G_TASK_JSON_DICT['system_info']['common']['platform']
    self.G_TILES_PATH = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['tiles_path'])
    self.G_INPUT_CG_FILE = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['input_cg_file'])
    self.G_CHANNEL = self.G_TASK_JSON_DICT['system_info']['common']['channel']
    self.G_INPUT_PROJECT_PATH = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['input_project_path'])
    self.G_CONFIG_PATH = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['config_path'] + '/')
    self.G_SMALL_PATH = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['small_Path'])
    self.G_OUTPUT_USER_PATH = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['output_user_path'])
    self.G_INPUT_USER_PATH = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['input_user_path'])
    self.G_PLUGIN_PATH = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['plugin_path'])
    self.G_TEMP_PATH = os.path.normpath(self.G_TASK_JSON_DICT['system_info']['common']['temp_path'])
    self.G_TIPS_JSON = os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK_CFG, 'tips.json'))
    self.G_DRIVERC_7Z = os.path.normpath('c:/7-Zip/7z.exe')
    #-----------------------------------------assert.json-----------------------------------------------
    self.G_ASSET_JSON = os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK_CFG, 'asset.json'))
    asset_json = os.path.join(self.G_CONFIG_PATH, 'asset.json')
    if os.path.exists(asset_json):
        CLASS_COMMON_UTIL.python_copy(asset_json, self.G_WORK_RENDER_TASK_CFG)
        # NOTE(review): eval on asset.json content; json parsing would be safer
        self.G_ASSET_JSON_DICT = eval(open(asset_json, 'r').read())
    print '[BASE.init.end.....]'
def vray_distribute_root(self):
    """Write vray_dr.cfg on the master node for V-Ray distributed rendering.

    Builds the slave-node list from G_SCHEDULER_CLUSTER_NODES, picks the
    cfg options matching the V-Ray plugin major version, writes the file
    to a temp path and copies it into the 3ds Max plugcfg directory for
    the job's Max version. No-op unless MAX_VRAY_DISTRIBUTE is set.
    """
    if self.MAX_VRAY_DISTRIBUTE:
        self.G_DEBUG_LOG.info('---------TODO Vray dist---------')
        content = ""
        content_top_list = []
        content_top = ""
        content_bottom = ""
        ##get IP
        #local_ip = gethostbyname(gethostname()) #169.254.41.243
        ip_list = gethostbyname_ex(gethostname())  #('GA010', [], ['10.60.1.10', '169.254.41.243'])
        local_ip = ip_list[2][0]  #get localhost IP:10.60.1.10
        self.G_DEBUG_LOG.info('LOCAL IP:%s' % local_ip)
        node_ip_list = []
        for node_ip in self.G_SCHEDULER_CLUSTER_NODES.split(','):
            node_ip_list.append(node_ip)
        self.G_DEBUG_LOG.info('NODE IP LIST:%s' % node_ip_list)
        node_ip_num = len(node_ip_list)  #numbers of node ip
        if node_ip_list:
            for node_ip in node_ip_list:
                if self.G_CG_CONFIG_DICT['plugins']['vray'].startswith('3'):
                    # vray 3 slave entries also carry the listen port 20204
                    content_top_list.append('%s 1 20204\n' % (node_ip))
                else:
                    content_top_list.append('%s 1\n' % (node_ip))
            ####write cfg####
            for i in range(len(content_top_list)):
                content_top = content_top + content_top_list[i]
            if self.G_CG_CONFIG_DICT['plugins']['vray'].startswith('1'):
                self.G_DEBUG_LOG.info('vray1\n')
                content_bottom = """restart_slaves 1
list_in_scene 1
"""
            elif self.G_CG_CONFIG_DICT['plugins']['vray'].startswith('2'):
                self.G_DEBUG_LOG.info('vray2\n')
                content_bottom = """restart_slaves 1
list_in_scene 1
max_servers %s
""" % (node_ip_num)
            elif self.G_CG_CONFIG_DICT['plugins']['vray'].startswith('3'):
                self.G_DEBUG_LOG.info('vray3\n')
                content_bottom = """restart_slaves 1
list_in_scene 1
max_servers %s
use_local_machine 1
transfer_missing_assets 1
use_cached_assets 1
cache_limit_type 2
cache_limit 100.000000
""" % (node_ip_num)
            elif self.G_CG_CONFIG_DICT['plugins']['vray'].startswith('0'):
                self.G_DEBUG_LOG.info('vray0000\n')
                content_bottom = """restart_slaves 1
list_in_scene 1
max_servers %s
use_local_machine 1
transfer_missing_assets 1
use_cached_assets 1
cache_limit_type 2
cache_limit 100.000000
""" % (node_ip_num)
            else:
                CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG, 'vray version error\n')
            content = content_top.strip() + '\n' + content_bottom.strip()
            self.G_DEBUG_LOG.info(content)
            configure_tmp_path = 'D:\\work\\render\\%s\\cfg\\vray_dr.cfg' % (
                self.G_TASK_ID)  #tmp path
            with open(configure_tmp_path, 'w') as f:
                f.write(content)
            # Max's plugcfg layout changed in 2013 (extra en-US directory)
            version_year = self.G_CG_VERSION[-4:]
            if int(version_year) < 2013:
                configure_path = 'C:\\users\\enfuzion\\AppData\\Local\\Autodesk\\3dsmax\\%s - 64bit\\ENU\\plugcfg' % (
                    version_year)
            else:
                configure_path = 'C:\\Users\\enfuzion\\AppData\\Local\\Autodesk\\3dsMax\\%s - 64bit\\ENU\\en-US\\plugcfg' % (
                    version_year)
            self.G_DEBUG_LOG.info(configure_path)
            file_path = os.path.join(configure_path, "vray_dr.cfg")
            self.G_DEBUG_LOG.info(file_path)
            if not os.path.exists(configure_path):
                os.makedirs(configure_path)
            shutil.copy(configure_tmp_path, file_path)
            #os.system("copy %s %s" % (configure_tmp_path,file_path))
            #cmdcp=subprocess.Popen("copy %s %s" % (configure_tmp_path,configure_path),stdin = subprocess.PIPE,stdout = subprocess.PIPE, stderr = subprocess.STDOUT, shell = True)
            if os.path.isfile(file_path):
                self.G_DEBUG_LOG.info('Success\n')
        else:
            self.G_DEBUG_LOG.info('node_ip_list is empty\n')
        # give the slave nodes time to pick up the new cfg / restart
        time.sleep(120)
def __init__(self, **param_dict):
    """Base initializer (Python 2 variant, explicit key binding).

    Pulls the required 'G*' keys from param_dict one by one (no exec),
    sets up logging and work directories, kafka constants, loads
    task.json / asset.json and parses the frame range.
    """
    print '[BASE.init.start.....]'
    self.G_RENDER_CORE_TYPE = 'cpu'
    self.G_JOB_ID = param_dict['G_JOB_ID']
    self.G_USER_ID = param_dict['G_USER_ID']
    self.G_USER_ID_PARENT = param_dict['G_USER_ID_PARENT']
    self.G_TASK_ID = param_dict['G_TASK_ID']
    self.G_SCRIPT_POOL = param_dict['G_SCRIPT_POOL']
    self.G_ACTION = param_dict['G_ACTION']
    self.G_CG_FRAMES = param_dict['G_CG_FRAMES']
    self.G_CG_LAYER_NAME = param_dict['G_CG_LAYER_NAME']
    self.G_SYS_ARGVS = param_dict[
        'G_SYS_ARGVS']  #taskid,jobindex,jobid,nodeid,nodename
    self.G_JOB_NAME = self.G_SYS_ARGVS[3].replace('"', '')
    self.G_NODE_NAME = self.G_SYS_ARGVS[5]
    self.G_WORK = 'c:/work'
    self.G_LOG_WORK = 'C:/log/render'
    self.G_RENDER_OS = param_dict['G_RENDER_OS']
    if self.G_RENDER_OS == 'Linux':
        self.G_LOG_WORK = '/tmp/nzs-data/log/render'
        self.G_WORK = '/tmp/nzs-data/work'
    # optional keys fall back to single-tile defaults
    self.G_CG_TILE = '0'
    self.G_CG_TILE_COUNT = '1'
    print param_dict
    if 'G_CG_TILE' in param_dict:
        self.G_CG_TILE = param_dict['G_CG_TILE']
    if 'G_CG_TILE_COUNT' in param_dict:
        self.G_CG_TILE_COUNT = param_dict['G_CG_TILE_COUNT']
    if 'G_SCHEDULER_CLUSTER_ID' in param_dict:
        self.G_SCHEDULER_CLUSTER_ID = param_dict['G_SCHEDULER_CLUSTER_ID']
    if 'G_SCHEDULER_CLUSTER_NODES' in param_dict:
        self.G_SCHEDULER_CLUSTER_NODES = param_dict[
            'G_SCHEDULER_CLUSTER_NODES']
    #-----------------------------------------log-----------------------------------------------
    self.G_DEBUG_LOG = logging.getLogger('debug_log')
    self.G_RENDER_LOG = logging.getLogger('render_log')
    self.init_log()
    #-----------------------------------------work directory-----------------------------------------------
    self.G_WORK_RENDER = os.path.normpath(
        os.path.join(self.G_WORK, 'render'))
    self.G_WORK_RENDER_TASK = os.path.normpath(
        os.path.join(self.G_WORK_RENDER, self.G_TASK_ID))
    #___abort___self.G_WORK_RENDER_TASK_BLOCK=os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK,'block'))
    self.G_WORK_RENDER_TASK_CFG = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'cfg'))
    #___abort___self.G_WORK_RENDER_TASK_GRAB=os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK,'grab'))
    #___abort___self.G_WORK_RENDER_TASK_MAX=os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK,'max'))
    #___abort___self.G_WORK_RENDER_TASK_MAXBAK=os.path.normpath(os.path.join(self.G_WORK_RENDER_TASK,'maxbak'))
    self.G_WORK_RENDER_TASK_OUTPUT = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'output'))
    self.G_WORK_RENDER_TASK_OUTPUTBAK = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'outputbak'))
    self.G_WORK_RENDER_TASK_SMALL = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'small'))
    self.make_dir()
    #-----------------------------------------kafka-----------------------------------------------
    self.G_KAFKA_HOST = '10.60.96.142'
    self.G_KAFKA_PORT = 9092
    self.G_KAFKA_TOPIC = 'dev-munu-topic-01'
    self.G_START_TIME = ''
    self.G_SMALL_PIC = ''
    self.G_END_TIME = ''
    #-----------------------------------------task.json-----------------------------------------------
    self.G_TASK_JSON = param_dict['G_TASK_JSON']
    self.G_DEBUG_LOG.info(self.G_TASK_JSON)
    if not os.path.exists(self.G_TASK_JSON):
        CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG,
                                         'task.json not exists')
    # NOTE(review): eval on task.json content; json parsing would be safer
    self.G_TASK_JSON_DICT = eval(open(self.G_TASK_JSON, 'r').read())
    #jsonStr = (open(self.G_TASK_JSON, 'r').read())
    #self.G_TASK_JSON_DICT=json.load(jsonStr)
    self.G_ZONE = self.G_TASK_JSON_DICT['system_info']['common']['zone']
    self.G_PLATFORM = self.G_TASK_JSON_DICT['system_info']['common'][
        'platform']
    self.G_TILES_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']['tiles_path'])
    self.G_INPUT_CG_FILE = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']['input_cg_file'])
    self.G_CHANNEL = self.G_TASK_JSON_DICT['system_info']['common'][
        'channel']
    self.G_INPUT_PROJECT_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']
        ['input_project_path'])
    self.G_CONFIG_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']['config_path'] +
        '/')
    self.G_SMALL_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']['small_Path'])
    self.G_OUTPUT_USER_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']['output_user_path'])
    self.G_INPUT_USER_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']['input_user_path'])
    self.G_PLUGIN_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']['plugin_path'])
    self.G_TEMP_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']['temp_path'])
    self.G_PRE_PY = os.path.join(self.G_WORK_RENDER_TASK_CFG, 'pre.py')
    self.G_POST_PY = os.path.join(self.G_WORK_RENDER_TASK_CFG, 'post.py')
    #-----------------------------------------assert.json-----------------------------------------------
    self.G_TASK_JSON_DIR = os.path.dirname(self.G_TASK_JSON)
    self.G_ASSET_JSON = os.path.join(self.G_TASK_JSON_DIR, 'asset.json')
    # NOTE(review): eval on asset.json content; json parsing would be safer
    self.G_ASSET_JSON_DICT = eval(open(self.G_ASSET_JSON, 'r').read())
    #-----------------------------------------frames-----------------------------------------------
    self.G_CG_START_FRAME = None
    self.G_CG_END_FRAME = None
    self.G_CG_BY_FRAME = None
    # frame spec: start[-end[[by]]], e.g. '1-100[2]'; negatives allowed
    patt = '(-?\d+)(?:-?(-?\d+)(?:\[(-?\d+)\])?)?'
    m = re.match(patt, self.G_CG_FRAMES)
    if m != None:
        self.G_CG_START_FRAME = m.group(1)
        self.G_CG_END_FRAME = m.group(2)
        self.G_CG_BY_FRAME = m.group(3)
        if self.G_CG_END_FRAME == None:
            # single-frame spec: end defaults to start
            self.G_CG_END_FRAME = self.G_CG_START_FRAME
        if self.G_CG_BY_FRAME == None:
            self.G_CG_BY_FRAME = '1'
    else:
        print 'frames is not match'
def __init__(self, **paramDict):
    """Analyze-node initializer (Python 2 variant).

    Binds the required 'G*' keys, builds the analyse logger by hand
    (file handler at DEBUG plus console handler at INFO) and loads
    task.json from the server path.
    """
    print '[BASE.init.start.....]'
    #self.G_DOPY_NAME='do.py'
    self.G_RENDER_CORE_TYPE = 'cpu'
    self.G_JOB_ID = paramDict['G_JOB_ID']
    self.G_USER_ID = paramDict['G_USER_ID']
    self.G_USER_ID_PARENT = paramDict['G_USER_ID_PARENT']
    self.G_TASK_ID = paramDict['G_TASK_ID']
    self.G_SCRIPT_POOL = paramDict['G_SCRIPT_POOL']
    self.G_ACTION = paramDict['G_ACTION']
    self.G_SYS_ARGVS = paramDict[
        'G_SYS_ARGVS']  #taskid,jobindex,jobid,nodeid,nodename
    self.G_JOB_NAME = self.G_SYS_ARGVS[3].replace('"', '')
    self.G_NODE_NAME = self.G_SYS_ARGVS[5]
    #self.G_HELPER_WORK='C:/WORK/helper'
    self.G_WORK = 'c:/work'
    self.G_LOG_RENDER = 'C:/LOG/render'
    self.G_RENDER_OS = paramDict['G_RENDER_OS']
    if self.G_RENDER_OS == 'Linux':
        self.G_LOG_RENDER = '/tmp/nzs-data/log/render'
        #self.G_HELPER_WORK='/tmp/nzs-data/work/helper'
        self.G_WORK = '/tmp/nzs-data/work'
    self.G_WORK_RENDER = os.path.normpath(
        os.path.join(self.G_WORK, 'render'))
    self.G_WORK_RENDER_TASK = os.path.normpath(
        os.path.join(self.G_WORK_RENDER, self.G_TASK_ID))
    self.G_WORK_RENDER_TASK_CFG = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'cfg'))
    #-----------------------------------------init log-----------------------------------------------
    self.G_ANALYZE_LOG = logging.getLogger('analyse_log')
    fm = logging.Formatter("%(asctime)s %(levelname)s - %(message)s",
                           "%Y-%m-%d %H:%M:%S")
    analyseLogPath = os.path.join(self.G_LOG_RENDER, self.G_TASK_ID)
    analyseLog = os.path.join(analyseLogPath, (self.G_JOB_NAME + '.txt'))
    if not os.path.exists(analyseLogPath):
        os.makedirs(analyseLogPath)
    self.G_ANALYZE_LOG.setLevel(logging.DEBUG)
    # file handler gets everything; console only INFO and above
    renderLogHandler = logging.FileHandler(analyseLog)
    renderLogHandler.setFormatter(fm)
    self.G_ANALYZE_LOG.addHandler(renderLogHandler)
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    self.G_ANALYZE_LOG.addHandler(console)
    #-----------------------------------------kafka-----------------------------------------------
    self.G_KAFKA_HOST = '10.60.96.142'
    self.G_KAFKA_PORT = 9092
    self.G_KAFKA_TOPIC = 'dev-munu-topic-01'
    #-----------------------------------------task.json-----------------------------------------------
    self.G_TASK_JSON = paramDict['G_TASK_JSON']
    self.G_ANALYZE_LOG.info(self.G_TASK_JSON)
    if not os.path.exists(self.G_TASK_JSON):
        CLASS_COMMON_UTIL.error_exit_log(self.G_ANALYZE_LOG,
                                         'task.json not exists')
    # NOTE(review): eval on task.json content; json parsing would be safer
    self.G_TASK_JSON_DICT = eval(open(self.G_TASK_JSON, "r").read())
    #jsonStr = (open(self.G_TASK_JSON, "r").read())
    #self.G_TASK_JSON_DICT=json.load(jsonStr)
    self.G_ZONE = self.G_TASK_JSON_DICT['system_info']['common']["zone"]
    self.G_PLATFORM = self.G_TASK_JSON_DICT['system_info']['common'][
        "platform"]
    self.G_TILES_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']["tiles_path"])
    self.G_INPUT_CG_FILE = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']["input_cg_file"])
    self.G_CHANNEL = self.G_TASK_JSON_DICT['system_info']['common'][
        "channel"]
    self.G_INPUT_PROJECT_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']
        ["input_project_path"])
    self.G_CONFIG_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']["config_path"] +
        '/')
    self.G_SMALL_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']["small_Path"])
    self.G_OUTPUT_USER_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']["output_user_path"])
    self.G_INPUT_USER_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']["input_user_path"])
    self.G_PLUGIN_PATH = os.path.normpath(
        self.G_TASK_JSON_DICT['system_info']['common']["plugin_path"])
    self.G_PRE_PY = os.path.join(self.G_WORK_RENDER_TASK_CFG, 'pre.py')
    self.G_POST_PY = os.path.join(self.G_WORK_RENDER_TASK_CFG, 'post.py')
    print '[BASE.init.end.....]'
def __init__(self, **param_dict):
    """Initialize the render-node job context.

    Adopts every ``G*``-prefixed keyword as an instance attribute
    (G_JOB_NAME, G_JOB_ID, G_CG_FRAMES, G_CG_LAYER_NAME, G_CG_OPTION,
    G_CG_TILE, G_CG_TILECOUNT, G_CG_NAME, G_ACTION, G_USER_ID, G_TASK_ID,
    G_TASK_JSON, G_USER_ID_PARENT, G_SCRIPT_POOL, G_RENDER_OS,
    G_SYS_ARGVS, G_NODE_PY, G_SCHEDULER_CLUSTER_ID,
    G_SCHEDULER_CLUSTER_NODES), then derives identity fields from
    G_SYS_ARGVS, sets up loggers and the per-task work tree, copies and
    parses task.json / asset.json, and prepares the fee .ini location.
    Exits via CLASS_COMMON_UTIL.error_exit_log if task.json is missing.
    """
    print('[BASE.init.start.....]')
    # Adopt every caller-supplied G* parameter as an instance attribute.
    # (was a string-built exec(); setattr is the safe equivalent)
    for key, value in param_dict.items():
        if key.startswith('G'):
            setattr(self, key, value)
    # positional argv layout:
    # [1]=munu_task_id [2]=munu_job_id [3]=munu_job_name
    # [4]=node_id (e.g. 11338764789520) [5]=node_name (e.g. GD232)
    # [6]=recommit flag, default "0"
    self.G_MUNU_ID = self.G_SYS_ARGVS[1]
    self.G_JOB_ID = self.G_SYS_ARGVS[2]
    self.G_JOB_NAME = self.G_SYS_ARGVS[3]
    self.G_NODE_ID = self.G_SYS_ARGVS[4]
    self.G_NODE_NAME = self.G_SYS_ARGVS[5]
    self.G_RECOMMIT_FLAG = self.G_SYS_ARGVS[6]
    self.G_ACTION_ID = (self.G_ACTION + '_' + self.G_MUNU_ID + '_' +
                        self.G_JOB_ID)
    self.G_CG_PROCESS_FLAG = 0  # used with B:\config\cg_process.json
    # work/log roots; G_RENDER_OS: '0' = linux, '1' = windows
    self.G_WORK = 'c:/work'
    self.G_LOG_WORK = 'C:/log/render'
    if self.G_RENDER_OS == '0':
        self.G_LOG_WORK = '/tmp/nzs-data/log/render'
        self.G_WORK = '/tmp/nzs-data/work'
    #-----------------------------------------log-----------------------------------------------
    self.G_DEBUG_LOG = logging.getLogger('debug_log')
    self.G_RENDER_LOG = logging.getLogger('render_log')
    self.init_log()
    #-----------------------------------------analyse frame-----------------------------------------------
    if 'G_CG_FRAMES' in param_dict:
        self.G_CG_START_FRAME = None
        self.G_CG_END_FRAME = None
        self.G_CG_BY_FRAME = None
        # accepts "start", "start-end" or "start-end[by]"; raw string
        # fixes the invalid-escape warnings of the former plain string
        patt = r'(-?\d+)(?:-?(-?\d+)(?:\[(-?\d+)\])?)?'
        m = re.match(patt, self.G_CG_FRAMES)
        if m is not None:
            self.G_CG_START_FRAME = m.group(1)
            self.G_CG_END_FRAME = m.group(2)
            self.G_CG_BY_FRAME = m.group(3)
            if self.G_CG_END_FRAME is None:
                self.G_CG_END_FRAME = self.G_CG_START_FRAME
            if self.G_CG_BY_FRAME is None:
                self.G_CG_BY_FRAME = '1'
        else:
            print('frames is not match')
    # default tiling: tile '0' of a single-tile ('1') job when the
    # caller omitted the value or passed None / empty string
    if getattr(self, 'G_CG_TILE', None) in (None, ''):
        self.G_CG_TILE = '0'
    if getattr(self, 'G_CG_TILE_COUNT', None) in (None, ''):
        self.G_CG_TILE_COUNT = '1'
    #-----------------------------------------work directory-----------------------------------------------
    self.G_WORK_RENDER = os.path.normpath(
        os.path.join(self.G_WORK, 'render'))
    self.G_WORK_RENDER_TASK = os.path.normpath(
        os.path.join(self.G_WORK_RENDER, self.G_TASK_ID))
    self.G_WORK_RENDER_TASK_CFG = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'cfg'))
    self.G_WORK_RENDER_TASK_OUTPUT = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'output'))
    self.G_WORK_RENDER_TASK_OUTPUTBAK = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'outputbak'))
    self.G_WORK_RENDER_TASK_SMALL = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK, 'small'))
    self.make_dir()
    #-----------------------------------------task.json-----------------------------------------------
    self.G_DEBUG_LOG.info(self.G_TASK_JSON)
    if not os.path.exists(self.G_TASK_JSON):
        CLASS_COMMON_UTIL.error_exit_log(self.G_DEBUG_LOG,
                                         'task.json not exists')
    # copy task.json into the per-task cfg dir and work from that copy
    CLASS_COMMON_UTIL.python_copy(
        os.path.normpath(self.G_TASK_JSON),
        os.path.normpath(self.G_WORK_RENDER_TASK_CFG))
    self.G_TASK_JSON = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK_CFG, 'task.json'))
    # NOTE(review): task.json holds a Python-literal dict, hence eval()
    # rather than json.load -- never point this at untrusted data
    # (ast.literal_eval would be the safe drop-in).  ``with`` now closes
    # the handle that was previously leaked.
    with codecs.open(self.G_TASK_JSON, 'r', 'utf-8') as task_json_file:
        self.G_TASK_JSON_DICT = eval(task_json_file.read())
    self.G_DEBUG_LOG.info(str(self.G_TASK_JSON_DICT))
    self.G_CG_CONFIG_DICT = self.G_TASK_JSON_DICT['software_config']
    self.G_CG_VERSION = (self.G_CG_CONFIG_DICT['cg_name'] + ' ' +
                         self.G_CG_CONFIG_DICT['cg_version'])
    # hoist the repeated ['system_info']['common'] lookup
    common_cfg = self.G_TASK_JSON_DICT['system_info']['common']
    self.G_ZONE = common_cfg['zone']
    self.G_PLATFORM = common_cfg['platform']
    self.G_AUTO_COMMIT = str(common_cfg['auto_commit'])
    self.G_TILES_PATH = os.path.normpath(common_cfg['tiles_path'])
    self.G_INPUT_CG_FILE = os.path.normpath(common_cfg['input_cg_file'])
    self.G_CHANNEL = common_cfg['channel']
    self.G_INPUT_PROJECT_PATH = os.path.normpath(
        common_cfg['input_project_path'])
    self.G_CONFIG_PATH = os.path.normpath(common_cfg['config_path'] + '/')
    self.G_SMALL_PATH = os.path.normpath(common_cfg['small_Path'])
    self.G_INPUT_USER_PATH = os.path.normpath(
        common_cfg['input_user_path'])
    self.G_PLUGIN_PATH = os.path.normpath(common_cfg['plugin_path'])
    self.G_TEMP_PATH = os.path.normpath(common_cfg['temp_path'])
    self.G_GRAB_PATH = os.path.normpath(common_cfg['grab_path'])
    self.G_OUTPUT_USER_PATH = os.path.normpath(
        common_cfg['output_user_path'])
    # append a per-small-task component: the last path element with its
    # leading G_TASK_ID prefix replaced by G_SMALL_TASK_ID
    self.G_OUTPUT_USER_PATH = os.path.join(
        self.G_OUTPUT_USER_PATH,
        self.G_SMALL_TASK_ID +
        self.G_OUTPUT_USER_PATH.split(os.sep)[-1][len(self.G_TASK_ID):])
    ##cpu/gpu -- defaults to 'cpu' when task.json does not specify it
    self.G_RENDER_CORE_TYPE = common_cfg.get('render_core_type', 'cpu')
    self.G_TIPS_JSON = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK_CFG, 'tips.json'))
    self.G_DRIVERC_7Z = os.path.normpath('d:/7-Zip/7z.exe')
    #-----------------------------------------assert.json-----------------------------------------------
    self.G_ASSET_JSON = os.path.normpath(
        os.path.join(self.G_WORK_RENDER_TASK_CFG, 'asset.json'))
    asset_json = os.path.join(self.G_CONFIG_PATH, 'asset.json')
    if os.path.exists(asset_json):
        CLASS_COMMON_UTIL.python_copy(asset_json,
                                      self.G_WORK_RENDER_TASK_CFG)
        # same Python-literal format as task.json; see NOTE above
        with codecs.open(asset_json, 'r', 'utf-8') as asset_json_file:
            self.G_ASSET_JSON_DICT = eval(asset_json_file.read())
    #-----------------------------------------fee-----------------------------------------------
    self.G_FEE_PARSER = configparser.ConfigParser()
    if not self.G_FEE_PARSER.has_section('render'):
        self.G_FEE_PARSER.add_section('render')
    fee_dir = os.path.normpath(
        os.path.join(self.G_WORK, 'fee', self.G_PLATFORM, self.G_MUNU_ID))
    if not os.path.exists(fee_dir):
        os.makedirs(fee_dir)
    # c:/work/fee/<platform>/<munu_task_id>/<munu_job_id>_<resubmit>.ini
    fee_filename = '%s_%s.ini' % (self.G_JOB_ID, self.G_RECOMMIT_FLAG)
    self.G_FEE_FILE = os.path.join(fee_dir, fee_filename)
    #----------------------------------------------turn global param to str---------------------------------------------------
    # normalize any bytes-valued attribute to str
    # (was a string-built exec(); getattr/setattr is the safe equivalent)
    for key, value in list(self.__dict__.items()):
        if isinstance(value, bytes):
            setattr(self, key, CLASS_COMMON_UTIL.bytes_to_str(value))
    print('[BASE.init.end.....]')