def _reload_module(module):
    """
    Reload an already-imported module and merge surviving attributes.

    Only works for modules that have already been loaded; a module that was
    never imported must be loaded with ``__import__`` instead.

    Parameters
    ----------
    module : module
        The already-loaded module object to reload.

    Returns
    -------
    (success, saved_dict, error) : tuple
        success    -- True when the reload succeeded, False otherwise.
        saved_dict -- snapshot of the module ``__dict__`` taken before the
                      reload, so callers can restore the old state.
        error      -- formatted traceback string; empty on success.
    """
    saved_dict = {}
    saved_dict.update(getattr(module, '__dict__', {}))
    success, error = True, ''
    try:
        reload(module)
        new_dict = getattr(module, '__dict__', {})
        oldnames = set(saved_dict)
        newnames = set(new_dict)
        # Merge attributes that exist both before and after the reload so
        # live references pick up the updated definitions.
        for attr_name in newnames & oldnames:
            new_dict[attr_name] = __update(saved_dict[attr_name],
                                           new_dict[attr_name])
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
    # not swallowed.
    except Exception:
        # Reload failed -- restore the module to its previous state.
        # BUG FIX: re-fetch the module dict here; when reload() itself
        # raised, the `new_dict` local above was never assigned and the
        # original code died with a NameError instead of restoring.
        new_dict = getattr(module, '__dict__', {})
        new_dict.clear()
        new_dict.update(saved_dict)
        error, success = traceback.format_exc(), False
        logger.fatal('Fail to reload module = %s', module)
        logger.fatal(error)
    # Return the reload result plus the old module data for recovery.
    return success, saved_dict, error
def _reset_hard_attack_num_failed(self, err, req, timer):
    """Log the failure and return a serialized ResetHardAttackNumRes with status -1."""
    logger.fatal("Reset anneal hard attack num failed[reason=%s]" % err)
    reply = anneal_pb2.ResetHardAttackNumRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Reset anneal hard attack num failed"
        "[user_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _finish_battle_failed(self, err, req, timer):
    """Log the failure and return a serialized FinishTransferArenaBattleRes with status -1."""
    logger.fatal("Finish transfer battle failed[reason=%s]" % err)
    reply = transfer_arena_pb2.FinishTransferArenaBattleRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Finish transfer battle failed[req=%s][res=%s][consume=%d]"
                  % (req, reply, timer.count_ms()))
    return packed
def _receive_failed(self, err, req, timer):
    """Log the failure and return a serialized InternalTransferNoticeRes with status -1."""
    logger.fatal("Receive transfer notice failed[reason=%s]" % err)
    reply = internal_pb2.InternalTransferNoticeRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Receive transfer notice failed[req=%s][res=%s][consume=%d]"
                  % (req, reply, timer.count_ms()))
    return packed
def _receive_clear_worldboss_merit_failed(self, err, req, timer):
    """Log the failure and return a serialized ReceiveClearWorldBossMeritRes with status -1."""
    logger.fatal("Receive clear worldboss merit failed[reason=%s]" % err)
    reply = boss_pb2.ReceiveClearWorldBossMeritRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Receive clear worldboss merit failed"
        "[user_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _buy_failed(self, err, req, timer):
    """Log the failure and return a serialized BuyChallengeTimesRes with status -1."""
    logger.fatal("Buy transfer attack times failed[reason=%s]" % err)
    reply = transfer_arena_pb2.BuyChallengeTimesRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Buy transfer attack times failed[req=%s][res=%s][consume=%d]"
                  % (req, reply, timer.count_ms()))
    return packed
def _receive_notice_failed(self, err, req, timer):
    """Log the failure and return a serialized BattleResultNoticeRes with status -1."""
    logger.fatal("Receive battle notice failed[reason=%s]" % err)
    reply = internal_pb2.BattleResultNoticeRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Receive battle notice failed[user_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _refresh_chat_failed(self, err, req, timer):
    """Log the failure and return a serialized RefreshStatusRes with status -1."""
    logger.fatal("Refresh chat failed[reason=%s]" % err)
    reply = friend_pb2.RefreshStatusRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Refresh chat notice failed[req=%s][res=%s][consume=%d]"
                  % (req, reply, timer.count_ms()))
    return packed
def _clear_lucky_event_failed(self, err, req, timer):
    """Log the failure and return a serialized ClearLuckyEventRes with status -1."""
    logger.fatal("Clear lucky event failed[reason=%s]" % err)
    reply = map_pb2.ClearLuckyEventRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Clear lucky event failed[user_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _abandon_node_failed(self, err, req, timer):
    """Log the failure and return a serialized AbandonNodeRes with status -1."""
    logger.fatal("Abandon node failed[reason=%s]" % err)
    reply = event_pb2.AbandonNodeRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Abandon node failed[user_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, reply, timer.count_ms()))
    return packed
def _rematch_node_failed(self, err, req, timer):
    """Log the failure and return a serialized RematchNodeRes with status -1."""
    logger.fatal("Rematch node failed[reason=%s]" % err)
    reply = map_pb2.RematchNodeRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Rematch node failed[user_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, reply, timer.count_ms()))
    return packed
def _trigger_event_failed(self, err, type, req, timer):
    """Log the failure and return a serialized TriggerEventRes with status -1.

    NOTE(review): the unused `type` parameter shadows the builtin but is
    part of the caller-visible signature, so it is kept as-is.
    """
    logger.fatal("Trigger event failed[reason=%s]" % err)
    reply = map_pb2.TriggerEventRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Trigger event failed[user_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, reply, timer.count_ms()))
    return packed
def _query_suggested_country_failed(self, err, req, timer):
    """Log the failure and return a serialized QuerySuggestedCountryRes with status -1."""
    logger.fatal("Query suggested country failed[reason=%s]" % err)
    reply = monarch_pb2.QuerySuggestedCountryRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Query suggested country failed[req=%s][res=%s][consume=%d]"
                  % (req, reply, timer.count_ms()))
    return packed
def _query_failed(self, err, req, timer):
    """Log the failure and return a serialized QueryTransferArenaRes with status -1."""
    logger.fatal("Query transfer arena failed[reason=%s]" % err)
    reply = transfer_arena_pb2.QueryTransferArenaRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Query transfer arena failed[req=%s][res=%s][consume=%d]"
                  % (req, reply, timer.count_ms()))
    return packed
def _check_failed(self, err, req, timer):
    """Log the failure and return a serialized UnitCheckLegendCityRes with status -1."""
    logger.fatal("Check legend city info failed[reason=%s]" % err)
    reply = unit_pb2.UnitCheckLegendCityRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Check legend city info failed"
        "[city_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _add_prosperity_failed(self, err, req, timer):
    """Log the failure and return a serialized InternalAddUnionProsperityRes with status -1."""
    logger.fatal("Add prosperity failed[reason=%s]" % err)
    reply = internal_union_pb2.InternalAddUnionProsperityRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Add prosperity failed"
        "[union_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _finish_mission_failed(self, err, req, timer):
    """Log the failure and return a serialized FinishMissionRes with status -1."""
    logger.fatal("Finish mission failed[reason=%s]" % err)
    reply = mission_pb2.FinishMissionRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Finish mission failed[user_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, reply, timer.count_ms()))
    return packed
def _query_failed(self, err, req, timer):
    """Log the failure and return a serialized QueryCommonWorldBossRes with status -1."""
    logger.fatal("Query common worldboss failed[reason=%s]" % err)
    reply = boss_pb2.QueryCommonWorldBossRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Query common worldboss failed[req=%s][res=%s][consume=%d]"
                  % (req, reply, timer.count_ms()))
    return packed
def _get_draw_status_failed(self, err, req, timer):
    """Log the failure and return a serialized QueryDrawStatusRes with status -1."""
    logger.fatal("Get draw status failed[reason=%s]" % err)
    reply = wineShop_pb2.QueryDrawStatusRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Get draw status failed[user_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _query_melee_ranking_failed(self, err, req, timer):
    """Log the failure and return a serialized QueryArenaInfoRes with status -1."""
    logger.fatal("Query melee ranking failed[reason=%s]" % err)
    reply = arena_pb2.QueryArenaInfoRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Query melee ranking failed[user_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _draw_treasure_failed(self, err, req, timer):
    """Log the failure and return a serialized TurntableDrawRes with status -1."""
    logger.fatal("Lucky treasure draw failed[reason=%s]" % err)
    reply = activity_pb2.TurntableDrawRes()
    reply.status = -1
    packed = reply.SerializeToString()
    # NOTE: notice message says "Lucky draw" (not "Lucky treasure draw") in
    # the original; preserved verbatim.
    logger.notice("Lucky draw failed[user_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, reply, timer.count_ms()))
    return packed
def _draw_failed(self, err, req, timer):
    """Log the failure and return a serialized WineShopDrawRes with status -1."""
    logger.fatal("Lucky draw failed[reason=%s]" % err)
    reply = wineShop_pb2.WineShopDrawRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Lucky draw failed[user_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, reply, timer.count_ms()))
    return packed
def _awaken_failed(self, err, req, timer):
    """Log the failure and return a serialized AwakeningHeroRes with status -1."""
    logger.fatal("Awaken hero failed[reason=%s]" % err)
    reply = hero_pb2.AwakeningHeroRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Awaken hero failed"
        "[user_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _query_chest_failed(self, err, req, timer):
    """Log the failure and return a serialized QueryChestRes with status -1."""
    logger.fatal("Query chest failed[reason=%s]" % err)
    reply = chest_pb2.QueryChestRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Query chest failed[user_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, reply, timer.count_ms()))
    return packed
def biobambam_filter_alignments(self, bam_file_in, bam_file_out):  # pylint: disable=no-self-use
    """
    Sorts and filters the bam file.

    It is important that all duplicate alignments have been removed. This
    can be run as an intermediate step, but should always be run as a
    check to ensure that the files are sorted and duplicates have been
    removed.

    Parameters
    ----------
    bam_file_in : str
        Location of the input bam file
    bam_file_out : str
        Location of the output bam file

    Returns
    -------
    bool
        True on success, False when piping through bamsormadup or copying
        the result fails.
    """
    td_list = bam_file_in.split("/")
    logger.info("BIOBAMBAM: bam_file_in: " + bam_file_in)
    logger.info("BIOBAMBAM: bam_file_out: " + bam_file_out)

    # Intermediate files are created alongside the input file.
    tmp_dir = "/".join(td_list[0:-1])
    command_line = 'bamsormadup --tmpfile=' + tmp_dir
    bam_tmp_out = tmp_dir + '/' + td_list[-1] + '.filtered.tmp.bam'
    logger.info("BIOBAMBAM: command_line: " + command_line)

    try:
        # BUG FIX: BAM is a binary format -- open both ends of the pipe in
        # binary mode rather than text mode.
        with open(bam_file_in, "rb") as f_in:
            with open(bam_tmp_out, "wb") as f_out:
                process = subprocess.Popen(command_line, shell=True,
                                           stdin=f_in, stdout=f_out)
                process.wait()
    except (IOError, OSError) as msg:
        logger.fatal("I/O error({0}) - bamsormadup: {1}\n{2}".format(
            msg.errno, msg.strerror, command_line))
        return False

    try:
        # Stream the filtered file into place in fixed-size chunks instead
        # of slurping the entire BAM into memory at once.
        with open(bam_file_out, "wb") as f_out:
            with open(bam_tmp_out, "rb") as f_in:
                for chunk in iter(lambda: f_in.read(1024 * 1024), b""):
                    f_out.write(chunk)
    except IOError as error:
        logger.fatal("I/O error({0}): {1}".format(error.errno, error.strerror))
        return False

    return True
def _query_anneal_record_failed(self, err, req, timer):
    """Log the failure and return a serialized QueryAnnealRecordRes with status -1."""
    logger.fatal("Query anneal record failed[reason=%s]" % err)
    reply = anneal_pb2.QueryAnnealRecordRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Query anneal record failed[req=%s][res=%s][consume=%d]"
                  % (req, reply, timer.count_ms()))
    return packed
def _start_failed(self, err, req, timer):
    """Log the failure and return a serialized InternalStartUnionAidRes with status -1."""
    logger.fatal("Start union aid failed[reason=%s]" % err)
    reply = internal_union_pb2.InternalStartUnionAidRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Start union aid failed"
        "[union_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _get_position_rank_failed(self, err, req, timer):
    """Log the failure and return a serialized UnitGetPositionRankRes with status -1."""
    logger.fatal("Get position rank failed[reason=%s]" % err)
    reply = unit_pb2.UnitGetPositionRankRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Get position rank failed"
        "[city_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def _delete_city_failed(self, err, req, timer):
    """Log the failure and return a serialized UnitAddReputationRes with status -1.

    Parameters
    ----------
    err : object
        The failure reason (logged, not inspected).
    req : object
        The original request (logged for diagnostics).
    timer : object
        Provides `.id` (logged as city_id) and `.count_ms()`.
    """
    logger.fatal("Delete city failed[reason=%s]" % err)
    res = unit_pb2.UnitAddReputationRes()
    res.status = -1
    # BUG FIX: protobuf messages expose SerializeToString();
    # `res.serializetostring()` raised AttributeError at runtime.
    response = res.SerializeToString()
    logger.notice("Delete city failed[city_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, res, timer.count_ms()))
    return response
def _read_mail_failed(self, err, req, timer):
    """Log the failure and return a serialized UseMailRes with status -1."""
    # NOTE: fatal message says "Use mail" while notice says "Read mail" in
    # the original; both preserved verbatim.
    logger.fatal("Use mail failed[reason=%s]" % err)
    reply = mail_pb2.UseMailRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice("Read mail failed[user_id=%d][req=%s][res=%s][consume=%d]"
                  % (timer.id, req, reply, timer.count_ms()))
    return packed
def _refine_upgrade_failed(self, err, req, timer):
    """Log the failure and return a serialized RefineHeroRes with status -1."""
    logger.fatal("Refine hero upgrade failed[reason=%s]" % err)
    reply = hero_pb2.RefineHeroRes()
    reply.status = -1
    packed = reply.SerializeToString()
    logger.notice(
        "Refine hero upgrade failed"
        "[user_id=%d][req=%s][res=%s][consume=%d]"
        % (timer.id, req, reply, timer.count_ms()))
    return packed
def validate_and_assess(self, genes_loc, metrics_ref_dir_loc, assess_dir_loc, public_ref_dir_loc, metrics_loc, tar_view_loc):  # pylint: disable=no-self-use
    """
    Run the three-stage TCGA CD evaluation (validation, metrics,
    assessment) as docker containers, then collect the per-participant
    JSON metrics into `metrics_loc` and tar the assessment output into
    `tar_view_loc`.

    Parameters
    ----------
    genes_loc : str
        Path to the participant's input genes file.
    metrics_ref_dir_loc : str
        Directory with the metrics reference datasets.
    assess_dir_loc : str
        Directory with the assessment datasets.
    public_ref_dir_loc : str
        Directory with the public reference data.
    metrics_loc : str
        Output path for the aggregated metrics JSON file.
    tar_view_loc : str
        Output path for the gzipped tar of assessment results.

    Returns
    -------
    bool
        True on success, False on I/O error.

    Raises
    ------
    Exception
        When any docker stage exits non-zero; `retval_stage` names the
        failing stage.
    """
    participant_id = self.configuration['participant_id']
    cancer_types = self.configuration['cancer_type']
    inputDir = os.path.dirname(genes_loc)
    inputBasename = os.path.basename(genes_loc)
    tag = self.docker_tag
    # Containers run as the current uid so output files are owned by us.
    uid = str(os.getuid())
    # Tracks which stage failed, for the error message below.
    retval_stage = 'validation'
    # Stage 1: validate the input file against the public reference.
    validation_params = [
        "docker", "run", "--rm", "-u", uid,
        '-v', inputDir + ":/app/input:ro",
        '-v', public_ref_dir_loc + ":/app/ref:ro",
        "tcga_validation:" + tag,
        '-i', "/app/input/" + inputBasename, '-r', '/app/ref/'
    ]
    #print("DEBUG: "+' '.join(validation_params),file=sys.stderr)
    retval = subprocess.call(validation_params)
    resultsDir = None
    resultsTarDir = None
    if retval == 0:
        # Stage 2: compute metrics into a scratch directory.
        retval_stage = 'metrics'
        resultsDir = tempfile.mkdtemp()
        resultsTarDir = tempfile.mkdtemp()
        metrics_params = [
            "docker", "run", "--rm", "-u", uid,
            '-v', inputDir + ":/app/input:ro",
            '-v', metrics_ref_dir_loc + ":/app/metrics:ro",
            '-v', resultsDir + ":/app/results:rw",
            "tcga_metrics:" + tag,
            '-i', "/app/input/" + inputBasename, '-m', '/app/metrics/',
            '-p', participant_id, '-o', '/app/results/', '-c'
        ]
        # Cancer types are appended as values of the trailing '-c' flag.
        metrics_params.extend(cancer_types)
        retval = subprocess.call(metrics_params)
        if retval == 0:
            # Stage 3: assessment over the computed metrics.
            retval_stage = 'assessment'
            retval = subprocess.call([
                "docker", "run", "--rm", "-u", uid,
                '-v', assess_dir_loc + ":/app/assess:ro",
                '-v', resultsDir + ":/app/results:rw",
                '-v', resultsTarDir + ":/app/resultsTar:rw",
                "tcga_assessment:" + tag,
                '-b', "/app/assess/", '-p', '/app/results/',
                '-o', '/app/resultsTar/'
            ])
    try:
        if retval == 0:
            # Create the MuG/VRE metrics file: merge every *.json produced
            # by the metrics stage into a single array.
            metricsArray = []
            for metrics_file in os.listdir(resultsDir):
                abs_metrics_file = os.path.join(resultsDir, metrics_file)
                if fnmatch.fnmatch(metrics_file, "*.json") and os.path.isfile(abs_metrics_file):
                    with io.open(abs_metrics_file, mode='r', encoding="utf-8") as f:
                        metrics = json.load(f)
                        metricsArray.append(metrics)
            with io.open(metrics_loc, mode='w', encoding="utf-8") as f:
                jdata = json.dumps(metricsArray, sort_keys=True, indent=4, separators=(',', ': '))
                # NOTE(review): Python 2 only -- `unicode` decodes the str
                # from json.dumps for the text-mode io.open handle. Breaks
                # on Python 3 (NameError).
                f.write(unicode(jdata, "utf-8"))
            # And create the MuG/VRE tar file
            with tarfile.open(tar_view_loc, mode='w:gz', bufsize=1024 * 1024) as tar:
                tar.add(resultsTarDir, arcname='data', recursive=True)
        else:
            logger.fatal("ERROR: TCGA CD evaluation failed, in step " + retval_stage)
            raise Exception("ERROR: TCGA CD evaluation failed, in step " + retval_stage)
            # NOTE(review): unreachable -- the raise above always fires.
            return False
    except IOError as error:
        logger.fatal("I/O error({0}): {1}".format(error.errno, error.strerror))
        return False
    finally:
        # Cleaning up in any case -- scratch dirs are removed whether the
        # pipeline succeeded, failed, or raised.
        if resultsDir is not None:
            shutil.rmtree(resultsDir)
        if resultsTarDir is not None:
            shutil.rmtree(resultsTarDir)
    return True
def run(self, input_files, input_metadata, output_files):
    """
    The main function to run the compute_metrics tool

    Parameters
    ----------
    input_files : dict
        List of input files - In this case there are no input files required
    input_metadata: dict
        Matching metadata for each of the files, plus any additional data
    output_files : dict
        List of the output files that are to be generated

    Returns
    -------
    output_files : dict
        List of files with a single entry.
    output_metadata : dict
        List of matching metadata for the returned files

    Raises
    ------
    Exception
        When validate_and_assess reports failure.
    """
    project_path = self.configuration.get('project', '.')
    participant_id = self.configuration['participant_id']
    # Default the metrics path to <project>/<participant>.json when the
    # caller did not provide one; always normalize to an absolute path.
    metrics_path = output_files.get("metrics")
    if metrics_path is None:
        metrics_path = os.path.join(project_path, participant_id + '.json')
    metrics_path = os.path.abspath(metrics_path)
    output_files['metrics'] = metrics_path
    # Same defaulting for the tar view: <project>/<participant>.tar.gz.
    tar_view_path = output_files.get("tar_view")
    if tar_view_path is None:
        tar_view_path = os.path.join(project_path, participant_id + '.tar.gz')
    tar_view_path = os.path.abspath(tar_view_path)
    output_files['tar_view'] = tar_view_path
    results = self.validate_and_assess(
        os.path.abspath(input_files["genes"]),
        os.path.abspath(input_files['metrics_ref_datasets']),
        os.path.abspath(input_files['assessment_datasets']),
        os.path.abspath(input_files['public_ref']),
        metrics_path,
        tar_view_path
    )
    # COMPSs barrier: block until the (possibly remote) task finishes.
    results = compss_wait_on(results)
    if results is False:
        logger.fatal("TCGA CD pipeline failed. See logs")
        raise Exception("TCGA CD pipeline failed. See logs")
        # NOTE(review): unreachable -- the raise above always fires.
        return {}, {}
    # BEWARE: Order DOES MATTER when there is a dependency from one output on another
    output_metadata = {
        "metrics": Metadata(
            # These ones are already known by the platform
            # so comment them by now
            data_type="metrics",
            file_type="TXT",
            file_path=metrics_path,
            # Reference and golden data set paths should also be here
            sources=[input_metadata["genes"].file_path],
            meta_data={
                "tool": "TCGA_CD"
            }
        ),
        "tar_view": Metadata(
            # These ones are already known by the platform
            # so comment them by now
            data_type="tool_statistics",
            file_type="TAR",
            file_path=tar_view_path,
            # Reference and golden data set paths should also be here
            sources=[metrics_path],
            meta_data={
                "tool": "TCGA_CD"
            }
        ),
    }
    return (output_files, output_metadata)