import json
import logging
import os
from pathlib import Path


def load_conf(conf_path):
    dummy_conf = {
        "auth_file_path": "headers_auth.json",
        "online_catalog_cache_file_path": "cache.json",
        "auto_create_playlist_format": "Upload List (%Y/%m/%d %H:%M:%S)"
    }
    if conf_path == "" or not os.path.exists(conf_path):
        # No config found: fall back to the defaults and try to persist them.
        conf = dummy_conf
        conf_path = os.path.expanduser("~/.yootto/config.json")
        os.makedirs(os.path.dirname(conf_path), exist_ok=True)
        try:
            with open(conf_path, "w") as f:
                json.dump(dummy_conf, f)
        except OSError:
            # Writing the default config is best-effort.
            pass
    else:
        try:
            with open(conf_path, "r") as f:
                conf = json.load(f)
        except json.JSONDecodeError as e:
            # logging.FATAL is the level constant (an int); logging.fatal() is the call.
            logging.fatal("JSONDecodeError: %s", e)
            return dummy_conf
    # Resolve relative paths against the directory containing the config file.
    basedir = os.path.dirname(os.path.abspath(conf_path))
    if not Path(conf["auth_file_path"]).is_absolute():
        conf["auth_file_path"] = os.path.join(basedir, conf["auth_file_path"])
    if not Path(conf["online_catalog_cache_file_path"]).is_absolute():
        conf["online_catalog_cache_file_path"] = os.path.join(
            basedir, conf["online_catalog_cache_file_path"])
    return conf
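# Minimal usage sketch for load_conf, assuming it is called with an empty path
# to exercise the fallback branch (the call itself is an illustration, not part
# of the original module). The defaults are written to ~/.yootto/config.json
# and the relative file names come back resolved to absolute paths.
conf = load_conf("")
print(conf["auth_file_path"])               # e.g. /home/you/.yootto/headers_auth.json
print(conf["auto_create_playlist_format"])  # "Upload List (%Y/%m/%d %H:%M:%S)"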
import collections
import logging
import sys


def main(files, array):
    # MLH1_PROBES_*, CIMP_CUTOFF_* and CIMP_PROBES_* are module-level constants.
    if array == '450k':
        mlh1_probes, cimp_cutoff, cimp_probes = MLH1_PROBES_450k, CIMP_CUTOFF_450k, CIMP_PROBES_450k
    elif array == '27k':
        mlh1_probes, cimp_cutoff, cimp_probes = MLH1_PROBES_27k, CIMP_CUTOFF_27k, CIMP_PROBES_27k
    else:
        logging.fatal('Unrecognized array type %s', array)
        sys.exit(1)

    # Header: sample name, MLH1 mean, CIMP count, then the individual probes.
    sys.stdout.write('Sample\tMLH1\tCIMP\t{}\t{}\n'.format(
        '\t'.join(sorted(mlh1_probes)),
        '\t'.join(['\t'.join(['{}_{}'.format(gene, x) for x in sorted(cimp_probes[gene])])
                   for gene in sorted(cimp_probes)])))

    with open(files, 'r') as file_list:
        for f in file_list:
            f = f.strip('\n')
            logging.info('processing %s...', f)
            mlh1_values = {}
            cimp_values = collections.defaultdict(list)
            probes = {}
            with open(f, 'r') as methylation:
                for line in methylation:
                    fields = line.strip('\n').split('\t')
                    if fields[0] in mlh1_probes:
                        mlh1_values[fields[0]] = fields[1]
                    for gene in cimp_probes:  # each cimp gene
                        if fields[0] in cimp_probes[gene]:  # relevant probe
                            try:
                                cimp_values[gene].append(float(fields[1]))
                            except ValueError:
                                logging.debug('skipping non-float value %s in probe %s',
                                              fields[1], fields[0])
                            probes[fields[0]] = fields[1]
            sample = 'TCGA{}'.format(f.split('TCGA')[-1].replace('.gdc_hg38.txt', ''))
            sys.stdout.write('{}\t{:.2f}\t'.format(
                sample,
                sum([float(mlh1_values[x]) for x in mlh1_values]) / len(mlh1_values)))
            # A gene counts toward CIMP if its mean beta value exceeds the cutoff.
            cimp_count = 0
            for gene in cimp_probes:
                if sum(cimp_values[gene]) / len(cimp_values[gene]) > cimp_cutoff:
                    cimp_count += 1
            sys.stdout.write('{}\t'.format(cimp_count))
            # mlh1 probes
            sys.stdout.write('\t'.join(mlh1_values.get(x, 'NA') for x in sorted(mlh1_probes)))
            # cimp probes
            for gene in sorted(cimp_probes):
                for probe in sorted(cimp_probes[gene]):
                    sys.stdout.write('\t{}'.format(probes.get(probe, 'NA')))
            sys.stdout.write('\n')
            logging.info('processing %s: done', f)
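# Hypothetical invocation sketch. The probe IDs, cutoff, and file names below
# are made up for illustration; the real constants live at module level in the
# original script and are not shown here.
MLH1_PROBES_450k = {'cg_mlh1_a', 'cg_mlh1_b'}
CIMP_CUTOFF_450k = 0.3
CIMP_PROBES_450k = {'CDKN2A': {'cg_cdkn2a_a'}, 'RUNX3': {'cg_runx3_a'}}

# 'samples.txt' would list one methylation file per line; each file is a
# tab-separated probe_id<TAB>beta_value table named like '...TCGA-XX-YYYY.gdc_hg38.txt'.
main('samples.txt', '450k')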
import cv2
import numpy as np
import torch


def BaseLoss(feature_maps=None, bboxes=None, orig_size=None):
    # Plain assert messages: logging.FATAL is an int level constant, not a
    # callable, so `assert x, logging.FATAL(...)` raised TypeError on failure.
    assert feature_maps is not None, "No feature maps have been provided."
    assert bboxes is not None, "No bounding boxes have been provided."
    assert orig_size is not None, "Original image size has not been provided."
    # orig_size holds per-batch H, W and C tensors.
    batchsize = feature_maps.shape[0]
    losses = []
    H = orig_size[0].numpy()
    W = orig_size[1].numpy()
    C = orig_size[2].numpy()
    # The loader nests the boxes; unwrap to the per-batch list.
    bboxes = bboxes[0][1]
    for batch in range(batchsize):
        feat2 = feature_maps[batch, :, :, :].cpu().detach().numpy()
        feat2 = np.transpose(feat2, (2, 1, 0))
        # cv2.resize takes the interpolation as a keyword argument; the third
        # positional argument is dst, so passing cv2.INTER_CUBIC there was a bug.
        feat2 = cv2.resize(feat2, (int(W[batch]), int(H[batch])),
                           interpolation=cv2.INTER_CUBIC)
        feat2 = np.transpose(feat2, (2, 1, 0))
        mask, bbox_pixels, non_bbox_pixels = create_mask(feat2.shape[1:], bboxes[batch])
        # Mean squared activation inside vs. outside the bounding box.
        feat = np.square(feat2)
        feat = np.sum(feat, axis=0)
        bboxvalues = float(np.sum(feat[mask]) / (bbox_pixels * feat.shape[0]))
        restvalues = float(np.sum(feat[~mask]) / (non_bbox_pixels * feat.shape[0]))
        losses.append(restvalues - bboxvalues)
    # torch.autograd.Variable is deprecated; build the tensor directly.
    losses = torch.tensor(losses, dtype=torch.float64, requires_grad=True)
    return sum(losses) / len(losses), feat
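# create_mask is called above but not shown. A minimal sketch of what it
# plausibly does, assuming bbox = (x1, y1, x2, y2) in pixel coordinates of the
# resized feature map; the signature and box layout are assumptions, not the
# original implementation.
def create_mask(shape, bbox):
    mask = np.zeros(shape, dtype=bool)
    x1, y1, x2, y2 = (int(v) for v in bbox)
    mask[x1:x2, y1:y2] = True  # pixels covered by the bounding box
    bbox_pixels = mask.sum()
    non_bbox_pixels = mask.size - bbox_pixels
    return mask, bbox_pixels, non_bbox_pixels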
import cv2
import logging


def bbplot_img_from_df(df):
    imagefilename = df['file_name'].iloc[0]
    savefilename = df['save_name'].iloc[0]
    img = cv2.imread(imagefilename)
    # cv2.imread does not raise on a missing or unreadable file; it returns
    # None, so the original try/except could never catch the failure.
    if img is None:
        logging.fatal("The image %s could not be read.", imagefilename)
        return None
    plotonimage(img, df, savefilename)
    return None
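# Usage sketch, assuming df carries one bounding box per row alongside shared
# 'file_name'/'save_name' columns. The box column names are an assumption, and
# plotonimage is an external helper not shown in this snippet.
import pandas as pd

df = pd.DataFrame({
    'file_name': ['input.jpg', 'input.jpg'],
    'save_name': ['annotated.jpg', 'annotated.jpg'],
    'x1': [10, 50], 'y1': [20, 60], 'x2': [40, 90], 'y2': [30, 80],
})
bbplot_img_from_df(df)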
def _wrapper(*args, **kwargs):
    # Inner function of a decorator: func, report_failure, report_invocation
    # and report_results come from the enclosing scope.
    try:
        print(f"Running: {func.__name__}( {args} , {kwargs} )")
        v = func(*args, **kwargs)
    except Exception as e:
        logging.fatal("Failed running %s: %s", func.__name__, e)
        report_failure(func.__name__, *args, failure=e, **kwargs)
        # Without this re-raise, v would be unbound below and the failure
        # would also be reported a second time as a normal invocation.
        raise
    rep_id = report_invocation(func.__name__, *args, scope='id', **kwargs)
    report_results(func.__name__, *args, invocation_report_id=rep_id, **kwargs)
    print(f"Finished: {func.__name__}( {args} , {kwargs} )")
    return v
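# A plausible enclosing shell for _wrapper above. The original shows only the
# inner function; the decorator name 'reported' and the stub reporters below
# are assumptions for illustration. functools.wraps preserves func.__doc__,
# which the snippet's stripped-out comments hinted at wanting.
import functools
import logging


def report_failure(name, *args, failure=None, **kwargs):
    logging.error("%s failed: %s", name, failure)


def report_invocation(name, *args, scope='id', **kwargs):
    return 0  # stand-in report id


def report_results(name, *args, invocation_report_id=None, **kwargs):
    pass


def reported(func):
    @functools.wraps(func)
    def _wrapper(*args, **kwargs):
        try:
            print(f"Running: {func.__name__}( {args} , {kwargs} )")
            v = func(*args, **kwargs)
        except Exception as e:
            logging.fatal("Failed running %s: %s", func.__name__, e)
            report_failure(func.__name__, *args, failure=e, **kwargs)
            raise
        rep_id = report_invocation(func.__name__, *args, scope='id', **kwargs)
        report_results(func.__name__, *args, invocation_report_id=rep_id, **kwargs)
        print(f"Finished: {func.__name__}( {args} , {kwargs} )")
        return v
    return _wrapper


@reported
def add(a, b):
    return a + b


add(1, 2)  # prints the Running/Finished lines and returns 3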
import json
import logging


def parse_doc_to_nlss(doc_info):
    """Extract [e_id, sent_id, sent, l_e] tuples from a document's entity grid.

    title_field, E_GRID_FIELD, body_field and SPOT_FIELD are module globals.
    """
    l_nlss = []
    docno = doc_info.get('docno', "")
    if not docno:
        docno = doc_info.get(title_field)
    e_grid = doc_info.get(E_GRID_FIELD, {})
    if not isinstance(e_grid, dict):
        logging.fatal('%s is not dict from [%s]', json.dumps(e_grid), docno)
        raise TypeError
    for p, sent_grid in enumerate(e_grid.get(body_field, [])):
        sent_id = docno + '_s%d' % p
        sent = sent_grid['sent']
        l_ana = sent_grid[SPOT_FIELD]
        # Unique, non-empty entity ids annotated in this sentence.
        l_e = list(set([ana['id'] for ana in l_ana]))
        l_e = [e for e in l_e if e]
        for e in l_e:
            l_nlss.append([e, sent_id, sent, l_e])
    logging.debug('[%s] [%d] nlss pair', docno, len(l_nlss))
    return l_nlss
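# Illustrative input, assuming the module-level field names resolve to the
# keys shown here; these bindings and the sample document are assumptions for
# this sketch, not values from the original module.
E_GRID_FIELD, body_field, SPOT_FIELD, title_field = 'e_grid', 'bodyText', 'spot', 'title'

doc_info = {
    'docno': 'd001',
    'e_grid': {
        'bodyText': [
            {'sent': 'Paris is the capital of France.',
             'spot': [{'id': 'Q90'}, {'id': 'Q142'}, {'id': ''}]},  # empty id is dropped
        ]
    },
}
# -> [['Q90', 'd001_s0', 'Paris is the capital of France.', [...]], ...]
print(parse_doc_to_nlss(doc_info))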
def draw(self):
    if len(self.Game) == 2:  # the game hasn't been chosen yet: draw the menu
        pyxel.cls(7)
        # text on screen
        pyxel.text(90, 40, "Adventure", 0)
        pyxel.text(90, 60, "Pong", 0)
        pyxel.text(0, 0, f'{datetime.datetime.now()}', 0)
        if self.choice == 1:  # outline the option the player has selected
            pyxel.rectb(85, 35, 50, 15, 0)
        if self.choice == 2:  # outline the option the player has selected
            pyxel.rectb(85, 55, 50, 15, 0)
    elif self.Game == {2}:  # start the adventure game
        Adventure.App()
        Adventure.API_find()
    elif self.Game == {1}:  # start pong
        Pong.Start()
    else:  # an error occurred
        logging.fatal("game softlocked")
        raise Game_exception("Game doesn't equal 2 or 1")
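# Sketch of how draw() plausibly hooks into the pyxel game loop, assuming
# self.Game starts as the set {1, 2} and update() collapses it when the player
# confirms a choice. The App shell and update logic are assumptions; only
# draw() comes from the original, and the real choice-to-game mapping is not shown.
import pyxel


class App:
    def __init__(self):
        self.Game = {1, 2}
        self.choice = 1
        pyxel.init(160, 120)
        pyxel.run(self.update, self.draw)

    def update(self):
        if pyxel.btnp(pyxel.KEY_DOWN):
            self.choice = 2
        if pyxel.btnp(pyxel.KEY_UP):
            self.choice = 1
        if pyxel.btnp(pyxel.KEY_RETURN):
            self.Game = {self.choice}  # collapse the menu to the chosen game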
import logging
import os
import platform
from pathlib import Path


def load_playlist(playlist_path, encoding):
    ret = []
    try:
        with open(playlist_path, "r", encoding=encoding) as fileobj:
            for line in fileobj:
                line = line.replace("\n", "")
                # Skip blank lines, comments, and a leading BOM (U+FEFF, 65279);
                # the original readline loop also stopped at the first blank line.
                if line and line[0] != "#" and ord(line[0]) != 65279:
                    if not Path(line).is_absolute():
                        # Normalize separators, then resolve relative to the playlist.
                        if platform.system() == "Windows":
                            line = line.replace("/", "\\")
                        else:
                            line = line.replace("\\", "/")
                        basedir = os.path.dirname(os.path.abspath(playlist_path))
                        line = os.path.join(basedir, line)
                    ret.append(get_tag_from_file(line))
    except Exception as e:
        logging.fatal("Playlist load error: %s", e)
        return []
    return ret
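# Usage sketch with a stub for get_tag_from_file, which the original calls but
# does not define in this snippet; the stub, its return shape, and the playlist
# file name are assumptions.
def get_tag_from_file(path):
    return {"path": path}  # the real helper presumably reads audio tags


tracks = load_playlist("playlist.m3u8", "utf-8")
for tag in tracks:
    print(tag["path"])  # relative entries resolve against the playlist's folder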