def get_weather():
    """Return rain/temperature info for the requesting client's location.

    Geolocates the caller's IP via ip-api.com, then queries Dark Sky
    (forecastio) for current conditions.

    @Returns
        list: [rain message, city, minutely summary, hourly summary,
               current temperature] on success, or
              [lookup URL, error message] when the geo-IP lookup fails.
    """
    url = "http://ip-api.com/json/"
    # Prefer the proxy-forwarded client address when present.
    if request.headers.getlist("X-Forwarded-For"):
        ip = request.headers.getlist("X-Forwarded-For")[0]
    else:
        ip = request.remote_addr
    # uncomment for local testing
    # ip = "2601:602:9804:4396:8d29:6f17:a182:e8ed"
    r = requests.get(url + ip)
    # r.json() already returns a parsed dict; the original
    # json.dumps -> json.loads round-trip was a no-op and is removed.
    geo = r.json()
    if geo['status'] == "fail":
        return [url + ip,
                emojify("Something went wrong :bee: :sunny: :umbrella:")]
    forecast = forecastio.load_forecast(api_key, geo['lat'], geo['lon'])
    current = forecast.currently()
    results = []  # renamed from `list` to avoid shadowing the builtin
    if "Rain" not in current.summary:
        results.append(emojify(":sunny: It's Not Raining :sunny:"))
    else:
        results.append(emojify(":umbrella: It's Raining :umbrella:"))
    results.append(geo['city'])
    results.append(forecast.minutely().summary)
    results.append(forecast.hourly().summary)
    results.append(current.temperature)
    return results
def load_checkpoint(checkpoint_dir, epoch, iteration):
    """Load a training checkpoint saved as `<epoch>.<iteration>.ckpt`.

    @Args
        checkpoint_dir: (str) absolute path to checkpoint folder
        epoch: (int) epoch of checkpoint
        iteration: (int) iteration of checkpoint in one epoch

    @Returns
        start_epoch: (int)
        start_iteration: (int)
        state_dict: (dict) state of model

    Raises Exception when the checkpoint file is missing.
    """
    ckpt_name = f"{epoch}.{iteration}.ckpt"
    path = opj(checkpoint_dir, ckpt_name)
    if not os.path.isfile(path):
        raise Exception(
            emojify(f"Checkpoint in epoch {epoch} doesn't exist :poop:"))
    checkpoint = torch.load(path)
    start_epoch = checkpoint['epoch']
    start_iteration = checkpoint['iteration']
    state_dict = checkpoint['state_dict']
    # Sanity-check that the file content matches what the caller asked for.
    assert epoch == start_epoch, emojify(
        "`epoch` != checkpoint's `start_epoch` :poop:")
    assert iteration == start_iteration, emojify(
        "`iteration` != checkpoint's `start_iteration` :poop:")
    return start_epoch, start_iteration, state_dict
def feleciteer_speler(vraag_counter):
    # Congratulate the player on a correct guess and end the program.
    # NOTE: Python 2 print-statement syntax; runtime strings are Dutch
    # ("you won", "guessed within N tries") and are left untouched.
    # vraag_counter: number of guesses the player needed.
    dikke_duim = emojify(" :thumbsup: joepie!! het klopt je hebt gewonnen!!")
    print dikke_duim
    print "je hebt het binnen {} keer geraden!".format(vraag_counter)
    gefeliciteerd = emojify(":sparkles: gefeliciteerd :sparkles:")
    print gefeliciteerd
    # NOTE(review): exits with status 1 even though this is the success
    # path — confirm whether exit code 0 was intended.
    sys.exit(1)
def save_checkpoint(checkpoint_dir, epoch, iteration, save_dict):
    """Persist `save_dict` as `<epoch>.<iteration>.ckpt` in checkpoint_dir.

    @Args
        checkpoint_dir: (str) absolute path to checkpoint folder
        epoch: (int) epoch of checkpoint file
        iteration: (int) iteration of checkpoint in one epoch
        save_dict: (dict) saving parameters dict
    """
    os.makedirs(checkpoint_dir, exist_ok=True)
    ckpt_name = f"{epoch}.{iteration}.ckpt"
    path = opj(checkpoint_dir, ckpt_name)
    # The dict must describe the same (epoch, iteration) it is filed under.
    assert epoch == save_dict['epoch'], emojify(
        "`epoch` != save_dict's `start_epoch` :poop:")
    assert iteration == save_dict['iteration'], emojify(
        "`iteration` != save_dict's `start_iteration` :poop:")
    if os.path.isfile(path):
        # Warn, but still overwrite the existing file.
        print(
            emojify(
                "Overwrite checkpoint in epoch %d, iteration %d :exclamation:"
                % (epoch, iteration)))
    try:
        torch.save(save_dict, path)
    except Exception:
        raise Exception(emojify("Fail to save checkpoint :sob:"))
    print(emojify("Checkpoint %s saved :heavy_check_mark:" % ckpt_name))
def parse_args():
    '''Parse command line arguments for the FASTQ-quality-to-emoji tool.

    Returns Options object with command line argument values as attributes.
    Will exit the program on a command line error.
    '''
    parser = ArgumentParser(
        description='Read one or more FASTQ files, compute quality stats for each file, print as emoji... for some reason.' + emojify(":smile:"))
    # Reads shorter than this are excluded from the statistics.
    parser.add_argument(
        '--minlen',
        metavar='N',
        type=int,
        default=DEFAULT_MIN_LEN,
        help='Minimum length sequence to include in stats (default {})'.format(
            DEFAULT_MIN_LEN))
    parser.add_argument('--scale', action='store_true',
                        help='show relevant scale in output')
    parser.add_argument('--version', action='version',
                        version='%(prog)s ' + PROGRAM_VERSION)
    # Mean is the default statistic; the flag exists for symmetry with
    # --min/--max.
    parser.add_argument('--mean', default=True, action='store_true',
                        help='show mean quality per position (DEFAULT)')
    parser.add_argument('--custom', metavar='CUSTOM_DICT', type=str,
                        help='use a mapping of custom emoji to quality in CUSTOM_DICT (' + emojify(":snake:") + emojify(":palm_tree:") + ')')
    parser.add_argument('--bin', action='store_true',
                        help='use binned scores (' + emojify(":no_entry_sign:") + emojify(":skull:") + emojify(":poop:") + emojify(":warning:") + " " + emojify(":smile:") + emojify(":laughing:") + emojify(":sunglasses:") + emojify(":heart_eyes:") + ")")
    parser.add_argument('--noemoji', action='store_true',
                        help='use mapping without emoji (▁▂▃▄▅▆▇█)')
    parser.add_argument('--min', action='store_true',
                        help='show minimum quality per position')
    parser.add_argument('--max', action='store_true',
                        help='show maximum quality per position')
    parser.add_argument('--output', metavar='OUTPUT_FILE', type=FileType('w'),
                        help='write output to OUTPUT_FILE instead of stdout')
    parser.add_argument('--long', metavar='READ_LENGTH', type=int,
                        help='enable long reads up to READ_LENGTH bp long')
    parser.add_argument('--log', metavar='LOG_FILE', type=str,
                        help='record program progress in LOG_FILE')
    # NOTE(review): the positional is named `fasta_files` but the metavar
    # and help say FASTQ — confirm which is intended downstream.
    parser.add_argument('fasta_files', nargs='*', metavar='FASTQ_FILE',
                        type=str, help='Input FASTQ files')
    return parser.parse_args()
def user_input(self):
    # Prompt the player for a move and forward it to the game board.
    # NOTE: Python 2 syntax; `input()` here evaluates the typed text as a
    # Python expression (e.g. "1, 2, True"), which is why tuple unpacking
    # works — and also why this is unsafe for untrusted input.
    print emojify(
        ":sparkles: :sparkles: input coords :sparkles: :sparkles:")
    try:
        x, y, f = input("(x,y, flag boolean):")
    except ValueError:
        # One retry when the player omits an argument (usually the flag).
        print "woops! forgot an arg there. Probably the flag. Try again."
        x, y, f = input("(x,y, flag boolean):")
    self.board.play(x, y, f)
def draw(self):
    # Render the board, one grid row per printed line.
    # NOTE: Python 2 syntax — the trailing comma after `print` keeps
    # tiles of a row on the same line.
    for row in self.grid:
        for tile in row:
            # Revealed tiles whose label is not a neighbour-count emoji
            # are drawn as a blank (white) square; everything else shows
            # its own label (count, flag, or hidden marker).
            if tile.visible and tile.label not in count_emoji_dictionary.values(
            ):
                print emojify(":white_small_square:"),
            else:
                print emojify(tile.label),
        print "\n"
def convert_fasta(options, mapping_dict=local_seq_emoji_map):
    '''Convert FASTA file to emoji.

    If no FASTA files are specified on the command line then read
    from the standard input (stdin). Gzipped input is supported both
    for named files (by extension) and stdin (by magic-number sniff).

    Arguments:
        options: the command line options of the program
        mapping_dict: base -> emoji-code mapping; overridden by
            --custom when given
    Result:
        None
    '''
    if options.custom:
        # NOTE(review): literal_eval of a user-supplied file is safe
        # (no code execution) but will raise on malformed dicts.
        with open(options.custom) as f:
            mapping_dict_use = ast.literal_eval(f.read())
    else:
        mapping_dict_use = mapping_dict
    if options.fasta_files:
        for fasta_filename in options.fasta_files:
            logging.info("Processing FASTA file from %s", fasta_filename)
            try:
                if fasta_filename.endswith(".gz"):
                    fasta_file = gzip.open(fasta_filename, 'rt')
                else:
                    fasta_file = open(fasta_filename)
            except IOError as exception:
                exit_with_error(str(exception), EXIT_FILE_IO_ERROR)
            else:
                with fasta_file:
                    _emojify_fasta_records(fasta_file, mapping_dict_use)
    else:
        logging.info("Processing FASTA file from stdin")
        # Sniff the gzip magic number (1f 8b) without consuming stdin.
        if (binascii.hexlify(sys.stdin.buffer.peek(1)[:2]) == b'1f8b'):
            stdin_file = gzip.open(sys.stdin.buffer, 'rt')
        else:
            stdin_file = sys.stdin
        _emojify_fasta_records(stdin_file, mapping_dict_use)


def _emojify_fasta_records(handle, mapping):
    # Print each record as an arrow header with its id, then the
    # sequence with every base replaced by its emoji (this loop was
    # duplicated verbatim for the file and stdin branches).
    for seq in SeqIO.parse(handle, "fasta"):
        print(emojify(":arrow_forward:") + " " + seq.id)
        bioemojify = "".join([
            emojify(mapping.get(s, ":heart_eyes:")) for s in seq.seq
        ])
        print(bioemojify)
def get_vcf_filter(filter_val):
    """Map a VCF FILTER value to an emoji summary.

    None (missing FILTER)  -> question mark
    []   (PASS)            -> thumbs up
    non-empty list         -> thumbs down plus the joined filter names
    """
    if filter_val is None:  # was `== None`; identity check per PEP 8
        return emojify(":question:")
    if filter_val == []:
        return emojify(":thumbsup:")
    return emojify(":thumbsdown:") + ":" + str(",".join(filter_val))
def print_scale(full_quals, binned):
    """Print the quality-character -> emoji scale, one line per character.

    @Args
        full_quals: iterable of quality characters (Sanger encoding)
        binned: (bool) use the binned emoji map instead of the full one
    """
    # Pick the header and map once instead of duplicating the whole loop
    # in each branch as before.
    if binned:
        print("#scale for fastqe (binned)")
        mapping = emaps.fastq_emoji_map_binned
    else:
        print("#scale for fastqe")
        mapping = emaps.fastq_emoji_map
    for count, i in enumerate(full_quals):  # enumerate replaces manual counter
        print("# ", count, i, emojify(mapping.get(i, ':heart_eyes:')))
def draw_detection(img_path, detection, reso, type):
    """Draw detection result

    @Args
        img_path: (str) path to image
        detection: (np.array) detection result
            1. (type == 'pred') with size [#bbox, [batch_idx, top-left x,
               top-left y, bottom-right x, bottom-right y, objectness,
               conf, class idx]]
            2. (type == 'gt') with size [#box, [top-left x, top-left y,
               bottom-right x, bottom-right y]]
        reso: (int) image resolution
        type: (str) prediction or ground truth
            NOTE: parameter shadows the builtin `type`; name kept so
            keyword callers keep working.

    @Returns
        img: (Pillow.Image) detection result
    """
    class_names = config.datasets['coco']['class_names']
    img = Image.open(img_path)
    w, h = img.size
    # Predictions are in network (reso x reso) space; scale back to image
    # space. (The original also had a dead no-op `h_ratio, w_ratio`
    # expression here, now removed.)
    h_ratio = h / reso
    w_ratio = w / reso
    draw = ImageDraw.Draw(img)
    font = ImageFont.truetype("../assets/Roboto-Bold.ttf", 15)
    for i in range(detection.shape[0]):
        if type == 'pred':
            bbox = detection[i, 1:5]
            category = int(detection[i, -1])
            label = class_names[category]
            conf = '%.2f' % detection[i, -2]
            caption = str(label) + ' ' + str(conf)
        elif type == 'gt':
            bbox = transform_coord(detection[i, 0:4], src='center',
                                   dst='corner')
            # NOTE(review): category is read from the last column even
            # though the docstring lists only 4 coords for 'gt' —
            # confirm the actual ground-truth layout.
            category = int(detection[i, -1])
            label = class_names[category]
            caption = str(label)
            # Ground truth appears to be in normalized coords; scale by
            # the full image size.
            w_ratio = w
            h_ratio = h
        else:
            raise Exception(emojify("detection type not supported! :shit:"))
        if category not in config.colors:  # was `.keys()`; same semantics
            # Assign a random colour the first time a category appears.
            config.colors[category] = (random.randint(0, 255),
                                       random.randint(0, 255),
                                       random.randint(0, 255))
        x1, y1, x2, y2 = bbox[0], bbox[1], bbox[2], bbox[3]
        draw.rectangle(
            ((x1 * w_ratio, y1 * h_ratio, x2 * w_ratio, y2 * h_ratio)),
            outline=config.colors[category])
        # Filled strip above the box acts as the caption background.
        draw.rectangle((bbox[0] * w_ratio, bbox[1] * h_ratio - 15,
                        bbox[2] * w_ratio, bbox[1] * h_ratio),
                       fill=config.colors[category])
        draw.text((bbox[0] * w_ratio + 2, bbox[1] * h_ratio - 15),
                  caption, fill='white', font=font)
    return img
def main():
    """Fetch the slickdeals front page and hand the parsed HTML to the scraper."""
    page = requests.get("https://slickdeals.net")
    if page.status_code == 200:
        get_to_work(bs(page.text, "html.parser"))
    else:
        print("Can't connect to slickdeals {}".format(
            emojify(":disappointed:")))
def mean_emoji(filename, max_len=500):
    """Print the per-position mean FASTQ quality of `filename` as emoji.

    @Args
        filename: FASTQ path/handle accepted by SeqIO.parse
        max_len: maximum supported read length (default 500, the cap
                 that was previously hard-coded; longer reads would
                 index past the accumulator)
    """
    means = np.zeros(max_len)
    seq_count = 0
    for r in SeqIO.parse(filename, "fastq"):
        # enumerate replaces the manual index counter
        for index, s in enumerate(r.letter_annotations["phred_quality"]):
            means[index] += s
        seq_count += 1
    if seq_count == 0:
        # No reads: print an empty line (matches the old behaviour, but
        # now without dividing an array by zero).
        print("")
        return
    # NOTE(review): trim_zeros also strips leading zero columns — a
    # position whose true mean is 0 at the start would be dropped.
    cleaned = np.trim_zeros(means)
    means_fp = cleaned / seq_count
    # Build a fake record so QualityIO renders the PHRED string for us.
    fake_seq = ''.join(["a"] * len(means_fp.round()))
    record = SeqRecord(Seq(fake_seq),
                       id="test",
                       name="mean scores",
                       description="example with mean fastq socres",
                       letter_annotations={
                           'phred_quality': list(means_fp.round().astype(int))
                       })
    print("".join([
        emojify(fastq_emoji_map[s])
        for s in QualityIO._get_sanger_quality_str(record)
    ]))
def get_vcf_qual(quality):
    '''Map a quality value to an emoji.

    Hack to do this quickly - use the same trick as FASTQE: encode the
    value as a PHRED character via a throwaway SeqRecord, then look the
    character up in the binned emoji map.
    TODO make this better
    '''
    if quality is None:  # was `== None`; identity check per PEP 8
        return emojify(":question:")
    fake_seq = 'N'
    record_qual = SeqRecord(Seq(fake_seq),
                            id="test",
                            name="lookup",
                            description="example",
                            letter_annotations={'phred_quality': [int(quality)]})
    mapping_dict_qual_use = emaps.fastq_emoji_map_binned
    original_qual = QualityIO._get_sanger_quality_str(record_qual)
    return "".join([
        emojify(mapping_dict_qual_use.get(s, ":heart_eyes:"))
        for s in original_qual
    ])
def _find_type(md: Text):
    """Pull the parenthesised type out of a markdown table cell and emojify it.

    Returns None when the cell does not contain the expected
    `>...<` / `(...)` structure.
    """
    try:
        cell = md.split(">")[1].split("<")[0].strip()
        raw_type = cell.split("(")[1]
    except IndexError:
        return None
    return pyemojify.emojify(raw_type).replace(")", "").strip()
def get_to_work(soup):
    """Locate the front-page deals module in the parsed page and print its deals.

    Exits with status 1 when the module is missing.
    """
    frontpage = soup.find_all(
        "div", attrs={"data-module-name": "Frontpage Slickdeals"})
    if not frontpage:
        print("Something went wrong {}".format(emojify(":disappointed:")))
        sys.exit(1)
    # Only divs carrying a thread id are actual deal containers.
    deals = [
        div for div in frontpage[0].find_all("div")
        if div.get("data-threadid")
    ]
    print_deals(deals)
def convert_fasta(options):
    '''Convert FASTA file to emoji.

    If no FASTA files are specified on the command line then read
    from the standard input (stdin).

    Arguments:
        options: the command line options of the program
    Result:
        None
    '''
    if options.fasta_files:
        for fasta_filename in options.fasta_files:
            logging.info("Processing FASTA file from %s", fasta_filename)
            try:
                fasta_file = open(fasta_filename)
            except IOError as exception:
                exit_with_error(str(exception), EXIT_FILE_IO_ERROR)
            else:
                with fasta_file:
                    _print_fasta_as_emoji(fasta_file)
    else:
        logging.info("Processing FASTA file from stdin")
        # NOTE(review): looks like leftover debug output — kept to
        # preserve behaviour; confirm whether it can be removed.
        print("stdin")
        _print_fasta_as_emoji(sys.stdin)


def _print_fasta_as_emoji(handle):
    # Print each record as a ">" header, then the sequence as
    # space-separated emoji (this loop was duplicated verbatim for the
    # file and stdin branches).
    for seq in SeqIO.parse(handle, "fasta"):
        print(">" + seq.id)
        bioemojify = " ".join([
            emojify(emaps.seq_emoji_map.get(s, ":heart_eyes:"))
            for s in seq.seq
        ])
        print(bioemojify)
def play(self, x, y, flag=False):
    """Play a move at (x, y): flag the tile, hit a bomb, or reveal.

    Raises ValueError when the coordinates are outside the board.
    """
    if not self.inbounds(x, y):
        raise ValueError('Coords out of bound.')
    # The guard above guarantees (x, y) is in bounds, so the original
    # redundant `self.inbounds(x, y) and ...` re-checks are dropped.
    if flag:
        # Flag a certain tile.
        self.grid[x][y].flagged = True
        self.grid[x][y].label = emojify(":triangular_flag_on_post:")
        return
    # If not flagging and it is a bomb, the game is lost.
    if self.grid[x][y].bomb:
        self.state = -1
        return
    # Start recursive reveal.
    self.reveal(x, y)
def filter_improvements_by_code(code_list: str) -> FrozenSet[str]:
    """Parse a comma-separated list of improvement codes and validate it.

    Returns the validated codes, or an empty frozenset when the list is
    empty or contains any code not present in ALL_IMPROVEMENTS (in which
    case the unknown codes are reported).
    """
    all_codes = frozenset(
        improvement.CODE for improvement in ALL_IMPROVEMENTS)
    # Strip whitespace and drop empty entries left by stray commas.
    codes = frozenset(
        code.strip() for code in code_list.split(",")) - {""}
    if not codes:
        return frozenset()
    wrong_codes = codes - all_codes
    if wrong_codes:
        print(
            emojify(
                f":no_entry_sign: Unknown improvements selected: {','.join(wrong_codes)}"
            ))
        return frozenset()
    return codes
def print_deals(deals):
    """Print 'fire' (hot) deals first with a flame emoji, then the rest."""
    hot, regular = [], []
    for deal in deals:
        title = deal.find("div", attrs={"class": "priceLine"})["title"]
        href = deal.find("div", attrs={"class": "itemBottomRow"})["data-href"]
        # Deals marked with the fire icon go to the front of the output.
        if deal.find("span", attrs={"class": "fire icon icon-fire"}):
            hot.append((title, href))
        else:
            regular.append((title, href))
    for title, href in hot:
        print("{} {}\n{}\n".format(emojify(":fire:"), title, get_link(href)))
    for title, href in regular:
        print("{}\n{}\n".format(title, get_link(href)))
def transform_coord(bbox, src='center', dst='corner'):
    """Transform bbox coordinates
      |---------|            (x1,y1) *---------|
      |         |                    |         |
      |  (x,y)  h                    |         |
      |         |                    |         |
      |____w____|                    |_________* (x2,y2)
         center                         corner
    @Args
        bbox: (Tensor) bbox with size [..., 4]
    @Returns
        bbox_transformed: (Tensor) bbox with size [..., 4]
    @Raises
        Exception for unsupported (src, dst) pairs
    """
    # Temporarily add a batch dim so 1-D boxes go through the same path.
    flag = False
    if len(bbox.size()) == 1:
        bbox = bbox.unsqueeze(0)
        flag = True
    bbox_transformed = bbox.new(bbox.size())
    if src == 'center' and dst == 'corner':
        bbox_transformed[..., 0] = (bbox[..., 0] - bbox[..., 2] / 2)
        bbox_transformed[..., 1] = (bbox[..., 1] - bbox[..., 3] / 2)
        bbox_transformed[..., 2] = (bbox[..., 0] + bbox[..., 2] / 2)
        bbox_transformed[..., 3] = (bbox[..., 1] + bbox[..., 3] / 2)
    elif src == 'corner' and dst == 'center':
        bbox_transformed[..., 0] = (bbox[..., 0] + bbox[..., 2]) / 2
        bbox_transformed[..., 1] = (bbox[..., 1] + bbox[..., 3]) / 2
        bbox_transformed[..., 2] = bbox[..., 2] - bbox[..., 0]
        # BUG FIX: height is y2 - y1; the original computed y2 + y1,
        # which is wrong whenever y1 != 0 (compare the width line above).
        bbox_transformed[..., 3] = bbox[..., 3] - bbox[..., 1]
    else:
        raise Exception(emojify("format not supported! :shit:"))
    if flag:  # was `flag == True`
        bbox_transformed = bbox_transformed.squeeze(0)
    return bbox_transformed
def print_scale(full_quals, mapping_dict, binned):
    """Print the quality-character -> emoji scale from `mapping_dict`.

    @Args
        full_quals: iterable of quality characters
        mapping_dict: char -> emoji-code mapping
        binned: NOTE(review): accepted but unused — kept so existing
            callers keep working; confirm whether it is still needed.
    """
    print("#scale for fastqe")
    for count, i in enumerate(full_quals):  # enumerate replaces manual counter
        print("# ", count, i, emojify(mapping_dict.get(i, ':heart_eyes:')))
def get_vcf_emoji(orig_c, map_dict=local_seq_emoji_map, default=":heart_eyes:"):
    """Map each character of a VCF value to an emoji.

    The literal string "None" (a missing value rendered by str()) is
    shown as a cross mark.

    @Args
        orig_c: (str) value to convert
        map_dict: char -> emoji-code mapping
        default: fallback emoji code for unmapped characters
    """
    if orig_c == "None":
        return emojify(":x:")
    # BUG FIX: the `default` parameter was accepted but ignored in
    # favour of a hard-coded ":heart_eyes:"; honour it now (its default
    # value preserves the old behaviour for existing callers).
    return "".join([emojify(map_dict.get(e, default)) for e in orig_c])
def main(paths, noop: bool, show_diff: bool, selected: str, excluded: str,
         exit_code: int):
    # Entry point: apply the selected improvements to every .py file under
    # `paths`, rewriting files in place unless `noop` is set.
    #   paths: filesystem paths to scan
    #   noop: analyse and report but do not write changes back
    #   show_diff: print a unified diff of each change to stderr
    #   selected / excluded: comma-separated improvement codes (mutually
    #       exclusive)
    #   exit_code: process exit status used when any fix was applied
    if not paths:
        print(emojify("Nothing to do. :sleeping:"))
        return
    selected_improvements = list(ALL_IMPROVEMENTS)
    if selected and excluded:
        print(
            emojify(
                ":no_entry_sign: '--select' and '--exclude' options are mutually exclusive!"
            ))
        return
    if selected:
        selected_codes = filter_improvements_by_code(selected)
        selected_improvements = [
            improvement for improvement in ALL_IMPROVEMENTS
            if improvement.CODE in selected_codes
        ]
    elif excluded:
        excluded_codes = filter_improvements_by_code(excluded)
        selected_improvements = [
            improvement for improvement in ALL_IMPROVEMENTS
            if improvement.CODE not in excluded_codes
        ]
    if not selected_improvements:
        print(emojify(":sleeping: No improvements to apply."))
        return
    python_files = filter(lambda fn: fn.endswith(".py"), resolve_paths(*paths))
    are_fixes_applied = False
    total_start_ts = time.process_time()
    for path_to_source in python_files:
        # "r+" so the same handle can be read, then truncated and rewritten.
        with open(path_to_source, "r+") as source_file:
            original_source: str = source_file.read()
            start_ts = time.process_time()
            processed_source, applied = process_file(original_source,
                                                     selected_improvements)
            end_ts = time.process_time()
            if original_source == processed_source:
                continue
            print(f"--> Fixed '{source_file.name}'...")
            for case in applied:
                print(f" [+] ({case.CODE}) {case.DESCRIPTION}")
            print()
            print(f" Time taken: {end_ts - start_ts:.2f} seconds")
            if show_diff:
                print()
                print(
                    create_diff(
                        original_source,
                        processed_source,
                        source_file.name,
                        # ANSI codes used for highlighting source code
                        # will be saved to file and cause `patch` to bail out.
                        highlight=sys.stderr.isatty(),
                    ),
                    file=sys.stderr,
                )
            are_fixes_applied = True
            if noop:
                continue
            # Rewind and replace the file contents in place.
            source_file.seek(0)
            source_file.truncate()
            source_file.write(processed_source)
    print()
    time_taken = prettify_time_interval(time.process_time() - total_start_ts)
    print(emojify(f":sparkles: All done! :sparkles: :clock2: {time_taken}"))
    # Non-zero exit lets CI detect that files were modified.
    if are_fixes_applied:
        sys.exit(exit_code)
def parse_args(error=False):
    '''Parse command line arguments for the multi-format emoji converter.

    Builds one subcommand per input format (fasta, fasta_protein, fastq,
    vcf), each with its own converter function bound via set_defaults.

    Returns Options object with command line argument values as attributes.
    Will exit the program on a command line error.
    When `error` is True, prints help and returns None instead of parsing.
    '''
    description = 'Read one or more FASTA or FASTQ files, and convert them to emoji.😀'
    parser = ArgumentParser(description=description)
    parser.add_argument('--version',
                        action='version',
                        version='%(prog)s ' + PROGRAM_VERSION)
    parser.add_argument('--log',
                        metavar='LOG_FILE',
                        type=str,
                        help='record program progress in LOG_FILE')
    subparsers = parser.add_subparsers(help='sub-command help')
    # FASTA processing
    parser_fasta = subparsers.add_parser('fasta', help='fasta --help')
    parser_fasta.add_argument(
        '--minlen',
        metavar='N',
        type=int,
        default=DEFAULT_MIN_LEN,
        help='Minimum length sequence to include in stats (default {})'.format(
            DEFAULT_MIN_LEN))
    parser_fasta.add_argument(
        '--custom',
        metavar='CUSTOM_DICT',
        type=str,
        help='use a mapping of custom emoji to nucleotides in CUSTOM_DICT (' + emojify(":yellow_heart:") + emojify(":blue_heart:") + ')')
    parser_fasta.add_argument('fasta_files',
                              nargs='*',
                              metavar='FASTA_FILE',
                              type=str,
                              help='Input FASTA files')
    parser_fasta.set_defaults(func=convert_fasta)
    # FASTA protein processing
    parser_fasta_protein = subparsers.add_parser('fasta_protein',
                                                 help='fasta_protein --help')
    parser_fasta_protein.add_argument(
        '--minlen',
        metavar='N',
        type=int,
        default=DEFAULT_MIN_LEN,
        help='Minimum length sequence to include in stats (default {})'.format(
            DEFAULT_MIN_LEN))
    parser_fasta_protein.add_argument(
        '--custom',
        metavar='CUSTOM_DICT',
        type=str,
        help='use a mapping of custom emoji to proteins in CUSTOM_DICT (' + emojify(":yellow_heart:") + emojify(":blue_heart:") + ')')
    parser_fasta_protein.add_argument('fasta_files',
                                      nargs='*',
                                      metavar='FASTA_FILE',
                                      type=str,
                                      help='Input FASTA files')
    parser_fasta_protein.set_defaults(func=convert_fasta_protein)
    #TODO add FASTQ parser and convert both sequence and quality
    # FASTQ processing
    parser_fastq = subparsers.add_parser('fastq', help='fastq --help')
    parser_fastq.add_argument(
        '--minlen',
        metavar='N',
        type=int,
        default=DEFAULT_MIN_LEN,
        help='Minimum length sequence to convert (default {})'.format(
            DEFAULT_MIN_LEN))
    parser_fastq.add_argument(
        '--bin',
        action='store_true',
        help='use binned scores (' + emojify(":no_entry_sign:") + emojify(":skull:") + emojify(":poop:") + emojify(":warning:") + " " + emojify(":smile:") + emojify(":laughing:") + emojify(":sunglasses:") + emojify(":heart_eyes:") + ")")
    parser_fastq.add_argument(
        '--custom',
        metavar='CUSTOM_DICT',
        type=str,
        help='use a mapping of custom emoji to nucleotides in CUSTOM_DICT (' + emojify(":yellow_heart:") + emojify(":blue_heart:") + ')')
    parser_fastq.add_argument(
        '--custom_qual',
        metavar='CUSTOM_DICT',
        type=str,
        help='use a mapping of custom emoji to quality scores in CUSTOM_DICT (' + emojify(":moneybag:") + emojify(":snake:") + ')')
    parser_fastq.add_argument('fastq_files',
                              nargs='*',
                              metavar='FASTQ_FILE',
                              type=str,
                              help='Input FASTQ files')
    parser_fastq.set_defaults(func=convert_fastq)
    # file processing template
    parser_vcf = subparsers.add_parser('vcf', help='vcf --help')
    parser_vcf.add_argument('vcf_files',
                            nargs='*',
                            metavar='VCF_FILE',
                            type=str,
                            help='(experimental) Input VCF files')
    parser_vcf.set_defaults(func=convert_vcf)
    #
    # # file processing template (kept as a scaffold for new formats)
    # parser_filetype = subparsers.add_parser('filetype', help='filetype help')
    # parser_filetype.add_argument(
    #     '--minlen',
    #     metavar='N',
    #     type=int,
    #     default=DEFAULT_MIN_LEN,
    #     help='Minimum length sequence to include in stats (default {})'.format(
    #         DEFAULT_MIN_LEN))
    # parser_filetype.add_argument('--custom',
    #                              metavar='CUSTOM_DICT',
    #                              type=str,
    #                              help='use a mapping of custom emoji to proteins in CUSTOM_DICT (' + emojify(":yellow_heart:") + emojify(":blue_heart:") + ')')
    # parser_filetype.add_argument('fasta_files',
    #                              nargs='*',
    #                              metavar='FASTA_FILE',
    #                              type=str,
    #                              help='Input FASTA files')
    # parser_filetype.set_defaults(func=convert_filetype)
    if(error):
        parser.print_help()
        return
    else:
        return parser.parse_args()
def label_to_emoji(label):
    """Return the emoji for `label` via the module-level emoji_dictionary.

    Raises KeyError when the (stringified) label has no mapping.
    """
    key = str(label)
    return emojify(emoji_dictionary[key])
def process_files(options):
    '''Compute and print FastaStats for each input FASTA file specified on
    the command line. If no FASTA files are specified on the command line
    then read from the standard input (stdin).

    Output is tab-separated: pretty stats, the statistic's name
    ("max"/"mean"/"min", with "(binned)" suffix when --bin is set), then
    the per-position qualities rendered as emoji.

    Arguments:
        options: the command line options of the program
    Result:
        None
    '''
    if options.fasta_files:
        for fasta_filename in options.fasta_files:
            logging.info(
                "Processing FASTA file from {}".format(fasta_filename))
            try:
                fasta_file = open(fasta_filename)
            except IOError as exception:
                exit_with_error(str(exception), EXIT_FILE_IO_ERROR)
            else:
                with fasta_file:
                    stats = FastaStats().from_file(fasta_file, options.minlen)
                    #print(stats.pretty(fasta_filename))
                    if options.scale:
                        print_scale(emaps.all_qualities, options.bin)
                    #rewrite this
                    if options.bin:
                        # Binned emoji map: one emoji per quality band.
                        logging.info("Binned calculations")
                        if options.max:
                            logging.info(
                                "Calculate max quality per position")
                            print(
                                stats.pretty(fasta_filename), "max (binned)",
                                " ".join([
                                    emojify(
                                        emaps.fastq_emoji_map_binned.get(
                                            s, ':heart_eyes:'))
                                    for s in QualityIO._get_sanger_quality_str(
                                        stats.quality_scores_maxs)
                                ]),
                                sep='\t')
                        # Mean is always printed (it is the default stat).
                        logging.info("Calculate mean quality per position")
                        print(stats.pretty(fasta_filename), "mean (binned)",
                              " ".join([
                                  emojify(
                                      emaps.fastq_emoji_map_binned.get(
                                          s, ':heart_eyes:'))
                                  for s in QualityIO._get_sanger_quality_str(
                                      stats.quality_scores_mean)
                              ]),
                              sep='\t')
                        if options.min:
                            logging.info(
                                "Calculate min quality per position")
                            print(
                                stats.pretty(fasta_filename), "min (binned)",
                                " ".join([
                                    emojify(
                                        emaps.fastq_emoji_map_binned.get(
                                            s, ':heart_eyes:'))
                                    for s in QualityIO._get_sanger_quality_str(
                                        stats.quality_scores_mins)
                                ]),
                                sep='\t')
                    else:
                        # Full (unbinned) emoji map.
                        if options.max:
                            logging.info(
                                "Calculate max quality per position")
                            print(
                                stats.pretty(fasta_filename), "max",
                                " ".join([
                                    emojify(
                                        emaps.fastq_emoji_map.get(
                                            s, ':heart_eyes:'))
                                    for s in QualityIO._get_sanger_quality_str(
                                        stats.quality_scores_maxs)
                                ]),
                                sep='\t')
                        # Mean is always printed (it is the default stat).
                        logging.info("Calculate mean quality per position")
                        print(stats.pretty(fasta_filename), "mean",
                              " ".join([
                                  emojify(
                                      emaps.fastq_emoji_map.get(
                                          s, ':heart_eyes:'))
                                  for s in QualityIO._get_sanger_quality_str(
                                      stats.quality_scores_mean)
                              ]),
                              sep='\t')
                        if options.min:
                            logging.info(
                                "Calculate min quality per position")
                            print(
                                stats.pretty(fasta_filename), "min",
                                " ".join([
                                    emojify(
                                        emaps.fastq_emoji_map.get(
                                            s, ':heart_eyes:'))
                                    for s in QualityIO._get_sanger_quality_str(
                                        stats.quality_scores_mins)
                                ]),
                                sep='\t')
                        #print("MAX: "," ".join([s for s in QualityIO._get_sanger_quality_str(stats.quality_scores_maxs)]))
                        #print("MEAN: "," ".join([s for s in QualityIO._get_sanger_quality_str(stats.quality_scores_mean)]))
                        #print("MIN: "," ".join([s for s in QualityIO._get_sanger_quality_str(stats.quality_scores_mins)]))
    else:
        logging.info("Processing FASTA file from stdin")
        stats = FastaStats().from_file(sys.stdin, options.minlen)
        print(stats.pretty("stdin"))
def print_line(text):
    """Replace emoji codes in `text` and print the result."""
    print(emojify(text))
import os
import shutil

import click
from pyemojify import emojify
from tqdm import tqdm

from catpossible.detector import cat_detector
from catpossible.utils import resolve_paths

# tqdm progress-bar template: percentage, bar, count, and remaining time.
CUSTOM_BAR_FORMAT = emojify(
    "{percentage:3.0f}% {bar} {n_fmt}/{total_fmt} :alarm_clock: {remaining}{postfix}"
)
# Postfix slots: {0} = cats found, {1} = rejected — presumably; confirm
# against the code that formats this string.
BAR_POSTFIX = emojify(":cat2: {0} :no_good: {1}")


@click.command()
@click.argument("source", metavar="PHOTO_DIR", type=click.Path(exists=True))
@click.option(
    "-d",
    "--dest",
    "destination",
    type=click.Path(),
    help="Destination for found cat photos (default: current dir).",
    default=".",
)
# NOTE(review): "-m/--move" as a single string is an unusual click option
# spec — confirm it registers both -m and --move as intended.
@click.option(
    "-m/--move",
    "move_not_copy",
    type=bool,
    help="Move found cat photos instead of copying.",
def __init__(self, x, y):
    """Create a fresh tile: hidden, unflagged, and not a bomb.

    NOTE(review): x and y are accepted but not stored — presumably the
    grid tracks position; confirm before removing them.
    """
    self.bomb = False
    self.visible = False
    self.flagged = False
    self.label = emojify(":black_small_square:")