def mediaing(ctx, prefix):
    if prefix and '%' in prefix:
        click.echo("Don't use wildcards in prefix", err=True)
        raise click.Abort()
    ctx.obj = {'prefix': prefix}
    # Patch the stdlib before importing anything that does network I/O.
    monkey.patch_all()
    from idigbio_ingestion import mediaing  # noqa
def main(fps, frame_addr, test_io):
    click.echo("Tesseract spidev LED driver running.")
    context = zmq.Context()
    frame_socket = context.socket(zmq.SUB)
    frame_socket.connect(frame_addr)
    frame_socket.setsockopt_string(zmq.SUBSCRIBE, u"")  # receive everything
    if test_io:
        tester = Tester(gsclk=14, blank=3, vprg=5, xlat=6, dcprg=7,
                        sin=12, sclk=14)
        tester.test_pins()
        return
    # 3 906 250 Hz is the maximum SPI bus speed at which we can
    # correctly clock data into the TLC chips. The factor of 4 was
    # empirically determined as a good balance between time spent
    # in Python code and time spent clocking data in the SPI (faster SPI
    # writes give faster rendering of each layer, but looping through the
    # layers more often means the LEDs appear less bright because they are
    # only lit while the SPI clock is being toggled).
    max_spispeed = 3906250
    spispeed = max_spispeed // 4
    tlcs = TLCs(tlcs=5, blank=3, vprg=5, xlat=6, dcprg=7,
                spibus=0, spidevice=0, spispeed=spispeed)
    fc = frame_utils.FrameConstants(fps=fps, ttype="tesseract")
    pwm_buffers = PWMBuffers(tlcs, fc)
    tlcs.init_tlcs()
    while True:
        rlist, _, elist = zmq.select([frame_socket], [], [frame_socket],
                                     timeout=0)
        if elist:
            click.echo("Frame socket error.", err=True)
            raise click.Abort()
        if rlist:
            data = frame_socket.recv()
            frame = np.frombuffer(data, dtype=frame_utils.FRAME_DTYPE)
            frame.shape = frame_utils.FRAME_SHAPE
            pwm_buffers.update(frame)
        for pwm_buffer in pwm_buffers.buffers:
            tlcs.write_pwm_packed(pwm_buffer)
    click.echo("Tesseract spidev LED driver exited.")
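A rough back-of-the-envelope check of the comment's tradeoff, assuming TLC5940-style chips with 16 channels of 12-bit PWM data each (the exact chip model is an assumption, not stated in the code):

# Illustrative timing sketch only; the 192-bits-per-chip figure assumes
# TLC5940-style parts and is not confirmed by the snippet above.
bits_per_layer = 5 * 16 * 12        # 5 chips x 16 channels x 12 bits = 960
spispeed = 3906250 // 4             # 976562 Hz, as chosen above
seconds_per_layer = bits_per_layer / spispeed
print(f"~{seconds_per_layer * 1e6:.0f} us of SPI clocking per layer")
# ~983 us per layer: a faster clock shrinks this, but since the LEDs are
# only lit while the clock toggles, shorter writes also mean dimmer output.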
def show_settings(ctx: click.Context, fmt: str = "json"):
    """Show the entire content of the settings file.

    Be aware that this will not obfuscate secret data.
    """
    if not os.path.isfile(ctx.obj["settings_path"]):
        raise click.ClickException(
            "No settings file found at {}".format(ctx.obj["settings_path"]))
    settings = load_settings(ctx.obj["settings_path"]) or {}
    if fmt == "json":
        click.echo(json.dumps(settings, indent=2))
    elif fmt == "yaml":
        click.echo(yaml.dump(settings, indent=2))
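The `load_settings` helper is not shown above; a minimal sketch of what it might look like, assuming a YAML settings file (both the YAML format and the helper's behavior are assumptions):

# Hypothetical sketch of the load_settings helper used above; the real
# implementation is not part of this snippet.
import yaml

def load_settings(path):
    with open(path) as f:
        return yaml.safe_load(f)  # returns None for an empty file,
                                  # hence the `or {}` at the call site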
def create(elastic_ip_id, email, rds_database, secret, hosted_zone_id):
    """This is a shortcut to create the Deis cluster, the addons server,
    and the main TigerHost server, in that order. This also configures
    the DNS for Deis and the main server.
    """
    if secret is None:
        secret = _get_secret()
    if elastic_ip_id is None:
        if not settings.DEBUG:
            echo_heading('Allocating a new Elastic IP.', marker='-',
                         marker_color='magenta')
            client = boto3.client('ec2')
            elastic_ip_id = client.allocate_address(Domain='vpc')['AllocationId']
            click.echo('Done. Allocation ID: {}'.format(elastic_ip_id))
        else:
            # not used anyways
            elastic_ip_id = 'dummy-ip-id'
    subprocess.check_call([settings.APP_NAME, 'deis', 'create'])
    subprocess.check_call([settings.APP_NAME, 'deis', 'configure-dns',
                           '--hosted-zone-id', hosted_zone_id])
    database_url = None
    addons_ip = None
    if rds_database:
        # TODO implement this
        raise click.ClickException('--rds-database is not implemented yet.')
    else:
        db_container_name = random_string(length=50)
        subprocess.check_call(
            [settings.APP_NAME, 'addons', 'create', '--database',
             db_container_name])
        addons_ip = docker_machine.check_output(
            ['ip', 'tigerhost-addons-aws']).strip()
        database_url = 'postgres://{name}@{ip}:5432/{name}'.format(
            name=db_container_name,
            ip=addons_ip,
        )
    subprocess.check_call(
        [settings.APP_NAME, 'main', 'create',
         '--database', database_url,
         '--elastic-ip-id', elastic_ip_id,
         '--secret', secret,
         ])
    subprocess.check_call([settings.APP_NAME, 'main', 'configure-dns',
                           '--elastic-ip-id', elastic_ip_id,
                           '--hosted-zone-id', hosted_zone_id])
    subprocess.check_call([settings.APP_NAME, 'deis', 'create-admin',
                           '--email', email])
def cli(rasters, output, feather_radius=4, resample_to=1,
        resample_method='bilinear'):
    if not 1 <= resample_to <= len(rasters):
        raise click.ClickException('--resample-to out of range')
    if feather_radius < 1:
        raise click.ClickException('--feather-radius must be a positive integer')
    resampling = Resampling[resample_method]
    srcs = [rasterio.open(f) for f in rasters]
    # Use the resolution of the chosen source (1-indexed on the CLI).
    res = srcs[resample_to - 1].res
    out_img, out_transform = merge(srcs, res=res, resampling=resampling)
    profile = srcs[0].profile
    profile.update(dtype=np.float32, nodata=np.nan,
                   height=out_img.shape[1], width=out_img.shape[2],
                   transform=out_transform)
    for src in srcs:
        src.close()
    with rasterio.open(output, 'w', **profile) as dst:
        dst.write(out_img.astype(np.float32))
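Note that `feather_radius` is validated above but never applied in the body. A minimal sketch of one way a feathered blend could work, using distance-transform weights; this is purely illustrative, not the tool's actual implementation:

# Hypothetical feathering helper; the real tool's blending is not shown.
import numpy as np
from scipy.ndimage import distance_transform_edt

def feather_weights(valid_mask, feather_radius):
    """Weights that ramp from 0 at a tile's edge to 1 at feather_radius
    pixels inside it. Multiplying each tile's pixels by its weights
    before summing overlaps (then normalizing by the summed weights)
    produces a smooth seam instead of a hard cut."""
    dist = distance_transform_edt(valid_mask)
    return np.clip(dist / feather_radius, 0.0, 1.0)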
def gen(domain, lang, input_file, input_dir):
    if input_dir and input_file:
        click.echo('Got both an input file and an input dir: {} {}'.format(
            input_file, input_dir), err=True)
        raise click.Abort()
    with open('{}/i18n.json'.format(os.getcwd())) as f:
        conf = json.load(f)
    locale_dir = conf['locale_dir']
    input_files = []
    if input_file:
        input_files.append(input_file)
    if input_dir:
        for root, _, files in os.walk(input_dir):
            input_files.extend([os.path.join(root, f) for f in files
                                if f.endswith('.py')])
    po_msgs = []
    for file in input_files:
        extracted_msgs = extract(file, '_')
        po_msgs.extend(extracted_msgs)
    po_msgs = list(set(po_msgs))
    po_entries = [polib.POEntry(msgid=msg, msgstr="") for msg in po_msgs]
    p = po.gen(po_entries, **conf['metadata'])
    po.save(p, domain, locale_dir, lang)
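The `extract` helper is not shown above; a plausible sketch using the standard library's ast module to collect the string literals passed to `_(...)` calls (the real helper may work differently, e.g. via babel):

# Hypothetical sketch of the extract() helper used above.
import ast

def extract(path, func_name):
    """Return the string literals passed to calls of func_name,
    e.g. every msg in _("msg"), found in the given Python file."""
    with open(path) as f:
        tree = ast.parse(f.read(), filename=path)
    msgs = []
    for node in ast.walk(tree):
        if (isinstance(node, ast.Call)
                and isinstance(node.func, ast.Name)
                and node.func.id == func_name
                and node.args
                and isinstance(node.args[0], ast.Constant)
                and isinstance(node.args[0].value, str)):
            msgs.append(node.args[0].value)
    return msgs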
def user_param_to_user(value):
    from sentry.utils.auth import find_users
    users = find_users(value)
    if not users:
        raise click.ClickException('No user matching `{}`'.format(value))
    if len(users) > 1:
        raise click.ClickException(
            'Found more than one user matching `{}`'.format(value))
    user = users[0]
    if not user.is_superuser:
        raise click.ClickException(
            'User `{}` does not have superuser status'.format(user.username))
    return user
def parse_yaml(file):
    try:
        import yaml
    except ImportError:
        raise click.ClickException('Have you installed PyYAML?')
    try:
        with open(file, 'rb') as f:
            # safe_load avoids executing arbitrary YAML tags.
            return yaml.safe_load(f.read())
    except IOError:
        raise click.ClickException("Yaml configuration not found.")
    except yaml.parser.ParserError:
        raise click.ClickException("Invalid Yaml.")
def generate_sentences(token: str, output_filename: str, online: bool):
    """Generates sentences."""
    if online:
        if not token:
            raise click.ClickException("Missing token")
        notion_client = GermanBankNotionClient(token)
        nouns = notion_client.get_bank_nouns()
        verbs = [
            verb
            for verb in notion_client.get_bank_verbs()
            if all([
                verb.conj_ich_1ps,
                verb.conj_du_2ps,
                verb.conj_er_3ps,
                verb.conj_wir_1pp,
                verb.conj_ihr_2pp,
                verb.conj_sie_3pp,
            ]) and 'generate' in verb.tags
        ]
    else:
        nouns = BANK_NOUNS
        verbs = BANK_VERBS

    deck = genanki.Deck(
        deck_id=1878326705,  # Hard-coded value selected by me
        name="German::Grammar",
    )
    added_count = 0
    basic_sentence = BasicSentence.make_random(nouns, verbs)

    def _rotate(basic_sentence: BasicSentence) -> BasicSentence:
        try:
            res = basic_sentence.rotate()
            click.echo("Rotated!")
            click.echo("")
        except StopIteration:
            res = basic_sentence.first()
            click.echo("Rotated! (back to beginning!)")
            click.echo("")
        except MissingGermanPluralWord:
            res = BasicSentence.make_random(nouns, verbs)
            click.echo("No plural! New word!")
            click.echo("")
        return res

    # TODO: sometimes add an adjective?
    # TODO: make questions?
    while True:
        note = basic_sentence.to_anki_note()
        click.echo(f"{note.fields[1]} ({note.fields[3]})")
        click.echo(note.fields[0])
        click.echo("")
        try:
            response = input("Add? [y/R/n] ")
        except KeyboardInterrupt:
            click.echo("Exiting!")
            click.echo("")
            break

        if response == 'y':
            deck.add_note(note)
            added_count += 1
            click.echo("Added!")
            click.echo("")
            basic_sentence = _rotate(basic_sentence)
        elif response in ('r', 'R', ''):
            basic_sentence = _rotate(basic_sentence)
        elif response == 'n':
            basic_sentence = BasicSentence.make_random(nouns, verbs)
            click.echo("New sentence!")
            click.echo("")

    if added_count:
        genanki.Package([deck]).write_to_file(output_filename)
        click.echo(
            f"Complete! Added {added_count} cards. Now import "
            f"{output_filename} to Anki, fix any changes, and sync Anki "
            "to AnkiCloud."
        )
def wrapper(root_input_path, working_directory_path, summary_mail, error_mail):
    """
    Wrapper for finding and running single runs.

    The supplied root path is searched for runs containing .fastq or
    .fastq.gz files directly below it; there is no recursion into lower
    levels.
    """
    # Set up the wrapper log
    wrapper_log_name = 'wrapper.log'
    wrapper_log_path = os.path.join(working_directory_path, wrapper_log_name)
    logger = setup_logger('wrap', log_path=wrapper_log_path)

    # Should be a non-manual directory, as no filtering is done
    run_paths = glob.glob(os.path.join(root_input_path, '*'))
    if not run_paths:
        logger.info('No new runs detected. Exiting.')
        raise click.Abort()

    logger.info('Starting processing of runs')
    run_summary = RunSummary()  # Keep track of fail/success of runs
    for run_path in run_paths:
        run_context = RunContext(run_path)

        # Check if the run is fully transferred to seqstore
        if not run_context.has_finished_dump():
            logger.warning(
                f'Run {run_context.run_name} has not yet finished transferring.')
            continue

        # Set up the run log
        run_log_path = os.path.join(working_directory_path,
                                    run_context.run_name + '.log')

        # Begin reformatting, upload, and mailing of results
        try:
            cleanup = True
            test = False
            logger.info(f'Calling on runner for {run_context.run_name}')
            run_state = run_single(run_path, working_directory_path,
                                   run_log_path, cleanup, test)
            if run_state is RunState.finished:
                logger.info('Run state returned as success')
                run_summary.add_success_run(run_context.run_name)
            else:
                logger.warning(
                    f'Run state returned as unsuccessful: {run_state.value}')
                run_summary.add_incomplete_run(
                    run_context.run_name, run_state.value)  # Value is a str message
        except Exception:
            run_summary.add_error_run(run_context.run_name)
            format_exc = traceback.format_exc()
            logger.error(f'Crashed running of {run_path}')
            logger.error(format_exc)

            # Send an error mail if an address was supplied
            if error_mail:
                logger.info(f'Sending error mail to {error_mail}')
                subject = 'Error! Microbiology S5 automatic GENSAM upload.'
                body = '\n'.join([
                    f'Crash for run: {run_context.run_name}',
                    '',
                    f'Run path: {run_context.run_path}',
                    f'Wrapper log path: {wrapper_log_path}',
                    f'Run log path: {run_log_path}',
                    '',
                    'Error:',
                    format_exc
                ])
                send_email(subject, body, error_mail)

    # Send a summary mail if an address was supplied and new runs were found
    if summary_mail and run_summary.contains_runs():
        logger.info(f'Sending summary mail to {summary_mail}')
        subject = 'Summary of Microbiology S5 automatic GENSAM upload.'
        body = '\n'.join([
            'Successfully uploaded the following runs:',
            '\n'.join(run_summary.success_runs),
            '',
            'Incomplete uploads for the following runs:',
            '\n'.join([f'{r[0]}: {r[1]}'
                       for r in run_summary.incomplete_runs]),
            '',
            'Failed uploading the following runs:',
            '\n'.join(run_summary.error_runs)
        ])
        send_email(subject, body, summary_mail)

    logger.info('Finished processing runs')
def exit():
    # click has no module-level abort(), close(), or exit(); terminate
    # through the current context instead.
    ctx = click.get_current_context()
    ctx.close()
    ctx.exit(0)
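Since several snippets above mis-call a nonexistent `click.abort()`, here is a short reference sketch of the termination idioms click actually provides; which one fits depends on whether the exit signals an error (the command name and option below are invented for illustration):

# Minimal demo of click's real termination APIs.
import click

@click.command()
@click.option('--fail', is_flag=True)
@click.pass_context
def demo(ctx, fail):
    """Demonstrates the three standard ways to end a click command."""
    if fail:
        # Prints "Error: <message>" to stderr and exits with code 1.
        raise click.ClickException('demo failure')
    if not click.confirm('Continue?'):
        # Prints "Aborted!" to stderr and exits with code 1.
        raise click.Abort()
    # Exits silently with the given status code.
    ctx.exit(0)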