def main(**kwargs):
    """Find view definition files in TARGET and execute them."""
    args = SimpleNamespace(**kwargs)
    client = bigquery.Client()

    # set log level
    try:
        logging.basicConfig(level=args.log_level, format="%(levelname)s %(message)s")
    except ValueError as e:
        click.error(f"argument --log-level: {e}")

    worker_entrypoint = partial(_process_file, client, args)
    sql_files = []
    for target in args.target:
        if os.path.isdir(target):
            for root, dirs, files in os.walk(target):
                dataset_dir = os.path.dirname(root)
                if not args.user_facing_only or not dataset_dir.endswith(
                    NON_USER_FACING_DATASET_SUFFIXES
                ):
                    if "view.sql" in files:
                        sql_files.append(os.path.join(root, "view.sql"))
        else:
            sql_files.append(target)

    with ThreadPool(args.parallelism) as p:
        result = p.map(worker_entrypoint, sql_files, chunksize=1)

    if all(result):
        exitcode = 0
    else:
        exitcode = 1
    sys.exit(exitcode)
def publish(
    name,
    sql_dir,
    project_id,
    target_project,
    log_level,
    parallelism,
    dry_run,
    user_facing_only,
):
    """Publish views."""
    # set log level
    try:
        logging.basicConfig(level=log_level, format="%(levelname)s %(message)s")
    except ValueError as e:
        click.error(f"argument --log-level: {e}")

    view_files = paths_matching_name_pattern(
        name, sql_dir, project_id, files=("view.sql",)
    )

    views = [View.from_file(f) for f in view_files]
    views = [v for v in views if not user_facing_only or v.is_user_facing]

    with ThreadPool(parallelism) as p:
        publish_view = functools.partial(_publish_view, target_project, dry_run)
        result = p.map(publish_view, views, chunksize=1)

    if not all(result):
        sys.exit(1)

    click.echo("All have been published.")
def create(filepath):
    option_val = '%s' % filepath
    if not option_val:
        click.error("file path should be specified")
    error, json = CmdFactory("create").make_requests()
    if error:
        click.echo("error occurred {}".format(json))
        return
    if json:
        click.echo("file created")
def cmd_waveforms(ctx, waveform, velocity, current, name):
    '''
    Convert steps to electrode current as a function of time
    '''
    import numpy
    import larf.store
    import larf.config
    import larf.util
    from larf.models import Result, Array

    if not waveform:
        waveform = name

    cfg = ctx.obj['cfg']
    tocall = larf.config.methods_params(cfg, 'waveform %s' % waveform)

    ses = ctx.obj['session']

    velores = larf.store.result_typed(ses, 'velocity', velocity)
    varrs = velores.array_data_by_type()
    velo = varrs['gvector']
    vgrid = varrs['mgrid']

    curres = larf.store.result_typed(ses, 'raster', current)
    carr = curres.array_data_by_type()
    cfield = carr['gscalar']
    cgrid = carr['mgrid']

    if velo[0].shape != cfield.shape:
        click.error("Velocity and current fields have incompatible shapes.")
        return 1
    if not numpy.all(vgrid == cgrid):
        click.error("Velocity and current fields have incompatible grids.")
        return 1

    # fixme: allow multiple
    methname, params = tocall[0]
    meth = larf.util.get_method(methname)

    par = ctx.obj['params']
    params.update(par)

    pts, waveforms = meth(velo, cfield, vgrid, **params)

    res = Result(name=name, type='waveforms', parents=[velores, curres],
                 params=dict(method=methname, params=params),
                 arrays=[
                     Array(name='points', type='path', data=pts),
                     # fixme: pscalar is wrong type
                     Array(name='current', type='pscalar', data=waveforms)])
    ses.add(res)
    ses.flush()
    resid = res.id
    ses.commit()

    announce_result('waveforms', res)
    return
def verify_docker_reachable():
    try:
        p = subprocess.run(
            ["docker", "ps"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            check=True,
        )
    except:
        # p may be unbound if subprocess.run itself raised; the inner
        # try/except swallows the resulting NameError.
        try:
            click.error(p.stderr)
        except:
            pass
        raise click.ClickException(
            "Unable to verify docker is installed and reachable. Is it?"
        )
def newapp(path):
    """
    Generates all files for a new vodka app at the specified location.
    Will generate to current directory if no path is specified
    """
    app_path = os.path.join(VODKA_INSTALL_DIR, "resources", "blank_app")

    if not os.path.exists(path):
        os.makedirs(path)
    elif os.path.exists(os.path.join(path, "application.py")):
        click.error(
            "There already exists a vodka app at %s, please specify a different path"
            % path
        )

    os.makedirs(os.path.join(path, "plugins"))

    shutil.copy(os.path.join(app_path, "application.py"),
                os.path.join(path, "application.py"))
    shutil.copy(os.path.join(app_path, "__init__.py"),
                os.path.join(path, "__init__.py"))
    shutil.copy(os.path.join(app_path, "plugins", "example.py"),
                os.path.join(path, "plugins", "example.py"))
    shutil.copy(os.path.join(app_path, "plugins", "__init__.py"),
                os.path.join(path, "plugins", "__init__.py"))
def build_pip_module_from_dir(self, source_dir):
    if not build_cache_helpers.has_build_hash_changed_for_path(
            self.build_cache_key, source_dir):
        click.echo(
            "Skipping module: {}. No change since last build.".format(source_dir))
        return

    click.echo("Building pip module: {}".format(source_dir))

    pip_build_args = [
        sys.executable, "-u", "setup.py", "-q", "sdist",
        "--dist-dir", os.path.abspath(self.output_directory)
    ]

    previous_working_dir = os.getcwd()
    os.chdir(source_dir)
    try:
        p = subprocess.run(pip_build_args,
                           check=True,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
    except Exception as e:
        try:
            click.error(p.stderr)
        except:
            pass
        raise
    finally:
        os.chdir(previous_working_dir)

    build_cache_helpers.write_build_hash_for_path(self.build_cache_key, source_dir)
def main(query, queryfile, tokenfile, render, show, user, debug, bz_id, dump,
         restore, log_level, dump_query):
    if log_level:
        if log_level == 'debug':
            logger.setLevel(logging.DEBUG)
        elif log_level == 'info':
            logger.setLevel(logging.INFO)
        else:
            logger.setLevel(logging.CRITICAL)

    adviser = Adviser(debug)
    if 'list' in show:
        click.echo(adviser)
        sys.exit()
    for advice in show:
        real_name_advice = advice
        if advice.find('no-') == 0:
            real_name_advice = advice[3:]
        if real_name_advice not in adviser.available_advices:
            raise click.UsageError(
                "Advice \"{}\" is not available.\n{}".format(advice, adviser))

    if 'list' in render:
        click.echo(Renderer(None).list_str())
        sys.exit()
    renderer_list = Renderer(None).list()
    for display in render:
        if display not in renderer_list:
            raise click.UsageError(
                "Render \"{}\" is not available.\n{}".format(
                    display, Renderer(None).list_str()))

    if dump_query:
        if bz_id:
            query = Query.from_bz(bz_id)
            click.echo("[{}]\nurl = {}\ndocumentation = \nextra = {}".format(
                dump_query, query.bz_query_url,
                ','.join([str(bz) for bz in bz_id])))
            sys.exit()
        else:
            raise click.UsageError("dump-query works only with --bz-id option.")

    if restore:
        bzc = BzCollector.from_pickle(restore)
        bzs = bzc._bugs
        if bz_id:
            bzs = [bz for bz in bzs if bz.id in bz_id]
        if user:
            bzs = [bz for bz in bzs if bz.assigned_to == user]
    elif bz_id:
        query = Query.from_bz(bz_id)
        q_json = query.request()
        bzc = BzCollector(tokenfile)
        bzs = bzc.bugs(q_json)
    else:
        if not queryfile:
            raise click.UsageError("You must give a filter file.")
        queryfile = os.path.abspath(queryfile)
        if not query:
            raise click.UsageError(
                "You must give a query to choose from queryfile.")
        if query == 'list':
            click.echo(QueryCollector.from_file(queryfile).list_str())
            sys.exit()
        if query not in QueryCollector.from_file(queryfile).list():
            raise click.UsageError("Query \"{}\" is not available.\n{}".format(
                query, QueryCollector.from_file(queryfile).list_str()))
        qcollector = QueryCollector.from_file(queryfile)
        extra_fields = {}
        if user:
            extra_fields.update({'assigned_to': user})
        query = qcollector.select(query)
        q_json = query.request(extra_fields)
        if not q_json:
            click.error('{} is not part of {}'.format(q_json, queryfile))
        bzc = BzCollector(tokenfile, query.dfg)
        bzs = bzc.bugs(q_json)

    if dump:
        bzc.to_pickle(dump)

    for bz in bzs:
        adviser.advice(bz)

    selected_bz = []
    filter_removal = []
    for f in show:
        real_name_advice = f
        if f.find('no-') == 0:
            filter_removal += [f[3:]]
        else:
            selected_bz += [
                b for b in bzs if getattr(b, '_{}'.format(f), False)
            ]
    if not selected_bz and not show:
        selected_bz = bzs
    for f in filter_removal:
        selected_bz = [
            b for b in selected_bz if not getattr(b, '_{}'.format(f), False)
        ]

    renderer = Renderer(selected_bz)
    for display in render:
        if len(render) > 1:
            click.echo("{} ======".format(display))
        getattr(renderer, 'r_{}'.format(display))()
def add_error_message(self, line):
    click.error(line)
async def run(command_cfg, master_cfg):
    try:
        await upgradeDatabase(command_cfg, master_cfg)
    except Exception as e:
        click.error(e)
def push_scripts(instance, scripts, config_only=True):
    """
    Push selected scripts to instance.

    - instance - instance object to push scripts to
    - scripts - a list of dictionaries containing configurations for scripts
    """
    click.echo('INFO: Pushing scripts')
    click.echo('INFO: Pulling remote scripts')

    endpoints = {}
    remote_scripts_mapping = defaultdict(list)
    for remote_script in instance.scripts.all():
        remote_scripts_mapping[remote_script.label].append(remote_script)

    existing_endpoints = defaultdict(list)
    for endpoint in instance.script_endpoints.all():
        existing_endpoints[endpoint.script].append(endpoint.name)
        endpoints[endpoint.name] = endpoint

    click.echo('INFO: Pushing local scripts')
    for s in scripts:
        if s['label'] in remote_scripts_mapping:
            remote_count = len(remote_scripts_mapping[s['label']])
            if remote_count > 1:
                click.error('ERROR: You have {0} scripts with label {1} on'
                            ' syncano. Skipping'.format(remote_count, s['label']))
                continue
            remote_script = remote_scripts_mapping[s['label']][0]
        else:
            remote_script = instance.scripts.model(label=s['label'],
                                                   runtime_name=s['runtime'],
                                                   instance_name=instance.name,
                                                   config={})

        with open(s['script'], 'rb') as source:
            remote_script.source = source.read()

        config = s.get('config', {})
        remote_script.config.update(config)

        click.echo('INFO: Pushing script {label}'.format(**s))
        remote_script.save()

        existing_set = {name for name in existing_endpoints[remote_script.id]}
        script_endpoints = set(s.get('endpoints', []))

        new_endpoints = script_endpoints - existing_set
        old_endpoints = existing_set - script_endpoints

        for name in old_endpoints:
            endpoints[name].delete()

        for name in new_endpoints:
            endpoint = instance.script_endpoints.model(
                instance_name=instance.instance_name,
                name=name,
                script=remote_script.id)
            try:
                endpoint.save()
            except SyncanoRequestError as e:
                raise ValueError(
                    'Error when saving endpoint "{0}" for script "{1}": {2}.'.format(
                        name, remote_script.label, e.message))
def _error(msg):
    click.error(msg)
    sys.exit(1)
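# click itself does not provide an error() helper (its public API includes
# echo, secho, ClickException, and UsageError), so the click.error(...) calls
# above would raise AttributeError at runtime. A minimal sketch of an
# equivalent stderr helper, assuming the intent is simply "report the message
# and abort"; the helper name and the exit code 1 are illustrative, not taken
# from any of the snippets above.
import sys

import click


def _error(msg):
    """Print msg to stderr in red and exit with a non-zero status."""
    click.secho(str(msg), fg="red", err=True)
    sys.exit(1)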