import json

import click
import namesgenerator

import utils  # project-local helpers providing _login() and _tabulate(); adjust the import to match the package layout


def add_single(id, alias):
    """
    Add an alias to a single ID.
    """
    gi, cnfg, aliases = utils._login()
    if not alias:
        alias = namesgenerator.get_random_name()
    click.echo("Alias assigned to ID {}: ".format(id) + click.style(alias, bold=True))
    aliases[alias] = id
    _update_aliases(aliases)
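# _update_aliases is called throughout this module but its definition is not
# shown in this section. A minimal sketch of what it is assumed to do - persist
# the alias-to-ID map so later invocations can resolve aliases - follows. The
# ALIAS_FILE path and the JSON on-disk format are illustrative assumptions, not
# necessarily the project's actual implementation.
import os

ALIAS_FILE = os.path.expanduser('~/.gxwf/aliases.json')  # hypothetical location


def _update_aliases(aliases):
    """Write the alias-to-ID mapping back to disk (assumed JSON format)."""
    os.makedirs(os.path.dirname(ALIAS_FILE), exist_ok=True)
    with open(ALIAS_FILE, 'w') as f:
        json.dump(aliases, f, indent=2)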
def list_():
    """
    List all aliases currently assigned to IDs.
    """
    gi, cnfg, aliases = utils._login()
    alias, id = ['Alias'], ['ID']
    for a in aliases:
        alias.append(a)
        id.append(aliases[a])
    utils._tabulate([alias, id])
def upload(path, public, file_type):
    """
    Upload a workflow (.ga file) or a dataset to the Galaxy server.
    """
    gi, cnfg, aliases = utils._login()
    if path.endswith('.ga'):  # decide based on extension whether to upload as workflow or dataset. Is this sufficient?
        with open(path) as f:
            # quote from @bgruening: 'Only support the newer yaml based workflow files' - unclear what this refers to
            # wf_dict = yaml.safe_load(f)
            wf_dict = json.load(f)
        wf_dict.setdefault('tags', []).append('gxwf')  # tag so gxwf can recognise its own uploads
        gi.workflows.import_workflow_dict(wf_dict, publish=public)
        # could use import_workflow_from_local_path, but that would need a second call to add the gxwf tag:
        # gi.workflows.update_workflow(wf['id'], tags=wf['tags'] + ['gxwf'])
    else:
        ds_id = gi.tools.upload_file(path, cnfg['hid'], file_type=file_type)['outputs'][0]['id']
        gi.histories.update_dataset(cnfg['hid'], ds_id, tags=['gxwf'])
def delete(alias, all_):
    """
    Remove a single currently assigned alias with --alias, or all aliases with --all.
    """
    if bool(alias) == bool(all_):
        # we need exactly one of the two options; show the help text if neither or both were given
        click.echo(click.get_current_context().get_help())
        return
    gi, cnfg, aliases = utils._login()
    if all_:
        aliases = {}
    else:
        aliases.pop(alias, None)  # no-op if the alias does not exist
    _update_aliases(aliases)
def datasets(search, all):
    """
    List datasets in the current history, or in all histories with --all.
    """
    gi, cnfg, aliases = utils._login()
    aliases_inverted = {v: k for k, v in aliases.items()}  # needed to display aliases below
    if all:
        # replace all this rubbish with gi.datasets.get_datasets() when the PR is merged
        dataset_list = gi.datasets._get('?limit=1000000000000')
        # for h in gi.histories.get_histories():
        #     h_name = gi.histories.show_history(h['id'])['name']
        #     history_list = gi.histories.show_history(h['id'], contents=True)
        #     for dataset in history_list:
        #         dataset['history_name'] = h_name
        #     dataset_list += history_list
    else:
        dataset_list = gi.histories.show_history(cnfg['hid'], contents=True)

    for dataset in dataset_list:
        if 'gxwf' not in dataset['tags']:
            gi.histories.update_dataset(cnfg['hid'], dataset['id'], tags=['gxwf'])

    ds_name, ds_id, ds_alias, ds_ext = ['Dataset name'], ['ID'], ['Alias'], ['Extension']  # , ['History']
    for ds in dataset_list:
        if search and search not in ds.get('name', ''):
            continue
        if not ds.get('deleted') and ds.get('state') == 'ok':  # could show non-ok datasets too?
            ds_name.append(ds.get('name', ''))
            ds_id.append(ds.get('id', ''))
            ds_alias.append(aliases_inverted.get(ds.get('id'), ''))
            ds_ext.append(str(ds.get('extension', '')))
            # ds_hist.append(ds.get('history_name', ''))  # could hide this column when --all is not set
    utils._tabulate([ds_name, ds_ext, ds_id, ds_alias])  # , ds_hist])
def list_workflows(public, search):
    """
    List workflows, optionally filtered by a search term matching name or owner.
    """
    gi, cnfg, aliases = utils._login()
    aliases_inverted = {v: k for k, v in aliases.items()}  # needed to display aliases below
    workflows = gi.workflows.get_workflows(published=public)
    if search:
        workflows = [wf for wf in workflows if search in wf['name'] or search in wf['owner']]
    wf_name, wf_id, wf_alias, steps, owner = ['Workflow name'], ['ID'], ['Alias'], ['Steps'], ['Owner']
    # do we need separate ID/alias columns? Only if we make sure everything can be done via alias
    for wf in workflows:
        wf_name.append(wf['name'])
        wf_id.append(wf['id'])
        wf_alias.append(aliases_inverted.get(wf['id'], ''))
        steps.append(str(wf['number_of_steps']))
        owner.append(wf['owner'])
    utils._tabulate([wf_name, wf_id, wf_alias, steps, owner])
def add_all():
    """
    Add randomly generated aliases to all workflows and datasets which do not currently have one.
    """
    gi, cnfg, aliases = utils._login()
    workflow_ids = [wf['id'] for wf in gi.workflows.get_workflows()]
    dataset_ids = [ds['id'] for ds in gi.histories.show_history(cnfg['hid'], contents=True)]
    for id in workflow_ids + dataset_ids:
        if id not in aliases.values():  # we do not overwrite if an alias already exists
            while True:
                # one ID may have multiple aliases, but an alias must map to exactly one ID
                alias = namesgenerator.get_random_name()
                if alias not in aliases:
                    break
            click.echo("Alias assigned to ID {}: ".format(id) + click.style(alias, bold=True))
            aliases[alias] = id
    _update_aliases(aliases)
def invocations(id_):
    """
    Show the job states of workflow invocations, for a single workflow or for all workflows.
    """
    gi, cnfg, aliases = utils._login()
    if id_:
        # if the user provided an alias, resolve it to an ID; else assume they provided a raw ID
        id_ = aliases.get(id_, id_)
        invocations = gi.workflows.get_invocations(id_)  # will be deprecated, use the line below in future
        # invocations = gi.invocations.get_invocations(workflow_id=id_)
    else:
        # get all invocations - whether this is actually useful or not I don't know,
        # but you get to see a lot of pretty colours
        invocations = gi.invocations.get_invocations()

    state_colors = {
        'ok': 'green',
        'running': 'yellow',
        'error': 'red',
        'paused': 'cyan',
        'deleted': 'magenta',
        'deleted_new': 'magenta',
        'new': 'cyan',
        'queued': 'yellow',
    }
    for n, invocation in enumerate(invocations):
        click.echo(click.style("\nInvocation {}".format(n + 1), bold=True))
        summary = gi.invocations.get_invocation_summary(invocation['id'])  # fetch once per invocation, not per state
        step_no = 1
        for state in state_colors:
            count = summary['states'].get(state, 0)
            for k in range(count):
                click.echo(click.style(u'\u2B24' + ' Job {} ({})'.format(step_no + k, state),
                                       fg=state_colors[state]))
            step_no += count  # advance past this state's jobs; avoids a NameError when count is 0
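# How these functions are exposed on the command line is not shown in this
# section; the click decorators appear to have been stripped. A hedged sketch
# of the kind of wiring the delete command might use, with option names
# inferred from its docstring rather than taken from the actual source:
#
# @click.command()
# @click.option('--alias', default=None, help='Alias to remove.')
# @click.option('--all', 'all_', is_flag=True, help='Remove all aliases.')
# def delete(alias, all_):
#     ...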