def main():
    """Touch (empty-patch) each requested item and report successes and failures."""
    args = get_args()
    try:
        auth = ff.get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    print("Working on {}".format(auth.get('server')))
    seen = []
    failed = []
    for itemid in scu.get_item_ids_from_args(args.input, auth, args.search):
        print("Touching ", itemid)
        if not args.dbupdate:
            print('dry run!')
            continue
        try:
            res = ff.patch_metadata({}, itemid, auth)
            print(res.get('status'))
            if res.get('status') == 'success':
                seen.append(itemid)
        except Exception:
            print(itemid, ' failed to patch')
            failed.append(itemid)
    # summary: successfully touched ids, then failures
    for i in seen:
        print(i)
    print("Failures")
    for f in failed:
        print(f)
def main():  # pragma: no cover
    """Copy every FileVistrack upload from the upload bucket to the processed-file bucket."""
    args = get_args(sys.argv[1:])
    try:
        auth = get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    # bucket addresses come from the portal health page
    ff_health = get_metadata('/health', auth)
    source_bucket = ff_health['file_upload_bucket']
    target_bucket = ff_health['processed_file_bucket']
    s3 = boto3.resource('s3')
    # gather the upload keys of all FileVistrack items
    query = 'type=FileVistrack'
    uids = scu.get_item_ids_from_args([query], auth, True)
    files2copy = [get_metadata(uid, auth).get('upload_key') for uid in uids]
    for file_key in files2copy:
        try:
            s3.meta.client.copy({'Bucket': source_bucket, 'Key': file_key},
                                target_bucket, file_key)
        except Exception:
            # best-effort: missing source objects are reported and skipped
            print('Can not find file on source', file_key)
            continue
        print('{} file copied'.format(file_key))
def main():
    """Post a meta-only workflow run linking each file's produced_from parents to it."""
    args = get_args(sys.argv[1:])
    try:
        auth = get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    dryrun = not args.dbupdate
    file_list = scu.get_item_ids_from_args(args.input, auth, args.search)
    wf_data = get_metadata(args.workflow, auth)
    for fid in file_list:
        file_info = get_metadata(fid, auth)
        parents = file_info.get('produced_from')
        if not parents:
            # nothing to link for files without provenance
            continue
        inputs = [get_metadata(p, auth) for p in parents]
        wfr_json = create_wfr_meta_only_json(auth, wf_data, inputs, [file_info])
        if dryrun:
            print('DRY RUN -- will post')
            print(wfr_json)
        else:
            res = post_metadata(wfr_json, 'workflow_run_awsem', auth)
            # and add a notes_to_tsv to the file
            patchstatus = add_notes_to_tsv(file_info, auth)
            print(res)
            print(patchstatus)
def test_get_item_ids_from_search(mocker, auth, items_w_uuids):
    """Every id resolved from a search should be one of the known uuids."""
    known_ids = ['a', 'b', 'c']
    mocker.patch('functions.script_utils.search_metadata', return_value=[])
    mocker.patch('functions.script_utils.get_metadata', return_value=items_w_uuids)
    result = scu.get_item_ids_from_args('search', auth, True)
    # NOTE: vacuously true if result is empty -- same check as the original loop
    assert all(i in known_ids for i in result)
def main():
    """Print the id of every item matching the given search query."""
    args = get_args()
    try:
        auth = ff.get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    for itemid in scu.get_item_ids_from_args([args.query], auth, True):
        print(itemid)
def find_and_patch_item_references(auth, olduuid, newuuid, dryrun):
    """Repoint every item referencing olduuid at newuuid.

    Returns True only when all patches succeeded (or no referrers exist).
    """
    query = "type=Item&references.uuid=" + olduuid
    referrers = scu.get_item_ids_from_args([query], auth, True)
    if not referrers:
        print("No references to %s found." % olduuid)
    complete = True
    for iid in referrers:
        if not patch_and_report(auth, {'references': [newuuid]}, None, iid, dryrun):
            complete = False
    return complete
def main():  # pragma: no cover
    """Add a tag to the given items, optionally including everything they link to."""
    args = get_args()
    try:
        auth = get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    itemids = scu.get_item_ids_from_args(args.input, auth, args.search)
    # which item types can carry a 'tags' field, minus explicit exclusions
    taggable = scu.get_types_that_can_have_field(auth, 'tags')
    if args.types2exclude is not None:
        taggable = [t for t in taggable if t not in args.types2exclude]
    seen = []       # a tag only needs adding once per item
    to_patch = {}   # itemid -> patch body to apply
    for itemid in itemids:
        if args.taglinked:
            # tag the item plus its linked items of taggable types
            linked = scu.get_linked_items(auth, itemid, {})
            items2tag = scu.filter_dict_by_value(linked, taggable, include=True)
        else:
            # tag only the provided item, if its type supports tags
            itype = scu.get_item_type(auth, itemid)
            items2tag = {itemid: itype} if itype in taggable else {}
        for iid in items2tag:
            if iid in seen:
                continue
            seen.append(iid)
            item = get_metadata(iid, auth)
            if not scu.has_field_value(item, 'tags', args.tag):
                # not yet tagged with this tag -- queue a patch
                to_patch[iid] = make_tag_patch(item, args.tag)
    # apply (or report) the queued patches
    for pid, patch in to_patch.items():
        if args.dbupdate:
            pres = patch_metadata(patch, pid, auth)
            print(pres['status'])
        else:
            print("DRY RUN: patch ", pid, " with ", patch)
def main():  # pragma: no cover
    """Move each item's processed_files into an other_processed_files group."""
    args = get_args(sys.argv[1:])
    try:
        auth = get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    print('#', auth.get('server'))
    for itemid in scu.get_item_ids_from_args(args.input, auth, args.search):
        item_data = get_metadata(itemid, auth, add_on='frame=raw')
        pfiles = item_data.get('processed_files')
        if not pfiles:
            continue
        groups = item_data.get('other_processed_files', [])
        # skip items that already have a group with the requested title
        if groups and args.title in [g['title'] for g in groups]:
            print(itemid, 'already has preliminary results')
            continue
        groups.append({
            'title': args.title,
            'type': 'preliminary',
            'files': pfiles
        })
        patch = {'other_processed_files': groups}
        if args.dbupdate:
            # patch the new grouping and drop the old processed_files field
            res = patch_metadata(patch, obj_id=itemid, key=auth,
                                 add_on='delete_fields=processed_files')
            print(res.get('status'))
        else:
            print("DRY RUN -- will patch")
            print(patch)
            print('and delete processed_files field value')
def main():
    """Report every item linked to the inputs, flagging released and repeated ids."""
    args = get_args(sys.argv[1:])
    try:
        auth = get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    itemids = scu.get_item_ids_from_args(args.input, auth, args.search)
    excluded_types = get_excluded(args.types2exclude, args.types2include)
    # default types whose children are not followed
    # NOTE(review): no_child is computed but never used below -- confirm intent
    no_child = ['Publication', 'Lab', 'User', 'Award']
    if args.no_children:
        no_child.extend(args.no_children)
    no_child = list(set(no_child))
    all_linked_ids = []
    for itemid in itemids:
        linked = scu.get_linked_items(auth, itemid, {})
        if excluded_types is not None:
            linked = scu.filter_dict_by_value(linked, excluded_types, include=False)
        # report items grouped by type (sorted on the type value)
        for iid, itype in sorted(linked.items(), key=lambda kv: kv[1]):
            suff = ''
            if iid == itemid:
                suff = '\tINPUT'
            if is_released(iid, auth):
                suff = '\tRELEASED' + suff
                if not args.include_released:
                    print(iid, '\t', itype, '\tSKIPPING', suff)
                    continue
            if iid not in all_linked_ids:
                all_linked_ids.append(iid)
            else:
                suff = suff + '\tSEEN'
            print(iid, '\t', itype, suff)
    for a in all_linked_ids:
        print(a)
def main():
    """Patch one field to a given value (or delete it) on every requested item."""
    args = get_args(sys.argv[1:])
    try:
        auth = get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    print("Working on {}".format(auth.get('server')))
    itemids = scu.get_item_ids_from_args(args.input, auth, args.search)
    field = args.field
    val = args.value
    # command-line boolean strings become real booleans
    if val == 'True':
        val = True
    elif val == 'False':
        val = False
    if args.isarray:
        # array values arrive single-quote delimited, e.g. "'a''b'"
        val = [v for v in val.split("'") if v]
    ntype = args.numtype
    if ntype:
        if ntype == 'i':
            val = int(val)
        elif ntype == 'f':
            val = float(val)
    for iid in itemids:
        print("PATCHING", iid, "to", field, "=", val)
        if not args.dbupdate:
            continue
        # the '*delete*' sentinel removes the field instead of setting it
        if val == '*delete*':
            res = delete_field(iid, field, auth)
        else:
            res = patch_metadata({args.field: val}, iid, auth)
        if res['status'] == 'success':
            print("SUCCESS!")
        else:
            print("FAILED TO PATCH", iid, "RESPONSE STATUS", res['status'], res['description'])
def test_get_item_ids_from_list(auth):
    """Ids passed as an explicit list should all come back in the result."""
    known_ids = ['a', 'b', 'c']
    result = scu.get_item_ids_from_args(known_ids, auth)
    assert all(i in known_ids for i in result)
def main():  # pragma: no cover
    """Propagate a release tag from DataReleaseUpdate items to their ExperimentSets,
    experiments, and files, counting everything tagged or skipped."""
    args = get_args()
    dbupdate = args.dbupdate
    try:
        auth = get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    cnts = Counter()  # per-type tally of items encountered/tagged
    reltag = args.reltag
    # build the search query string
    query = 'type=DataReleaseUpdate&update_tag=' + reltag
    relupdates = scu.get_item_ids_from_args([query], auth, True)
    # collect the primary ids named by each release update
    update_items = []
    for u in relupdates:
        res = get_metadata(u, auth)
        for ui in res.get('update_items'):
            if ui.get('primary_id'):
                update_items.append(ui['primary_id'])
    seen = []  # uuids already tagged; add_tag2item appears to append to this
    # update_items = ['experiment-set-replicates/4DNESOI2ALTL']
    for item in update_items:
        res = get_metadata(item, auth)
        uid = res.get('uuid')
        # NOTE(review): 'type' shadows the builtin of the same name
        type = get_attype(res)
        cnts[type] += 1
        if (not uid) or (uid in seen) or ('ExperimentSet' not in type):
            # case for first freeze (no processed files included)
            print("SKIPPING ", uid)
            cnts['skipped'] += 1
            continue
        add_tag2item(auth, uid, reltag, seen, cnts, type, dbupdate)
        # always true here given the guard above
        if 'ExperimentSet' in type:
            # get the experiments and files
            exps = res.get('experiments_in_set')
            if exps is not None:
                cnts['Experiment'] += len(exps)
                for exp in exps:
                    # import pdb; pdb.set_trace()
                    add_tag2item(auth, exp, reltag, seen, cnts, 'Experiment', dbupdate)
                    files = exp.get('files')
                    if files is not None:
                        cnts['FileFastq'] += len(files)
                        for file in files:
                            # NOTE(review): rebinding 'file' with the helper's
                            # return value -- confirm add_tag2item returns it
                            file = add_tag2item(auth, file, reltag, seen, cnts, 'FileFastq', dbupdate)
                    epfiles = exp.get('processed_files')
                    # epfiles = None  # case for first freeze (no processed files included)
                    if epfiles is not None:
                        cnts['FileProcessed'] += len(epfiles)
                        for epf in epfiles:
                            add_tag2item(auth, epf, reltag, seen, cnts, 'FileProcessed', dbupdate)
            # check the processed files directly associated to the eset
            # pfiles = res.get('procesed_files')
            # hard-coded off: the following block is currently dead code
            pfiles = None  # case for first freeze (no processed files included)
            if pfiles is not None:
                cnts['FileProcessed'] += len(pfiles)
                for pf in pfiles:
                    add_tag2item(auth, pf, reltag, seen, cnts, 'FileProcessed', dbupdate)
    print(cnts)
def main():  # pragma: no cover
    """Dump requested fields (or the whole object frame) of each item as
    tab-separated lines, reporting ids whose metadata could not be fetched."""
    args = get_args()
    try:
        auth = get_authentication_with_server(args.key, args.env)
    except Exception:
        print("Authentication failed")
        sys.exit(1)
    print('#', auth.get('server'))
    id_list = scu.get_item_ids_from_args(args.input, auth, args.search)
    if args.fields:
        fields = args.fields
        header = '#id\t' + '\t'.join(fields)
        if args.noid is True:
            # drop the id column from the header when ids are suppressed
            header = header.replace('#id\t', '#')
        print(header)
    problems = []  # ids whose metadata fetch raised
    for iid in id_list:
        try:
            res = get_metadata(iid, auth, add_on='frame=object')
        except Exception:
            problems.append(iid)
            continue
        if args.fields:
            line = ''
            # counts = {}
            for f in fields:
                val = res.get(f)
                # if val is not None:  # added in for specific use case
                if isinstance(val, dict):
                    # linked items are reduced to their uuid
                    val = val.get('uuid')
                elif isinstance(val, list):
                    # counts[f] = len(val)  # added in for specific use case
                    # if len(counts) > 1:
                    # print(iid, '\t', counts)
                    # else:
                    # cnt = list(counts.values())[0]
                    # if cnt > 1:
                    # print(iid, '\t', cnt)
                    # lists become a comma-joined string of uuids/values
                    vs = ''
                    for v in val:
                        if isinstance(v, dict):
                            v = v.get('uuid')
                        else:
                            v = str(v)
                        vs = vs + v + ', '
                    val = vs
                    if val.endswith(', '):
                        val = val[:-2]
                line = line + str(val) + '\t'
            # NOTE(review): string comparison with 'False' is inconsistent with
            # the 'is True' checks above -- confirm the argparse default type
            if args.noid == 'False':
                line = iid + '\t' + line
            print(line)
        else:
            if args.noid is True:
                print(res)
            else:
                print(iid, '\t', res)
    if problems:
        print('THERE WAS A PROBLEM GETTING METADATA FOR THE FOLLOWING:')
        for p in problems:
            print(p)