def main():  # pragma: no cover
    """Dump items (or selected fields of items) named by the command-line args.

    Prints a tab-separated header plus one line per item.  With --fields only
    those fields are printed; with --noid the leading id column is omitted.
    Exits with status 1 if the connection cannot be established.
    """
    args = get_args()
    try:
        connection = fdn_connection(args.keyfile, keyname=args.key)
    except Exception:
        print("Connection failed")
        sys.exit(1)
    id_list = scu.get_item_ids_from_args(args.input, connection, args.search)
    if args.fields:
        fields = args.fields
        header = '#id\t' + '\t'.join(fields)
        if args.noid is True:
            # drop the id column from the header as well
            header = header.replace('#id\t', '#')
        print(header)
    for iid in id_list:
        res = get_FDN(iid, connection)
        if args.fields:
            line = ''
            for f in fields:
                val = res.get(f)
                if isinstance(val, list):
                    # flatten list values; trim a trailing separator if present.
                    # NOTE: the trim is nested here so a missing field
                    # (val is None) cannot raise AttributeError on endswith.
                    val = ', '.join(val)
                    if val.endswith(', '):
                        val = val[:-2]
                line = line + str(val) + '\t'
            # BUG FIX: was `if args.noid == 'False':` — comparing the boolean
            # flag against the string 'False', which is never true for an
            # argparse store_true flag, so the id column was always omitted.
            # Use the same `is True` convention as the rest of this function.
            if args.noid is not True:
                line = iid + '\t' + line
            print(line)
        else:
            if args.noid is True:
                print(res)
            else:
                print(iid, '\t', res)
def main():
    """Resolve the query given on the command line and print matching item ids."""
    args = get_args()
    try:
        connection = ff.fdn_connection(args.keyfile, keyname=args.key)
    except Exception:
        print("Connection failed")
        sys.exit(1)
    for item_id in scu.get_item_ids_from_args([args.query], connection, True):
        print(item_id)
def find_and_patch_item_references(connection, olduuid, newuuid, dryrun):
    """Repoint every item that references olduuid to newuuid instead.

    Searches for Items whose `references.uuid` equals olduuid and patches
    each one's `references` field to [newuuid] via patch_and_report.
    Returns True only if every patch succeeded (vacuously True when no
    referencing items are found, after printing a notice).
    """
    search = "type=Item&references.uuid=" + olduuid
    referencing_ids = scu.get_item_ids_from_args([search], connection, True)
    if not referencing_ids:
        print("No references to %s found." % olduuid)
    all_ok = True
    for item_id in referencing_ids:
        patched = patch_and_report(connection, {'references': [newuuid]},
                                   None, item_id, dryrun)
        if not patched:
            all_ok = False
    return all_ok
def main():  # pragma: no cover
    """Add a tag to the given items (and optionally everything linked to them).

    Only item types that can hold a `tags` field are considered, minus any
    types excluded via --types2exclude.  Patches are collected first and then
    applied (or merely reported, without --dbupdate).
    """
    args = get_args()
    try:
        connection = fdn_connection(args.keyfile, keyname=args.key)
    except Exception:
        print("Connection failed")
        sys.exit(1)
    item_ids = scu.get_item_ids_from_args(args.input, connection, args.search)
    tag_types = scu.get_types_that_can_have_field(connection, 'tags')
    if args.types2exclude is not None:
        # honor explicit exclusions even if the type is technically taggable
        tag_types = [t for t in tag_types if t not in args.types2exclude]

    checked = []   # ids already examined — each item is tagged at most once
    patches = {}   # item id -> patch body to apply
    for item_id in item_ids:
        if args.taglinked:
            # tag the item plus every taggable item linked from it
            linked = scu.get_linked_items(connection, item_id, {})
            targets = scu.filter_dict_by_value(linked, tag_types, include=True)
        else:
            # tag only the item itself, and only if its type is taggable
            item_type = scu.get_item_type(connection, item_id)
            targets = {item_id: item_type} if item_type in tag_types else {}
        for target_id, _ in targets.items():
            if target_id in checked:
                continue
            checked.append(target_id)
            item = get_FDN(target_id, connection)
            if not scu.has_field_value(item, 'tags', args.tag):
                # not yet carrying this tag — queue a patch
                patches[target_id] = make_tag_patch(item, args.tag)

    for patch_id, patch in patches.items():
        if args.dbupdate:
            result = patch_FDN(patch_id, connection, patch)
            print(result['status'])
        else:
            print("DRY RUN: patch ", patch_id, " with ", patch)
def main():
    """Patch a single field to a given value on every item named by the args.

    With --isarray the value string is split on single quotes and the quoted
    tokens become a list value.  Without --dbupdate this is a dry run that
    only prints what would be patched.
    """
    args = get_args()
    try:
        connection = fdn_connection(args.keyfile, keyname=args.key)
    except Exception:
        print("Connection failed")
        sys.exit(1)
    item_ids = scu.get_item_ids_from_args(args.input, connection, args.search)
    value = args.value
    if args.isarray:
        # every odd-indexed piece after splitting on ' is a quoted token
        value = value.split("'")[1::2]
    for item_id in item_ids:
        print("PATCHING", item_id, "to", args.field, "=", value)
        if not args.dbupdate:
            continue
        res = patch_FDN(item_id, connection, {args.field: value})
        if res['status'] == 'success':
            print("SUCCESS!")
        else:
            print("FAILED TO PATCH", item_id, "RESPONSE STATUS",
                  res['status'], res['description'])
def main():  # pragma: no cover
    """Report all items linked from the given items, skipping excluded types.

    For each input item, walks its linked items, prints one line per linked
    item annotated with INPUT / RELEASED / SEEN markers, and finally prints
    the deduplicated list of collected ids.  Released items are skipped
    unless --include-released is given.
    """
    args = get_args()
    try:
        connection = fdn_connection(args.keyfile, keyname=args.key)
    except Exception:
        print("Connection failed")
        sys.exit(1)
    itemids = scu.get_item_ids_from_args(args.input, connection, args.search)
    excluded_types = get_excluded(args.types2exclude, args.types2include)
    no_child = ['Publication', 'Lab', 'User', 'Award']  # default no_childs
    if args.no_children:
        # BUG FIX: was `list(set(no_child.extend(args.no_children)))` —
        # list.extend returns None, so set(None) raised TypeError whenever
        # --no-children was supplied.  Concatenate instead, then dedupe.
        no_child = list(set(no_child + args.no_children))
    # NOTE(review): no_child is not referenced below in this function —
    # possibly meant to be passed to scu.get_linked_items; confirm upstream.
    all_linked_ids = []
    # main loop through the top level item ids
    for itemid in itemids:
        linked = scu.get_linked_items(connection, itemid, {})
        if excluded_types is not None:
            linked = scu.filter_dict_by_value(linked, excluded_types,
                                              include=False)
        # sort linked ids by their mapped value (item type)
        ll = [(k, linked[k]) for k in sorted(linked, key=linked.get)]
        for i, t in ll:
            suff = ''
            if i == itemid:
                suff = '\tINPUT'
            if is_released(i, connection):
                suff = '\tRELEASED' + suff
                if not args.include_released:
                    print(i, '\t', t, '\tSKIPPING', suff)
                    continue
            if i not in all_linked_ids:
                all_linked_ids.append(i)
            else:
                suff = suff + '\tSEEN'
            print(i, '\t', t, suff)
    for a in all_linked_ids:
        print(a)
def main():  # pragma: no cover
    """Tag every item touched by a DataReleaseUpdate with the release tag.

    Finds DataReleaseUpdate items carrying --reltag, collects their primary
    update items, and (for ExperimentSets only) tags the set plus its
    experiments, raw files and processed files.  `seen` guards against
    double-tagging; `cnts` tallies per-type counts and is printed at the end.
    Several branches are deliberately disabled for the "first freeze" release
    (no processed files included) — see the commented-out lines below.
    """
    args = get_args()
    dbupdate = args.dbupdate
    try:
        connection = fdn_connection(args.keyfile, keyname=args.key)
    except Exception as e:
        print("Connection failed")
        sys.exit(1)
    cnts = Counter()  # per-item-type tag counts, printed at the end
    reltag = args.reltag
    # build the search query string
    query = 'type=DataReleaseUpdate&update_tag=' + reltag
    relupdates = scu.get_item_ids_from_args([query], connection, True)
    update_items = []
    for u in relupdates:
        res = get_FDN(u, connection)
        # collect the primary id of each update item in this release update
        for ui in res.get('update_items'):
            if ui.get('primary_id'):
                update_items.append(ui['primary_id'])
    seen = []  # uuids already tagged — presumably appended to by add_tag2item; confirm
    # update_items = ['experiment-set-replicates/4DNESOI2ALTL']
    for item in update_items:
        res = get_FDN(item, connection)
        uid = res.get('uuid')
        # NOTE: `type` shadows the builtin here
        type = get_attype(res)
        cnts[type] += 1
        # only untagged ExperimentSets proceed past this point
        if (not uid) or (uid in seen) or ('ExperimentSet' not in type):
            # case for first freeze (no processed files included)
            print("SKIPPING ", uid)
            cnts['skipped'] += 1
            continue
        add_tag2item(connection, uid, reltag, seen, cnts, type, dbupdate)
        # always true after the guard above — kept for clarity/symmetry
        if 'ExperimentSet' in type:
            # get the experiments and files
            exps = res.get('experiments_in_set')
            if exps is not None:
                cnts['Experiment'] += len(exps)
                for exp in exps:
                    # import pdb; pdb.set_trace()
                    add_tag2item(connection, exp, reltag, seen, cnts,
                                 'Experiment', dbupdate)
                    files = exp.get('files')
                    if files is not None:
                        cnts['FileFastq'] += len(files)
                        for file in files:
                            file = add_tag2item(connection, file, reltag, seen,
                                                cnts, 'FileFastq', dbupdate)
                    epfiles = exp.get('processed_files')
                    # epfiles = None  # case for first freeze (no processed files included)
                    if epfiles is not None:
                        cnts['FileProcessed'] += len(epfiles)
                        for epf in epfiles:
                            add_tag2item(connection, epf, reltag, seen, cnts,
                                         'FileProcessed', dbupdate)
            # check the processed files directly associated to the eset
            # pfiles = res.get('procesed_files')
            pfiles = None  # case for first freeze (no processed files included)
            if pfiles is not None:
                cnts['FileProcessed'] += len(pfiles)
                for pf in pfiles:
                    add_tag2item(connection, pf, reltag, seen, cnts,
                                 'FileProcessed', dbupdate)
    print(cnts)