def crawl(objects, actors):
    """Walk the JSON object files under *objects* and extract embedded
    person records into stand-alone JSON files under *actors*.

    For every ``*.json`` file below *objects*, each record listed under
    the 'actors', 'printers' or 'publishers' keys that carries an 'irn'
    identifier is written to ``<actors>/<id2path(irn)>/<irn>.json``.
    Existing actor files are never overwritten.
    """
    for root, dirs, files in os.walk(objects):
        for f in files:
            if not f.endswith(".json"):
                continue

            obj_path = os.path.abspath(os.path.join(root, f))

            # use `with` so the source handle is closed promptly instead
            # of being leaked (the original rebound `fh` for writing
            # without ever closing the read handle)
            with open(obj_path, 'r') as obj_fh:
                data = json.load(obj_fh)

            for role in ('actors', 'printers', 'publishers'):
                if not data.get(role, False):
                    continue

                for a in data[role]:
                    # renamed from `id`, which shadowed the builtin
                    actor_id = a.get('irn', None)
                    if not actor_id:
                        continue

                    parent = utils.id2path(actor_id)
                    dirname = os.path.join(actors, parent)
                    if not os.path.exists(dirname):
                        os.makedirs(dirname)

                    fname = "%s.json" % actor_id
                    actor_path = os.path.join(dirname, fname)

                    # never clobber an actor file that already exists
                    if os.path.exists(actor_path):
                        continue

                    with open(actor_path, 'w') as out_fh:
                        json.dump(a, out_fh, indent=2)
# Export each row of <collection>/objects.csv as a pretty-printed JSON
# file at <collection>/objects/<id2path(id)>/<id>.json.
# Layout assumption: this script lives in <collection>/bin/ — TODO confirm.
whoami = os.path.abspath(sys.argv[0])
bindir = os.path.dirname(whoami)
collection = os.path.dirname(bindir)

objects = os.path.join(collection, 'objects')
obj_csv = os.path.join(collection, 'objects.csv')

obj_fh = open(obj_csv, 'r')
reader = UnicodeDictReader(obj_fh)

for row in reader:
    fname = "%s.json" % row['id']
    root = utils.id2path(row['id'])
    root = os.path.join(objects, root)
    out = os.path.join(root, fname)
    print(out)

    # NOTE(review): a stray `continue` here previously made everything
    # below unreachable (dry-run leftover); removed so rows are actually
    # written, matching the companion MetObjects exporter.
    if not os.path.exists(root):
        os.makedirs(root)

    # close the output file so the JSON is flushed to disk
    out_fh = open(out, 'w')
    json.dump(row, out_fh, indent=2)
    out_fh.close()
    print(out)

obj_fh.close()
# Export each row of <collection>/MetObjects.csv as a pretty-printed JSON
# file at <collection>/objects/<id2path(Object ID)>/<Object ID>.json.
# Layout assumption: this script lives in <collection>/bin/ — TODO confirm.
whoami = os.path.abspath(sys.argv[0])
bindir = os.path.dirname(whoami)
collection = os.path.dirname(bindir)

objects = os.path.join(collection, 'objects')
obj_csv = os.path.join(collection, 'MetObjects.csv')

obj_fh = open(obj_csv, 'r')
reader = UnicodeDictReader(obj_fh)

for row in reader:
    fname = "%s.json" % row['Object ID']
    root = utils.id2path(row['Object ID'])
    root = os.path.join(objects, root)
    out = os.path.join(root, fname)

    if not os.path.exists(root):
        os.makedirs(root)

    # close the output file so the JSON is flushed to disk
    out_fh = open(out, 'w')
    json.dump(row, out_fh, indent=2)
    out_fh.close()

    # single progress line per row (the original also printed `out`
    # before writing — dry-run debug leftover, removed along with the
    # commented-out `#continue`)
    print(out)

obj_fh.close()