Example #1
def upload_collection_from_files(db,
                                 db_name,
                                 coll_name,
                                 master_file_name,
                                 json_file_name,
                                 fresh=False):
    """Freshly upload inventory for a single collection. CLOBBERS CONTENT

        db -- LMFDB connection to inventory database
        db_name -- Name of database this collection is in
        coll_name -- Name of collection to upload
        master_file_name -- path to report tool database structure file
        json_file_name -- path to file containing additional inventory data
        fresh -- set True to skip some syncing when this is a fresh (new) upload
    """

    decoder = json.JSONDecoder()

    inv.log_dest.info("Uploading collection structure for " + coll_name)
    structure_data = decoder.decode(read_file(master_file_name))

    #Do we need to keep the orphans?
    #orphaned_keys = upload_collection_structure(db, db_name, coll_name, structure_data, fresh=fresh)
    upload_collection_structure(db,
                                db_name,
                                coll_name,
                                structure_data,
                                fresh=fresh)

    inv.log_dest.info("Uploading collection description for " + coll_name)
    data = decoder.decode(read_file(json_file_name))
    upload_collection_description(db, db_name, coll_name, data, fresh=fresh)
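
A minimal usage sketch (not from the source): the connection helper and the
database/collection names below are hypothetical stand-ins, and json plus the
upload_* helpers are assumed to be imported at module level as in the snippet
above.

db = get_inventory_connection()  # hypothetical helper returning the LMFDB connection
upload_collection_from_files(db,
                             'elliptic_curves',    # db_name (example value)
                             'curves',             # coll_name (example value)
                             'structure.json',     # report tool structure file
                             'curves_extra.json',  # additional inventory data
                             fresh=True)           # skip syncing for a fresh upload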
Example #2
def upload_all_structure(db, structure_dat):
    """Upload an everything from a structure json document

        db -- LMFDB connection to inventory database
        structure_dat -- JSON document containing all db/collections to upload
    """

    inv.log_dest.info(
        "_____________________________________________________________________________________________"
    )
    inv.log_dest.info("Processing structure data")
    n_dbs = len(structure_dat.keys())
    progress_tracker = 0

    for DB_name in structure_dat:
        progress_tracker += 1
        inv.log_dest.info("Uploading " + DB_name + " (" +
                          str(progress_tracker) + " of " + str(n_dbs) + ')')
        invc.set_db(db, DB_name, DB_name)

        for coll_name in structure_dat[DB_name]:
            inv.log_dest.info("    Uploading collection " + coll_name)
            orphaned_keys = upload_collection_structure(db,
                                                        DB_name,
                                                        coll_name,
                                                        structure_dat,
                                                        fresh=False)
            if len(orphaned_keys) != 0:
                with open('Orph_' + DB_name + '_' + coll_name + '.json',
                          'w') as file:
                    file.write(json.dumps(orphaned_keys))
                    inv.log_dest.info("          Orphans written to Orph_" +
                                      DB_name + '_' + coll_name + '.json')
Example #3
def upload_from_files(db, master_file_name, list_file_name, fresh=False):
    """Upload an entire inventory. CLOBBERS CONTENT

        db -- LMFDB connection to inventory database
        master_file_name -- path to report tool database structure file
        list_file_name -- path to file containing list of all additional inventory info files
        fresh -- set True to treat this as a fresh (new) upload when syncing tables
    """
    #For a complete upload it is more logical to fill things in layer by layer,
    #so we do all the dbs first, then the collections, and finish with the additional descriptions

    decoder = json.JSONDecoder()
    structure_dat = decoder.decode(read_file(master_file_name))

    inv.log_dest.info("_____________________________________________________________________________________________")
    inv.log_dest.info("Processing autogenerated inventory")
    n_dbs = len(structure_dat.keys())
    progress_tracker = 0

    for DB_name in structure_dat:
        progress_tracker += 1
        inv.log_dest.info("Uploading " + DB_name+" ("+str(progress_tracker)+" of "+str(n_dbs)+')')
        invc.set_db(db, DB_name, DB_name)

        for coll_name in structure_dat[DB_name]:
            inv.log_dest.info("    Uploading collection "+coll_name)
            orphaned_keys = upload_collection_structure(db, DB_name, coll_name, structure_dat, fresh=fresh)
            if len(orphaned_keys) != 0:
                with open('Orph_'+DB_name+'_'+coll_name+'.json', 'w') as file:
                    file.write(json.dumps(orphaned_keys))
                inv.log_dest.info("          Orphans written to Orph_"+ DB_name+'_'+coll_name+'.json')

    inv.log_dest.info("_____________________________________________________________________________________________")
    inv.log_dest.info("Processing additional inventory")
    file_list = read_list(list_file_name)
    last_db = ''
    progress_tracker = 0
    for file in file_list:
        data = decoder.decode(read_file(file))
        record_name = ih.get_description_key(file)
        DB_name = record_name[0]
        if DB_name != last_db:
            progress_tracker += 1
            inv.log_dest.info("Uploading " + DB_name+" ("+str(progress_tracker)+" of <="+str(n_dbs)+')')
            last_db = DB_name
        coll_name = record_name[1]
        inv.log_dest.info("    Uploading collection "+coll_name)

        upload_collection_description(db, DB_name, coll_name, data)
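
A hedged end-to-end sketch: a clobbering upload of the entire inventory from
the two driver files. Paths and the connection helper are placeholders;
read_list is assumed to return one description-file path per line.

db = get_inventory_connection()  # hypothetical LMFDB connection helper
upload_from_files(db,
                  'structure.json',  # report tool database structure file
                  'file_list.txt',   # list of additional inventory info files
                  fresh=True)        # sync tables as for a fresh upload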