def upload_to_atk(mylist, atuser, ead_actuate, ead_show, object_type,
                  use_statement, uri_prefix, dip_uuid, access_conditions,
                  use_conditions, restrictions, dip_location):
    """Upload a DIP's files into an Archivists' Toolkit database via raw SQL.

    For each file in ``mylist`` whose 36-char UUID file-name prefix has a
    pairing recorded by ``get_pairs(dip_uuid)``, inserts rows into
    ``digitalobjects``, ``fileversions`` and three notes into
    ``archdescriptionrepeatingdata`` (location of originals, access
    conditions, use conditions), then commits and deletes the pairings.

    Relies on names defined elsewhere in this module: ``logger``, ``args``,
    ``atk``, ``mets_file``, ``get_pairs``, ``delete_pairs``, ``process_sql``,
    and the module globals ``db``, ``cursor`` and ``base_fv_id`` it (re)binds.

    NOTE(review): every SQL statement is assembled by string interpolation
    from METS-derived text -- vulnerable to quoting errors / SQL injection;
    parameterized queries would be safer.
    """
    #TODO get resource_id from caller
    resource_id = 31
    # Extend the prefix to <prefix>/<dip_uuid>/objects/, avoiding a double '/'
    if uri_prefix[-1] == '/':
        uri_prefix = uri_prefix + dip_uuid + "/objects/"
    else:
        uri_prefix = uri_prefix + "/" + dip_uuid + "/objects/"

    #get mets object if needed: premis-driven restrictions, or conditions
    #that were not supplied and must be read from the METS file
    mets = None
    if restrictions == 'premis' or len(access_conditions) == 0 or len(
            use_conditions) == 0:
        try:
            logger.debug("looking for mets: {}".format(dip_uuid))
            mets_source = dip_location + 'METS.' + dip_uuid + '.xml'
            mets = mets_file(mets_source)
            logger.debug("found mets file")
        except Exception:
            raise
            # NOTE(review): unreachable -- the bare `raise` above always
            # leaves this block before exit(4) can run
            exit(4)

    global db
    global cursor
    db = atk.connect_db(args.atdbhost, args.atdbport, args.atdbuser,
                        args.atdbpass, args.atdb)
    cursor = db.cursor()

    #get a list of all the items in this collection
    col = atk.collection_list(db, resource_id)
    logger.debug("got collection_list: {}".format(len(col)))
    # Seed base_fv_id with the highest existing fileVersionId (or 1 when the
    # table is empty) so the per-file inserts below get unique ids.
    sql0 = "select max(fileVersionId) from fileversions"
    logger.debug('sql0: ' + sql0)
    cursor.execute(sql0)
    data = cursor.fetchone()
    if not data[0]:
        newfVID = 1
    else:
        newfVID = int(data[0])
    logger.debug('base file version id found is ' + str(data[0]))
    global base_fv_id
    base_fv_id = newfVID

    pairs = get_pairs(dip_uuid)
    #TODO test to make sure we got some pairs

    for f in mylist:
        base_fv_id += 1
        print 'found file: ' + f
        file_name = os.path.basename(f)
        # file names are expected to begin with a 36-character UUID
        uuid = file_name[0:36]
        access_restrictions = None
        access_rightsGrantedNote = None
        use_restrictions = None
        use_rightsGrantedNote = None
        # NOTE(review): mets[uuid] raises KeyError when uuid is absent --
        # assumes the METS file covers every uploaded file; confirm.
        if mets and mets[uuid]:
            #get premis info from mets (keys are capitalized here,
            #'Disseminate'/'Publish', unlike the lowercase variants used by
            #other versions of this script)
            for premis in mets[uuid]['premis']:
                logger.debug("{} rights = {}, note={}".format(
                    premis, mets[uuid]['premis'][premis]['restriction'],
                    mets[uuid]['premis'][premis]['rightsGrantedNote']))
                if premis == 'Disseminate':
                    access_restrictions = mets[uuid]['premis']['Disseminate'][
                        'restriction']
                    access_rightsGrantedNote = mets[uuid]['premis'][
                        'Disseminate']['rightsGrantedNote']
                if premis == 'Publish':
                    use_restrictions = mets[uuid]['premis']['Publish'][
                        'restriction']
                    use_rightsGrantedNote = mets[uuid]['premis']['Publish'][
                        'rightsGrantedNote']
        try:
            # container ids are assumed at fixed offsets of the file-name
            # convention -- TODO confirm the naming scheme
            container1 = file_name[44:47]
            container2 = file_name[48:53]
        except:
            # NOTE(review): string slicing never raises IndexError, so this
            # handler is effectively dead code
            logger.error('file name does not have container ids in it')
            exit(5)
        logger.debug("determine restrictions")
        #determine restrictions: explicit yes/no, or derived from PREMIS
        if restrictions == 'no':
            restrictions_apply = False
        elif restrictions == 'yes':
            restrictions_apply = True
            ead_actuate = "none"
            ead_show = "none"
        elif restrictions == 'premis':
            logger.debug("premis restrictions")
            if access_restrictions == 'Allow' and use_restrictions == 'Allow':
                restrictions_apply = False
            else:
                restrictions_apply = True
                ead_actuate = "none"
                ead_show = "none"

        # NOTE(review): reassigning use_conditions/access_conditions here
        # carries one file's note over to later iterations -- confirm intended
        if len(use_conditions) == 0 or restrictions == 'premis':
            if use_rightsGrantedNote:
                use_conditions = use_rightsGrantedNote

        if len(access_conditions) == 0 or restrictions == 'premis':
            if access_rightsGrantedNote:
                access_conditions = access_rightsGrantedNote

        # default title: the file name with the "<uuid>-" prefix stripped
        short_file_name = file_name[37:]
        time_now = strftime("%Y-%m-%d %H:%M:%S", localtime())
        file_uri = uri_prefix + file_name

        if uuid in pairs:
            is_resource = False
            if pairs[uuid]['rcid'] > 0:
                sql1 = '''select resourceComponentId, dateBegin, dateEnd, dateExpression, title from
                          resourcescomponents where resourcecomponentid = {}'''.format(
                    pairs[uuid]['rcid'])
            else:
                is_resource = True
                # NOTE(review): selects resourceComponentId from the
                # resources table -- verify that column exists there
                sql1 = '''select resourceComponentId, dateBegin, dateEnd, dateExpression, title from
                          resources where resourceid = {}'''.format(
                    pairs[uuid]['rid'])

            logger.debug('sql1:' + sql1)
            cursor.execute(sql1)
            data = cursor.fetchone()
            rcid = data[0]
            dateBegin = data[1]
            dateEnd = data[2]
            dateExpression = data[3]
            rc_title = data[4]
            # NOTE(review): this concatenation raises TypeError when rc_title
            # is None, before the None check below gets a chance to run
            logger.debug("found rc_title " + rc_title + ":" +
                         str(len(rc_title)))
            # prefer the record title, then the date expression, then a
            # date or date range, as the digital object's title
            if (not rc_title or len(rc_title) == 0):
                if (not dateExpression or len(dateExpression) == 0):
                    if dateBegin == dateEnd:
                        short_file_name = str(dateBegin)
                    else:
                        short_file_name = str(dateBegin) + '-' + str(dateEnd)
                else:
                    short_file_name = dateExpression
            else:
                short_file_name = rc_title

            # NOTE(review): len(dateExpression) raises TypeError when
            # dateExpression is None
            logger.debug("dateExpression is : " + str(dateExpression) +
                         str(len(dateExpression)))
            logger.debug("dates are  " + str(dateBegin) + "-" + str(dateEnd))
            logger.debug("short file name is " + str(short_file_name))

            # assumes a single repository row; takes the first one found
            sql2 = "select repositoryId from repositories"
            logger.debug('sql2: ' + sql2)

            cursor.execute(sql2)
            data = cursor.fetchone()
            repoId = data[0]
            logger.debug('repoId: ' + str(repoId))
            sql3 = " select max(archDescriptionInstancesId) from archdescriptioninstances"
            #logger.debug('sql3: ' + sql3)
            cursor.execute(sql3)
            data = cursor.fetchone()
            newaDID = int(data[0]) + 1

            # link the new instance to either the resource or the component
            if is_resource:
                sql4 = "insert into archdescriptioninstances (archDescriptionInstancesId, instanceDescriminator, instanceType, resourceId) values (%d, 'digital','Digital object',%d)" % (
                    newaDID, rcid)
            else:
                sql4 = "insert into archdescriptioninstances (archDescriptionInstancesId, instanceDescriminator, instanceType, resourceComponentId) values (%d, 'digital','Digital object',%d)" % (
                    newaDID, rcid)

            #logger.debug('sql4:' + sql4)
            adid = process_sql(sql4)
            #added sanity checks in case date fields in original archival description were all empty
            if len(dateExpression) == 0:
                dateExpression = 'null'
            if not dateBegin:
                dateBegin = 0
            if not dateEnd:
                dateEnd = 0

            sql5 = """INSERT INTO digitalobjects                  
               (`version`,`lastUpdated`,`created`,`lastUpdatedBy`,`createdBy`,`title`,
                `dateExpression`,`dateBegin`,`dateEnd`,`languageCode`,`restrictionsApply`,
                `eadDaoActuate`,`eadDaoShow`,`metsIdentifier`,`objectType`,`objectOrder`,
                `archDescriptionInstancesId`,`repositoryId`)
               VALUES (1,'%s', '%s','%s','%s','%s','%s',%d, %d,'English',%d,'%s','%s','%s','%s',0,%d,%d)""" % (
                time_now, time_now, atuser, atuser, short_file_name,
                dateExpression, dateBegin, dateEnd, int(restrictions_apply),
                ead_actuate, ead_show, uuid, object_type, newaDID, repoId)
            #logger.debug('sql5: ' + sql5)
            doID = process_sql(sql5)
            sql6 = """insert into fileversions (fileVersionId, version, lastUpdated, created, lastUpdatedBy, createdBy, uri, useStatement, sequenceNumber, eadDaoActuate,eadDaoShow, digitalObjectId)
                  values 
               (%d, 1, '%s', '%s', '%s', '%s', '%s', '%s', %d, '%s','%s', %d)""" % (
                base_fv_id, time_now, time_now, atuser, atuser, file_uri,
                use_statement, 0, ead_actuate, ead_show, doID)
            logger.debug('sql6: ' + sql6)
            process_sql(sql6)

            #create notes
            sql7 = " select max(archdescriptionrepeatingdataId) from archdescriptionrepeatingdata"
            #logger.debug('sql7: ' + sql7)
            cursor.execute(sql7)
            data = cursor.fetchone()

            #existence and location of originals note (notesetctypeid 13)
            newadrd = int(data[0]) + 1
            seq_num = 0
            note_content = dip_uuid
            logger.debug("about to run sql8")
            # NOTE(review): here seq_num feeds the `version` column and
            # sequenceNumber is hard-coded 0; sql9/sql10 below do the
            # reverse -- confirm which ordering is intended
            sql8 = """insert into archdescriptionrepeatingdata 
                (archdescriptionrepeatingdataid, descriminator, version, lastupdated, created, lastupdatedby ,createdby, repeatingdatatype, title, sequenceNumber,
                digitalObjectId, noteContent, notesetctypeid, basic, multiPart,internalOnly) values 
                (%d, 'note',%d, '%s', '%s', '%s', '%s','Note','', 0, %d, '%s',13, '', '', '')""" % (
                newadrd, seq_num, time_now, time_now, atuser, atuser, doID,
                note_content)
            #logger.debug('sql8: ' + sql8)
            adrd = process_sql(sql8)

            #conditions governing access note (notesetctypeid 8)
            newadrd += 1
            seq_num += 1
            note_content = access_conditions

            sql9 = """insert into archdescriptionrepeatingdata 
                (archdescriptionrepeatingdataid, descriminator, version, lastupdated, created, lastupdatedby ,createdby, repeatingdatatype, title, sequenceNumber,
                digitalObjectId, noteContent, notesetctypeid, basic, multipart, internalOnly) values 
                (%d, 'note',0, '%s', '%s', '%s', '%s','Note','', %d, %d, '%s',8, '', '', '')""" % (
                newadrd, time_now, time_now, atuser, atuser, seq_num, doID,
                note_content)
            adrd = process_sql(sql9)
            #logger.debug('sql9:' + sql9)

            #conditions governing use note (notesetctypeid 9)
            newadrd += 1
            seq_num += 1
            note_content = use_conditions

            sql10 = """insert into archdescriptionrepeatingdata 
                (archdescriptionrepeatingdataid, descriminator, version, lastupdated, created, lastupdatedby ,createdby, repeatingdatatype, title, sequenceNumber,
                digitalObjectId, noteContent, notesetctypeid, basic, multipart, internalOnly) values 
                (%d, 'note',0, '%s', '%s', '%s', '%s','Note','', %d, %d, '%s',9, '', '', '')""" % (
                newadrd, time_now, time_now, atuser, atuser, seq_num, doID,
                note_content)
            adrd = process_sql(sql10)
            logger.debug('sql10:' + sql10)

    process_sql("commit")
    delete_pairs(dip_uuid)
    print("completed upload successfully")
    exit(0)
# ----- Example #2 (scraped-snippet separator; score: 0) -----
def upload_to_archivesspace(
    files,
    client,
    xlink_show,
    xlink_actuate,
    object_type,
    use_statement,
    uri,
    dip_uuid,
    access_conditions,
    use_conditions,
    restrictions,
    dip_location,
    inherit_notes,
):
    """Upload a DIP's files to ArchivesSpace as digital objects.

    For each file in ``files`` whose 36-char UUID file-name prefix is paired
    (via ``get_pairs``) with an archival object, gathers rights info from the
    DIP's METS file (when needed), file/format info from the Django models
    ``File``/``FormatVersion``, and calls ``client.add_digital_object``.

    Relies on names defined elsewhere in this module: ``logger``,
    ``mets_file``, ``get_pairs``, ``delete_pairs``, ``File``,
    ``FormatVersion``.
    """

    if not uri.endswith("/"):
        uri += "/"
    pairs = get_pairs(dip_uuid)

    # get mets object if needed: premis-driven restrictions, or conditions
    # that were not supplied and must be read from the METS file
    mets = None
    if (restrictions == "premis" or len(access_conditions) == 0
            or len(use_conditions) == 0):
        logger.debug("Looking for mets: {}".format(dip_uuid))
        mets_source = dip_location + "METS." + dip_uuid + ".xml"
        mets = mets_file(mets_source)
        logger.debug("Found mets file at path: {}".format(mets_source))

    for f in files:
        file_name = os.path.basename(f)
        # file names are expected to begin with a 36-character UUID
        uuid = file_name[0:36]

        if uuid not in pairs:
            logger.warning("Skipping file {} ({}) - no pairing found".format(
                f, uuid))
            continue

        as_resource = pairs[uuid]

        access_restrictions = None
        access_rightsGrantedNote = None
        use_restrictions = None
        use_rightsGrantedNote = None

        # NOTE(review): mets[uuid] raises KeyError when uuid is absent --
        # assumes the METS file covers every uploaded file; confirm.
        if mets and mets[uuid]:
            # get premis info from mets (lowercase keys here)
            for premis in mets[uuid]["premis"]:
                logger.debug("{} rights = {}, note={}".format(
                    premis,
                    mets[uuid]["premis"][premis]["restriction"],
                    mets[uuid]["premis"][premis]["rightsGrantedNote"],
                ))
                if premis == "disseminate":
                    access_restrictions = mets[uuid]["premis"]["disseminate"][
                        "restriction"]
                    access_rightsGrantedNote = mets[uuid]["premis"][
                        "disseminate"]["rightsGrantedNote"]
                if premis == "publish":
                    use_restrictions = mets[uuid]["premis"]["publish"][
                        "restriction"]
                    use_rightsGrantedNote = mets[uuid]["premis"]["publish"][
                        "rightsGrantedNote"]

        # determine restrictions: explicit yes/no, or derived from PREMIS
        if restrictions == "no":
            restrictions_apply = False
        elif restrictions == "yes":
            restrictions_apply = True
            xlink_actuate = "none"
            xlink_show = "none"
        elif restrictions == "premis":
            logger.debug("premis restrictions")
            if access_restrictions == "Allow" and use_restrictions == "Allow":
                restrictions_apply = False
            else:
                restrictions_apply = True
                xlink_actuate = "none"
                xlink_show = "none"

        # NOTE(review): reassigning use_conditions/access_conditions here
        # carries one file's note over to later iterations -- confirm intended
        if len(use_conditions) == 0 or restrictions == "premis":
            if use_rightsGrantedNote:
                use_conditions = use_rightsGrantedNote

        if len(access_conditions) == 0 or restrictions == "premis":
            if access_rightsGrantedNote:
                access_conditions = access_rightsGrantedNote

        # Get file & format info
        # Client wants access copy info

        original_name = ""
        # Get file & format info
        try:
            fv = FormatVersion.objects.get(fileformatversion__file_uuid=uuid)
            format_version = fv.description
            format_name = fv.format.description
        except FormatVersion.DoesNotExist:
            format_name = format_version = None

        # Client wants access copy info
        try:
            original_file = File.objects.get(filegrpuse="original", uuid=uuid)
        except (File.DoesNotExist, File.MultipleObjectsReturned):
            original_name = ""
            size = format_name = format_version = None
        else:
            # Set some variables based on the original, we will override most
            # of these if there is an access derivative
            size = os.path.getsize(f)
            original_name = os.path.basename(original_file.originallocation)
        try:
            access_file = File.objects.get(filegrpuse="access",
                                           original_file_set__source_file=uuid)
        except (File.DoesNotExist, File.MultipleObjectsReturned):
            # Just use original file info
            pass
        else:
            # HACK remove DIP from the path because create DIP doesn't
            access_file_path = access_file.currentlocation.replace(
                "%SIPDirectory%DIP/", dip_location)
            size = os.path.getsize(access_file_path)

        # HACK map the format version to ArchivesSpace's fixed list of formats it accepts.
        # Unmapped names become None via .get() below.
        as_formats = {
            "Audio Interchange File Format": "aiff",
            "Audio/Video Interleaved": "avi",
            "Graphics Interchange Format": "gif",
            "JPEG": "jpeg",
            "MPEG Audio": "mp3",
            "PDF": "pdf",
            "Tagged Image File Format": "tiff",
            "Plain Text": "txt",
        }
        if format_name is not None:
            format_name = as_formats.get(format_name)

        logger.info("Uploading {} to ArchivesSpace record {}".format(
            file_name, as_resource))
        client.add_digital_object(
            parent_archival_object=as_resource,
            identifier=uuid,
            # TODO: fetch a title from DC?
            #       Use the title of the parent record?
            title=original_name,
            uri=uri + file_name,
            location_of_originals=dip_uuid,
            object_type=object_type,
            use_statement=use_statement,
            xlink_show=xlink_show,
            xlink_actuate=xlink_actuate,
            restricted=restrictions_apply,
            use_conditions=use_conditions,
            access_conditions=access_conditions,
            size=size,
            format_name=format_name,
            format_version=format_version,
            inherit_notes=inherit_notes,
        )

        # NOTE(review): called once per file inside the loop -- looks like it
        # was intended to run once after the loop; confirm delete_pairs is
        # idempotent for the same dip_uuid
        delete_pairs(dip_uuid)
# ----- Example #3 (scraped-snippet separator; score: 0) -----
def upload_to_atk(mylist, atuser, ead_actuate, ead_show, object_type, use_statement, uri_prefix, dip_uuid, access_conditions, use_conditions, restrictions, dip_location):
    """Upload a DIP's files to Archivists' Toolkit via ArchivistsToolkitClient.

    For each file in ``mylist`` whose 36-char UUID file-name prefix is paired
    (via ``get_pairs``) with a resource or resource component, adds a digital
    object through the client, applying restriction/EAD attributes taken from
    the arguments or derived from the DIP's METS file, then deletes the
    pairings.

    Relies on names defined elsewhere in this module: ``logger``, ``args``,
    ``mets_file``, ``get_pairs``, ``delete_pairs``,
    ``ArchivistsToolkitClient``.
    """
    logger.info("inputs: actuate '{}' show '{}' type '{}'  use_statement '{}' use_conditions '{}'".format(ead_actuate, ead_show, object_type, use_statement, use_conditions))
    if not uri_prefix.endswith('/'):
        uri_prefix += '/'

    # get mets object if needed: premis-driven restrictions, or conditions
    # that were not supplied and must be read from the METS file.
    # Fixed: the original wrapped this in `try/except Exception: raise`, a
    # no-op handler that also made a following `exit(4)` unreachable dead
    # code. Any exception from mets_file still propagates to the caller.
    mets = None
    if restrictions == 'premis' or len(access_conditions) == 0 or len(use_conditions) == 0:
        logger.debug("looking for mets: {}".format(dip_uuid))
        mets_source = dip_location + 'METS.' + dip_uuid + '.xml'
        mets = mets_file(mets_source)
        logger.debug("found mets file")

    client = ArchivistsToolkitClient(args.atdbhost, args.atdbuser, args.atdbpass, args.atdb)

    pairs = get_pairs(dip_uuid)
    # TODO test to make sure we got some pairs

    for f in mylist:
        logger.info('using ' + f)
        file_name = os.path.basename(f)
        logger.info('file_name is ' + file_name)
        # file names are expected to begin with a 36-character UUID
        uuid = file_name[0:36]
        access_restrictions = None
        access_rightsGrantedNote = None
        use_restrictions = None
        use_rightsGrantedNote = None
        # NOTE(review): mets[uuid] raises KeyError when uuid is absent --
        # assumes the METS file covers every uploaded file; confirm.
        if mets and mets[uuid]:
            # get premis info from mets (lowercase keys here)
            for premis in mets[uuid]['premis']:
                logger.debug("{} rights = {}, note={}".format(premis, mets[uuid]['premis'][premis]['restriction'], mets[uuid]['premis'][premis]['rightsGrantedNote']))
                if premis == 'disseminate':
                    access_restrictions = mets[uuid]['premis']['disseminate']['restriction']
                    access_rightsGrantedNote = mets[uuid]['premis']['disseminate']['rightsGrantedNote']
                if premis == 'publish':
                    use_restrictions = mets[uuid]['premis']['publish']['restriction']
                    use_rightsGrantedNote = mets[uuid]['premis']['publish']['rightsGrantedNote']
        logger.debug("determine restrictions")
        # determine restrictions: explicit yes/no, or derived from PREMIS
        if restrictions == 'no':
            restrictions_apply = False
        elif restrictions == 'yes':
            restrictions_apply = True
            ead_actuate = "none"
            ead_show = "none"
        elif restrictions == 'premis':
            logger.debug("premis restrictions")
            if access_restrictions == 'Allow' and use_restrictions == 'Allow':
                restrictions_apply = False
            else:
                restrictions_apply = True
                ead_actuate = "none"
                ead_show = "none"

        # fall back to METS rights-granted notes when conditions were not
        # supplied (or premis mode was requested)
        if len(use_conditions) == 0 or restrictions == 'premis':
            if use_rightsGrantedNote:
                use_conditions = use_rightsGrantedNote

        if len(access_conditions) == 0 or restrictions == 'premis':
            if access_rightsGrantedNote:
                access_conditions = access_rightsGrantedNote

        file_uri = uri_prefix + file_name

        if uuid in pairs:
            # prefer the resource-component id; fall back to the resource id
            resource_id = pairs[uuid]['rcid'] if pairs[uuid]['rcid'] > 0 else pairs[uuid]['rid']
            client.add_digital_object(resource_id,
                                      uuid,
                                      uri=file_uri,
                                      restricted=restrictions_apply,
                                      xlink_actuate=ead_actuate,
                                      xlink_show=ead_show,
                                      location_of_originals=dip_uuid,
                                      inherit_dates=True)

    delete_pairs(dip_uuid)
    logger.info("completed upload successfully")
def upload_to_archivesspace(files, client, xlink_show, xlink_actuate,
                            object_type, use_statement, uri, dip_uuid,
                            access_conditions, use_conditions, restrictions,
                            dip_location, inherit_notes):
    """Upload a DIP's files to ArchivesSpace as digital objects.

    For each file in ``files`` whose 36-char UUID file-name prefix is paired
    (via ``get_pairs``) with an archival object, gathers rights info from the
    DIP's METS file (when needed), file/format info from the Django models
    ``File``/``FormatVersion``, and calls ``client.add_digital_object``.

    Relies on names defined elsewhere in this module: ``logger``,
    ``mets_file``, ``get_pairs``, ``delete_pairs``, ``File``,
    ``FormatVersion``.
    """

    if not uri.endswith('/'):
        uri += '/'
    pairs = get_pairs(dip_uuid)

    # get mets object if needed: premis-driven restrictions, or conditions
    # that were not supplied and must be read from the METS file
    mets = None
    if restrictions == 'premis' or len(access_conditions) == 0 or len(
            use_conditions) == 0:
        logger.debug("Looking for mets: {}".format(dip_uuid))
        mets_source = dip_location + 'METS.' + dip_uuid + '.xml'
        mets = mets_file(mets_source)
        logger.debug("Found mets file at path: {}".format(mets_source))

    for f in files:
        file_name = os.path.basename(f)
        # file names are expected to begin with a 36-character UUID
        uuid = file_name[0:36]

        if uuid not in pairs:
            logger.warning("Skipping file {} ({}) - no pairing found".format(
                f, uuid))
            continue

        as_resource = pairs[uuid]

        access_restrictions = None
        access_rightsGrantedNote = None
        use_restrictions = None
        use_rightsGrantedNote = None

        # NOTE(review): mets[uuid] raises KeyError when uuid is absent --
        # assumes the METS file covers every uploaded file; confirm.
        if mets and mets[uuid]:
            # get premis info from mets (lowercase keys here)
            for premis in mets[uuid]['premis']:
                logger.debug("{} rights = {}, note={}".format(
                    premis, mets[uuid]['premis'][premis]['restriction'],
                    mets[uuid]['premis'][premis]['rightsGrantedNote']))
                if premis == 'disseminate':
                    access_restrictions = mets[uuid]['premis']['disseminate'][
                        'restriction']
                    access_rightsGrantedNote = mets[uuid]['premis'][
                        'disseminate']['rightsGrantedNote']
                if premis == 'publish':
                    use_restrictions = mets[uuid]['premis']['publish'][
                        'restriction']
                    use_rightsGrantedNote = mets[uuid]['premis']['publish'][
                        'rightsGrantedNote']

        # determine restrictions: explicit yes/no, or derived from PREMIS
        if restrictions == 'no':
            restrictions_apply = False
        elif restrictions == 'yes':
            restrictions_apply = True
            xlink_actuate = "none"
            xlink_show = "none"
        elif restrictions == 'premis':
            logger.debug("premis restrictions")
            if access_restrictions == 'Allow' and use_restrictions == 'Allow':
                restrictions_apply = False
            else:
                restrictions_apply = True
                xlink_actuate = "none"
                xlink_show = "none"

        # NOTE(review): reassigning use_conditions/access_conditions here
        # carries one file's note over to later iterations -- confirm intended
        if len(use_conditions) == 0 or restrictions == 'premis':
            if use_rightsGrantedNote:
                use_conditions = use_rightsGrantedNote

        if len(access_conditions) == 0 or restrictions == 'premis':
            if access_rightsGrantedNote:
                access_conditions = access_rightsGrantedNote

        # Get file & format info
        # Client wants access copy info

        original_name = ''
        # Get file & format info
        try:
            fv = FormatVersion.objects.get(fileformatversion__file_uuid=uuid)
            format_version = fv.description
            format_name = fv.format.description
        except FormatVersion.DoesNotExist:
            format_name = format_version = None

        # Client wants access copy info
        try:
            original_file = File.objects.get(filegrpuse='original', uuid=uuid)
        except (File.DoesNotExist, File.MultipleObjectsReturned):
            original_name = ''
            size = format_name = format_version = None
        else:
            # Set some variables based on the original, we will override most
            # of these if there is an access derivative
            size = os.path.getsize(f)
            original_name = os.path.basename(original_file.originallocation)
        try:
            access_file = File.objects.get(filegrpuse='access',
                                           original_file_set__source_file=uuid)
        except (File.DoesNotExist, File.MultipleObjectsReturned):
            # Just use original file info
            pass
        else:
            # HACK remove DIP from the path because create DIP doesn't
            access_file_path = access_file.currentlocation.replace(
                '%SIPDirectory%DIP/', dip_location)
            size = os.path.getsize(access_file_path)

        # HACK map the format version to ArchivesSpace's fixed list of formats it accepts.
        # Unmapped names become None via .get() below.
        as_formats = {
            'Audio Interchange File Format': 'aiff',
            'Audio/Video Interleaved': 'avi',
            'Graphics Interchange Format': 'gif',
            'JPEG': 'jpeg',
            'MPEG Audio': 'mp3',
            'PDF': 'pdf',
            'Tagged Image File Format': 'tiff',
            'Plain Text': 'txt',
        }
        if format_name is not None:
            format_name = as_formats.get(format_name)

        logger.info("Uploading {} to ArchivesSpace record {}".format(
            file_name, as_resource))
        client.add_digital_object(
            parent_archival_object=as_resource,
            identifier=uuid,
            # TODO: fetch a title from DC?
            #       Use the title of the parent record?
            title=original_name,
            uri=uri + file_name,
            location_of_originals=dip_uuid,
            object_type=object_type,
            use_statement=use_statement,
            xlink_show=xlink_show,
            xlink_actuate=xlink_actuate,
            restricted=restrictions_apply,
            use_conditions=use_conditions,
            access_conditions=access_conditions,
            size=size,
            format_name=format_name,
            format_version=format_version,
            inherit_notes=inherit_notes)

        # NOTE(review): called once per file inside the loop -- looks like it
        # was intended to run once after the loop; confirm delete_pairs is
        # idempotent for the same dip_uuid
        delete_pairs(dip_uuid)
def upload_to_atk(mylist, atuser, ead_actuate, ead_show, object_type, use_statement, uri_prefix, dip_uuid, access_conditions, use_conditions, restrictions, dip_location):
    #TODO get resource_id from caller
    resource_id = 31
    if uri_prefix[-1] == '/':
        uri_prefix = uri_prefix + dip_uuid + "/objects/"
    else:
        uri_prefix = uri_prefix + "/" + dip_uuid + "/objects/"
        
    #get mets object if needed
    mets = None
    if restrictions == 'premis' or len(access_conditions) == 0 or len(use_conditions) == 0:
        try:
            logger.debug("looking for mets: {}".format(dip_uuid))
            mets_source = dip_location + 'METS.' + dip_uuid + '.xml'
            mets = mets_file(mets_source)
            logger.debug("found mets file")
        except Exception:
            raise
            exit(4)
            
    global db
    global cursor
    db = atk.connect_db(args.atdbhost, args.atdbport, args.atdbuser, args.atdbpass, args.atdb)
    cursor = db.cursor()
    
    #get a list of all the items in this collection
    col = atk.collection_list(db, resource_id)
    logger.debug("got collection_list: {}".format(len(col)))
    sql0 = "select max(fileVersionId) from fileversions"
    logger.debug('sql0: ' + sql0)
    cursor.execute(sql0)
    data = cursor.fetchone()
    if not data[0]:
        newfVID = 1
    else:
        newfVID = int(data[0]) 
    logger.debug('base file version id found is ' + str(data[0]))
    global base_fv_id 
    base_fv_id = newfVID        

    pairs = get_pairs(dip_uuid)
    #TODO test to make sure we got some pairs
    
    for f in mylist:
        base_fv_id+=1 
        print 'found file: ' + f
        file_name = os.path.basename(f)
        uuid = file_name[0:36]
        access_restrictions = None
        access_rightsGrantedNote = None
        use_restrictions = None
        use_rightsGrantedNote = None
        if mets and mets[uuid]:
            #get premis info from mets
            for premis in mets[uuid]['premis']:
                logger.debug("{} rights = {}, note={}".format(premis, mets[uuid]['premis'][premis]['restriction'],mets[uuid]['premis'][premis]['rightsGrantedNote']))
                if premis == 'Disseminate':
                    access_restrictions = mets[uuid]['premis']['Disseminate']['restriction']
                    access_rightsGrantedNote = mets[uuid]['premis']['Disseminate']['rightsGrantedNote']
                if premis == 'Publish':
                    use_restrictions = mets[uuid]['premis']['Publish']['restriction']
                    use_rightsGrantedNote = mets[uuid]['premis']['Publish']['rightsGrantedNote']
        try:
            container1 = file_name[44:47]
            container2 = file_name[48:53]
        except:
            logger.error('file name does not have container ids in it')
            exit(5)
        logger.debug ("determine restrictions")
        #determine restrictions
        if restrictions == 'no':
            restrictions_apply = False
        elif restrictions == 'yes':
            restrictions_apply = True
            ead_actuate = "none"
            ead_show = "none"
        elif restrictions == 'premis':
            logger.debug("premis restrictions")
            if access_restrictions == 'Allow' and use_restrictions == 'Allow':
                restrictions_apply = False
            else:
                restrictions_apply = True
                ead_actuate = "none"
                ead_show = "none"        
                
        if len(use_conditions) == 0 or restrictions == 'premis':
            if use_rightsGrantedNote:
                use_conditions = use_rightsGrantedNote

        if len(access_conditions) == 0 or restrictions == 'premis':
            if access_rightsGrantedNote:
                access_conditions = access_rightsGrantedNote
        
        short_file_name = file_name[37:]
        time_now = strftime("%Y-%m-%d %H:%M:%S", localtime())
        file_uri = uri_prefix  + file_name

        if uuid in pairs:
            is_resource = False
            if pairs[uuid]['rcid'] > 0:
                sql1 = '''select resourceComponentId, dateBegin, dateEnd, dateExpression, title from
                          resourcescomponents where resourcecomponentid = {}'''.format(pairs[uuid]['rcid'])
            else:
                is_resource = True
                sql1 = '''select resourceComponentId, dateBegin, dateEnd, dateExpression, title from
                          resources where resourceid = {}'''.format(pairs[uuid]['rid']) 
                       
            logger.debug('sql1:' + sql1) 
            cursor.execute(sql1)
            data = cursor.fetchone()
            rcid = data[0]
            dateBegin = data[1]
            dateEnd = data[2]
            dateExpression = data[3]
            rc_title = data[4]
            logger.debug("found rc_title " + rc_title + ":" + str(len(rc_title)) ) 
            if (not rc_title or len(rc_title) == 0):
                if (not dateExpression or len(dateExpression) == 0):
                    if dateBegin == dateEnd:
                        short_file_name = str(dateBegin)
                    else:
                        short_file_name = str(dateBegin) + '-' + str(dateEnd)
                else:
                    short_file_name = dateExpression
            else:
                short_file_name = rc_title

            logger.debug("dateExpression is : " + str(dateExpression) + str(len(dateExpression)))
            logger.debug("dates are  " + str(dateBegin) + "-" + str(dateEnd))
            logger.debug("short file name is " + str(short_file_name))
 
            sql2 = "select repositoryId from repositories" 
            logger.debug('sql2: ' + sql2)

            cursor.execute(sql2)
            data = cursor.fetchone()
            repoId = data[0]
            logger.debug('repoId: ' + str(repoId))
            sql3 = " select max(archDescriptionInstancesId) from archdescriptioninstances"
            #logger.debug('sql3: ' + sql3) 
            cursor.execute(sql3)
            data = cursor.fetchone()
            newaDID = int(data[0]) + 1

            if is_resource:
                sql4 = "insert into archdescriptioninstances (archDescriptionInstancesId, instanceDescriminator, instanceType, resourceId) values (%d, 'digital','Digital object',%d)" % (newaDID, rcid)
            else:
                sql4 = "insert into archdescriptioninstances (archDescriptionInstancesId, instanceDescriminator, instanceType, resourceComponentId) values (%d, 'digital','Digital object',%d)" % (newaDID, rcid)
        
            #logger.debug('sql4:' + sql4)
            adid = process_sql(sql4)
            #added sanity checks in case date fields in original archival description were all empty
            if len(dateExpression) == 0:
                dateExpression = 'null'
            if not dateBegin: 
                dateBegin = 0
            if not dateEnd:
                dateEnd = 0
   
            sql5 = """INSERT INTO digitalobjects                  
               (`version`,`lastUpdated`,`created`,`lastUpdatedBy`,`createdBy`,`title`,
                `dateExpression`,`dateBegin`,`dateEnd`,`languageCode`,`restrictionsApply`,
                `eadDaoActuate`,`eadDaoShow`,`metsIdentifier`,`objectType`,`objectOrder`,
                `archDescriptionInstancesId`,`repositoryId`)
               VALUES (1,'%s', '%s','%s','%s','%s','%s',%d, %d,'English',%d,'%s','%s','%s','%s',0,%d,%d)""" % (time_now, time_now, atuser, atuser, short_file_name,dateExpression, dateBegin, dateEnd, int(restrictions_apply), ead_actuate, ead_show,uuid, object_type, newaDID, repoId)
            #logger.debug('sql5: ' + sql5)
            doID = process_sql(sql5)
            sql6 = """insert into fileversions (fileVersionId, version, lastUpdated, created, lastUpdatedBy, createdBy, uri, useStatement, sequenceNumber, eadDaoActuate,eadDaoShow, digitalObjectId)
                  values 
               (%d, 1, '%s', '%s', '%s', '%s', '%s', '%s', %d, '%s','%s', %d)""" % (base_fv_id,time_now, time_now,atuser,atuser,file_uri,use_statement,0, ead_actuate,ead_show, doID)
            logger.debug('sql6: ' + sql6)
            process_sql(sql6)

            #create notes
            sql7 = " select max(archdescriptionrepeatingdataId) from archdescriptionrepeatingdata"
            #logger.debug('sql7: ' + sql7) 
            cursor.execute(sql7)
            data = cursor.fetchone()
       
            #existence and location of originals note 
            newadrd = int(data[0]) + 1
            seq_num = 0
            note_content = dip_uuid
            logger.debug("about to run sql8")
            sql8 = """insert into archdescriptionrepeatingdata 
                (archdescriptionrepeatingdataid, descriminator, version, lastupdated, created, lastupdatedby ,createdby, repeatingdatatype, title, sequenceNumber,
                digitalObjectId, noteContent, notesetctypeid, basic, multiPart,internalOnly) values 
                (%d, 'note',%d, '%s', '%s', '%s', '%s','Note','', 0, %d, '%s',13, '', '', '')""" % (newadrd, seq_num, time_now, time_now, atuser, atuser, doID, note_content ) 
            #logger.debug('sql8: ' + sql8)
            adrd = process_sql(sql8) 
        
            #conditions governing access note
            newadrd += 1
            seq_num += 1
            note_content = access_conditions
        
            sql9 = """insert into archdescriptionrepeatingdata 
                (archdescriptionrepeatingdataid, descriminator, version, lastupdated, created, lastupdatedby ,createdby, repeatingdatatype, title, sequenceNumber,
                digitalObjectId, noteContent, notesetctypeid, basic, multipart, internalOnly) values 
                (%d, 'note',0, '%s', '%s', '%s', '%s','Note','', %d, %d, '%s',8, '', '', '')""" % (newadrd, time_now, time_now, atuser, atuser, seq_num, doID, note_content )
            adrd = process_sql(sql9) 
            #logger.debug('sql9:' + sql9)
         
            #conditions governing use` note
            newadrd += 1
            seq_num += 1
            note_content = use_conditions

            sql10 = """insert into archdescriptionrepeatingdata 
                (archdescriptionrepeatingdataid, descriminator, version, lastupdated, created, lastupdatedby ,createdby, repeatingdatatype, title, sequenceNumber,
                digitalObjectId, noteContent, notesetctypeid, basic, multipart, internalOnly) values 
                (%d, 'note',0, '%s', '%s', '%s', '%s','Note','', %d, %d, '%s',9, '', '', '')""" % (newadrd, time_now, time_now, atuser, atuser, seq_num, doID, note_content )
            adrd = process_sql(sql10)
            logger.debug('sql10:' + sql10)
   
    process_sql("commit")
    delete_pairs(dip_uuid)
    print("completed upload successfully")
    exit(0)