def findOrCreateSipInDB(path, waitSleep=dbWaitSleep, unit_type='SIP'):
    """Match a directory to a database SIP by its appended UUID, or by path.

    If no match is found, a new SIP row is created. Returns the SIP UUID.
    NOTE: ``waitSleep`` is accepted but never used in this body.
    """
    # Normalise the absolute path to the %sharedPath% placeholder form
    # that is stored in the database.
    path = path.replace(config.get('MCPServer', "sharedDirectory"), "%sharedPath%", 1)
    # Find UUID on end of SIP path
    UUID = fetchUUIDFromPath(path)
    if UUID:
        try:
            sip = SIP.objects.get(uuid=UUID)
        except SIP.DoesNotExist:
            # No row for this UUID yet: create one under the given path.
            databaseFunctions.createSIP(path, UUID=UUID)
        else:
            current_path = sip.currentpath
            if current_path != path and unit_type == 'SIP':
                # Ensure path provided matches path in DB
                sip.currentpath = path
                sip.save()
    else:
        # Find it in the database by path alone (no UUID suffix on the dir).
        sips = SIP.objects.filter(currentpath=path)
        count = sips.count()
        if count > 1:
            logger.warning('More than one SIP for path %s, using first result', path)
        if count > 0:
            UUID = sips[0].uuid
            logger.info('Using existing SIP %s at %s', UUID, path)
        else:
            # UUID is still falsy here; a new SIP will be created below.
            logger.info('Not using existing SIP %s at %s', UUID, path)
    # Create it (only reached when neither the UUID nor the path matched).
    if not UUID:
        UUID = databaseFunctions.createSIP(path)
        logger.info('Creating SIP %s at %s', UUID, path)
    return UUID
def create_aic(request, *args, **kwargs):
    """Create an AIC (Archival Information Collection) from AIP UUIDs POSTed
    via ``CreateAICForm``.

    Fetches the AIP names from Elasticsearch, builds a staging directory
    containing one file per AIP (filename = AIP UUID, contents = AIP name),
    registers the new SIP (type 'AIC') in the database, then redirects to
    the AIC metadata view — or back to the archival storage index on error.
    """
    aic_form = forms.CreateAICForm(request.POST or None)
    if aic_form.is_valid():
        # 'results' is a stringified Python list of AIP UUIDs; parse it
        # safely with literal_eval (no arbitrary code execution).
        aip_uuids = ast.literal_eval(aic_form.cleaned_data['results'])
        logger.info("AIC AIP UUIDs: {}".format(aip_uuids))
        # The form was passed a raw list of all AIP UUIDs mapping the user's query;
        # use those to fetch their names, which is used to produce files below.
        query = {
            "query": {
                "terms": {
                    "uuid": aip_uuids,
                }
            }
        }
        es_client = elasticSearchFunctions.get_client()
        results = es_client.search(
            body=query,
            index='aips',
            doc_type='aip',
            fields='uuid,name',
            size=elasticSearchFunctions.MAX_QUERY_SIZE,  # return all records
        )
        # Create files in staging directory with AIP information
        shared_dir = helpers.get_server_config_value('sharedDirectory')
        staging_dir = os.path.join(shared_dir, 'tmp')
        # Create SIP (AIC) directory in staging directory
        temp_uuid = str(uuid.uuid4())
        destination = os.path.join(staging_dir, temp_uuid)
        try:
            os.mkdir(destination)
            os.chmod(destination, 0o770)
        except os.error:
            messages.error(request, "Error creating AIC")
            logger.exception(
                "Error creating AIC: Error creating directory {}".format(
                    destination))
            return redirect('archival_storage_index')
        # Create SIP in DB; path is stored relative to %sharedPath% with a
        # trailing slash, as the MCP expects.
        mcp_destination = destination.replace(shared_dir, '%sharedPath%') + '/'
        databaseFunctions.createSIP(mcp_destination, UUID=temp_uuid, sip_type='AIC')
        # Create files with filename = AIP UUID, and contents = AIP name
        for aip in results['hits']['hits']:
            # Legacy ES 'fields' responses wrap each value in a list.
            filepath = os.path.join(destination, aip['fields']['uuid'][0])
            with open(filepath, 'w') as f:
                os.chmod(filepath, 0o660)
                f.write(str(aip['fields']['name'][0]))
        return redirect('components.ingest.views.aic_metadata_add', temp_uuid)
    else:
        messages.error(request, "Error creating AIC")
        logger.error("Error creating AIC: Form not valid: {}".format(aic_form))
        return redirect('archival_storage_index')
def call(jobs):
    """Move each unit's METS file into its DIP, move the DIP into the
    uploadDIP watched directory, and create SIP/Job rows so the DIP
    appears in the dashboard.

    Per the original comments, job.args[2] is the unit (SIP) path;
    job.args[1] and job.args[3] are unused.
    """
    with transaction.atomic():
        for job in jobs:
            with job.JobContext():
                # COPY THE METS FILE
                # Move the DIP Directory
                # job.args[1] (fauxUUID) is unused.
                # job.args[3] (date) is unused.
                unitPath = job.args[2]
                # unitPath ends with a slash; strip it before basename.
                basename = os.path.basename(unitPath[:-1])
                uuidLen = 36
                # Directory name layout appears to be
                # <name>-<sipUUID>-<fauxUUID>; slice off both UUID suffixes
                # to recover the name, and extract the SIP UUID.
                originalSIPName = basename[:-(uuidLen + 1) * 2]
                originalSIPUUID = basename[:-(uuidLen + 1)][-uuidLen:]
                METSPath = os.path.join(
                    unitPath,
                    "metadata/submissionDocumentation/data/",
                    "METS.%s.xml" % (originalSIPUUID),
                )
                if not os.path.isfile(METSPath):
                    job.pyprint("Mets file not found: ", METSPath, file=sys.stderr)
                    job.set_status(255)
                    continue
                # move mets to DIP
                src = METSPath
                dst = os.path.join(unitPath, "DIP", os.path.basename(METSPath))
                shutil.move(src, dst)
                # Move DIP
                src = os.path.join(unitPath, "DIP")
                dst = os.path.join(
                    "/var/archivematica/sharedDirectory/watchedDirectories/uploadDIP/",
                    originalSIPName + "-" + originalSIPUUID,
                )
                shutil.move(src, dst)
                try:
                    SIP.objects.get(uuid=originalSIPUUID)
                except SIP.DoesNotExist:
                    # otherwise doesn't appear in dashboard
                    createSIP(unitPath, UUID=originalSIPUUID, printfn=job.pyprint)
                Job.objects.create(
                    jobtype="Hack to make DIP Jobs appear",
                    directory=unitPath,
                    sip_id=originalSIPUUID,
                    currentstep=Job.STATUS_COMPLETED_SUCCESSFULLY,
                    unittype="unitSIP",
                    microservicegroup="Upload DIP",
                )
def copy_to_originals(request):
    """Move a transfer's objects into a new SIP under SIPsUnderConstruction.

    Builds a temporary SIP structure in the currentlyProcessing directory,
    registers the SIP in the database, moves the objects in, then moves the
    whole SIP into the watched SIPCreation directory so processing begins.

    Returns an HttpResponse with a JSON body: {'message': ..., 'error': True}
    on failure, {'message': 'Copy successful.'} on success.
    """
    filepath = request.POST.get('filepath', '')
    error = check_filepath_exists('/' + filepath)
    # Fixed: was `error == None` / `error != None`; identity comparison is
    # the correct (and idiomatic) test for None.
    if error is None:
        processingDirectory = '/var/archivematica/sharedDirectory/currentlyProcessing/'
        sipName = os.path.basename(filepath)
        autoProcessSIPDirectory = '/var/archivematica/sharedDirectory/watchedDirectories/SIPCreation/SIPsUnderConstruction/'
        tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
        destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
        sipUUID = str(uuid.uuid4())
        createStructuredDirectory(tmpSIPDir)
        # Register the SIP under its final, %sharedPath%-relative location.
        databaseFunctions.createSIP(
            destSIPDir.replace('/var/archivematica/sharedDirectory/', '%sharedPath%'),
            sipUUID)
        objectsDirectory = os.path.join('/', filepath, 'objects')
        # Move the objects into the staged SIP's objects/ directory.
        for item in os.listdir(objectsDirectory):
            shutil.move(os.path.join(objectsDirectory, item),
                        os.path.join(tmpSIPDir, "objects", item))
        # Move the assembled SIP into the watched directory.
        shutil.move(tmpSIPDir, destSIPDir)
    response = {}
    if error is not None:
        response['message'] = error
        response['error'] = True
    else:
        response['message'] = 'Copy successful.'
    return HttpResponse(
        simplejson.JSONEncoder().encode(response),
        mimetype='application/json'
    )
def copy_to_originals(request):
    """Assemble a SIP from the POSTed 'filepath' and hand it off for processing.

    The objects are staged under currentlyProcessing, a SIP row is created in
    the database, and the staged SIP is moved into the watched
    SIPsUnderConstruction directory. A JSON status payload is returned.
    """
    filepath = request.POST.get('filepath', '')
    error = check_filepath_exists('/' + filepath)
    if error == None:
        processing_root = '/var/archivematica/sharedDirectory/currentlyProcessing/'
        sip_name = os.path.basename(filepath)
        #autoProcessSIPDirectory = ORIGINALS_DIR
        watched_root = '/var/archivematica/sharedDirectory/watchedDirectories/SIPCreation/SIPsUnderConstruction/'
        staging_dir = os.path.join(processing_root, sip_name) + "/"
        final_dir = os.path.join(watched_root, sip_name) + "/"
        sip_uuid = uuid.uuid4().__str__()
        createStructuredDirectory(staging_dir)
        databaseFunctions.createSIP(
            final_dir.replace('/var/archivematica/sharedDirectory/', '%sharedPath%'),
            sip_uuid)
        objects_dir = os.path.join('/', filepath, 'objects')
        # Relocate each object into the staged SIP's objects/ directory.
        for entry in os.listdir(objects_dir):
            shutil.move(os.path.join(objects_dir, entry),
                        os.path.join(staging_dir, "objects", entry))
        # Hand the finished SIP to the watched directory.
        shutil.move(staging_dir, final_dir)
    """
    # confine destination to subdir of originals
    filepath = os.path.join('/', filepath)
    destination = os.path.join(ORIGINALS_DIR, os.path.basename(filepath))
    destination = pad_destination_filepath_if_it_already_exists(destination)
    #error = 'Copying from ' + filepath + ' to ' + destination + '.'
    try:
        shutil.copytree(
            filepath,
            destination
        )
    except:
        error = 'Error copying from ' + filepath + ' to ' + destination + '.'
    """
    response = {}
    if error != None:
        response['message'] = error
        response['error'] = True
    else:
        response['message'] = 'Copy successful.'
    return HttpResponse(simplejson.JSONEncoder().encode(response),
                        mimetype='application/json')
def create_arranged_sip(staging_sip_path, files, sip_uuid):
    """Turn an arranged set of files into a SIP and start it processing.

    Creates (or claims) the SIP database row, re-points each file's
    currentlocation at the new SIP, sets up logs/metadata directories,
    writes an arrange.log of source -> destination moves, and finally
    moves the staged directory into SIPsUnderConstruction.

    Returns an error string if ``sip_uuid`` belongs to an already-started
    SIP; otherwise returns None implicitly.
    """
    shared_dir = helpers.get_server_config_value('sharedDirectory')
    staging_sip_path = staging_sip_path.lstrip('/')
    staging_abs_path = os.path.join(shared_dir, staging_sip_path)
    # Create SIP object
    # Second path component is the SIP name (first is the staging area).
    sip_name = staging_sip_path.split('/')[1]
    sip_path = os.path.join(shared_dir, 'watchedDirectories', 'SIPCreation', 'SIPsUnderConstruction', sip_name)
    currentpath = sip_path.replace(shared_dir, '%sharedPath%', 1) + '/'
    sip_path = helpers.pad_destination_filepath_if_it_already_exists(sip_path)
    try:
        sip = models.SIP.objects.get(uuid=sip_uuid)
    except models.SIP.DoesNotExist:
        # Create a SIP object if none exists
        databaseFunctions.createSIP(currentpath, sip_uuid)
    else:
        # Update the already-created SIP with its path
        if sip.currentpath is not None:
            return "Provided SIP UUID ({}) belongs to an already-started SIP!".format(
                sip_uuid)
        sip.currentpath = currentpath
        sip.save()
    # Update currentLocation of files
    for file_ in files:
        if file_.get('uuid'):
            # Strip 'arrange/sip_name' from file path
            in_sip_path = '/'.join(file_['destination'].split('/')[2:])
            currentlocation = '%SIPDirectory%' + in_sip_path
            models.File.objects.filter(uuid=file_['uuid']).update(
                sip=sip_uuid, currentlocation=currentlocation)
    # Create directories for logs and metadata, if they don't exist
    for directory in ('logs', 'metadata', os.path.join('metadata', 'submissionDocumentation')):
        try:
            os.mkdir(os.path.join(staging_abs_path, directory))
        except os.error as exception:
            # Tolerate pre-existing directories; re-raise anything else.
            if exception.errno != errno.EEXIST:
                raise
    # Add log of original location and new location of files
    arrange_log = os.path.join(staging_abs_path, 'logs', 'arrange.log')
    with open(arrange_log, 'w') as f:
        log = ('%s -> %s\n' % (file_['source'], file_['destination'])
               for file_ in files if file_.get('uuid'))
        f.writelines(log)
    # Move to watchedDirectories/SIPCreation/SIPsUnderConstruction
    logger.info('create_arranged_sip: move from %s to %s', staging_abs_path, sip_path)
    shutil.move(src=staging_abs_path, dst=sip_path)
def create_aic(request, *args, **kwargs):
    """Create an AIC (Archival Information Collection) from AIP UUIDs POSTed
    via ``CreateAICForm``.

    Looks up the AIP names in Elasticsearch (via ``_source`` fields), stages
    one file per AIP (filename = AIP UUID, contents = AIP name) in a new
    directory, registers the SIP (type 'AIC') in the database, then
    redirects to the AIC metadata view — or back to the index on error.
    """
    aic_form = forms.CreateAICForm(request.POST or None)
    if aic_form.is_valid():
        # 'results' is a stringified Python list; literal_eval parses it
        # without executing arbitrary code.
        aip_uuids = ast.literal_eval(aic_form.cleaned_data["results"])
        logger.info("AIC AIP UUIDs: {}".format(aip_uuids))
        # The form was passed a raw list of all AIP UUIDs mapping the user's query;
        # use those to fetch their names, which is used to produce files below.
        query = {"query": {"terms": {"uuid": aip_uuids}}}
        es_client = elasticSearchFunctions.get_client()
        results = es_client.search(
            body=query,
            index="aips",
            _source="uuid,name",
            size=elasticSearchFunctions.MAX_QUERY_SIZE,  # return all records
        )
        # Create files in staging directory with AIP information
        shared_dir = settings.SHARED_DIRECTORY
        staging_dir = os.path.join(shared_dir, "tmp")
        # Create SIP (AIC) directory in staging directory
        temp_uuid = str(uuid.uuid4())
        destination = os.path.join(staging_dir, temp_uuid)
        try:
            os.mkdir(destination)
            os.chmod(destination, 0o770)
        except os.error:
            messages.error(request, "Error creating AIC")
            logger.exception(
                "Error creating AIC: Error creating directory {}".format(
                    destination))
            return redirect("archival_storage_index")
        # Create SIP in DB; path stored relative to %sharedPath%, with the
        # trailing slash the MCP expects.
        mcp_destination = destination.replace(shared_dir, "%sharedPath%") + "/"
        databaseFunctions.createSIP(mcp_destination, UUID=temp_uuid, sip_type="AIC")
        # Create files with filename = AIP UUID, and contents = AIP name
        for aip in results["hits"]["hits"]:
            filepath = os.path.join(destination, aip["_source"]["uuid"])
            with open(filepath, "w") as f:
                os.chmod(filepath, 0o660)
                f.write(str(aip["_source"]["name"]))
        return redirect("components.ingest.views.aic_metadata_add", temp_uuid)
    else:
        messages.error(request, "Error creating AIC")
        logger.error("Error creating AIC: Form not valid: {}".format(aic_form))
        return redirect("archival_storage_index")
def call(jobs):
    """Split each top-level container in a transfer's objects directory into
    its own SIP.

    For every directory directly under the objects directory: create a
    structured SIP directory, register the SIP in the database, move the
    container's contents into it, re-point the transfer's File rows at the
    new SIP (verifying each file exists on disk), then move the staged SIP
    into the auto-process watched directory.

    job.args layout: [1] objects directory, [2] transfer name,
    [3] transfer UUID, [4] processing directory,
    [5] autoProcessSIP directory, [6] shared path.
    """
    with transaction.atomic():
        for job in jobs:
            with job.JobContext():
                objectsDirectory = job.args[1]
                transferName = job.args[2]
                transferUUID = job.args[3]
                processingDirectory = job.args[4]
                autoProcessSIPDirectory = job.args[5]
                # BUG FIX: was `job.argv[6]` — the job arguments are exposed
                # as `args` (used for every other index above), so `argv`
                # would raise AttributeError at runtime.
                sharedPath = job.args[6]
                transfer_objects_directory = '%transferDirectory%objects'
                for container in os.listdir(objectsDirectory):
                    sipUUID = str(uuid.uuid4())
                    containerPath = os.path.join(objectsDirectory, container)
                    # Only directories become SIPs; loose files are skipped.
                    if not os.path.isdir(containerPath):
                        job.pyprint("file (not container) found: ", container, file=sys.stderr)
                        continue
                    sipName = "%s-%s" % (transferName, container)
                    tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
                    destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
                    archivematicaFunctions.create_structured_directory(tmpSIPDir, manual_normalization=True)
                    # Register the SIP under its final, %sharedPath%-relative path.
                    databaseFunctions.createSIP(destSIPDir.replace(sharedPath, '%sharedPath%'), sipUUID, printfn=job.pyprint)
                    # move the objects to the SIPDir
                    for item in os.listdir(containerPath):
                        shutil.move(os.path.join(containerPath, item), os.path.join(tmpSIPDir, "objects", item))
                    # For each of the transfer's file rows under this container,
                    # confirm the file is in the SIP objects directory and update
                    # its current location and owning SIP.
                    directory = os.path.join(transfer_objects_directory, container)
                    files = File.objects.filter(removedtime__isnull=True,
                                                currentlocation__startswith=directory,
                                                transfer_id=transferUUID)
                    for f in files:
                        currentPath = databaseFunctions.deUnicode(f.currentlocation).replace(directory, transfer_objects_directory)
                        currentSIPFilePath = currentPath.replace("%transferDirectory%", tmpSIPDir)
                        if os.path.isfile(currentSIPFilePath):
                            f.currentlocation = currentPath.replace("%transferDirectory%", "%SIPDirectory%")
                            f.sip_id = sipUUID
                            f.save()
                        else:
                            job.pyprint("file not found: ", currentSIPFilePath, file=sys.stderr)
                    # moveSIPTo autoProcessSIPDirectory
                    shutil.move(tmpSIPDir, destSIPDir)
def findOrCreateSipInDB(path, waitSleep=dbWaitSleep):
    """Match a directory to a SIP row by the UUID suffixed to its path, or
    by its path in the SIPs table; create a new SIP row when neither
    matches. Returns the SIP UUID.

    NOTE: ``waitSleep`` is currently unused — the sleep is commented out.
    """
    UUID = ""
    # Normalise to the %sharedPath% placeholder form stored in the DB.
    path = path.replace(config.get('MCPServer', "sharedDirectory"), "%sharedPath%", 1)
    #find UUID on end of SIP path
    uuidLen = -36
    # path carries a trailing slash, so the UUID occupies path[-37:-1].
    if isUUID(path[uuidLen-1:-1]):
        UUID = path[uuidLen-1:-1]
    if UUID == "":
        #Find it in the database
        databaseInterface.printSQL = True
        # NOTE(review): query built by string concatenation; the value is
        # escaped, but a parameterised query would be safer.
        sql = """SELECT sipUUID FROM SIPs WHERE currentPath = '""" + MySQLdb.escape_string(path) + "';"
        #if waitSleep != 0:
        #time.sleep(waitSleep) #let db be updated by the microservice that moved it.
        c, sqlLock = databaseInterface.querySQL(sql)
        row = c.fetchone()
        if not row:
            print "Not opening existing SIP:", UUID, "-", path
        # Iterate all matches; the last row fetched wins.
        while row != None:
            UUID = row[0]
            print "Opening existing SIP:", UUID, "-", path
            row = c.fetchone()
        sqlLock.release()
    #Create it
    if UUID == "":
        UUID = databaseFunctions.createSIP(path)
        print "DEBUG creating sip", path, UUID
    return UUID
def findOrCreateSipInDB(path, waitSleep=dbWaitSleep):
    """Match a directory to a SIP row by the UUID suffixed to its path, or
    by its path in the SIPs table; create a new SIP row otherwise.
    Returns the SIP UUID.

    NOTE: ``waitSleep`` is currently unused — the sleep is commented out.
    """
    UUID = ""
    # Normalise to the %sharedPath% placeholder form stored in the DB.
    path = path.replace(config.get('MCPServer', "sharedDirectory"), "%sharedPath%", 1)
    #find UUID on end of SIP path
    uuidLen = -36
    # path carries a trailing slash, so the UUID occupies path[-37:-1].
    if isUUID(path[uuidLen - 1:-1]):
        UUID = path[uuidLen - 1:-1]
    if UUID == "":
        #Find it in the database
        databaseInterface.printSQL = True
        # NOTE(review): query built by string concatenation; the value is
        # escaped, but a parameterised query would be safer.
        sql = """SELECT sipUUID FROM SIPs WHERE currentPath = '""" + MySQLdb.escape_string(
            path) + "';"
        #if waitSleep != 0:
        #time.sleep(waitSleep) #let db be updated by the microservice that moved it.
        c, sqlLock = databaseInterface.querySQL(sql)
        row = c.fetchone()
        if not row:
            print "Not opening existing SIP:", UUID, "-", path
        # Iterate all matches; the last row fetched wins.
        while row != None:
            UUID = row[0]
            print "Opening existing SIP:", UUID, "-", path
            row = c.fetchone()
        sqlLock.release()
    #Create it
    if UUID == "":
        UUID = databaseFunctions.createSIP(path)
        print "DEBUG creating sip", path, UUID
    return UUID
def create_aic(request):
    """Create an AIC from POSTed list of AIP UUIDs.

    Fetches the AIP names from Elasticsearch, stages one file per AIP
    (filename = AIP UUID, contents = AIP name) in a new directory, and
    registers the SIP (type 'AIC') in the database.

    :param request: Django request object.
    :return: Redirect to appropriate view.
    """
    uuids = request.GET.get("uuids")
    if not uuids:
        messages.error(request, "Unable to create AIC: No AIPs selected")
        return redirect("archival_storage:archival_storage_index")
    # Make a list of UUIDs from comma-separated string in request.
    aip_uuids = uuids.split(",")
    logger.info("AIC AIP UUIDs: {}".format(aip_uuids))
    # Use the AIP UUIDs to fetch names, which are used to produce files below.
    query = {"query": {"terms": {"uuid": aip_uuids}}}
    es_client = es.get_client()
    results = es_client.search(
        body=query, index=es.AIPS_INDEX, _source="uuid,name", size=es.MAX_QUERY_SIZE
    )
    # Create SIP (AIC) directory in a staging directory.
    shared_dir = settings.SHARED_DIRECTORY
    staging_dir = os.path.join(shared_dir, "tmp")
    temp_uuid = str(uuid.uuid4())
    destination = os.path.join(staging_dir, temp_uuid)
    try:
        os.mkdir(destination)
        os.chmod(destination, DIRECTORY_PERMISSIONS)
    except OSError as e:
        messages.error(request, "Error creating AIC")
        logger.exception("Error creating AIC: {}".format(e))
        return redirect("archival_storage:archival_storage_index")
    # Create an entry for the SIP (AIC) in the database.
    # os.path.join(..., "") appends the trailing slash the MCP expects.
    mcp_destination = os.path.join(destination.replace(shared_dir, "%sharedPath%"), "")
    databaseFunctions.createSIP(mcp_destination, UUID=temp_uuid, sip_type="AIC")
    # Create files with filename = AIP UUID, and contents = AIP name.
    for aip in results["hits"]["hits"]:
        filepath = os.path.join(destination, aip["_source"]["uuid"])
        with open(filepath, "w") as f:
            os.chmod(filepath, FILE_PERMISSIONS)
            f.write(str(aip["_source"]["name"]))
    return redirect("ingest:aic_metadata_add", temp_uuid)
def findOrCreateSipInDB(path, waitSleep=dbWaitSleep, unit_type="SIP"):
    """Match a directory to a database SIP by its appended UUID, or by path;
    create a SIP row if neither matches. Returns the SIP UUID.

    NOTE: ``waitSleep`` is accepted but unused in this body.
    """
    # Normalise the absolute path to the %sharedPath% placeholder form
    # stored in the database.
    path = path.replace(django_settings.SHARED_DIRECTORY, "%sharedPath%", 1)
    query = Q(currentpath=path)
    # Find UUID on end of SIP path
    UUID = fetchUUIDFromPath(path)
    sip = None
    if UUID:
        # Match on either the path or the UUID extracted from it.
        query = query | Q(uuid=UUID)
    sips = SIP.objects.filter(query)
    count = sips.count()
    if count > 1:
        # This might have happened because the UUID at the end of the directory
        # name corresponds to a different SIP in the database.
        # Try refiltering the queryset on path alone, and see if that brought us
        # down to a single SIP.
        sips = sips.filter(currentpath=path)
        count = sips.count()
        # Darn: we must have multiple SIPs with the same path in the database.
        # We have no reasonable way to recover from this condition.
        if count > 1:
            logger.error(
                "More than one SIP for path %s and/or UUID %s, using first result",
                path,
                UUID,
            )
    if count > 0:
        sip = sips[0]
        UUID = sip.uuid
        logger.info("Using existing SIP %s at %s", UUID, path)
    else:
        logger.info("Not using existing SIP %s at %s", UUID, path)
    if sip is None:
        # Create it
        # Note that if UUID is None here, a new UUID will be generated
        # and returned by the function; otherwise it returns the
        # value that was passed in.
        UUID = createSIP(path, UUID=UUID)
        logger.info("Creating SIP %s at %s", UUID, path)
    else:
        current_path = sip.currentpath
        if current_path != path and unit_type == "SIP":
            # Ensure path provided matches path in DB
            sip.currentpath = path
            sip.save()
    return UUID
if __name__ == '__main__':
    # CLI entry point: build a SIP from a transfer's objects directory.
    # Arguments (all required, in order): objects directory, transfer name,
    # transfer UUID, processing directory, autoProcessSIP directory,
    # shared path.
    objectsDirectory = sys.argv[1]
    transferName = sys.argv[2]
    transferUUID = sys.argv[3]
    processingDirectory = sys.argv[4]
    autoProcessSIPDirectory = sys.argv[5]
    sharedPath = sys.argv[6]
    sipName = transferName
    sipUUID = uuid.uuid4().__str__()
    tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
    destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
    createStructuredDirectory(tmpSIPDir)
    # Register the SIP under its %sharedPath%-relative destination.
    databaseFunctions.createSIP(destSIPDir.replace(sharedPath, '%sharedPath%'), sipUUID)
    #move the objects to the SIPDir
    for item in os.listdir(objectsDirectory):
        shutil.move(os.path.join(objectsDirectory, item), os.path.join(tmpSIPDir, "objects", item))
    #get the database list of files in the objects directory
    #for each file, confirm it's in the SIP objects directory, and update the current location/ owning SIP'
    # NOTE(review): SQL is built by string concatenation; transferUUID comes
    # straight from argv — prefer a parameterised query.
    sql = """SELECT fileUUID, currentLocation FROM Files WHERE removedTime = 0 AND currentLocation LIKE '\%transferDirectory\%objects%' AND transferUUID = '""" + transferUUID + "'"
    for row in databaseInterface.queryAllSQL(sql):
        fileUUID = row[0]
        currentPath = databaseFunctions.deUnicode(row[1])
        currentSIPFilePath = currentPath.replace("%transferDirectory%", tmpSIPDir)
        # Only re-point rows whose file actually landed in the SIP.
        if os.path.isfile(currentSIPFilePath):
            sql = """UPDATE Files SET currentLocation='%s', sipUUID='%s' WHERE fileUUID='%s'""" % (MySQLdb.escape_string(currentPath.replace("%transferDirectory%", "%SIPDirectory%")), sipUUID, fileUUID)
            databaseInterface.runSQL(sql)
def process_transfer(request, transfer_uuid):
    """Convert a completed transfer into a SIP and hand it to the
    SIPsUnderConstruction watched directory.

    Requires an authenticated user. Creates the SIP row, moves the
    transfer's objects into a staged SIP, re-points the Files rows,
    copies processingMCP.xml across, moves the SIP into place, and
    resets the transfer's file status in Elasticsearch.
    Returns a JSON HttpResponse describing the outcome.
    """
    response = {}
    if request.user.id:
        # get transfer info
        transfer = models.Transfer.objects.get(uuid=transfer_uuid)
        transfer_path = transfer.currentlocation.replace(
            '%sharedPath%',
            helpers.get_server_config_value('sharedDirectory')
        )
        # NOTE(review): function-level imports kept as-is; consider moving
        # to module top level.
        import MySQLdb
        import databaseInterface
        import databaseFunctions
        import shutil
        from archivematicaCreateStructuredDirectory import createStructuredDirectory
        from archivematicaCreateStructuredDirectory import createManualNormalizedDirectoriesList
        createStructuredDirectory(transfer_path, createManualNormalizedDirectories=False)
        processingDirectory = helpers.get_server_config_value('processingDirectory')
        transfer_directory_name = os.path.basename(transfer_path[:-1])
        # Strip the "-<uuid>" suffix (37 characters) from the directory name.
        transfer_name = transfer_directory_name[:-37]
        sharedPath = helpers.get_server_config_value('sharedDirectory')
        tmpSIPDir = os.path.join(processingDirectory, transfer_name) + "/"
        #processSIPDirectory = os.path.join(sharedPath, 'watchedDirectories/system/autoProcessSIP') + '/'
        processSIPDirectory = os.path.join(sharedPath, 'watchedDirectories/SIPCreation/SIPsUnderConstruction') + '/'
        #destSIPDir = os.path.join(processSIPDirectory, transfer_name) + "/"
        #destSIPDir = os.path.join(processSIPDirectory, transfer_name + '-' + ) + "/"
        createStructuredDirectory(tmpSIPDir, createManualNormalizedDirectories=False)
        objectsDirectory = os.path.join(transfer_path, 'objects') + '/'
        """
        #create row in SIPs table if one doesn't already exist
        lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
        #lookup_path = '%sharedPath%watchedDirectories/workFlowDecisions/createDip/' + transfer_name + '/'
        sql = " " "SELECT sipUUID FROM SIPs WHERE currentPath = '" " " + MySQLdb.escape_string(lookup_path) + "';"
        rows = databaseInterface.queryAllSQL(sql)
        if len(rows) > 0:
            row = rows[0]
            sipUUID = row[0]
        else:
            sipUUID = uuid.uuid4().__str__()
            databaseFunctions.createSIP(lookup_path, sipUUID)
        """
        # Always create a fresh SIP row for the transfer.
        sipUUID = uuid.uuid4().__str__()
        destSIPDir = os.path.join(processSIPDirectory, transfer_name) + "/"
        lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
        databaseFunctions.createSIP(lookup_path, sipUUID)
        #move the objects to the SIPDir
        for item in os.listdir(objectsDirectory):
            shutil.move(os.path.join(objectsDirectory, item), os.path.join(tmpSIPDir, "objects", item))
        #get the database list of files in the objects directory
        #for each file, confirm it's in the SIP objects directory, and update the current location/ owning SIP'
        # NOTE(review): SQL built by string concatenation; prefer
        # parameterised queries.
        sql = """SELECT fileUUID, currentLocation FROM Files WHERE removedTime = 0 AND currentLocation LIKE '\%transferDirectory\%objects%' AND transferUUID = '""" + transfer_uuid + "'"
        for row in databaseInterface.queryAllSQL(sql):
            fileUUID = row[0]
            currentPath = databaseFunctions.deUnicode(row[1])
            currentSIPFilePath = currentPath.replace("%transferDirectory%", tmpSIPDir)
            if os.path.isfile(currentSIPFilePath):
                sql = """UPDATE Files SET currentLocation='%s', sipUUID='%s' WHERE fileUUID='%s'""" % (MySQLdb.escape_string(currentPath.replace("%transferDirectory%", "%SIPDirectory%")), sipUUID, fileUUID)
                databaseInterface.runSQL(sql)
            else:
                print >>sys.stderr, "file not found: ", currentSIPFilePath
        #copy processingMCP.xml file
        src = os.path.join(os.path.dirname(objectsDirectory[:-1]), "processingMCP.xml")
        dst = os.path.join(tmpSIPDir, "processingMCP.xml")
        shutil.copy(src, dst)
        #moveSIPTo processSIPDirectory
        shutil.move(tmpSIPDir, destSIPDir)
        elasticSearchFunctions.connect_and_change_transfer_file_status(transfer_uuid, '')
        response['message'] = 'SIP ' + sipUUID + ' created.'
    else:
        response['error'] = True
        response['message'] = 'Must be logged in.'
    return HttpResponse(
        simplejson.JSONEncoder(encoding='utf-8').encode(response),
        mimetype='application/json'
    )
def _create_arranged_sip(staging_sip_path, files, sip_uuid):
    """Turn an arranged set of files into a SIP and start it processing.

    Creates (or claims) the SIP database row, re-points each file row at
    the new SIP, optionally creates Directory rows (when the source
    transfer assigned UUIDs to directories), sets up logs/metadata
    directories, writes arrange.log, and moves the staged directory into
    SIPsUnderConstruction.

    Returns a translated error string when ``sip_uuid`` belongs to an
    already-started SIP; otherwise returns None implicitly.
    """
    shared_dir = django_settings.SHARED_DIRECTORY
    staging_sip_path = staging_sip_path.lstrip("/")
    staging_abs_path = os.path.join(shared_dir, staging_sip_path)
    # If an arranged SIP contains a single file that comes from a
    # transfer wherein UUIDs were assigned to directories, then assign new
    # UUIDs to all directories in the arranged SIP.
    diruuids = _source_transfers_gave_uuids_to_directories(files)  # boolean
    # Create SIP object
    # Second path component is the SIP name (first is the staging area).
    sip_name = staging_sip_path.split("/")[1]
    sip_path = os.path.join(
        shared_dir,
        "watchedDirectories",
        "SIPCreation",
        "SIPsUnderConstruction",
        sip_name,
    )
    currentpath = sip_path.replace(shared_dir, "%sharedPath%", 1) + "/"
    sip_path = helpers.pad_destination_filepath_if_it_already_exists(sip_path)
    try:
        sip = models.SIP.objects.get(uuid=sip_uuid)
    except models.SIP.DoesNotExist:
        # Create a SIP object if none exists
        databaseFunctions.createSIP(currentpath, sip_uuid, diruuids=diruuids)
        # Re-fetch so `sip` is bound for the Directory creation below.
        sip = models.SIP.objects.get(uuid=sip_uuid)
    else:
        # Update the already-created SIP with its path
        if sip.currentpath is not None:
            return _(
                "Provided SIP UUID (%(uuid)s) belongs to an already-started SIP!"
            ) % {"uuid": sip_uuid}
        sip.currentpath = currentpath
        sip.diruuids = diruuids
        sip.save()
    # Update currentLocation of files
    # Also get all directory paths implicit in all of the file paths
    directories = set()
    for file_ in files:
        if file_.get("uuid"):
            # Strip 'arrange/sip_name' from file path
            in_sip_path = "/".join(file_["destination"].split("/")[2:])
            currentlocation = "%SIPDirectory%" + in_sip_path
            models.File.objects.filter(uuid=file_["uuid"]).update(
                sip=sip_uuid, currentlocation=currentlocation
            )
            # Get all ancestor directory paths of the file's destination.
            subdir = os.path.dirname(currentlocation)
            while subdir:
                directory = subdir.replace("%SIPDirectory%", "%SIPDirectory%objects/")
                directories.add(directory)
                subdir = os.path.dirname(subdir)
    if diruuids:
        # Create new Directory models for all subdirectories in the newly
        # arranged SIP. Because the user can arbitrarily modify the directory
        # structure, it doesn't make sense to reuse any directory models that
        # were created during transfer.
        models.Directory.create_many(
            archivematicaFunctions.get_dir_uuids(directories, logger),
            sip,
            unit_type="sip",
        )
    # Create directories for logs and metadata, if they don't exist
    for directory in (
        "logs",
        "metadata",
        os.path.join("metadata", "submissionDocumentation"),
    ):
        try:
            os.mkdir(os.path.join(staging_abs_path, directory))
        except os.error as exception:
            # Tolerate pre-existing directories; re-raise anything else.
            if exception.errno != errno.EEXIST:
                raise
    # Add log of original location and new location of files
    arrange_log = os.path.join(staging_abs_path, "logs", "arrange.log")
    with open(arrange_log, "w") as f:
        log = (
            "%s -> %s\n" % (file_["source"], file_["destination"])
            for file_ in files
            if file_.get("uuid")
        )
        f.writelines(log)
    # Move to watchedDirectories/SIPCreation/SIPsUnderConstruction
    logger.info("_create_arranged_sip: move from %s to %s", staging_abs_path, sip_path)
    shutil.move(src=staging_abs_path, dst=sip_path)
# NOTE(review): this fragment references `basename`, `unitPath`, `uuidLen`
# and `originalSIPName`, which are defined before this excerpt —
# presumably uuidLen == 36 and unitPath is the unit directory with a
# trailing slash; confirm against the full script.
originalSIPUUID = basename[:-(uuidLen + 1)][-uuidLen:]
METSPath = os.path.join(unitPath,
                        "metadata/submissionDocumentation/data/",
                        "METS.%s.xml" % (originalSIPUUID))
if not os.path.isfile(METSPath):
    print("Mets file not found: ", METSPath, file=sys.stderr)
    exit(-1)
# move mets to DIP
src = METSPath
dst = os.path.join(unitPath, "DIP", os.path.basename(METSPath))
shutil.move(src, dst)
# Move DIP
src = os.path.join(unitPath, "DIP")
dst = os.path.join(
    "/var/archivematica/sharedDirectory/watchedDirectories/uploadDIP/",
    originalSIPName + "-" + originalSIPUUID)
shutil.move(src, dst)
try:
    SIP.objects.get(uuid=originalSIPUUID)
except SIP.DoesNotExist:
    # otherwise doesn't appear in dashboard
    createSIP(unitPath, UUID=originalSIPUUID)
Job.objects.create(jobtype="Hack to make DIP Jobs appear",
                   directory=unitPath,
                   sip_id=originalSIPUUID,
                   currentstep="Completed successfully",
                   unittype="unitSIP",
                   microservicegroup="Upload DIP")
# NOTE(review): fragment — `transferName` and `objectsDirectory` are
# assigned before this excerpt (presumably sys.argv[2] and sys.argv[1]);
# confirm against the full script.
processingDirectory = sys.argv[4]
autoProcessSIPDirectory = sys.argv[5]
sharedPath = sys.argv[6]
sipName = transferName
tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
archivematicaFunctions.create_structured_directory(
    tmpSIPDir, manual_normalization=False)
#create row in SIPs table if one doesn't already exist
lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
try:
    sipUUID = SIP.objects.get(currentpath=lookup_path).uuid
except SIP.DoesNotExist:
    sipUUID = databaseFunctions.createSIP(lookup_path)
#move the objects to the SIPDir
for item in os.listdir(objectsDirectory):
    src_path = os.path.join(objectsDirectory, item)
    dst_path = os.path.join(tmpSIPDir, "objects", item)
    # If dst_path already exists and is a directory, shutil.move
    # will move src_path into it rather than overwriting it;
    # to avoid incorrectly-nested paths, move src_path's contents
    # into it instead.
    if os.path.exists(dst_path) and os.path.isdir(src_path):
        for subitem in os.listdir(src_path):
            shutil.move(os.path.join(src_path, subitem), dst_path)
    else:
        shutil.move(src_path, dst_path)
def process_transfer(request, transfer_uuid):
    """Convert a completed transfer into a SIP and hand it to the
    SIPsUnderConstruction watched directory.

    Requires an authenticated user. Creates the SIP row, moves the
    transfer's objects into a staged SIP, re-points the Files rows,
    copies processingMCP.xml across, moves the SIP into place, and
    resets the transfer's file status in Elasticsearch.
    Returns a JSON HttpResponse describing the outcome.
    """
    response = {}
    if request.user.id:
        # get transfer info
        transfer = models.Transfer.objects.get(uuid=transfer_uuid)
        transfer_path = transfer.currentlocation.replace(
            '%sharedPath%',
            helpers.get_server_config_value('sharedDirectory'))
        # NOTE(review): function-level imports kept as-is; consider moving
        # to module top level.
        import MySQLdb
        import databaseInterface
        import databaseFunctions
        import shutil
        from archivematicaCreateStructuredDirectory import createStructuredDirectory
        from archivematicaCreateStructuredDirectory import createManualNormalizedDirectoriesList
        createStructuredDirectory(transfer_path, createManualNormalizedDirectories=False)
        processingDirectory = helpers.get_server_config_value(
            'processingDirectory')
        transfer_directory_name = os.path.basename(transfer_path[:-1])
        # Strip the "-<uuid>" suffix (37 characters) from the directory name.
        transfer_name = transfer_directory_name[:-37]
        sharedPath = helpers.get_server_config_value('sharedDirectory')
        tmpSIPDir = os.path.join(processingDirectory, transfer_name) + "/"
        #processSIPDirectory = os.path.join(sharedPath, 'watchedDirectories/system/autoProcessSIP') + '/'
        processSIPDirectory = os.path.join(
            sharedPath,
            'watchedDirectories/SIPCreation/SIPsUnderConstruction') + '/'
        #destSIPDir = os.path.join(processSIPDirectory, transfer_name) + "/"
        #destSIPDir = os.path.join(processSIPDirectory, transfer_name + '-' + ) + "/"
        createStructuredDirectory(tmpSIPDir, createManualNormalizedDirectories=False)
        objectsDirectory = os.path.join(transfer_path, 'objects') + '/'
        """
        #create row in SIPs table if one doesn't already exist
        lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
        #lookup_path = '%sharedPath%watchedDirectories/workFlowDecisions/createDip/' + transfer_name + '/'
        sql = " " "SELECT sipUUID FROM SIPs WHERE currentPath = '" " " + MySQLdb.escape_string(lookup_path) + "';"
        rows = databaseInterface.queryAllSQL(sql)
        if len(rows) > 0:
            row = rows[0]
            sipUUID = row[0]
        else:
            sipUUID = uuid.uuid4().__str__()
            databaseFunctions.createSIP(lookup_path, sipUUID)
        """
        # Always create a fresh SIP row for the transfer.
        sipUUID = uuid.uuid4().__str__()
        destSIPDir = os.path.join(processSIPDirectory, transfer_name) + "/"
        lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
        databaseFunctions.createSIP(lookup_path, sipUUID)
        #move the objects to the SIPDir
        for item in os.listdir(objectsDirectory):
            shutil.move(os.path.join(objectsDirectory, item),
                        os.path.join(tmpSIPDir, "objects", item))
        #get the database list of files in the objects directory
        #for each file, confirm it's in the SIP objects directory, and update the current location/ owning SIP'
        # NOTE(review): SQL built by string concatenation; prefer
        # parameterised queries.
        sql = """SELECT fileUUID, currentLocation FROM Files WHERE removedTime = 0 AND currentLocation LIKE '\%transferDirectory\%objects%' AND transferUUID = '""" + transfer_uuid + "'"
        for row in databaseInterface.queryAllSQL(sql):
            fileUUID = row[0]
            currentPath = databaseFunctions.deUnicode(row[1])
            currentSIPFilePath = currentPath.replace("%transferDirectory%", tmpSIPDir)
            if os.path.isfile(currentSIPFilePath):
                sql = """UPDATE Files SET currentLocation='%s', sipUUID='%s' WHERE fileUUID='%s'""" % (
                    MySQLdb.escape_string(
                        currentPath.replace(
                            "%transferDirectory%",
                            "%SIPDirectory%")),
                    sipUUID,
                    fileUUID)
                databaseInterface.runSQL(sql)
            else:
                print >> sys.stderr, "file not found: ", currentSIPFilePath
        #copy processingMCP.xml file
        src = os.path.join(os.path.dirname(objectsDirectory[:-1]), "processingMCP.xml")
        dst = os.path.join(tmpSIPDir, "processingMCP.xml")
        shutil.copy(src, dst)
        #moveSIPTo processSIPDirectory
        shutil.move(tmpSIPDir, destSIPDir)
        elasticSearchFunctions.connect_and_change_transfer_file_status(
            transfer_uuid, '')
        response['message'] = 'SIP ' + sipUUID + ' created.'
    else:
        response['error'] = True
        response['message'] = 'Must be logged in.'
    return HttpResponse(
        simplejson.JSONEncoder(encoding='utf-8').encode(response),
        mimetype='application/json')
def call(jobs):
    """Create a SIP from a completed transfer's objects directory.

    For each job: builds a structured SIP directory in the processing
    directory, creates (or reuses) the SIP database row, moves the
    transfer's objects plus ``Directory``/``File`` records into the SIP,
    copies dc.json and processingMCP.xml, and moves the finished SIP
    into the auto-process watched directory.

    job.args layout (1-based, arg 0 is the script name):
        1: objectsDirectory         transfer's objects/ path
        2: transferName
        3: transferUUID
        4: processingDirectory
        5: autoProcessSIPDirectory  destination watched directory
        6: sharedPath
    """
    with transaction.atomic():
        for job in jobs:
            with job.JobContext():
                objectsDirectory = job.args[1]
                transferName = job.args[2]
                transferUUID = job.args[3]
                processingDirectory = job.args[4]
                autoProcessSIPDirectory = job.args[5]
                sharedPath = job.args[6]
                sipName = transferName
                tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
                destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
                archivematicaFunctions.create_structured_directory(
                    tmpSIPDir, manual_normalization=False)
                # If transfer is a reingested AIP, then pass that info to the SIP
                sip_type = 'SIP'
                sip_uuid = None
                transfer = Transfer.objects.get(uuid=transferUUID)
                if transfer.type == 'Archivematica AIP':
                    sip_type = 'AIP-REIN'
                    # Use reingested AIP's UUID as the SIP UUID
                    # Get AIP UUID from reingest METS name
                    job.pyprint(
                        'path', os.path.join(objectsDirectory, '..', 'metadata'),
                        'listdir',
                        os.listdir(
                            os.path.join(objectsDirectory, '..', 'metadata')))
                    for item in os.listdir(
                            os.path.join(objectsDirectory, '..', 'metadata')):
                        if item.startswith('METS'):
                            sip_uuid = item.replace('METS.', '').replace('.xml', '')
                job.pyprint('sip_uuid', sip_uuid)
                job.pyprint('sip_type', sip_type)
                # Find out if any ``Directory`` models were created for the source
                # ``Transfer``. If so, this fact gets recorded in the new ``SIP`` model.
                dir_mdls = Directory.objects.filter(
                    transfer_id=transferUUID,
                    currentlocation__startswith='%transferDirectory%objects')
                diruuids = len(dir_mdls) > 0
                # Create row in SIPs table if one doesn't already exist.
                # BUGFIX: fetch the SIP model itself — previously ``.uuid``
                # was taken here, so ``sip`` was a UUID string and the
                # ``sip.save()`` and ``dir_mdl.sip = sip`` / ``f.sip = sip``
                # assignments below could not work.
                lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
                try:
                    sip = SIP.objects.get(currentpath=lookup_path)
                    if diruuids:
                        sip.diruuids = True
                        sip.save()
                except SIP.DoesNotExist:
                    sip_uuid = databaseFunctions.createSIP(lookup_path,
                                                           UUID=sip_uuid,
                                                           sip_type=sip_type,
                                                           diruuids=diruuids,
                                                           printfn=job.pyprint)
                    sip = SIP.objects.get(uuid=sip_uuid)
                # Move the objects to the SIPDir
                for item in os.listdir(objectsDirectory):
                    src_path = os.path.join(objectsDirectory, item)
                    dst_path = os.path.join(tmpSIPDir, "objects", item)
                    # If dst_path already exists and is a directory, shutil.move
                    # will move src_path into it rather than overwriting it;
                    # to avoid incorrectly-nested paths, move src_path's contents
                    # into it instead.
                    if os.path.exists(dst_path) and os.path.isdir(src_path):
                        for subitem in os.listdir(src_path):
                            shutil.move(os.path.join(src_path, subitem), dst_path)
                    else:
                        shutil.move(src_path, dst_path)
                # Get the ``Directory`` models representing the subdirectories in the
                # objects/ directory. For each subdirectory, confirm it's in the SIP
                # objects/ directory, and update the current location and owning SIP.
                for dir_mdl in dir_mdls:
                    currentPath = databaseFunctions.deUnicode(
                        dir_mdl.currentlocation)
                    currentSIPDirPath = currentPath.replace(
                        "%transferDirectory%", tmpSIPDir)
                    if os.path.isdir(currentSIPDirPath):
                        dir_mdl.currentlocation = currentPath.replace(
                            "%transferDirectory%", "%SIPDirectory%")
                        dir_mdl.sip = sip
                        dir_mdl.save()
                    else:
                        job.pyprint("directory not found: ", currentSIPDirPath,
                                    file=sys.stderr)
                # Get the database list of files in the objects directory.
                # For each file, confirm it's in the SIP objects directory, and
                # update the current location / owning SIP.
                files = File.objects.filter(
                    transfer_id=transferUUID,
                    currentlocation__startswith='%transferDirectory%objects',
                    removedtime__isnull=True)
                for f in files:
                    currentPath = databaseFunctions.deUnicode(f.currentlocation)
                    currentSIPFilePath = currentPath.replace(
                        "%transferDirectory%", tmpSIPDir)
                    if os.path.isfile(currentSIPFilePath):
                        f.currentlocation = currentPath.replace(
                            "%transferDirectory%", "%SIPDirectory%")
                        f.sip = sip
                        f.save()
                    else:
                        job.pyprint("file not found: ", currentSIPFilePath,
                                    file=sys.stderr)
                archivematicaFunctions.create_directories(
                    archivematicaFunctions.MANUAL_NORMALIZATION_DIRECTORIES,
                    basepath=tmpSIPDir)
                # Copy the JSON metadata file, if present; this contains a
                # serialized copy of DC metadata entered in the dashboard UI
                # during the transfer.
                src = os.path.normpath(
                    os.path.join(objectsDirectory, "..", "metadata", "dc.json"))
                dst = os.path.join(tmpSIPDir, "metadata", "dc.json")
                if os.path.exists(src):
                    shutil.copy(src, dst)
                # Copy processingMCP.xml file
                src = os.path.join(os.path.dirname(objectsDirectory[:-1]),
                                   "processingMCP.xml")
                dst = os.path.join(tmpSIPDir, "processingMCP.xml")
                shutil.copy(src, dst)
                # moveSIPTo autoProcessSIPDirectory
                shutil.move(tmpSIPDir, destSIPDir)
def call(jobs):
    """Create a SIP from a completed transfer's objects directory.

    For each job: builds a structured SIP directory in the processing
    directory, creates (or reuses) the SIP database row, records the
    transfer's active agent on the SIP when available, moves the
    transfer's objects plus ``Directory``/``File`` records into the SIP,
    copies dc.json and processingMCP.xml, and moves the finished SIP
    into the auto-process watched directory.

    job.args layout (1-based, arg 0 is the script name):
        1: objectsDirectory         transfer's objects/ path
        2: transferName
        3: transferUUID
        4: processingDirectory
        5: autoProcessSIPDirectory  destination watched directory
        6: sharedPath
    """
    with transaction.atomic():
        for job in jobs:
            with job.JobContext():
                objectsDirectory = job.args[1]
                transferName = job.args[2]
                transferUUID = job.args[3]
                processingDirectory = job.args[4]
                autoProcessSIPDirectory = job.args[5]
                sharedPath = job.args[6]
                sipName = transferName
                tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
                destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
                archivematicaFunctions.create_structured_directory(
                    tmpSIPDir, manual_normalization=False)
                # If transfer is a reingested AIP, then pass that info to the SIP
                sip_type = "SIP"
                sip_uuid = None
                transfer = Transfer.objects.get(uuid=transferUUID)
                if transfer.type == "Archivematica AIP":
                    sip_type = "AIP-REIN"
                    # Use reingested AIP's UUID as the SIP UUID
                    # Get AIP UUID from reingest METS name
                    job.pyprint(
                        "path",
                        os.path.join(objectsDirectory, "..", "metadata"),
                        "listdir",
                        os.listdir(
                            os.path.join(objectsDirectory, "..", "metadata")),
                    )
                    for item in os.listdir(
                            os.path.join(objectsDirectory, "..", "metadata")):
                        if item.startswith("METS"):
                            # METS.<UUID>.xml -> <UUID>
                            sip_uuid = item.replace("METS.", "").replace(".xml", "")
                job.pyprint("sip_uuid", sip_uuid)
                job.pyprint("sip_type", sip_type)
                # Find out if any ``Directory`` models were created for the source
                # ``Transfer``. If so, this fact gets recorded in the new ``SIP`` model.
                dir_mdls = Directory.objects.filter(
                    transfer_id=transferUUID,
                    currentlocation__startswith="%transferDirectory%objects",
                )
                diruuids = len(dir_mdls) > 0
                # Create row in SIPs table if one doesn't already exist
                lookup_path = destSIPDir.replace(sharedPath, "%sharedPath%")
                try:
                    sip = SIP.objects.get(currentpath=lookup_path)
                    if diruuids:
                        sip.diruuids = True
                        sip.save()
                except SIP.DoesNotExist:
                    sip_uuid = databaseFunctions.createSIP(
                        lookup_path,
                        UUID=sip_uuid,
                        sip_type=sip_type,
                        diruuids=diruuids,
                        printfn=job.pyprint,
                    )
                    sip = SIP.objects.get(uuid=sip_uuid)
                # Set activeAgent using the value in Transfer. This ensures
                # that events generated in Ingest can fall back to this value in
                # scenarios where the processing config does not require user
                # interactions, e.g. in the "automated" processing config.
                try:
                    unit_variable = UnitVariable.objects.get(
                        unittype="Transfer",
                        unituuid=transferUUID,
                        variable="activeAgent",
                    )
                except UnitVariable.DoesNotExist:
                    unit_variable = None
                if unit_variable:
                    try:
                        agent = Agent.objects.get(
                            id=unit_variable.variablevalue)
                    except Agent.DoesNotExist:
                        # Best-effort: a stale agent reference is ignored.
                        pass
                    else:
                        sip.update_active_agent(agent.userprofile.user_id)
                # Move the objects to the SIPDir
                for item in os.listdir(objectsDirectory):
                    src_path = os.path.join(objectsDirectory, item)
                    dst_path = os.path.join(tmpSIPDir, "objects", item)
                    # If dst_path already exists and is a directory, shutil.move
                    # will move src_path into it rather than overwriting it;
                    # to avoid incorrectly-nested paths, move src_path's contents
                    # into it instead.
                    if os.path.exists(dst_path) and os.path.isdir(src_path):
                        for subitem in os.listdir(src_path):
                            shutil.move(os.path.join(src_path, subitem),
                                        dst_path)
                    else:
                        shutil.move(src_path, dst_path)
                # Get the ``Directory`` models representing the subdirectories in the
                # objects/ directory. For each subdirectory, confirm it's in the SIP
                # objects/ directory, and update the current location and owning SIP.
                for dir_mdl in dir_mdls:
                    currentPath = databaseFunctions.deUnicode(
                        dir_mdl.currentlocation)
                    currentSIPDirPath = currentPath.replace(
                        "%transferDirectory%", tmpSIPDir)
                    if os.path.isdir(currentSIPDirPath):
                        dir_mdl.currentlocation = currentPath.replace(
                            "%transferDirectory%", "%SIPDirectory%")
                        dir_mdl.sip = sip
                        dir_mdl.save()
                    else:
                        job.pyprint("directory not found: ", currentSIPDirPath,
                                    file=sys.stderr)
                # Get the database list of files in the objects directory.
                # For each file, confirm it's in the SIP objects directory, and update the
                # current location/ owning SIP'
                files = File.objects.filter(
                    transfer_id=transferUUID,
                    currentlocation__startswith="%transferDirectory%objects",
                    removedtime__isnull=True,
                )
                for f in files:
                    currentPath = databaseFunctions.deUnicode(
                        f.currentlocation)
                    currentSIPFilePath = currentPath.replace(
                        "%transferDirectory%", tmpSIPDir)
                    if os.path.isfile(currentSIPFilePath):
                        f.currentlocation = currentPath.replace(
                            "%transferDirectory%", "%SIPDirectory%")
                        f.sip = sip
                        f.save()
                    else:
                        job.pyprint("file not found: ", currentSIPFilePath,
                                    file=sys.stderr)
                archivematicaFunctions.create_directories(
                    archivematicaFunctions.MANUAL_NORMALIZATION_DIRECTORIES,
                    basepath=tmpSIPDir,
                )
                # Copy the JSON metadata file, if present; this contains a
                # serialized copy of DC metadata entered in the dashboard UI
                # during the transfer.
                src = os.path.normpath(
                    os.path.join(objectsDirectory, "..", "metadata", "dc.json"))
                dst = os.path.join(tmpSIPDir, "metadata", "dc.json")
                if os.path.exists(src):
                    shutil.copy(src, dst)
                # Copy processingMCP.xml file
                src = os.path.join(os.path.dirname(objectsDirectory[:-1]),
                                   "processingMCP.xml")
                dst = os.path.join(tmpSIPDir, "processingMCP.xml")
                shutil.copy(src, dst)
                # moveSIPTo autoProcessSIPDirectory
                shutil.move(tmpSIPDir, destSIPDir)
# ``Transfer``. If so, this fact gets recorded in the new ``SIP`` model. dir_mdls = Directory.objects.filter( transfer_id=transferUUID, currentlocation__startswith='%transferDirectory%objects') diruuids = len(dir_mdls) > 0 # Create row in SIPs table if one doesn't already exist lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%') try: sip = SIP.objects.get(currentpath=lookup_path) if diruuids: sip.diruuids = True sip.save() except SIP.DoesNotExist: sip_uuid = databaseFunctions.createSIP(lookup_path, UUID=sip_uuid, sip_type=sip_type, diruuids=diruuids) sip = SIP.objects.get(uuid=sip_uuid) # Move the objects to the SIPDir for item in os.listdir(objectsDirectory): src_path = os.path.join(objectsDirectory, item) dst_path = os.path.join(tmpSIPDir, "objects", item) # If dst_path already exists and is a directory, shutil.move # will move src_path into it rather than overwriting it; # to avoid incorrectly-nested paths, move src_path's contents # into it instead. if os.path.exists(dst_path) and os.path.isdir(src_path): for subitem in os.listdir(src_path): shutil.move(os.path.join(src_path, subitem), dst_path) else:
# NOTE(review): interior fragment of the legacy MySQLdb-based version of
# the SIP-creation client script; the enclosing function/script header is
# not visible in this chunk. Code tokens are preserved as-is.
sipName = transferName
tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
createStructuredDirectory(tmpSIPDir, createManualNormalizedDirectories=False)
# create row in SIPs table if one doesn't already exist
lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
sql = """SELECT sipUUID FROM SIPs WHERE currentPath = '""" + MySQLdb.escape_string(lookup_path) + "';"
rows = databaseInterface.queryAllSQL(sql)
if len(rows) > 0:
    # Reuse the existing SIP row for this destination path.
    row = rows[0]
    sipUUID = row[0]
else:
    sipUUID = uuid.uuid4().__str__()
    databaseFunctions.createSIP(lookup_path, sipUUID)
# move the objects to the SIPDir
for item in os.listdir(objectsDirectory):
    shutil.move(os.path.join(objectsDirectory, item),
                os.path.join(tmpSIPDir, "objects", item))
# get the database list of files in the objects directory
# for each file, confirm it's in the SIP objects directory, and update the current location/ owning SIP'
# NOTE(review): transferUUID is interpolated unescaped here (unlike
# lookup_path above) — presumably trusted, but verify against caller.
sql = """SELECT fileUUID, currentLocation FROM Files WHERE removedTime = 0 AND currentLocation LIKE '\%transferDirectory\%objects%' AND transferUUID = '""" + transferUUID + "'"
for row in databaseInterface.queryAllSQL(sql):
    fileUUID = row[0]
    currentPath = databaseFunctions.deUnicode(row[1])
    currentSIPFilePath = currentPath.replace("%transferDirectory%", tmpSIPDir)
    if os.path.isfile(currentSIPFilePath):
        sql = """UPDATE Files SET currentLocation='%s', sipUUID='%s' WHERE fileUUID='%s'""" % (MySQLdb.escape_string(currentPath.replace("%transferDirectory%", "%SIPDirectory%")), sipUUID, fileUUID)
        databaseInterface.runSQL(sql)
# NOTE(review): truncated interior fragment of a "SIP per container"
# variant: each top-level directory under the transfer's objects/ becomes
# its own SIP. The chunk ends mid-expression (the final ``.replace`` call
# is cut off); code tokens are preserved as-is.
transfer_objects_directory = '%transferDirectory%objects'
for container in os.listdir(objectsDirectory):
    sipUUID = uuid.uuid4().__str__()
    containerPath = os.path.join(objectsDirectory, container)
    if not os.path.isdir(containerPath):
        # Loose files at the top level are skipped, not SIP-ified.
        print >> sys.stderr, "file (not container) found: ", container
        continue
    sipName = "%s-%s" % (transferName, container)
    tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
    destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
    archivematicaFunctions.create_structured_directory(
        tmpSIPDir, manual_normalization=True)
    databaseFunctions.createSIP(
        destSIPDir.replace(sharedPath, '%sharedPath%'), sipUUID)
    # move the objects to the SIPDir
    for item in os.listdir(containerPath):
        shutil.move(os.path.join(containerPath, item),
                    os.path.join(tmpSIPDir, "objects", item))
    # get the database list of files in the objects directory
    # for each file, confirm it's in the SIP objects directory, and update the current location/ owning SIP'
    directory = os.path.join(transfer_objects_directory, container)
    files = File.objects.filter(removedtime__isnull=True,
                                currentlocation__startswith=directory,
                                transfer_id=transferUUID)
    for f in files:
        currentPath = databaseFunctions.deUnicode(
            f.currentlocation).replace(directory,
# NOTE(review): fragment of a script that moves an AIP's DIP into the
# uploadDIP watched directory; ``unitPath`` is assigned from sys.argv
# earlier, outside this chunk. Code tokens are preserved as-is.
date = sys.argv[3]
basename = os.path.basename(unitPath[:-1])
uuidLen = 36
# Unit directories are named "<name>-<transferUUID>-<sipUUID>"; slice
# off the two trailing "-<36-char UUID>" segments to recover the name,
# and the last segment alone for the original SIP UUID.
originalSIPName = basename[:-(uuidLen+1)*2]
originalSIPUUID = basename[:-(uuidLen+1)][-uuidLen:]
METSPath = os.path.join(unitPath, "metadata/submissionDocumentation/data/", "METS.%s.xml" % (originalSIPUUID))
if not os.path.isfile(METSPath):
    print >>sys.stderr, "Mets file not found: ", METSPath
    exit(-1)
# move mets to DIP
src = METSPath
dst = os.path.join(unitPath, "DIP", os.path.basename(METSPath))
shutil.move(src, dst)
# Move DIP
src = os.path.join(unitPath, "DIP")
dst = os.path.join("/var/archivematica/sharedDirectory/watchedDirectories/uploadDIP/", originalSIPName + "-" + originalSIPUUID)
shutil.move(src, dst)
sql = """SELECT sipUUID from SIPs where sipUUID = '%s'; """ % (originalSIPUUID)
rows = databaseInterface.queryAllSQL(sql)
if not len(rows):
    # otherwise doesn't appear in dashboard
    import uuid
    createSIP(unitPath, UUID=originalSIPUUID)
    # Insert a placeholder completed job so the dashboard shows an
    # "Upload DIP" entry for this SIP.
    databaseInterface.runSQL("""INSERT INTO Jobs (jobUUID, jobType, directory, SIPUUID, currentStep, unitType, subJobOf, microserviceGroup) VALUES ('%s','Hack to make DIP Jobs appear', '%s', '%s', 'Completed successfully', 'unitSIP', '', 'Upload DIP');""" % (str(uuid.uuid4()), unitPath, originalSIPUUID))