def get_modified_standard_transfer_path(type=None):
    """Return the active-transfers watch directory with the shared
    directory prefix replaced by the '%sharedPath%' placeholder.

    :param type: optional transfer type; when given, the type-specific
        subdirectory is appended. (Parameter name kept for caller
        compatibility although it shadows the builtin ``type``.)
    :returns: the placeholder-relative path, or None when the type is
        not recognised by helpers.transfer_directory_by_type().
    """
    path = os.path.join(
        helpers.get_server_config_value('watchDirectoryPath'),
        'activeTransfers')
    if type is not None:  # identity check, not '!= None'
        try:
            path = os.path.join(path, helpers.transfer_directory_by_type(type))
        except Exception:
            # Was a bare 'except:'; still best-effort, but no longer
            # swallows KeyboardInterrupt/SystemExit.
            return None
    shared_directory_path = helpers.get_server_config_value('sharedDirectory')
    # Only the first (leading) occurrence is substituted.
    return path.replace(shared_directory_path, '%sharedPath%', 1)
def get_modified_standard_transfer_path(type=None):
    """Build the activeTransfers path, abstracted to '%sharedPath%'.

    :param type: optional transfer type used to pick a subdirectory.
        (Name kept for backward compatibility despite shadowing the
        builtin.)
    :returns: path with the shared directory replaced by the
        '%sharedPath%' placeholder, or None for an unknown type.
    """
    path = os.path.join(
        helpers.get_server_config_value('watchDirectoryPath'),
        'activeTransfers'
    )
    if type is not None:  # fixed '!= None' comparison
        try:
            path = os.path.join(path, helpers.transfer_directory_by_type(type))
        except Exception:
            # Narrowed from a bare 'except:' so process-control
            # exceptions propagate.
            return None
    shared_directory_path = helpers.get_server_config_value('sharedDirectory')
    return path.replace(shared_directory_path, '%sharedPath%', 1)
def create_aic(request, *args, **kwargs):
    """Create an Archival Information Collection (AIC) from search results.

    Looks up the AIP UUIDs submitted via CreateAICForm in Elasticsearch,
    creates a staging directory (named after a fresh UUID) under
    <sharedDirectory>/tmp, registers the AIC as a SIP in the database, and
    writes one file per AIP (filename = AIP UUID, contents = AIP name).
    Redirects to the AIC metadata page on success, or back to the archival
    storage index with an error message on failure.
    """
    aic_form = forms.CreateAICForm(request.POST or None)
    if aic_form.is_valid():
        # 'results' arrives as the string repr of a Python list; parse it
        # safely (no eval of arbitrary expressions).
        aip_uuids = ast.literal_eval(aic_form.cleaned_data['results'])
        logger.info("AIC AIP UUIDs: {}".format(aip_uuids))
        # The form was passed a raw list of all AIP UUIDs mapping the user's query;
        # use those to fetch their names, which is used to produce files below.
        query = {
            "query": {
                "terms": {
                    "uuid": aip_uuids,
                }
            }
        }
        es_client = elasticSearchFunctions.get_client()
        results = es_client.search(
            body=query,
            index='aips',
            doc_type='aip',
            fields='uuid,name',
            size=elasticSearchFunctions.MAX_QUERY_SIZE,  # return all records
        )
        # Create files in staging directory with AIP information
        shared_dir = helpers.get_server_config_value('sharedDirectory')
        staging_dir = os.path.join(shared_dir, 'tmp')
        # Create SIP (AIC) directory in staging directory
        temp_uuid = str(uuid.uuid4())
        destination = os.path.join(staging_dir, temp_uuid)
        try:
            os.mkdir(destination)
            # group read/write so the MCP processes can use the directory
            os.chmod(destination, 0o770)
        except os.error:
            messages.error(request, "Error creating AIC")
            logger.exception(
                "Error creating AIC: Error creating directory {}".format(
                    destination))
            return redirect('archival_storage_index')
        # Create SIP in DB; the on-disk path is stored with the shared
        # directory abstracted to the %sharedPath% placeholder.
        mcp_destination = destination.replace(shared_dir, '%sharedPath%') + '/'
        databaseFunctions.createSIP(mcp_destination, UUID=temp_uuid, sip_type='AIC')
        # Create files with filename = AIP UUID, and contents = AIP name
        for aip in results['hits']['hits']:
            filepath = os.path.join(destination, aip['fields']['uuid'][0])
            with open(filepath, 'w') as f:
                os.chmod(filepath, 0o660)
                f.write(str(aip['fields']['name'][0]))
        return redirect('components.ingest.views.aic_metadata_add', temp_uuid)
    else:
        messages.error(request, "Error creating AIC")
        logger.error("Error creating AIC: Form not valid: {}".format(aic_form))
        return redirect('archival_storage_index')
def storagesetup(request):
    """Installer step: configure and register this pipeline with the
    storage service.

    Saves the StorageSettingsForm; when "use_default" is posted, asks the
    storage service to create default locations for this pipeline and
    whitelists the storage-service host for API access. Otherwise only a
    bare pipeline registration is attempted (best-effort). Re-renders the
    setup template (with locals() as context) when the form is invalid.
    """
    # Display the dashboard UUID on the storage service setup page
    dashboard_uuid = helpers.get_setting('dashboard_uuid', None)
    # NOTE(review): assert is stripped under 'python -O'; an explicit
    # raise would be safer for validating a required setting.
    assert dashboard_uuid is not None
    # Prefill the storage service URL
    # NOTE(review): 'inital_data' is a typo for 'initial_data'; left
    # unchanged because locals() is passed to the template below.
    inital_data = {
        'storage_service_url':
            helpers.get_setting('storage_service_url', 'http://localhost:8000'),
        'storage_service_user':
            helpers.get_setting('storage_service_user', 'test'),
        'storage_service_apikey':
            helpers.get_setting('storage_service_apikey', None)
    }
    storage_form = StorageSettingsForm(request.POST or None, initial=inital_data)
    if storage_form.is_valid():
        # Set storage service URL
        storage_form.save()
        if "use_default" in request.POST:
            shared_path = helpers.get_server_config_value('sharedDirectory')
            # Post first user & API key
            # NOTE(review): assumes at least one user already exists;
            # raises IndexError otherwise — confirm setup ordering.
            user = User.objects.all()[0]
            api_key = ApiKey.objects.get(user=user)
            # Create pipeline, tell it to use default setup
            try:
                storage_service.create_pipeline(
                    create_default_locations=True,
                    shared_path=shared_path,
                    api_username=user.username,
                    api_key=api_key.key,
                )
            except Exception:
                messages.warning(
                    request,
                    'Error creating pipeline: is the storage server running? Please contact an administrator.'
                )
            else:
                # Add the storage service URL to the API whitelist
                ss_url = urlparse.urlparse(
                    helpers.get_setting('storage_service_url'))
                whitelist = helpers.get_setting('api_whitelist', '127.0.0.1')
                whitelist = '\n'.join([whitelist, ss_url.hostname])
                helpers.set_setting('api_whitelist', whitelist)
        else:
            # Storage service manually set up, just register Pipeline if
            # possible. Do not provide additional information about the shared
            # path, or API, as this is probably being set up in the storage
            # service manually.
            try:
                storage_service.create_pipeline()
            except Exception:
                # deliberate best-effort: registration failure is ignored
                pass
        return redirect('main.views.home')
    else:
        return render(request, 'installer/storagesetup.html', locals())
def ingest_upload_atk_get_dip_object_paths(uuid):
    """Return the original objects listed in a DIP's METS file.

    Parses METS.<uuid>.xml inside the uploadDIP watch directory for the
    SIP identified by ``uuid`` and returns a list of dicts sorted by
    path, each shaped {'uuid': <file uuid>, 'path': <object path>}.
    Raises Http404 when the SIP cannot be found.
    """
    # determine the DIP upload directory
    watch_dir = helpers.get_server_config_value('watchDirectoryPath')
    dip_upload_dir = os.path.join(watch_dir, 'uploadDIP')
    # work out directory name for DIP (should be the same as the SIP)
    try:
        sip = models.SIP.objects.get(uuid=uuid)
    except:
        # NOTE(review): bare except — also converts unexpected errors
        # (e.g. malformed uuid) into a 404.
        raise Http404
    directory = os.path.basename(os.path.dirname(sip.currentpath))
    # work out the path to the DIP's METS file
    metsFilePath = os.path.join(dip_upload_dir, directory, 'METS.' + uuid + '.xml')
    # read file paths from METS file
    tree = ElementTree.parse(metsFilePath)
    root = tree.getroot()
    # use paths to create an array that we'll sort and store path UUIDs separately
    paths = []
    path_uuids = {}
    # in the end we'll populate this using paths and path_uuids
    files = []
    # get each object's filepath
    for item in root.findall(
            "{http://www.loc.gov/METS/}fileSec/{http://www.loc.gov/METS/}fileGrp[@USE='original']/{http://www.loc.gov/METS/}file"
    ):
        for item2 in item.findall("{http://www.loc.gov/METS/}FLocat"):
            object_path = item2.attrib['{http://www.w3.org/1999/xlink}href']
            # look up file's UUID
            file = models.File.objects.get(
                sip=uuid, currentlocation='%SIPDirectory%' + object_path)
            # remove "objects/" dir when storing representation
            if object_path.index('objects/') == 0:
                object_path = object_path[8:]
            paths.append(object_path)
            path_uuids[object_path] = file.uuid
    # create array of objects with object data
    paths.sort()
    for path in paths:
        files.append({'uuid': path_uuids[path], 'path': path})
    return files


# NOTE(review): stray opening triple-quote — appears to begin a region
# commented out with a string literal that continues beyond this chunk.
"""
def create_arranged_sip(staging_sip_path, files, sip_uuid):
    """Turn an arranged staging directory into a SIP under construction.

    Creates (or updates) the SIP record for ``sip_uuid``, re-points the
    given files' current locations into the SIP, scaffolds logs/metadata
    directories, writes an arrange.log of source -> destination moves, and
    finally moves the staging directory into
    watchedDirectories/SIPCreation/SIPsUnderConstruction.

    :param staging_sip_path: path of the staged SIP, relative to the
        shared directory (leading slashes are stripped).
    :param files: list of dicts with 'uuid', 'source' and 'destination'.
    :param sip_uuid: UUID for the SIP to create or update.
    :returns: an error message string when the UUID belongs to an
        already-started SIP; otherwise None (implicitly) on success.
    """
    shared_dir = helpers.get_server_config_value('sharedDirectory')
    staging_sip_path = staging_sip_path.lstrip('/')
    staging_abs_path = os.path.join(shared_dir, staging_sip_path)
    # Create SIP object
    # NOTE(review): assumes staging_sip_path has at least two components
    # so [1] is the SIP name — confirm against callers.
    sip_name = staging_sip_path.split('/')[1]
    sip_path = os.path.join(shared_dir, 'watchedDirectories', 'SIPCreation',
                            'SIPsUnderConstruction', sip_name)
    # DB stores the path with the shared dir abstracted to %sharedPath%
    currentpath = sip_path.replace(shared_dir, '%sharedPath%', 1) + '/'
    sip_path = helpers.pad_destination_filepath_if_it_already_exists(sip_path)
    try:
        sip = models.SIP.objects.get(uuid=sip_uuid)
    except models.SIP.DoesNotExist:
        # Create a SIP object if none exists
        databaseFunctions.createSIP(currentpath, sip_uuid)
    else:
        # Update the already-created SIP with its path
        if sip.currentpath is not None:
            return "Provided SIP UUID ({}) belongs to an already-started SIP!".format(
                sip_uuid)
        sip.currentpath = currentpath
        sip.save()
    # Update currentLocation of files
    for file_ in files:
        if file_.get('uuid'):
            # Strip 'arrange/sip_name' from file path
            in_sip_path = '/'.join(file_['destination'].split('/')[2:])
            currentlocation = '%SIPDirectory%' + in_sip_path
            models.File.objects.filter(uuid=file_['uuid']).update(
                sip=sip_uuid, currentlocation=currentlocation)
    # Create directories for logs and metadata, if they don't exist
    for directory in ('logs', 'metadata',
                      os.path.join('metadata', 'submissionDocumentation')):
        try:
            os.mkdir(os.path.join(staging_abs_path, directory))
        except os.error as exception:
            # only EEXIST is tolerated; anything else propagates
            if exception.errno != errno.EEXIST:
                raise
    # Add log of original location and new location of files
    arrange_log = os.path.join(staging_abs_path, 'logs', 'arrange.log')
    with open(arrange_log, 'w') as f:
        # generator: only files with a uuid are logged
        log = ('%s -> %s\n' % (file_['source'], file_['destination'])
               for file_ in files if file_.get('uuid'))
        f.writelines(log)
    # Move to watchedDirectories/SIPCreation/SIPsUnderConstruction
    logger.info('create_arranged_sip: move from %s to %s', staging_abs_path,
                sip_path)
    shutil.move(src=staging_abs_path, dst=sip_path)
def transfer_file_download(request, uuid):
    """Stream the transfer file identified by ``uuid`` to the client.

    Expands the %sharedPath% and %transferDirectory% placeholders to
    resolve the file's on-disk location, then delegates to send_file().

    :raises Http404: when the File record cannot be loaded.
    """
    try:
        # renamed from 'file' to avoid shadowing the builtin
        file_record = models.File.objects.get(uuid=uuid)
    except Exception:
        # Narrowed from a bare 'except:'; any lookup failure (missing
        # row, malformed uuid) still yields a 404, but process-control
        # exceptions now propagate.
        raise Http404
    shared_directory_path = helpers.get_server_config_value('sharedDirectory')
    # file_record.transfer is already the related Transfer object; the
    # original re-fetched it by uuid, a redundant second query.
    transfer = file_record.transfer
    path_to_transfer = transfer.currentlocation.replace(
        '%sharedPath%', shared_directory_path)
    path_to_file = file_record.currentlocation.replace(
        '%transferDirectory%', path_to_transfer)
    return send_file(request, path_to_file)
def transfer_file_download(request, uuid):
    """Serve a transfer's file to the client via helpers.send_file().

    Resolves the real path by substituting %sharedPath% in the
    transfer's location and %transferDirectory% in the file's location.

    :raises Http404: when the File record cannot be loaded.
    """
    try:
        file_record = models.File.objects.get(uuid=uuid)  # avoid builtin shadow
    except Exception:
        # Was a bare 'except:'; keep the 404-on-any-failure behaviour
        # without also catching KeyboardInterrupt/SystemExit.
        raise Http404
    shared_directory_path = helpers.get_server_config_value('sharedDirectory')
    # Use the related object directly instead of re-querying Transfer
    # by uuid (the original issued a redundant second query).
    transfer = file_record.transfer
    path_to_transfer = transfer.currentlocation.replace(
        '%sharedPath%', shared_directory_path)
    path_to_file = file_record.currentlocation.replace(
        '%transferDirectory%', path_to_transfer)
    return helpers.send_file(request, path_to_file)
def download_fs(request):
    """Serve a file from inside the shared directory.

    The requested path arrives base64-encoded in the 'filepath' GET
    parameter and is resolved with realpath to defeat '..' traversal.

    :raises Http404: for any path outside the shared directory.
    """
    shared_dir = os.path.realpath(helpers.get_server_config_value('sharedDirectory'))
    filepath = base64.b64decode(request.GET.get('filepath', ''))
    requested_filepath = os.path.realpath('/' + filepath)
    # respond with 404 if a non-Archivematica file is requested.
    # The original used `requested_filepath.index(shared_dir) == 0`,
    # a prefix test that also matched sibling directories such as
    # '<shared_dir>-evil'; require the prefix to end at a separator.
    if (requested_filepath == shared_dir
            or requested_filepath.startswith(shared_dir + os.sep)):
        return helpers.send_file(request, requested_filepath)
    raise django.http.Http404
def get_modified_standard_transfer_path(transfer_type=None):
    """Return the activeTransfers watch path, '%sharedPath%'-relative.

    :param transfer_type: optional key into
        filesystem_ajax_views.TRANSFER_TYPE_DIRECTORIES; when given, the
        matching subdirectory is appended.
    :returns: the placeholder-relative path, or None for an unknown type.
    """
    path = os.path.join(
        helpers.get_server_config_value('watchDirectoryPath'),
        'activeTransfers')
    if transfer_type is not None:
        try:
            path = os.path.join(
                path,
                filesystem_ajax_views.TRANSFER_TYPE_DIRECTORIES[transfer_type])
        except KeyError:
            # Narrowed from a bare 'except:' — the only expected failure
            # here is an unknown transfer type key.
            return None
    return path.replace(SHARED_DIRECTORY_ROOT, '%sharedPath%', 1)
def aic_metadata_add(request, uuid):
    """Add DublinCore metadata to an AIC and start its processing.

    On a valid POST the metadata is saved, the staged AIC directory in
    <sharedDirectory>/tmp/<uuid> is renamed after the slugified title and
    moved into the createAIC watched directory so the MCP picks it up.
    Otherwise the metadata-edit template is rendered; note that locals()
    is the template context, so local variable names are part of the
    template contract.
    """
    sip_type_id = ingest_sip_metadata_type_id()
    try:
        dc = models.DublinCore.objects.get(
            metadataappliestotype=sip_type_id,
            metadataappliestoidentifier=uuid)
        # NOTE(review): 'id' shadows the builtin; it is only consumed via
        # the locals() template context, so it cannot be removed safely.
        id = dc.id
    except models.DublinCore.DoesNotExist:
        dc = models.DublinCore(
            metadataappliestotype=sip_type_id,
            metadataappliestoidentifier=uuid)
    form = ingest_forms.AICDublinCoreMetadataForm(request.POST or None, instance=dc)
    if form.is_valid():
        # Save the metadata
        dc = form.save()
        dc.type = "Archival Information Collection"
        dc.save()
        # Start the MicroServiceChainLink for the AIC
        shared_dir = helpers.get_server_config_value('sharedDirectory')
        source = os.path.join(shared_dir, 'tmp', uuid)
        watched_dir = helpers.get_server_config_value('watchDirectoryPath')
        # Directory name prefers the title, falling back to the identifier
        name = dc.title if dc.title else dc.identifier
        name = slugify(name).replace('-', '_')
        dir_name = '{name}-{uuid}'.format(name=name, uuid=uuid)
        destination = os.path.join(watched_dir, 'system', 'createAIC', dir_name)
        # DB stores the path with the shared dir abstracted to %sharedPath%
        destination_db = destination.replace(shared_dir, '%sharedPath%') + '/'
        models.SIP.objects.filter(uuid=uuid).update(currentpath=destination_db)
        shutil.move(source, destination)
        return redirect('ingest_index')
    name = dc.title or "New AIC"
    aic = True  # read by the template via locals()
    return render(request, 'ingest/metadata_edit.html', locals())
def start_reingest(request):
    """
    Endpoint to approve reingest of an AIP.

    Expects a POST request with the `uuid` of the SIP, and the `name`, which is also the directory in %sharedPath%tmp where the SIP is found.

    Example usage:

    curl --data "username=demo&api_key=<API key>&name=test-efeb95b4-5e44-45a4-ab5a-9d700875eb60&uuid=efeb95b4-5e44-45a4-ab5a-9d700875eb60" http://localhost/api/ingest/reingest
    """
    if request.method == 'POST':
        error = authenticate_request(request)
        if error:
            response = {'error': True, 'message': error}
            return helpers.json_response(response, status_code=403)
        sip_name = request.POST.get('name')
        sip_uuid = request.POST.get('uuid')
        if not all([sip_name, sip_uuid]):
            response = {
                'error': True,
                'message': '"name" and "uuid" are required.'
            }
            return helpers.json_response(response, status_code=400)
        # TODO Clear DB of residual stuff related to SIP
        models.Task.objects.filter(job__sipuuid=sip_uuid).delete()
        models.Job.objects.filter(sipuuid=sip_uuid).delete()
        models.SIP.objects.filter(
            uuid=sip_uuid).delete()  # Delete is cascading
        # Move to watched directory so the MCP starts reingest processing
        shared_directory_path = helpers.get_server_config_value(
            'sharedDirectory')
        source = os.path.join(shared_directory_path, 'tmp', sip_name)
        dest = os.path.join(shared_directory_path, 'watchedDirectories',
                            'system', 'reingestAIP', '')
        try:
            LOGGER.debug('Reingest moving from %s to %s', source, dest)
            shutil.move(source, dest)
        except (shutil.Error, OSError) as e:
            # fall back to a generic message when the OS error has none
            error = e.strerror or "Unable to move reingested AIP to start reingest."
            LOGGER.warning('Unable to move reingested AIP to start reingest',
                           exc_info=True)
        # 'error' is falsy here unless the move above failed
        if error:
            response = {'error': True, 'message': error}
            return helpers.json_response(response, status_code=500)
        else:
            response = {'message': 'Approval successful.'}
            return helpers.json_response(response)
    else:
        return django.http.HttpResponseNotAllowed(permitted_methods=['POST'])
def ingest_browse(request, browse_type, jobuuid):
    """Render the review page for a normalization, AIP or DIP job.

    Maps ``browse_type`` to a page title and a subdirectory of the watch
    directory; unknown types raise Http404. The template receives
    locals() as its context.
    """
    watched_dir = helpers.get_server_config_value('watchDirectoryPath')
    # (title, watch-dir subdirectory) per browse type
    _browse_config = {
        'normalization': ('Review normalization', 'approveNormalization'),
        'aip': ('Review AIP', 'storeAIP'),
        'dip': ('Review DIP', 'uploadedDIPs'),
    }
    if browse_type not in _browse_config:
        raise Http404
    title, _subdir = _browse_config[browse_type]
    directory = os.path.join(watched_dir, _subdir)
    jobs = models.Job.objects.filter(jobuuid=jobuuid, subjobof='')
    name = utils.get_directory_name_from_job(jobs)
    # keep the locals() template context identical to the historical
    # if/elif implementation
    del _browse_config, _subdir
    return render(request, 'ingest/aip_browse.html', locals())
def process_transfer(request, transfer_uuid):
    """(Legacy, Python 2) Convert a completed transfer into a SIP.

    Builds a structured SIP directory in the processing directory, moves
    the transfer's objects into it, registers the new SIP in the
    database, re-points each file's location from %transferDirectory% to
    %SIPDirectory%, copies processingMCP.xml, and finally moves the SIP
    into watchedDirectories/SIPCreation/SIPsUnderConstruction. Responds
    with a JSON message; requires an authenticated user.
    """
    response = {}
    if request.user.id:
        # get transfer info
        transfer = models.Transfer.objects.get(uuid=transfer_uuid)
        transfer_path = transfer.currentlocation.replace(
            '%sharedPath%', helpers.get_server_config_value('sharedDirectory'))
        # NOTE(review): function-local imports kept as-is (legacy style)
        import MySQLdb
        import databaseInterface
        import databaseFunctions
        import shutil
        from archivematicaCreateStructuredDirectory import createStructuredDirectory
        from archivematicaCreateStructuredDirectory import createManualNormalizedDirectoriesList
        createStructuredDirectory(transfer_path, createManualNormalizedDirectories=False)
        processingDirectory = helpers.get_server_config_value(
            'processingDirectory')
        # transfer dirs are named '<name>-<36-char uuid>'; strip '-<uuid>'
        transfer_directory_name = os.path.basename(transfer_path[:-1])
        transfer_name = transfer_directory_name[:-37]
        sharedPath = helpers.get_server_config_value('sharedDirectory')
        tmpSIPDir = os.path.join(processingDirectory, transfer_name) + "/"
        #processSIPDirectory = os.path.join(sharedPath, 'watchedDirectories/system/autoProcessSIP') + '/'
        processSIPDirectory = os.path.join(
            sharedPath, 'watchedDirectories/SIPCreation/SIPsUnderConstruction') + '/'
        #destSIPDir = os.path.join(processSIPDirectory, transfer_name) + "/"
        #destSIPDir = os.path.join(processSIPDirectory, transfer_name + '-' + ) + "/"
        createStructuredDirectory(tmpSIPDir, createManualNormalizedDirectories=False)
        objectsDirectory = os.path.join(transfer_path, 'objects') + '/'
        # NOTE(review): the string literal below is dead code kept as a
        # block comment (SIP-row lookup before creation was abandoned).
        """
        #create row in SIPs table if one doesn't already exist
        lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
        #lookup_path = '%sharedPath%watchedDirectories/workFlowDecisions/createDip/' + transfer_name + '/'
        sql = " " "SELECT sipUUID FROM SIPs WHERE currentPath = '" " " + MySQLdb.escape_string(lookup_path) + "';"
        rows = databaseInterface.queryAllSQL(sql)
        if len(rows) > 0:
            row = rows[0]
            sipUUID = row[0]
        else:
            sipUUID = uuid.uuid4().__str__()
            databaseFunctions.createSIP(lookup_path, sipUUID)
        """
        sipUUID = uuid.uuid4().__str__()
        destSIPDir = os.path.join(processSIPDirectory, transfer_name) + "/"
        lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
        databaseFunctions.createSIP(lookup_path, sipUUID)
        #move the objects to the SIPDir
        for item in os.listdir(objectsDirectory):
            shutil.move(os.path.join(objectsDirectory, item),
                        os.path.join(tmpSIPDir, "objects", item))
        #get the database list of files in the objects directory
        #for each file, confirm it's in the SIP objects directory, and update the current location/ owning SIP'
        # NOTE(review): SQL built by string concatenation — injection risk
        # if transfer_uuid is not validated upstream.
        sql = """SELECT fileUUID, currentLocation FROM Files WHERE removedTime = 0 AND currentLocation LIKE '\%transferDirectory\%objects%' AND transferUUID = '""" + transfer_uuid + "'"
        for row in databaseInterface.queryAllSQL(sql):
            fileUUID = row[0]
            currentPath = databaseFunctions.deUnicode(row[1])
            currentSIPFilePath = currentPath.replace("%transferDirectory%", tmpSIPDir)
            if os.path.isfile(currentSIPFilePath):
                sql = """UPDATE Files SET currentLocation='%s', sipUUID='%s' WHERE fileUUID='%s'""" % (
                    MySQLdb.escape_string(
                        currentPath.replace(
                            "%transferDirectory%", "%SIPDirectory%")),
                    sipUUID, fileUUID)
                databaseInterface.runSQL(sql)
            else:
                # Python 2 print-to-stderr syntax
                print >> sys.stderr, "file not found: ", currentSIPFilePath
        #copy processingMCP.xml file
        src = os.path.join(os.path.dirname(objectsDirectory[:-1]), "processingMCP.xml")
        dst = os.path.join(tmpSIPDir, "processingMCP.xml")
        shutil.copy(src, dst)
        #moveSIPTo processSIPDirectory
        shutil.move(tmpSIPDir, destSIPDir)
        elasticSearchFunctions.connect_and_change_transfer_file_status(
            transfer_uuid, '')
        response['message'] = 'SIP ' + sipUUID + ' created.'
    else:
        response['error'] = True
        response['message'] = 'Must be logged in.'
    return HttpResponse(
        simplejson.JSONEncoder(encoding='utf-8').encode(response),
        mimetype='application/json')
def administration_processing(request):
    """Edit the default processing configuration
    (defaultProcessingMCP.xml).

    GET parses the existing XML and marks the matching form fields as
    checked/selected (the template receives locals() as context). POST
    rebuilds the XML from the submitted toggle/select fields and writes
    it back to disk, then redirects to itself.
    """
    file_path = os.path.join(
        helpers.get_server_config_value('sharedDirectory'),
        'sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml'
    )
    # Yes/no workflow decisions; 'applies_to' matches the appliesTo text
    # in the XML.
    boolean_select_fields = [
        {
            "name": "backup_transfer",
            "label": "Create transfer backup",
            "yes_option": "Backup transfer",
            "no_option": "Do not backup transfer",
            "applies_to": "Workflow decision - create transfer backup"
        },
        {
            "name": "quarantine_transfer",
            "label": "Send transfer to quarantine",
            "yes_option": "Quarantine",
            "no_option": "Skip quarantine",
            "applies_to": "Workflow decision - send transfer to quarantine"
        },
        {
            "name": "normalize_transfer",
            "label": "Approve normalization",
            "applies_to": "Approve normalization",
            "yes_option": "Approve normalization",
            "action": "Approve"
        },
        {
            "name": "store_aip",
            "label": "Store AIP",
            "yes_option": "Store AIP",
            "applies_to": "Store AIP",
            "action": "Store AIP"
        }
    ]
    # Choices whose options come from MicroServiceChain records
    chain_choice_fields = [
        {
            "name": "create_sip",
            "label": "Create SIP(s)"
        },
        {
            "name": "select_format_id_tool",
            "label": "Select format identification tool"
        },
        {
            "name": "normalize",
            "label": "Normalize"
        }
    ]
    populate_select_fields_with_chain_choice_options(chain_choice_fields)
    # Choices whose options come from replacement dictionaries
    replace_dict_fields = [
        {
            "name": "compression_algo",
            "label": "Select compression algorithm"
        },
        {
            "name": "compression_level",
            "label": "Select compression level"
        },
        {
            "name": "store_aip_location",
            "label": "Store AIP location"
        }
    ]
    populate_select_fields_with_replace_dict_options(replace_dict_fields)
    select_fields = chain_choice_fields + replace_dict_fields
    if request.method == 'POST':
        # render XML using request data
        xmlChoices = PreconfiguredChoices()
        # use toggle field submissions to add to XML
        for field in boolean_select_fields:
            enabled = request.POST.get(field['name'])
            if enabled == 'yes':
                if 'yes_option' in field:
                    # can be set to either yes or no
                    toggle = request.POST.get(field['name'] + '_toggle', '')
                    if toggle == 'yes':
                        go_to_chain_text = field['yes_option']
                    elif 'no_option' in field:
                        go_to_chain_text = field['no_option']
                    # NOTE(review): go_to_chain_text may be unbound here if
                    # toggle != 'yes' and the field has no 'no_option' —
                    # confirm field definitions guarantee one branch runs.
                    if 'no_option' in field:
                        xmlChoices.add_choice(
                            field['applies_to'],
                            go_to_chain_text
                        )
                    else:
                        if toggle == 'yes':
                            xmlChoices.add_choice(
                                field['applies_to'],
                                go_to_chain_text
                            )
                else:
                    xmlChoices.add_choice(
                        field['label'],
                        field['action']
                    )
        # set quarantine duration if applicable
        quarantine_expiry_enabled = request.POST.get('quarantine_expiry_enabled', '')
        quarantine_expiry = request.POST.get('quarantine_expiry', '')
        if quarantine_expiry_enabled == 'yes' and quarantine_expiry != '':
            xmlChoices.add_choice(
                'Remove from quarantine',
                'Unquarantine',
                quarantine_expiry
            )
        # use select field submissions to add to XML
        for field in select_fields:
            enabled = request.POST.get(field['name'] + '_enabled')
            if enabled == 'yes':
                field_value = request.POST.get(field['name'], '')
                if field_value != '':
                    xmlChoices.add_choice(
                        field['label'],
                        field_value
                    )
        xmlChoices.write_to_file(file_path)
        return HttpResponseRedirect(reverse('components.administration.views.administration_processing'))
    else:
        debug = request.GET.get('debug', '')
        quarantine_expiry = ''
        # NOTE(review): file handle is never closed, and 'file' shadows
        # the builtin.
        file = open(file_path, 'r')
        xml = file.read()
        # parse XML to work out locals()
        root = etree.fromstring(xml)
        choices = root.find('preconfiguredChoices')
        # reset UI state flags before reading the stored choices
        for item in boolean_select_fields:
            item['checked'] = ''
            item['yes_checked'] = ''
            item['no_checked'] = ''
        for choice in choices:
            applies_to = choice.find('appliesTo').text
            go_to_chain = choice.find('goToChain').text
            # use toggle field submissions to add to XML
            for field in boolean_select_fields:
                if applies_to == field['applies_to']:
                    set_field_property_by_name(boolean_select_fields, field['name'], 'checked', 'checked')
                    if 'yes_option' in field:
                        if go_to_chain == field['yes_option']:
                            set_field_property_by_name(boolean_select_fields, field['name'], 'yes_checked', 'selected')
                        else:
                            set_field_property_by_name(boolean_select_fields, field['name'], 'no_checked', 'selected')
            # a quarantine expiry was found
            if applies_to == 'Remove from quarantine':
                quarantine_expiry_enabled_checked = 'checked'
                quarantine_expiry = choice.find('delay').text
            # check select fields for defaults
            for field in select_fields:
                if applies_to == field['label']:
                    field['selected'] = go_to_chain
                    field['checked'] = 'checked'
    return render(request, 'administration/processing.html', locals())
def administration_processing(request):
    """View/edit defaultProcessingMCP.xml (duplicate revision of the
    function above, differing only in formatting).

    POST rebuilds the XML from the submitted form fields and writes it to
    disk; GET parses the stored XML to pre-check the form. The template
    context is locals().
    """
    file_path = os.path.join(
        helpers.get_server_config_value('sharedDirectory'),
        'sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml'
    )
    # yes/no workflow decisions keyed by the XML appliesTo text
    boolean_select_fields = [{
        "name": "backup_transfer",
        "label": "Create transfer backup",
        "yes_option": "Backup transfer",
        "no_option": "Do not backup transfer",
        "applies_to": "Workflow decision - create transfer backup"
    }, {
        "name": "quarantine_transfer",
        "label": "Send transfer to quarantine",
        "yes_option": "Quarantine",
        "no_option": "Skip quarantine",
        "applies_to": "Workflow decision - send transfer to quarantine"
    }, {
        "name": "normalize_transfer",
        "label": "Approve normalization",
        "applies_to": "Approve normalization",
        "yes_option": "Approve normalization",
        "action": "Approve"
    }, {
        "name": "store_aip",
        "label": "Store AIP",
        "yes_option": "Store AIP",
        "applies_to": "Store AIP",
        "action": "Store AIP"
    }]
    # options filled from MicroServiceChain records
    chain_choice_fields = [{
        "name": "create_sip",
        "label": "Create SIP(s)"
    }, {
        "name": "select_format_id_tool",
        "label": "Select format identification tool"
    }, {
        "name": "normalize",
        "label": "Normalize"
    }]
    populate_select_fields_with_chain_choice_options(chain_choice_fields)
    # options filled from replacement dictionaries
    replace_dict_fields = [{
        "name": "compression_algo",
        "label": "Select compression algorithm"
    }, {
        "name": "compression_level",
        "label": "Select compression level"
    }, {
        "name": "store_aip_location",
        "label": "Store AIP location"
    }]
    populate_select_fields_with_replace_dict_options(replace_dict_fields)
    select_fields = chain_choice_fields + replace_dict_fields
    if request.method == 'POST':
        # render XML using request data
        xmlChoices = PreconfiguredChoices()
        # use toggle field submissions to add to XML
        for field in boolean_select_fields:
            enabled = request.POST.get(field['name'])
            if enabled == 'yes':
                if 'yes_option' in field:
                    # can be set to either yes or no
                    toggle = request.POST.get(field['name'] + '_toggle', '')
                    if toggle == 'yes':
                        go_to_chain_text = field['yes_option']
                    elif 'no_option' in field:
                        go_to_chain_text = field['no_option']
                    # NOTE(review): go_to_chain_text can be unbound when
                    # toggle != 'yes' and no 'no_option' exists — verify.
                    if 'no_option' in field:
                        xmlChoices.add_choice(field['applies_to'], go_to_chain_text)
                    else:
                        if toggle == 'yes':
                            xmlChoices.add_choice(field['applies_to'], go_to_chain_text)
                else:
                    xmlChoices.add_choice(field['label'], field['action'])
        # set quarantine duration if applicable
        quarantine_expiry_enabled = request.POST.get(
            'quarantine_expiry_enabled', '')
        quarantine_expiry = request.POST.get('quarantine_expiry', '')
        if quarantine_expiry_enabled == 'yes' and quarantine_expiry != '':
            xmlChoices.add_choice('Remove from quarantine', 'Unquarantine',
                                  quarantine_expiry)
        # use select field submissions to add to XML
        for field in select_fields:
            enabled = request.POST.get(field['name'] + '_enabled')
            if enabled == 'yes':
                field_value = request.POST.get(field['name'], '')
                if field_value != '':
                    xmlChoices.add_choice(field['label'], field_value)
        xmlChoices.write_to_file(file_path)
        return HttpResponseRedirect(
            reverse(
                'components.administration.views.administration_processing'))
    else:
        debug = request.GET.get('debug', '')
        quarantine_expiry = ''
        # NOTE(review): handle never closed; 'file' shadows the builtin
        file = open(file_path, 'r')
        xml = file.read()
        # parse XML to work out locals()
        root = etree.fromstring(xml)
        choices = root.find('preconfiguredChoices')
        # clear UI state flags before reading stored choices
        for item in boolean_select_fields:
            item['checked'] = ''
            item['yes_checked'] = ''
            item['no_checked'] = ''
        for choice in choices:
            applies_to = choice.find('appliesTo').text
            go_to_chain = choice.find('goToChain').text
            # use toggle field submissions to add to XML
            for field in boolean_select_fields:
                if applies_to == field['applies_to']:
                    set_field_property_by_name(boolean_select_fields,
                                               field['name'], 'checked',
                                               'checked')
                    if 'yes_option' in field:
                        if go_to_chain == field['yes_option']:
                            set_field_property_by_name(boolean_select_fields,
                                                       field['name'],
                                                       'yes_checked',
                                                       'selected')
                        else:
                            set_field_property_by_name(boolean_select_fields,
                                                       field['name'],
                                                       'no_checked',
                                                       'selected')
            # a quarantine expiry was found
            if applies_to == 'Remove from quarantine':
                quarantine_expiry_enabled_checked = 'checked'
                quarantine_expiry = choice.find('delay').text
            # check select fields for defaults
            for field in select_fields:
                if applies_to == field['label']:
                    field['selected'] = go_to_chain
                    field['checked'] = 'checked'
    return render(request, 'administration/processing.html', locals())
def reingest(request, target):
    """
    Endpoint to approve reingest of an AIP to the beginning of transfer or
    ingest.

    Expects a POST request with the `uuid` of the SIP, and the `name`,
    which is also the directory in %sharedPath%tmp where the SIP is found.

    Example usage:

    $ http POST http://localhost/api/ingest/reingest \
        username=demo api_key=$API_KEY \
        name=test-efeb95b4-5e44-45a4-ab5a-9d700875eb60 \
        uuid=efeb95b4-5e44-45a4-ab5a-9d700875eb60

    :param str target: ingest or transfer
    :returns: JSON response; 403 on auth failure, 400 on bad input,
        500 when the AIP cannot be moved into the watched directory.
    """
    if request.method != 'POST':
        return django.http.HttpResponseNotAllowed(permitted_methods=['POST'])
    error = authenticate_request(request)
    if error:
        response = {'error': True, 'message': error}
        return helpers.json_response(response, status_code=403)
    sip_name = request.POST.get('name')
    sip_uuid = request.POST.get('uuid')
    if not all([sip_name, sip_uuid]):
        response = {
            'error': True,
            'message': '"name" and "uuid" are required.'
        }
        return helpers.json_response(response, status_code=400)
    if target not in ('transfer', 'ingest'):
        # fixed typo in the user-facing message (was 'tranfer')
        response = {'error': True, 'message': 'Unknown transfer type.'}
        return helpers.json_response(response, status_code=400)
    # TODO Clear DB of residual stuff related to SIP
    models.Task.objects.filter(job__sipuuid=sip_uuid).delete()
    models.Job.objects.filter(sipuuid=sip_uuid).delete()
    models.SIP.objects.filter(uuid=sip_uuid).delete()  # Delete is cascading
    models.RightsStatement.objects.filter(
        metadataappliestoidentifier=sip_uuid).delete(
        )  # Not actually a foreign key
    models.DublinCore.objects.filter(
        metadataappliestoidentifier=sip_uuid).delete()
    shared_directory_path = helpers.get_server_config_value('sharedDirectory')
    source = os.path.join(shared_directory_path, 'tmp', sip_name)
    reingest_uuid = sip_uuid
    if target == 'transfer':
        dest = os.path.join(shared_directory_path, 'watchedDirectories',
                            'activeTransfers', 'standardTransfer')
        # If the destination dir has a UUID, remove it
        sip_basename = os.path.basename(os.path.normpath(sip_name))
        name_has_uuid = len(sip_basename) > 36 and re.match(
            UUID_REGEX, sip_basename[-36:]) is not None
        if name_has_uuid:
            # strip '-<uuid>' (37 characters including the hyphen)
            dest = os.path.join(dest, sip_basename[:-37])
            if os.path.isdir(dest):
                response = {
                    'error': True,
                    'message':
                    'There is already a transfer in standardTransfer with the same name.'
                }
                return helpers.json_response(response, status_code=400)
        dest = os.path.join(dest, '')
        # Persist transfer record in the database; a new transfer UUID is
        # minted and returned to the caller as reingest_uuid.
        tdetails = {
            'currentlocation':
            '%sharedPath%' + dest[len(shared_directory_path):],
            'uuid': str(uuid.uuid4()),
            'type': 'Archivematica AIP',
        }
        reingest_uuid = tdetails['uuid']
        models.Transfer.objects.create(**tdetails)
        LOGGER.info(
            'Transfer saved in the database (uuid=%s, type=%s, location=%s)',
            tdetails['uuid'], tdetails['type'], tdetails['currentlocation'])
    elif target == 'ingest':
        dest = os.path.join(shared_directory_path, 'watchedDirectories',
                            'system', 'reingestAIP', '')
    # Move to watched directory so the MCP picks up the reingest
    try:
        LOGGER.debug('Reingest moving from %s to %s', source, dest)
        shutil.move(source, dest)
    except (shutil.Error, OSError) as e:
        error = e.strerror or "Unable to move reingested AIP to start reingest."
        LOGGER.warning('Unable to move reingested AIP to start reingest',
                       exc_info=True)
    if error:
        response = {'error': True, 'message': error}
        return helpers.json_response(response, status_code=500)
    else:
        response = {
            'message': 'Approval successful.',
            'reingest_uuid': reingest_uuid
        }
        return helpers.json_response(response)
import django.http # External dependencies, alphabetical from annoying.functions import get_object_or_None from tastypie.authentication import ApiKeyAuthentication # This project, alphabetical import archivematicaFunctions from contrib.mcp.client import MCPClient from components.filesystem_ajax import views as filesystem_ajax_views from components.unit import views as unit_views from components import helpers from main import models LOGGER = logging.getLogger('archivematica.dashboard') SHARED_DIRECTORY_ROOT = helpers.get_server_config_value('sharedDirectory') UUID_REGEX = re.compile( r'^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$', re.IGNORECASE) def authenticate_request(request): error = None api_auth = ApiKeyAuthentication() authorized = api_auth.is_authenticated(request) # 'authorized' can be True, False or tastypie.http.HttpUnauthorized # Check explicitly for True, not just truthiness if authorized is True: client_ip = request.META['REMOTE_ADDR']
def process_transfer(request, transfer_uuid):
    """(Legacy, Python 2 — duplicate revision) Convert a transfer into a
    SIP.

    Stages a structured SIP directory, moves the transfer's objects in,
    creates the SIP row, re-points file locations from
    %transferDirectory% to %SIPDirectory%, copies processingMCP.xml and
    moves the SIP into SIPsUnderConstruction. Returns a JSON response;
    requires an authenticated user.
    """
    response = {}
    if request.user.id:
        # get transfer info
        transfer = models.Transfer.objects.get(uuid=transfer_uuid)
        transfer_path = transfer.currentlocation.replace(
            '%sharedPath%', helpers.get_server_config_value('sharedDirectory')
        )
        # NOTE(review): legacy function-local imports kept as-is
        import MySQLdb
        import databaseInterface
        import databaseFunctions
        import shutil
        from archivematicaCreateStructuredDirectory import createStructuredDirectory
        from archivematicaCreateStructuredDirectory import createManualNormalizedDirectoriesList
        createStructuredDirectory(transfer_path, createManualNormalizedDirectories=False)
        processingDirectory = helpers.get_server_config_value('processingDirectory')
        # transfer dirs are '<name>-<36-char uuid>'; drop the '-<uuid>'
        transfer_directory_name = os.path.basename(transfer_path[:-1])
        transfer_name = transfer_directory_name[:-37]
        sharedPath = helpers.get_server_config_value('sharedDirectory')
        tmpSIPDir = os.path.join(processingDirectory, transfer_name) + "/"
        #processSIPDirectory = os.path.join(sharedPath, 'watchedDirectories/system/autoProcessSIP') + '/'
        processSIPDirectory = os.path.join(sharedPath, 'watchedDirectories/SIPCreation/SIPsUnderConstruction') + '/'
        #destSIPDir = os.path.join(processSIPDirectory, transfer_name) + "/"
        #destSIPDir = os.path.join(processSIPDirectory, transfer_name + '-' + ) + "/"
        createStructuredDirectory(tmpSIPDir, createManualNormalizedDirectories=False)
        objectsDirectory = os.path.join(transfer_path, 'objects') + '/'
        # NOTE(review): dead code preserved as a string-literal block
        # comment (abandoned SIP-row lookup).
        """
        #create row in SIPs table if one doesn't already exist
        lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
        #lookup_path = '%sharedPath%watchedDirectories/workFlowDecisions/createDip/' + transfer_name + '/'
        sql = " " "SELECT sipUUID FROM SIPs WHERE currentPath = '" " " + MySQLdb.escape_string(lookup_path) + "';"
        rows = databaseInterface.queryAllSQL(sql)
        if len(rows) > 0:
            row = rows[0]
            sipUUID = row[0]
        else:
            sipUUID = uuid.uuid4().__str__()
            databaseFunctions.createSIP(lookup_path, sipUUID)
        """
        sipUUID = uuid.uuid4().__str__()
        destSIPDir = os.path.join(processSIPDirectory, transfer_name) + "/"
        lookup_path = destSIPDir.replace(sharedPath, '%sharedPath%')
        databaseFunctions.createSIP(lookup_path, sipUUID)
        #move the objects to the SIPDir
        for item in os.listdir(objectsDirectory):
            shutil.move(os.path.join(objectsDirectory, item),
                        os.path.join(tmpSIPDir, "objects", item))
        #get the database list of files in the objects directory
        #for each file, confirm it's in the SIP objects directory, and update the current location/ owning SIP'
        # NOTE(review): SQL assembled by concatenation — injection risk if
        # transfer_uuid is not validated upstream.
        sql = """SELECT fileUUID, currentLocation FROM Files WHERE removedTime = 0 AND currentLocation LIKE '\%transferDirectory\%objects%' AND transferUUID = '""" + transfer_uuid + "'"
        for row in databaseInterface.queryAllSQL(sql):
            fileUUID = row[0]
            currentPath = databaseFunctions.deUnicode(row[1])
            currentSIPFilePath = currentPath.replace("%transferDirectory%", tmpSIPDir)
            if os.path.isfile(currentSIPFilePath):
                sql = """UPDATE Files SET currentLocation='%s', sipUUID='%s' WHERE fileUUID='%s'""" % (MySQLdb.escape_string(currentPath.replace("%transferDirectory%", "%SIPDirectory%")), sipUUID, fileUUID)
                databaseInterface.runSQL(sql)
            else:
                # Python 2 print-to-stderr syntax
                print >>sys.stderr, "file not found: ", currentSIPFilePath
        #copy processingMCP.xml file
        src = os.path.join(os.path.dirname(objectsDirectory[:-1]), "processingMCP.xml")
        dst = os.path.join(tmpSIPDir, "processingMCP.xml")
        shutil.copy(src, dst)
        #moveSIPTo processSIPDirectory
        shutil.move(tmpSIPDir, destSIPDir)
        elasticSearchFunctions.connect_and_change_transfer_file_status(transfer_uuid, '')
        response['message'] = 'SIP ' + sipUUID + ' created.'
    else:
        response['error'] = True
        response['message'] = 'Must be logged in.'
    return HttpResponse(
        simplejson.JSONEncoder(encoding='utf-8').encode(response),
        mimetype='application/json'
    )