def start_transfer(transfer_name, transfer_type, accession, access_id, paths, row_ids):
    """
    Start a new transfer.

    :param str transfer_name: Name of new transfer.
    :param str transfer_type: Type of new transfer. From TRANSFER_TYPE_DIRECTORIES.
    :param str accession: Accession number of new transfer.
    :param str access_id: Access system identifier for the new transfer.
    :param list paths: List of <location_uuid>:<relative_path> to be copied into
        the new transfer. Location UUIDs should be associated with this pipeline,
        and relative path should be relative to the location.
    :param list row_ids: ID of the associated TransferMetadataSet for disk image
        ingest. NOTE(review): assumed to have one entry per path — a shorter
        list raises IndexError; confirm against callers.
    :returns: Dict with {'message': <message>, 'path': <path>}.
    :raises ValueError: If transfer_name is empty or no paths were provided.
    :raises Exception: If starting any transfer component fails.
    """
    if not transfer_name:
        raise ValueError('No transfer name provided.')
    if not paths:
        raise ValueError('No path provided.')

    # Create temp directory that everything will be copied into
    temp_base_dir = os.path.join(SHARED_DIRECTORY_ROOT, 'tmp')
    temp_dir = tempfile.mkdtemp(dir=temp_base_dir)
    os.chmod(temp_dir, 0o770)  # Needs to be writeable by the SS

    try:
        for i, path in enumerate(paths):
            index = i + 1  # so transfers start from 1, not 0

            # Don't suffix the first transfer component, only subsequent ones
            if index > 1:
                target = transfer_name + '_' + str(index)
            else:
                target = transfer_name

            row_id = row_ids[i]

            if helpers.file_is_an_archive(path):
                # Archives are copied directly into temp_dir and started as-is.
                transfer_dir = temp_dir
                p = path.split(':', 1)[1]
                logger.debug('found a zip file, splitting path ' + p)
                filepath = os.path.join(temp_dir, os.path.basename(p))
            else:
                path = os.path.join(path, '.')  # Copy contents of dir but not dir
                transfer_dir = os.path.join(temp_dir, target)
                filepath = os.path.join(temp_dir, target)

            transfer_relative = transfer_dir.replace(SHARED_DIRECTORY_ROOT, '', 1)
            copy_from_transfer_sources([path], transfer_relative)
            filepath = archivematicaFunctions.unicodeToStr(filepath)
            try:
                destination = copy_to_start_transfer(
                    filepath=filepath,
                    type=transfer_type,
                    accession=accession,
                    access_id=access_id,
                    transfer_metadata_set_row_uuid=row_id)
            except Exception as e:
                logger.exception('Error starting transfer {}: {}'.format(
                    filepath, e))
                raise Exception('Error starting transfer {}: {}'.format(
                    filepath, e))
    except Exception:
        # BUG FIX: the original leaked temp_dir whenever starting a transfer
        # component failed; clean it up (best-effort) before re-raising.
        shutil.rmtree(temp_dir, ignore_errors=True)
        raise

    shutil.rmtree(temp_dir)
    return {'message': _('Copy successful.'), 'path': destination}
def unapproved_transfers(request):
    """List transfers that are awaiting an approval decision.

    Example: http://127.0.0.1/api/transfer/unapproved?username=mike&api_key=<API key>

    :returns: helpers.json_response with 'results' (list of dicts with
        'type', 'directory' and 'uuid') and a success 'message'.
    """
    response = {}
    unapproved = []
    # Map watch-directory name -> transfer type. Built once here: it is
    # loop-invariant (the original rebuilt it on every loop iteration).
    transfer_type_directories_reversed = {
        v: k
        for k, v in filesystem_ajax_views.TRANSFER_TYPE_DIRECTORIES.items()
    }
    jobs = models.Job.objects.filter(
        (Q(jobtype="Approve standard transfer")
         | Q(jobtype="Approve zipped transfer")
         | Q(jobtype="Approve DSpace transfer")
         | Q(jobtype="Approve bagit transfer")
         | Q(jobtype="Approve zipped bagit transfer"))
        & Q(currentstep=models.Job.STATUS_AWAITING_DECISION))
    for job in jobs:
        # remove standard transfer path from directory (and last character)
        type_and_directory = job.directory.replace(
            get_modified_standard_transfer_path() + "/", "", 1)

        # remove trailing slash if not a zipped bag file
        if not helpers.file_is_an_archive(job.directory):
            type_and_directory = type_and_directory[:-1]

        transfer_watch_directory = type_and_directory.split("/")[0]
        # Get transfer type from transfer directory
        transfer_type = transfer_type_directories_reversed[
            transfer_watch_directory]

        job_directory = type_and_directory.replace(
            transfer_watch_directory + "/", "", 1)

        unapproved.append({
            "type": transfer_type,
            "directory": job_directory,
            "uuid": job.sipuuid
        })

    # return list of unapproved transfers as JSON
    response["results"] = unapproved
    response["message"] = "Fetched unapproved transfers successfully."
    return helpers.json_response(response)
def unapproved_transfers(request):
    """List transfers that are awaiting an approval decision (API endpoint).

    Example: http://127.0.0.1/api/transfer/unapproved?username=mike&api_key=<API key>

    :returns: JSON response with 'results' (list of dicts with 'type',
        'directory' and 'uuid') and a 'message'; 403 on authentication
        failure; 405 for non-GET requests.
    """
    if request.method != 'GET':
        return django.http.HttpResponseNotAllowed(permitted_methods=['GET'])

    auth_error = authenticate_request(request)
    response = {}
    if auth_error is not None:
        response['message'] = auth_error
        response['error'] = True
        return helpers.json_response(response, status_code=403)

    # Map watch-directory name -> transfer type. Built once here: it is
    # loop-invariant (the original rebuilt it on every loop iteration).
    transfer_type_directories_reversed = {
        v: k
        for k, v in filesystem_ajax_views.TRANSFER_TYPE_DIRECTORIES.items()
    }
    unapproved = []
    jobs = models.Job.objects.filter(
        (Q(jobtype="Approve standard transfer")
         | Q(jobtype="Approve DSpace transfer")
         | Q(jobtype="Approve bagit transfer")
         | Q(jobtype="Approve zipped bagit transfer"))
        & Q(currentstep='Awaiting decision'))
    for job in jobs:
        # remove standard transfer path from directory (and last character)
        type_and_directory = job.directory.replace(
            get_modified_standard_transfer_path() + '/', '', 1)

        # remove trailing slash if not a zipped bag file
        if not helpers.file_is_an_archive(job.directory):
            type_and_directory = type_and_directory[:-1]

        transfer_watch_directory = type_and_directory.split('/')[0]
        # Get transfer type from transfer directory
        transfer_type = transfer_type_directories_reversed[
            transfer_watch_directory]

        job_directory = type_and_directory.replace(
            transfer_watch_directory + '/', '', 1)

        unapproved.append({
            'type': transfer_type,
            'directory': job_directory,
            'uuid': job.sipuuid,
        })

    # Return the list of unapproved transfers as JSON. The original also
    # tracked an 'error' variable that was never assigned, making its 500
    # branch unreachable; that dead code has been removed.
    response['results'] = unapproved
    response['message'] = 'Fetched unapproved transfers successfully.'
    return helpers.json_response(response)
def unapproved_transfers(request):
    """List transfers that are awaiting an approval decision.

    :returns: JSON HttpResponse with 'results' (list of dicts with 'type'
        and 'directory') and a 'message'; 403 on authentication failure;
        404 for non-GET requests.
    """
    if request.method != 'GET':
        # BUG FIX: the original did `return Http404`, which returns the
        # exception *class* uncalled (not a valid view response); Http404
        # must be raised.
        raise Http404

    auth_error = authenticate_request(request)
    response = {}
    if auth_error is not None:
        response['message'] = auth_error
        response['error'] = True
        return HttpResponseForbidden(
            simplejson.JSONEncoder().encode(response),
            mimetype='application/json'
        )

    unapproved = []
    jobs = models.Job.objects.filter(
        (
            Q(jobtype="Approve standard transfer") |
            Q(jobtype="Approve DSpace transfer") |
            Q(jobtype="Approve bagit transfer") |
            Q(jobtype="Approve zipped bagit transfer")
        ) & Q(currentstep='Awaiting decision')
    )
    for job in jobs:
        # remove standard transfer path from directory (and last character)
        type_and_directory = job.directory.replace(
            get_modified_standard_transfer_path() + '/', '', 1
        )

        # remove trailing slash if not a zipped bag file
        if not helpers.file_is_an_archive(job.directory):
            type_and_directory = type_and_directory[:-1]

        transfer_watch_directory = type_and_directory.split('/')[0]
        transfer_type = helpers.transfer_type_by_directory(transfer_watch_directory)

        job_directory = type_and_directory.replace(transfer_watch_directory + '/', '', 1)

        unapproved.append({
            'type': transfer_type,
            'directory': job_directory
        })

    # Return the list of unapproved transfers as JSON. The original also
    # tracked 'error' (never assigned, so its 500 branch was unreachable)
    # and an unused 'message' variable; both removed as dead code.
    response['results'] = unapproved
    response['message'] = 'Fetched unapproved transfers successfully.'
    return HttpResponse(
        simplejson.JSONEncoder().encode(response),
        mimetype='application/json'
    )
def copy_to_start_transfer(request):
    """Move a path into the watched transfer directory for its type so the
    MCP picks it up as a new transfer.

    POST params: 'filepath' (path relative to /), 'type' (transfer type
    key, defaults to standard transfer), 'accession' (optional accession
    number, relayed to MCPClient via a Transfer DB row).

    :returns: JSON HttpResponse with 'message' and, on failure, 'error'.
    """
    filepath = archivematicaFunctions.unicodeToStr(request.POST.get('filepath', ''))
    # Renamed from 'type', which shadowed the builtin.
    transfer_type = request.POST.get('type', '')
    accession = request.POST.get('accession', '')

    error = check_filepath_exists('/' + filepath)

    if error is None:
        # confine destination to subdir of originals
        filepath = os.path.join('/', filepath)
        basename = os.path.basename(filepath)

        # Watched-directory subdirectory for each transfer type; unknown
        # types fall back to the standard transfer directory.
        type_paths = {
            'standard': 'standardTransfer',
            'unzipped bag': 'baggitDirectory',
            'zipped bag': 'baggitZippedDirectory',
            'dspace': 'Dspace',
            'maildir': 'maildir',
            'TRIM': 'TRIM'
        }
        try:
            type_subdir = type_paths[transfer_type]
            destination = os.path.join(ACTIVE_TRANSFER_DIR, type_subdir)
        except KeyError:
            destination = os.path.join(STANDARD_TRANSFER_DIR)

        # if the transfer component path leads to a ZIP file, treat as a
        # zipped bag
        if not helpers.file_is_an_archive(filepath):
            destination = os.path.join(destination, basename)
            destination = pad_destination_filepath_if_it_already_exists(destination)

        # relay accession via DB row that MCPClient scripts will use to get
        # supplementary info from
        if accession != '':
            mcp_destination = destination.replace(SHARED_DIRECTORY_ROOT + '/', '%sharedPath%') + '/'
            # objects.create() already persists the row; the original's
            # extra transfer.save() call was redundant and was removed.
            models.Transfer.objects.create(
                uuid=str(uuid.uuid4()),
                accessionid=accession,
                currentlocation=mcp_destination
            )

        try:
            shutil.move(filepath, destination)
        except Exception:
            # Narrowed from a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            error = 'Error copying from ' + filepath + ' to ' + destination + '. (' + str(sys.exc_info()[0]) + ')'

    response = {}

    if error is not None:
        response['message'] = error
        response['error'] = True
    else:
        response['message'] = 'Copy successful.'

    return HttpResponse(
        simplejson.JSONEncoder().encode(response),
        mimetype='application/json'
    )
def copy_transfer_component(request):
    """Copy a transfer component into a destination transfer directory
    using rsync.

    Archives are copied as-is to the destination; for a directory, its
    entries are copied one by one into <destination>/<transfer_name>.

    POST params: 'name' (transfer name), 'path' (source path),
    'destination' (target directory).

    :returns: JSON HttpResponse with 'message' and, on failure, 'error'.
    """
    transfer_name = archivematicaFunctions.unicodeToStr(request.POST.get('name', ''))
    path = archivematicaFunctions.unicodeToStr(request.POST.get('path', ''))
    destination = archivematicaFunctions.unicodeToStr(request.POST.get('destination', ''))

    error = None

    if transfer_name == '':
        error = 'No transfer name provided.'
    elif path == '':
        error = 'No path provided.'
    else:
        # if the transfer component path leads to an archive, treat it as a
        # zipped bag
        if helpers.file_is_an_archive(path):
            rsync_copy(path, destination)
            paths_copied = 1
        else:
            transfer_dir = os.path.join(destination, transfer_name)

            # Create directory before it is used, otherwise shutil.copy()
            # would use that location to store a file
            if not os.path.isdir(transfer_dir):
                os.mkdir(transfer_dir)

            # cycle through each path copying files/dirs inside it to the
            # transfer dir. The original branched on os.path.isdir() but
            # both branches called rsync_copy identically (plus a dead
            # triple-quoted copytree fallback); the dead code was removed.
            paths_copied = 0
            for entry in sorted_directory_list(path):
                entry_path = os.path.join(path, entry)
                rsync_copy(entry_path, transfer_dir)
                paths_copied += 1

    response = {}

    if error is not None:
        response['message'] = error
        response['error'] = True
    else:
        response['message'] = 'Copied ' + str(paths_copied) + ' entries.'

    return HttpResponse(
        simplejson.JSONEncoder().encode(response),
        mimetype='application/json'
    )
def copy_to_start_transfer(request):
    """Move a path into the watched transfer directory for its type so the
    MCP picks it up as a new transfer.

    POST params: 'filepath' (path relative to /), 'type' (transfer type
    key, defaults to standard transfer), 'accession' (optional accession
    number, relayed to MCPClient via a Transfer DB row).

    :returns: JSON HttpResponse with 'message' and, on failure, 'error'.
    """
    filepath = archivematicaFunctions.unicodeToStr(
        request.POST.get('filepath', ''))
    # Renamed from 'type', which shadowed the builtin.
    transfer_type = request.POST.get('type', '')
    accession = request.POST.get('accession', '')

    error = check_filepath_exists('/' + filepath)

    if error is None:
        # confine destination to subdir of originals
        filepath = os.path.join('/', filepath)
        basename = os.path.basename(filepath)

        # Watched-directory subdirectory for each transfer type; unknown
        # types fall back to the standard transfer directory.
        type_paths = {
            'standard': 'standardTransfer',
            'unzipped bag': 'baggitDirectory',
            'zipped bag': 'baggitZippedDirectory',
            'dspace': 'Dspace',
            'maildir': 'maildir',
            'TRIM': 'TRIM'
        }
        try:
            type_subdir = type_paths[transfer_type]
            destination = os.path.join(ACTIVE_TRANSFER_DIR, type_subdir)
        except KeyError:
            destination = os.path.join(STANDARD_TRANSFER_DIR)

        # if the transfer component path leads to a ZIP file, treat as a
        # zipped bag
        if not helpers.file_is_an_archive(filepath):
            destination = os.path.join(destination, basename)
            destination = pad_destination_filepath_if_it_already_exists(
                destination)

        # relay accession via DB row that MCPClient scripts will use to get
        # supplementary info from
        if accession != '':
            mcp_destination = destination.replace(SHARED_DIRECTORY_ROOT + '/',
                                                  '%sharedPath%') + '/'
            # objects.create() already persists the row; the original's
            # extra transfer.save() call was redundant and was removed.
            models.Transfer.objects.create(
                uuid=str(uuid.uuid4()),
                accessionid=accession,
                currentlocation=mcp_destination)

        try:
            shutil.move(filepath, destination)
        except Exception:
            # Narrowed from a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            error = 'Error copying from ' + filepath + ' to ' + destination + '. (' + str(
                sys.exc_info()[0]) + ')'

    response = {}

    if error is not None:
        response['message'] = error
        response['error'] = True
    else:
        response['message'] = 'Copy successful.'

    return HttpResponse(simplejson.JSONEncoder().encode(response),
                        mimetype='application/json')
def copy_transfer_component(request):
    """Copy a transfer component into a destination transfer directory
    using rsync.

    Archives are copied as-is to the destination; for a directory, its
    entries are copied one by one into <destination>/<transfer_name>.

    POST params: 'name' (transfer name), 'path' (source path),
    'destination' (target directory).

    :returns: JSON HttpResponse with 'message' and, on failure, 'error'.
    """
    transfer_name = archivematicaFunctions.unicodeToStr(
        request.POST.get('name', ''))
    path = archivematicaFunctions.unicodeToStr(request.POST.get('path', ''))
    destination = archivematicaFunctions.unicodeToStr(
        request.POST.get('destination', ''))

    error = None

    if transfer_name == '':
        error = 'No transfer name provided.'
    elif path == '':
        error = 'No path provided.'
    else:
        # if the transfer component path leads to an archive, treat it as a
        # zipped bag
        if helpers.file_is_an_archive(path):
            rsync_copy(path, destination)
            paths_copied = 1
        else:
            transfer_dir = os.path.join(destination, transfer_name)

            # Create directory before it is used, otherwise shutil.copy()
            # would use that location to store a file
            if not os.path.isdir(transfer_dir):
                os.mkdir(transfer_dir)

            # cycle through each path copying files/dirs inside it to the
            # transfer dir. The original branched on os.path.isdir() but
            # both branches called rsync_copy identically (plus a dead
            # triple-quoted copytree fallback); the dead code was removed.
            paths_copied = 0
            for entry in sorted_directory_list(path):
                entry_path = os.path.join(path, entry)
                rsync_copy(entry_path, transfer_dir)
                paths_copied += 1

    response = {}

    if error is not None:
        response['message'] = error
        response['error'] = True
    else:
        response['message'] = 'Copied ' + str(paths_copied) + ' entries.'

    return HttpResponse(simplejson.JSONEncoder().encode(response),
                        mimetype='application/json')