def _handle_package_request_remote_result_notification(config, event, success):
    response_message = None
    # Setting names are determined by the event type
    setting_prefix = "{}_request_notification".format(config.event_type.lower())
    request_notification_url = utils.get_setting("{}_url".format(setting_prefix))
    # If a notification is configured, attempt it
    if request_notification_url is not None:
        headers = {"Content-type": "application/json"}
        # The status reported may be approved, yet have failed during execution
        status_to_report = event.status
        if event.status == Event.APPROVED and not success:
            status_to_report += " (failed)"
        # Serialize payload
        payload = json.dumps({
            "event_id": event.id,
            "message": "{}: {}".format(status_to_report, event.status_reason),
            "success": success,
        })
        # Specify basic authentication, if configured
        request_notification_auth_username = utils.get_setting(
            "{}_auth_username".format(setting_prefix))
        request_notification_auth_password = utils.get_setting(
            "{}_auth_password".format(setting_prefix))
        if request_notification_auth_username is not None:
            auth = requests.auth.HTTPBasicAuth(
                request_notification_auth_username,
                request_notification_auth_password)
        else:
            auth = None
        # Make the request; if the notification response body includes a
        # message, return it as the response message
        notification_response = requests.post(
            request_notification_url, auth=auth, data=payload, headers=headers)
        try:
            response_data = json.loads(notification_response.content)
            response_message = response_data["message"]
        except (ValueError, KeyError):
            # Body was not JSON, or JSON without a "message" key
            pass
    return response_message
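# --- Hedged example (not part of the original module) ---
# A minimal sketch showing which setting keys the handler above derives for a
# hypothetical event type of "DELETE". The naming scheme comes straight from
# the function; the event type value itself is illustrative.
def _example_notification_setting_keys(event_type="DELETE"):
    prefix = "{}_request_notification".format(event_type.lower())
    return ["{}_{}".format(prefix, suffix)
            for suffix in ("url", "auth_username", "auth_password")]

# _example_notification_setting_keys() ->
#     ['delete_request_notification_url',
#      'delete_request_notification_auth_username',
#      'delete_request_notification_auth_password']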
def path2browse_dict(path):
    """Given a path on disk, return a dict with keys for directories,
    entries and properties.
    """
    should_count = not utils.get_setting("object_counting_disabled", False)
    entries = []
    directories = []
    properties = {}
    for entry in sorted(_scandir_public(path), key=lambda e: e.name.lower()):
        entries.append(entry.name)
        if not entry.is_dir():
            properties[entry.name] = {"size": entry.stat().st_size}
        elif os.access(entry.path, os.R_OK):
            directories.append(entry.name)
            if should_count:
                properties[entry.name] = {
                    "object count": count_objects_in_directory(entry.path)
                }
    return {
        "directories": directories,
        "entries": entries,
        "properties": properties,
    }
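# --- Hedged example (not part of the original module) ---
# Illustrative return value of path2browse_dict() for a directory holding a
# readable subdirectory "docs" with three objects and a 1 KiB file
# "README.txt", with object counting enabled. All values are made up; note
# that entries are sorted case-insensitively, so "docs" precedes "README.txt".
_EXAMPLE_BROWSE_DICT = {
    "directories": ["docs"],
    "entries": ["docs", "README.txt"],
    "properties": {
        "docs": {"object count": 3},
        "README.txt": {"size": 1024},
    },
}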
def pipeline_edit(request, uuid=None):
    if uuid:
        action = _("Edit Pipeline")
        pipeline = get_object_or_404(Pipeline, uuid=uuid)
        initial = {}
    else:
        action = _("Create Pipeline")
        pipeline = None
        initial = {
            "create_default_locations": True,
            "enabled": not utils.get_setting("pipelines_disabled"),
        }

    if request.method == "POST":
        form = forms.PipelineForm(request.POST, instance=pipeline,
                                  initial=initial)
        if form.is_valid():
            pipeline = form.save()
            pipeline.save(form.cleaned_data["create_default_locations"])
            messages.success(request, _("Pipeline saved."))
            return redirect("locations:pipeline_list")
    else:
        form = forms.PipelineForm(instance=pipeline, initial=initial)
    return render(
        request,
        "locations/pipeline_form.html",
        {
            "action": action,
            "form": form,
            "pipeline": pipeline,
        },
    )
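# --- Hedged example (not part of the original module) ---
# One plausible URLconf behind the namespaced redirect("locations:pipeline_list")
# used above; route regexes and view names are assumptions, not the project's
# actual urls.py.
from django.conf.urls import url
from locations import views

app_name = "locations"
urlpatterns = [
    url(r"^pipelines/$", views.pipeline_list, name="pipeline_list"),
    url(r"^pipelines/create/$", views.pipeline_edit, name="pipeline_create"),
    url(r"^pipelines/(?P<uuid>[\w-]{36})/edit/$", views.pipeline_edit,
        name="pipeline_edit"),
]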
def obj_create(self, bundle, **kwargs):
    bundle = super(PipelineResource, self).obj_create(bundle, **kwargs)
    bundle.obj.enabled = not utils.get_setting('pipelines_disabled', False)
    create_default_locations = bundle.data.get('create_default_locations',
                                               False)
    # Try to guess the Pipeline's IP, if doing default setup
    if create_default_locations:
        ip = bundle.request.META.get('REMOTE_ADDR') or None
        bundle.obj.remote_name = ip
    shared_path = bundle.data.get('shared_path', None)
    bundle.obj.save(create_default_locations, shared_path)
    return bundle
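# --- Hedged example (not part of the original module) ---
# Client-side sketch of registering a pipeline against the Tastypie resource
# above. The endpoint path, auth header and payload keys are assumptions in
# the style of the storage service API, not verified against it.
import json
import requests

def _example_register_pipeline(base_url="http://127.0.0.1:8000"):
    payload = {
        "uuid": "f9a2e3f1-24c9-47c8-b2ba-4d87e7f387fd",  # made-up UUID
        "description": "Example pipeline",
        "create_default_locations": True,
        "shared_path": "/var/archivematica/sharedDirectory",
    }
    return requests.post(
        "{}/api/v2/pipeline/".format(base_url),
        data=json.dumps(payload),
        headers={
            "Content-Type": "application/json",
            # Tastypie-style key auth; whether this API requires it is assumed
            "Authorization": "ApiKey test:1234567890abcdef",
        },
    )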
def pipeline_edit(request, uuid=None):
    if uuid:
        action = _("Edit Pipeline")
        pipeline = get_object_or_404(Pipeline, uuid=uuid)
        initial = {}
    else:
        action = _("Create Pipeline")
        pipeline = None
        initial = {'enabled': not utils.get_setting('pipelines_disabled')}

    if request.method == 'POST':
        form = forms.PipelineForm(request.POST, instance=pipeline,
                                  initial=initial)
        if form.is_valid():
            pipeline = form.save()
            pipeline.save(form.cleaned_data['create_default_locations'])
            messages.success(request, _("Pipeline saved."))
            return redirect('pipeline_list')
    else:
        form = forms.PipelineForm(instance=pipeline, initial=initial)
    return render(request, 'locations/pipeline_form.html', locals())
def path2browse_dict(path):
    """Given a path on disk, return a dict with keys for directories,
    entries and properties.
    """
    properties = {}
    # Sorted list of all entries in directory, excluding hidden files
    entries = [name for name in os.listdir(path) if name[0] != '.']
    entries = sorted(entries, key=lambda s: s.lower())
    directories = []
    for name in entries:
        full_path = os.path.join(path, name)
        properties[name] = {'size': os.path.getsize(full_path)}
        if os.path.isdir(full_path) and os.access(full_path, os.R_OK):
            directories.append(name)
            # Counting objects can be slow on large trees, so honour the
            # kill switch and report a placeholder count instead
            if utils.get_setting('object_counting_disabled', False):
                properties[name]['object count'] = '0+'
            else:
                properties[name]['object count'] = count_objects_in_directory(
                    full_path)
    return {'directories': directories,
            'entries': entries,
            'properties': properties}
def create_default_locations(self, shared_path=None):
    """
    Creates default locations for a pipeline based on config.

    Creates a local filesystem Space and currently processing location in
    it. If a shared_path is provided, currently processing location is at
    that path. Creates Transfer Source and AIP Store locations based on
    configuration from administration.Settings.
    """
    # Use shared path if provided
    if not shared_path:
        shared_path = "/var/archivematica/sharedDirectory"
    shared_path = shared_path.strip("/") + "/"
    LOGGER.info("Creating default locations for pipeline %s.", self)

    space, space_created = Space.objects.get_or_create(
        access_protocol=Space.LOCAL_FILESYSTEM, path="/")
    if space_created:
        local_fs = LocalFilesystem(space=space)
        local_fs.save()
        LOGGER.info("Protocol Space created: %s", local_fs)

    try:
        currently_processing, _ = Location.active.get_or_create(
            purpose=Location.CURRENTLY_PROCESSING,
            defaults={
                "space": space,
                "relative_path": shared_path,
            },
        )
    except Location.MultipleObjectsReturned:
        currently_processing = Location.active.filter(
            purpose=Location.CURRENTLY_PROCESSING).first()
    LocationPipeline.objects.get_or_create(
        pipeline=self, location=currently_processing)
    LOGGER.info("Currently processing: %s", currently_processing)

    purposes = [
        {
            "default": "default_transfer_source",
            "new": "new_transfer_source",
            "purpose": Location.TRANSFER_SOURCE,
        },
        {
            "default": "default_aip_storage",
            "new": "new_aip_storage",
            "purpose": Location.AIP_STORAGE,
        },
        {
            "default": "default_dip_storage",
            "new": "new_dip_storage",
            "purpose": Location.DIP_STORAGE,
        },
        {
            "default": "default_backlog",
            "new": "new_backlog",
            "purpose": Location.BACKLOG,
        },
        {
            "default": "default_recovery",
            "new": "new_recovery",
            "purpose": Location.AIP_RECOVERY,
        },
    ]
    for p in purposes:
        defaults = utils.get_setting(p["default"], [])
        for uuid in defaults:
            if uuid == "new":
                # Create new location
                new_location = utils.get_setting(p["new"])
                location = Location.objects.create(
                    purpose=p["purpose"], **new_location)
            else:
                # Fetch existing location
                location = Location.objects.get(uuid=uuid)
                assert location.purpose == p["purpose"]
            location.default = True
            location.save()
            LOGGER.info("Adding new %s %s to %s", p["purpose"], location, self)
            LocationPipeline.objects.get_or_create(
                pipeline=self, location=location)
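# --- Hedged example (not part of the original module) ---
# Illustration of the administration settings the purposes loop above reads:
# each default_* setting holds a list of Location UUIDs, where the sentinel
# "new" means "create a Location from the matching new_* dict", whose keys are
# passed straight to Location.objects.create(purpose=..., **new_location).
# All values below are made up, and a real new_* dict would also need any
# other fields Location requires (e.g. its Space), omitted here.
_EXAMPLE_DEFAULT_LOCATION_SETTINGS = {
    "default_transfer_source": ["new"],
    "new_transfer_source": {
        "relative_path": "home",
        "description": "Default transfer source",
    },
    "default_aip_storage": ["362726e2-92d4-4d85-9b7c-2f9c298f4a45"],
}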
def create_default_locations(self, shared_path=None):
    """
    Creates default locations for a pipeline based on config.

    Creates a local filesystem Space and currently processing location in
    it. If a shared_path is provided, currently processing location is at
    that path. Creates Transfer Source and AIP Store locations based on
    configuration from administration.Settings.
    """
    # Use shared path if provided
    if not shared_path:
        shared_path = '/var/archivematica/sharedDirectory'
    shared_path = shared_path.strip('/') + '/'
    LOGGER.info("Creating default locations for pipeline %s.", self)

    space, space_created = Space.objects.get_or_create(
        access_protocol=Space.LOCAL_FILESYSTEM, path='/')
    if space_created:
        local_fs = LocalFilesystem(space=space)
        local_fs.save()
        LOGGER.info("Protocol Space created: %s", local_fs)

    currently_processing, _ = Location.active.get_or_create(
        purpose=Location.CURRENTLY_PROCESSING,
        defaults={
            'space': space,
            'relative_path': shared_path,
        })
    LocationPipeline.objects.get_or_create(
        pipeline=self, location=currently_processing)
    LOGGER.info("Currently processing: %s", currently_processing)

    purposes = [
        {'default': 'default_transfer_source',
         'new': 'new_transfer_source',
         'purpose': Location.TRANSFER_SOURCE},
        {'default': 'default_aip_storage',
         'new': 'new_aip_storage',
         'purpose': Location.AIP_STORAGE},
        {'default': 'default_dip_storage',
         'new': 'new_dip_storage',
         'purpose': Location.DIP_STORAGE},
        {'default': 'default_backlog',
         'new': 'new_backlog',
         'purpose': Location.BACKLOG},
        {'default': 'default_recovery',
         'new': 'new_recovery',
         'purpose': Location.AIP_RECOVERY},
    ]
    for p in purposes:
        defaults = utils.get_setting(p['default'], [])
        for uuid in defaults:
            if uuid == 'new':
                # Create new location
                new_location = utils.get_setting(p['new'])
                location = Location.objects.create(
                    purpose=p['purpose'], **new_location)
            else:
                # Fetch existing location
                location = Location.objects.get(uuid=uuid)
                assert location.purpose == p['purpose']
            LOGGER.info("Adding new %s %s to %s", p['purpose'], location, self)
            LocationPipeline.objects.get_or_create(
                pipeline=self, location=location)
def startup():
    # Imports are deferred so this can run before app loading has settled
    import django.core.exceptions
    import errno
    import os.path
    from locations import models as locations_models
    from common import utils
    import logging
    LOGGER = logging.getLogger(__name__)

    LOGGER.info("Running startup")
    try:
        space, space_created = locations_models.Space.objects.get_or_create(
            access_protocol=locations_models.Space.LOCAL_FILESYSTEM,
            path=os.sep,
            defaults={
                'staging_path': os.path.join(
                    os.sep, 'var', 'archivematica', 'storage_service')
            })
        if space_created:
            locations_models.LocalFilesystem.objects.create(space=space)
            LOGGER.info('Created default Space %s', space)
    except django.core.exceptions.MultipleObjectsReturned:
        LOGGER.info('Multiple default Spaces exist; not doing default setup.')
        return

    default_locations = [
        {
            'purpose': locations_models.Location.TRANSFER_SOURCE,
            'relative_path': 'home',
            'description': '',
            'default_setting': 'default_transfer_source',
        },
        {
            'purpose': locations_models.Location.AIP_STORAGE,
            'relative_path': os.path.join(
                'var', 'archivematica', 'sharedDirectory', 'www', 'AIPsStore'),
            'description': 'Store AIP in standard Archivematica Directory',
            'default_setting': 'default_aip_storage',
        },
        {
            'purpose': locations_models.Location.DIP_STORAGE,
            'relative_path': os.path.join(
                'var', 'archivematica', 'sharedDirectory', 'www', 'DIPsStore'),
            'description': 'Store DIP in standard Archivematica Directory',
            'default_setting': 'default_dip_storage',
        },
        {
            'purpose': locations_models.Location.BACKLOG,
            'relative_path': os.path.join(
                'var', 'archivematica', 'sharedDirectory', 'www',
                'AIPsStore', 'transferBacklog'),
            'description': 'Default transfer backlog',
            'default_setting': 'default_backlog',
        },
        {
            'purpose': locations_models.Location.STORAGE_SERVICE_INTERNAL,
            'relative_path': os.path.join(
                'var', 'archivematica', 'storage_service'),
            'description': 'For storage service internal usage.',
            'default_setting': None,
            'create_dirs': True,
        },
        {
            'purpose': locations_models.Location.AIP_RECOVERY,
            'relative_path': os.path.join(
                'var', 'archivematica', 'storage_service', 'recover'),
            'description': 'Default AIP recovery',
            'default_setting': 'default_recovery',
            'create_dirs': True,
        },
    ]
    for loc_info in default_locations:
        try:
            new_loc, created = locations_models.Location.active.get_or_create(
                purpose=loc_info['purpose'],
                defaults={
                    'space': space,
                    'relative_path': loc_info['relative_path'],
                    'description': loc_info['description'],
                })
            if created:
                LOGGER.info('Created default %s Location %s',
                            loc_info['purpose'], new_loc)
        except locations_models.Location.MultipleObjectsReturned:
            continue

        if created and loc_info.get('create_dirs'):
            LOGGER.info('Creating %s Location %s',
                        loc_info['purpose'], new_loc)
            try:
                os.mkdir(new_loc.full_path)
                # Hack for extra recovery dir
                if loc_info['purpose'] == locations_models.Location.AIP_RECOVERY:
                    os.mkdir(os.path.join(new_loc.full_path, 'backup'))
            except OSError as e:
                if e.errno != errno.EEXIST:
                    LOGGER.error("%s location %s not accessible.",
                                 loc_info['purpose'], new_loc.full_path)

        if (loc_info['default_setting'] and
                utils.get_setting(loc_info['default_setting']) is None):
            utils.set_setting(loc_info['default_setting'], [new_loc.uuid])
            LOGGER.info('Set %s as %s', new_loc, loc_info['default_setting'])
def populate_default_locations():
    """Create default local filesystem space and its locations."""
    try:
        space, space_created = locations_models.Space.objects.get_or_create(
            access_protocol=locations_models.Space.LOCAL_FILESYSTEM,
            path=os.sep,
            defaults={
                "staging_path": os.path.join(
                    os.sep, "var", "archivematica", "storage_service")
            },
        )
        if space_created:
            locations_models.LocalFilesystem.objects.create(space=space)
            LOGGER.info("Created default Space %s", space)
    except django.core.exceptions.MultipleObjectsReturned:
        LOGGER.info("Multiple default Spaces exist; not doing default setup.")
        return

    default_locations = [
        {
            "purpose": locations_models.Location.TRANSFER_SOURCE,
            "relative_path": "home",
            "description": "Default transfer source",
            "default_setting": "default_transfer_source",
        },
        {
            "purpose": locations_models.Location.AIP_STORAGE,
            "relative_path": os.path.join(
                "var", "archivematica", "sharedDirectory", "www", "AIPsStore"),
            "description": "Store AIP in standard Archivematica Directory",
            "default_setting": "default_aip_storage",
        },
        {
            "purpose": locations_models.Location.DIP_STORAGE,
            "relative_path": os.path.join(
                "var", "archivematica", "sharedDirectory", "www", "DIPsStore"),
            "description": "Store DIP in standard Archivematica Directory",
            "default_setting": "default_dip_storage",
        },
        {
            "purpose": locations_models.Location.BACKLOG,
            "relative_path": os.path.join(
                "var", "archivematica", "sharedDirectory", "www",
                "AIPsStore", "transferBacklog"),
            "description": "Default transfer backlog",
            "default_setting": "default_backlog",
        },
        {
            "purpose": locations_models.Location.STORAGE_SERVICE_INTERNAL,
            "relative_path": os.path.join(
                "var", "archivematica", "storage_service"),
            "description": "For storage service internal usage.",
            "default_setting": None,
            "create_dirs": True,
        },
        {
            "purpose": locations_models.Location.AIP_RECOVERY,
            "relative_path": os.path.join(
                "var", "archivematica", "storage_service", "recover"),
            "description": "Default AIP recovery",
            "default_setting": "default_recovery",
            "create_dirs": True,
        },
    ]
    for loc_info in default_locations:
        try:
            new_loc, created = locations_models.Location.active.get_or_create(
                purpose=loc_info["purpose"],
                defaults={
                    "space": space,
                    "relative_path": loc_info["relative_path"],
                    "description": loc_info["description"],
                },
            )
            if created:
                LOGGER.info("Created default %s Location %s",
                            loc_info["purpose"], new_loc)
        except locations_models.Location.MultipleObjectsReturned:
            continue

        if created and loc_info.get("create_dirs"):
            LOGGER.info("Creating %s Location %s",
                        loc_info["purpose"], new_loc)
            try:
                os.mkdir(new_loc.full_path)
                # Hack for extra recovery dir
                if loc_info["purpose"] == locations_models.Location.AIP_RECOVERY:
                    os.mkdir(os.path.join(new_loc.full_path, "backup"))
            except OSError as e:
                if e.errno != errno.EEXIST:
                    LOGGER.error(
                        "%s location %s not accessible.",
                        loc_info["purpose"],
                        new_loc.full_path,
                    )

        if (loc_info["default_setting"] and
                utils.get_setting(loc_info["default_setting"]) is None):
            utils.set_setting(loc_info["default_setting"], [new_loc.uuid])
            LOGGER.info("Set %s as %s", new_loc, loc_info["default_setting"])
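# --- Hedged example (not part of the original module) ---
# One plausible call site: running the bootstrap from a Django management
# command. The command name and wiring are assumptions; the function itself is
# safe to re-run because it uses get_or_create throughout. Assumes
# populate_default_locations is importable where this command lives.
from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = "Create the default local filesystem Space and Locations."

    def handle(self, *args, **options):
        populate_default_locations()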