def exportrun_delete_exports(sender, instance, *args, **kwargs):
    """
    Delete the associated export files and notifications when an ExportRun is deleted.

    :param sender: The model class sending the signal (unused).
    :param instance: The ExportRun being deleted; its uid names both the S3
        prefix and the on-disk download directory.
    """
    if getattr(settings, 'USE_S3', False):
        delete_from_s3(run_uid=str(instance.uid))
    run_dir = '{0}/{1}'.format(settings.EXPORT_DOWNLOAD_ROOT.rstrip('/'), str(instance.uid))
    try:
        # ignore_errors must NOT be True here: with ignore_errors=True rmtree
        # never raises OSError, which made the except branch below dead code
        # and logged a bogus "was deleted" message even on failure.
        shutil.rmtree(run_dir)
        logger.info("The directory {0} was deleted.".format(run_dir))
    except OSError:
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning("The directory {0} was already moved or doesn't exist.".format(run_dir))
    instance.delete_notifications()
def test_s3_delete(self, mock_get_s3_client):
    """
    Verify delete_from_s3 lists the objects under the run prefix and deletes
    the matching key from the configured bucket.
    """
    mock_client = MagicMock()
    mock_get_s3_client.return_value = mock_client
    expected_key = "run_uid/file.txt"
    url = "http://s3.url/{0}".format(expected_key)
    run_uid = 'run'
    # The stubbed listing must be configured BEFORE delete_from_s3 runs;
    # the original test set return_value after the call, so the code under
    # test only ever saw a bare MagicMock. The response shape mirrors the
    # real S3 list_objects payload: a capitalized 'Contents' key holding
    # dicts with a 'Key' entry (see the Stubber-based test in this file).
    mock_client.list_objects.return_value = {"Contents": [{"Key": expected_key}]}
    delete_from_s3(run_uid=run_uid, download_url=url)
    mock_client.delete_object.assert_called_once_with(Bucket='test-bucket', Key=expected_key)
def exporttaskresult_delete_exports(sender, instance, *args, **kwargs):
    """
    Delete associated files when deleting the FileProducingTaskResult.

    :param sender: The model class sending the signal (unused).
    :param instance: The FileProducingTaskResult being deleted; its
        download_url locates both the S3 object and the local file.
    """
    # The url should be constructed as [download context, run_uid, filename]
    if getattr(settings, 'USE_S3', False):
        delete_from_s3(download_url=instance.download_url)
    url_parts = instance.download_url.split('/')
    full_file_download_path = '/'.join([settings.EXPORT_DOWNLOAD_ROOT.rstrip('/'), url_parts[-2], url_parts[-1]])
    try:
        os.remove(full_file_download_path)
        # os.remove deletes a file — the original message misleadingly said
        # "directory".
        logger.info("The file {0} was deleted.".format(full_file_download_path))
    except OSError:
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning("The file {0} was already removed or does not exist.".format(full_file_download_path))
    instance.delete_notifications()
def exporttaskresult_delete_exports(sender, instance, *args, **kwargs):
    """
    Delete associated files when deleting the FileProducingTaskResult.

    :param sender: The model class sending the signal (unused).
    :param instance: The FileProducingTaskResult being deleted; its
        download_url locates both the S3 object and the local file.
    """
    # The url should be constructed as [download context, run_uid, filename]
    if getattr(settings, "USE_S3", False):
        delete_from_s3(download_url=instance.download_url)
    url_parts = instance.download_url.split("/")
    full_file_download_path = "/".join([settings.EXPORT_DOWNLOAD_ROOT.rstrip("/"), url_parts[-2], url_parts[-1]])
    try:
        os.remove(full_file_download_path)
        # os.remove deletes a file — the original message misleadingly said
        # "directory".
        logger.info("The file {0} was deleted.".format(full_file_download_path))
    except OSError:
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning("The file {0} was already removed or does not exist.".format(full_file_download_path))
    instance.delete_notifications()
def exportrun_delete_exports(sender, instance, *args, **kwargs):
    """
    Delete the associated export files and notifications when an ExportRun is deleted.

    :param sender: The model class sending the signal (unused).
    :param instance: The ExportRun being deleted; its uid names both the S3
        prefix and the on-disk download directory.
    """
    if getattr(settings, 'USE_S3', False):
        delete_from_s3(run_uid=str(instance.uid))
    run_dir = '{0}/{1}'.format(settings.EXPORT_DOWNLOAD_ROOT.rstrip('/'), str(instance.uid))
    try:
        # ignore_errors must NOT be True here: with ignore_errors=True rmtree
        # never raises OSError, which made the except branch below dead code
        # and logged a bogus "was deleted" message even on failure.
        shutil.rmtree(run_dir)
        logger.info("The directory {0} was deleted.".format(run_dir))
    except OSError:
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning(
            "The directory {0} was already moved or doesn't exist.".format(
                run_dir))
    instance.delete_notifications()
def exportrun_delete_exports(sender, instance, *args, **kwargs):
    """
    Delete the associated export files and notifications when an ExportRun is deleted.

    Also repoints the parent job's last_export_run at the next-most-recent run,
    or None when this was the only run.

    :param sender: The model class sending the signal (unused).
    :param instance: The ExportRun being deleted.
    """
    runs = instance.job.runs.all().order_by("-created_at")
    # runs[0] is presumably the run being deleted (most recent), so runs[1]
    # is its replacement. NOTE(review): this assumes the signal fires while
    # the instance is still in the queryset — TODO confirm.
    instance.job.last_export_run = runs[1] if len(runs) > 1 else None
    instance.job.save()
    if getattr(settings, "USE_S3", False):
        delete_from_s3(run_uid=str(instance.uid))
    run_dir = "{0}/{1}".format(settings.EXPORT_DOWNLOAD_ROOT.rstrip("/"), str(instance.uid))
    try:
        # ignore_errors must NOT be True here: with ignore_errors=True rmtree
        # never raises OSError, which made the except branch below dead code
        # and logged a bogus "was deleted" message even on failure.
        shutil.rmtree(run_dir)
        logger.info("The directory {0} was deleted.".format(run_dir))
    except OSError:
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning("The directory {0} was already moved or doesn't exist.".format(run_dir))
    instance.delete_notifications()
def test_s3_delete(self):
    """
    End-to-end upload-then-delete against a botocore Stubber.

    Registers the three responses the code under test is expected to request,
    IN ORDER: put_object (from upload_to_s3), then list_objects and
    delete_object (from delete_from_s3). Stubber is strictly sequential, so
    the registration order here is load-bearing.
    """
    client = get_s3_client()
    stubber = Stubber(client)
    stubber.activate()
    # Response 1: upload_to_s3 puts the file body under self._path.
    stubber.add_response('put_object', self._base_response, dict(Bucket=ANY, Key=self._path, Body='test'))
    # Canned list_objects payload mirroring a real S3 response: the object
    # appears under 'Contents' with its 'Key' set to the uploaded path.
    list_objects_response = {
        'IsTruncated': False,
        'Name': 'test-bucket',
        'MaxKeys': 1000,
        'Prefix': '',
        'Contents': [{
            u'LastModified': datetime.datetime(2016, 9, 23, 11, 17, 14),
            u'ETag': '"20d2cb13afb394301bbea0bcff19e12b"',
            u'StorageClass': 'STANDARD',
            u'Key': self._path,
            u'Owner': {
                u'DisplayName': 'test',
                u'ID': '31d89f79718dbd4435290740e6fa5e41cffafa7d9a3c323c85b525342e6341ae'
            },
            u'Size': 77824
        }],
        'EncodingType': 'url',
        'ResponseMetadata': {
            'RequestId': 'abc123',
            'HTTPStatusCode': 200,
            'HostId': 'abc123'
        },
        'Marker': ''
    }
    # Response 2: delete_from_s3 lists objects under the run-uid prefix.
    stubber.add_response('list_objects', list_objects_response, {
        'Bucket': ANY,
        'Prefix': self._uuid
    })
    # Response 3: delete_from_s3 removes the listed key.
    stubber.add_response('delete_object', self._base_response, {
        'Bucket': ANY,
        'Key': self._path
    })
    # Patch open() so upload_to_s3 reads 'test' instead of touching disk.
    with patch('eventkit_cloud.utils.s3.open', mock_open(read_data='test'), create=True) as mock_open_obj:
        upload_to_s3(self._uuid, self._filename, self._filename, client=client)
        delete_from_s3(self._uuid, client=client)