def test_handle_archive_fail_copy(self, mock_send_mail):
    """A copy (network) failure should mail both the user and support desk."""
    archiver_utils.handle_archive_fail(
        ARCHIVER_NETWORK_ERROR,
        self.src,
        self.dst,
        self.user,
        {}
    )
    user_mail_kwargs = {
        'to_addr': self.user.username,
        'user': self.user,
        'src': self.src,
        'mail': mails.ARCHIVE_COPY_ERROR_USER,
        'results': {},
        'mimetype': 'html',
    }
    desk_mail_kwargs = {
        'to_addr': settings.SUPPORT_EMAIL,
        'user': self.user,
        'src': self.src,
        'mail': mails.ARCHIVE_COPY_ERROR_DESK,
        'results': {},
    }
    expected_calls = [call(**user_mail_kwargs), call(**desk_mail_kwargs)]
    mock_send_mail.assert_has_calls(expected_calls, any_order=True)
def test_handle_archive_fail_size(self, mock_send_mail):
    """A size-exceeded failure should mail both the user and support desk."""
    archiver_utils.handle_archive_fail(
        ARCHIVER_SIZE_EXCEEDED,
        self.src,
        self.dst,
        self.user,
        {}
    )
    user_mail_kwargs = {
        'to_addr': self.user.username,
        'user': self.user,
        'src': self.src,
        'mail': mails.ARCHIVE_SIZE_EXCEEDED_USER,
        'mimetype': 'html',
    }
    desk_mail_kwargs = {
        'to_addr': settings.SUPPORT_EMAIL,
        'user': self.user,
        'src': self.src,
        'mail': mails.ARCHIVE_SIZE_EXCEEDED_DESK,
        'stat_result': {},
    }
    expected_calls = [call(**user_mail_kwargs), call(**desk_mail_kwargs)]
    mock_send_mail.assert_has_calls(expected_calls, any_order=True)
def test_handle_archive_fail_size(self, mock_send_mail):
    """A size-exceeded failure should mail both the user and support desk."""
    archiver_utils.handle_archive_fail(
        ARCHIVER_SIZE_EXCEEDED,
        self.src,
        self.dst,
        self.user,
        {}
    )
    user_mail_kwargs = {
        'to_addr': self.user.username,
        'user': self.user,
        'src': self.src,
        'mail': mails.ARCHIVE_SIZE_EXCEEDED_USER,
        'can_change_preferences': False,
        'mimetype': 'html',
    }
    desk_mail_kwargs = {
        'to_addr': settings.SUPPORT_EMAIL,
        'user': self.user,
        'src': self.src,
        'mail': mails.ARCHIVE_SIZE_EXCEEDED_DESK,
        'stat_result': {},
    }
    expected_calls = [call(**user_mail_kwargs), call(**desk_mail_kwargs)]
    mock_send_mail.assert_has_calls(expected_calls, any_order=True)
def archive_callback(dst):
    """Blinker listener for updates to the archive task.

    When the tree of ArchiveJob instances is complete, proceed to send
    success or failure mails.

    :param dst: registration Node
    """
    root = dst.root
    job = root.archive_job
    # Act only once, and only after every job in the tree has finished.
    if not job.archive_tree_finished() or job.sent:
        return
    job.sent = True
    job.save()
    if not job.success:
        archiver_utils.handle_archive_fail(
            ARCHIVER_UNCAUGHT_ERROR,
            root.registered_from,
            root,
            root.registered_user,
            dst.archive_job.target_addons,
        )
        return
    archiver_utils.archive_success(root, root.registered_user)
    if dst.pending_embargo:
        for contributor in root.active_contributors():
            project_utils.send_embargo_email(
                root,
                contributor,
                urls=job.meta['embargo_urls'].get(contributor._id),
            )
    else:
        archiver_utils.send_archiver_success_mail(root)
    for node in node_and_primary_descendants(root):
        node.update_search()  # update search if public
def test_handle_archive_fail_copy(self, mock_send_mail):
    """A copy (network) failure should mail both the user and support desk."""
    archiver_utils.handle_archive_fail(
        ARCHIVER_NETWORK_ERROR,
        self.src,
        self.dst,
        self.user,
        {}
    )
    user_mail_kwargs = {
        'to_addr': self.user.username,
        'user': self.user,
        'src': self.src,
        'mail': mails.ARCHIVE_COPY_ERROR_USER,
        'results': {},
        'can_change_preferences': False,
        'mimetype': 'html',
    }
    desk_mail_kwargs = {
        'to_addr': settings.SUPPORT_EMAIL,
        'user': self.user,
        'src': self.src,
        'mail': mails.ARCHIVE_COPY_ERROR_DESK,
        'results': {},
    }
    expected_calls = [call(**user_mail_kwargs), call(**desk_mail_kwargs)]
    mock_send_mail.assert_has_calls(expected_calls, any_order=True)
def archive_fail(dst, errors):
    """Record an archive failure and notify exactly once per job tree.

    :param dst: registration Node whose archive failed
    :param errors: error payload forwarded to the failure handler
    """
    reason = dst.archive_status
    job = dst.root.archive_job
    if job.sent:
        # Failure mails were already dispatched for this tree.
        return
    job.sent = True
    job.save()
    archiver_utils.handle_archive_fail(
        reason,
        dst.root.registered_from,
        dst.root,
        dst.root.registered_user,
        errors,
    )
def test_handle_archive_fail(self, mock_send_mail):
    """A failure sends two mails and marks the registration deleted."""
    archiver_utils.handle_archive_fail(
        ARCHIVER_NETWORK_ERROR,
        self.src,
        self.dst,
        self.user,
        {}
    )
    expected_mail_count = 2
    assert_equal(mock_send_mail.call_count, expected_mail_count)
    assert_true(self.dst.is_deleted)
def remove_failed_registrations(dry_run=True):
    """Mark failed registrations as archiver failures and notify.

    :param dry_run: when True (default), only query — perform no writes
    """
    init_app(set_backends=True, routes=False)
    cleaned = 0
    failed = find_failed_registrations()
    if not dry_run:
        for registration in failed:
            logging.info('Cleaning {}'.format(registration))
            if not registration.registered_from:
                logging.info('Node {0} had registered_from == None'.format(
                    registration._id))
                continue
            if not registration.archive_job:
                # Be extra sure not to delete legacy registrations
                continue
            job = registration.archive_job
            job.status = ARCHIVER_FAILURE
            job.sent = True
            job.save()
            handle_archive_fail(
                ARCHIVER_UNCAUGHT_ERROR,
                registration.registered_from,
                registration,
                registration.creator,
                job.target_info(),
            )
            cleaned += 1
    logging.info('Cleaned {} registrations'.format(cleaned))
def archive_callback(dst):
    """Blinker listener for updates to the archive task.

    When the tree of ArchiveJob instances is complete, proceed to send
    success or failure mails.

    :param dst: registration Node
    """
    root = dst.root
    job = root.archive_job
    # Wait for the whole job tree, and act at most once.
    if not job.archive_tree_finished():
        return
    if job.sent:
        return
    if not job.success:
        archiver_utils.handle_archive_fail(
            ARCHIVER_UNCAUGHT_ERROR,
            root.registered_from,
            root,
            root.registered_user,
            dst.archive_job.target_addons
        )
        return
    # Prevent circular import with app.py
    from website.archiver import tasks
    tasks.archive_success.delay(dst_pk=root._id, job_pk=job._id)
def remove_failed_registrations(dry_run=True):
    """Mark failed registrations as archiver failures and notify.

    :param dry_run: when True (default), only query — perform no writes
    """
    init_app(set_backends=True, routes=False)
    cleaned = 0
    failed = find_failed_registrations()
    if not dry_run:
        for registration in failed:
            logging.info('Cleaning {}'.format(registration))
            if not registration.registered_from:
                logging.info('Node {0} had registered_from == None'.format(registration._id))
                continue
            if not registration.archive_job:
                # Be extra sure not to delete legacy registrations
                continue
            job = registration.archive_job
            job.status = ARCHIVER_FAILURE
            job.sent = True
            job.save()
            handle_archive_fail(
                ARCHIVER_UNCAUGHT_ERROR,
                registration.registered_from,
                registration,
                registration.creator,
                job.target_info(),
            )
            cleaned += 1
    logging.info('Cleaned {} registrations'.format(cleaned))
def archive_callback(dst):
    """Blinker listener for updates to the archive task.

    When the tree of ArchiveJob instances is complete, proceed to send
    success or failure mails.

    :param dst: registration Node
    """
    root = dst.root
    job = root.archive_job
    # Act only once, and only after every job in the tree has finished.
    if not job.archive_tree_finished() or job.sent:
        return
    job.sent = True
    job.save()
    if job.success:
        dst.sanction.ask(root.active_contributors())
    else:
        archiver_utils.handle_archive_fail(
            ARCHIVER_UNCAUGHT_ERROR,
            root.registered_from,
            root,
            root.registered_user,
            dst.archive_job.target_addons,
        )
def archive_callback(dst):
    """Blinker listener for updates to the archive task.

    When the tree of ArchiveJob instances is complete, proceed to send
    success or failure mails.

    :param dst: registration Node
    """
    root = dst.root
    job = root.archive_job
    # Wait for the whole job tree, and act at most once.
    if not job.archive_tree_finished():
        return
    if job.sent:
        return
    if not job.success:
        archiver_utils.handle_archive_fail(
            ARCHIVER_UNCAUGHT_ERROR,
            root.registered_from,
            root,
            root.registered_user,
            dst.archive_job.target_addons,
        )
        return
    # Prevent circular import with app.py
    from website.archiver import tasks
    tasks.archive_success.delay(dst_pk=root._id, job_pk=job._id)