Code Example #1
def test_unpublish_multiple(get_s3_bucket_mock, root_doc, redirect_doc,
                            redirect_to_home):
    """
    Test the unpublish task for multiple documents of various kinds, including
    standard documents and redirects.
    """
    log_mock = mock.Mock()
    docs = (root_doc, redirect_doc, redirect_to_home)
    doc_locale_slug_pairs = [(doc.locale, doc.slug) for doc in docs]
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    unpublish(doc_locale_slug_pairs, log=log_mock, completion_message='Done!')
    s3_keys = tuple(get_s3_key(doc) for doc in docs)
    s3_bucket_mock.delete_objects.assert_called_once_with(
        Delete={
            'Objects': [
                {
                    'Key': key
                }
                for key in s3_keys
            ]
        }
    )
    log_mock.error.assert_called_once_with(
        'Unable to unpublish {}: (InternalError) Some error'.format(s3_keys[0])
    )
    log_mock.info.assert_has_calls(
        [mock.call('Unpublished {}'.format(key)) for key in s3_keys[1:]] +
        [mock.call('Done!')]
    )
Code Example #2
def test_unpublish_multiple_chunked(get_s3_bucket_mock, root_doc, redirect_doc,
                                    redirect_to_home):
    """
    Test the unpublish task for multiple documents where the deletes are
    broken up into chunks.
    """
    log_mock = mock.Mock()
    docs = (root_doc, redirect_doc, redirect_to_home)
    doc_locale_slug_pairs = [(doc.locale, doc.slug) for doc in docs]
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    unpublish(doc_locale_slug_pairs, log=log_mock, completion_message='Done!')
    s3_keys = tuple(get_s3_key(doc) for doc in docs)
    s3_bucket_mock.delete_objects.assert_has_calls([
        mock.call(Delete={'Objects': [{
            'Key': key
        } for key in s3_keys[:2]]}),
        mock.call(Delete={'Objects': [{
            'Key': key
        } for key in s3_keys[2:]]})
    ])
    log_mock.error.assert_has_calls([
        mock.call(
            'Unable to unpublish {}: (InternalError) Some error'.format(key))
        for key in s3_keys[:2]
    ])
    log_mock.info.assert_has_calls(
        [mock.call('Unpublished {}'.format(s3_keys[-1])),
         mock.call('Done!')])
Code Example #3
def test_unpublish_no_s3_bucket_configured(root_doc):
    """Test the unpublish task when no S3 bucket is configured."""
    log_mock = mock.Mock()
    doc_locale_slug_pairs = [(root_doc.locale, root_doc.slug)]
    unpublish(doc_locale_slug_pairs, log=log_mock)
    log_mock.info.assert_called_once_with(
        'Skipping unpublish of {!r}: no S3 bucket configured'.format(
            doc_locale_slug_pairs))
Code Example #4
File: test_tasks.py Project: mozilla/kuma
def test_unpublish_no_s3_bucket_configured(root_doc):
    """Test the unpublish task when no S3 bucket is configured."""
    log_mock = mock.Mock()
    doc_locale_slug_pairs = [(root_doc.locale, root_doc.slug)]
    unpublish(doc_locale_slug_pairs, log=log_mock)
    log_mock.info.assert_called_once_with(
        'Skipping unpublish of {!r}: no S3 bucket configured'.format(
            doc_locale_slug_pairs))
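Note that in these two tests get_s3_bucket is not patched, so the real function runs and, with no bucket configured in the test settings, presumably returns a falsy value. The guard being exercised likely looks roughly like the sketch below; only the log message is taken verbatim from the test, the rest is an assumption about kuma's internals rather than its actual code.

def unpublish_sketch(doc_locale_slug_pairs, log):
    # Hypothetical reconstruction of the guard, not kuma's real implementation.
    s3_bucket = get_s3_bucket()
    if not s3_bucket:
        log.info('Skipping unpublish of {!r}: no S3 bucket configured'.format(
            doc_locale_slug_pairs))
        return
    # ...otherwise build the S3 keys and call s3_bucket.delete_objects(...)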
Code Example #5
File: unpublish.py Project: suhailsinghbains/kuma
def handle(self, *args, **options):
    Logger = namedtuple('Logger', 'info, error')
    log = Logger(info=self.stdout.write, error=self.stderr.write)
    paths = options['paths']
    if not paths:
        raise CommandError('Need at least one document path to remove')
    doc_locale_slug_pairs = []
    for path in paths:
        if path.startswith('/'):
            path = path[1:]
        locale, sep, slug = path.partition('/')
        head, sep, tail = slug.partition('/')
        if head == 'docs':
            slug = tail
        doc_locale_slug_pairs.append((locale, slug))
    unpublish(doc_locale_slug_pairs, log=log)
Code Example #6
File: unpublish.py Project: w0lramD/kuma
def handle(self, *args, **options):
    Logger = namedtuple("Logger", "info, error")
    log = Logger(info=self.stdout.write, error=self.stderr.write)
    paths = options["paths"]
    if not paths:
        raise CommandError("Need at least one document path to remove")
    doc_locale_slug_pairs = []
    for path in paths:
        if path.startswith("/"):
            path = path[1:]
        locale, sep, slug = path.partition("/")
        head, sep, tail = slug.partition("/")
        if head == "docs":
            slug = tail
        doc_locale_slug_pairs.append((locale, slug))
    unpublish(doc_locale_slug_pairs, log=log)
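The two command variants above differ only in quoting style; the path handling is identical. To make that parsing concrete, here is a stand-alone walk-through with made-up paths (the logic is lifted from handle() above, only extracted into a helper for illustration).

def parse_path(path):
    # Same parsing as in handle() above, shown in isolation.
    if path.startswith("/"):
        path = path[1:]
    locale, sep, slug = path.partition("/")
    head, sep, tail = slug.partition("/")
    if head == "docs":
        slug = tail
    return locale, slug

assert parse_path("/en-US/docs/Web/HTML") == ("en-US", "Web/HTML")
assert parse_path("fr/docs/Glossaire") == ("fr", "Glossaire")
assert parse_path("en-US/Sandbox") == ("en-US", "Sandbox")  # no "docs" segment to strip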
Code Example #7
File: test_tasks.py Project: mozilla/kuma
def test_unpublish_multiple_chunked(get_s3_bucket_mock, root_doc, redirect_doc,
                                    redirect_to_home):
    """
    Test the unpublish task for multiple documents where the deletes are
    broken up into chunks.
    """
    log_mock = mock.Mock()
    docs = (root_doc, redirect_doc, redirect_to_home)
    doc_locale_slug_pairs = [(doc.locale, doc.slug) for doc in docs]
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    unpublish(doc_locale_slug_pairs, log=log_mock, completion_message='Done!')
    s3_keys = tuple(get_s3_key(doc) for doc in docs)
    s3_bucket_mock.delete_objects.assert_has_calls([
        mock.call(
            Delete={
                'Objects': [
                    {
                        'Key': key
                    }
                    for key in s3_keys[:2]
                ]
            }
        ),
        mock.call(
            Delete={
                'Objects': [
                    {
                        'Key': key
                    }
                    for key in s3_keys[2:]
                ]
            }
        )
    ])
    log_mock.error.assert_has_calls([
        mock.call(
            'Unable to unpublish {}: (InternalError) Some error'.format(key))
        for key in s3_keys[:2]
    ])
    log_mock.info.assert_has_calls([
        mock.call('Unpublished {}'.format(s3_keys[-1])),
        mock.call('Done!')
    ])
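The chunked test above asserts two delete_objects calls, one with two keys and one with the remaining key, so the test presumably runs with a small chunk size; S3's DeleteObjects API itself accepts up to 1000 keys per request. The underlying pattern is ordinary list chunking, sketched below with an illustrative chunk size and keys.

def chunked(items, chunk_size):
    """Yield successive fixed-size slices of a list."""
    for i in range(0, len(items), chunk_size):
        yield items[i:i + chunk_size]

s3_keys = ['key-a', 'key-b', 'key-c']  # illustrative keys
for chunk in chunked(s3_keys, 2):
    payload = {'Objects': [{'Key': key} for key in chunk]}
    # s3_bucket.delete_objects(Delete=payload) would be issued once per chunk
    print(payload)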
Code Example #8
File: test_tasks.py Project: mozilla/kuma
def test_unpublish(get_s3_bucket_mock, root_doc, case):
    """Test the unpublish task for a single document."""
    if case in ('deleted', 'purged'):
        root_doc.deleted = True
        root_doc.save()
        if case == 'purged':
            root_doc.purge()
    log_mock = mock.Mock()
    s3_bucket_mock = get_mocked_s3_bucket()
    get_s3_bucket_mock.return_value = s3_bucket_mock
    unpublish.get_logger = mock.Mock(return_value=log_mock)
    unpublish([(root_doc.locale, root_doc.slug)])
    s3_key = get_s3_key(root_doc)
    s3_bucket_mock.delete_objects.assert_called_once_with(
        Delete={
            'Objects': [
                {
                    'Key': s3_key
                }
            ]
        }
    )
    log_mock.info.assert_called_once_with('Unpublished {}'.format(s3_key))
Code Example #9
@pytest.mark.parametrize('case', ('un-deleted', 'deleted', 'purged'))
@pytest.mark.parametrize('invalidate_cdn_cache', (True, False))
@mock.patch('kuma.api.tasks.get_s3_bucket')
def test_unpublish(get_s3_bucket_mock, root_doc, invalidate_cdn_cache, case):
    """Test the unpublish task for a single document."""
    if case in ('deleted', 'purged'):
        root_doc.deleted = True
        root_doc.save()
        if case == 'purged':
            root_doc.purge()
    log_mock = mock.Mock()
    s3_bucket_mock = get_mocked_s3_bucket()
    get_s3_bucket_mock.return_value = s3_bucket_mock
    unpublish.get_logger = mock.Mock(return_value=log_mock)
    with mock.patch('kuma.api.tasks.request_cdn_cache_invalidation') as mocked:
        unpublish([(root_doc.locale, root_doc.slug)],
                  invalidate_cdn_cache=invalidate_cdn_cache)
        if invalidate_cdn_cache:
            mocked.delay.assert_called_once_with([(root_doc.locale,
                                                   root_doc.slug)])
        else:
            mocked.delay.assert_not_called()
    s3_key = get_s3_key(root_doc)
    s3_bucket_mock.delete_objects.assert_called_once_with(
        Delete={'Objects': [{
            'Key': s3_key
        }]})
    log_mock.info.assert_called_once_with('Unpublished {}'.format(s3_key))


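Taken together, these tests exercise the following call surface for the task. The argument names come from the calls above; the behaviour noted in the comments is my reading of the assertions, not kuma's documented API, and the values are illustrative.

from unittest import mock
from kuma.api.tasks import unpublish

pairs = [('en-US', 'Web/HTML')]   # illustrative (locale, slug) pairs to remove from S3
log = mock.Mock()                 # anything exposing .info/.error works as the log argument
unpublish(
    pairs,
    log=log,                      # optional; when omitted, the tests patch unpublish.get_logger
    completion_message='Done!',   # optional; logged via log.info after all deletes finish
    invalidate_cdn_cache=True,    # queues request_cdn_cache_invalidation.delay(pairs)
)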
Code Example #10

@pytest.mark.parametrize('case', ('un-deleted', 'deleted', 'purged'))
@mock.patch('kuma.api.tasks.get_s3_bucket')
def test_unpublish(get_s3_bucket_mock, root_doc, case):
    """Test the unpublish task for a single document."""
    if case in ('deleted', 'purged'):
        root_doc.deleted = True
        root_doc.save()
        if case == 'purged':
            root_doc.purge()
    log_mock = mock.Mock()
    s3_bucket_mock = get_mocked_s3_bucket()
    get_s3_bucket_mock.return_value = s3_bucket_mock
    unpublish.get_logger = mock.Mock(return_value=log_mock)
    unpublish([(root_doc.locale, root_doc.slug)])
    s3_key = get_s3_key(root_doc)
    s3_bucket_mock.delete_objects.assert_called_once_with(
        Delete={
            'Objects': [
                {
                    'Key': s3_key
                }
            ]
        }
    )
    log_mock.info.assert_called_once_with('Unpublished {}'.format(s3_key))

