Example #1
def test_publish_no_s3_bucket_configured(root_doc):
    """Test the publish task when no S3 bucket is configured."""
    log_mock = mock.Mock()
    doc_pks = [root_doc.pk]
    publish(doc_pks, log=log_mock)
    log_mock.info.assert_called_once_with(
        'Skipping publish of {!r}: no S3 bucket configured'.format(doc_pks))
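This test only passes if publish() bails out before touching S3 when no bucket is configured. That guard is not shown on this page; the following is a minimal sketch of the shape it implies, where the MDN_API_S3_BUCKET_NAME setting name and the Celery wiring are assumptions, not the actual kuma source:

# Minimal sketch of the guard exercised above; the setting name is a
# placeholder and the task decorator wiring is assumed.
from celery import shared_task
from django.conf import settings


@shared_task
def publish(doc_pks, log=None, completion_message=None):
    if not getattr(settings, 'MDN_API_S3_BUCKET_NAME', None):
        if log:
            log.info(
                'Skipping publish of {!r}: no S3 bucket configured'.format(
                    doc_pks))
        return
    # ...the real task uploads each document to the bucket here...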
Example #2
def handle(self, *args, **options):
    Logger = namedtuple('Logger', 'info, error')
    log = Logger(info=self.stdout.write, error=self.stderr.write)
    if options['all'] or options['locale']:
        filters = {}
        if options['locale'] and not options['all']:
            locale = options['locale']
            log.info(
                'Publishing all documents in locale {}'.format(locale))
            filters.update(locale=locale)
        else:
            locale = None
            log.info('Publishing all documents')
        chunk_size = max(options['chunk_size'], 1)
        docs = Document.objects.filter(**filters)
        doc_pks = docs.values_list('id', flat=True)
        num_docs = len(doc_pks)
        num_tasks = int(ceil(num_docs / float(chunk_size)))
        log.info('...found {} documents.'.format(num_docs))
        # Let's publish the documents in a group of chunks, where the
        # tasks in the group can be run in parallel.
        tasks = []
        for i, chunk in enumerate(chunked(doc_pks, chunk_size)):
            message = 'Published chunk #{} of {}'.format(i + 1, num_tasks)
            tasks.append(publish.si(chunk, completion_message=message))
        if num_tasks == 1:
            msg = ('Launching a single task handling '
                   'all {} documents.'.format(num_docs))
        else:
            msg = ('Launching {} parallelizable tasks, each handling '
                   'at most {} documents.'.format(num_tasks, chunk_size))
        log.info(msg)
        group(*tasks).apply_async()
    else:
        paths = options['paths']
        if not paths:
            raise CommandError(
                'Need at least one document path to publish')
        doc_pks = []
        get_doc_pk = Document.objects.values_list('id', flat=True).get
        for path in paths:
            if path.startswith('/'):
                path = path[1:]
            locale, sep, slug = path.partition('/')
            head, sep, tail = slug.partition('/')
            if head == 'docs':
                slug = tail
            try:
                doc_pk = get_doc_pk(locale=locale, slug=slug)
            except Document.DoesNotExist:
                msg = 'Document with locale={} and slug={} does not exist'
                log.error(msg.format(locale, slug))
            else:
                doc_pks.append(doc_pk)
        publish(doc_pks, log=log)
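The command above relies on a chunked() helper to split the queryset's primary keys into task-sized batches. Its implementation is not reproduced here; a plausible stand-in, consistent with how it is called:

def chunked(iterable, chunk_size):
    """Yield successive lists of at most chunk_size items.

    A guess at the helper used above; the real kuma utility may differ.
    """
    items = list(iterable)
    for start in range(0, len(items), chunk_size):
        yield items[start:start + chunk_size]

With chunk_size=1000 and 2500 documents this yields three chunks, matching num_tasks = int(ceil(2500 / 1000.0)) = 3.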
Example #3
def test_publish_multiple(get_s3_bucket_mock, root_doc, redirect_doc,
                          redirect_to_home, trans_doc):
    """
    Test the publish task for multiple documents of various kinds, including
    standard documents and redirects.
    """
    trans_doc.delete()
    log_mock = mock.Mock()
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    publish(
        [trans_doc.pk, root_doc.pk, redirect_doc.pk, redirect_to_home.pk],
        log=log_mock,
        completion_message="Done!",
    )
    s3_bucket_mock.put_object.assert_has_calls([
        mock.call(
            ACL="public-read",
            Key=get_s3_key(root_doc),
            Body=json.dumps(document_api_data(root_doc)),
            ContentType="application/json",
            ContentLanguage=root_doc.locale,
        ),
        mock.call(
            ACL="public-read",
            Key=get_s3_key(redirect_doc),
            WebsiteRedirectLocation=get_s3_key(
                root_doc,
                prefix_with_forward_slash=True,
                suffix_file_extension=False,
            ),
            ContentType="application/json",
            ContentLanguage=redirect_doc.locale,
            Body=json.dumps(
                document_api_data(
                    redirect_url=get_content_based_redirect(redirect_doc)[0])),
        ),
        mock.call(
            ACL="public-read",
            Key=get_s3_key(redirect_to_home),
            Body=json.dumps(document_api_data(redirect_url="/en-US/")),
            ContentType="application/json",
            ContentLanguage=redirect_to_home.locale,
        ),
    ])
    log_mock.error.assert_called_once_with(
        "Document with pk={} does not exist".format(trans_doc.pk))
    log_mock.info.assert_has_calls([
        mock.call("Published S3 Object #1"),
        mock.call("Published S3 Object #2"),
        mock.call("Published S3 Object #3"),
        mock.call("Done!"),
    ])
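Several tests on this page call get_mocked_s3_bucket(), a fixture helper that is not reproduced here. Something as small as the following would satisfy the assertions; the real fixture may configure more behaviour:

from unittest import mock


def get_mocked_s3_bucket():
    # put_object is auto-created by Mock; the assertions above only
    # inspect the calls it records.
    return mock.Mock()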
Example #4
def test_publish_standard(get_s3_bucket_mock, root_doc):
    """Test the publish task for a standard (non-redirect) document."""
    log_mock = mock.Mock()
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    publish.get_logger = mock.Mock(return_value=log_mock)
    publish([root_doc.pk])
    s3_bucket_mock.put_object.assert_called_once_with(
        ACL='public-read',
        Key=get_s3_key(root_doc),
        Body=json.dumps(document_api_data(root_doc, ensure_contributors=True)),
        ContentType='application/json',
        ContentLanguage=root_doc.locale
    )
    log_mock.info.assert_called_once_with('Published S3 Object #1')
Example #5
def test_publish_redirect_to_home(get_s3_bucket_mock, redirect_to_home):
    """
    Test the publish task for a document that redirects to a URL outside the
    S3 bucket, in this case the home page.
    """
    log_mock = mock.Mock()
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    publish([redirect_to_home.pk], log=log_mock)
    s3_bucket_mock.put_object.assert_called_once_with(
        ACL='public-read',
        Key=get_s3_key(redirect_to_home),
        Body=json.dumps(document_api_data(redirect_url='/en-US/')),
        ContentType='application/json',
        ContentLanguage=redirect_to_home.locale)
    log_mock.info.assert_called_once_with('Published S3 Object #1')
Example #6
def test_publish_redirect(get_s3_bucket_mock, root_doc, redirect_doc):
    """
    Test the publish task for a document that redirects to another document
    within the S3 bucket.
    """
    log_mock = mock.Mock()
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    publish([redirect_doc.pk], log=log_mock)
    s3_bucket_mock.put_object.assert_called_once_with(
        ACL='public-read',
        Key=get_s3_key(redirect_doc),
        WebsiteRedirectLocation=get_s3_key(root_doc, for_redirect=True),
        ContentType='application/json',
        ContentLanguage=redirect_doc.locale
    )
    log_mock.info.assert_called_once_with('Published S3 Object #1')
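Note that the snapshots on this page call get_s3_key() with two different flag sets: this example and Example #9 pass for_redirect=True, while the newer Examples #3 and #10 pass prefix_with_forward_slash and suffix_file_extension. A hedged sketch of the newer signature, where the key prefix and extension are guesses and only the keyword flags come from the calls shown here:

def get_s3_key(doc, prefix_with_forward_slash=False,
               suffix_file_extension=True):
    # Illustrative only: the 'api/v1/doc' prefix and '.json' suffix are
    # assumptions, not taken from the kuma source.
    key = 'api/v1/doc/{}/{}'.format(doc.locale, doc.slug)
    if suffix_file_extension:
        key += '.json'
    if prefix_with_forward_slash:
        key = '/' + key
    return key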
Example #7
def test_publish_redirect_to_other(get_s3_bucket_mock, redirect_to_macros):
    """
    Test the publish task for a document that redirects to a URL outside the
    S3 bucket, in this case something other than the home page.
    """
    log_mock = mock.Mock()
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    publish([redirect_to_macros.pk], log=log_mock)
    s3_bucket_mock.put_object.assert_called_once_with(
        ACL='public-read',
        Key=get_s3_key(redirect_to_macros),
        Body=json.dumps(
            document_api_data(redirect_url=absolutify(
                '/en-US/dashboards/macros', for_wiki_site=True))),
        ContentType='application/json',
        ContentLanguage=redirect_to_macros.locale)
    log_mock.info.assert_called_once_with('Published S3 Object #1')
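Taken together, the redirect tests imply a three-way branch when publish() builds the put_object arguments: standard documents get a JSON body, in-bucket redirects get WebsiteRedirectLocation (plus, in the newer snapshots, a JSON body as well), and redirects leaving the bucket get only a JSON body carrying redirect_url. A sketch of that branch, not the kuma source; the tuple shape returned by get_content_based_redirect is an assumption:

import json

def build_put_object_kwargs(doc):
    # document_api_data, get_s3_key and get_content_based_redirect are
    # the helpers used throughout these examples; only their names are
    # taken from the source.
    kwargs = {
        'ACL': 'public-read',
        'Key': get_s3_key(doc),
        'ContentType': 'application/json',
        'ContentLanguage': doc.locale,
    }
    redirect = get_content_based_redirect(doc)
    if redirect is None:
        kwargs['Body'] = json.dumps(document_api_data(doc))
    else:
        redirect_url, in_bucket = redirect  # shape assumed
        if in_bucket:
            kwargs['WebsiteRedirectLocation'] = redirect_url
        else:
            kwargs['Body'] = json.dumps(
                document_api_data(redirect_url=redirect_url))
    return kwargs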
Example #8
def test_publish_standard(get_s3_bucket_mock, root_doc, invalidate_cdn_cache):
    """Test the publish task for a standard (non-redirect) document."""
    log_mock = mock.Mock()
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    publish.get_logger = mock.Mock(return_value=log_mock)
    with mock.patch('kuma.api.tasks.request_cdn_cache_invalidation') as mocked:
        publish([root_doc.pk], invalidate_cdn_cache=invalidate_cdn_cache)
        if invalidate_cdn_cache:
            mocked.delay.assert_called_once_with([(root_doc.locale,
                                                   root_doc.slug)])
        else:
            mocked.delay.assert_not_called()
    s3_bucket_mock.put_object.assert_called_once_with(
        ACL='public-read',
        Key=get_s3_key(root_doc),
        Body=json.dumps(document_api_data(root_doc)),
        ContentType='application/json',
        ContentLanguage=root_doc.locale)
    log_mock.info.assert_called_once_with('Published S3 Object #1')
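The parametrized invalidate_cdn_cache argument implies publish() ends by conditionally enqueueing the request_cdn_cache_invalidation task with (locale, slug) pairs, which is exactly what the test patches and asserts. A minimal sketch of that tail, with everything except the task name and its list-of-pairs argument assumed:

def maybe_invalidate_cdn(published_docs, invalidate_cdn_cache=True):
    # Assumed glue; only request_cdn_cache_invalidation.delay and its
    # argument shape are visible in the test above.
    pairs = [(doc.locale, doc.slug) for doc in published_docs]
    if invalidate_cdn_cache and pairs:
        request_cdn_cache_invalidation.delay(pairs)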
Example #9
def test_publish_multiple(get_s3_bucket_mock, root_doc, redirect_doc,
                          redirect_to_home, trans_doc):
    """
    Test the publish task for multiple documents of various kinds, including
    standard documents and redirects.
    """
    trans_doc.delete()
    log_mock = mock.Mock()
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    publish([trans_doc.pk, root_doc.pk, redirect_doc.pk, redirect_to_home.pk],
            log=log_mock, completion_message='Done!')
    s3_bucket_mock.put_object.assert_has_calls([
        mock.call(
            ACL='public-read',
            Key=get_s3_key(root_doc),
            Body=json.dumps(
                document_api_data(root_doc, ensure_contributors=True)),
            ContentType='application/json',
            ContentLanguage=root_doc.locale
        ),
        mock.call(
            ACL='public-read',
            Key=get_s3_key(redirect_doc),
            WebsiteRedirectLocation=get_s3_key(root_doc, for_redirect=True),
            ContentType='application/json',
            ContentLanguage=redirect_doc.locale
        ),
        mock.call(
            ACL='public-read',
            Key=get_s3_key(redirect_to_home),
            Body=json.dumps(document_api_data(redirect_url='/en-US/')),
            ContentType='application/json',
            ContentLanguage=redirect_to_home.locale
        ),
    ])
    log_mock.error.assert_called_once_with(
        'Document with pk={} does not exist'.format(trans_doc.pk))
    log_mock.info.assert_has_calls([
        mock.call('Published S3 Object #1'),
        mock.call('Published S3 Object #2'),
        mock.call('Published S3 Object #3'),
        mock.call('Done!'),
    ])
Example #10
def test_publish_redirect(get_s3_bucket_mock, root_doc, redirect_doc):
    """
    Test the publish task for a document that redirects to another document
    within the S3 bucket.
    """
    log_mock = mock.Mock()
    get_s3_bucket_mock.return_value = s3_bucket_mock = get_mocked_s3_bucket()
    publish([redirect_doc.pk], log=log_mock)

    s3_bucket_mock.put_object.assert_called_once_with(
        ACL='public-read',
        Key=get_s3_key(redirect_doc),
        WebsiteRedirectLocation=get_s3_key(
            root_doc,
            prefix_with_forward_slash=True,
            suffix_file_extension=False),
        ContentType='application/json',
        ContentLanguage=redirect_doc.locale,
        Body=json.dumps(document_api_data(
            None, redirect_url=get_content_based_redirect(redirect_doc)[0]))
    )
    log_mock.info.assert_called_once_with('Published S3 Object #1')
Example #11
def handle(self, *args, **options):
    Logger = namedtuple("Logger", "info, error")
    log = Logger(info=self.stdout.write, error=self.stderr.write)
    if options["all"] or options["locale"]:
        if options["locale"] and options["all"]:
            raise CommandError(
                "Specifying --locale with --all is the same as --all"
            )
        filters = {}
        if options["locale"]:
            locale = options["locale"]
            log.info("Publishing all documents in locale {}".format(locale))
            filters.update(locale=locale)
        else:
            log.info("Publishing all documents")
        chunk_size = max(options["chunk_size"], 1)
        docs = Document.objects.filter(**filters)
        doc_pks = docs.values_list("id", flat=True)
        num_docs = len(doc_pks)
        num_tasks = int(ceil(num_docs / float(chunk_size)))
        log.info("...found {} documents.".format(num_docs))
        # Let's publish the documents in a group of chunks, where the
        # tasks in the group can be run in parallel.
        tasks = []
        for i, chunk in enumerate(chunked(doc_pks, chunk_size)):
            message = "Published chunk #{} of {}".format(i + 1, num_tasks)
            tasks.append(
                publish.si(
                    chunk, completion_message=message, invalidate_cdn_cache=False
                )
            )
        if num_tasks == 1:
            msg = "Launching a single task handling all {} documents.".format(
                num_docs
            )
        else:
            msg = (
                "Launching {} parallelizable tasks, each handling "
                "at most {} documents.".format(num_tasks, chunk_size)
            )
        log.info(msg)
        group(*tasks).apply_async()
    else:
        paths = options["paths"]
        if not paths:
            raise CommandError("Need at least one document path to publish")
        doc_pks = []
        get_doc_pk = Document.objects.values_list("id", flat=True).get
        for path in paths:
            if path.startswith("/"):
                path = path[1:]
            locale, sep, slug = path.partition("/")
            head, sep, tail = slug.partition("/")
            if head == "docs":
                slug = tail
            try:
                doc_pk = get_doc_pk(locale=locale, slug=slug)
            except Document.DoesNotExist:
                msg = "Document with locale={} and slug={} does not exist"
                log.error(msg.format(locale, slug))
            else:
                doc_pks.append(doc_pk)
        publish(
            doc_pks,
            log=log,
            invalidate_cdn_cache=(not options["skip_cdn_invalidation"]),
        )
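For completeness, a command like this can be driven from tests or scripts through Django's call_command. The command name 'publish' and the flag spellings below are inferred from the options dict above and may not match the actual module:

from django.core.management import call_command

# Publish two documents by path (assumed command name and flags).
call_command('publish', '/en-US/docs/Web/HTML', '/fr/docs/Web/CSS')
# Publish everything in chunks of 1000, with per-task CDN invalidation off.
call_command('publish', '--all', '--chunk-size', '1000')
# Publish one locale, skipping the final CDN invalidation.
call_command('publish', '--locale', 'fr', '--skip-cdn-invalidation')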