Example #1
def publish(base_url,
            site_prefix,
            bucket,
            federalist_config,
            aws_region,
            aws_access_key_id,
            aws_secret_access_key,
            dry_run=False):
    '''
    Publish the built site to S3.
    '''
    logger = get_logger('publish')

    logger.info('Publishing to S3')

    start_time = datetime.now()

    s3_client = boto3.client(service_name='s3',
                             aws_access_key_id=aws_access_key_id,
                             aws_secret_access_key=aws_secret_access_key,
                             region_name=aws_region)

    s3publisher.publish_to_s3(directory=str(SITE_BUILD_DIR_PATH),
                              base_url=base_url,
                              site_prefix=site_prefix,
                              bucket=bucket,
                              federalist_config=federalist_config,
                              s3_client=s3_client,
                              dry_run=dry_run)

    delta_string = delta_to_mins_secs(datetime.now() - start_time)
    logger.info(f'Total time to publish: {delta_string}')
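
The elapsed time is formatted with a delta_to_mins_secs helper that is not part of this excerpt. Below is a minimal sketch of one plausible implementation; the exact output format is an assumption, not taken from the original code.

from datetime import timedelta

def delta_to_mins_secs(delta: timedelta) -> str:
    # Hypothetical implementation: render a timedelta as 'Xm Ys' ('Ys' under a minute).
    total_seconds = int(delta.total_seconds())
    minutes, seconds = divmod(total_seconds, 60)
    return f'{minutes}m {seconds}s' if minutes else f'{seconds}s'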
Example #2

def publish(ctx, base_url, site_prefix, bucket, cache_control,
            aws_region, dry_run=False):
    '''
    Publish the built site to S3.

    Expects AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY to be
    in the environment.
    '''
    LOGGER.info('Publishing to S3')

    access_key_id = os.environ['AWS_ACCESS_KEY_ID']
    secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']

    start_time = datetime.now()

    publish_to_s3(
        directory=SITE_BUILD_DIR_PATH,
        base_url=base_url,
        site_prefix=site_prefix,
        bucket=bucket,
        cache_control=cache_control,
        aws_region=aws_region,
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        dry_run=dry_run
    )

    delta_string = delta_to_mins_secs(datetime.now() - start_time)
    LOGGER.info(f'Total time to publish: {delta_string}')
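
This variant reads the credentials straight from os.environ, so a missing variable surfaces as a bare KeyError. The sketch below is a hedged example of a pre-flight check that fails with a clearer message; the helper name is illustrative and not part of the original code.

import os

REQUIRED_ENV_VARS = ('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')

def check_publish_env():
    # Hypothetical guard: fail fast with a clear message instead of a bare KeyError.
    missing = [name for name in REQUIRED_ENV_VARS if name not in os.environ]
    if missing:
        raise RuntimeError('Missing environment variables: ' + ', '.join(missing))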
Example #3

def test_publish_to_s3(tmpdir, s3_client):
    # Use tmpdir to create a fake directory
    # full of directories and files to be published/deleted/updated
    test_dir = tmpdir.mkdir('test_dir')

    # make a subdirectory
    test_dir.mkdir('sub_dir')

    site_prefix = 'test_dir'

    filenames = ['index.html',
                 'boop.txt',
                 'sub_dir/index.html']

    _make_fake_files(test_dir, filenames)

    # Make a federalist.json file!
    repo_config = {
        'headers': [
            {'/index.html': {'cache-control': 'no-cache'}},
            {'/*.txt': {'cache-control': 'max-age=1000'}}
        ]
    }
    clone_dir = tmpdir.mkdir('clone_dir')
    federalist_json_file = clone_dir.join('federalist.json')
    with federalist_json_file.open('w') as json_file:
        json.dump(repo_config, json_file)

    publish_kwargs = {
        'directory': str(test_dir),
        'base_url': '/base_url',
        'site_prefix': site_prefix,
        'bucket': TEST_BUCKET,
        'cache_control': 'max-age=10',
        's3_client': s3_client,
        'clone_dir': str(clone_dir)
    }

    # Create mock for default 404 page request
    with requests_mock.mock() as m:
        m.get(('https://raw.githubusercontent.com'
               '/18F/federalist-404-page/master/'
               '404-federalist-client.html'),
              text='default 404 page')

        publish_to_s3(**publish_kwargs)

        results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)

        keys = [r['Key'] for r in results['Contents']]

        assert results['KeyCount'] == 6  # 3 site files, the 404.html & 2 redirect objects

        assert f'{site_prefix}/index.html' in keys
        assert f'{site_prefix}/boop.txt' in keys
        assert f'{site_prefix}/sub_dir' in keys
        assert f'{site_prefix}/sub_dir/index.html' in keys
        assert f'{site_prefix}/404.html' in keys
        assert f'{site_prefix}' in keys  # main redirect object

        # Check the cache control headers
        cache_control_checks = [
            ('index.html',  'no-cache'),
            ('boop.txt',    'max-age=1000'),
            ('404.html',    'max-age=60')
        ]
        for filename, expected in cache_control_checks:
            result = s3_client.get_object(
                        Bucket=TEST_BUCKET,
                        Key=f'{site_prefix}/{filename}')['CacheControl']
            assert result == expected

        # Add another file to the directory
        more_filenames = ['new_index.html']
        _make_fake_files(test_dir, more_filenames)
        publish_to_s3(**publish_kwargs)
        results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)

        assert results['KeyCount'] == 7

        # Delete some files and check that the published files count
        # is correct
        test_dir.join('new_index.html').remove()
        test_dir.join('boop.txt').remove()
        publish_to_s3(**publish_kwargs)
        results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)
        assert results['KeyCount'] == 5

        # Write an existing file with different content so that it
        # needs to get updated
        index_key = f'{site_prefix}/index.html'
        orig_etag = s3_client.get_object(
                        Bucket=TEST_BUCKET,
                        Key=index_key)['ETag']
        test_dir.join('index.html').write('totally new content!!!')
        publish_to_s3(**publish_kwargs)
        results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)

        # number of keys should be the same
        assert results['KeyCount'] == 5

        # make sure content in changed file is updated
        new_etag = s3_client.get_object(
                    Bucket=TEST_BUCKET,
                    Key=index_key)['ETag']
        assert new_etag != orig_etag

        # test hidden files and directories
        test_dir.mkdir('.well-known')
        test_dir.mkdir('.not-well-known')
        more_filenames = ['.well-known/security.txt',
                          '.well-known/not-security.txt',
                          '.well-known/.security',
                          '.not-well-known/security.txt',
                          '.security']
        _make_fake_files(test_dir, more_filenames)
        publish_to_s3(**publish_kwargs)
        results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)
        assert results['KeyCount'] == 6
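
The tests in these examples call a _make_fake_files helper that is not included in the excerpts. A minimal sketch of what it might do, assuming it only needs to write throwaway content for each relative path (parent directories are created by the tests themselves via mkdir()):

def _make_fake_files(directory, filenames):
    # Hypothetical helper: `directory` is a pytest tmpdir (py.path.local) and each
    # filename is a path relative to it; parent dirs already exist before this runs.
    for filename in filenames:
        directory.join(filename).write(f'fake content for {filename}')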
Example #4
def test_publish_to_s3(tmpdir, s3_client):
    # Use tmpdir to create a fake directory
    # full of directories and files to be published/deleted/updated
    test_dir = tmpdir.mkdir('test_dir')

    # make a subdirectory
    test_dir.mkdir('sub_dir')

    site_prefix = 'test_dir'

    filenames = ['index.html', 'boop.txt', 'sub_dir/index.html']

    _make_fake_files(test_dir, filenames)

    publish_kwargs = {
        'directory': str(test_dir),
        'base_url': '/base_url',
        'site_prefix': site_prefix,
        'bucket': TEST_BUCKET,
        'cache_control': 'max-age=10',
        's3_client': s3_client,
    }
    publish_to_s3(**publish_kwargs)

    results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)

    keys = [r['Key'] for r in results['Contents']]

    assert results['KeyCount'] == 7  # 4 files & 3 redirect objects (incl. /404/)

    assert f'{site_prefix}/index.html' in keys
    assert f'{site_prefix}/boop.txt' in keys
    assert f'{site_prefix}/sub_dir' in keys
    assert f'{site_prefix}/sub_dir/index.html' in keys
    assert f'{site_prefix}/404' in keys
    assert f'{site_prefix}/404/index.html' in keys
    assert f'{site_prefix}' in keys  # main redirect object

    # Add another file to the directory
    more_filenames = ['new_index.html']
    _make_fake_files(test_dir, more_filenames)
    publish_to_s3(**publish_kwargs)
    results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)

    assert results['KeyCount'] == 8

    # Delete some files and check that the published files count
    # is correct
    test_dir.join('new_index.html').remove()
    test_dir.join('boop.txt').remove()
    publish_to_s3(**publish_kwargs)
    results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)
    assert results['KeyCount'] == 6

    # Write an existing file with different content so that it
    # needs to get updated
    index_key = f'{site_prefix}/index.html'
    orig_etag = s3_client.get_object(Bucket=TEST_BUCKET, Key=index_key)['ETag']
    test_dir.join('index.html').write('totally new content!!!')
    publish_to_s3(**publish_kwargs)
    results = s3_client.list_objects_v2(Bucket=TEST_BUCKET)

    # number of keys should be the same
    assert results['KeyCount'] == 6

    # make sure content in changed file is updated
    new_etag = s3_client.get_object(Bucket=TEST_BUCKET, Key=index_key)['ETag']
    assert new_etag != orig_etag
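
The s3_client fixture and the TEST_BUCKET constant are defined elsewhere in the test suite. One plausible way to provide them is with moto's mocked S3; the fixture below is a sketch under that assumption (on moto 5.x the context manager is mock_aws rather than mock_s3, and the bucket name here is made up).

import boto3
import pytest
from moto import mock_s3

TEST_BUCKET = 'test-bucket'  # assumed bucket name

@pytest.fixture
def s3_client():
    # In-memory S3 client with the test bucket pre-created.
    with mock_s3():
        client = boto3.client('s3', region_name='us-east-1')
        client.create_bucket(Bucket=TEST_BUCKET)
        yield client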