コード例 #1
0
def export_dos_opportunities(
    client,
    logger,
    stage: str,
    output_dir,
    dry_run: bool = False
):
    """Export DOS opportunity (brief) data to CSV and upload to S3.

    Writes two CSVs into *output_dir*: an admin CSV that includes buyer user
    details, and a public CSV restricted to the columns in
    DOS_OPPORTUNITY_HEADERS. The admin CSV is uploaded privately to the
    reports bucket; the public CSV is uploaded with public access to the
    communications bucket for the latest DOS framework.

    :param client: data API client used to fetch framework and brief data
    :param logger: logger for progress messages
    :param stage: deployment stage, used to resolve the S3 bucket names
    :param output_dir: directory to write the CSV files into (created if missing)
    :param dry_run: if True, passed through to skip the actual S3 uploads
    """
    output_dir = Path(output_dir)
    if not output_dir.exists():
        logger.info(f"Creating {output_dir} directory")
    # exist_ok=True avoids a race between the existence check above and mkdir
    output_dir.mkdir(parents=True, exist_ok=True)

    latest_framework_slug = get_latest_dos_framework(client)

    communications_bucket = S3(get_bucket_name(stage, "communications"))
    reports_bucket = S3(get_bucket_name(stage, "reports"))

    logger.info("Exporting DOS opportunity data to CSV")

    # Get the data (buyer user details included for the admin CSV only)
    rows = get_brief_data(client, logger, include_buyer_user_details=True)

    # Name each local file once so the write and upload steps cannot drift apart
    admin_csv_path = output_dir / "opportunity-data-for-admins.csv"
    public_csv_path = output_dir / DOWNLOAD_FILE_NAME

    # Construct CSV for admins
    write_rows_to_csv(rows, admin_csv_path, logger)
    # Construct public CSV (filter out buyer details)
    write_rows_to_csv(
        [
            OrderedDict((k, v) for k, v in row.items() if k in DOS_OPPORTUNITY_HEADERS)
            for row in rows
        ],
        public_csv_path,
        logger
    )

    # Upload admin CSV (private) to reports bucket
    upload_file_to_s3(
        admin_csv_path,
        reports_bucket,
        f"{latest_framework_slug}/reports/{DOWNLOAD_FILE_NAME}",
        DOWNLOAD_FILE_NAME,
        public=False,
        dry_run=dry_run,
        logger=logger
    )

    # Upload public CSV to S3
    upload_file_to_s3(
        public_csv_path,
        communications_bucket,
        f"{latest_framework_slug}/communications/data/{DOWNLOAD_FILE_NAME}",
        DOWNLOAD_FILE_NAME,
        public=True,
        dry_run=dry_run,
        logger=logger
    )
コード例 #2
0
    def test_content_type_detection(self):
        """Mimetype lookup covers the extensions allowed for G6 documents:
        pdf, odt, ods, odp."""
        expected_mimetypes = {
            'test-file.pdf': 'application/pdf',
            'test-file.odt': 'application/vnd.oasis.opendocument.text',
            'test-file.ods': 'application/vnd.oasis.opendocument.spreadsheet',
            'test-file.odp': 'application/vnd.oasis.opendocument.presentation',
        }
        for filename, mimetype in expected_mimetypes.items():
            self.assertEqual(
                S3('test-bucket')._get_mimetype(filename), mimetype)
コード例 #3
0
    def test_endpoint_url_config_not_set_in_development(self, boto3):
        # With no DM_S3_ENDPOINT_URL configured, boto3 must not be
        # given an endpoint_url even in the development environment
        flask_app = flask.Flask("test_endpoint_url_from_flask_config")
        flask_app.env = "development"

        with flask_app.app_context():
            S3("bucket")
            resource_kwargs = boto3.resource.call_args[1]
            assert "endpoint_url" not in resource_kwargs
コード例 #4
0
    def test_delete_key(self):
        """delete_key removes the key from the bucket."""
        fake_bucket = FakeBucket(['folder/test-file.pdf'])
        self.s3_mock.get_bucket.return_value = fake_bucket

        S3('test-bucket').delete_key('folder/test-file.pdf')

        assert 'folder/test-file.pdf' not in fake_bucket.keys
コード例 #5
0
    def test_delete_key_moves_file_with_prefix(self):
        """Deleting a key archives it under a timestamp prefix rather than
        removing it outright."""
        fake_bucket = FakeBucket(['folder/test-file.pdf'])
        self.s3_mock.get_bucket.return_value = fake_bucket

        S3('test-bucket').delete_key('folder/test-file.pdf')

        assert 'folder/2015-10-10T00:00:00-test-file.pdf' in fake_bucket.keys
コード例 #6
0
    def test_get_signed_url_with_expires_at(self, bucket_with_file):
        # expires_in should surface as the Expires query parameter
        url = S3('dear-liza').get_signed_url(
            'with/straw.dear.pdf', expires_in=10)

        query = parse_qs(urlparse(url).query)
        assert query["Expires"] == ["1444435210"]
コード例 #7
0
    def test_save_file(self, empty_bucket, path, expected_path, expected_ct,
                       expected_filename, expected_ext, timestamp,
                       expected_timestamp, download_filename, expected_cd):
        """Saving a new file returns its key dict and stores exactly one
        object carrying the expected metadata, disposition and body."""
        key_dict = S3("dear-liza").save(
            path,
            file_=BytesIO(b"one two three"),
            timestamp=timestamp,
            download_filename=download_filename,
        )

        assert key_dict == {
            "path": expected_path,
            "filename": expected_filename,
            "ext": expected_ext,
            "last_modified": expected_timestamp,
            "size": 13,
        }

        summaries = list(empty_bucket.objects.all())
        assert len(summaries) == 1
        assert summaries[0].key == expected_path

        stored = summaries[0].Object()
        assert stored.metadata == {"timestamp": expected_timestamp}
        assert stored.content_disposition == expected_cd
        assert stored.get()["Body"].read() == b"one two three"
        if sys.version_info < (3, 0):
            # moto currently has a py3 bug which makes this fail - the fix not yet upstream - perhaps next time you come
            # across this message try updating moto to the latest version and see if this works
            assert stored.content_type == expected_ct
コード例 #8
0
    def test_save_existing_file(self, bucket_with_file):
        """Overwriting an existing key keeps a single object and replaces its
        body, size, timestamp and content-disposition."""
        key_dict = S3("dear-liza").save(
            "with/straw.dear.pdf",
            file_=BytesIO(b"significantly longer contents than before"),
            download_filename="significantly_different.pdf",
        )

        assert key_dict == {
            "path": "with/straw.dear.pdf",
            "filename": "straw.dear",
            "ext": "pdf",
            "last_modified": "2014-10-20T00:00:00.000000Z",
            "size": 41,
        }

        summaries = list(bucket_with_file.objects.all())
        assert len(summaries) == 1
        assert summaries[0].key == "with/straw.dear.pdf"

        stored = summaries[0].Object()
        assert stored.metadata == {"timestamp": "2014-10-20T00:00:00.000000Z"}
        assert stored.content_disposition == 'attachment; filename="significantly_different.pdf"'
        assert stored.get()["Body"].read() == b"significantly longer contents than before"
        if sys.version_info < (3, 0):
            # moto currently has a py3 bug which makes this fail - the fix not yet upstream - perhaps next time you come
            # across this message try updating moto to the latest version and see if this works
            assert stored.content_type == "application/pdf"
コード例 #9
0
    def test_endpoint_url_ignored_in_production(self, boto3):
        # DM_S3_ENDPOINT_URL is a development-only override; in production
        # boto3 must not be handed an endpoint_url at all
        flask_app = flask.Flask("test_endpoint_url_from_flask_config")
        flask_app.env = "production"
        flask_app.config["DM_S3_ENDPOINT_URL"] = "http://localhost:5100"

        with flask_app.app_context():
            S3("bucket")
            resource_kwargs = boto3.resource.call_args[1]
            assert "endpoint_url" not in resource_kwargs
コード例 #10
0
    def test_endpoint_url_config_none_in_development(self, boto3):
        # An explicit None for DM_S3_ENDPOINT_URL behaves the same as the
        # setting being absent: no endpoint_url is passed to boto3
        flask_app = flask.Flask("test_endpoint_url_from_flask_config")
        flask_app.env = "development"
        flask_app.config["DM_S3_ENDPOINT_URL"] = None

        with flask_app.app_context():
            S3("bucket")
            resource_kwargs = boto3.resource.call_args[1]
            assert "endpoint_url" not in resource_kwargs
コード例 #11
0
    def test_save_sets_timestamp_to_current_time(self):
        """With no explicit timestamp, save stamps the key with "now"."""
        fake_bucket = FakeBucket()
        self.s3_mock.get_bucket.return_value = fake_bucket

        S3('test-bucket').save('folder/test-file.pdf', mock_file('blah', 123))

        fake_bucket.s3_key_mock.set_metadata.assert_called_once_with(
            'timestamp', "2015-10-10T00:00:00.000000Z")
def main(stage):
    """Copy the G-Cloud 7 supplier pack and update files from the stage's
    draft-documents bucket into its communications bucket.

    Note the doubled stage in the bucket names is the platform's
    ``<thing>-<environ>-<environ>`` naming convention.
    """
    drafts_bucket = S3(
        f'digitalmarketplace-g7-draft-documents-{stage}-{stage}')
    communications_bucket = S3(
        f'digitalmarketplace-communications-{stage}-{stage}')

    copy_file = make_copier(drafts_bucket, communications_bucket)

    copy_file("g-cloud-7-supplier-pack.zip",
              "g-cloud-7/communications/g-cloud-7-supplier-pack.zip")
    for file_entry in drafts_bucket.list('g-cloud-7-updates'):
        # Re-root each update under the communications prefix
        relative = re.sub(r'^g-cloud-7-updates/', '', file_entry['path'])
        copy_file(file_entry['path'],
                  f"g-cloud-7/communications/updates/{relative}")
コード例 #13
0
 def test_get_key(self, bucket_with_file):
     """get_key returns the parsed key dict for an existing file."""
     key_dict = S3('dear-liza').get_key('with/straw.dear.pdf')
     assert key_dict == {
         "path": "with/straw.dear.pdf",
         "filename": "straw.dear",
         "ext": "pdf",
         "size": 12,
         "last_modified": "2005-04-03T02:01:00.000000Z",
     }
コード例 #14
0
    def test_list_files(self):
        """list() returns each bucket key formatted as a key dict."""
        fake_bucket = mock.Mock()
        self.s3_mock.get_bucket.return_value = fake_bucket

        key = FakeKey('dir/file 1.odt')
        fake_bucket.list.return_value = [key]

        self.assertEqual(
            S3('test-bucket').list(),
            [key.fake_format_key(filename='file 1', ext='odt')])
コード例 #15
0
    def test_endpoint_url_from_flask_config(self, boto3):
        # In development, DM_S3_ENDPOINT_URL should be forwarded to boto3
        flask_app = flask.Flask("test_endpoint_url_from_flask_config")
        flask_app.env = "development"
        flask_app.config["DM_S3_ENDPOINT_URL"] = "http://localhost:5100"

        with flask_app.app_context():
            S3("bucket")
            resource_kwargs = boto3.resource.call_args[1]
            assert resource_kwargs["endpoint_url"] == "http://localhost:5100"
コード例 #16
0
    def test_move_existing_doesnt_delete_file(self):
        """_move_existing copies the key under the prefix and keeps the
        original in place."""
        fake_bucket = FakeBucket(['folder/test-file.odt'])
        self.s3_mock.get_bucket.return_value = fake_bucket

        S3('test-bucket')._move_existing(existing_path='folder/test-file.odt',
                                         move_prefix='OLD')

        expected_keys = {'folder/test-file.odt', 'folder/OLD-test-file.odt'}
        self.assertEqual(fake_bucket.keys, expected_keys)
コード例 #17
0
 def test_bucket_short_name_invalid_format(self):
     """bucket_short_name raises ValueError for names that don't match the
     digitalmarketplace-<thing>-<environ>-<environ> convention."""
     for invalid_name in (
         'something-invalid',
         'digitalmarketplace-something-environ-environ-other',
         'digitalmarketplace-something-environ-other',
     ):
         with pytest.raises(ValueError):
             S3(invalid_name).bucket_short_name
コード例 #18
0
 def test_get_key_weird_file(self, bucket_with_weird_file):
     """Key parsing copes with punctuation-heavy filenames."""
     expected = {
         "path": ".!!!.dear.pdf",
         "filename": ".!!!.dear",
         "ext": "pdf",
         "size": 26,
         "last_modified": bucket_with_weird_file["expected_returned_timestamp"],
     }
     assert S3('dear-liza').get_key('.!!!.dear.pdf') == expected
コード例 #19
0
 def test_get_signed_url(self, bucket_with_file):
     url = S3('dear-liza').get_signed_url('with/straw.dear.pdf')
     parsed = urlparse(url)
     # to an extent the url format should be opaque and up to amazon so we might have to rethink these assertions if
     # anything changes
     assert "dear-liza" in parsed.hostname
     assert parsed.path == "/with/straw.dear.pdf"
     query = parse_qs(parsed.query)
     assert query["AWSAccessKeyId"] == ["AKIAIABCDABCDABCDABC"]
     assert query["Signature"]
コード例 #20
0
    def test_copy_existing_file(self, bucket_with_file):
        """copy() duplicates an object and returns the new key's dict."""
        destination = "copy/straw.dear.pdf"

        key_dict = S3("dear-liza").copy(src_bucket="dear-liza",
                                        src_key="with/straw.dear.pdf",
                                        target_key=destination)

        assert key_dict['path'] == destination
        assert key_dict['size'] == 12
        assert key_dict['last_modified'] == "2005-04-03T02:01:00.000000Z"
コード例 #21
0
    def test_save_sets_content_type_and_acl(self):
        """save() derives the Content-Type from the extension and makes the
        object public-read."""
        fake_bucket = FakeBucket()
        self.s3_mock.get_bucket.return_value = fake_bucket

        S3('test-bucket').save('folder/test-file.pdf', mock_file('blah', 123))

        self.assertEqual(fake_bucket.keys, {'folder/test-file.pdf'})
        fake_bucket.s3_key_mock.set_contents_from_file.assert_called_with(
            mock.ANY, headers={'Content-Type': 'application/pdf'})
        fake_bucket.s3_key_mock.set_acl.assert_called_with('public-read')
コード例 #22
0
    def test_save_existing_file(self):
        """Saving over an existing key archives the old copy under the
        supplied move_prefix."""
        fake_bucket = FakeBucket(['folder/test-file.pdf'])
        self.s3_mock.get_bucket.return_value = fake_bucket

        S3('test-bucket').save('folder/test-file.pdf',
                               mock_file('blah', 123),
                               move_prefix='OLD')

        self.assertEqual(
            fake_bucket.keys,
            {'folder/test-file.pdf', 'folder/OLD-test-file.pdf'})
コード例 #23
0
    def test_get_key(self):
        """get_key formats the key returned by the bucket into a key dict."""
        fake_bucket = mock.Mock()
        self.s3_mock.get_bucket.return_value = fake_bucket

        key = FakeKey('dir/file1.pdf')
        fake_bucket.get_key.return_value = key

        expected = key.fake_format_key(filename='file1', ext='pdf')
        assert S3('test-bucket').get_key('dir/file1.pdf') == expected
コード例 #24
0
    def test_list_files_with_loading_custom_timestamps(self):
        """With load_timestamps=True, list() refetches each key and reports
        its custom timestamp metadata as last_modified."""
        fake_bucket = mock.Mock()
        self.s3_mock.get_bucket.return_value = fake_bucket

        fake_bucket.list.return_value = [FakeKey('dir/file 1.odt')]
        fake_bucket.get_key.return_value = FakeKey(
            'dir/file 1.odt', timestamp='2015-10-10T15:00:00.0000Z')

        listed = S3('test-bucket').list(load_timestamps=True)
        assert listed[0]['last_modified'] == '2015-10-10T15:00:00.000000Z'
コード例 #25
0
    def test_list_files_removes_directories(self):
        """Zero-size directory placeholder keys are excluded from list()."""
        fake_bucket = mock.Mock()
        self.s3_mock.get_bucket.return_value = fake_bucket

        directory_key = FakeKey('dir/', size=0)
        file_key = FakeKey('dir/file 1.odt')
        fake_bucket.list.return_value = [directory_key, file_key]

        self.assertEqual(
            S3('test-bucket').list(),
            [file_key.fake_format_key(filename='file 1', ext='odt')])
コード例 #26
0
    def test_list_files_order_by_last_modified(self):
        """list() sorts results by last_modified, oldest first."""
        fake_bucket = mock.Mock()
        self.s3_mock.get_bucket.return_value = fake_bucket

        later = FakeKey('dir/file 1.odt')
        earlier = FakeKey('dir/file 2.odt',
                          last_modified='2014-08-17T14:00:00.000000Z')
        fake_bucket.list.return_value = [later, earlier]

        self.assertEqual(
            S3('test-bucket').list(),
            [earlier.fake_format_key(filename='file 2', ext='odt'),
             later.fake_format_key(filename='file 1', ext='odt')])
コード例 #27
0
    def test_list_files_with_loading_custom_timestamps_sorts_by_timestamp(
            self):
        """When timestamps are loaded, results are ordered by the custom
        timestamp metadata (keys without one fall back to an earlier
        default, so they sort first)."""
        fake_bucket = mock.Mock()
        self.s3_mock.get_bucket.return_value = fake_bucket

        listed_key = FakeKey('dir/file 1.odt')
        fake_bucket.list.return_value = [listed_key] * 3
        fake_bucket.get_key.side_effect = [
            FakeKey('dir/file 1.odt', timestamp='2015-12-10T15:00:00.0000Z'),
            FakeKey('dir/file 1.odt', timestamp='2015-11-10T15:00:00.0000Z'),
            FakeKey('dir/file 1.odt'),
        ]

        results = S3('test-bucket').list(load_timestamps=True)
        assert [entry['last_modified'] for entry in results] == [
            '2015-08-17T14:00:00.000000Z',
            '2015-11-10T15:00:00.000000Z',
            '2015-12-10T15:00:00.000000Z',
        ]
コード例 #28
0
    def test_save_sets_content_type_and_content_disposition_header(self):
        """download_filename becomes an attachment Content-Disposition header
        alongside the derived Content-Type."""
        fake_bucket = FakeBucket()
        self.s3_mock.get_bucket.return_value = fake_bucket

        S3('test-bucket').save('folder/test-file.pdf',
                               mock_file('blah', 123),
                               download_filename='new-test-file.pdf')

        self.assertEqual(fake_bucket.keys, {'folder/test-file.pdf'})
        fake_bucket.s3_key_mock.set_contents_from_file.assert_called_with(
            mock.ANY,
            headers={
                'Content-Type': 'application/pdf',
                'Content-Disposition':
                    'attachment; filename="new-test-file.pdf"'.encode('utf-8'),
            })
コード例 #29
0
def update(service_id, section_id):
    """Handle an admin's POST updating one editable section of a service.

    Fetches the service, uploads any submitted documents to S3, merges them
    into the posted form data, saves the result via the data API, and either
    redirects to the service view on success or re-renders the edit form
    with errors and a 400 status on failure.

    :param service_id: id of the service being edited
    :param section_id: id of the content section being edited
    """
    service = data_api_client.get_service(service_id)
    if service is None:
        abort(404)
    # API response wraps the service payload under the 'services' key
    service = service['services']

    content = content_loader.get_builder('g-cloud-6', 'edit_service_as_admin').filter(service)
    section = content.get_section(section_id)
    # Only sections present in the admin manifest and marked editable may be changed
    if section is None or not section.editable:
        abort(404)

    errors = None
    posted_data = section.get_data(request.form)

    # Upload submitted files first; failures become section error messages
    # instead of being merged into the data to save
    uploaded_documents, document_errors = upload_service_documents(
        S3(current_app.config['DM_S3_DOCUMENT_BUCKET']),
        current_app.config['DM_DOCUMENTS_URL'],
        service, request.files, section)

    if document_errors:
        errors = section.get_error_messages(document_errors)
    else:
        posted_data.update(uploaded_documents)

    # Skip the API call when uploads already failed or nothing changed
    if not errors and section.has_changes_to_save(service, posted_data):
        try:
            data_api_client.update_service(
                service_id,
                posted_data,
                current_user.email_address)
        except HTTPError as e:
            errors = section.get_error_messages(e.message)

    if errors:
        # Keep a service name available so the edit page heading renders
        if not posted_data.get('serviceName', None):
            posted_data['serviceName'] = service.get('serviceName', '')
        return render_template(
            "edit_section.html",
            section=section,
            service_data=posted_data,
            service_id=service_id,
            errors=errors
        ), 400

    return redirect(url_for(".view", service_id=service_id))
コード例 #30
0
    def test_default_move_prefix_is_datetime(self):
        """With no move_prefix, the archived copy is prefixed with the
        current UTC datetime."""
        fake_bucket = FakeBucket(['folder/test-file.pdf'])
        self.s3_mock.get_bucket.return_value = fake_bucket
        frozen_now = datetime.datetime(2015, 1, 1, 1, 2, 3, 4)

        with mock.patch.object(datetime, 'datetime',
                               mock.Mock(wraps=datetime.datetime)) as datetime_mock:
            datetime_mock.utcnow.return_value = frozen_now
            S3('test-bucket').save(
                'folder/test-file.pdf',
                mock_file('blah', 123),
            )

            self.assertEqual(
                fake_bucket.keys, {
                    'folder/test-file.pdf',
                    'folder/2015-01-01T01:02:03.000004-test-file.pdf'
                })