Example #1
    def _create_topics_config(self):
        topics = {"topic": {}}
        for topic, topic_arn_label in (('multipage', 'MultipageSNS'), ('migrationdownload', 'MigrationDownloadSNS'),
                                       ('migrationupload', 'MigrationUploadSNS')):
            topics['topic'].update({topic: self.stack_mgr.stack_data['cms'][topic_arn_label]})

        S3.upload_string(self._format_name('config'), 'topics', json.dumps(topics), partition=False)
Example #2
    def _create_bucket_configs(self):
        for bucket in ['files', 'storage']:
            S3.upload_string(self._format_name('config'),
                             bucket,
                             json.dumps(
                                 {"bucket_name": self._format_name(bucket)}),
                             partition=False)
Example #3
    def _create_queues_config(self):
        queues = {"queue": {}}
        for queue, queue_arn_label in (('multipage', 'MultipageSQS'), ('migrationdownload', 'MigrationDownloadSQS'),
                                       ('migrationupload', 'MigrationUploadSQS')):
            queues['queue'].update({queue: self.stack_mgr.stack_data['cms'][queue_arn_label]})

        S3.upload_string(self._format_name('config'), 'queues', json.dumps(queues), partition=False)
Example #4
    def _create_config_database(self):
        conn = boto.rds2.connect_to_region(self.args.region)

        while True:
            try:
                describe = conn.describe_db_instances(self.tmpl_args['rds']['instance_id'])
                info = describe['DescribeDBInstancesResponse']['DescribeDBInstancesResult']['DBInstances'][0]
                if info['DBInstanceStatus'] == 'available':
                    # check if we have it in config already
                    try:
                        self.rds_config = json.loads(S3.get_string(self._format_name('config'), 'database'))
                    except boto.exception.S3ResponseError:
                        self.rds_config['database'] = 'postgres://{0}:{1}@{2}:{3}/{4}'.format(
                            self.tmpl_args['rds']['username'], self.tmpl_args['rds']['password'],
                            info['Endpoint']['Address'], '5432', self.tmpl_args['rds']['db_name']
                        )
                    break

                print 'rds', info['DBInstanceStatus']
            except DBInstanceNotFound as e:
                print 'Cannot find instance', str(e)

            time.sleep(10)

        S3.upload_string(self._format_name('config'), 'database', json.dumps(self.rds_config), partition=False)
Example #5
    def _create_config_database(self):
        conn = boto.rds2.connect_to_region(self.args.region)

        while True:
            try:
                describe = conn.describe_db_instances(
                    self.tmpl_args['rds']['instance_id'])
                info = describe['DescribeDBInstancesResponse'][
                    'DescribeDBInstancesResult']['DBInstances'][0]
                if info['DBInstanceStatus'] == 'available':
                    # check if we have it in config already
                    try:
                        self.rds_config = json.loads(
                            S3.get_string(self._format_name('config'),
                                          'database'))
                    except boto.exception.S3ResponseError:
                        self.rds_config[
                            'database'] = 'postgres://{0}:{1}@{2}:{3}/{4}'.format(
                                self.tmpl_args['rds']['username'],
                                self.tmpl_args['rds']['password'],
                                info['Endpoint']['Address'], '5432',
                                self.tmpl_args['rds']['db_name'])
                    break

                print 'rds', info['DBInstanceStatus']
            except DBInstanceNotFound as e:
                print 'Cannot find instance', str(e)

            time.sleep(10)

        S3.upload_string(self._format_name('config'),
                         'database',
                         json.dumps(self.rds_config),
                         partition=False)
Example #6
    def _upload_config_registry(self):
        for filename in ['blueprint', 'document', 'jobs', 'admin_rules']:
            data = json.loads(
                open(
                    resource_filename(
                        'hermes_cloud',
                        'data/config_registry/{0}'.format(filename))).read())
            S3.upload_string(self._format_name('config'),
                             filename,
                             json.dumps(data),
                             partition=False)
Example #7
    def _create_cms_config(self):
        S3.upload_string(
            self._format_name('config'),
            'cms',
            json.dumps({
                'dns': self.stack_mgr.stack_data['cms']['CMSFQDN'],
                'name': self.stack_mgr.stack_data['cms']['CMSLoadBalancerName']
            }),
            partition=False)
Example #8
    def _create_queues_config(self):
        queues = {"queue": {}}
        for queue, queue_arn_label in (('multipage', 'MultipageSQS'),
                                       ('migrationdownload',
                                        'MigrationDownloadSQS'),
                                       ('migrationupload',
                                        'MigrationUploadSQS')):
            queues['queue'].update(
                {queue: self.stack_mgr.stack_data['cms'][queue_arn_label]})

        S3.upload_string(self._format_name('config'),
                         'queues',
                         json.dumps(queues),
                         partition=False)
Example #9
    def _create_topics_config(self):
        topics = {"topic": {}}
        for topic, topic_arn_label in (('multipage', 'MultipageSNS'),
                                       ('migrationdownload',
                                        'MigrationDownloadSNS'),
                                       ('migrationupload',
                                        'MigrationUploadSNS')):
            topics['topic'].update(
                {topic: self.stack_mgr.stack_data['cms'][topic_arn_label]})

        S3.upload_string(self._format_name('config'),
                         'topics',
                         json.dumps(topics),
                         partition=False)
Example #10
    def get(self):
        registry = Registry()

        if request.path == '/{0}'.format(self._document['document']['url']):
            return redirect('{0}/'.format(request.path))

        (_, key_name) = request.path.split('/{0}'.format(self._document['document']['url']))
        if not key_name or '/' == key_name:
            key_name = '/index.html'  # todo must have a default start page for multipage

        tmp_dir = os.path.abspath(os.path.join('/tmp/multipage', self._document['document']['uuid']))
        if not os.path.exists(tmp_dir):
            os.makedirs(tmp_dir)

        file_path = os.path.abspath(os.path.join(tmp_dir, key_name[1:]))
        if os.path.exists(file_path):
            with open(file_path, 'r') as content:
                contents = content.read()
        else:
            key_name = '{0}{1}'.format(self._document['document']['uuid'], key_name)
            contents = S3.get_string(registry.get('files').get('bucket_name'), key_name)

            dir_name = os.path.dirname(file_path)
            if not os.path.exists(dir_name):
                os.makedirs(dir_name)

            with open(file_path, 'w') as write:
                write.write(contents)

        mimetype = mimetypes.guess_type(file_path)[0]
        return Response(response=contents, status=200, mimetype=mimetype)
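The handler above resolves bucket names through a Registry object whose implementation is not shown in these examples. A minimal sketch consistent with that usage, assuming the config bucket name is supplied by the caller (the real code presumably derives it the same way the writers above do via self._format_name('config')), might look like this:

import json


class Registry(object):
    # Sketch only: the real hermes_cloud registry may differ in detail.
    def __init__(self, config_bucket='hermes-config'):
        # 'hermes-config' is a placeholder; the real bucket name is whatever
        # _format_name('config') resolves to for the deployed stack.
        self._bucket = config_bucket

    def get(self, name):
        # Every config object above is written with partition=False, so its
        # key is simply its name ('files', 'storage', 'region', 'cms', ...).
        return json.loads(S3.get_string(self._bucket, name))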
Example #11
def test_s3_object_metadata_invalid_key():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('source-bucket')

    s3 = S3()
    with pytest.raises(S3Error):
        s3.metadata('source-bucket', 'invalid-file')
Example #12
    def get(self):
        registry = Registry()

        if request.path == '/{0}'.format(self._document['document']['url']):
            return redirect('{0}/'.format(request.path))

        (_, key_name) = request.path.split('/{0}'.format(
            self._document['document']['url']))
        if not key_name or '/' == key_name:
            key_name = '/index.html'  # todo must have a default start page for multipage

        tmp_dir = os.path.abspath(
            os.path.join('/tmp/multipage', self._document['document']['uuid']))
        if not os.path.exists(tmp_dir):
            os.makedirs(tmp_dir)

        file_path = os.path.abspath(os.path.join(tmp_dir, key_name[1:]))
        if os.path.exists(file_path):
            with open(file_path, 'r') as content:
                contents = content.read()
        else:
            key_name = '{0}{1}'.format(self._document['document']['uuid'],
                                       key_name)
            contents = S3.get_string(
                registry.get('files').get('bucket_name'), key_name)

            dir_name = os.path.dirname(file_path)
            if not os.path.exists(dir_name):
                os.makedirs(dir_name)

            with open(file_path, 'w') as write:
                write.write(contents)

        mimetype = mimetypes.guess_type(file_path)[0]
        return Response(response=contents, status=200, mimetype=mimetype)
Example #13
def test_s3_content_type():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('source-bucket')
    key = Key(bucket, 'test-file')
    key.content_type = 'text/plain'
    key.set_contents_from_string('Hello World')

    assert 'text/plain; charset=utf-8' == S3.get_content_type('source-bucket', 'test-file')
Example #14
def test_s3_object_metadata():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('source-bucket')
    key = Key(bucket, 'test-file')
    key.content_type = 'text/plain'
    key.set_contents_from_string('Hello World')

    s3 = S3()
    assert {'Content-Type': 'text/plain; charset=utf-8'} == s3.metadata('source-bucket', 'test-file')
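Examples #11, #13 and #14 pin down the expected behaviour of the metadata helpers: a missing key raises S3Error, and the stored Content-Type comes back exactly as S3 reports it. A minimal sketch that would satisfy these tests, assuming S3Error is the project's own exception type and the wrapper talks to S3 through boto, might be:

import boto


class S3(object):
    # Sketch only: the real hermes_cloud wrapper may differ in detail.
    def metadata(self, bucket_name, key_name):
        # boto returns None for a key that does not exist.
        key = boto.connect_s3().get_bucket(bucket_name).get_key(key_name)
        if key is None:
            raise S3Error('No such key: {0}'.format(key_name))
        return {'Content-Type': key.content_type}

    @staticmethod
    def get_content_type(bucket_name, key_name):
        # Same lookup, returning only the stored Content-Type header.
        key = boto.connect_s3().get_bucket(bucket_name).get_key(key_name)
        if key is None:
            raise S3Error('No such key: {0}'.format(key_name))
        return key.content_type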
Example #15
def test_get_string():
    conn = boto.connect_s3()
    conn.create_bucket('source-bucket')
    bucket = conn.get_bucket('source-bucket')

    key = Key(bucket=bucket, name='the-file')
    key.set_contents_from_string('the string')

    assert 'the string' == S3.get_string('source-bucket', 'the-file')
Example #16
def test_upload_string(arrow_mock):
    conn = boto.connect_s3()
    conn.create_bucket('source-bucket')

    mocked_date = arrow.get('2015-01-1')
    arrow_mock.utcnow.return_value = mocked_date

    the_date = mocked_date.date()
    expected = '%s/%s/%s/%s' % (the_date.day, the_date.month, the_date.year, 'test-name')
    assert expected == S3.upload_string('source-bucket', 'test-name', 'my string')
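Together with test_get_string above and test_upload_string_no_partition further down, this test fixes the contract of the two core helpers: upload_string returns the key it wrote, prefixed with the current UTC day/month/year unless partition=False, and get_string returns the object body. A sketch matching that behaviour, assuming the wrapper is built on boto and arrow, could be:

import arrow
import boto
from boto.s3.key import Key


class S3(object):
    # Sketch only: the real hermes_cloud wrapper may differ in detail.
    @staticmethod
    def upload_string(bucket_name, name, data, partition=True):
        key_name = name
        if partition:
            # Date-partitioned layout: day/month/year/<name>, as asserted
            # by test_upload_string.
            today = arrow.utcnow().date()
            key_name = '{0}/{1}/{2}/{3}'.format(
                today.day, today.month, today.year, name)
        bucket = boto.connect_s3().get_bucket(bucket_name)
        Key(bucket=bucket, name=key_name).set_contents_from_string(data)
        return key_name

    @staticmethod
    def get_string(bucket_name, name):
        bucket = boto.connect_s3().get_bucket(bucket_name)
        return bucket.get_key(name).get_contents_as_string()

This is also why every _create_*_config method above passes partition=False: the registry objects need stable, unpartitioned key names that can be read back by name alone.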
Example #17
    def do_work(self, message=None):
        """

        :type message: boto.sqs.message.Message | None
        :param message:
        :return:
        """

        if not message:
            return

        conn = boto.connect_s3()
        bucket = conn.get_bucket(self.registry.get('files').get('bucket_name'))
        contents = json.loads(message.get_body())

        job_id = str(contents['Message'])
        job = JobDB.selectBy(uuid=job_id).getOne(None)
        if not job:
            log.error('Cannot find job %s', job_id)
            raise InvalidJobError('Invalid Job ID: {0}'.format(job_id))

        job.set(status='running')
        message = job.message

        document = Document.selectBy(uuid=job.message['document']).getOne(None)
        if not document:
            message['reason'] = 'No Document exists'
            job.set(status='failed', message=message)
            raise FatalJobError('No Document Exists')

        record = Document.get_document(document)

        fp = StringIO(
            S3.get_string(
                self.registry.get('storage').get('bucket_name'),
                record['file']['key']))
        with zipfile.ZipFile(fp, 'r') as zip_handle:
            for name in zip_handle.namelist():
                if name.endswith('/'):
                    continue
                key_name = '{0}/{1}'.format(document.uuid, name)
                key = Key(bucket=bucket, name=key_name)
                key.content_type = mimetypes.guess_type(name)[0]
                key.set_contents_from_string(zip_handle.read(name))
                log.info('Uploaded %s', key_name)

        job.set(status='complete')
        if job.message.get('on_complete', {}).get('alter'):
            document.set(**job.message['on_complete']['alter'])

        log.info('Setting job=%s to complete', job_id)
Example #18
def test_generate_form(arrow_mock, uuid_mock):
    conn = boto.connect_s3()
    conn.create_bucket('source-bucket')

    uuid_mock.uuid4.return_value = 'test-id'

    mocked_date = arrow.get('2015-01-1')
    arrow_mock.utcnow.return_value = mocked_date

    the_date = mocked_date.date()
    expected_keyname = '%s/%s/%s/%s' % (the_date.day, the_date.month, the_date.year, 'test-id')

    response = S3.generate_form('source-bucket')
    assert response['action'] == 'https://source-bucket.s3.amazonaws.com/'
    assert [item for item in response['fields'] if item['name'] == 'key'][0]['value'] == expected_keyname
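test_generate_form expects a browser-upload form whose action targets the bucket over HTTPS and whose 'key' field is a date-partitioned random name. One plausible implementation, assuming generate_form simply delegates the form fields and policy signing to boto's build_post_form_args helper, is:

import uuid

import arrow
import boto


class S3(object):
    # Sketch only: the real hermes_cloud wrapper may differ in detail.
    @staticmethod
    def generate_form(bucket_name):
        # Same day/month/year partitioning as upload_string, with a random
        # UUID as the object name.
        today = arrow.utcnow().date()
        key_name = '{0}/{1}/{2}/{3}'.format(
            today.day, today.month, today.year, uuid.uuid4())
        return boto.connect_s3().build_post_form_args(
            bucket_name, key_name, http_method='https')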
Example #19
    def do_work(self, message=None):
        """

        :type message: boto.sqs.message.Message | None
        :param message:
        :return:
        """

        if not message:
            return

        conn = boto.connect_s3()
        bucket = conn.get_bucket(self.registry.get('files').get('bucket_name'))
        contents = json.loads(message.get_body())

        job_id = str(contents['Message'])
        job = JobDB.selectBy(uuid=job_id).getOne(None)
        if not job:
            log.error('Cannot find job %s', job_id)
            raise InvalidJobError('Invalid Job ID: {0}'.format(job_id))

        job.set(status='running')
        message = job.message

        document = Document.selectBy(uuid=job.message['document']).getOne(None)
        if not document:
            message['reason'] = 'No Document exists'
            job.set(status='failed', message=message)
            raise FatalJobError('No Document Exists')

        record = Document.get_document(document)

        fp = StringIO(S3.get_string(self.registry.get('storage').get('bucket_name'), record['file']['key']))
        with zipfile.ZipFile(fp, 'r') as zip_handle:
            for name in zip_handle.namelist():
                if name.endswith('/'):
                    continue
                key_name = '{0}/{1}'.format(document.uuid, name)
                key = Key(bucket=bucket, name=key_name)
                key.content_type = mimetypes.guess_type(name)[0]
                key.set_contents_from_string(zip_handle.read(name))
                log.info('Uploaded %s', key_name)

        job.set(status='complete')
        if job.message.get('on_complete', {}).get('alter'):
            document.set(**job.message['on_complete']['alter'])

        log.info('Setting job=%s to complete', job_id)
Example #20
    def _build_rds(self):
        try:
            config = json.loads(S3.get_string(self._format_name('config'), 'database'))
            parse = urlparse.urlparse(config['database'])
            (username, contents, port) = parse.netloc.split(':')
            (password, contents) = contents.split('@')

            self.tmpl_args['rds'] = {
                'username': username,
                'password': password,
                'db_name': self._format_name('cmsdb').replace('-', '_'),
                'instance_id': self._format_name('cmsdb')
            }
        except boto.exception.S3ResponseError:
            self.tmpl_args['rds'] = {
                'username': self._format_name('system_database').replace('-', '_'),
                'password': uuid.uuid4(),
                'db_name': self._format_name('cmsdb').replace('-', '_'),
                'instance_id': self._format_name('cmsdb')
            }
Example #21
    def _build_rds(self):
        try:
            config = json.loads(
                S3.get_string(self._format_name('config'), 'database'))
            parse = urlparse.urlparse(config['database'])
            (username, contents, port) = parse.netloc.split(':')
            (password, contents) = contents.split('@')

            self.tmpl_args['rds'] = {
                'username': username,
                'password': password,
                'db_name': self._format_name('cmsdb').replace('-', '_'),
                'instance_id': self._format_name('cmsdb')
            }
        except boto.exception.S3ResponseError:
            self.tmpl_args['rds'] = {
                'username':
                self._format_name('system_database').replace('-', '_'),
                'password': uuid.uuid4(),
                'db_name': self._format_name('cmsdb').replace('-', '_'),
                'instance_id': self._format_name('cmsdb')
            }
Example #22
def test_upload_string_no_partition():
    conn = boto.connect_s3()
    conn.create_bucket('source-bucket')

    assert 'test-name' == S3.upload_string('source-bucket', 'test-name', 'my string', partition=False)
Example #23
    def _create_region_config(self):
        S3.upload_string(self._format_name('config'),
                         'region',
                         json.dumps({'region': self.args.region}),
                         partition=False)
Example #24
    def _upload_config_registry(self):
        for filename in ['blueprint', 'document', 'jobs', 'admin_rules']:
            data = json.loads(open(resource_filename(
                'hermes_cloud',
                'data/config_registry/{0}'.format(filename))).read())
            S3.upload_string(self._format_name('config'), filename,
                             json.dumps(data), partition=False)
Example #25
    def _create_bucket_configs(self):
        for bucket in ['files', 'storage']:
            S3.upload_string(
                self._format_name('config'), bucket,
                json.dumps({"bucket_name": self._format_name(bucket)}),
                partition=False)
Example #26
    def _create_cms_config(self):
        S3.upload_string(self._format_name('config'), 'cms', json.dumps({
            'dns': self.stack_mgr.stack_data['cms']['CMSFQDN'],
            'name': self.stack_mgr.stack_data['cms']['CMSLoadBalancerName']
        }), partition=False)
Example #27
    def _create_region_config(self):
        S3.upload_string(self._format_name('config'), 'region',
                         json.dumps({'region': self.args.region}),
                         partition=False)