Example #1
    def test_write_s3_check_consistency_wrong_items_count(self):
        """finish_writing() must raise InconsistentWriteState when the key's
        'total' metadata disagrees with the number of records written."""
        # given
        items_to_write = self.get_batch()
        options = self.get_writer_config()
        options['options']['check_consistency'] = True

        # when:
        # Construct the writer *before* the try block: if S3Writer.__init__
        # raised inside the try, the finally clause would hit a NameError on
        # an unbound `writer` and mask the original exception.
        writer = S3Writer(options, ExportMeta(options))
        try:
            writer.write_batch(items_to_write)
            writer.flush()
        finally:
            writer.close()
        bucket = self.s3_conn.get_bucket('fake_bucket')
        key = bucket.get_key('tests/0.jl.gz')
        content = key.get_contents_as_string()
        # Re-upload the same content under a deliberately wrong 'total'
        # metadata value so the consistency check must fail.
        bucket.delete_key('tests/0.jl.gz')
        new_key = bucket.new_key('tests/0.jl.gz')
        new_key.update_metadata({'total': 999})
        new_key.set_contents_from_string(content)

        # then:
        with self.assertRaisesRegexp(InconsistentWriteState,
                                     'Unexpected number of records'):
            writer.finish_writing()
Example #2
 def __get_file_contents_list_from_bucket(bucket, prefix, bucket_name):
     """Fetch and return the decompressed contents of every gzipped JSON key
     under *prefix* in *bucket*, marking each key as processed.

     Each key is copied to "<name>-done" and the original deleted so a rerun
     does not reprocess it; the payload is downloaded to a local temp file
     and gunzipped. Keys ending in '/' (folder placeholders) or '-done'
     (already processed) are skipped. Per-key failures are logged as
     warnings and skipped.

     Returns a list of decompressed byte strings, one per key processed.
     """
     json_files_list = []
     for key in bucket.list(prefix=prefix):
         # Skip folder placeholders and keys already marked as processed.
         if key.name.endswith('/') or key.name.endswith('-done'):
             continue
         try:
             # Rename to "<name>-done" (copy + delete) before reading, so a
             # crash mid-run leaves the key marked rather than re-read.
             new_key_name = "{}-done".format(key.name)
             bucket.copy_key(new_key_name=new_key_name, src_bucket_name=bucket_name, src_key_name=key.name)
             bucket.delete_key(key.name)
             new_key = bucket.get_key(new_key_name)
             new_key.get_contents_to_filename(filename="tmp.json.gz")
             # Context manager closes the handle even if read() raises;
             # the previous open/close pair leaked it on error.
             with gzip.open('tmp.json.gz', 'rb') as f:
                 json_files_list.append(f.read())
         except Exception as ex:
             # `ex.message` is Python-2-only and raises AttributeError on
             # Python 3 (losing the log line); format the exception itself.
             Logger.log("warning", "{} FAILED: {}".format(key.name, ex))
     return json_files_list
Example #3
    def find_key(self, bucket_names, key_name):
        """Search every bucket in *bucket_names* for *key_name*.

        Buckets that cannot be opened are silently skipped. Because the
        same key name may legitimately exist in several buckets, every
        match is collected.

        Returns a (possibly empty) list of the key objects found.
        """
        found = []
        for name in bucket_names:
            try:
                bucket = self.get_bucket(name)
            except boto.exception.S3ResponseError:
                # Missing or inaccessible bucket — move on to the next one.
                continue

            match = bucket.get_key(key_name)
            if match:
                found.append(match)

        return found
Example #4
    def test_write_s3_check_consistency_wrong_size(self):
        """finish_writing() must raise InconsistentWriteState when the
        uploaded key's size no longer matches what the writer recorded."""
        # given
        items_to_write = self.get_batch()
        options = self.get_writer_config()
        options['options']['check_consistency'] = True

        # when:
        # Construct the writer *before* the try block: if S3Writer.__init__
        # raised inside the try, the finally clause would hit a NameError on
        # an unbound `writer` and mask the original exception.
        writer = S3Writer(options, ExportMeta(options))
        try:
            writer.write_batch(items_to_write)
            writer.flush()
        finally:
            writer.close()
        # Corrupt the uploaded object so its size differs from the
        # writer's bookkeeping.
        bucket = self.s3_conn.get_bucket('fake_bucket')
        key = bucket.get_key('tests/0.jl.gz')
        key.set_contents_from_string('fake contents')

        # then:
        with self.assertRaisesRegexp(InconsistentWriteState, 'has unexpected size'):
            writer.finish_writing()
Example #5
    def test_write_s3_check_consistency_wrong_size(self):
        """finish_writing() must raise InconsistentWriteState when the
        uploaded key's size no longer matches what the writer recorded."""
        # given
        items_to_write = self.get_batch()
        options = self.get_writer_config()
        options['options']['check_consistency'] = True

        # when:
        # Construct the writer *before* the try block: if S3Writer.__init__
        # raised inside the try, the finally clause would hit a NameError on
        # an unbound `writer` and mask the original exception.
        writer = S3Writer(options, ExportMeta(options))
        try:
            writer.write_batch(items_to_write)
            writer.flush()
        finally:
            writer.close()
        # Corrupt the uploaded object so its size differs from the
        # writer's bookkeeping.
        bucket = self.s3_conn.get_bucket('fake_bucket')
        key = bucket.get_key('tests/0.jl.gz')
        key.set_contents_from_string('fake contents')

        # then:
        with self.assertRaisesRegexp(InconsistentWriteState,
                                     'has unexpected size'):
            writer.finish_writing()
Example #6
    def test_write_s3_check_consistency_wrong_items_count(self):
        """finish_writing() must raise InconsistentWriteState when the key's
        'total' metadata disagrees with the number of records written."""
        # given
        items_to_write = self.get_batch()
        options = self.get_writer_config()
        options['options']['check_consistency'] = True

        # when:
        # Construct the writer *before* the try block: if S3Writer.__init__
        # raised inside the try, the finally clause would hit a NameError on
        # an unbound `writer` and mask the original exception.
        writer = S3Writer(options, ExportMeta(options))
        try:
            writer.write_batch(items_to_write)
            writer.flush()
        finally:
            writer.close()
        bucket = self.s3_conn.get_bucket('fake_bucket')
        key = bucket.get_key('tests/0.jl.gz')
        content = key.get_contents_as_string()
        # Re-upload the same content under a deliberately wrong 'total'
        # metadata value so the consistency check must fail.
        bucket.delete_key('tests/0.jl.gz')
        new_key = bucket.new_key('tests/0.jl.gz')
        new_key.update_metadata({'total': 999})
        new_key.set_contents_from_string(content)

        # then:
        with self.assertRaisesRegexp(InconsistentWriteState, 'Unexpected number of records'):
            writer.finish_writing()
Example #7
        calling_format = boto.s3.connection.OrdinaryCallingFormat(),
        )

bucket = conn.get_bucket(s3BktLdrBucketName)

# user/group setup for perms
subprocess.call(['groupadd', '-g', gid, groupname])
subprocess.call(['useradd', '-M', '-u', uid, '-g', gid, groupname])

# setup dirs (same order as before: NFS mount root, S3 mount root,
# then the install root)
for mount_dir in (nfsLocalMountRoot, s3MountRoot, s3BktLdrInstallRoot):
    subprocess.call(['mkdir', '-p', mount_dir])

# pull down software: each artifact lands in the install root under its
# own name (single loop replaces four copy-pasted get_key/download pairs)
for artifact in (s3BktLdrJar, s3BktLdrProps, fusepyRPM, yas3fsRPM):
    key = bucket.get_key(artifact)
    key.get_contents_to_filename(s3BktLdrInstallRoot + '/' + artifact)

# perms: install tree owned by root, accessible to the service group only
subprocess.call(['chown', '-R', ('root:' + groupname), s3BktLdrInstallRoot])
subprocess.call(['chmod', '-R', '770', s3BktLdrInstallRoot])