Example 1
def __upload(proj, repo, branch, data, maker, force=True):
    root, source, dist = (None, None, None)
    try:
        # Working layout: sources are cloned under <root>/src/<proj>,
        # the built output and the zip archive go under <root>/dist.
        root = Folder(data.root or '~')
        source = root.child_folder('src')
        source.make()
        source = source.child_folder(proj)
        dist = root.child_folder('dist')
        tree = Tree(source, repo=repo, branch=branch)
        key = None
        if not force:
            # Skip the build entirely if this revision is already in the bucket.
            key = check_revision_already_published(proj, data.bucket, tree)

        if not key:
            b = Bucket(data.bucket)
            b.make()
            # Keys are namespaced as <proj>/<branch>/<sha>/<proj>.zip.
            key_folder = Folder(proj).child_folder(branch)
            zippath = dist.child_file(proj + '.zip')
            tree.clone(tip_only=True)
            sha = tree.get_revision(short=False)
            key_folder = key_folder.child_folder(sha)
            target = dist.child_folder(proj)
            target.make()
            # The caller-supplied `maker` builds `source` into `target`.
            maker(source, target)
            target.zip(zippath.path)
            b.add_file(zippath, target_folder=key_folder.path)
            key = b.bucket.get_key(key_folder.child(zippath.name))
    finally:
        # Clean up the working folders whether or not the upload succeeded.
        if source:
            source.delete()
        if dist:
            dist.delete()

    return key.generate_url(30000)
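
A minimal sketch of how __upload might be driven, for illustration only: build_site, the repository URL, the project name and the _Config stand-in are hypothetical, not part of the original code. The only contract visible above is that maker is called as maker(source, target) with two folders, that the data argument exposes .root and .bucket, and that the function returns a time-limited signed URL.

# Hypothetical caller; build_site, _Config and the literal values are illustrative.
def build_site(source, target):
    # The real maker would compile or copy the project from `source`
    # into `target` before it gets zipped and uploaded.
    pass

class _Config(object):
    # Stand-in for the `data` argument; only .root and .bucket are read above.
    root = '~/gitbot-work'                  # hypothetical working root
    bucket = 'releases.dev.gitbot.test'

url = __upload('mysite',                    # hypothetical project name
               'git@example.com:me/mysite.git',
               'master',
               _Config(),
               build_site,
               force=False)                 # reuse an already-published revision
print url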
Example 2
def delete_bucket():
    conf_path = BEES_ROOT.child('project.yaml')
    config = yaml.load(File(conf_path).read_all())
    config['file_path'] = conf_path
    bucket_name = config['publish']['bucket']
    bucket = Bucket(bucket_name)
    bucket.connect()
    bucket.delete(recurse=True)
Example 3
def cleanup():
    if not _bucket_name:
        return
    bucket = Bucket(_bucket_name)
    try:
        bucket.connect()
        bucket.delete(recurse=True)
    except:
        pass
    data_folder.delete()
Example 4
def check_revision_already_published(proj, bucket_name, tree):
    b = Bucket(bucket_name)
    if not b.connect():
        return None

    sha = tree.get_revision_remote()
    key_folder = Folder(proj).child_folder(tree.branch_name)
    key_folder = key_folder.child_folder(sha)
    key_path = key_folder.child(proj + '.zip')
    return b.bucket.get_key(key_path)
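
For reference, a sketch of how the returned key is typically consumed, mirroring Example 1: a non-None key means the revision is already in the bucket, so the caller can hand back a signed URL without cloning or rebuilding. The project name is hypothetical; the bucket name is the one used elsewhere in these examples, and `tree` is assumed to be a Tree built as in Example 1.

# Hypothetical caller; 'mysite' is illustrative and `tree` is assumed to be
# the Tree(source, repo=..., branch=...) constructed as in Example 1.
existing = check_revision_already_published('mysite', 'releases.dev.gitbot.test', tree)
if existing:
    # Already published: reuse the stored zip instead of rebuilding it.
    print existing.generate_url(30000)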
Example 5
def __dump_and_upload_result(name, data):
    result = HERE.child_file('result.log')
    with open(result.path, 'a') as f:
        f.write('*********' + name + '************')
        f.write(yaml.dump(data))
        f.write('*********' + name + '************')
    b = Bucket(data['bucket'],
                aws_access_key_id=data['keys']['access_key'],
                aws_secret_access_key=data['keys']['secret'])
    b.make()
    b.add_file(result)
Example 6
def test_upload_stack():
    conf_path = BEES_ROOT.child('project.yaml')
    conf = yaml.load(File(conf_path).read_all())
    conf['file_path'] = conf_path
    uploaded = stack.upload_stack(conf)['result']
    bucket_name = conf['publish']['bucket']
    bucket = Bucket(bucket_name)
    bucket.connect()
    bucket.set_policy()
    for rpath, info in uploaded.iteritems():
        response = urllib2.urlopen(info.url)
        html = response.read()
        source_text = File(info.target).read_all()
        assert_text_equals(html, source_text)
Example 7
    def delete_bucket(self, bucket_name, delete_route=False):
        bucket = Bucket(bucket_name,
                        aws_access_key_id=self.config.awskeys.access_key,
                        aws_secret_access_key=self.config.awskeys.secret)

        if delete_route:
            route = self.get_route()
            try:
                route.delete_route_to_bucket(bucket_name)
            except:
                # Route does not exist
                pass

        if bucket.connect():
            bucket.delete(recurse=True)
Example 8
def upload_stack(config, **kwargs):
    config, env, files = validate_stack(config, **kwargs)
    if not config:
        raise Exception("Invalid template.")
    bucket_name = config.publish.get("bucket", None)
    if not bucket_name:
        raise Exception("You need to provide a bucket name for publishing your stack.")
    path = config.publish.get("path", None)
    if path:
        path = path.rstrip("/") + "/"
    bucket = Bucket(bucket_name, **kwargs)
    bucket.make()
    result = {}
    url_format = "http://{bucket_name}.s3.amazonaws.com/{path}{template_name}"
    for rpath, (source, target) in files.iteritems():
        full_path = bucket.add_file(target, acl="private", target_folder=path)
        signed_url = bucket.get_signed_url(full_path)
        # `path` may be None when no publish path is configured; fall back to ''.
        url = url_format.format(bucket_name=bucket_name, path=path or "", template_name=rpath)
        result[rpath] = dict(url=url, source=source, target=target)
    return ConfigDict(dict(result=result, files=files, config=config))
Example 9
    def make_bucket(self, bucket_name, error='404.html', make_route=False):
        bucket = Bucket(bucket_name,
                        aws_access_key_id=self.config.awskeys.access_key,
                        aws_secret_access_key=self.config.awskeys.secret)
        bucket.make()
        bucket.set_policy()
        bucket.serve(error=error)

        if make_route:
            route = self.get_route()
            try:
                route.add_route_to_bucket(bucket_name)
            except:
                pass

        return bucket
Example 10
    def get_bucket(self, bucket_name):
        bucket = Bucket(bucket_name,
                        aws_access_key_id=self.config.awskeys.access_key,
                        aws_secret_access_key=self.config.awskeys.secret)
        bucket.connect()
        return bucket
Example 11
    branch='master',
    bucket='releases.dev.gitbot.test',
    keys=dict(access_key=access_key, secret=secret),
    command='all'
)

conn = connect_to_region('us-east-1',
                aws_access_key_id=access_key,
                aws_secret_access_key=secret)
queue = Queue(conn, queue_url)
message = queue.new_message(json.dumps(data))
message = queue.write(message)

# Wait for result
b = Bucket(
    data['bucket'],
    aws_access_key_id=access_key,
    aws_secret_access_key=secret)
b.connect()

print '\n Waiting for bucket:\n'

key = None
key_path = 'result.log'
while not b.bucket:
    print '.'
    sleep(100)
    b.connect()

print '\n Waiting for result.log:\n'
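
The snippet breaks off before the final polling loop. A hedged sketch of the likely continuation, assuming the result is read back through the underlying boto bucket (b.bucket.get_key, as in Examples 1 and 4) and boto's Key.get_contents_as_string; the side that uploads result.log is shown in Example 5.

# Hypothetical continuation: poll until Example 5 has uploaded result.log,
# then read it back through the underlying boto bucket.
while not key:
    print '.'
    sleep(100)
    key = b.bucket.get_key(key_path)

print key.get_contents_as_string()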