Example #1
 def images(self):
     """Return a list of Docker images on the current machine."""
     output = SystemCommand(self.cmd.images).output
     output.pop(0)  # drop the column header row
     return [
         row.split(' ', 1)[0] + ':' +
         row.split(' ', 1)[1].strip().split(' ', 1)[0] for row in output
     ]
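The comprehension above rebuilds a `REPOSITORY:TAG` string from each row of `docker images` output. A minimal, standalone sketch of that split logic on a single row (the row contents are made up for illustration):

# Sample `docker images` row; the values are hypothetical.
row = 'myrepo/app        latest    3f5a9bdc2b1a   2 weeks ago   125MB'
repo, rest = row.split(' ', 1)        # repository name, remainder of the row
tag = rest.strip().split(' ', 1)[0]   # first token of the remainder is the tag
print(repo + ':' + tag)               # -> myrepo/app:latest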
Example #2
 def push(self):
     """Push a docker image to a DockerHub repo."""
     print('Pushing Docker image ({0})'.format(self.cmd.docker_image))
     sc = SystemCommand(self.cmd.push, decode_output=False)
     self.add_command(sc.command)
     self.add_task('Pushed Docker image {0} to DockerHub repo'.format(
         self.cmd.docker_image))
Example #3
def eb_deploy_multi(values):
    # Get Dockerize parameters
    docker = dockerize()[0]

    # Remote directory relative to Project root
    remote_dir = os.path.join(docker.source, REMOTE_DIRECTORY)

    # Copy the Dockerrun.aws.json file to '-remote' directory
    if not os.path.exists(remote_dir):
        os.mkdir(remote_dir)
    shutil.copyfile(os.path.join(docker.source, 'Dockerrun.aws.json'),
                    os.path.join(remote_dir, 'Dockerrun.aws.json'))

    # Change working directory to the remote directory
    os.chdir(remote_dir)
    print(os.getcwd())
    print('Using root directory: {0}'.format(remote_dir))

    with Timer('Deployed to AWS EB'):
        SystemCommand(
            'eb deploy {env} --label {version} --message "{message}"'.format(
                env=values['aws_eb_env'],
                version=docker.tag,
                message=values['desc']), False)
    return
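A hedged usage sketch for `eb_deploy_multi`. It assumes `dockerize()` and the `eb` CLI are already configured for the project; the dictionary keys match those read inside the function, while the values shown are placeholders:

# Hypothetical values; only the 'aws_eb_env' and 'desc' keys are read above.
values = {'aws_eb_env': 'my-app-env', 'desc': 'Deploy latest Docker image'}
eb_deploy_multi(values)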
Example #4
    def move(self,
             src_path,
             dst_path,
             dst_bucket=None,
             recursive=False,
             include=None,
             exclude=None):
        """
        Move an S3 file or folder to another

        :param src_path: Path to source file or folder in S3 bucket
        :param dst_path: Path to destination file or folder
        :param dst_bucket: Bucket to copy to, defaults to same bucket
        :param recursive: Recursively copy all files within the directory
        :param include: Don't exclude files or objects in the command that match the specified pattern
        :param exclude: Exclude all files or objects from the command that match the specified pattern

        More on inclusion and exclusion parameters...
        http://docs.aws.amazon.com/cli/latest/reference/s3/index.html#use-of-exclude-and-include-filters
        """
        uri1 = '{uri}/{src}'.format(uri=self.bucket_uri, src=src_path)
        uri2 = '{uri}/{dst}'.format(
            uri=bucket_uri(dst_bucket) if dst_bucket else self.bucket_uri,
            dst=dst_path)

        # Move recursively if both URIs are directories and NOT files
        return SystemCommand(
            self.cmd.move(object1=uri1,
                          object2=uri2,
                          recursive=is_recursive_needed(
                              uri1, uri2, recursive_default=recursive),
                          include=include,
                          exclude=exclude))
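A usage sketch for `move`, assuming the enclosing class is an S3 bucket wrapper instantiated with a bucket name (the `S3` constructor name and the bucket/key values below are assumptions, not from the source):

s3 = S3('source-bucket')                                   # hypothetical wrapper instance
s3.move('reports/2020.csv', 'archive/2020.csv')            # move within the same bucket
s3.move('reports/', 'reports/', dst_bucket='backup-bucket',
        recursive=True, exclude='*.tmp')                   # move a folder to another bucket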
Example #5
    def list(self,
             remote_path='',
             recursive=False,
             human_readable=False,
             summarize=False):
        """
        List files/folders in a S3 bucket path.

        Optionally, return file size information for each object as
        well as summary info.

        :param remote_path: Path to object root in S3 bucket
        :param recursive: Recursively list files/folders
        :param human_readable: Displays file sizes in human readable format
        :param summarize: Displays summary information (number of objects, total size)
        :return: List of object names in the bucket path
        """
        return [
            out.rsplit(' ', 1)[-1] for out in SystemCommand(
                self.cmd.list(uri='{0}/{1}'.format(
                    self.bucket_uri, remote_path_root(remote_path)),
                              recursive=recursive,
                              human_readable=human_readable,
                              summarize=summarize))
        ]
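A usage sketch for `list`, again assuming a hypothetical `S3` wrapper instance; the method returns just the object names parsed from the `aws s3 ls` output:

keys = S3('my-bucket').list('logs/', recursive=True)   # hypothetical bucket and prefix
for key in keys:
    print(key)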
Example #6
    def create_bucket(self, region='us-east-1'):
        """
        Create a new S3 bucket.

        :param region: Bucket's hosting region
        """
        # Validate that the bucket does not already exist
        assert self.bucket_name not in self.buckets, 'ERROR: Bucket `{0}` already exists.'.format(
            self.bucket_name)

        # Create the bucket
        create = SystemCommand(self.cmd.make_bucket(self.bucket_uri, region))

        # Enable transfer acceleration
        SystemCommand(self.cmd.enable_transfer_acceleration(self.bucket_name))

        return create
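A usage sketch for `create_bucket`, assuming the wrapper is constructed with the name of the bucket to create (names are placeholders):

s3 = S3('my-new-bucket')             # hypothetical wrapper instance
s3.create_bucket(region='us-west-2')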
Example #7
    def list(self, all_apps=False, verbose=False):
        """
        Lists all environments in the current application or all environments in all applications.

        :param all_apps: Lists all environments from all applications.
        :param verbose: Provides more detailed information about all environments, including instances.
        :return: Environments in the application (dict when verbose, set of names otherwise)
        """
        def verbose_env(environment):
            return environment.split(' : ')[0].replace('* ', '')

        def verbose_ec2(environment):
            return environment.split(' : ')[-1].replace("['",
                                                        '').replace("']", '')

        self.initialize()
        cmd = 'eb list'
        cmd += ' --all' if all_apps else ''
        if verbose:
            output = SystemCommand(cmd + ' --verbose').output
            response = {
                'region': output.pop(0).replace('Region: ', ''),
                'application': output.pop(0).replace('Application: ', '')
            }
            output.pop(0)  # discard the 'Environments:' header line
            response['environments'] = {
                verbose_env(i): verbose_ec2(i)
                for i in output
            }
            return response
        else:
            return {i.replace('* ', '') for i in SystemCommand(cmd).output}
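A sketch of how the verbose return value might be consumed, assuming an instance of the enclosing Elastic Beanstalk wrapper named `eb` (a placeholder):

info = eb.list(verbose=True)
print(info['region'], info['application'])
for env_name, instances in info['environments'].items():
    print(env_name, instances)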
Example #8
    def buckets(self):
        """
        List all available S3 buckets.

        Execute the `aws s3 ls` command and decode the output
        """
        return [
            out.rsplit(' ', 1)[-1] for out in SystemCommand(self.cmd.list())
        ]
Example #9
    def is_acceleration_enabled(self):
        """Determine if transfer acceleration is enabled for an AWS S3 bucket."""
        output = SystemCommand(
            self.cmd.acceleration_enabled_status(self.bucket_name)).output

        if len(output) > 0:
            return output[0].strip('"').lower() == 'enabled'
        else:
            return False
Example #10
 def pull(self, resolve_tag=True):
     """Push a docker image to a DockerHub repo."""
     if resolve_tag and not self.cmd.tag:
         self.cmd.tag = self.image_tags[0]
     print('Pulling Docker image ({0})'.format(self.cmd.docker_image))
     sc = SystemCommand(self.cmd.pull, decode_output=False)
     self.add_command(sc.command)
     self.add_task('Pulled Docker image {0} from DockerHub repo'.format(
         self.cmd.docker_image))
Example #11
 def exists(self, remote_path):
     """
     Check to see if an S3 key (file or directory) exists
     :return: Bool
     """
     # If the listing returns no results, the key does not exist
     return len(
         SystemCommand(
             self.cmd.list('{0}/{1}'.format(self.bucket_uri,
                                            remote_path)))) > 0
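A usage sketch for `exists`, with a hypothetical wrapper instance and key:

if S3('my-bucket').exists('data/latest.json'):    # hypothetical bucket and key
    print('Key found')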
Example #12
    def delete_bucket(self, force=False):
        """
        Deletes an empty S3 bucket. A bucket must be completely empty of objects and versioned
        objects before it can be deleted. However, the force parameter can be used to delete
        the non-versioned objects in the bucket before the bucket is deleted.

        :param force: Delete all objects in the bucket first, then delete the bucket itself
        """
        # Validate that the bucket does exist
        assert self.bucket_name in self.buckets, 'ERROR: Bucket `{0}` does not exist.'.format(
            self.bucket_name)
        return SystemCommand(self.cmd.remove_bucket(self.bucket_uri, force))
Example #13
 def run(self):
     """Push a docker image to a DockerHub repo."""
     print('Locally running Docker image')
     sc = SystemCommand(self.cmd.run, decode_output=False)
     self.add_command(sc.command)
     if sc.success:
         self.add_task('Running Docker image ({0}) on local machine'.format(
             self.cmd.docker_image))
     else:
         self.add_task(
             'ERROR: Unable to run Docker image ({0}) on local machine'.
             format(self.cmd.docker_image))
Example #14
 def environments(self):
     """Retrieve a list of environments in the current EB application."""
     cmd = 'aws elasticbeanstalk describe-environments --application-name {0}'.format(
         self.aws_application_name)
     return {
         d[3].split('.', 1)[0]: {
             'running_version': d[-1],
             'status': d[-2]
         }
         for d in [i.split('\t') for i in SystemCommand(cmd).output]
         if d[0].lower() == 'environments'
     }
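Elsewhere in these examples (Example #23), `self.environments` is accessed without parentheses, which suggests the method is exposed as a property; a consumption sketch under that assumption (the `eb` instance name is a placeholder):

for name, info in eb.environments.items():
    print(name, info['status'], info['running_version'])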
Example #15
 def bootstrap(self):
     """Bootstrap docker-compose service development by pulling existing images then building services."""
     print('Bootstrapping docker-compose services')
     for index, cmd in enumerate(self.cmd.bootstrap):
         sc = SystemCommand(cmd, decode_output=False)
         self.add_command(sc.command)
         if sc.success:
             self.add_task('SUCCESS ({}/{}): {}'.format(
                 index + 1,
                 len(self.cmd.bootstrap), sc.command))
         else:
             self.add_task('ERROR   ({}/{}): {}'.format(
                 index + 1,
                 len(self.cmd.bootstrap), sc.command))
Example #16
 def reboot(self):
     """Reboot docker-compose container services by rebuilding then restarting."""
      print('Rebooting docker-compose services')
     for index, cmd in enumerate(self.cmd.reboot):
         sc = SystemCommand(cmd, decode_output=False)
         self.add_command(sc.command)
         if sc.success:
             self.add_task('SUCCESS ({}/{}): {}'.format(
                 index + 1,
                 len(self.cmd.reboot), sc.command))
         else:
             self.add_task('ERROR   ({}/{}): {}'.format(
                 index + 1,
                 len(self.cmd.reboot), sc.command))
Example #17
    def pre_sign(self, remote_path, expiration=3600):
        """
        Generate a pre-signed URL for an Amazon S3 object.

        This allows anyone who receives the pre-signed URL to retrieve the S3 object
        with an HTTP GET request.

        :param remote_path: Path to S3 object relative to bucket root
        :param expiration: Number of seconds until the pre-signed URL expires
        :return: Pre-signed URL for the S3 object
        """
        return SystemCommand(
            self.cmd.pre_sign(
                '{uri}/{src}'.format(uri=self.bucket_uri, src=remote_path),
                expiration))[0]
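A usage sketch for `pre_sign`, assuming a hypothetical `S3` wrapper instance; the returned string is a shareable HTTP GET link:

url = S3('my-bucket').pre_sign('reports/2020.csv', expiration=900)  # valid ~15 minutes
print(url)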
Example #18
    def initialize(self, source=None):
        """Initialize the docker application if it hasn't been previously initialized."""
        # Path to .elasticbeanstalk directory
        source = self.source if not source else source

        # Initialize docker
        os.chdir(source)
        SystemCommand(
            'eb init --region {0} --keyname {1} -p docker {2}'.format(
                self.aws_region, self.aws_instance_key, self.docker_repo))
        self.add_task("Initialized '{0}' as an EB application".format(
            self.aws_application_name))

        # Edit default region value in config.yaml
        self.set_region(source)
Example #19
    def _deploy(self):
        """Use awsebcli command '$eb deploy' to deploy an updated Elastic Beanstalk environment."""
        # Update a Dockerrun.aws.json file in -remote directory
        self.Dockerrun.create()

        # Initialize application in -remote directory
        self.initialize(self.Dockerrun.remote_source)

        os.chdir(self.Dockerrun.remote_source)
        SystemCommand(
            'eb deploy {env} --label {version} --message "{message}"'.format(
                env=self.aws_environment_name,
                version=self.aws_version,
                message=self.aws_version_description))
        self.add_task('Deployed Elastic Beanstalk environment {0}'.format(
            self.aws_environment_name))
Example #20
    def delete(self, remote_path, recursive=False, include=None, exclude=None):
        """
        Delete an S3 object from a bucket.

        :param remote_path: Path to S3 object relative to bucket root
        :param recursive: Recursively delete all files within the directory
        :param include: Don't exclude files or objects in the command that match the specified pattern
        :param exclude: Exclude all files or objects from the command that match the specified pattern
        :return: Command string
        """
        # Delete recursively if the URI is a directory and NOT a file
        return SystemCommand(
            self.cmd.remove(uri='{uri}/{src}'.format(uri=self.bucket_uri,
                                                     src=remote_path),
                            recursive=is_recursive_needed(
                                remote_path, recursive_default=recursive),
                            include=include,
                            exclude=exclude))
Example #21
    def download(self,
                 remote_path,
                 local_path=os.getcwd(),
                 recursive=False,
                 quiet=None):
        """
        Download a file or folder from an S3 bucket.

        :param remote_path: S3 key, aka remote path relative to S3 bucket's root
        :param local_path: Path to file on local disk
        :param recursive: Recursively download files/folders
        :param quiet: When true, does not display the operations performed from the specified command
        """
        return SystemCommand(
            self.cmd.copy(object1='{0}/{1}'.format(self.bucket_uri,
                                                   remote_path),
                          object2=local_path,
                          recursive=recursive,
                          quiet=quiet if quiet else self.quiet))
Example #22
    def upload(self, local_path, remote_path=None, acl='private', quiet=None):
        """
        Upload a local file to an S3 bucket.

        :param local_path: Path to file on local disk
        :param remote_path: S3 key, aka remote path relative to S3 bucket's root
        :param acl: Access permissions, must be either 'private', 'public-read' or 'public-read-write'
        :param quiet: When true, does not display the operations performed from the specified command
        """
        # Recursively upload files if the local target is a folder
        # Use local_path file/folder name as remote_path if none is specified
        remote_path = os.path.basename(
            local_path) if not remote_path else remote_path
        assert_acl(acl)
        return SystemCommand(
            self.cmd.copy(
                object1=local_path,
                object2='{0}/{1}'.format(self.bucket_uri, remote_path),
                recursive=os.path.isdir(local_path),
                acl=acl,
                quiet=quiet if quiet else self.quiet))
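A round-trip sketch combining the `upload` and `download` helpers above; the bucket, paths, and `S3` wrapper name are assumptions:

s3 = S3('my-bucket')                                       # hypothetical wrapper instance
s3.upload('report.pdf', remote_path='docs/report.pdf', acl='public-read')
s3.download('docs/report.pdf', local_path='/tmp/report.pdf')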
Example #23
    def deploy(self):
        """Deploy a docker image from a DockerHub repo to a AWS elastic beanstalk environment instance."""
        # Check to see if AWS EB Environment already exists or if it is 'Terminated'
        try:
            environments = self.environments.get(
                self.aws_environment_name).get('status', None) == 'Terminated'
        except AttributeError:
            environments = True
        if any(condition for condition in (
                self.aws_environment_name not in self.environments,
                environments)):
            print('Creating Elastic Beanstalk environment')
            self._create()
        else:
            print('Deploying Elastic Beanstalk environment')
            self._deploy()

        # Dump deployment data/results to JSON
        self.update_history(self.json_path, self.parameters)

        # Open Elastic Beanstalk in a browser
        SystemCommand('eb open')
Example #24
    def copy(self,
             src_path,
             dst_path,
             dst_bucket=None,
             recursive=False,
             include=None,
             exclude=None,
             acl='private',
             quiet=None):
        """
        Copy an S3 file or folder to another

        :param src_path: Path to source file or folder in S3 bucket
        :param dst_path: Path to destination file or folder
        :param dst_bucket: Bucket to copy to, defaults to same bucket
        :param recursive: Recursively copy all files within the directory
        :param include: Don't exclude files or objects in the command that match the specified pattern
        :param exclude: Exclude all files or objects from the command that match the specified pattern
        :param acl: Access permissions, must be either 'private', 'public-read' or 'public-read-write'
        :param quiet: When true, does not display the operations performed from the specified command

        More on inclusion and exclusion parameters...
        http://docs.aws.amazon.com/cli/latest/reference/s3/index.html#use-of-exclude-and-include-filters
        """
        uri1 = '{uri}/{src}'.format(uri=self.bucket_uri, src=src_path)
        uri2 = '{uri}/{dst}'.format(
            uri=bucket_uri(dst_bucket) if dst_bucket else self.bucket_uri,
            dst=dst_path)

        # Copy recursively if both URIs are directories and NOT files
        return SystemCommand(
            self.cmd.copy(object1=uri1,
                          object2=uri2,
                          recursive=is_recursive_needed(
                              uri1, uri2, recursive_default=recursive),
                          include=include,
                          exclude=exclude,
                          acl=acl,
                          quiet=quiet if quiet else self.quiet))
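A usage sketch for `copy`, mirroring `move` but leaving the source object in place; the names below are placeholders:

s3 = S3('source-bucket')                                   # hypothetical wrapper instance
s3.copy('assets/', 'assets/', dst_bucket='cdn-bucket',
        recursive=True, acl='public-read', include='*.png')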
Example #25
    def _create(self):
        """Use awsebcli command `$ eb create` to create a new Elastic Beanstalk environment."""
        # Create directory with '-remote' extension next to source
        if not os.path.exists(self.Dockerrun.remote_source):
            os.mkdir(self.Dockerrun.remote_source)
            self.add_task(
                "Created directory '{0}' for storing Dockerrun file".format(
                    self.Dockerrun.remote_source))

        # Create a Dockerrun.aws.json file in -remote directory
        self.Dockerrun.create()

        # Initialize application in -remote directory
        self.initialize(self.Dockerrun.remote_source)

        # Create Elastic Beanstalk environment in current application
        os.chdir(self.Dockerrun.remote_source)
        cmd = 'eb create {env} --keyname {key}'.format(
            env=self.aws_environment_name, key=self.aws_instance_key)
        SystemCommand(cmd)
        self.Dockerrun.destroy()
        self.add_task('Created Elastic Beanstalk environment {0}'.format(
            self.aws_environment_name))
Example #26
    def sync(self,
             local_path,
             remote_path=None,
             delete=False,
             acl='private',
             quiet=None,
             remote_source=False):
        """
        Synchronize local files with an S3 bucket.

        S3 sync only copies missing or outdated files or objects between
        the source and target.  However, you can also supply the --delete
        option to remove files or objects from the target that are not
        present in the source.

        :param local_path: Local source directory
        :param remote_path: Destination directory (relative to bucket root)
        :param delete: Sync with deletion, disabled by default
        :param acl: Access permissions, must be either 'private', 'public-read' or 'public-read-write'
        :param quiet: When true, does not display the operations performed from the specified command
        :param remote_source: When true, remote_path is used as the source instead of destination
        """
        assert_acl(acl)
        uri = '{0}/{1}'.format(
            self.bucket_uri,
            os.path.basename(local_path) if not remote_path else remote_path)

        # Swap source/destination when syncing from the S3 bucket to local
        destination, source = (local_path,
                               uri) if remote_source else (uri, local_path)

        return SystemCommand(
            self.cmd.sync(source=source,
                          destination=destination,
                          delete=delete,
                          acl=acl,
                          quiet=quiet if quiet else self.quiet))
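A usage sketch for `sync` in both directions; the bucket, directories, and `S3` wrapper name are assumptions:

s3 = S3('my-bucket')                                           # hypothetical wrapper instance
s3.sync('./build', remote_path='site', delete=True)            # push local -> S3
s3.sync('./restore', remote_path='site', remote_source=True)   # pull S3 -> local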
Example #27
 def delete_images(self):
     """Delete all images on the current machine."""
     if len(self.images) > 0:
         return SystemCommand(self.cmd.delete_images)
Example #28
 def clean(self):
     """Remove stopped containers and intermediate images from the current machine."""
     return SystemCommand(self.cmd.clean)
Example #29
 def build(self):
     """Build a docker image for distribution to DockerHub."""
     print('Building Docker image ({0})'.format(self.cmd.docker_image))
     sc = SystemCommand(self.cmd.build, decode_output=False)
     self.add_command(sc.command)
     self.add_task('Built Docker image ({0})'.format(self.cmd.docker_image))
Example #30
 def delete_volumes(self):
     """Delete all volumes on the current machine."""
     return SystemCommand(self.cmd.delete_volumes)