Example #1
def upload_from_instance_to_s3(download_filters: list,
                               host: str,
                               port: int,
                               user: str,
                               key_path: str,
                               dry_run: bool = False):
    """Uploads files from the running instance to the S3 bucket.

    It uses a temporary S3 directory that is unique for the instance. This
    directory keeps the files downloaded from the instance, so that only the
    changed files are synced with the local machine, not all of them on every
    download.
    """

    # "sudo" should be called with the "-i" flag to use the root environment, so aws-cli will read
    # the config file from the root home directory
    args = ['sudo', '-i', '/tmp/spotty/instance/scripts/upload_files.sh']
    args += AwsCli.get_s3_sync_arguments(filters=download_filters,
                                         delete=True,
                                         quote=True,
                                         dry_run=dry_run)

    if not dry_run:
        args += ['>', '/dev/null']

    remote_cmd = subprocess.list2cmdline(args)

    # connect to the instance and run the remote command
    ssh_command = get_ssh_command(host,
                                  port,
                                  user,
                                  key_path,
                                  remote_cmd,
                                  quiet=not dry_run)
    subprocess.call(ssh_command)
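
Every example on this page hands an assembled remote command string to get_ssh_command(). The helper itself is not shown here, so the sketch below is only a guess at the kind of ssh invocation it might build from the (host, port, user, key_path, remote_cmd, quiet) arguments used above; the flag set is an assumption, not the spotty implementation.

def get_ssh_command_sketch(host: str, port: int, user: str, key_path: str,
                           remote_cmd: str, quiet: bool = False) -> list:
    # hypothetical reconstruction: build an ssh argument list suitable for subprocess.call()
    command = ['ssh', '-i', key_path, '-p', str(port), '-t',
               '-o', 'StrictHostKeyChecking=no']
    if quiet:
        command.append('-q')  # suppress ssh's own diagnostic output
    command += ['%s@%s' % (user, host), remote_cmd]
    return command
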
Example #2
    def _run(self, instance_manager: AbstractInstanceManager, args: Namespace,
             output: AbstractOutputWriter):
        if args.list_sessions:
            remote_cmd = ['tmux', 'ls', ';', 'echo', '']
        else:
            # tmux session name
            session_name = args.session_name
            if not session_name:
                session_name = 'spotty-ssh-host-os' if args.host_os else 'spotty-ssh-container'

            # a command to connect to the host OS or to the container
            remote_cmd = ['tmux', 'new', '-s', session_name, '-A']
            if not args.host_os:
                # connect to the container or keep the tmux window in case of a failure
                container_cmd = subprocess.list2cmdline(
                    ['sudo', '/tmp/spotty/instance/scripts/container_bash.sh'])
                tmux_cmd = '%s || tmux set remain-on-exit on' % container_cmd
                remote_cmd += [tmux_cmd]

        remote_cmd = subprocess.list2cmdline(remote_cmd)

        # connect to the instance
        ssh_command = get_ssh_command(instance_manager.get_ip_address(),
                                      instance_manager.ssh_port,
                                      instance_manager.ssh_user,
                                      instance_manager.ssh_key_path,
                                      remote_cmd)
        subprocess.call(ssh_command)
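
For reference, the snippet below replays the same subprocess.list2cmdline() calls as Example #2 with the default container session name, so the final remote command string is visible; the printed line is the only new piece.

import subprocess

container_cmd = subprocess.list2cmdline(
    ['sudo', '/tmp/spotty/instance/scripts/container_bash.sh'])
tmux_cmd = '%s || tmux set remain-on-exit on' % container_cmd
remote_cmd = subprocess.list2cmdline(['tmux', 'new', '-s', 'spotty-ssh-container', '-A', tmux_cmd])
print(remote_cmd)
# tmux new -s spotty-ssh-container -A "sudo /tmp/spotty/instance/scripts/container_bash.sh || tmux set remain-on-exit on"
# list2cmdline wraps the last argument in quotes because it contains spaces, so the remote shell
# passes the whole "connect to the container or keep the window open" command to tmux as one string.
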
Example #3
File: ssh.py Project: mpaepper/spotty
    def run(self, output: AbstractOutputWriter):
        project_config = self._config['project']
        instance_config = self._config['instance']

        project_name = project_config['name']
        region = instance_config['region']
        local_ssh_port = instance_config['localSshPort']

        # get instance IP address
        stack = StackResource(None, project_name, region)
        ec2 = boto3.client('ec2', region_name=region)
        ip_address = get_instance_ip_address(ec2, stack.name)

        if self._args.host_os:
            # connect to the host OS
            session_name = self._args.session_name if self._args.session_name else 'spotty-ssh-host-os'
            remote_cmd = ['tmux', 'new', '-s', session_name, '-A']
        else:
            # connect to the container
            session_name = self._args.session_name if self._args.session_name else 'spotty-ssh-container'
            remote_cmd = ['tmux', 'new', '-s', session_name, '-A', 'sudo', '/scripts/container_bash.sh']

        remote_cmd = subprocess.list2cmdline(remote_cmd)

        # connect to the instance
        ssh_command = get_ssh_command(project_name, region, ip_address, remote_cmd, local_ssh_port)
        subprocess.call(ssh_command)
Example #4
File: sync.py Project: tsdalton/spotty
def sync_bucket_to_instance(sync_filters: list, host: str, port: int, user: str, key_path: str):
    """Syncs the project from the bucket to the instance."""
    remote_cmd = subprocess.list2cmdline(['sudo', '/tmp/spotty/instance/scripts/sync_project.sh',
                                          *get_instance_sync_arguments(sync_filters), '>', '/dev/null'])

    # connect to the instance and run remote command
    ssh_command = get_ssh_command(host, port, user, key_path, remote_cmd, quiet=True)
    subprocess.call(ssh_command)
Example #5
def sync_instance_with_s3(sync_filters: list, host: str, port: int, user: str, key_path: str):
    """Syncs the project from the S3 bucket to the instance."""

    # "sudo" should be called with the "-i" flag to use the root environment, so aws-cli will read
    # the config file from the root home directory
    remote_cmd = subprocess.list2cmdline(['sudo', '-i', '/tmp/spotty/instance/scripts/sync_project.sh',
                                          *get_instance_sync_arguments(sync_filters), '>', '/dev/null'])

    # connect to the instance and run remote command
    ssh_command = get_ssh_command(host, port, user, key_path, remote_cmd, quiet=True)
    subprocess.call(ssh_command)
Example #6
def sync_instance_with_s3(instance_ip_address, project_name, region,
                          local_ssh_port: int = None):
    # command to sync S3 with the instance
    remote_cmd = subprocess.list2cmdline([
        'sudo', '-i', '/bin/bash', '-e', '/tmp/scripts/sync_project.sh', '>',
        '/dev/null'
    ])

    # connect to the instance and run remote command
    ssh_command = get_ssh_command(project_name,
                                  region,
                                  instance_ip_address,
                                  remote_cmd,
                                  local_ssh_port,
                                  quiet=True)
    subprocess.call(ssh_command)
Example #7
    def _run(self, instance_manager: AbstractInstanceManager, args: Namespace,
             output: AbstractOutputWriter):
        # check that it's a GCP instance
        if not isinstance(instance_manager, InstanceManager):
            raise ValueError('Instance "%s" is not an GCP instance.' %
                             instance_manager.instance_config.name)

        deployment = instance_manager.image_deployment

        try:
            deployment.deploy(args.family_name, args.debug_mode, output)
        finally:
            if args.debug_mode:
                ip_address = deployment.get_ip_address()
                if ip_address:
                    ssh_command = get_ssh_command(
                        ip_address, instance_manager.ssh_port,
                        instance_manager.ssh_user,
                        instance_manager.ssh_key_path, 'tmux')

                    output.write(
                        '\nUse the following command to connect to the instance:\n'
                        '  %s\n' % subprocess.list2cmdline(ssh_command))
Example #8
def sync_instance_with_s3(sync_filters: list, host: str, port: int, user: str,
                          key_path: str):
    """Syncs the project from the S3 bucket to the instance."""

    # "sudo" should be called with the "-i" flag to use the root environment, so aws-cli will read
    # the config file from the root home directory
    remote_cmd = subprocess.list2cmdline([
        'sudo', '-i', '/tmp/spotty/instance/scripts/sync_project.sh',
        *get_instance_sync_arguments(sync_filters)
    ])

    # connect to the instance and run remote command
    ssh_command = get_ssh_command(host,
                                  port,
                                  user,
                                  key_path,
                                  remote_cmd,
                                  quiet=False)
    try:
        output = subprocess.check_output(ssh_command)
        print(f"Sync cmd output (remote host): {str(output)}")
    except subprocess.CalledProcessError as e:
        print(f"Process error when copying to the instance: \n{str(e.output)}")
Example #9
File: run.py Project: mpaepper/spotty
    def run(self, output: AbstractOutputWriter):
        project_config = self._config['project']
        instance_config = self._config['instance']
        project_name = project_config['name']
        region = instance_config['region']
        local_ssh_port = instance_config['localSshPort']

        script_name = self._args.script_name
        if script_name not in self._config['scripts']:
            raise ValueError('Script "%s" is not defined in the configuration file.' % script_name)

        # get instance IP address
        stack = StackResource(None, project_name, region)
        ec2 = boto3.client('ec2', region_name=region)
        ip_address = get_instance_ip_address(ec2, stack.name)

        # sync the project with the instance
        if self._args.sync:
            output.write('Syncing the project with S3 bucket...')

            # sync the project with S3 bucket
            sync_filters = project_config['syncFilters']
            sync_project_with_s3(self._project_dir, project_name, region, sync_filters, output)

            output.write('Syncing S3 bucket with the instance...')

            # sync S3 with the instance
            sync_instance_with_s3(ip_address, project_name, region, local_ssh_port)

        # tmux session name
        session_name = self._args.session_name if self._args.session_name else 'spotty-script-%s' % script_name

        # base64 encoded user script from the configuration file
        script_base64 = base64.b64encode(self._config['scripts'][script_name].encode('utf-8')).decode('utf-8')

        # remote path where the script will be uploaded
        script_path = '/tmp/docker/%s.sh' % script_name

        # log file for the script outputs
        script_log_file = '/var/log/spotty-run/%s.log' % script_name

        # command to attach user to existing tmux session
        attach_session_cmd = subprocess.list2cmdline(['tmux', 'attach', '-t', session_name, '>', '/dev/null', '2>&1'])

        # command to upload user script to the instance
        upload_script_cmd = subprocess.list2cmdline(['echo', script_base64, '|', 'base64', '-d', '>', script_path])

        # command to log the time when user script started
        start_time_cmd = subprocess.list2cmdline(['echo', '-e', '\\nScript started: `date \'+%Y-%m-%d %H:%M:%S\'`\\n',
                                                  '>>', script_log_file])

        # command to run user script inside the docker container
        docker_cmd = subprocess.list2cmdline(['sudo', '/scripts/container_bash.sh', '-xe', script_path, '2>&1',
                                              '|', 'tee', '-a', script_log_file])

        # command to create new tmux session and run user script
        new_session_cmd = subprocess.list2cmdline(['tmux', 'new', '-s', session_name,
                                                   '%s && %s' % (start_time_cmd, docker_cmd)])

        # composition of the commands: if the user cannot be attached to the tmux session (we assume the
        # session doesn't exist), then we upload the user script to the instance, create a new tmux session
        # and run that script inside the Docker container
        remote_cmd = '%s || (%s && %s)' % (attach_session_cmd, upload_script_cmd, new_session_cmd)

        # connect to the instance and run the command above
        ssh_command = get_ssh_command(project_name, region, ip_address, remote_cmd, local_ssh_port)
        subprocess.call(ssh_command)
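
The upload step in Example #9 works by base64-encoding the user script locally and letting the remote shell decode it back into a file. A small self-contained sketch of that round trip (the script body and target path are made up for illustration):

import base64
import subprocess

user_script = 'echo "hello from the instance"\n'  # hypothetical script body
script_base64 = base64.b64encode(user_script.encode('utf-8')).decode('utf-8')

# same construction as in Example #9: the remote shell decodes the payload back into a file
upload_script_cmd = subprocess.list2cmdline(
    ['echo', script_base64, '|', 'base64', '-d', '>', '/tmp/docker/hello.sh'])
print(upload_script_cmd)  # echo <base64 payload> | base64 -d > /tmp/docker/hello.sh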