Example #1
def check_credentials(local_dir, sshpath, password, subdir=None):
    """
    Checks whether one can download files from the remote SSH server into the directory ``local_dir`` on the Plex_ server (or upload files from that directory).

    :param str local_dir: the local directory, on the Plex_ server, into which to download files from the remote SSH server.
    :param str sshpath: the full path with username and host name for the SSH server. Format is ``'username@hostname'``.
    :param str password: the password used to connect to the SSH server.
    :param str subdir: if not ``None``, the subdirectory on the remote SSH server from which to download files.

    :returns: if everything works, return ``'SUCCESS'``. If it fails, return a specific, illuminating error message.
    :rtype: str

    .. seealso::
    
       * :py:meth:`push_credentials <howdy.core.core_rsync.push_credentials>`.
       * :py:meth:`get_credentials <howdy.core.core_rsync.get_credentials>`.
    """
    try:
        #
        ## first, does local directory exist?
        if not os.path.isdir(os.path.abspath(local_dir)):
            raise ValueError("Error, %s is not a directory." %
                             os.path.abspath(local_dir))
        #
        ## second, can we login with username and password?
        uname = sshpath.split('@')[0]
        hostname = sshpath.split('@')[1]
        print(uname, hostname)
        # raises a ValueError if cannot do so
        # needs to pass in look_for_keys = False so not use id_* keys
        with Connection(hostname,
                        user=uname,
                        connect_kwargs={
                            'password': password,
                            'look_for_keys': False
                        }) as conn:
            conn.run('ls', hide=True)  # errors out if not a valid connection
            #
            ## third, if subdir is not None, does it exist?
            if subdir is not None:
                if not exists(conn, subdir):
                    raise ValueError("Error, %s does not exist." % subdir)
                # will raise an error if this is a file
                directory(conn, subdir)
        return 'SUCCESS'
    except Exception as e:
        return str(e)
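A minimal usage sketch for the function above; the directory, SSH path, password, and subdirectory are hypothetical placeholders, not values from the source.

# Hypothetical values for illustration only.
status = check_credentials(
    '/mnt/plex/downloads',           # local directory on the Plex server (made up)
    'someuser@remote.example.com',   # 'username@hostname' for the SSH server (made up)
    'not-a-real-password',
    subdir='incoming')               # optional remote subdirectory (made up)
if status != 'SUCCESS':
    print('Credential check failed: %s' % status)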
Example #2
def ec2_connection(request, ec2_instance, ec2_key_name, ec2_instance_type,
                   region):
    """
    Fixture to establish a connection with the EC2 instance, if necessary.
    :param request: pytest test request
    :param ec2_instance: ec2_instance pytest fixture
    :param ec2_key_name: unique key name
    :param ec2_instance_type: ec2_instance_type pytest fixture
    :param region: Region where ec2 instance is launched
    :return: Fabric connection object
    """
    instance_id, instance_pem_file = ec2_instance
    region = P3DN_REGION if ec2_instance_type == "p3dn.24xlarge" else region
    ip_address = ec2_utils.get_public_ip(instance_id, region=region)
    LOGGER.info(f"Instance ip_address: {ip_address}")
    user = ec2_utils.get_instance_user(instance_id, region=region)
    LOGGER.info(f"Connecting to {user}@{ip_address}")
    conn = Connection(
        user=user,
        host=ip_address,
        connect_kwargs={"key_filename": [instance_pem_file]},
    )

    random.seed(f"{datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')}")
    unique_id = random.randint(1, 100000)

    artifact_folder = f"{ec2_key_name}-{unique_id}-folder"
    s3_test_artifact_location = test_utils.upload_tests_to_s3(artifact_folder)

    def delete_s3_artifact_copy():
        test_utils.delete_uploaded_tests_from_s3(s3_test_artifact_location)

    request.addfinalizer(delete_s3_artifact_copy)

    conn.run(
        f"aws s3 cp --recursive {test_utils.TEST_TRANSFER_S3_BUCKET}/{artifact_folder} $HOME/container_tests"
    )
    conn.run(
        f"mkdir -p $HOME/container_tests/logs && chmod -R +x $HOME/container_tests/*"
    )

    # Log into ECR if we are in canary context
    if test_utils.is_canary_context():
        public_registry = test_utils.PUBLIC_DLC_REGISTRY
        test_utils.login_to_ecr_registry(conn, public_registry, region)

    return conn
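A hedged sketch of a test consuming the ec2_connection fixture above; the test name and the command it runs are illustrative, not taken from the source.

def test_container_tests_uploaded(ec2_connection):
    # The fixture has already synced the test artifacts to $HOME/container_tests.
    result = ec2_connection.run("ls $HOME/container_tests", hide=True)
    assert result.ok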
Example #3
def resource_session(request):
    """Pytest fixture that provides instantiated session objects for each
    of the classes that inherit the Session or POSIXSession classes.

    The fixture will run the test method once for each session object provided.

    Parameters
    ----------
    request : object
        Pytest request object that contains the class to test against

    Yields
    ------
    session object
        Instantiated object based on a class that extends the Session or
        POSIXSession class
    """
    # Initialize docker connection to testing container.
    if request.param in [DockerSession, PTYDockerSession]:
        client = docker.Client()
        container = [
            c for c in client.containers()
            if '/testing-container' in c['Names']
        ][0]
        yield request.param(client, container)
    elif request.param in [SSHSession, PTYSSHSession]:
        # Initialize SSH connection to testing Docker container.
        connection = Connection(
            'localhost',
            user='******',
            port=49000,
            connect_kwargs={
                'password': '******'
            }
        )
        connection.open()
        yield request.param(connection)
    elif request.param in [SingularitySession, PTYSingularitySession]:
        # Initialize Singularity test container.
        name = str(uuid.uuid4().hex)[:11]
        resource = Singularity(name=name, image='docker://python:2.7')
        resource.connect()
        resource.create()
        yield request.param(name)
        resource.delete()
    else:
        yield request.param()
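The fixture above reads request.param, so it is presumably registered with a params list; a hedged sketch of how the decorator and a consuming test might look (the class names are the ones handled in the branches above, the test itself is illustrative):

import pytest

@pytest.fixture(params=[DockerSession, PTYDockerSession,
                        SSHSession, PTYSSHSession,
                        SingularitySession, PTYSingularitySession])
def resource_session(request):
    ...  # body as shown above

def test_session_is_created(resource_session):
    # pytest runs this once per session class in the params list.
    assert resource_session is not None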
Example #4
    def __init__(self, ops, data):
        super(CosmicSystemVM, self).__init__(ops, data)

        # Load configuration
        config = get_config()
        ssh_user = config.get('ssh', 'user', fallback=None)
        ssh_key_file = config.get('ssh', 'ssh_key_file', fallback=None)
        connect_kwargs = {'banner_timeout': 60}
        if ssh_key_file:
            connect_kwargs['key_filename'] = ssh_key_file

        # Setup SSH connection
        self._connection = Connection(self['hostname'],
                                      user=ssh_user,
                                      connect_kwargs=connect_kwargs,
                                      forward_agent=True,
                                      connect_timeout=60)
Example #5
 def simple_command_with_pty(self):
     """
     Run command under PTY on localhost
     """
     # Most Unix systems should have stty, which asplodes when not run
     # under a pty, and prints useful info otherwise
     result = Connection('localhost').run(
         'stty size',
         hide=True,
         pty=True,
     )
     found = result.stdout.strip().split()
     cols, rows = pty_size()
     assert tuple(map(int, found)) == (rows, cols)
     # PTYs use \r\n, not \n, line separation
     assert "\r\n" in result.stdout
     assert result.pty is True
Example #6
def run_checks(con_details, **commands_to_run):
    remote_report = dict()
    for check_types in commands_to_run:
        try:
            for i in commands_to_run[check_types]:
                with Connection(user=con_details.username,
                                host=con_details.ip,
                                connect_kwargs={
                                    "key_filename": con_details.private_key
                                }) as conn:
                    result = conn.run(i, hide=True)
                    msg = "##################### {0.command!r} on {0.connection.host} #####################\n\n{0.stdout}"
                    print(msg.format(result))
                    remote_report[i] = result.stdout
        except TypeError:
            pass
    return remote_report
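A hedged sketch of how run_checks might be invoked; ConDetails is a hypothetical namedtuple standing in for whatever object supplies username, ip, and private_key, and the commands are illustrative.

from collections import namedtuple

ConDetails = namedtuple('ConDetails', 'username ip private_key')  # hypothetical container
details = ConDetails('ubuntu', '203.0.113.10', '/home/me/.ssh/id_rsa')

report = run_checks(
    details,
    disk=['df -h', 'lsblk'],   # each keyword names a check type mapped to a list of commands
    memory=['free -m'],
)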
Example #7
def pull_db(ctx):
    """Pull a local copy of the remote DB and reset all passwords"""
    _local_dotenv_if_not_exists()

    with Connection(config.host) as conn:
        e = _srv_env(conn, f"{config.domain}/.env")

    srv_dsn = e("DATABASE_URL")
    local_dsn = _local_env()("DATABASE_URL")
    dbname = _dbname_from_dsn(local_dsn)

    run(ctx, f"dropdb --if-exists {dbname}", warn=True)
    run(ctx, f"createdb {dbname}")
    run(ctx,
        f"ssh {config.host} -C 'pg_dump -Ox {srv_dsn}' | psql {local_dsn}")

    reset_pw(ctx)
Example #8
    def _config(self, hosts, node_parameters):
        Print.info('Generating configuration files...')

        # Cleanup all local configuration files.
        cmd = CommandMaker.cleanup()
        subprocess.run([cmd], shell=True, stderr=subprocess.DEVNULL)

        # Recompile the latest code.
        cmd = CommandMaker.compile().split()
        subprocess.run(cmd, check=True, cwd=PathMaker.node_crate_path())

        # Create alias for the client and nodes binary.
        cmd = CommandMaker.alias_binaries(PathMaker.binary_path())
        subprocess.run([cmd], shell=True)

        # Generate configuration files.
        keys = []
        key_files = [PathMaker.key_file(i) for i in range(len(hosts))]
        for filename in key_files:
            cmd = CommandMaker.generate_key(filename).split()
            subprocess.run(cmd, check=True)
            keys += [Key.from_file(filename)]

        names = [x.name for x in keys]
        consensus_addr = [f'{x}:{self.settings.consensus_port}' for x in hosts]
        mempool_addr = [f'{x}:{self.settings.mempool_port}' for x in hosts]
        front_addr = [f'{x}:{self.settings.front_port}' for x in hosts]
        committee = Committee(names, consensus_addr, mempool_addr, front_addr)
        committee.print(PathMaker.committee_file())

        node_parameters.print(PathMaker.parameters_file())

        # Cleanup all nodes.
        cmd = f'{CommandMaker.cleanup()} || true'
        g = Group(*hosts, user='******', connect_kwargs=self.connect)
        g.run(cmd, hide=True)

        # Upload configuration files.
        progress = progress_bar(hosts, prefix='Uploading config files:')
        for i, host in enumerate(progress):
            c = Connection(host, user='******', connect_kwargs=self.connect)
            c.put(PathMaker.committee_file(), '.')
            c.put(PathMaker.key_file(i), '.')
            c.put(PathMaker.parameters_file(), '.')

        return committee
Example #9
def setup_master(master: str, passphrase: str, user: str) -> None:
    connect_kwargs = {'passphrase': passphrase}
    node = Connection(master, user=user, connect_kwargs=connect_kwargs)
    home = get_home_path(node)[0]
    # install(node)  # Not calling because of Fabric bug

    # redis conf
    logfile = f'logfile "{home}/redis.log"'
    node.put('redis_configs/redis.conf', 'redis-stable/redis.conf')
    node.run(f'echo {logfile} >> redis-stable/redis.conf')

    # sentinel conf
    logfile = f'logfile "{home}/sentinel.log"'
    sentinel_monitor = f'sentinel monitor mymaster {master} 6379 2'
    node.put('redis_configs/sentinel.conf', 'redis-stable/sentinel.conf')
    node.run(f'echo {logfile} >> redis-stable/sentinel.conf')
    node.run(f"sed -i 's/placeholder-line/{sentinel_monitor}/g' redis-stable/sentinel.conf")
Example #10
def recieve():
    print('start recieve:')
    ssh_recieve = Connection(
        host='pc09-fort.emulab.net',
        user='******',
        connect_kwargs={'key_filename': '/Users/allis/Powder/paramiko/id_rsa'})
    ssh_recieve.run(
        'uhd_rx_cfile -f 3555e6 --lo-offset=1.2M -N 50000000 power1')
    ssh_recieve.run('iq_to_power.py -p ~/power1 -w 25000000 -n db_power1')
    subprocess.call(
        'pscp -pw kirby -scp [email protected]:~/db_power1.csv db_power1.csv',
        shell=True)
    #ssh_transmit.run('^C')
    #    endRecieve = True
    #    print(endRecieve)
    print('end recieve!')
Example #11
def file_scp():
    for host in deploy_hosts:
        conn = Connection(host)
        conn.run('mkdir -p %s' % cellar_path)
        conn.put('%s/%s' % (software_path, es_file), cellar_path)
        conn.put('%s/%s' % (software_path, es_ik_file), cellar_path)
        conn.run('tar -zxvf %s -C %s' % ('%s/%s' %
                                         (cellar_path, es_file), cellar_path))
        conn.run('unzip %s -d %s' % ('%s/%s' %
                                     (cellar_path, es_ik_file), cellar_path))
        conn.run('mv %s %s' %
                 ('%s/%s' %
                  (cellar_path, es_ik_unzip_dir), '%s/%s/plugins/ik' %
                  (cellar_path, es_untar_dir)))
        # clear
        conn.run('rm %s/%s' % (cellar_path, es_file))
        conn.run('rm %s/%s' % (cellar_path, es_ik_file))
Example #12
def build(c):
    # includes = ['static', 'templates', 'transwarp', 'favicon.ico', '*.py']
    # excludes = ['test', '.*', '*.pyc', "*.pyo"]
    c = Connection('65.49.215.135', user='******', port=27723, connect_kwargs={'password': '******'})
    # result = c.run('ls', hide=True)
    # msg = "Ran {0.command!r} on {0.connection.host}, got stdout:\n{0.stdout}"
    # print(msg.format(result))
    c.local('cd %s' % os.path.join(os.path.abspath('.'), 'www'))
    c.local('del dist\\%s' % _TAR_FILE)
    tar = tarfile.open("dist\\%s" % _TAR_FILE, "w:gz")
    for root, _dir, files in os.walk("www/"):  # archive the www folder
        for f in files:
            if not (('.pyc' in f) or ('.pyo' in f)):  # skip .pyc/.pyo files produced while debugging; for simplicity this does not reproduce every option from Teacher Liao's original script
                fullpath = os.path.join(root, f)
                tar.add(fullpath)
    tar.close()
    deploy(c)
Example #13
def run_script(request):
    site_pk = request.POST.get("site_id", 0)
    action = request.POST.get("action", "")

    # Do nothing
    if not action:
        return jsonFailed(5, "没有要执行的文件")

    site = Site.objects.filter(pk=site_pk).first()
    if not site:
        return jsonFailed(1, msg="没有找到该站点!该站点可能已经被删除")

    action_dict = {
        'restart': site.restart,
        'deploy': site.deploy,
        'update_cert': site.update_cert
    }

    file_name = action_dict.get(action, "")

    if not file_name:
        return jsonFailed(1, msg="该站点未设置脚本文件")

    # Connect to the server
    user = '******'
    host = settings.SERVER_HOST
    key_filename = settings.KEY_FILENAME_PATH

    result = Connection(host,
                        user=user,
                        connect_kwargs={"key_filename": key_filename})

    script_file = os.path.join(settings.SCRIPT_PATH, file_name)

    try:
        # The command to run
        with result.cd(script_file):
            result = result.run("python3 %s" % file_name)
            msg = result.stdout

        return jsonSuccess(msg="返回结果:" + msg)

        # print('return_code', pwd.return_code)
        # print('stdout', pwd.stdout)
    except UnexpectedExit as e:
        return jsonFailed(2, msg="执行命令错误")
Example #14
def deploy(host, user, password, component):
    ''' deploy services to specified machine via fabric
        example:
        python3 -m tao.tools deploy_rpm -h 10.1.2.13 -p dx@666 -c falcon-api:1.0 -c falcon-graph:1.1
    '''
    config = Config(overrides={'sudo': {'password': password}})
    if not component:
        _echo("no components to deploy")
        raise RuntimeError
    with Connection(host,
                    user,
                    config=config,
                    connect_kwargs={'password': password}) as conn:
        conn.run('yum clean all')
        for app_item in component:
            _do_deploy(conn, app_item)
        _echo('done')
Example #15
def check_readiness(args, sudo_user=None):
    echo_h1("Checking readiness")
    success = True
    metadata = {}
    for host in args.host:
        with Connection(host, user=args.user) as conn:
            echo_h2(host)
            ok1, m = _check_bin(conn, REQUIRED_BINS, sudo_user)
            ok2 = _check_path(conn, args.path, sudo_user)
            success = success and ok1 and ok2
            metadata[host] = m
            echo("\n")
    if success:
        return metadata
    echo_error("The system failed readiness check. You may fix this by making sure")
    echo_error("packages are installed and the deploy path is writable by the user.")
    sys.exit(1)
Example #16
def main():

	if len(sys.argv)<=1:
		print("please provide vaild param for collect_static:install_reqs:migrate:restart_nginx:restart_server in format of y:n:n:n format")
		return

	deploy_rules= sys.argv[1].split(":")
	branch_allowed_for_management_command = assigned_branch #deploy_rules[-1]

	for host, user, pem in zip(hosts, users, pem_files):

		conn = Connection(host=host, user=user, inline_ssh_env=True, connect_kwargs={"key_filename": pem})

		# if management_command_transaction_enabled:
		# 	stop_all_management_commands(conn, host, branch_allowed_for_management_command)

		deploy(conn, *deploy_rules)
Example #17
def deploy():
    c = Connection(env('fab_host'),
                   user=env('fab_user'),
                   port=env('fab_port'),
                   connect_kwargs={'password': env('fab_password')})

    supervisor_conf_path = '~/etc/'
    supervisor_program_name = 'HelloDjango'

    project_root_path = '~/apps/HelloDjango/'

    # Stop the application first
    with c.cd(supervisor_conf_path):
        cmd = 'supervisorctl stop {}'.format(supervisor_program_name)
        c.run(cmd)

    # Enter the project root and pull the latest code from Git
    with c.cd(project_root_path):
        cmd = 'git pull'
        responders = _get_github_auth_responders()
        c.run(cmd, watchers=responders)

    # Remove the existing Pipfile.lock
    with c.cd(project_root_path):
        c.run('rm Pipfile.lock')

    # Install dependencies
    with c.cd(project_root_path):
        c.run('pipenv install --deploy --ignore-pipfile')

    # Create new database migrations
    with c.cd(project_root_path):
        c.run('pipenv run python manage.py makemigrations')

    # Apply database migrations
    with c.cd(project_root_path):
        c.run('pipenv run python manage.py migrate')

    # Collect static files
    with c.cd(project_root_path):
        c.run('pipenv run python manage.py collectstatic --noinput')

    # Restart the application
    with c.cd(supervisor_conf_path):
        cmd = 'supervisorctl start {}'.format(supervisor_program_name)
        c.run(cmd)
Example #18
def deploy_test(c):
    # Connect to the test-environment deployment server
    c = Connection('218.29.54.49',
                   port=2206,
                   user='******',
                   connect_kwargs={
                       'password': '******',
                   })
    # Upload files
    step('Upload dist files to server')
    c.put('.temp/dist.tar', '/opt/test-projects/wistar-supply/')

    # Untar the files
    step('Untar the files')
    c.run('cd /opt/test-projects/wistar-supply/ && tar -xvf dist.tar')

    print('Deployed successfully!')
Example #19
    def _connect(self):
        """ Open connection to remote host """

        if self.client is None:
            try:
                self.client = Connection(
                    host=self.host,
                    user=self.user,
                    connect_kwargs={
                        "key_filename": [
                            self.ssh_key_filepath,
                        ]
                    },
                )
            except AuthenticationException as error:
                print(f"Authentication error: {error}")
        return self.client
Example #20
def deploy(c):
    # Connect to the production deployment server
    c = Connection('47.96.123.40',
                   port=22,
                   user='******',
                   connect_kwargs={
                       'password': '******',
                   })
    # Upload files
    step('Upload dist files to server')
    c.put('.temp/dist.tar', '/opt/projects/wistar-supply/')

    # Untar the files
    step('Untar the files')
    c.run('cd /opt/projects/wistar-supply/ && tar -xvf dist.tar')

    print('Deployed successfully!')
Example #21
 def run_command(self, command, print_message=None, verbose=False):
     private_ip = self.get_private_ip()
     public_ip_login_node = self.get_public_ip()
     local_private = self.get_local_rsa_private()
     base_user = self.get_base_user()
     conn = Connection(host=public_ip_login_node,
                       user=base_user,
                       connect_kwargs={"key_filename": local_private},
                       forward_agent=True)
     if print_message is not None:
         print(print_message, end="")
     out, err = run_command_ssh_gateway(conn, base_user, private_ip,
                                        command)
     if verbose:
         print(out, err)
     conn.close()
     return out, err
Example #22
def main(host, url, token, device):
    connection = Connection(host)
    # Stop the service if it's running and installed
    click.echo(
        click.style("Stopping any existing installation...",
                    fg="green",
                    bold=True))
    try:
        connection.sudo("systemctl stop rfid-client")
    except UnexpectedExit:
        pass
    # Ensure base tools are installed
    click.echo(click.style("Installing base tools...", fg="green", bold=True))
    connection.sudo("apt-get update -q")
    connection.sudo("apt-get install -y git python3-pip")
    # Make sure the directory is set up
    click.echo(click.style("Pulling down repo...", fg="green", bold=True))
    connection.sudo("mkdir -p /srv/rfid-inventory")
    connection.sudo("chown %s /srv/rfid-inventory" % connection.user)
    try:
        with connection.cd("/srv/rfid-inventory"):
            connection.run("git pull")
    except UnexpectedExit:
        connection.run(
            "git clone http://www.github.com/andrewgodwin/rfid-inventory /srv/rfid-inventory"
        )
    # Install packages
    click.echo(
        click.style("Installing Python packages...", fg="green", bold=True))
    connection.sudo(
        "pip3 install -r /srv/rfid-inventory/chafon-pc/requirements.txt")
    # Write out a systemd unit file
    click.echo(
        click.style("Writing systemd unit file...", fg="green", bold=True))
    unit_file = unit_template.format(device=device, url=url,
                                     token=token).strip()
    connection.sudo(
        "bash -c \"echo '%s' > /etc/systemd/system/rfid-client.service\"" %
        unit_file)
    # Start it
    click.echo(
        click.style("Enabling and starting service...", fg="green", bold=True))
    connection.sudo("systemctl daemon-reload")
    connection.sudo("systemctl enable rfid-client")
    connection.sudo("systemctl start rfid-client")
Example #23
def restore2local():
    '''
    Restore the database to the local machine
    '''
    fs = os.listdir(backup_dir)
    files = [
        f for f in fs if f.startswith('backup-') and f.endswith('.sql.tar.gz')
    ]
    files.sort(reverse=True)
    if len(files) == 0:
        print('No backup files found.')
        return
    print('Found %s backup files:' % len(files))
    print('==================================================')
    n = 0
    for f in files:
        print('%s: %s' % (n, f))
        n = n + 1
    print('==================================================')
    try:
        num = int(input('Restore file: '))
    except ValueError:
        print('Invalid file number.')
        return
    restore_file = files[num]
    yn = input('Restore file %s: %s? y/N ' % (num, restore_file))
    if yn != 'y' and yn != 'Y':
        print('Restore cancelled.')
        return
    print('Start restore to local database...')
    sqls = [
        'drop database if exists awesome;', 'create database awesome;',
        'grant select, insert, update, delete on %s.* to \'%s\'@\'localhost\' identified by \'%s\';'
        % (local_sql_db, local_sql_user, local_sql_pass)
    ]
    conn = Connection(ip)
    for sql in sqls:
        conn.local(r'mysql -u{} -p{} -e "{}"'.format(local_sql_Suser,
                                                     local_sql_Spass, sql))
    conn.local('tar zxvf {1:}/{0:} -C {1:}'.format(restore_file, backup_dir))
    conn.local(r'mysql -u{} -p{} {} < {}/{}'.format(local_sql_Suser,
                                                    local_sql_Spass,
                                                    local_sql_db, backup_dir,
                                                    restore_file[:-7]))
    conn.local('rm -f {}/{}'.format(backup_dir, restore_file[:-7]))
Example #24
def get_git_logs(key, host_addr, group):
    str_date = datetime.now().strftime('%s')
    for i, row in enumerate(host_addr):
        id = row['student_id']
        id = id + (int(group) * 24)
        id = str(id)

        h = row['ip_address']
        # print(id)
        # print(type(id))
        # print(h)
        # print(group)

        try:
            print("Start get_git_logs function")
            c = Connection(host=h,
                           user="******",
                           port=22,
                           connect_timeout=2,
                           connect_kwargs={"key_filename": key})
            print("Success connection host: " + h)

            #vmname = get_vmname(c)
            #print("Success get_vmname function: "+vmname)

            backup_dir = "/root/log/" + group + "/" + id.zfill(
                3) + "/" + str_date
            c.local("mkdir -p " + backup_dir, warn=True)
            print("Create backup_dir locally: " + backup_dir)

            # c.run("mkdir -p "+backup_dir, warn=True)
            # print("Create backup_dir on remote: "+backup_dir)

            c.run("tar czf /root/git.tar.gz" + " -C / git", warn=True)
            print("Create git.tar.gz on remote")

            c.get("/root/git.tar.gz", backup_dir + "/git.tar.gz")
            print("Get git.tar.gz on remote")

            c.run("rm -rf /root/git.tar.gz", warn=True)
            print("Delete git.tar.gz on remote")

            print("Finish get_git_logs function")
        except socket.timeout:
            continue
Example #25
    def __init__(self, host_string="localhost:8822", user="******"):
        """Create a MenderDevice object-

        Keyword arguments:
        host_string -- Remote SSH host of the form host:port
        user -- Remote SSH user
        """
        self.host, self.port = host_string.split(":")
        self.user = user
        self._conn = Connection(
            host=self.host,
            user=self.user,
            port=self.port,
            connect_timeout=60,
            connect_kwargs={"password": "", "banner_timeout": 60, "auth_timeout": 60},
        )
        self._conn.client.set_missing_host_key_policy(IgnorePolicy())
        self._service_name = None
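A hedged usage sketch for the constructor above; the host:port matches the default, the user value is illustrative, and driving the Fabric connection through _conn directly assumes the class exposes no higher-level helper method.

device = MenderDevice("localhost:8822", user="root")   # user value is illustrative
result = device._conn.run("uname -a", hide=True)       # _conn is the Connection built in __init__
print(result.stdout.strip())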
Example #26
def update_config(connect):
    '''
    Fill in the connection parameters
    '''
    # If no host was passed on the command line, connect arrives as a Context
    if isinstance(connect, Context):
        connect = Connection(host=host)  # initialize a Connection
    conf = {
        "user": user,
        "port": port,
        "connect_kwargs": {
            "key_filename": key_filename
        },
    }
    for k, v in conf.items():
        if hasattr(connect, k):
            setattr(connect, k, v)
    return connect
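A hedged sketch of how update_config might be used from a Fabric task; it assumes the module-level host, user, port, and key_filename referenced above are defined, and the task name is illustrative.

from fabric import task

@task
def uptime(c):
    # c is a plain Context when no host is passed on the command line and a
    # Connection when one is; update_config normalizes both cases.
    c = update_config(c)
    c.run("uptime")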
Example #27
    def __init__(self):

        host_list = open('InstancesConfigurations/public_ips',
                         'r').read().splitlines()
        self.connections = []
        self.pool = Serial()
        party_id = 0
        for host in host_list:
            c = Connection(host,
                           user='******',
                           connect_kwargs={
                               'key_filename':
                               ['%s/Keys/matrix.pem' % Path.home()]
                           })
            c.party_id = party_id
            party_id += 1
            self.connections.append(c)
            self.pool.append(c)
Example #28
def run_all2one(bramble, server_ip, niter=niter):
    server = Connection(server_ip, user='******', connect_kwargs=cxn_args)
    ips = [c.host for c in bramble if c.host != server_ip]
    clients = ThreadingGroup(*ips, user='******', connect_kwargs=cxn_args)
    print(f"Begin {len(clients)} clients to 1 server experiment")

    server.run("killall -q iperf", warn=True)
    time.sleep(10)  # wait for old process to die
    server.run(f"iperf -s > {remote_output_dir}/server.log &")

    for i in range(niter):
        print(f"Iteration {i}")
        clients.run("killall -q iperf", warn=True)
        time.sleep(10)  # wait for processes to die
        clients.run(
            f"iperf -P 20 -c {server.host} >> {remote_output_dir}/client.log")

    gather_all2one_results(clients, server)
Example #29
def generate_magento_build_folder():
    local_con = Connection('localhost')
    php_versions = get_php_versions()
    for php_version in php_versions:
        docker_filename = "m2_apache2-php%s" % php_version

        dic_src = "build_folder_m2/demo"
        dic_dest = "build_folder_m2/%s" % docker_filename
        dic_src_path = os.path.abspath(dic_src)
        dic_dest_path = os.path.abspath(dic_dest)
        local_con.local('rm -rf {0} && mkdir {0}'.format(dic_dest_path))
        local_con.local('cp -r %s/* %s' % (dic_src_path, dic_dest_path))

        file_src = "all_m2/%s" % docker_filename
        file_dest = "build_folder_m2/{0}/{0}".format(docker_filename)
        file_src_path = os.path.abspath(file_src)
        file_dest_path = os.path.abspath(file_dest)
        local_con.local('cp -r %s %s' % (file_src_path, file_dest_path))
Example #30
def deploy(arg, group="core"):
    "roda comando em todos os cores"
    env = GROUP_ENVS[group]
    for host in env["hosts"]:
        with Connection(host=host, user="******",
                        connect_kwargs=env["keys"]) as con:
            print("-" * 80)
            print("Start deploy in HOST: ", host)
            con.run("docker-compose pull || true")
            con.run(
                "docker stack deploy -c docker-compose.yml ubuntu --with-registry-auth || true"
            )
            con.run(
                'docker rmi $(docker images --filter "dangling=true" -q --no-trunc) || true'
            )
            print("-" * 80)

    print("Deploy finished")