Example #1
def initial_server_setup(ip, user):
    # As soon as the server is created, we can log in as root
    c = Connection(ip, user='******')

    # Let's create a user and add it to the sudoers group
    # c.run('adduser {}'.format(user))
    # c.run('usermod -aG sudo {}'.format(user))
    # c.run('su - {}'.format(user))
    # c.run('mkdir ~/.ssh')
    # c.run('chmod 700 ~/.ssh')
    # Let's create an SSH key pair if it does not exist:
    ssh_dir = os.path.join(os.environ.get('HOME'), '.ssh')
    ssh_filename = os.path.join(ssh_dir, 'id_rsa')
    if not os.path.exists(ssh_dir):
        os.makedirs(ssh_dir)
        make_ssh_key(c, ssh_filename)
    else:
        if not os.path.isfile(ssh_filename):
            make_ssh_key(c, ssh_filename)

    # Upload the file to the server
    ssh_filename_pub = ssh_filename + '.pub'
    c.put(ssh_filename_pub, remote='/home/{}/'.format(user))
    # Each run() call is a separate shell session, so chain cd with the append
    c.run('cd /home/{} && cat id_rsa.pub >> .ssh/authorized_keys'.format(user))
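
Note: the make_ssh_key helper called above is not defined in this example. A minimal sketch, assuming it only needs to generate a local RSA key pair with an empty passphrase (the name and signature come from the call site; the implementation is an assumption):

def make_ssh_key(c, ssh_filename):
    # Hypothetical helper: create the key pair on the local machine via Connection.local()
    c.local("ssh-keygen -t rsa -b 4096 -q -N '' -f {}".format(ssh_filename))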
Example #2
def write_configuration(conn: Connection,
                        training_config: CS1TrainingRunConfig):
    """
    Sets up the directories and params.yaml file for the current experiment on Medulla
    Args:
        conn: fabric.Connection object
        training_config: CS1TrainingRunConfig object from parsed YAML
    """
    top_dir = training_config.medulla_experiment_path

    if not top_dir.is_absolute():
        raise ValueError("medulla_experiment_path must be absolute")

    logger.info(f"Creating {top_dir} on {conn.host}")
    conn.run(f"mkdir -p {top_dir}")
    conn.run(f"mkdir -p {training_config.sim_data_dir}")
    conn.run(f"mkdir -p {training_config.data_dir}")
    conn.run(f"mkdir -p {training_config.eval_data_dir}")
    conn.run(f"mkdir -p {training_config.model_dir}")
    conn.run(f"touch  {training_config.global_path}")

    h5_dir = training_config.initial_h5_transfer_dir
    if h5_dir and h5_dir.is_dir():
        for f in h5_dir.glob("*.h5"):
            conn.put(f.as_posix(), training_config.sim_data_dir.as_posix())

    with NamedTemporaryFile(mode="w", delete=False) as fp:
        yaml.dump(json.loads(training_config.json()), fp)
        fp.flush()
        conn.put(fp.name, top_dir.joinpath("params.yaml").as_posix())
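
A side note: fabric's put() also accepts file-like objects (see Examples #14 and #15 below), so the temporary file above is not strictly necessary. A minimal alternative sketch for the final upload, assuming io is imported:

    conn.put(
        io.StringIO(yaml.dump(json.loads(training_config.json()))),
        top_dir.joinpath("params.yaml").as_posix(),
    )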
Example #3
class FabricApi:
    def __init__(self, ip, user, password):
        self.conn = Connection(ip, user, connect_kwargs={'password': password})

    def update_symbolic_link(self, src, dest):
        try:
            self.conn.run("[ -e {} ]".format(src))
            self.conn.run("rm -rf {}".format(src))
        except Exception:
            pass

        self.conn.run("ln -s {} {}".format(dest, src))

    def send_file(self, src, dest):
        try:
            self.conn.run("[ -e {} ]".format(dest))
            return True
        except Exception:
            pass

        self.conn.run("mkdir -p {}".format(os.path.dirname(dest)))
        self.conn.put(src, "{}.tar.gz".format(dest))
        self.conn.run("tar -xf {}.tar.gz -C {}".format(dest,
                                                       os.path.dirname(dest)))
        self.conn.run("rm -rf {}.tar.gz".format(dest))
Example #4
def add_new_node_ssh():
    nodes_new = {'172.17.0.12': '111111'}
    node_num = 3
    with open('/etc/hosts', 'a') as f:
        for ip, pwd in nodes_new.items():
            f.write('%s    node%s\n' % (ip, node_num))
            node_num += 1
            c = Connection(ip,
                           port=22,
                           user='******',
                           connect_kwargs={'password': pwd})
            c.get('/root/.ssh/id_rsa.pub', '/root/.ssh/id_rsa.pub.bak')
            c.local(
                'cat /root/.ssh/id_rsa.pub.bak >> /root/.ssh/authorized_keys')
            c.local('rm -f /root/.ssh/id_rsa.pub.bak')
    nodes.update(nodes_new)
    for ip, pwd in nodes.items():
        c = Connection(ip,
                       port=22,
                       user='******',
                       connect_kwargs={'password': pwd})
        c.run('rm -f /etc/hosts')
        c.put('/etc/hosts', '/etc/hosts')
        a = c.local('find /root/.ssh/ -name authorized_keys')
        if a.stdout.find('authorized_keys') != -1:
            c.run('rm -f /root/.ssh/authorized_keys')
        c.put('/root/.ssh/authorized_keys', '/root/.ssh/authorized_keys')
    print('over')
Example #5
 def setup_squid(self, server, startup_script_file_location,
                 squid_service_location):
     service_name = squid_service_location.replace('.service', '')
     sudo_config = Config(overrides={'sudo': {'password': server.password}})
     server_connection = Connection(
         host=server.ip_address,
         user=server.username,
         connect_kwargs={"password": server.password},
         config=sudo_config)
     try:
         server_connection.sudo('reboot')
     except UnexpectedExit as e:
         print("Caught error while rebooting machine")
     self.wait_for_server(server)
     server_connection.sudo('apt-get update')
     server_connection.sudo('apt-get install -y dos2unix')
     # put() drops the script into /usr/bin/ under its basename
     startup_script_name = os.path.basename(startup_script_file_location)
     file_transfer_result = server_connection.put(
         startup_script_file_location, remote='/usr/bin/')
     server_connection.sudo('chmod +x /usr/bin/' + startup_script_name)
     server_connection.sudo('dos2unix /usr/bin/' + startup_script_name)
     file_transfer_result = server_connection.put(
         squid_service_location, remote='/etc/systemd/system/')
     server_connection.sudo(f'systemctl enable {service_name}')
     try:
         server_connection.sudo('reboot')
     except UnexpectedExit as e:
         print("Caught error while rebooting machine")
Example #6
def upload_archive():

    print('uploading...')
    # establishing connection to the server
    c = Connection(
        host="178.128.193.27",
        user="******",
        connect_kwargs={
            "key_filename": "D:\projects\doggo\.ssh\openSSH",
        },
    )
    # uploading the zip to myproject directory
    c.put(zipName, 'myproject/')

    print('extracting...')
    # unzipping and overwriting the files
    c.run('cd myproject && unzip -o %s' % zipName)

    # removing the archive
    c.run('cd myproject && rm %s' % zipName)

    # does server restart correctly?
    sudopass = Responder(
        pattern=r'\[sudo\] password:',
        response='1992\n',
    )
    # does not work : c.config.sudo.password('1992')
    print('restarting...')
    c.sudo('systemctl restart myproject', pty=True, watchers=[sudopass])
Example #7
def put_dir(con: Connection, from_dir: str, to_dir: str):
    """ put a directory to server.
    """
    con.local("tar cfz {0}.tar.gz {0}".format(from_dir))
    con.put("{}.tar.gz".format(from_dir), "{}".format(to_dir))
    con.run("tar zxf {1}/{0}.tar.gz -C {1}".format(from_dir, to_dir))
    con.local("rm {}.tar.gz".format(from_dir))
Example #8
def deploy(c):
    user = input('Input login user name: ')

    host = input('Input login host: ')

    root = input('Input project root path:')

    user_pass = getpass.getpass('Input login user pass:')

    result = c.run('python setup.py --fullname', hide=True)
    dist = result.stdout.strip()
    filename = '%s.tar.gz' % dist

    result = c.run('python setup.py --name', hide=True)
    name = result.stdout.strip()

    remote = Connection('%s@%s' % (user, host),
                        connect_kwargs={"password": user_pass})

    remote.run('cd %s && ls -al' % root)

    remote.put('./dist/%s' %
               filename, remote='%s' % root)

    remote.run('cd %s &&\
     source .env/bin/activate &&\
        ls -al && type python &&\
         pip install %s &&\
         supervisorctl restart %s' % (root, filename, name))
Example #9
def deploy_ccp_docs(
    ccp_doc_root="public_html/py/ciscoconfparse",
    ccp_bundle_name="ccp.tar.gz",
    doc_host="",
    password="",
):

    # Run 'make html' in directory: sphinx-doc/
    run("cd sphinx-doc && make clean && make html")  # local command

    run("cd sphinx-doc/_build/html && tar cvfz {0} *".format(
        os.path.expanduser("~/" + ccp_bundle_name)))

    # Run 'make clean' in directory: sphinx-doc/
    run("cd sphinx-doc && make clean")  # local command

    # ssh with a password...
    conn = Connection("mpenning@{}".format(doc_host),
                      connect_kwargs={"password": password})
    conn.put(
        local=os.path.expanduser("~/{0}".format(ccp_bundle_name)),
        remote=ccp_bundle_name,
    )

    # Delete all the old files
    conn.run("rm -rf {0}/*".format(ccp_doc_root))
    # Move the new files to ccp_doc_root
    conn.run("mv {0} {1}".format(ccp_bundle_name, ccp_doc_root))

    with conn.cd(ccp_doc_root):
        conn.run("tar xvfz {0}".format(ccp_bundle_name))
        conn.run("rm {0}".format(ccp_bundle_name))
Example #10
def install_hashistack(nodes):
    # TODO: make this concurrent in threads
    for node in nodes:
        hostname = node["hostname"]
        conn = Connection(hostname)

        # Create all the script directories that will be needed
        conn.run('mkdir -p /home/ubuntu/scripts/consul')
        conn.run('mkdir -p /home/ubuntu/scripts/vault')
        conn.run('mkdir -p /home/ubuntu/scripts/nomad')
        conn.run('mkdir -p /home/ubuntu/scripts/terraform')
        conn.run('mkdir -p /home/ubuntu/scripts/packer')
        conn.run('mkdir -p /home/ubuntu/scripts/waypoint')
        conn.run('mkdir -p /home/ubuntu/scripts/boundary')

        conn.put("../../bash/rasppi/install.sh", "/home/ubuntu/install.sh")
        conn.put("../../bash/rasppi/consul/setup_basics.sh",
                 "/home/ubuntu/scripts/consul/setup_basics.sh")
        # conn.put("../../bash/rasppi/consul/setup_primary.sh", "/home/ubuntu/scripts/consul/setup_primary.sh")

        # Set the permissions on all the install scripts
        conn.run("chmod 755 *.sh")
        conn.run("chmod 755 scripts/**/*.sh")

        # Then install all the binaries
        install_result = conn.run("./install.sh")
        print(install_result)

        basics_result = conn.run("./scripts/consul/setup_basics.sh")
        print(basics_result)
Example #11
def start_vid(request):
    global filename_global
    filename_global = 'user_{0}_{1}'.format(request.user.id,
                                            request.user.username)
    with open('./runcode/motion.conf') as fin, open(
            './runcode/motion_new.conf', 'w') as fout:
        for i, item in enumerate(fin, 1):
            if i == 450:  # 450 - dir for saving stuff
                item = 'target_dir "' + '/home/pi/runcode/data/videos/' + filename_global + '"\n'
                print(item)
            if i == 473:  # 473 - filename pattern
                global f
                f = request.user.username + '-' + timezone.now().strftime(
                    '%d-%m-%y_%H-%M-%S')
                # f_global = f
                item = 'movie_filename ' + f + '\n'
                print(item)
            fout.write(item)
    c = Connection(host=pi_ip,
                   user='******',
                   connect_kwargs={'password': pi_pwd},
                   connect_timeout=10)
    c.put('./runcode/motion_new.conf', 'runcode/motion_new.conf')
    cmd = " echo " + pi_pwd + " | sudo -S motion -b -c /runcode/motion_new.conf"
    c.run(cmd)
    global a
    a = 1
    sleep(1.5)  # don't have any other option as of now to wait for iframe loading
    return HttpResponseRedirect('/runcode/')
Example #12
def deploy(c):
    remote_user = '******'
    remote_password = '******'
    remote_host = '165.22.105.187'

    config = Config(overrides={'sudo': {'password': remote_password}})
    connect_kwarg = {'password': remote_password, 'allow_agent': False}
    conn = Connection(host=remote_host, user=remote_user, config=config, connect_kwargs=connect_kwarg)
    print('Connected with remote machine')

    print('Copy sources')
    conn.put('app.py')
    conn.put("config.json")

    print("install requirements")
    conn.sudo("pip3 install Flask Flask-CORS")

    print("Shutdown previous server")
    conn.sudo('pkill -F server.pid', warn=True)

    print('Start server')
    conn.sudo("nohup python3 app.py &> logs.txt & echo $! > server.pid")

    print("Sucsess!")
    conn.close()
Example #13
def _update_settings(c: Connection, source_folder, sitename):
    settings_path = source_folder + '/superlists/settings.py'
    secret_key_path = source_folder + '/superlists/secret_key.py'
    loc_tmp_dir = 'tmp_remote_settings'
    loc_new_settings_path = loc_tmp_dir + '/settings.py'
    loc_old_settings_path = loc_tmp_dir + '/settings_old.py'
    loc_secret_key_path = loc_tmp_dir + '/secret_key.py'
    os.mkdir(loc_tmp_dir)
    try:
        c.get(settings_path,
              local=os.getcwd() + '/' + loc_tmp_dir + '/settings.py')
        os.rename(loc_tmp_dir + '/settings.py', loc_old_settings_path)
        with open(loc_old_settings_path, 'r') as f:
            content = f.read()
            new_content = re.sub("DEBUG = True", "DEBUG = False", content)
            new_content = re.sub(r'ALLOWED_HOSTS = \[.*\]',
                                 'ALLOWED_HOSTS = ["{}"]'.format(sitename),
                                 new_content)
            new_content = re.sub(r"SECRET_KEY = '.*'",
                                 'from .secret_key import SECRET_KEY',
                                 new_content)
            with open(loc_new_settings_path, 'w') as nf:
                nf.write(new_content)
            if not _exists(c, secret_key_path):
                chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
                key = ''.join(random.SystemRandom().choice(chars)
                              for _ in range(50))
                with open(loc_secret_key_path, 'w') as nkey:
                    nkey.write('SECRET_KEY = "{}"'.format(key))
                c.put(os.getcwd() + '/' + loc_secret_key_path,
                      remote=source_folder + '/superlists/')
            c.put(os.getcwd() + '/' + loc_new_settings_path,
                  remote=source_folder + '/superlists/')
    finally:
        shutil.rmtree(loc_tmp_dir)
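
The _exists helper used above is not shown. A minimal sketch, assuming it only needs to report whether a path exists on the remote host (name and signature taken from the call site):

def _exists(c, path):
    # warn=True keeps a non-zero exit status from raising UnexpectedExit
    return c.run('test -e {}'.format(path), warn=True, hide=True).ok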
Example #14
 def job(self, run: "Run", payload: dict, device: Device) -> dict:
     username, password = run.get_credentials(device)
     fabric_connection = Connection(
         host=device.ip_address,
         port=device.port,
         user=username,
         connect_kwargs={"password": password},
     )
     source_code = run.sub(run.source_code, locals())
     match = run.sub(run.content_match, locals())
     run.log("info", f"Running Unix Shell Script {self.name} on {device.name}")
     script_file_name = "unix_shell_script_service.sh"
     with StringIO(run.source_code) as script_file:
         fabric_connection.put(script_file, script_file_name)
         if run.privileged_mode:
             if not device.enable_password:
                 raise Exception(
                     f"Service {self.name} requested privileged mode on device "
                     f"with no configured enable_password: {device.name}"
                 )
             result = fabric_connection.sudo(
                 f"bash {script_file_name}", password=device.enable_password
             )
         else:
             result = fabric_connection.run(f"bash {script_file_name}")
         fabric_connection.run(f"rm {script_file_name}")
     return {
         "match": match,
         "negative_logic": run.negative_logic,
         "result": f"stdout:\n{result.stdout}\nstderr:\n{result.stderr}",
         "success": result.ok and run.match_content(result, match),
     }
Example #15
def submit_job(connection: Connection, job_script: str) -> str:
    job_script = textwrap.dedent(job_script.lstrip())
    connection.put(io.StringIO(job_script), "test.slm")
    connection.sudo("mkdir -p --mode=777 /mnt/shared/test", in_stream=False)
    res = connection.run("sbatch --chdir=/mnt/shared/test --wait test.slm", timeout=timedelta(minutes=10).seconds, in_stream=False)
    job_id = res.stdout.split()[-1]
    return job_id
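
Because sbatch is invoked with --wait, submit_job returns only after the job has finished. A caller that wants the job's final state could query Slurm's accounting afterwards; a minimal sketch, assuming sacct is available on the login node (job_state is a hypothetical helper, not part of the example above):

def job_state(connection: Connection, job_id: str) -> str:
    # -X: allocation line only, -n: no header, -o State: just the state column
    res = connection.run(f"sacct -j {job_id} -X -n -o State", in_stream=False)
    return res.stdout.strip()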
Example #16
def deploy(c):
    # Manually enter the server address, user name and password
    host = input('host:')
    user = input('user:')
    password = getpass.getpass('password:')
    conn = Connection(host, port='22', user=user, connect_kwargs={'password': password})
    # Remove the old tar file on the server
    conn.run('rm -f %s' % _REMOTE_TMP_TAR)
    # Upload the new tar file
    conn.put(os.path.join(_LOCAL_PKG_PATH, _TAR_FILE), _REMOTE_TMP_TAR)
    # Create a new directory on the server
    newdir = 'www-%s' % datetime.now().strftime('%y-%m-%d_%H.%M.%S')
    with conn.cd(_REMOTE_BASE_DIR):
        conn.run('mkdir %s' % newdir)
    # Extract the tar file into the new directory
    with conn.cd('%s/%s' % (_REMOTE_BASE_DIR, newdir)):
        conn.run('tar -xzvf %s' % _REMOTE_TMP_TAR)
    # Reset the symbolic link
    with conn.cd(_REMOTE_BASE_DIR):
        conn.run('rm -f www')
        conn.run('ln -s %s www' % newdir)
    # Make app.py executable
    conn.run('chmod a+x %s/www/app.py' % _REMOTE_BASE_DIR)
    # Restart the Python service and Nginx
    conn.run('supervisorctl stop pythonweb')
    conn.run('supervisorctl start pythonweb')
    conn.run('/etc/init.d/nginx reload')
Example #17
def deploy(c):
    remote_user = '******'
    remote_password = '******'
    remote_host = '178.62.2.141'

    config = Config(overrides={'sudo': {'password': remote_password}})
    connect_kwarg = {'password': remote_password, 'allow_agent': False}
    conn = Connection(host=remote_host,
                      user=remote_user,
                      config=config,
                      connect_kwargs=connect_kwarg)
    print("Connected with remote machine")

    print("Copy sources")
    conn.put("app.py")
    conn.put("config.json")

    print("Install requirements")
    conn.sudo("pip3 install Flask Flask-CORS")

    print("Shutdown previous server")
    conn.sudo("pkill -F server.pid", warn=True)

    print("Start server")
    conn.sudo("nohup python3 app.py &> logs.txt & echo $! > server.pid")

    print("Success!")
    conn.close()
Example #18
class Fabapi(object):
    def __init__(self, hostip):
        connect_kwargs = {
            'pkey': settings.DEPLOYKEY,
            # 'password':''
        }
        self.c = Connection(host=hostip,
                            user=settings.DEPLOYUSER,
                            port=22,
                            connect_kwargs=connect_kwargs)

    def locald(self, ld):
        return self.c.local(ld)

    def remoted(self, rd, sudoif=0):
        # if sudoif == 1:
        #     return sudo(rd, pty=False)
        # else:
        #     return run(rd)
        return self.c.run(rd, pty=False, shell=False)

    def getfile(self, local_dest, remote_dest):
        self.c.get(remote_dest, local_dest)

    def putfile(self, local_dest, remote_dest):
        # fabric's put() has no "mode" argument; preserve_mode (the default)
        # keeps the local file's permissions on the remote copy
        self.c.put(local_dest, remote_dest, preserve_mode=True)

    def isexists(self, rootdir):
        res = self.c.run(" [ -e {0} ] && echo 1 || echo 0".format(rootdir),
                         shell=False)
        return int(res.stdout.strip())
Example #19
    def setup_node(self, ip):
        ssh_conn = SSH_Connection(host=ip,
                                  user=self.config.get('remote', 'user'),
                                  connect_kwargs=self.ssh_custom_key)
        ssh_conn.run('apt-get -y update && apt-get -y upgrade', hide=True)
        ssh_conn.run('apt-get -y install openmpi-bin', hide=True)

        ssh_conn.run('mkdir -p /root/bin', hide=True)
        if self.config.get('local', 'deployable_code').startswith('http'):
            # downloading binary from a trusted non-public address
            ssh_conn.run('cd /root/bin && wget %s' %
                         self.config.get('local', 'deployable_code'),
                         hide=True)
        else:
            # uploading binary from local; requires broadband connection
            ssh_conn.put(self.config.get('local', 'deployable_code'),
                         '/root/bin/Pcrystal')
        if self.config.get('local', 'deployable_code').endswith('.gz'):
            # binary may be gzipped, without subfolders, with an arbitrary archive name,
            # but the name of the binary must remain Pcrystal
            ssh_conn.run(
                'cd /root/bin && tar xvf %s' %
                self.config.get('local', 'deployable_code').split('/')[-1],
                hide=True)
        ssh_conn.run('ln -sf /root/bin/Pcrystal /usr/bin/Pcrystal', hide=True)

        # print and ensure versions
        result = ssh_conn.run('/usr/bin/mpirun --allow-run-as-root -V',
                              hide=True)
        logging.info(result.stdout)
        result = ssh_conn.run('cat /etc/issue', hide=True)
        logging.info(result.stdout)
        result = ssh_conn.run('grep -c ^processor /proc/cpuinfo', hide=True)
        logging.info(result.stdout)
Example #20
def deploy(c):
    conn = Connection("trotsky")
    with conn.cd("~/src/untitled-incremental"):
        conn.run("git fetch")
        conn.run("git stash")
        conn.run("git pull")
        conn.run("git stash pop")
        conn.put("main.wasm")
Example #21
 def restore(self, host, src):
     c = Connection(host)
     c.put(src, f"{self.directory}/backup.tgz")
     c.run(f"cd {self.directory} && docker-compose stop")
     c.run(f"cd {self.directory} && rm -rf db images", warn=True)
     c.run(f"cd {self.directory} && tar -xvzf backup.tgz")
     c.run(f"rm {self.directory}/backup.tgz")
     c.run(f"cd {self.directory} && docker-compose start")
Example #22
def _check_or_copy_wandb_key(hostname: str, ssh_session: fabric.Connection):
    try:
        ssh_session.run("test -f $HOME/.netrc")
    except UnexpectedExit:
        print(
            f"Wandb api key not found in {hostname}. Copying it from {DEFAULT_WANDB_KEY}"
        )
        ssh_session.put(DEFAULT_WANDB_KEY, ".netrc")
Example #23
def deploy(c):
    r = Connection("zerojoy")
    r.put("dist/usb_gadget.tar", "zerojoy.tar")
    f = c.run("poetry version -s")
    wheel = f"zerojoy-{f.stdout.strip()}-py3-none-any.whl"
    r.put(f"dist/{wheel}")
    with r.prefix(". .venv/bin/activate"):
        r.run(f"pip install --no-index --force-reinstall {wheel}")
    restart(r)
Example #24
def passwordlessSSH(user,publicIPs,keyFile,cwd):
	#Change the permissions of authorized_keys to 600
	subprocess.check_output(['bash','-c', "chmod 600 keys/authorized_keys_updated"])

	print("\nCopy the Authorized Keys file to all instances")
	for IPAddress in publicIPs:
		print(IPAddress)
		connection = Connection(host=IPAddress, user=user, connect_kwargs = {'key_filename': ['' + keyFile + ''] })
		connection.put(''+cwd +'/keys/authorized_keys_updated',remote='/home/'+user+'/.ssh/authorized_keys',preserve_mode=True)
Example #25
    def start(self):
        """ start node
        :return: ip
        """

        # launch server. ubuntu.
        name = names.sample(1).item().lower()
        size = [s for s in self.lc.list_sizes() if s.name == "t3.nano"][0]
        image = self.lc.list_images(ex_image_ids=["ami-03d8261f577d71b6a"])[0]
        node = self.lc.create_node(
            name,
            size,
            image,
            ex_keyname="key",
            ex_spot=True,
            ex_security_groups=["proxy"],
            ex_metadata=dict(app="proxy", ready=False),
        )
        log.info(f"waiting for {name} to start")
        node = self.lc.wait_until_running([node])[0][0]
        self.node = node
        ip = node.public_ips[0]
        self.session = self.get_session(ip)

        # configure using fabric
        con = Connection(
            ip,
            user="******",
            connect_kwargs=dict(key_filename=f"{HOME}/.aws/key"),
        )
        self.con = con
        # retry until ssh available
        Retry(tries=3, delay=2, warn=1)(con.open)()
        con.put(f"{HERE}/tinyproxy.conf")
        con.run(
            "sudo apt-get -qq update && "
            "sudo apt-get -y -qq install dos2unix tinyproxy && "
            "dos2unix -q tinyproxy.conf && "
            "sudo cp tinyproxy.conf /etc/tinyproxy/tinyproxy.conf && "
            "sudo service tinyproxy restart",
            hide="both",
        )

        # wait for proxy to be working
        try:
            self.check_proxy()
        except Exception:
            log.error(
                f"Failed to start proxy for {node.extra.instance_id} at {ip}")
            raise

        # make available
        self.lc.ex_create_tags(node, dict(ready="True"))
        log.info(f" {ip} started")

        return ip
Example #26
def upload_runbook(con: Connection, dst: str):
    print("Uploading runbook to the master node results dir ...")
    try:
        create_runbook(RUNBOOK_PATH, setup_file=README_PATH)
        con.run(f"mkdir -p {dst}")
        con.put(README_PATH, dst)
        con.put(RUNBOOK_PATH, dst)
    finally:
        if os.path.exists(RUNBOOK_PATH):
            os.remove(RUNBOOK_PATH)
Example #27
def test_3_fabric_upload():
    if isfile(testfilename):
        remove(testfilename)
    localfile = open(testfilename, 'w')
    localfile.write(test_text)
    localfile.close()
    c = Connection(host=hostname, user=username, connect_kwargs={'password': password})
    c.put(testfilename, remotepath)
    output = c.run(f'cat {remotepath}').stdout
    assert output == test_text
    remove(testfilename)
Example #28
def copyFile(user, fileName, destinationPath, publicIPs, keyFile):
    for IPAddress in publicIPs:
        print(IPAddress)
        connection = Connection(
            host=IPAddress,
            user=user,
            connect_kwargs={'key_filename': ['' + keyFile + '']})
        connection.run('ls -ltr', pty=True)
        connection.put(fileName,
                       remote=destinationPath + fileName,
                       preserve_mode=True)
Example #29
def create_or_update_dot_env(c: fabric.Connection, env_args, env_file):
    """
    Create or update the remote .env file by merging its contents with the
    variables passed in as CLI args and the contents of the passed in local .env
    file. Generate SECRET_KEY if not provided and prompt for DOMAIN/EMAIL if not
    provided.
    """
    info("Creating/updating the remote .env")

    local_env = {}

    if env_file:
        sub_info(f"Reading variables from {env_file}")
        file_env = dotenv.dotenv_values(env_file)
        local_env.update(file_env)

    if env_args:
        sub_info("Compiling variables passed through CLI")
        cli_env_content = "\n".join(env_args)
        cli_env = dotenv.dotenv_values(stream=io.StringIO(cli_env_content))
        local_env.update(cli_env)

    if files.exists(c, ".env"):
        sub_info("Reading variables from remote .env")
        remote_env_content = c.run("cat .env", hide=True).stdout
        remote_env = dotenv.dotenv_values(
            stream=io.StringIO(remote_env_content))
    else:
        remote_env = {}

    for key, value in local_env.items():
        if key in remote_env:
            prompt_text = f"{key} is already set to {remote_env[key]}, are you sure that you want to set it to {value}?"
            if not confirm(prompt_text):
                continue
        sub_info(f"Setting {key} to {value}")
        remote_env[key] = value

    if not remote_env.get("SECRET_KEY"):
        sub_info("Generating value for SECRET_KEY")
        remote_env["SECRET_KEY"] = "".join(
            random.choices(string.ascii_letters + string.digits, k=50))

    for key in ["DOMAIN", "EMAIL"]:
        while not remote_env.get(key):
            value = prompt(f"Enter value for {key}")
            remote_env[key] = value

    sub_info("Writing variables to remote .env")
    remote_env_content = "\n".join(f"{key}={value or ''}"
                                   for key, value in remote_env.items())
    remote_env_content_bytes = io.BytesIO(remote_env_content.encode("utf-8"))
    c.put(remote_env_content_bytes, f"{c.cwd}/.env")
Example #30
 def run_py_code(self, code=None):
     filename = "./runcode/running/a.py"
     if not code:
         code = self.code
     with open(filename, "w") as f:
         f.write(code)
     c = Connection(host=pi_ip,
                    user='******',
                    connect_kwargs={'password': '******'})
     c.put("./runcode/running/a.py", './runcode/running/a.py')
     c.close()
     self.test_py_code(filename)
     return self.stderr, self.stdout
Example #31
    class put:
        def setup(self):
            self.c = Connection('localhost')
            self.remote = self._tmp('file.txt')

        def base_case(self):
            # Copy file from 'local' (support dir) to 'remote' (tempdir)
            # TODO: consider path.py for contextmanager
            cwd = os.getcwd()
            os.chdir(self._support())
            try:
                # TODO: wrap chdir at the Connection level
                self.c.sftp().chdir(self._tmp())
                result = self.c.put('file.txt')
            finally:
                os.chdir(cwd)

            # Make sure it arrived
            ok_(os.path.exists(self.remote))
            eq_(open(self.remote).read(), "yup\n")
            # Sanity check result object
            eq_(result.remote, self.remote)
            eq_(result.orig_remote, None)
            eq_(result.local, self._support('file.txt'))
            eq_(result.orig_local, 'file.txt')

        def file_like_objects(self):
            fd = BytesIO()
            fd.write(b"yup\n")
            result = self.c.put(local=fd, remote=self.remote)
            eq_(open(self.remote).read(), "yup\n")
            eq_(result.remote, self.remote)
            ok_(result.local is fd)

        def mode_preservation(self):
            # Use a dummy file which is given an unusual, highly unlikely to be
            # default umask, set of permissions (oct 641, aka -rw-r----x)
            local = self._tmp('funky-local.txt')
            with open(local, 'w') as fd:
                fd.write('whatever')
            os.chmod(local, 0o641)
            remote = self._tmp('funky-remote.txt')
            self.c.put(remote=remote, local=local)
            eq_(stat.S_IMODE(os.stat(remote).st_mode), 0o641)
Example #32
    class put:
        def setup(self):
            self.c = Connection("localhost")
            self.remote = path.local.mkdtemp().join("file.txt").realpath()

        def base_case(self):
            # Copy file from 'local' (support dir) to 'remote' (tempdir)
            local_dir = _support()
            with path.local(local_dir).as_cwd():
                tmpdir = self.remote.dirpath()
                # TODO: wrap chdir at the Connection level
                self.c.sftp().chdir(str(tmpdir))
                result = self.c.put("file.txt")
            # Make sure it arrived
            assert self.remote.check()
            assert self.remote.read() == "yup\n"
            # Sanity check result object
            assert result.remote == self.remote
            assert result.orig_remote is None
            assert result.local == _support("file.txt")
            assert result.orig_local == "file.txt"

        def file_like_objects(self):
            fd = BytesIO()
            fd.write(b"yup\n")
            remote_str = str(self.remote)
            result = self.c.put(local=fd, remote=remote_str)
            assert self.remote.read() == "yup\n"
            assert result.remote == remote_str
            assert result.local is fd

        def mode_preservation(self, tmpdir):
            # Use a dummy file which is given an unusual, highly unlikely to be
            # default umask, set of permissions (oct 641, aka -rw-r----x)
            local = tmpdir.join("funky-local.txt")
            local.write("whatever")
            local.chmod(0o641)
            remote = tmpdir.join("funky-remote.txt")
            self.c.put(remote=str(remote), local=str(local))
            assert stat.S_IMODE(remote.stat().mode) == 0o641
Example #33
class RemoteCommandExecutor:
    """Execute remote commands on the cluster master node."""

    USERNAMES = {
        "alinux": "ec2-user",
        "centos6": "centos",
        "centos7": "centos",
        "ubuntu1404": "ubuntu",
        "ubuntu1604": "ubuntu",
    }

    def __init__(self, cluster):
        self.__connection = Connection(
            host=cluster.master_ip,
            user=self.USERNAMES[cluster.os],
            forward_agent=True,
            connect_kwargs={"key_filename": cluster.ssh_key},
        )
        self.__user_at_hostname = "{0}@{1}".format(self.USERNAMES[cluster.os], cluster.master_ip)

    def __del__(self):
        try:
            self.__connection.close()
        except Exception as e:
            # Catch all exceptions if we fail to close the clients
            logging.warning("Exception raised when closing remote ssh client: {0}".format(e))

    def run_remote_command(self, command, log_error=True, additional_files=None, raise_on_error=True, login_shell=True):
        """
        Execute remote command on the cluster master node.

        :param command: command to execute.
        :param log_error: log errors.
        :param additional_files: additional files to copy before executing script.
        :param raise_on_error: if True raises a RemoteCommandExecutionError on failures
        :param login_shell: if True prepends /bin/bash --login -c to the given command
        :return: result of the execution.
        """
        if isinstance(command, list):
            command = " ".join(command)
        self._copy_additional_files(additional_files)
        logging.info("Executing remote command command on {0}: {1}".format(self.__user_at_hostname, command))
        if login_shell:
            command = "/bin/bash --login -c {0}".format(shlex.quote(command))

        result = self.__connection.run(command, warn=True, pty=True, hide=False)
        result.stdout = "\n".join(result.stdout.splitlines())
        result.stderr = "\n".join(result.stderr.splitlines())
        if result.failed and raise_on_error:
            if log_error:
                logging.error(
                    "Command {0} failed with error:\n{1}\nand output:\n{2}".format(
                        command, result.stderr, result.stdout
                    )
                )
            raise RemoteCommandExecutionError(result)
        return result

    def run_remote_script(self, script_file, args=None, log_error=True, additional_files=None):
        """
        Execute a script remotely on the cluster master node.

        Script is copied to the master home dir before being executed.
        :param script_file: local path to the script to execute remotely.
        :param args: args to pass to the script when invoked.
        :param log_error: log errors.
        :param additional_files: additional files to copy before executing script.
        :return: result of the execution.
        """
        script_name = os.path.basename(script_file)
        self.__connection.put(script_file, script_name)
        if not args:
            args = []
        return self.run_remote_command(
            ["/bin/bash", "--login", script_name] + args, log_error=log_error, additional_files=additional_files
        )

    def _copy_additional_files(self, files):
        for file in files or []:
            self.__connection.put(file, os.path.basename(file))
        "key_filename": "c:\Users\Guodong\.ssh\exportedkey201310171355",
    }

    # Superuser privileges via auto-response
    sudo_pass_auto_respond = Responder(
        pattern=r'\[sudo\] password:',
        response='mypassword\n',
    )

    # create connection
    cxn = Connection('192.168.88.19', config=fabric_config)

    # do tasks on host
    print(cxn.run("uname -a", hide=True).stdout)
    print(cxn.sudo("whoami", hide=True).stdout)
    cxn.run('sudo whoami', pty=True, watchers=[sudo_pass_auto_respond])
    cxn.put(__file__, "/tmp/this.py")
    cxn.run("sudo rm -f /tmp/this.py")
    # cxn.get("/tmp/this.py", "this.py")
    print(disk_free(cxn))

    # configure multiple servers, method 1
    for host in ('192.168.88.19', '192.168.88.20', '192.168.88.21'):
        result = Connection(host, config=fabric_config).run('uname -s', hide=True)
        print("{}: {}".format(host, result.stdout.strip()))

    # configure multiple servers, method 2
    results = Group('192.168.88.19', '192.168.88.20', '192.168.88.21', config=fabric_config).run('uname -s', hide=True)
    for connection, result in results.items():
        print("{0.host}: {1.stdout}".format(connection, result))