def deploy_content(cls):
    """Reset the remote content checkout to a clean upstream state.

    Opens an SSH session to TARGET.DOMAIN, hard-resets and cleans the
    git checkout at TARGET.CHECKOUT_PATH, pulls the latest commits, then
    runs the project's path fixup tool from TARGET.TOOL_PATH.
    """
    remote = SshMachine(TARGET.DOMAIN)
    git = remote['git']
    with remote.cwd(TARGET.CHECKOUT_PATH):
        # Throw away any local edits and untracked files before pulling.
        print(git('reset', '--hard'))
        print(git('clean', '-f', '-d'))
        print(git('pull'))
    with remote.cwd(TARGET.TOOL_PATH):
        print(remote['adfd']('fix-staging-paths'))
def startExperiment(gpu, session, command, project_dir, repo_ssh_string, update_repo=True, rebuild_docker=False, branch=None):
    """Start an experiment remotely inside a docker container.

    Requirements:
        <project_dir>/docker/run.sh must exist and take two arguments:
            1. The name of the docker container to be created
            2. The command to be executed
        <project_dir>/docker/build.sh must exist.
        The project directory must be located in the home directory.

    Args:
        gpu (dict): Holds the target 'server' and the 'gpu_nr' to run on.
        session (string): Name of the container to be created.
        command (string): Command to be executed inside the container.
        project_dir (string): Name of the project directory.
        repo_ssh_string (string): SSH remote of the repository to sync.
        update_repo (bool): Pull the newest commits before running.
        rebuild_docker (bool): Rebuild the docker image before running.
        branch (string): Branch to check out when updating the repo.
    """
    remote = SshMachine(gpu['server'])
    try:
        r_runDocker = remote[remote.cwd / project_dir / "docker/run.sh"]
        r_buildDocker = remote[remote.cwd / project_dir / "docker/build.sh"]
        home_dir = remote.cwd

        killRunningSession(remote, session)

        if update_repo:
            # BUG FIX: updateRepo() expects the server *name* and opens its
            # own SshMachine; previously the SshMachine object itself was
            # passed, which SshMachine(...) cannot consume.
            updateRepo(gpu['server'], project_dir, repo_ssh_string, branch=branch)

        if rebuild_docker:
            print("Building container...")
            with remote.cwd(home_dir / project_dir):
                r_buildDocker()
            print("Done.")

        with remote.cwd(home_dir / project_dir):
            r_runDocker(str(gpu['gpu_nr']), session, command)
    finally:
        # Always release the SSH connection, even if a remote step fails
        # (the original leaked it on any exception).
        remote.close()
def main(machine, instances, queues=['high', 'default', 'low']): r = StrictRedis.from_url(REDIS_URL_RQ) machine_workers = [worker for worker in Worker.all(connection=r) if is_local(machine, worker.name) and \ any(works_on(worker, queue) for queue in queues)] print "%d workers running on %s" % (len(machine_workers), machine) if len(machine_workers): print '\n'.join( map( lambda m: "%s\t%s\t%s" % (m.name, m.get_state(), "stopped" if m.stopped else "running"), machine_workers)) machine_info = workers(machine) rem = SshMachine(machine_info['hostname'], ssh_opts=SSH_OPTS, **machine_info.get('kwargs', {})) dir = rem.path(machine_info['rqdir']) with rem.cwd(dir): for i in xrange(0, instances - len(machine_workers)): rem["./worker.sh"](' '.join(queues)) print "Worker spawned"
def run_more_instances(machine, count, queues=['high', 'default', 'low']): rem = SshMachine(machine, ssh_opts=SSH_OPTS, keyfile=KEYFILE, user='******') dir = rem.path('/home/ec2-user/rq') with rem.cwd(dir): for i in xrange(0, count): rem["./worker.sh"](' '.join(queues)) print "Worker spawned"
def updateRepo(servername, project_dir, repo_ssh_string, branch):
    """Pull the newest commits of `branch` into the remote checkout.

    Fetches and hard-resets ~/<project_dir> on `servername` to
    origin/<branch>, then verifies the remote HEAD matches the local one.

    Args:
        servername: Host to SSH into.
        project_dir: Project directory under the remote home directory.
        repo_ssh_string: Accepted for interface compatibility; unused here.
        branch: Branch name to check out and reset to.

    Raises:
        RuntimeError: If the remote HEAD does not match the local HEAD
            after the update (i.e. local commits were never pushed).
    """
    remote = SshMachine(servername)
    try:
        r_git = remote['git']
        home_dir = remote.cwd
        print("Updating repo...", end='')
        with remote.cwd(home_dir / project_dir):
            r_git('fetch', '-q', 'origin')
            r_git('checkout', branch)
            # Discard any local changes on the remote checkout.
            r_git('reset', '--hard', 'origin/{}'.format(branch), '-q')
            r_head = r_git('rev-parse', 'HEAD')
        l_head = git('rev-parse', 'HEAD')
        # Explicit exception instead of `assert`: asserts are stripped
        # under `python -O` and this check guards correctness.
        if l_head != r_head:
            raise RuntimeError(
                "Local git hash != pushed git hash. Did you forget to push changes?")
        print("Repo updated")
    finally:
        # Close the SSH connection on every path — the original leaked
        # it whenever the hash check failed.
        remote.close()
def setup(machine): rem = SshMachine(workers(machine)['hostname'], ssh_opts=SSH_OPTS) dir = rem.path(workers(machine)['rqdir']) if not dir.exists(): print "CLONING REPO..." rem["git"]("clone", "http://github.com/darioush/rq-dist", dir) print "CLONED..." print "MAKING VIRTUAL ENV..." with rem.cwd(dir): rem["virtualenv"]("env") print "MADE VIRTUAL ENV..." with rem.cwd(dir): print "UPDATING CODE ..." rem["git"]("pull", "origin", "master") print "UPDATING VENV ..." rem["./update-venv.sh"]() my_hostname, _, _ = socket.gethostname().partition('.') if my_hostname == machine: print "Not syncing master worker" return my_d4j = '/'.join( get_property('d4j_path', my_hostname, 0)[0].split('/')[:-2]) dst_d4j = '/'.join(get_property('d4j_path', machine, 0)[0].split('/')[:-3]) print "RSYNCING FOR DEFECTS4J " rsync = local['rsync']['-avz', '--exclude', '.git', '--exclude', 'project_repos'][my_d4j] rsync('%s:%s' % (workers(machine)['hostname'], dst_d4j)) rem_d4j = rem.path(dst_d4j) / 'defects4j' repos_dir = rem_d4j / 'project_repos' if not repos_dir.exists(): with rem.cwd(rem_d4j): print "GETTING REPOSITORIES..." rem['./get-repos.sh']()
def deploy_code(cls):
    """Pull the latest tool code on TARGET and reinstall it editable via pip."""
    remote = SshMachine(TARGET.DOMAIN)
    with remote.cwd(TARGET.TOOL_PATH):
        pull_output = remote['git']('pull')
        print(pull_output)
        install_output = remote[VIRTENV.PIP_BIN]('install', '-U', '-e', '.')
        print(install_output)
from plumbum import SshMachine remote = SshMachine('intrepid', user='******', keyfile='/home/rmeadows/.ssh/id_rsa') r_ls = remote['ls'] print 'intrepid: /opt/repos/github:' with remote.cwd('/opt/repos/github'): print r_ls()