def _parse_bool(value):
    # fab's CLI passes task arguments as strings ("True"/"False"); the old
    # code handled that with eval(), which is unsafe on arbitrary input and
    # made `native or eval(str(native))` truthy even for the string "False".
    if isinstance(value, str):
        return value.strip().lower() in ('1', 'true', 'yes', 'on')
    return bool(value)


def shell(native=False, tmux=True):
    """ Open common ssh shell

    :param native: use fabric's open_shell instead of spawning the local
        ``ssh`` binary; accepts booleans or their CLI string spellings.
    :param tmux: when truthy, run ``config['ssh_startup']`` on login.
    """
    print(config)
    if _parse_bool(native):
        print('Opening native fabric shell')
        open_shell(config['ssh_startup'] if _parse_bool(tmux) else '')
    else:
        print('Opening ssh shell')
        key = server['ssh_cert']
        password = server['ssh_password']  # only used by the commented sshpass variant below
        if key:
            key = '-i ' + os.path.join(home_folder, '.temp/', key)
        else:
            key = ''  # bug fix: avoid interpolating None into the command line
        # ssh = "sshpass -p '%s' ssh %s -p %s %s %s" % (password,
        ssh = "ssh %s -p %s %s %s" % (
            '%(ssh_user)s@%(ip)s' % server,
            server['ssh_port'],
            key,
            "-t '%s'" % config['ssh_startup'] if _parse_bool(tmux) else '')
        # if platform.system() == 'Linux':
        #     ssh = "sshpass -p '%s' %s" % (password, ssh)
        # else:
        #     print(password)
        print(ssh)
        os.system(ssh)
def pip_install(self):
    """Pip-install the project's requirements into the instance virtualenv.

    Honours the optional 'before_pip_install' / 'after_pip_install' pause
    points (each opens an interactive remote shell) and the matching env
    hooks, and first copies any ``*.pth`` files from the source tree into
    the virtualenv's site-packages.
    """
    # Optional interactive pause before installing.
    if 'before_pip_install' in self.pause_at:
        print(green('\nOpening remote shell - before_pip_install.'))
        open_shell()
    # Optional project-defined hook.
    if 'before_pip_install' in env:
        env.before_pip_install(env, *self.task_args, **self.task_kwargs)
    # NOTE(review): exists() is given a glob pattern here — presumably the
    # remote-side check expands '*.pth'; confirm against the exists() helper.
    if exists(os.path.join(self.source_path, '*.pth')):
        print(green('\nCopying .pth files.'))
        commands.copy(
            from_path=os.path.join(self.source_path, '*.pth'),
            to_path='%s/lib/python%s/site-packages' % (self.virtualenv_path,
                commands.get_python_version())
        )
    print(green('\nPip installing requirements.'))
    # TODO: use requirements_path instead of project_path?
    instance.pip_install_requirements(
        self.virtualenv_path,
        self.source_path,
        env.log_path
    )
    # Optional interactive pause after installing.
    if 'after_pip_install' in self.pause_at:
        print(green('\nOpening remote shell - after_pip_install.'))
        open_shell()
    if 'after_pip_install' in env:
        env.after_pip_install(env, *self.task_args, **self.task_kwargs)
def shell(revision=None):
    '''Open a shell into an app's environment (the enabled one as default)'''
    # Bug fix: the previous version ran erun('pwd') into an unused local
    # (`cwd`), issuing a pointless remote command; that dead call is removed.
    open_shell('cd %s' % (
        'app' if not revision else ('app-%s' % revision),
    ))
def kill_celeryd():
    '''Kill all celeryd processes in case they cannot be stopped normally

    Not necessary with supervisord v3.0 (supports stopasgroup and killasgroup)?
    '''
    # Find every celeryd PID and kill -9 it; warn_only so a failing grep/kill
    # does not abort the fab run.
    kill_pipeline = "ps auxww | grep celeryd | awk '{print $2}' | xargs kill -9"
    with settings(warn_only=True):
        open_shell(kill_pipeline)
def shell(args=None, path="~/"):
    "Run command in a remote shell (in ./~). If command not specified then run the default shell."
    # With a command: cd into `path` and run it; without one, drop the user
    # into an interactive shell.
    if args is not None:
        with ctx.cd(path):
            ops.run(args)
    else:
        open_shell()
def shell():
    """ Opens the project's python interactive shell. """
    # Activate the current release environment, start the Django shell, and
    # exit the remote login once it is closed.
    command = 'source %s; %s shell --settings=%s; exit;' % (
        REMOTE_ENV_CURRENT_ACTIVATE,
        REMOTE_RELEASE_CURRENT_MANAGE,
        env.settings,
    )
    open_shell(command)
def shell():
    """ Spawns a shell on the remote instance """
    # Force serial execution while the interactive shell is open.
    overrides = {'parallel': False}
    with settings(**overrides):
        open_shell()
def handle_after_restart(self):
    """Run the optional 'after_restart' pause point and env hook."""
    hook = 'after_restart'
    # Interactive pause, if requested on the command line.
    if hook in self.pause_at:
        print(green('\nOpening remote shell - after_restart.'))
        open_shell()
    # Project-defined callback, if configured.
    if hook in env:
        env.after_restart(env, *self.task_args, **self.task_kwargs)
def ssh(name):
    """SSH into an instance.

    Reads Fabric settings from ``<env.ssh_directory>/<name>.json`` and the
    host address from ``deploy/fab_hosts/<name>.txt``.
    """
    with open(os.path.join(env.ssh_directory, "".join([name, ".json"])), "r") as f:  # noqa
        host_data = json.load(f)
    # Bug fix: the hosts file was previously opened without ever being closed
    # (and its handle shadowed the already-closed `f`); use a context manager.
    with open("deploy/fab_hosts/{}.txt".format(name)) as hosts_file:
        env.host_string = "ubuntu@{}".format(hosts_file.readline().strip())
    with settings(**host_data):
        open_shell()
def shell():
    """Open a shell in the current revision's app dir with its virtualenv active."""
    remote_cwd = erun('pwd').stdout
    venv_dir = os.path.abspath(os.path.join(remote_cwd, '.virtualenv'))
    open_shell('cd %s && source %s/bin/activate' % (
        'app-%s' % describe_revision(),
        venv_dir,
    ))
def shell(*args, **kwargs):
    """Open a remote shell with the SendGrid credentials exported.

    Extra positional/keyword arguments are forwarded to ``open_shell``.
    """
    # NOTE: this exports a password on the shell command line, visible in
    # process listings on the remote host.
    envVars = {
        "SENDGRID_EMAIL_USERNAME": env.SENDGRID_EMAIL_USERNAME,
        "SENDGRID_EMAIL_PASSWORD": env.SENDGRID_EMAIL_PASSWORD,
        # "PYTHONPATH:": env.python_executable + ":$PYTHONPATH",
    }
    # Fix: items() instead of the Python-2-only iteritems() — identical
    # output, and the module keeps working under Python 3.
    setEnv = ";".join("export {0}={1}".format(var, val)
                      for var, val in envVars.items())
    with prefix(setEnv):
        open_shell(*args, **kwargs)
def ssh(name):
    """ Opens a shell connection to a host given by its EC2 name. """
    target = get_instance_by_tags({'Name': name})
    if target is None:
        return
    # NOTE(review): user placeholder and absolute key path are hard-coded.
    connection = dict(
        user='******',
        host_string=target.public_dns_name,
        key_filename='/Users/aismail/.ssh/ami-keypair.pem',
    )
    with settings(**connection):
        open_shell()
def shell(revision=None):
    '''Open a shell into an app's environment (the enabled one as default)'''
    remote_cwd = erun('pwd').stdout
    venv_dir = os.path.abspath(os.path.join(remote_cwd, '.virtualenv'))
    # Default to the enabled 'app' symlink; a revision selects 'app-<rev>'.
    target_dir = 'app' if not revision else ('app-%s' % revision)
    open_shell('cd %s && source %s/bin/activate' % (
        target_dir,
        venv_dir,
    ))
def deploy_source(self):
    """Transfer the source tree, honouring the 'before_deploy_source' hooks."""
    hook = 'before_deploy_source'
    if hook in self.pause_at:
        print(green('\nOpening remote shell - before_deploy_source.'))
        open_shell()
    if hook in env:
        env.before_deploy_source(env, *self.task_args, **self.task_kwargs)
    print(green('\nDeploying source.'))
    source.transfer_source(upload_path=self.source_path, tree=self.stamp)
def show_log():
    """Tail today's tomcat log; on Ctrl-C offer a shell or abort."""
    with settings(warn_only=True):
        with hide("warnings"):  # removes error message when control-c is pressed
            today = datetime.datetime.today().strftime("%Y-%m-%d")
            # run_tomcat("tail -f /var/log/tomcat/catalina.out")
            # Redhat broke catalina.out, but this seems to work:
            run_tomcat("tail -f /var/log/tomcat/catalina." + today + ".log")
            # VERY noisy: run_tomcat("tail -f /var/log/messages")
            if confirm("Continue?"):
                return
            if confirm("Open tomcat.sg shell on server?"):
                open_shell("tomcat.sg")
            else:
                abort("Aborting at user request.")
def migrate(self):
    """Run Django migrations, honouring the 'before_migrate' hooks."""
    hook = 'before_migrate'
    if hook in self.pause_at:
        print(green('\nOpening remote shell - before_migrate.'))
        open_shell()
    if hook in env:
        env.before_migrate(env, *self.task_args, **self.task_kwargs)
    print(green('\nMigrating database.'))
    commands.django_manage(self.virtualenv_path, self.source_path,
                           'migrate --noinput')
    print('')
def syncdb(self):
    """Run Django syncdb, honouring the 'before_syncdb' hooks."""
    hook = 'before_syncdb'
    if hook in self.pause_at:
        print(green('\nOpening remote shell - before_syncdb.'))
        open_shell()
    if hook in env:
        env.before_syncdb(env, *self.task_args, **self.task_kwargs)
    print(green('\nSyncing database.'))
    commands.django_manage(self.virtualenv_path, self.source_path, 'syncdb')
    print('')
def create_virtualenv(self):
    """Create the instance virtualenv, honouring the 'before_create_virtualenv' hooks."""
    hook = 'before_create_virtualenv'
    if hook in self.pause_at:
        print(green('\nOpening remote shell - before_create_virtualenv.'))
        open_shell()
    if hook in env:
        env.before_create_virtualenv(env, *self.task_args, **self.task_kwargs)
    print(green('\nCreating virtual environment.'))
    instance.create_virtualenv(self.virtualenv_path, env.python_version)
    # set virtualenv path in env
    env.virtualenv_path = self.virtualenv_path
def __call__(self, *args, **kwargs):
    """ parse optional 'pause' argument, can be given like this: fab staging test:pause=test """
    if 'pause' in kwargs:
        pause_at = kwargs['pause'].split(',')
    else:
        pause_at = []
    # test pause
    if 'test' in pause_at:
        print(green('\nOpening remote shell - test.'))
        open_shell()
    # test hook
    if 'test' in env:
        env.test(env, *args, **kwargs)
def install_deps():
    """Provision a host with git, nginx, supervisor, Oracle Java 7 and
    Elasticsearch 1.4.1.

    The Java step is interactive: a shell is opened and the operator must
    press Ctrl+D after the installer finishes.
    """
    # configure for java installation
    require.deb.ppa("ppa:webupd8team/java")
    sudo("apt-get update")
    require.deb.packages([
        "git",
        "nginx",
        "supervisor",
        # "oracle-java7-installer"
    ])
    # manual istallation of java.
    # NOTE(review): the warn() text has typos ("istallation", "will done"),
    # but it is a runtime string and therefore left unchanged here.
    warn("Now we will install java virtual machine\n and when installation will done\nyou need press Ctrl+D to continue automate installation")
    open_shell("sudo apt-get install oracle-java7-installer && java -version")
    # continue automate installation
    if not exists("elasticsearch-1.4.1.deb"):
        run("wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.4.1.deb")
    sudo("dpkg -i elasticsearch-1.4.1.deb")
    # otherwise installation of graphite will failed.
    sudo("/etc/init.d/nginx stop")
def handle_compass(self):
    """Compile the compass project (when configured), honouring its hooks."""
    if not env.compass_version:
        return
    hook = 'before_compass_compile'
    if hook in self.pause_at:
        print(green('\nOpening remote shell - before_compass_compile.'))
        open_shell()
    if hook in env:
        env.before_compass_compile(env, *self.task_args, **self.task_kwargs)
    print(green('\nCompiling compass project and upload static files.'))
    # The optional `compass_extract_path` is used by the AnsibleHost type
    # to allow these files to be extracted in a sub-directory of the project.
    # If the project is configured correctly the `collectstatic` run will
    # then copy them to the right location.
    extract_dir = os.path.join(self.source_path,
                               env.get('compass_extract_path', ''))
    source.compass_compile(
        upload_path=self.source_path,
        extract_path=extract_dir,
        tree=self.stamp,
        compass_version=env.compass_version)
def give_me_a_shell():
    """Open a remote shell cd'ed into ``env.my_path``."""
    enter_project_dir = "cd {}".format(env.my_path)
    open_shell(enter_project_dir)
def release(head='HEAD', web_root=None, requirements=u'requirements.txt',
            envpath='.env', steps=None):
    '''Main task for releasing.

    Unarchive the release in the webroot, sync_virtualenv and update the
    app/ directory to point to the new release and archive in old/.

    :param head: git-ish revision to release.
    :param web_root: remote web root; defaults to the remote cwd.
    :param requirements: requirements file that must exist inside the archive.
    :param envpath: env dir that must already exist in the web root.
    :param steps: optional extra steps, each called with the virtualenv path.
    '''
    steps = validate_steps(steps) if steps else []
    cwd = erun('pwd').stdout if not web_root else web_root
    abs_envpath = os.path.abspath(os.path.join(cwd, envpath))
    if not files.exists(abs_envpath):
        raise abort('%s doesn\'t exist, create it before release using configure_env task!!!' % abs_envpath)
    # locally we create the archive with the app code
    create_release_archive(head)
    release_filename = get_release_filename()
    local_release_filepath = get_release_filepath()
    actual_version = describe_revision(head)
    previous_version = None
    # check that the archive contains the requirements file
    tf = tarfile.open(local_release_filepath)
    try:
        tf.getmember(requirements)
    except KeyError as e:
        abort('file \'%s\' doesn\'t exist, indicate a requirements file contained into the release archive' % requirements)
    finally:
        tf.close()
    # and upload it to the server
    if not files.exists(release_filename):
        put(local_path=local_release_filepath)
    app_dir = os.path.abspath(os.path.join(cwd, 'app-%s' % describe_revision(head)))
    virtualenv_path = os.path.abspath(os.path.join(cwd, '.virtualenv'))
    try:
        # if exists remove dir
        if files.exists(app_dir):
            erun('rm -vfr %s' % (
                app_dir,
            ))
        # create the remote dir
        erun('mkdir -p %s' % app_dir)
        erun('tar xf %s -C %s' % (
            release_filename,
            app_dir,
        ))
        sync_virtualenv(virtualenv_path, '%s/%s' % (app_dir, requirements,))# parametrize
        with cd(app_dir):
            for step in steps:
                step(virtualenv_path)
        # find the previous release and move/unlink it
        if is_link('app'):
            # TODO: move old deploy in an 'archive' directory
            previous_deploy_path = erun('basename $(readlink -f app)').stdout
            idx = previous_deploy_path.index('-')
            previous_version = previous_deploy_path[idx + 1:]
            if previous_version != actual_version:
                erun('unlink app')
                erun('mkdir -p old && mv -f %s old/' % previous_deploy_path)
        # point the 'app' symlink at the new release
        erun('ln -s %s app' % app_dir)
    except CommandFailed as e:
        # NOTE(review): "occoured" typo is a runtime string, left unchanged.
        print 'An error occoured: %s' % e
    print '''
%s --> %s

Use 'honcho --env ../.env start' inside a screen session
''' % (previous_version, actual_version)
    # drop the operator into the new release with its virtualenv active
    open_shell('cd %s && source %s/bin/activate' % (
        app_dir,
        virtualenv_path,
    ))
def sudo_vi(file):
    """fab auth.pro ope.vi:/etc/hosts"""
    # NOTE(review): the parameter shadows the `file` builtin, but its name is
    # visible to fab callers, so it is kept unchanged.
    command = "sudo_vi %s && exit" % (file)
    open_shell(command)
def shell():
    """Open a Django shell in the server project dir with its env activated."""
    command = '''cd %(server_project_dir)s/; source env/bin/activate; python manage.py shell''' % env
    open_shell(command)
def _release(archive, revision=None, web_root=None, **kwargs):
    '''
    Main task its role is to decompress an archive to the web root into a
    directory named 'app-X' where X identifies the revision; by default the
    revision is calculated from the sha256 of the archive when not indicated.
    :param revision: release identifier; defaults to sha256 of the archive.
    :param archive: local path of the release archive (must exist).
    :param web_root: remote web root; defaults to the remote cwd.
    :param kwargs: unused here; accepted for interface compatibility.
    :return: None (ends by opening a shell in the new app dir).
    '''
    previous_revision = None
    cwd = erun('pwd').stdout if not web_root else web_root
    if not os.path.exists(archive):
        raise CommandFailed('Archive \'%s\' doesn\'t exist' % archive)
    revision = revision or hashfile(archive, hashlib.sha256())
    remote_filepath = '%s-%s' % (archive, revision)
    app_dir = os.path.join(cwd, 'app-%s' % revision)
    app_symlink = os.path.join(cwd, 'app')
    put(local_path=archive, remote_path=remote_filepath)
    try:
        # if exists remove dir
        if files.exists(app_dir):
            erun('rm -vfr %s' % (
                app_dir,
            ))
        # create the remote dir
        erun('mkdir -p %s' % app_dir)
        erun('tar xf %s -C %s' % (
            remote_filepath,
            app_dir,
        ))
        # find the previous release and move/unlink it
        if files.exists(app_symlink) and is_link(app_symlink):
            # TODO: move old deploy in an 'archive' directory
            previous_deploy_path = erun('basename $(readlink -f %s)' % app_symlink).stdout
            idx = previous_deploy_path.index('-')
            previous_revision = previous_deploy_path[idx + 1:]
            if previous_revision != revision:
                erun('unlink %s' % app_symlink)
                erun('mkdir -p old && mv -f %s old/' % previous_deploy_path)
        elif files.exists(app_symlink):
            raise CommandFailed('app directory already exists and is not a symlink')
        # point the 'app' symlink at the new release
        erun('ln -s %s %s' % (app_dir, app_symlink))
    except CommandFailed as e:
        # NOTE(review): "occoured" typo is a runtime string, left unchanged.
        print 'An error occoured: %s' % e
    print '''
%s --> %s
''' % (previous_revision or '?', revision)
    open_shell('cd %s' % (
        app_dir,
    ))
def django_shell():
    """ Opens a python shell that connects to the django application """
    manage_command = _manage("shell")
    operations.open_shell(command=manage_command)
def ssh(name):
    """SSH into an instance."""
    # Fabric settings for the host live in <ssh_directory>/<name>.json.
    settings_path = os.path.join(env.ssh_directory, name + '.json')
    with open(settings_path, 'r') as settings_file:  # noqa
        host_data = json.load(settings_file)
    with settings(**host_data):
        open_shell()
def venv_shell():
    """ Opens a bash shell with the application virtualenv activated """
    activate = "cd %s && source %s/bin/activate" % (env.site_path,
                                                    env.venv_path)
    operations.open_shell(command=activate)
def dbshell():
    """ Opens a psql shell that connects to the application database """
    manage_command = _manage("dbshell")
    operations.open_shell(command=manage_command)
def shell(self):
    """Open an interactive shell using this object's fabric settings."""
    overrides = dict(self.fabric_params)
    with settings(**overrides):
        open_shell()
def sshcmd(cmd):
    '''fab auth.pro ope.sshcmd:"sudo vi /etc/hosts" '''
    # Coerce the argument to a string exactly as the original did.
    remote_command = "%s" % cmd
    open_shell(remote_command)
def shell():
    """Open an interactive remote shell, propagating open_shell's result."""
    result = operations.open_shell()
    return result
def ssh():
    '''fab auth.pro ope.ssh '''
    # Runs `hostname` on login so the operator sees which machine they reached.
    open_shell("hostname")