def restart_stage_server(server):
    """Restart the staging server after user confirmation.

    NOTE: the passed-in ``server`` is ignored; a fresh connection to
    STAGE_SERVER_HOST is always opened.
    """
    server = Connection(STAGE_SERVER_HOST)
    if confirm('Restart Staging server?'):
        stop_server(server, STAGE_PROJECT_PATH)
        # Optional node package rebuild before bringing the server back up.
        if confirm('Rebuild yarn?'):
            rebuild_node(server, STAGE_PROJECT_PATH)
        start_server(server, STAGE_PROJECT_PATH)
        print('Stage server was restarted')
    else:
        # Fixed typo: "canseled" -> "canceled"
        print('Server restart is canceled')
def restart_local_server(server):
    """Restart the local/dev server after user confirmation.

    NOTE: the passed-in ``server`` is ignored; a fresh connection to
    DEV_SERVER_HOST is always opened.
    """
    server = Connection(DEV_SERVER_HOST)
    # Bug fix: the prompts/messages previously said "Production" although
    # this task operates on the DEV host and path (copy-paste error).
    if confirm('Restart Local server?'):
        stop_server(server, DEV_PROJECT_PATH)
        if confirm('Rebuild yarn?'):
            rebuild_node(server, DEV_PROJECT_PATH)
        start_server(server, DEV_PROJECT_PATH)
        print('Local server was restarted')
    else:
        # Fixed typo: "canseled" -> "canceled"
        print('Server restart is canceled')
def logo(c):
    """Ensure the MIT logo EPS files exist under images/logos.

    If the target EPS is already present this is a no-op.  Otherwise the
    user is asked to download the official zip manually (an MIT
    certificate is required), after which it is unpacked and cleaned up.
    """
    logodir = SRCDIR.joinpath("images", "logos")
    # Already installed — nothing to do.
    if logodir.joinpath("MIT-logo-red-gray.eps").exists():
        return
    print(
        "Use your browser to download the zip file into ./images/logos (MIT certificate required)"
    )
    logourl = (
        "https://web.mit.edu/graphicidentity/download/logo-sets/MIT-logos-print.zip"
    )
    print("\t" + logourl)
    # TODO try to download ourselves
    from invocations.console import confirm
    if not confirm("Have you downloaded the zip file?"):
        exit(1)
    zippath = logodir.joinpath("MIT-logos-print.zip")
    # Verify the user actually placed the zip where we expect it.
    if not logodir.joinpath("MIT-logos-print.zip").exists():
        print("You failed to download the zip file")
        exit(1)
    # Unpack in a throwaway directory, move only the EPS files over,
    # then delete the zip.
    with tempfile.TemporaryDirectory() as d:
        c.run(f"unzip -q {zippath} -d {d}")
        c.run(f"mv {d}/MIT-logos-print/*.eps {logodir}")
        c.run(f"rm {zippath}")
def deploy(conn, c=None):
    """
    Deploy latest version of the project.
    Backup current version of the project, push latest version of the
    project via version control or rsync, install new requirements, sync
    and migrate the database, collect any new static assets, and restart
    gunicorn's worker processes for the project.
    """
    # NOTE(review): most of the backup/upload steps described in the
    # docstring are currently commented out below; only collectstatic,
    # migrate, template upload and restart are active.
    c = connect(c)
    # run(c, "uname -a")
    # print(c)
    # print(conn)
    # run(conn, "uname -a")
    #
    # conn.host=env.hosts[0]
    # conn.user=env.user
    # print(conn)
    # run(conn, "uname -a")
    # exit()
    # First-time deploy: offer to provision the project directory.
    if not exists(c, env.proj_path):
        if confirm("Project does not exist in host server: %s"
                   "\nWould you like to create it?" % env.proj_name):
            create()
        else:
            abort()
    # Backup current version of the project
    # with c.cd(env.proj_path):
    #     backup(c, "last.db")
    # exit()
    # if env.deploy_tool in env.vcs_tools:
    #     with cd(env.repo_path):
    #         if env.deploy_tool == "git":
    #             run("git rev-parse HEAD > %s/last.commit" % env.proj_path)
    #         elif env.deploy_tool == "hg":
    #             run("hg id -i > last.commit")
    #     with project():
    #         static_dir = static()
    #         if exists(static_dir):
    #             run("tar -cf static.tar --exclude='*.thumbnails' %s" %
    #                 static_dir)
    # else:
    #     with cd(join(env.proj_path, "..")):
    #         excludes = ["*.pyc", "*.pio", "*.thumbnails"]
    #         exclude_arg = " ".join("--exclude='%s'" % e for e in excludes)
    #         run("tar -cf {0}.tar {1} {0}".format(env.proj_name, exclude_arg))
    #
    # # Deploy latest version of the project
    # with update_changed_requirements():
    #     if env.deploy_tool in env.vcs_tools:
    #         vcs_upload()
    #     else:
    #         rsync_upload()
    # Collect static assets and apply pending migrations inside the
    # project's venv/context.
    with project(c):
        manage(c, "collectstatic -v 0 --noinput")
        manage(c, "migrate --noinput")
    # Re-render and push every managed config template, then restart.
    for name in templates:
        upload_template_and_reload(c, name)
    restart(c)
    return True
def build_docker_image(c):
    """Build the Docker image, then optionally push it.

    Pushing requires an authenticated dockerhub session with access to
    the vokomokum group.
    """
    c.run(f"docker build . --tag {docker_image}")
    if not confirm("Upload image to docker registry?"):
        return
    c.run(f"docker push {docker_image}")
def bucket_delete(c):
    """Deletes the S3 bucket used to host the site"""
    question = "Are you sure you want to delete the bucket %r?" % BUCKET_NAME
    if confirm(question):
        s3_conn = connect_s3(calling_format=BUCKET_CALLING_FORMAT)
        s3_conn.delete_bucket(BUCKET_NAME)
        print('Bucket %r deleted.' % BUCKET_NAME)
    else:
        print('Aborting at user request.')
        exit(1)
def bucket_delete(c):
    """Deletes the S3 bucket used to host the site"""
    # Guard clause: bail out unless the user explicitly confirms.
    if not confirm(
            "Are you sure you want to delete the bucket %r?" % BUCKET_NAME):
        print("Aborting at user request.")
        exit(1)
    connect_s3(calling_format=BUCKET_CALLING_FORMAT).delete_bucket(BUCKET_NAME)
    print("Bucket %r deleted." % BUCKET_NAME)
def dump_database(server):
    """Create a database dump on prod, stage, or the local dev server.

    The target is chosen interactively; the passed-in ``server`` is
    always replaced with a fresh connection.
    """
    if confirm('Create dump at prod?'):
        server = Connection(PROD_SERVER_HOST)
        project_path = PROD_PROJECT_PATH
        print("Creating at Production server")
    elif confirm('Create dump at stage?'):
        server = Connection(STAGE_SERVER_HOST)
        project_path = STAGE_PROJECT_PATH
        print("Creating at Stage server")
    else:
        server = Connection(DEV_SERVER_HOST)
        project_path = DEV_PROJECT_PATH
        print("Creating at Local server")
    create_dump(server, project_path)
    print("Dump creating is done!")
def restart_stage_server(server):
    """Restart the staging server after user confirmation.

    NOTE: the passed-in ``server`` is ignored; a new connection to the
    hard-coded staging host is opened instead.
    """
    server = Connection('128.199.74.157')
    if confirm('Restart Staging server?'):
        stop_server(server, STAGE_PROJECT_PATH)
        start_server(server, STAGE_PROJECT_PATH)
        print('Stage server was restarted')
    else:
        # Fixed typo: "canseled" -> "canceled"
        print('Server restart is canceled')
def deploy_dist(c):
    """Provision a brand new server: install anaconda, git and bower.

    Aborts unless the user acknowledges that existing packages will be
    wiped out.
    """
    warning = ('the deployment will wipe out '
               'all the existing packages. Is this ok')
    if not confirm(warning):
        raise Exit('aborting dist deployment')
    conda.install_anaconda(c)
    git.install_git(c)
    bower.install_bower(c)
def test(c, standalone=True):
    """Run the pytest suite inside the virtualenv.

    When called as part of a larger flow (``standalone=False``) a test
    failure prompts the user; declining to continue raises Exit.
    """
    with c.prefix(venv_activation):
        c.run("pip install -q pytest multidict")
        outcome = c.run('pytest', warn=True)
        if outcome.failed and not standalone:
            if not confirm("Tests failed. Continue anyway?"):
                raise Exit("Aborting at user request.")
def deploy(server):
    """Deploy to production (after confirmation) or otherwise to stage.

    Stops the server, pulls the latest code, optionally rebuilds node
    packages, and starts the server again.
    """
    if confirm('Deploy to prod?'):
        server = Connection(
            '*****@*****.**')
        project_path = PROD_PROJECT_PATH
        print("Deploying to Production server")
    else:
        server = Connection('128.199.74.157')
        project_path = STAGE_PROJECT_PATH
        print("Deploying to Stage server")
    stop_server(server, project_path)
    pull(server, project_path)
    if confirm('Rebuild yarn?'):
        rebuild_node(server, project_path)
    start_server(server, project_path)
    print("Deploying to server is done!")
def restart_prod_server(server):
    """Restart the production server after user confirmation.

    NOTE: the passed-in ``server`` is ignored; a new connection to the
    hard-coded production host is opened instead.
    """
    server = Connection(
        '*****@*****.**')
    if confirm('Restart Production server?'):
        stop_server(server, PROD_PROJECT_PATH)
        start_server(server, PROD_PROJECT_PATH)
        print('Production server was restarted')
    else:
        # Fixed typo: "canseled" -> "canceled"
        print('Server restart is canceled')
def pull(server, PROJECT_PATH):
    """Stash local changes, pull from bitbucket, then restore the stash.

    Paths containing 'www' pull from the dev branch explicitly; all
    others pull their tracking branch.
    """
    with server.cd(PROJECT_PATH):
        print('Start getting files from bitbucket')
        if confirm('Stash and pull?'):
            server.run('git stash')
            pull_cmd = 'git pull origin dev' if 'www' in PROJECT_PATH else 'git pull'
            server.run(pull_cmd, pty=True)
            server.run('git stash pop')
        print('Getting files from bitbucket completed')
def init_dist(c):
    """First-time code pull and service bring-up for a new server.

    Must run after ``deploy_dist``; the user is asked to confirm that.
    """
    if not confirm('make sure you have run deploy-dist'):
        raise Exit('aborting dist initialization')
    # Each step takes the connection/context and runs in order.
    setup_steps = (
        git.git_clone,
        conda.create_virtual_env,
        conda.create_deploy_env,
        bower.bower_pkg_install,
        sup.start,
        nginx.start,
    )
    for step in setup_steps:
        step(c)
def deploy(ctx, env=ENV_DEV, db_url=DEFAULT_DB_URL):
    """
    Command:
        $ fab deploy env db_url

    Validates the environment name, asks for confirmation, then clones
    the source, writes the env file, collects static files, migrates the
    DB and creates the Django superuser.
    """
    # Fix: the docstring used to sit AFTER check_envs(env), where it was
    # a no-op string expression instead of a docstring.
    check_envs(env)
    if not confirm('Deploy to {} ?'.format(env)):
        # Fix: raise an instance with a message rather than the bare
        # Exit class, so the user sees why the task stopped.
        raise Exit('Deployment aborted at user request.')
    clone_git_src(ctx)
    create_env_file(ctx, env, db_url)
    collect_static(ctx)
    migrate(ctx)
    create_django_su(ctx)
def test(c):
    """Build and test redis 4.0.9, then clean and pack it into a tarball.

    On a failed `make test` the user may choose to continue anyway;
    declining aborts via SystemExit.
    """
    with c.prefix('cd /root/python_linux_automation/redis-4.0.9'):
        result = c.run('make && make test', warn=True, pty=False)
        if result.failed and not confirm('Tests failed, continue anyway?'):
            # Fixed typo in the abort message: "requeset" -> "request"
            raise SystemExit("Aborting at user request")
        else:
            print('All tests passed without errors')
        c.run('make clean', warn=True, pty=False, hide=True)
    # Removed a stale commented-out duplicate of the `make clean` block.
    with c.prefix("cd /root/python_linux_automation/"):
        c.run('tar -czf redis-4.0.9.tar.gz redis-4.0.9')
def deploy(ctx, usr, host, compose="docker-compose-prod.yml", branch="master"):
    """
    Deploy project on the server with user interaction.

    Builds a to-do list of deployment steps from a series of yes/no
    prompts, then executes each chosen step in order.
    """
    conn = connection_handler(ctx, usr, host)
    if not conn:
        sys.exit("\u274c Failed to connect to server")
    things_todo = []
    # Each prompt appends the corresponding step function when confirmed.
    if confirm("Do you want to pull from git?", assume_yes=True):
        things_todo.append(pull)
    if confirm("Do you want to build backend?", assume_yes=False):
        things_todo.append(build_backend)
    if confirm("Do you want to build frontend", assume_yes=True):
        things_todo.append(build_frontend)
    if confirm("Do you want to run migrations?", assume_yes=False):
        things_todo.append(run_migrations)
    if confirm("Do you want to collect static files?", assume_yes=False):
        things_todo.append(collect_statics)
    if confirm("Do you want to restart the server?", assume_yes=True):
        things_todo.append(restart)
    for todo in things_todo:
        todo(conn, usr, host, compose, branch)
        print("{} {}".format(todo.__name__, "\u2705"))
    else:
        # NOTE(review): this is a for/else — since the loop has no
        # `break`, the else branch ALWAYS runs, even when things_todo is
        # empty. Possibly a plain statement after the loop was intended;
        # behavior kept as-is.
        print("All jobs done \U0001f643")
def update_dist(c):
    """Update code, virtualenv, packages and DB, then restart services.

    Must run after ``init_dist``; the user is asked to confirm that.
    """
    if not confirm('make sure you have run init-dist'):
        raise Exit('aborting dist updating')
    update_steps = (
        git.update_git_repo,
        conda.update_virtual_env,
        conda.update_deploy_env,
        bower.update_bower_pkg,
        db.upgrade,
        sup.restart,
        nginx.reload,
    )
    for step in update_steps:
        step(c)
def deploy_dist(c):
    """Provision a new server: anaconda, git, bower and role/group setup.

    Aborts unless the user acknowledges that existing packages will be
    wiped out.
    """
    wipe_warning = ('the deployment will wipe out '
                    'all the existing packages. Is this ok')
    if not confirm(wipe_warning):
        raise Exit('aborting dist deployment')
    # nginx installation currently is excluded
    # supervisor installation is delegated to conda using pip
    conda.install_anaconda(c)
    git.install_git(c)
    bower.install_bower(c)
    auth.create_role_and_group(c)
def rmRemoteFile(c, filename, nodename):
    """Delete ``filename`` on the remote node named ``nodename``.

    Returns True in every path; on failure the user is asked whether to
    continue, but the answer does not change the return value.
    """
    # Initialise the remote helper for the node's environment.
    # (comment translated from Chinese)
    robj = Remote(getEnv(nodename))
    # Attempt to delete the remote file. (translated from Chinese)
    result = robj.rmfile(filename)
    if result == False:
        print("failed to delete remote file.")
        # NOTE(review): both branches of this confirm ultimately return
        # True, so declining has no effect. Likely should return False
        # (or raise) on decline — confirm intent with the author.
        if not confirm("failed to delete remote file, Continue[Y/N]?"):
            return True
    else:
        print("succeed to delete remote file.")
    return True
def checkRemotePath(c, path, nodename):
    """Check that ``path`` exists on the remote node named ``nodename``.

    Returns True in every path; when the path is missing the user is
    asked whether to continue, but the answer does not change the
    return value.
    """
    # Initialise the remote helper for the node's environment.
    # (comment translated from Chinese)
    robj = Remote(getEnv(nodename))
    # Check the path on the remote machine. (translated from Chinese —
    # the original comment said "download file", which does not match
    # the checkpath call; presumably copy-pasted.)
    result = robj.checkpath(path)
    if result == False:
        print("Remote Path not exists.")
        # NOTE(review): both branches return True, so declining has no
        # effect. Likely should return False on decline — confirm with
        # the author.
        if not confirm("Path not exists in remote machine, Continue[Y/N]?"):
            return True
    else:
        print("Remote Path exists.")
    return True
def pull(server, PROJECT_PATH):
    """Stash, pull the branch matching the environment, then unstash.

    Stage pulls STAGE_BRANCH_NAME, prod pulls master, anything else
    pulls its tracking branch.
    """
    with server.cd(PROJECT_PATH):
        print('Start getting files from bitbucket')
        if confirm('Stash and pull?'):
            server.run('git stash')
            # Pick the pull command for this environment's path.
            if PROJECT_PATH == STAGE_PROJECT_PATH:
                command = 'git pull origin {0}'.format(STAGE_BRANCH_NAME)
            elif PROJECT_PATH == PROD_PROJECT_PATH:
                command = 'git pull origin master'
            else:
                command = 'git pull'
            server.run(command, pty=True)
            server.run('git stash pop')
        print('Getting files from bitbucket completed')
def deploy(server):
    """Deploy to prod, stage or the local dev server, chosen interactively.

    Stops the target server, pulls the latest code, optionally rebuilds
    node packages, and starts the server again.
    """
    if confirm('Deploy to prod?'):
        server, project_path = Connection(PROD_SERVER_HOST), PROD_PROJECT_PATH
        print("Deploying to Production server")
    elif confirm('Deploy to stage?'):
        server, project_path = Connection(STAGE_SERVER_HOST), STAGE_PROJECT_PATH
        print("Deploying to Stage server")
    else:
        server, project_path = Connection(DEV_SERVER_HOST), DEV_PROJECT_PATH
        print("Deploying to Local server")
    stop_server(server, project_path)
    pull(server, project_path)
    if confirm('Rebuild yarn?'):
        rebuild_node(server, project_path)
    start_server(server, project_path)
    print("Deploying to server is done!")
def deploy(c, user=None, mode="test", branch="master"):
    '''
    Deploy latest code on our server and set it 'live'
    (you need admin rights for this on the server)

    Be sure to use --user=you for sudo commands to succeed.
    Also, use --mode=production to update the production app.
    Finally, fabric accepts a few parameters so you handle your ssh key
    correctly. For instance, you might need to use:
    --identity=path/to/your/provate/key --prompt-for-passphrase
    See Fabrics's (>=2.0) documentation for more details.
    '''
    if not user:
        raise Exit("Please provide a user name for use on the server")
    # make sure we don't deploy untested or outdated code
    #test(c, standalone=False)  # Here, c is a Connection, not a Context, so this would need work
    #push(c)
    # Manual gate instead of the automated checks above.
    if not confirm(
        "Have you tested and pushed your code?? (You can use `fab prepare_deploy`)"
    ):
        raise Exit("Aborting at user request.")
    code_dir = "/var/voko/git-repo"
    app_dir = "/var/www"
    # make sure we have the code cloned on the server
    if not c.run(f"test -d {code_dir}", warn=True):
        c.run(f"git clone [email protected]:vokomokum/vkmkm-erp.git {code_dir}",
              user=user)
    # make sure sudo command can be used — the password is collected
    # interactively and injected into the connection's config.
    sudo_pass = getpass.getpass(f"What's the sudo password for user {user}?")
    c.config = Config(overrides={'sudo': {'password': sudo_pass}})
    #with cd(code_dir):  # not yet implemented in fabric2 and sudo does not remember cd
    # Run the server-side update script with mode and branch arguments.
    c.sudo(
        f"{code_dir}/dev-tools/update-members-site-from-git {mode} {branch}")
    # No need to restart apache, simply touch wsgi file (when in daemon mode)
    # (see http://code.google.com/p/modwsgi/wiki/ReloadingSourceCode)
    #if mode == 'production':
    #    c.sudo(f"touch {app_dir}/members/pyramid.wsgi")
    #else:
    #    c.sudo(f"touch {app_dir}/memberstest/pyramid.wsgi")
    # The above is not working anymore, we do restart directly now
    #c.sudo("/etc/init.d/apache2 restart")
    c.sudo("systemctl restart httpd")
def init_dist(c):
    """First-time code pull, static upload and service start for a new server.

    Must run after ``deploy_dist``; the user is asked to confirm that.
    """
    if not confirm('make sure you have run deploy-dist'):
        raise Exit('dist initialization abort')
    git.create_new_release(c)
    conda.create_virtual_env(c)
    conda.create_deploy_env(c)
    bower.bower_pkg_install(c)
    # Upload static media matching these glob patterns.
    static_patterns = ['*.jpg', '*.mp4', '*.svg', '*.png']
    ftp.upload_static_src(c, 'local_src_path', 'srv_src_path', static_patterns)
    auth.chown_of_dist_repo(c)
    sup.d_start(c)
    nginx.start(c)
def create_local_archive(conn, config, src_commit):
    """
    Create local archive and return its path.

    Builds a temporary ``<branch>-archive`` git branch from the working
    tree with secrets revealed and templates filled in, archives it to a
    mktemp file, and restores the original branch before returning the
    archive path.
    """
    wt = config.get_working_tree()
    # Archive either the configured branch or an explicit commit.
    if src_commit is None:
        src_branch = config.get_config_branch()
    else:
        src_branch = src_commit
    if not os.path.exists(wt):
        raise Exit("Working tree '{}' does not exist!".format(wt))
    # git-secret is only involved when the repo has a .gitsecret dir.
    has_secrets = os.path.exists(os.path.join(wt, ".gitsecret"))
    with conn.cd(wt):
        # Refuse (or reset) a dirty working tree — the archive must be
        # built from a known state.
        result = conn.run("git diff-index --quiet HEAD --", warn=True)
        if result.failed:
            if confirm(
                "There are uncommited changes in the working tree. Reset to HEAD?"
            ):
                conn.run("git reset --hard HEAD")
            else:
                raise Exit(
                    "Can't use working tree with uncommitted changes. Stash, commit, or reset."
                )
        conn.run("git checkout {}".format(shellquote(src_branch)))
        if has_secrets:
            # Decrypt secret files in place for the archive.
            conn.run("git secret reveal")
        ttools.fill_templates(config)
        # Recreate the throwaway archive branch (delete any stale one;
        # warn=True so a missing branch is not an error).
        archive_branch = "{}-archive".format(src_branch)
        conn.run("git branch -D {}".format(shellquote(archive_branch)),
                 warn=True)
        conn.run("git checkout -b {}".format(shellquote(archive_branch)))
        # Swap encrypted/template originals for their processed versions.
        filter_files_for_archival(conn, config, ".secret")
        filter_files_for_archival(conn, config, ".template")
        if has_secrets:
            # Drop the secrets manifest so it never ships in the archive.
            secrets_file_name = config.get_secrets_file_name()
            if secrets_file_name is not None:
                conn.run("git rm -f {}".format(shellquote(secrets_file_name)))
        # Strip repo plumbing that deployment targets don't need.
        if os.path.exists(os.path.join(wt, '.gitignore')):
            conn.run("git rm -f .gitignore")
        if os.path.exists(os.path.join(wt, '.gitsecret')):
            conn.run("git rm -rf .gitsecret")
        # warn=True: the commit may be empty if nothing changed.
        conn.run("git commit -m 'Decrypted for deployment.'", warn=True)
        archive_path = conn.run("mktemp").stdout.rstrip()
        conn.run("git archive --format tgz -o {} HEAD".format(
            shellquote(archive_path)))
        # Restore the source branch and remove the temporary one.
        conn.run("git checkout {}".format(shellquote(src_branch)))
        conn.run("git branch -D {}".format(shellquote(archive_branch)))
    return archive_path
def update_dist(c):
    """Update code, envs, packages, DB and statics, then restart services.

    Must run after ``init_dist``; the user is asked to confirm that.
    """
    if not confirm('make sure you have run init-dist'):
        raise Exit('aborting dist updating')
    git.create_new_release(c)
    conda.update_virtual_env(c)
    conda.update_deploy_env(c)
    bower.update_bower_pkg(c)
    db.upgrade(c)
    # Re-upload static media matching these glob patterns.
    static_patterns = ['*.jpg', '*.mp4', '*.svg', '*.png']
    ftp.upload_static_src(c, 'local_src_path', 'srv_src_path', static_patterns)
    auth.chown_of_dist_repo(c)
    sup.d_start(c)
    nginx.reload(c)
def etc_hosts(_, add=None, remove=None):
    """[--add="127.0.0.1 postgres" | --remove=<ip/host>] """
    path = Path("c:/Windows/System32/drivers/etc/hosts")
    content = path.read_text(encoding="utf-8")
    if add:
        new_lines = content.splitlines() + [add]
    elif remove:
        # Keep lines that neither start with the ip nor end with the host.
        new_lines = [
            l for l in content.splitlines()
            if not l.startswith(remove) and not l.endswith(remove)
        ]
    else:
        # No edit requested: just show the current file.
        print(content)
        return
    # Bug fix: previously the write was gated on `if new_lines:`, so a
    # --remove that filtered out every line silently did nothing and
    # printed the unmodified file instead.
    new_content = "\n".join(new_lines)
    print(new_content)
    if confirm("OK?", assume_yes=False):
        path.write_text(new_content, encoding="utf-8")
def put(ctx, usr, host, ifile, ofile=PROJECT_PATH):
    """
    Upload a file to the server, from the given ifile parameters.
    By default the file is saved on the current directory.

    ifile: name/path of the local file to upload.
    ofile: name/path of the file on the server after upload.
    """
    conn = connection_handler(ctx, usr, host)
    if not conn:
        sys.exit("\u274c Failed to connect to server")
    question = (
        "File {} is going to be upload and save under {}, do you want to proceed?"
        .format(ifile, ofile))
    if not confirm(question):
        print("Aborted, everything stays the same. \U0001f610")
        return
    print("Uploading file... \U0001f4c1")
    with conn.cd(PROJECT_PATH):
        conn.put(ifile, ofile)
def create_env_file(ctx, env, db_url):
    """Write the shell environment file for *env*, backing up any existing one.

    Fixes vs. the previous version:
    - the existence check was inverted: invoke's Result is truthy when the
      command SUCCEEDS, so ``not ctx.run('test -e ...')`` was True when the
      file did NOT exist;
    - the ``mv`` backup command had two ``{}`` placeholders but only one
      format argument, which raised IndexError;
    - the production branch exported ``DJANGO__CONFIGURATION`` (double
      underscore) instead of ``DJANGO_CONFIGURATION``.
    """
    check_envs(env)
    # Truthy Result <=> `test -e` exited 0 <=> the env file exists.
    env_file_exists = bool(ctx.run('test -e {}'.format(ENV_FILE_PATH),
                                   warn=True))
    if env_file_exists and not confirm(
            'Environment file already exists. Do you want to rewrite it?',
            assume_yes=False):
        return
    if env_file_exists:
        # Keep a .bak copy of the file we are about to rewrite.
        ctx.run('mv {} {}.bak'.format(ENV_FILE_PATH, ENV_FILE_PATH))
    ctx.run('echo export DJANGO_SECRET_KEY={} >> {}'
            .format(generate_django_secret_file(), ENV_FILE_PATH))
    ctx.run('echo export DJANGO_SETTINGS_MODULE={}{} >> {}'
            .format(ENV_DIR, '.settings', ENV_FILE_PATH))
    ctx.run('echo export DATABASE_URL={} >> {}'.format(db_url, ENV_FILE_PATH))
    if env == ENV_DEV:
        ctx.run('echo export DEBUG=True >> {}'.format(ENV_FILE_PATH))
        ctx.run('echo export DJANGO_CONFIGURATION={} >> {}'
                .format(ENV_DEV, ENV_FILE_PATH))
    elif env == ENV_TEST:
        ctx.run('echo export DEBUG=False >> {}'.format(ENV_FILE_PATH))
        ctx.run('echo export DJANGO_CONFIGURATION={} >> {}'
                .format(ENV_TEST, ENV_FILE_PATH))
    elif env == ENV_PROD:
        ctx.run('echo export DEBUG=False >> {}'.format(ENV_FILE_PATH))
        ctx.run('echo export DJANGO_CONFIGURATION={} >> {}'
                .format(ENV_PROD, ENV_FILE_PATH))
        sentry_dsn = input('Enter Sentry DSN: ')
        ctx.run('echo export SENTRY_DSN={} >> {}'
                .format(sentry_dsn, ENV_FILE_PATH))
def returns_False_for_nolike_responses(self, mock_input):
    """Every case variant of 'n'/'no' must yield False."""
    for answer in ('n', 'N', 'no', 'NO', 'nO', 'No'):
        mock_input.return_value = answer
        ok_(confirm("Meh") is False)
def displays_question_with_yes_no_suffix(self, mock_input):
    """The prompt shown to the user carries the default [Y/n] suffix."""
    confirm("Are you sure?")
    shown_prompt = mock_input.call_args[0][0]
    assert shown_prompt == "Are you sure? [Y/n] "
def test(c):
    """Run the app's test suite; failures may be overridden interactively."""
    outcome = c.local("./manage.py test my_app", warn=True)
    if outcome:
        return
    # Tests failed — give the user a chance to continue anyway.
    if not confirm("Tests failed. Continue anyway?"):
        raise Exit("Aborting at user request.")
def reprompts_on_bad_input(self, mock_input):
    """Bad input triggers a reprompt message before a valid answer wins."""
    outcome = confirm("O rly?")
    ok_(outcome is True)
    assert_contains(sys.stderr.getvalue(), "I didn't understand you")
def default_on_empty_response_is_True_by_default(self, mock_input):
    """Hitting enter defaults to 'yes' when assume_yes is not passed."""
    answer = confirm("Are you sure?")
    assert answer is True
def test(c):
    """Run nosetests inside the virtualenv; prompt on failure."""
    with c.prefix("source env/bin/activate"):
        outcome = c.run("nosetests -v", warn=True)
        if outcome.ok:
            return
        # Tests failed — abort unless the user wants to push on.
        if not confirm("Tests failed. Continue?"):
            raise Exit()
def default_on_empty_response_is_False_if_assume_yes_False(
    self, mock_input
):
    """With assume_yes=False an empty response means 'no'."""
    answer = confirm("Are you sure?", assume_yes=False)
    assert answer is False
def whitespace_is_trimmed(self, mock_input):
    """Surrounding whitespace in the response is ignored."""
    answer = confirm("Are you sure?")
    assert answer is True
def suffix_changes_when_assume_yes_False(self, mock_input):
    """assume_yes=False flips the displayed suffix to [y/N]."""
    confirm("Are you sure?", assume_yes=False)
    shown_prompt = mock_input.call_args[0][0]
    assert shown_prompt == "Are you sure? [y/N] "
def returns_True_for_yeslike_responses(self, mock_input):
    """Every case variant of 'y'/'yes' must yield True."""
    for answer in ('y', 'Y', 'yes', 'YES', 'yES', 'Yes'):
        mock_input.return_value = answer
        ok_(confirm("Meh") is True)
def returns_True_for_yeslike_responses(self, mock_input):
    """Yes-like responses in any casing produce True."""
    yeslike = ("y", "Y", "yes", "YES", "yES", "Yes")
    for answer in yeslike:
        mock_input.return_value = answer
        assert confirm("Meh") is True
def returns_False_for_nolike_responses(self, mock_input):
    """No-like responses in any casing produce False."""
    nolike = ("n", "N", "no", "NO", "nO", "No")
    for answer in nolike:
        mock_input.return_value = answer
        assert confirm("Meh") is False
def reprompts_on_bad_input(self, mock_input):
    """An unparsable answer triggers the "didn't understand" reprompt."""
    outcome = confirm("O rly?")
    assert outcome is True
    assert "I didn't understand you" in sys.stderr.getvalue()