def deploy(c):
    """Upload the zerojoy tarball and wheel to the target host, install the
    wheel into its venv, and restart the service.

    :param c: local invoke context, used to query the package version.
    """
    remote = Connection("zerojoy")
    remote.put("dist/usb_gadget.tar", "zerojoy.tar")
    # Ask poetry (locally) for the current version to derive the wheel name.
    version = c.run("poetry version -s").stdout.strip()
    wheel = f"zerojoy-{version}-py3-none-any.whl"
    remote.put(f"dist/{wheel}")
    with remote.prefix(". .venv/bin/activate"):
        # --no-index: install only from the file we just uploaded.
        remote.run(f"pip install --no-index --force-reinstall {wheel}")
    restart(remote)
def deploy(ctx, name):
    """Pull the latest sources for site *name* and rebuild it with Jekyll.

    :param ctx: invoke context whose config holds per-site settings.
    :param name: key into the context config for this site.
    """
    data = ctx[name]
    folder = data.pop("folder")
    conn = Connection(**data.get('connection', {}))
    # Grandparent of the site folder — presumably the git checkout root;
    # verify against the config layout.
    repo_root = os.path.dirname(os.path.dirname(folder))
    with conn.prefix("source ~/.zshrc"):
        with conn.cd(repo_root):
            conn.run("git pull")
        with conn.cd(folder):
            conn.run("jekyll build")
def rollback(_, env="dev"):
    """Roll back the latest database migration on the host configured for *env*.

    :param _: unused invoke context (kept for task-signature compatibility).
    :param env: name of the environment config to load (default "dev").
    """
    config = load_config(env)
    work_dir = config["work_dir"]
    # Removed unused locals (package_file_name, system_python,
    # config_file_name) copy-pasted from the deploy task.
    connection = Connection(config["host"])
    connection.config.run.echo = True
    # Roll back the table schema.
    with connection.cd(work_dir):
        with connection.prefix(". venv/bin/activate"):
            rollback_command = "migrate rollback"
            # warn=True: a failed rollback (e.g. nothing to roll back)
            # must not abort the task with an exception.
            connection.run(rollback_command, warn=True)
def update_report(self, force=False):
    """Refresh the cached report for this host, or load it from cache.

    The remote report script is run over SSH with LDAP_URL/CCGROUP exported
    via a command prefix; its stdout is cached at ``report_path``.

    :param force: when True, re-run the report even if the cache is fresh.
        (Bug fix: the original accepted but never consulted this flag.)
    """
    report_path = os.path.join(dir_path, self.report_dir, self.host)
    cache_stale = (not os.path.isfile(report_path)
                   or time.time() - os.path.getmtime(report_path)
                   > self.refresh_rate)
    if force or cache_stale:
        c = Connection(self.host, user=USER)
        env = {"LDAP_URL": self.ldap_url, "CCGROUP": self.cc_group}
        # "K=V&&K=V" so the assignments run before the script itself.
        p = "&&".join('{}={}'.format(k, v) for k, v in env.items())
        with c.prefix(p):
            # replace_env=True: run with exactly the env set by the prefix.
            with open(report_sh) as sh:
                outputs = c.run(sh.read(), replace_env=True)
        self._stdout = outputs.stdout
        with open(report_path, 'w') as fp:
            fp.write(self._stdout)
    else:
        with open(report_path) as fp:
            self._stdout = fp.read()
    self.last_report_date = os.path.getmtime(report_path)
def deploy(_, env="dev"):
    """Package, upload, install and (re)start the service for *env*.

    :param _: unused invoke context (kept for task-signature compatibility).
    :param env: name of the environment config to load (default "dev").
    """
    config = load_config(env)
    work_dir = config["work_dir"]
    package_file_name = config["package_file_name"]
    system_python = config["system_python"]
    config_file_name = config["config_file_name"]
    connection = Connection(config["host"])
    connection.config.run.echo = True

    # Build the sdist locally.
    connection.local("python setup.py sdist")
    logging.info("【代码打包】OK")

    # Upload the package.
    create_dir(connection, work_dir)
    upload(connection, "dist", package_file_name, work_dir, package_file_name)

    # Create the virtualenv only if it does not exist yet.
    test_venv_cmd = "test -f {work_dir}/venv/bin/activate".format(
        work_dir=work_dir)
    if connection.run(test_venv_cmd, warn=True).failed:
        create_venv = "virtualenv -p {system_python} {work_dir}/venv".format(
            work_dir=work_dir, system_python=system_python)
        connection.run(create_venv)
    logging.info("【虚拟环境创建】OK")

    # Install/upgrade the package inside the venv.
    install_requirements_cmd = "easy_install {package_file_name}".format(
        package_file_name=package_file_name)
    with connection.cd(work_dir):
        with connection.prefix(". venv/bin/activate"):
            connection.run(install_requirements_cmd)
    logging.info("【安装包更新】OK")

    # Upload configuration files.
    upload(connection, "configure", config_file_name, work_dir, "config.py")
    upload(connection, ".", "uwsgi.ini", work_dir, "uwsgi.ini")

    # Start or restart the service.
    create_dir(connection, os.path.join(work_dir, "logs"))
    # Hoisted so the confirmation step below does not rely on names bound
    # inside this with-block.
    check_service1_command = "autointerface check-service1"
    check_service2_command = "autointerface check-service2"
    start_service_command = "autointerface start"
    restart_service_command = "autointerface restart"
    with connection.cd(work_dir):
        with connection.prefix(". venv/bin/activate"):
            if connection.run(check_service2_command, warn=True).failed:
                connection.run(start_service_command)
                action = "启动"
            else:
                connection.run(restart_service_command)
                action = "重启"

    # Confirm both service checks pass.
    with connection.cd(work_dir):
        with connection.prefix(". venv/bin/activate"):
            if connection.run(check_service1_command, warn=True).failed:
                logging.error("【%s】失败" % action)
            elif connection.run(check_service2_command, warn=True).failed:
                logging.error("【%s】失败" % action)
            else:
                logging.info("【%s】成功" % action)
# Open an SSH connection to the dev EC2 host using the PEM key.
conn = Connection(
    DEV_EC2_HOST,
    connect_kwargs={"key_filename": pem_filename},
)
result = conn.run('mkdir -p ~/crawler')

# authorize docker
ecr_login_cmd = f'/home/ubuntu/venv/crawler/bin/aws ecr get-login-password --region {aws_region} --profile=recon | docker login --username AWS --password-stdin {image_registry_url}'
conn.run(ecr_login_cmd, echo=True)

with conn.prefix('cd /home/ubuntu/crawler/'):
    # stop all containers
    conn.run('docker stop $(docker ps -a -q)', echo=True)
    # delete old image
    conn.run(f'docker rmi -f {image_registry_url}/{build_tag_name}', echo=True)
    # Pull and start the web container on port 8000.
    conn.run(f'docker pull {image_registry_url}/{build_tag_name}', echo=True)
    conn.run(
        f'docker run --cpu-shares=1024 --restart=always -d -p 8000:8000 {image_registry_url}/{build_tag_name}',
        echo=True,
    )
    # Pull and start the celery worker container.
    conn.run(f'docker pull {image_registry_url}/{workers_build_tag_name}', echo=True)
    worker_command = '"celery" "-A" "crawler" "worker" "--loglevel=INFO" "-Q" "prod-crawler-download-video-file-queue"'
    conn.run(
        f'docker run --restart=always --cpu-shares=512 -d {image_registry_url}/{workers_build_tag_name} {worker_command}',
        echo=True,
    )
class DjangoConnection:
    """Pair of SSH connections (admin + app user) for deploying a Django app.

    Manages versioned checkouts, virtualenvs, symlink switching between
    versions, database backups and the systemd service on the remote host
    described by *config*.
    """

    def __init__(self, config):
        self.c = config
        logger.info("Connecting to {}".format(config.host))
        self.c_adm = Connection(host=config.host, user=self.c.admin_username,
                                config=config)
        self.c_usr = Connection(host=config.host, user=self.c.app_username,
                                config=config)
        # Sanity-check that each connection is logged in as the expected user.
        assert (self.c_adm.run('whoami').stdout.strip()
                == self.c.admin_username)
        assert (self.c_usr.run('whoami').stdout.strip()
                == self.c.app_username)

    @staticmethod
    def get_instance(config):
        """Return the connection cached on *config*, creating it on first use."""
        if "s" in config:
            return config.s
        s = DjangoConnection(config)
        config.s = s
        return s

    def prepare_version(self, path, code_commit, config_commit):
        """Create a new version directory at *path*: code checkout, config
        checkout, static dir and a virtualenv with dependencies installed."""
        logger.info("Creating code and config files at {}".format(path))
        self.c_usr.run("mkdir {}".format(path))
        with self.c_usr.cd(path):
            self.c_usr.run("mkdir {}".format(self.c.static_subdir))
            # Download code
            self._download_repository(self.c.code_repo_url,
                                      self.c.code_subdir, code_commit,
                                      self.c.code_branch)
            # Download config
            self._download_repository(self.c.config_repo_url,
                                      self.c.config_subdir, config_commit,
                                      self.c.config_branch)
            # Prepare venv
            logger.info("Creating virtualenv")
            self.c_usr.run("python -m venv {}".format(q(self.c.venv_subdir)))
            with self.c_usr.prefix("source {}/{}/bin/activate"
                                   .format(path, q(self.c.venv_subdir))):
                if self.c.build_script is None:
                    # NOTE(review): os.path.isfile tests the *local*
                    # filesystem for what looks like a remote path —
                    # verify this is intended.
                    code_requirements = (self.c.code_subdir
                                         + "/requirements.txt")
                    if os.path.isfile(code_requirements):
                        self.c_usr.run("pip install -r "
                                       + q(code_requirements))
                    config_requirements = (self.c.config_subdir
                                           + "/requirements.txt")
                    if os.path.isfile(config_requirements):
                        self.c_usr.run("pip install -r "
                                       + q(config_requirements))
                else:
                    self.c_usr.run(self.c.build_script)

    def _download_repository(self, url, path, commit, branch):
        """Clone *url* into *path*, check out *commit*, and recreate
        *branch* there tracking its origin counterpart."""
        logger.info("Downloading repository {} to {}, commit {}"
                    .format(url, path, commit))
        self.c_usr.run("git clone {} {}".format(q(url), q(path)))
        with self.c_usr.cd(path):
            self.c_usr.run("git checkout {}".format(q(commit)))
            self.c_usr.run("git branch -D master")
            # (Restored string that was line-broken in the original source.)
            self.c_usr.run("git checkout -b {}".format(branch))
            self.c_usr.run(
                "git branch --set-upstream-to=origin/{}".format(branch))

    def stop_maintenance(self):
        """Run the configured script that disables the maintenance page."""
        logger.info("Stopping maintenance mode")
        self.c_adm.run(self.c.maintenance_stop_script)

    def start_maintenance(self):
        """Run the configured script that enables the maintenance page."""
        logger.info("Starting maintenance mode")
        self.c_adm.run(self.c.maintenance_start_script)

    def start_django(self):
        """Start the systemd service (maintenance mode is stopped first)."""
        self.stop_maintenance()
        logger.info("Starting the App")
        self.c_adm.run("sudo systemctl start -la " + self.c.systemd_service)

    def stop_django(self):
        """Stop the Django App, but we don't fail if already stopped."""
        logger.info("Stopping django")
        self.c_adm.run("sudo systemctl stop -la " + self.c.systemd_service)

    def backup_database(self):
        """Run the backup script and assert the dump file looks non-trivial."""
        logger.info("Backing up database")
        filepath = self.c_adm.run(self.c.backup_script).stdout.strip()
        # A plausible dump must be bigger than 100 kB.
        assert int(self.c_adm.run('stat --printf="%s" {}'.format(
            filepath)).stdout) > 100 * 1000  # 100kB

    def check_for_uncommited_changes(self):
        """Fail if the live code or config checkout has local changes."""
        logger.info("Checking for uncommited or unpushed changes")
        self._check_repository(self.c.current_code)
        self._check_repository(self.c.current_config)

    def _check_repository(self, path):
        """Assert the repo at *path* has no uncommitted or unpushed work."""
        with self.c_usr.cd(path):
            # For some reason this is required to refresh git internals
            self.c_usr.run("git status")
            self.c_usr.run("git diff-index --quiet HEAD --")
            # Checked for unpushed commits
            assert "" == self.c_usr.run("git log @{u}..").stdout.strip()

    def change_codebase(self, new_path):
        """Point the current-main symlink at *new_path*, keeping the old
        target reachable via the previous-main symlink."""
        logger.info("Changing codebase to {}".format(new_path))
        self.c_usr.run("rm -f {}".format(self.c.previous_main))
        self.c_usr.run("mv {} {}".format(self.c.current_main,
                                         self.c.previous_main))
        self.c_usr.run("ln -s {} {}".format(q(new_path),
                                            self.c.current_main))

    def change_to_previous_codebase(self):
        """Roll the current-main symlink back to the previous version."""
        self.change_codebase(self.get_previous_version())

    def mark_working(self, new_path):
        """Record *new_path* as the known-good version (no-op if already)."""
        if new_path == self.get_working_version():
            return
        self.c_usr.run("rm -f {}".format(self.c.previous_working))
        self.c_usr.run("mv {} {}".format(self.c.current_working,
                                         self.c.previous_working))
        self.c_usr.run("ln -s {} {}".format(q(new_path),
                                            self.c.current_working))

    def django_check_manage(self):
        """
        Checks if manage.py succeeds to run without exceptions.
        Such a check finds many common problems including CONFIG_VERSION in
        config not matching INSTALLATION_CONFIG_VERSION in code
        """
        logger.info("Checking manage.py")
        with self.c_usr.prefix(
                "source {}/bin/activate".format(self.c.current_venv_dir)):
            with self.c_usr.cd(self.c.deployment_dir):
                self.c_usr.run("./manage.py")

    def django_migrations(self):
        """Apply database migrations inside the current venv."""
        logger.info("Applying Django migrations")
        with self.c_usr.prefix(
                "source {}/bin/activate".format(self.c.current_venv_dir)):
            with self.c_usr.cd(self.c.deployment_dir):
                self.c_usr.run("./manage.py migrate --no-input")

    def django_perform_install(self):
        """Run the optional per-version install script, if configured."""
        if self.c.install_script is not None:
            logger.info("Performing Django version install")
            with self.c_usr.prefix(
                    "source {}/bin/activate".format(self.c.current_venv_dir)):
                with self.c_usr.cd(self.c.deployment_dir):
                    self.c_usr.run(self.c.install_script)

    def check_app_works(self):
        """Poll the website URL (up to ~60 s) until it responds OK."""
        logger.info("Testing connection to {}".format(self.c.website_url))
        r = requests.get(self.c.website_url)
        count = 0
        # Starting of Django App may take some time.
        while not r.ok and count < 60:
            logger.debug("Waiting for the App start...")
            time.sleep(1)
            r = requests.get(self.c.website_url)
            count += 1
        assert r.ok

    def get_current_version(self):
        """Version name currently served (target of the main symlink)."""
        return self._get_link_target_basename(self.c.current_main)

    def get_previous_version(self):
        """Version name served before the last switch."""
        return self._get_link_target_basename(self.c.previous_main)

    def get_working_version(self):
        """Version name last marked as known-good."""
        return self._get_link_target_basename(self.c.current_working)

    def get_previous_working_version(self):
        """Version name marked as known-good before the current one."""
        return self._get_link_target_basename(self.c.previous_working)

    def _get_link_target_basename(self, link):
        return os.path.basename(self._get_link_target(link))

    def _get_link_target(self, link):
        # readlink -f resolves the symlink on the remote host.
        return self.c_usr.run("readlink -f {}".format(link)).stdout.strip()

    def delete_version(self, path):
        """Delete one version directory, refusing unknown or protected ones."""
        if path not in self.list_versions():
            raise Exception("Not an available version")
        if os.path.basename(path) in self.get_protected_versions():
            raise Exception("Refusing to delete protected version")
        logger.info("Deleting {}".format(q(self.c.versions_dir + "/" + path)))
        self.c_usr.run("rm -rf {}".format(
            q(self.c.versions_dir + "/" + path)))

    def delete_versions(self, to_delete):
        """Best-effort bulk delete: skip (and log) unknown or protected
        versions instead of raising."""
        versions_list = self.list_versions()
        protected_versions = self.get_protected_versions()
        for path in to_delete:
            if path not in versions_list:
                logger.error("Not an available version: {}".format(path))
                continue
            if os.path.basename(path) in protected_versions:
                logger.warning(
                    "Refusing to delete protected version: {}".format(path))
                continue
            logger.info("Deleting {}".format(
                q(self.c.versions_dir + "/" + path)))
            self.c_usr.run("rm -rf {}".format(
                q(self.c.versions_dir + "/" + path)))

    def get_protected_versions(self):
        """Versions that must never be deleted: current/previous main and
        current/previous known-good."""
        return [self.get_current_version(),
                self.get_previous_version(),
                self.get_working_version(),
                self.get_previous_working_version(),
                ]

    def list_versions(self):
        """List remote version folders named ``django-<number>[-...]``."""
        folders = self.c_usr.run("ls -1 " + self.c.versions_dir).stdout.split()
        result = []
        for folder in folders:
            folder = folder.strip()
            elements = folder.split("-")
            # len check fixes an IndexError on a folder named just "django".
            if (len(elements) > 1 and elements[0] == "django"
                    and elements[1].isnumeric()):
                result.append(folder)
        return result