def site_files_exist(self):
    '''
    Determines if the site files exist.

    Returns 1 if site files exist, 0 otherwise.
    '''
    site_root = env.node['site_root']
    # Both the docroot and its index.php must be present.
    if env.exists(site_root) and env.exists(site_root + '/index.php'):
        return 1
    return 0
def deploy():
    """
    Performs a deploy by invoking copy, then generating next release name
    and invoking necessary hooks.
    """
    init_tasks()

    # Bail out early when prerequisites are missing.
    if not has_hook("copy"):
        return report("No copy method has been defined")

    if not env.exists(paths.get_shared_path()):
        return report("You need to run setup before running deploy")

    run_hook("before_deploy")

    # Millisecond timestamp doubles as a unique, sortable release name.
    release_name = int(time.time() * 1000)
    env.current_release = paths.get_releases_path(release_name)

    try:
        run_hook("copy")
    except Exception as e:
        return report("Error occurred on copy. Aborting deploy", err=e)

    source_path = paths.get_source_path(release_name)
    if not env.exists(source_path):
        return report("Source path not found '%s'" % source_path)

    try:
        run_hook("deploy")
    except Exception as e:
        logger.error("Error occurred on deploy, starting rollback...")
        logger.error(e)
        run_task("rollback")
        return report("Error occurred on deploy")

    # Symlink current folder
    paths.symlink(source_path, paths.get_current_path())

    # Clean older releases
    if "max_releases" in env:
        cleanup_releases(int(env.max_releases))

    run_hook("after_deploy")

    if "public_path" in env:
        paths.symlink(source_path, env.public_path)

    logger.info("Deploy complete")
def deploy():
    """
    Performs a deploy by invoking copy, then generating next release name
    and invoking necessary hooks.
    """
    init_tasks()
    if not has_hook("copy"):
        return report("No copy method has been defined")
    if not env.exists(paths.get_shared_path()):
        return report("You need to run setup before running deploy")

    run_hook("before_deploy")

    # Release names are millisecond timestamps so they sort chronologically.
    release_name = int(time.time() * 1000)
    release_path = paths.get_releases_path(release_name)
    env.current_release = release_path

    try:
        run_hook("copy")
    except Exception as e:
        return report("Error occurred on copy. Aborting deploy", err=e)

    if not env.exists(paths.get_source_path(release_name)):
        return report("Source path not found '%s'"
                      % paths.get_source_path(release_name))

    try:
        run_hook("deploy")
    except Exception as e:
        # Roll back to the previous release before reporting failure.
        message = "Error occurred on deploy, starting rollback..."
        logger.error(message)
        logger.error(e)
        run_task("rollback")
        return report("Error occurred on deploy")

    # Symlink current folder
    paths.symlink(paths.get_source_path(release_name), paths.get_current_path())

    # Clean older releases
    if "max_releases" in env:
        cleanup_releases(int(env.max_releases))

    run_hook("after_deploy")
    if "public_path" in env:
        paths.symlink(paths.get_source_path(release_name), env.public_path)
    logger.info("Deploy complete")
def restore_latest_backup(self):
    '''
    Restores a drush archive dump backup of a site.

    Finds timestamped archive files in the node's backup directory and
    restores the newest one into the site root, clearing caches afterwards.
    '''
    print(cyan('Restoring latest site backup...'))
    # Make the backup directory if for some reason it doesn't already exist.
    if not env.exists(env.node['backup_directory']):
        self.drubs_run('mkdir -p %s' % (env.node['backup_directory']))
    with env.cd(env.node['backup_directory']):
        # Get a list of available backup files sorted with newest first.
        # FIX: the pattern previously used invalid Python string escapes
        # ('\-', '\.'); '-' needs no escaping in ERE and '\\.' still sends a
        # literal '\.' to grep, so the match behavior is unchanged.
        backup_files = self.drubs_run("ls -1 %s | grep -E '%s_%s_[0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}-[0-9]{2}-[0-9]{2}\\.tar\\.gz' | awk '{print \"%s/\" $0}'" % (
            env.node['backup_directory'],
            env.config['project_settings']['project_name'],
            env.node_name,
            env.node['backup_directory'],
        ), capture=True)
        backup_files = backup_files.splitlines()
        backup_files.sort(reverse=True)
        # If backup files exist, restore the latest backup file.
        if len(backup_files) > 0:
            latest_backup_file = backup_files[0]
            if env.exists(latest_backup_file):
                if not env.exists(env.node['site_root']):
                    self.drubs_run('mkdir -p %s' % (env.node['site_root']))
                with env.cd(env.node['site_root']):
                    self.drush('archive-restore %s --overwrite --destination="%s"' % (
                        latest_backup_file,
                        env.node['site_root'],
                    ))
                    self.drush('cc all')
                print(green("Latest backup '%s' restored to '%s' on node '%s'..." % (
                    latest_backup_file,
                    env.node['site_root'],
                    env.node_name,
                )))
            else:
                print(red("Latest backup file does not exist or cannot be read in '%s' on node '%s'..." % (
                    env.node['backup_directory'],
                    env.node_name,
                )))
        else:
            print(red("No backup files found in '%s' on node '%s'. Cannot restore..." % (
                env.node['backup_directory'],
                env.node_name,
            )))
def remove_old_backups(self):
    '''
    Removes existing backup files based on the node's backup settings.

    The newest backup_minimum_count files are always kept; of the rest,
    anything older than backup_lifetime_days is deleted.
    '''
    print(cyan("Checking for site backups to be removed..."))
    # Make the backup directory if for some reason it doesn't already exist.
    if not env.exists(env.node['backup_directory']):
        self.drubs_run('mkdir -p %s' % (env.node['backup_directory']))
    # Get a list of available backup files sorted with newest first.
    # FIX: the pattern previously used invalid Python string escapes
    # ('\-', '\.'); '-' needs no escaping in ERE and '\\.' still sends a
    # literal '\.' to grep, so the match behavior is unchanged.
    backup_files = self.drubs_run("ls -1 %s | grep -E '%s_%s_[0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}-[0-9]{2}-[0-9]{2}\\.tar\\.gz' | awk '{print \"%s/\" $0}'" % (
        env.node['backup_directory'],
        env.config['project_settings']['project_name'],
        env.node_name,
        env.node['backup_directory'],
    ), capture=True)
    backup_files = backup_files.splitlines()
    backup_files.sort(reverse=True)
    # Exclude the first n items from the list, where n is backup_minimum_count.
    del backup_files[:int(env.node['backup_minimum_count'])]
    # Delete any remaining backup files in the list that are older than
    # backup_lifetime_days, if the list still has backups in it.
    if len(backup_files) > 0:
        # Hoisted out of the loop: the cutoff is loop-invariant.
        cutoff = datetime.now() - timedelta(days=int(env.node['backup_lifetime_days']))
        for backup_filename in backup_files:
            match = search(r'\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}', backup_filename)
            backup_time = datetime.strptime(match.group(), '%Y-%m-%d_%H-%M-%S')
            if backup_time < cutoff:
                self.drubs_run('rm -f %s' % (backup_filename))
def test_default_maxreleases(self):
    """
    Run 7 deploys and verify that 5 are saved, and that the first
    release is really removed.
    """
    hooks.register_hook("copy", empty_copy)
    with settings(source_path="src", warn_only=True):
        setup()
        deploy()
        release_name = paths.get_current_release_name()
        first_release_path = paths.get_releases_path(release_name)
        # TODO: Find a better solution than using time.sleep
        # (sleep guarantees each deploy gets a distinct timestamped name)
        for i in range(6):
            time.sleep(1)
            deploy()
        releases = len(os.listdir(paths.get_releases_path()))
        # FIX: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(releases, 5)
        self.assertFalse(env.exists(first_release_path))
def install(keystone_path=settings.KEYSTONE_ROOT):
    """Download and install the Back-end and its dependencies.

    Clones the keystone repository (unless already present), installs the
    Ubuntu package dependencies, and patches keystone.conf with the
    configured admin token and ports.
    """
    if env.exists(keystone_path[:-1]):
        # FIX: `print 'x'` was Python-2-only syntax; the parenthesized
        # single-argument form behaves identically on Python 2 and 3.
        print('Already downloaded.')
    else:
        env.run(('git clone https://github.com/ging/keystone.git '
                 '{0}').format(keystone_path))
    with env.cd(keystone_path):
        dependencies = ' '.join(settings.UBUNTU_DEPENDENCIES['keystone'])
        env.run('sudo apt-get install {0}'.format(dependencies))
        env.run('sudo cp etc/keystone.conf.sample etc/keystone.conf')
        env.run('sudo python tools/install_venv.py')
        # Uncomment config file
        with env.cd('etc/'):
            env.run(("sudo sed -i "
                     "'s/#admin_token=ADMIN/admin_token={0}/g' "
                     "keystone.conf").format(settings.KEYSTONE_ADMIN_TOKEN))
            env.run(("sudo sed -i "
                     "'s/#admin_port=35357/admin_port={0}/g' "
                     "keystone.conf").format(settings.KEYSTONE_ADMIN_PORT))
            env.run(("sudo sed -i "
                     "'s/#public_port=5000/public_port={0}/g' "
                     "keystone.conf").format(settings.KEYSTONE_PUBLIC_PORT))
    print('Done!')
def restore_db(release=None):
    """
    Restores backup back to version, uses current version by default.
    """
    # Fail fast when required MySQL connection settings are missing.
    # FIX: first message previously read "mysqL_user" (typo).
    assert "mysql_user" in env, "Missing mysql_user in env"
    assert "mysql_password" in env, "Missing mysql_password in env"
    assert "mysql_host" in env, "Missing mysql_host in env"
    assert "mysql_db" in env, "Missing mysql_db in env"

    if not release:
        release = paths.get_current_release_name()
    if not release:
        raise Exception("Release %s was not found" % release)

    backup_file = "mysql/%s.sql.gz" % release
    backup_path = paths.get_backup_path(backup_file)
    if not env.exists(backup_path):
        raise Exception("Backup file %s not found" % backup_path)

    env.run("gunzip < %s | mysql -u %s -p%s -h %s %s" % (
        backup_path, env.mysql_user, env.mysql_password,
        env.mysql_host, env.mysql_db))
def backup(forced=False):
    '''Dumps remote database and stores it in backups folder.'''
    remote_path = '{}/backups/{}'.format(env.komoo_project_folder, DBFILE())
    # Reuse an existing dump unless the caller forces a fresh one.
    if env.exists(remote_path) and not forced:
        return remote_path
    env.run('pg_dump -U {} --no-privileges --no-owner {} > {}'.format(
        env.komoo_dbuser, env.komoo_dbname, remote_path))
    return remote_path
def enable_apache_access(self):
    '''
    Re-enables access to site root location.

    Used to remove 503 put in place during site install/upgrade.
    '''
    print(cyan('Re-enabling access to site...'))
    marker = env.node['site_root'] + '/.htaccess.drubs'
    if env.exists(marker):
        self.drubs_run('rm %s/.htaccess.drubs' % (env.node['site_root']))
def create_venv():
    """Create the virtualenv at env.venv_path, prompting before reinstall.

    Raises:
        Exception: if env.venv_path has not been configured.
    """
    if "venv_path" not in env:
        raise Exception("No env.venv_path has been specified")
    # Checks if venv exist and prompts user if reinstalling is an option
    if env.exists(Path(get_path(), "bin")):
        # FIX: logger.warn is a deprecated alias of logger.warning.
        logger.warning("Virtualenv is already installed")
        prompt_result = prompt("Install anyways:", default="no")
        if prompt_result == "no":
            return
    env.run("virtualenv %s" % get_path())
def site_bootstrapped(self):
    '''
    Determines if a bootstrapped drupal site exists.

    Returns 1 if the site is bootstrapped, 0 otherwise.
    '''
    if not env.exists(env.node['site_root']):
        return 0
    with env.cd(env.node['site_root']):
        status = self.drubs_run('drush status --fields=bootstrap --no-field-labels', capture=True)
        # drush reports "Successful" when the site bootstraps cleanly.
        return 1 if status.find('Successful') != -1 else 0
def create_backup(self):
    '''
    Creates a drush archive dump backup of a site.
    '''
    # Only back up a site that actually bootstraps; otherwise skip.
    if not self.site_bootstrapped():
        print(cyan('No pre-existing properly-functioning site found. Skipping backup...'))
        return
    print(cyan('Creating site backup...'))
    with env.cd(env.node['site_root']):
        if not env.exists(env.node['backup_directory']):
            self.drubs_run('mkdir -p %s' % (env.node['backup_directory']))
        self.drush('cc all')
        self.drush('archive-dump --destination="%s/%s_%s_%s.tar.gz" --preserve-symlinks' % (
            env.node['backup_directory'],
            env.config['project_settings']['project_name'],
            env.node_name,
            time.strftime("%Y-%m-%d_%H-%M-%S"),
        ))
def provision(self):
    '''
    Creates database and site root.
    '''
    print(cyan('Creating database...'))
    self.drubs_run('mysql -h%s -u%s -p%s -e "DROP DATABASE IF EXISTS %s;CREATE DATABASE %s;"' % (
        env.node['db_host'],
        env.node['db_user'],
        env.node['db_pass'],
        env.node['db_name'],
        env.node['db_name'],
    ))
    print(cyan('Creating site root location...'))
    # An existing docroot is emptied (keeping .htaccess.drubs) before use.
    if env.exists(env.node['site_root'] + '/sites/default'):
        with env.cd(env.node['site_root']):
            self.drubs_run('chmod u+w sites/default')
            self.drubs_run('ls -A | grep -v ".htaccess.drubs" | xargs rm -rf')
    self.drubs_run('mkdir -p %s' % (env.node['site_root']))
def update_requirements():
    """Install the release's pip requirements file."""
    # Directory holding requirement files; overridable via env.requirements_root.
    if "requirements_root" in env:
        req_dir = os.path.join(env.current_release, env.requirements_root)
    else:
        req_dir = os.path.join(env.current_release, "requirements")
    # Specific requirements file; defaults to "<stage>.txt".
    if "requirements" in env:
        req_path = Path(req_dir, env.requirements)
    else:
        req_path = Path(req_dir, "%s.txt" % env.stage)
    if not env.exists(req_path):
        raise Exception("Requirement file not found at %s" % req_path)
    env.run("pip install -r %s" % req_path)
def restore_db(release=None):
    """
    Restores backup back to version, uses current version by default.
    """
    release = release or paths.get_current_release_name()
    if not release:
        raise Exception("Release %s was not found" % release)
    backup_path = paths.get_backup_path("postgresql/%s.sql.gz" % release)
    if not env.exists(backup_path):
        raise Exception("Backup file %s not found" % backup_path)
    # Pass the password via PGPASSWORD so it never appears on the command line.
    with context_managers.shell_env(PGPASSWORD=env.psql_password):
        env.run("pg_restore --clean -h localhost -d %s -U %s '%s'" % (env.psql_db, env.psql_user, backup_path))
def destroy(self):
    """Drop the site database and delete the site files, after safeguards."""
    self.check_destructive_action_protection()
    self.check_and_create_backup()
    print(cyan('Removing database...'))
    self.drubs_run('mysql -h%s -u%s -p%s -e "DROP DATABASE IF EXISTS %s";' % (
        env.node['db_host'],
        env.node['db_user'],
        env.node['db_pass'],
        env.node['db_name'],
    ))
    print(cyan('Removing files...'))
    site_root = env.node['site_root']
    if env.exists(site_root):
        # Make everything writable first so rm -rf cannot fail on permissions.
        self.drubs_run('chmod -R u+w %s' % (site_root))
        self.drubs_run('rm -rf %s' % (site_root))
    else:
        print(yellow('Site root %s does not exist. Nothing to remove.' % (site_root,)))
    if not env.no_backup:
        self.remove_old_backups()
    self.print_elapsed_time()
def restore_db(release=None):
    """
    Restores backup back to version, uses current version by default.
    """
    if not release:
        release = paths.get_current_release_name()
    if not release:
        raise Exception("Release %s was not found" % release)

    backup_file = "postgresql/%s.sql.gz" % release
    backup_path = paths.get_backup_path(backup_file)
    if not env.exists(backup_path):
        raise Exception("Backup file %s not found" % backup_path)

    # Supply credentials through the environment rather than the command line.
    with context_managers.shell_env(PGPASSWORD=env.psql_password):
        command = "pg_restore --clean -h localhost -d %s -U %s '%s'" % (
            env.psql_db,
            env.psql_user,
            backup_path)
        env.run(command)
def test_deploy_rollback(self):
    """Deploy twice, roll back, and verify only the first release remains."""
    hooks.register_hook("copy", empty_copy)
    with settings(source_path="src", warn_only=True):
        setup()
        deploy()
        release_name = paths.get_current_release_name()
        deploy()  # Run another deploy so we can roll back
        rollback()
        # The rolled-back "current" symlink must still expose the app files.
        self.assertTrue(os.path.exists(os.path.join(
            env.app_path, "current", "app.txt")
        ))
        releases = len(os.listdir(os.path.join(env.app_path, "releases")))
        # FIX: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(releases, 1)
        self.assertTrue(env.exists(paths.get_releases_path(release_name)))
def make(self):
    '''
    Runs drush make using the make file specified in project configs.

    Local hosts run drush make against the make file directly; remote hosts
    first receive a copy of the make file under /tmp, which is removed again
    after the build.
    '''
    print(cyan('Beginning drush make...'))
    with env.cd(env.node['site_root']):
        if env.exists(env.node['site_root'] + '/sites/default'):
            self.drubs_run('chmod 775 sites/default')
        make_file = env.config_dir + '/' + env.node['make_file']
        cache_option = str()
        if not env.cache:
            cache_option += ' --no-cache'
        # Remove all modules/themes/libraries to ensure any deleted files are
        # removed. See: https://github.com/komlenic/drubs/issues/30
        self.drubs_run('rm -rf sites/all/*')
        if env.host_is_local:
            self.drush('make --working-copy --no-gitinfofile %s %s' % (
                cache_option,
                make_file,
            ))
        else:
            # Copy drush make file for the node to /tmp on the node.
            put(make_file, '/tmp/' + env.config['project_settings']['project_name'])
            # Run drush make.
            self.drush('make --working-copy --no-gitinfofile %s /tmp/%s/%s' % (
                cache_option,
                env.config['project_settings']['project_name'],
                env.node['make_file'],
            ))
            # Remove drush make file from /tmp on the node.
            self.drubs_run('rm -rf /tmp/%s/%s' % (
                env.config['project_settings']['project_name'],
                env.node['make_file'],
            ))
def repo():
    """Return True when the project checkout and its git metadata exist."""
    git_dir = os.path.join(env.komoo_project_folder, '.git')
    # dir() is checked first so env.exists is skipped when the folder is gone.
    if dir() and env.exists(git_dir):
        return True
    warn('Repository not found.')
    return False
def dir():
    """Return True when the project folder exists (warns otherwise).

    NOTE: the name shadows the builtin dir(); kept for call-site compatibility.
    """
    if env.exists(env.komoo_project_folder):
        return True
    warn('Project path not found: {}'.format(env.komoo_project_folder))
    return False
def virtualenv():
    """Return True when the project's virtualenv exists (warns otherwise)."""
    venv_path = '~/.virtualenvs/{}'.format(env.komoo_virtualenv)
    if env.exists(venv_path):
        return True
    warn('Virtualenv not found.')
    return False
def repo():
    """Return True when the project checkout and its git metadata exist."""
    # Guard clauses mirror the original short-circuit: env.exists is only
    # consulted once the project folder itself has been confirmed.
    if not dir():
        warn('Repository not found.')
        return False
    if not env.exists(os.path.join(env.komoo_project_folder, '.git')):
        warn('Repository not found.')
        return False
    return True