def _build_ansible_galaxy_requirement(self, features):
    """
    Generate the Ansible Galaxy requirement file from the given features
    and install the matching roles with ansible-galaxy.

    :param features: list of feature dicts from which Ansible roles are extracted
    :raises GalaxyNoMatchingRolesException: when no role matches the galaxy
        requirements for one or more features
    """
    # Start from the base requirements file shipped with the project.
    # safe_load replaces the deprecated bare yaml.load: it parses plain YAML
    # identically but cannot instantiate arbitrary Python objects.
    with open(self._ansible_base_requirements_file, 'r') as requirements_file:
        requirement_app = yaml.safe_load(requirements_file)
    for role in self._get_ansible_roles(features):
        requirement_app.append(self._get_roles_from_requirement(role))
    # The base file contains a single null entry; anything appended means
    # at least one role was resolved.
    if requirement_app != [None]:
        with open(self._ansible_requirement_app, "w") as stream_requirement_app:
            yaml.dump(requirement_app, stream_requirement_app, default_flow_style=False)
        log("Ansible - Getting roles from : {0}".format(self._ansible_galaxy_rq_path), self._log_file)
        gcall("{} install -r {} -p {}".format(self._ansible_galaxy_command_path,
                                              self._ansible_requirement_app,
                                              self._ansible_galaxy_role_path),
              'Ansible - ansible-galaxy command', self._log_file)
    else:
        # NOTE(review): features[-1] raises IndexError when features is empty —
        # confirm callers always pass a non-empty list.
        raise GalaxyNoMatchingRolesException(
            "Ansible - ERROR: No roles match galaxy requirements for one or more features {0}"
            .format(features[-1]['roles']))
def _get_module_s3(self, module, source_url, working_directory):
    """
    Fetch the module sources from S3.

    :param module: Ghost module whose sources are fetched
    :param source_url: S3 source url, must start with "s3://"
    :param working_directory: local directory receiving the sources
    :return: (source url, working directory, source version, version uid, version message)
    :raises GCallException: when the source url is not an s3:// url
    """
    if not source_url.startswith('s3://'):
        raise GCallException('Invalid S3 source url given: "{}", it must starts with "s3://"'.format(source_url))
    revision = self._get_module_revision(module['name'])
    normalized_revision = revision.lower().strip()
    # 'HEAD'/'latest' (any case/whitespace) means the newest S3 object version
    if normalized_revision in ['head', 'latest']:
        revision = 'latest'
        gcall('aws s3 cp "{s}" "{w}" --recursive'.format(s=source_url, w=working_directory),
              'Retrieving from S3 bucket ({url}) at latest revision'.format(url=source_url),
              self._log_file)
    else:
        log("Retrieving from S3 bucket ({url}) at revision '{rev}'".format(url=source_url, rev=revision),
            self._log_file)
        download_s3_object(self._app, source_url, working_directory, revision, self._log_file)
    return source_url, working_directory, revision, '', ''
def _package_module(self, module, ts, commit):
    """
    Create a tarball of the module sources and upload it to the S3 packages bucket.

    :param module: Ghost module to package
    :param ts: deployment timestamp, used as the package name prefix
    :param commit: source revision identifier, used as the package name suffix
    :return: the package name uploaded to S3
    """
    path = get_buildpack_clone_path_from_module(self._app, module)
    os.chdir(path)
    pkg_name = "{0}_{1}_{2}".format(ts, module['name'], commit)
    pkg_path = '../{0}'.format(pkg_name)
    # File ownership inside the archive defaults to the current process uid/gid
    uid = module.get('uid', os.geteuid())
    gid = module.get('gid', os.getegid())
    tar_exclude_git = "--exclude '.git'" if boolify(self._config.get('deployment_package_exclude_git_metadata', False)) else ''
    gcall("tar czf {0} --owner={1} --group={2} {3} .".format(pkg_path, uid, gid, tar_exclude_git),
          "Creating package: %s" % pkg_name, self._log_file)
    log("Uploading package: %s" % pkg_name, self._log_file)
    cloud_connection = cloud_connections.get(self._app.get('provider', DEFAULT_PROVIDER))(self._log_file)
    conn = cloud_connection.get_connection(self._config.get('bucket_region', self._app['region']), ["s3"])
    bucket = conn.get_bucket(self._config['bucket_s3'])
    key_path = '{path}/{pkg_name}'.format(path=path, pkg_name=pkg_name)
    # BUGFIX: look up the existing S3 key by its full key path. The original
    # called bucket.get_key(path) with the local clone directory, which could
    # never match the '{path}/{pkg_name}' key and always created a new key.
    key = bucket.get_key(key_path)
    if not key:
        key = bucket.new_key(key_path)
    key.set_contents_from_filename(pkg_path)
    gcall("rm -f {0}".format(pkg_path), "Deleting local package: %s" % pkg_name, self._log_file)
    # Optionally purge old packages according to the per-environment retention policy
    deployment_package_retention_config = self._config.get('deployment_package_retention', None)
    if deployment_package_retention_config and self._app['env'] in deployment_package_retention_config:
        deployment_package_retention = deployment_package_retention_config.get(self._app['env'], 42)
        self._purge_s3_package(path, bucket, module, pkg_name, deployment_package_retention)
    return pkg_name
def _execute_swap_hook(online_app, to_deploy_app, script_name, script_message, log_file):
    """
    Run the given blue/green hook script for both the active (online) and the
    inactive (to-deploy) applications, exporting the Ghost environment variables.

    :param online_app: currently active Ghost application
    :param to_deploy_app: inactive Ghost application about to be deployed
    :param script_name: hook key to look up under app['blue_green']['hooks']
    :param script_message: log message template with a {status} placeholder
    :param log_file: log file path
    """
    apps_by_status = (('active', online_app), ('inactive', to_deploy_app))
    for status, app in apps_by_status:
        hooks = app.get('blue_green', {}).get('hooks', {})
        script = hooks.get(script_name, None)
        if not script:
            continue
        # Materialize the base64-encoded hook script inside the app workspace
        script_path = os.path.join(get_path_from_app_with_color(app), script_name)
        with open(script_path, 'w') as hook_file:
            hook_file.write(b64decode_utf8(script))
        script_env = os.environ.copy()
        script_env.update(get_ghost_env_variables(app))
        gcall('bash {}'.format(script_path),
              '{}: Execute'.format(script_message.format(status=status)),
              log_file,
              env=script_env)
def _local_extract_package(self, module, package):
    """
    Download the given package from the S3 bucket and extract it into a
    freshly recreated module working directory.

    :param module: Ghost module the package belongs to
    :param package: package file name to retrieve
    :return: the working directory path the package was extracted into
    :raises GCallException: when the package does not exist on the bucket
    """
    clone_path = get_buildpack_clone_path_from_module(self._app, module)
    # Wipe and recreate the working directory so extraction starts clean
    gcall('rm -rf "%s"' % clone_path,
          'Cleaning old temporary redeploy module working directory "%s"' % clone_path,
          self._log_file)
    gcall('mkdir -p "%s"' % clone_path,
          'Recreating redeploy module working directory "%s"' % clone_path,
          self._log_file)
    key_path = '{path}/{module}/{pkg_name}'.format(path=get_path_from_app_with_color(self._app),
                                                   module=module['name'],
                                                   pkg_name=package)
    log("Downloading package: {0} from '{1}'".format(package, key_path), self._log_file)
    dest_package_path = "{0}/{1}".format(clone_path, package)
    provider = self._app.get('provider', DEFAULT_PROVIDER)
    cloud_connection = cloud_connections.get(provider)(self._log_file)
    bucket_region = self._config.get('bucket_region', self._app['region'])
    conn = cloud_connection.get_connection(bucket_region, ["s3"])
    bucket = conn.get_bucket(self._config['bucket_s3'])
    key = bucket.get_key(key_path)
    if not key:
        raise GCallException("Package '{0}' doesn't exist on bucket '{1}'".format(key_path, self._config['bucket_s3']))
    key.get_contents_to_filename(dest_package_path)
    gcall('tar -xf "{0}" -C "{1}"'.format(dest_package_path, clone_path),
          "Extracting package: %s" % package,
          self._log_file)
    return clone_path
def execute_module_script_on_ghost(app, module, script_name, script_friendly_name, clone_path,
                                   log_file, job, config):
    """
    Execute the named module script on the Ghost instance (or inside an LXD
    container when the app builds a container image).

    :param app: Ghost application
    :param module: Ghost module to extract script from
    :param script_name: string: the name of the script to find in module
    :param script_friendly_name: string: the friendly name of the script for logs
    :param clone_path: string: working directory of the current module
    :param log_file: string: Log file path
    :param job: Ghost job
    :param config: Ghost config
    :raises GCallException: when the containerized execution fails
    """
    if script_name not in module:
        # No such script defined for this module: nothing to execute
        return
    candidate_path = "{0}/{1}".format(clone_path, script_name)
    if os.path.isfile(candidate_path):
        # The script already exists in the working directory
        script_path = candidate_path
    else:
        # Decode the base64 script from the module and write it to a temp file
        decoded_source = b64decode_utf8(module[script_name])
        fd, script_path = tempfile.mkstemp(dir=clone_path)
        os.close(fd)
        with io.open(script_path, mode='w', encoding='utf-8') as script_file:
            script_file.write(decoded_source)
    script_env = os.environ.copy()
    script_env.update(get_ghost_env_variables(app, module))
    run_in_container = app['build_infos'].get('container_image') and lxd_is_available(config)
    if run_in_container:
        source_module = get_buildpack_clone_path_from_module(app, module)
        container = LXDImageBuilder(app, job, None, log_file, config)
        if not container.deploy(script_path, module, source_module):
            raise GCallException("ERROR: %s execution on container failed" % script_name)
    else:
        log("Change directory to working dir ({w})".format(w=clone_path), log_file)
        os.chdir(clone_path)
        gcall('bash %s' % script_path, '%s: Execute' % script_friendly_name, log_file, env=script_env)
        gcall('du -hs .', 'Display current build directory disk usage', log_file)
        gcall('rm -vf %s' % script_path, '%s: Done, cleaning temporary file' % script_friendly_name, log_file)
def _publish_container(self):
    """ Publish the built container as an image on the local registry. """
    # Drop stale images first so the alias can be (re)used
    self._clean_lxd_images()
    publish_cmd = "lxc publish {container_name} local: --alias={job_id} description={container_name} --force".format(
        job_id=self._job['_id'],
        container_name=self._container_name)
    gcall(publish_cmd, "Publish Container as image", self._log_file)
def _delete_containers_profile(self):
    """ Delete the LXD profile associated with this container. """
    delete_cmd = "lxc profile delete {container_name}".format(container_name=self._container_name)
    gcall(delete_cmd, "Delete container profile", self._log_file)
def _execute_deploy(self, module, fabric_execution_strategy, safe_deployment_strategy):
    """
    Package the module sources, deploy the package and record the deployment.

    :param module: Ghost module to deploy
    :param fabric_execution_strategy: forwarded to _deploy_module
    :param safe_deployment_strategy: forwarded to _deploy_module
    :return: the deployment id (result of the deploy_histories insert)
    """
    now = datetime.datetime.utcnow()
    # Epoch timestamp: recorded in the deployment document and used as the package name prefix
    ts = calendar.timegm(now.timetuple())
    git_repo, clone_path, revision, commit, commit_message = self._get_module_sources(module)
    # Store predeploy script in tarball
    if 'pre_deploy' in module:
        log("Create pre_deploy script for inclusion in target package", self._log_file)
        predeploy_source = b64decode_utf8(module['pre_deploy'])
        with io.open(clone_path + '/predeploy', mode='w', encoding='utf-8') as f:
            f.write(predeploy_source)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Execute buildpack
    execute_module_script_on_ghost(self._app, module, 'build_pack', 'Buildpack', clone_path,
                                   self._log_file, self._job, self._config)
    # Store postdeploy script in tarball
    if 'post_deploy' in module:
        log("Create post_deploy script for inclusion in target package", self._log_file)
        postdeploy_source = b64decode_utf8(module['post_deploy'])
        with io.open(clone_path + '/postdeploy', mode='w', encoding='utf-8') as f:
            f.write(postdeploy_source)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Store after_all_deploy script in tarball
    if 'after_all_deploy' in module:
        log("Create after_all_deploy script for inclusion in target package", self._log_file)
        afteralldeploy_source = b64decode_utf8(module['after_all_deploy'])
        with io.open(clone_path + '/after_all_deploy', mode='w', encoding='utf-8') as f:
            f.write(afteralldeploy_source)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Store module metadata in tarball (sourced as shell variables on the target)
    log("Create metadata file for inclusion in target package", self._log_file)
    module_metadata = u"""
#!/bin/bash
GHOST_MODULE_REPO="{repo}"
GHOST_MODULE_REV="{rev}"
GHOST_MODULE_COMMIT="{commit}"
GHOST_MODULE_COMMIT_MESSAGE="{commitmsg}"
GHOST_MODULE_USER="******"
"""
    metavars = {
        "repo": git_repo,
        "rev": revision,
        "commit": commit,
        "commitmsg": commit_message,
        "user": self._job['user']
    }
    module_metadata = module_metadata.format(**metavars)
    custom_env_vars = self._app.get('env_vars', None)
    if custom_env_vars and len(custom_env_vars):
        # Append the application's custom environment variables as export statements
        module_metadata = module_metadata + u''.join([u'export {key}="{val}" \n'.format(
            key=env_var['var_key'], val=env_var.get('var_value', '')) for env_var in custom_env_vars])
    with io.open(clone_path + '/.ghost-metadata', mode='w', encoding='utf-8') as f:
        f.write(module_metadata)
    gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Create tar archive
    pkg_name = self._package_module(module, ts, commit)
    # Keep the pre-update manifest so it can be restored on deploy failure
    before_update_manifest = update_app_manifest(self._app, self._config, module, pkg_name, self._log_file)
    try:
        all_app_modules_list = get_app_module_name_list(self._app['modules'])
        clean_local_module_workspace(get_path_from_app_with_color(self._app), all_app_modules_list, self._log_file)
        self._deploy_module(module, fabric_execution_strategy, safe_deployment_strategy)
    except GCallException as e:
        # Roll the manifest back so the app still points at the previous package
        log("Deploy error occured, app manifest will be restored to its previous state", self._log_file)
        rollback_app_manifest(self._app, self._config, before_update_manifest, self._log_file)
        raise e
    if 'after_all_deploy' in module:
        log("After all deploy script found for '{0}'. Executing it.".format(module['name']), self._log_file)
        execute_module_script_on_ghost(self._app, module, 'after_all_deploy', 'After all deploy', clone_path,
                                       self._log_file, self._job, self._config)
    now = datetime.datetime.utcnow()
    deployment = {
        'app_id': self._app['_id'],
        'job_id': self._job['_id'],
        'module': module['name'],
        'revision': revision,
        'commit': commit,
        'commit_message': commit_message,
        'timestamp': ts,
        'package': pkg_name,
        'module_path': module['path'],
        '_created': now,
        '_updated': now,
    }
    return self._worker._db.deploy_histories.insert(deployment)
def _get_module_git(self, module, git_repo, clone_path):
    """
    Fetch the module sources from Git via a local bare mirror.

    :param module: Module object
    :param git_repo: Source git repository
    :param clone_path: Working directory
    :return: (source url, working directory, source version, version uid, version message)
    """
    mirror_path = get_mirror_path_from_module(module)
    lock_path = get_lock_path_from_repo(git_repo)
    revision = self._get_module_revision(module['name'])
    try:
        # Serialize mirror creation/update across concurrent jobs on the same repo
        git_acquire_lock(lock_path, self._log_file)
        if not os.path.exists(mirror_path):
            gcall('git --no-pager clone --bare --mirror {r} {m}'.format(r=git_repo, m=mirror_path),
                  'Create local git mirror for remote {r}'.format(r=git_repo),
                  self._log_file)
        # Update existing git mirror
        os.chdir(mirror_path)
        gcall('git --no-pager gc --auto',
              'Cleanup local mirror before update {r}'.format(r=git_repo),
              self._log_file)
        gcall('git --no-pager fetch --all --tags --prune',
              'Update local git mirror from remote {r}'.format(r=git_repo),
              self._log_file)
    finally:
        git_release_lock(lock_path, self._log_file)
    # Resolve HEAD symbolic reference to identify the default branch
    # (runs in mirror_path — the last chdir above)
    head = git('--no-pager', 'symbolic-ref', '--short', 'HEAD', _tty_out=False).strip()
    # If revision is HEAD, replace it by the default branch
    if revision == 'HEAD':
        revision = head
    # If revision is a commit hash, a full intermediate clone is required before getting a shallow clone
    # (git clone -b accepts branches/tags only, not raw commit hashes)
    if self._is_commit_hash(revision):
        # Create intermediate clone from the local git mirror, chdir into it and fetch all commits
        source_path = get_intermediate_clone_path_from_module(self._app, module)
        if os.path.exists(source_path):
            gcall('chmod -R u+rwx {p}'.format(p=source_path), 'Update rights on previous intermediate clone', self._log_file)
            gcall('rm -rf {p}'.format(p=source_path), 'Removing previous intermediate clone', self._log_file)
        os.makedirs(source_path)
        os.chdir(source_path)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager init', 'Git init intermediate clone', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager remote add origin file://{m}'.format(m=mirror_path),
              'Git add local mirror as origin for intermediate clone', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager fetch origin', 'Git fetch all commits from origin', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager checkout {r}'.format(r=revision),
              'Git checkout revision into intermediate clone: {r}'.format(r=revision), self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        # Create shallow clone from the intermediate clone, chdir into it and retrieve submodules
        if os.path.exists(clone_path):
            gcall('chmod -R u+rwx {p}'.format(p=clone_path), 'Update rights on previous clone', self._log_file)
            gcall('rm -rf {p}'.format(p=clone_path), 'Removing previous clone', self._log_file)
        os.makedirs(clone_path)
        os.chdir(clone_path)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager clone file://{s} .'.format(s=source_path), 'Git clone from intermediate clone', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager submodule update --init --recursive', 'Git update submodules', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        # Destroy intermediate clone
        gcall('chmod -R u+rwx {p}'.format(p=source_path), 'Update rights on previous intermediate clone', self._log_file)
        gcall('rm -rf {p}'.format(p=source_path), 'Removing intermediate clone', self._log_file)
    else:
        # Create clone from the local git mirror, chdir into it, fetch requested revision and retrieve submodules
        if os.path.exists(clone_path):
            gcall('chmod -R u+rwx {p}'.format(p=clone_path), 'Update rights on previous clone', self._log_file)
            gcall('rm -rf {p}'.format(p=clone_path), 'Removing previous clone', self._log_file)
        os.makedirs(clone_path)
        os.chdir(clone_path)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager clone --depth=10 file://{m} -b {r} .'.format(m=mirror_path, r=revision),
              'Git clone from local mirror with depth limited to 10 from a specific revision: {r}'.format(r=revision),
              self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager submodule update --init --recursive', 'Git update submodules', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Extract commit information (short hash + subject of the checked-out HEAD)
    commit = git('--no-pager', 'rev-parse', '--short', 'HEAD', _tty_out=False).strip()
    commit_message = git('--no-pager', 'log', '--max-count=1', '--format=%s', 'HEAD', _tty_out=False).strip()
    # At last, reset remote origin URL so the clone points at the real remote,
    # not the local file:// mirror
    gcall('git --no-pager remote set-url origin {r}'.format(r=git_repo),
          'Git reset remote origin to {r}'.format(r=git_repo), self._log_file)
    return git_repo, clone_path, revision, commit, commit_message
def _execute_deploy(self, module, fabric_execution_strategy, safe_deployment_strategy):
    """
    Fetch the module sources from git, package them, deploy the package and
    record the deployment.

    NOTE(review): the git mirror/clone logic below appears to duplicate
    _get_module_git — consider factoring it out (confirm both belong to the
    same worker hierarchy before refactoring).

    :param module: Ghost module to deploy (must contain 'git_repo')
    :param fabric_execution_strategy: forwarded to _deploy_module
    :param safe_deployment_strategy: forwarded to _deploy_module
    :return: the deployment id (result of the deploy_histories insert)
    """
    now = datetime.datetime.utcnow()
    # Epoch timestamp: recorded in the deployment document and used as the package name prefix
    ts = calendar.timegm(now.timetuple())
    git_repo = module['git_repo'].strip()
    mirror_path = get_mirror_path_from_module(module)
    clone_path = get_buildpack_clone_path_from_module(self._app, module)
    lock_path = get_lock_path_from_repo(git_repo)
    revision = self._get_module_revision(module['name'])
    try:
        # Serialize mirror creation/update across concurrent jobs on the same repo
        git_acquire_lock(lock_path, self._log_file)
        if not os.path.exists(mirror_path):
            gcall('git --no-pager clone --bare --mirror {r} {m}'.format(r=git_repo, m=mirror_path),
                  'Create local git mirror for remote {r}'.format(r=git_repo),
                  self._log_file)
        # Update existing git mirror
        os.chdir(mirror_path)
        gcall('git --no-pager gc --auto',
              'Cleanup local mirror before update {r}'.format(r=git_repo),
              self._log_file)
        gcall('git --no-pager fetch --all --tags --prune',
              'Update local git mirror from remote {r}'.format(r=git_repo),
              self._log_file)
    finally:
        git_release_lock(lock_path, self._log_file)
    # Resolve HEAD symbolic reference to identify the default branch
    head = git('--no-pager', 'symbolic-ref', '--short', 'HEAD', _tty_out=False).strip()
    # If revision is HEAD, replace it by the default branch
    if revision == 'HEAD':
        revision = head
    # If revision is a commit hash, a full intermediate clone is required before getting a shallow clone
    if self._is_commit_hash(revision):
        # Create intermediate clone from the local git mirror, chdir into it and fetch all commits
        source_path = get_intermediate_clone_path_from_module(self._app, module)
        if os.path.exists(source_path):
            gcall('chmod -R u+rwx {p}'.format(p=source_path), 'Update rights on previous intermediate clone', self._log_file)
            gcall('rm -rf {p}'.format(p=source_path), 'Removing previous intermediate clone', self._log_file)
        os.makedirs(source_path)
        os.chdir(source_path)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager init', 'Git init intermediate clone', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager remote add origin file://{m}'.format(m=mirror_path),
              'Git add local mirror as origin for intermediate clone', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager fetch origin', 'Git fetch all commits from origin', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager checkout {r}'.format(r=revision),
              'Git checkout revision into intermediate clone: {r}'.format(r=revision), self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        # Create shallow clone from the intermediate clone, chdir into it and retrieve submodules
        if os.path.exists(clone_path):
            gcall('chmod -R u+rwx {p}'.format(p=clone_path), 'Update rights on previous clone', self._log_file)
            gcall('rm -rf {p}'.format(p=clone_path), 'Removing previous clone', self._log_file)
        os.makedirs(clone_path)
        os.chdir(clone_path)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager clone file://{s} .'.format(s=source_path), 'Git clone from intermediate clone', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager submodule update --init --recursive', 'Git update submodules', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        # Destroy intermediate clone
        gcall('chmod -R u+rwx {p}'.format(p=source_path), 'Update rights on previous intermediate clone', self._log_file)
        gcall('rm -rf {p}'.format(p=source_path), 'Removing intermediate clone', self._log_file)
    else:
        # Create clone from the local git mirror, chdir into it, fetch requested revision and retrieve submodules
        if os.path.exists(clone_path):
            gcall('chmod -R u+rwx {p}'.format(p=clone_path), 'Update rights on previous clone', self._log_file)
            gcall('rm -rf {p}'.format(p=clone_path), 'Removing previous clone', self._log_file)
        os.makedirs(clone_path)
        os.chdir(clone_path)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager clone --depth=10 file://{m} -b {r} .'.format(m=mirror_path, r=revision),
              'Git clone from local mirror with depth limited to 10 from a specific revision: {r}'.format(r=revision),
              self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
        gcall('git --no-pager submodule update --init --recursive', 'Git update submodules', self._log_file)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Extract commit information (short hash + subject of the checked-out HEAD)
    commit = git('--no-pager', 'rev-parse', '--short', 'HEAD', _tty_out=False).strip()
    commit_message = git('--no-pager', 'log', '--max-count=1', '--format=%s', 'HEAD', _tty_out=False).strip()
    # At last, reset remote origin URL so the clone points at the real remote
    gcall('git --no-pager remote set-url origin {r}'.format(r=git_repo),
          'Git reset remote origin to {r}'.format(r=git_repo), self._log_file)
    # Store predeploy script in tarball
    if 'pre_deploy' in module:
        log("Create pre_deploy script for inclusion in target package", self._log_file)
        predeploy_source = b64decode_utf8(module['pre_deploy'])
        with io.open(clone_path + '/predeploy', mode='w', encoding='utf-8') as f:
            f.write(predeploy_source)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Execute buildpack
    execute_module_script_on_ghost(self._app, module, 'build_pack', 'Buildpack', clone_path,
                                   self._log_file, self._job, self._config)
    # Store postdeploy script in tarball
    if 'post_deploy' in module:
        log("Create post_deploy script for inclusion in target package", self._log_file)
        postdeploy_source = b64decode_utf8(module['post_deploy'])
        with io.open(clone_path + '/postdeploy', mode='w', encoding='utf-8') as f:
            f.write(postdeploy_source)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Store after_all_deploy script in tarball
    if 'after_all_deploy' in module:
        log("Create after_all_deploy script for inclusion in target package", self._log_file)
        afteralldeploy_source = b64decode_utf8(module['after_all_deploy'])
        with io.open(clone_path + '/after_all_deploy', mode='w', encoding='utf-8') as f:
            f.write(afteralldeploy_source)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Store module metadata in tarball (sourced as shell variables on the target)
    log("Create metadata file for inclusion in target package", self._log_file)
    module_metadata = u"""
#!/bin/bash
GHOST_MODULE_REPO="{repo}"
GHOST_MODULE_REV="{rev}"
GHOST_MODULE_COMMIT="{commit}"
GHOST_MODULE_COMMIT_MESSAGE="{commitmsg}"
GHOST_MODULE_USER="******"
"""
    metavars = {
        "repo": git_repo,
        "rev": revision,
        "commit": commit,
        "commitmsg": commit_message,
        "user": self._job['user']
    }
    module_metadata = module_metadata.format(**metavars)
    custom_env_vars = self._app.get('env_vars', None)
    if custom_env_vars and len(custom_env_vars):
        # Append the application's custom environment variables as export statements
        module_metadata = module_metadata + u''.join([u'export {key}="{val}" \n'.format(
            key=env_var['var_key'], val=env_var.get('var_value', '')) for env_var in custom_env_vars])
    with io.open(clone_path + '/.ghost-metadata', mode='w', encoding='utf-8') as f:
        f.write(module_metadata)
    gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
    # Create tar archive
    pkg_name = self._package_module(module, ts, commit)
    # Keep the pre-update manifest so it can be restored on deploy failure
    before_update_manifest = update_app_manifest(self._app, self._config, module, pkg_name, self._log_file)
    try:
        all_app_modules_list = get_app_module_name_list(self._app['modules'])
        clean_local_module_workspace(get_path_from_app_with_color(self._app), all_app_modules_list, self._log_file)
        self._deploy_module(module, fabric_execution_strategy, safe_deployment_strategy)
    except GCallException as e:
        # Roll the manifest back so the app still points at the previous package
        log("Deploy error occured, app manifest will be restored to its previous state", self._log_file)
        rollback_app_manifest(self._app, self._config, before_update_manifest, self._log_file)
        raise e
    if 'after_all_deploy' in module:
        log("After all deploy script found for '{0}'. Executing it.".format(module['name']), self._log_file)
        execute_module_script_on_ghost(self._app, module, 'after_all_deploy', 'After all deploy', clone_path,
                                       self._log_file, self._job, self._config)
    now = datetime.datetime.utcnow()
    deployment = {
        'app_id': self._app['_id'],
        'job_id': self._job['_id'],
        'module': module['name'],
        'revision': revision,
        'commit': commit,
        'commit_message': commit_message,
        'timestamp': ts,
        'package': pkg_name,
        'module_path': module['path'],
        '_created': now,
        '_updated': now,
    }
    return self._worker._db.deploy_histories.insert(deployment)