Example 1
    def execute(self):
        if not boolify(self._config.get('enable_executescript_command', True)):
            return self._abort(
                "This command has been disabled by your administrator.")
        options = self._job.get('options', [])
        script = options[0] if len(options) > 0 else None
        module_name = options[1] if len(options) > 1 else None
        execution_strategy = options[2] if len(options) > 2 else None
        if execution_strategy == 'single':
            # option[3] is a single host IP
            fabric_execution_strategy = None
            safe_deployment_strategy = None
            single_host_ip = options[3] if len(options) > 3 else None
        else:
            # option[2] is the fabric type, option[3] may be the safe deployment group parameter
            fabric_execution_strategy = execution_strategy
            safe_deployment_strategy = options[3] if len(options) > 3 else None
            single_host_ip = None

        try:
            log(_green("STATE: Started"), self._log_file)
            try:
                if not script or not script.strip():
                    return self._abort("No valid script provided")
                script_data = b64decode_utf8(script)
                allowed_shebang = ('#!/bin/bash', '#! /bin/bash', '#!/bin/sh',
                                   '#! /bin/sh')
                if not script_data.startswith(allowed_shebang):
                    return self._abort(
                        "No valid shell script provided (shebang missing)")
            except Exception:
                return self._abort("No valid script provided")

            if single_host_ip:
                log(_yellow("Executing script on a single host: %s" % single_host_ip), self._log_file)
                self._exec_script_single_host(script_data, module_name, single_host_ip)
            else:
                log(_yellow("Executing script on every running instance"), self._log_file)
                self._exec_script(script_data, module_name,
                                  fabric_execution_strategy, safe_deployment_strategy)

            self._worker.update_status(
                "done", message=self._get_notification_message_done())
            log(_green("STATE: End"), self._log_file)
        except Exception as e:
            self._worker.update_status(
                "failed", message=self._get_notification_message_failed(e))
            log(_red("STATE: End"), self._log_file)
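
Every example here leans on the `b64decode_utf8` helper. For reference, a minimal sketch of such a helper, assuming its contract is simply base64 in, UTF-8 text out (a hypothetical reconstruction, not the project's actual implementation):

import base64

def b64decode_utf8(encoded):
    # Hypothetical sketch: decode the base64 payload, then interpret the
    # resulting bytes as UTF-8 text, as every call site in these examples expects.
    return base64.b64decode(encoded).decode('utf-8')

Under this contract the shebang check in Example 1 operates on text, which is why `startswith` is handed a tuple of plain strings.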
Example 2
def execute_module_script_on_ghost(app, module, script_name,
                                   script_friendly_name, clone_path, log_file,
                                   job, config):
    """ Executes the given script on the Ghost instance

        :param app: Ghost application
        :param module: Ghost module to extract script from
        :param script_name: string: the name of the script to find in module
        :param script_friendly_name: string: the friendly name of the script for logs
        :param clone_path: string: working directory of the current module
        :param log_file: string: Log file path
        :param job: Ghost job
        :param config: Ghost config
    """
    # Execute script if available
    if script_name in module:
        theoretical_script_path = "{0}/{1}".format(clone_path, script_name)
        if os.path.isfile(theoretical_script_path):
            script_path = theoretical_script_path
        else:
            script_source = b64decode_utf8(module[script_name])
            script_fd, script_path = tempfile.mkstemp(dir=clone_path)
            os.close(script_fd)
            with io.open(script_path, mode='w', encoding='utf-8') as f:
                f.write(script_source)

        script_env = os.environ.copy()
        script_env.update(get_ghost_env_variables(app, module))

        if app['build_infos'].get('container_image') and lxd_is_available(config):
            source_module = get_buildpack_clone_path_from_module(app, module)
            container = LXDImageBuilder(app, job, None, log_file, config)
            if not container.deploy(script_path, module, source_module):
                raise GCallException(
                    "ERROR: %s execution on container failed" % script_name)
        else:
            log("Change directory to working dir ({w})".format(w=clone_path),
                log_file)
            os.chdir(clone_path)
            gcall('bash %s' % script_path,
                  '%s: Execute' % script_friendly_name,
                  log_file,
                  env=script_env)

        gcall('du -hs .', 'Display current build directory disk usage',
              log_file)
        gcall('rm -vf %s' % script_path,
              '%s: Done, cleaning temporary file' % script_friendly_name,
              log_file)
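
Example 2's temp-file pattern (mkstemp for a unique path in the working directory, close the raw descriptor, rewrite the file through `io.open` with an explicit encoding) can be reproduced standalone. A minimal sketch, with `subprocess` standing in for the project-specific `gcall` helper:

import io
import os
import subprocess
import tempfile

def run_embedded_script(script_source, workdir, env=None):
    # Materialize the script in the working directory, as Example 2 does.
    fd, script_path = tempfile.mkstemp(dir=workdir)
    os.close(fd)
    with io.open(script_path, mode='w', encoding='utf-8') as f:
        f.write(script_source)
    try:
        # gcall is Ghost-specific; a plain subprocess call stands in here.
        subprocess.check_call(['bash', script_path], cwd=workdir, env=env)
    finally:
        os.remove(script_path)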
Example 3
    def _generate_buildimage_hook(self, hook_name):
        """
        Generates a buildimage hook script

        >>> from StringIO import StringIO
        >>> from ghost_tools import b64encode_utf8
        >>> app = {
        ...   'name': 'AppName', 'env': 'prod', 'role': 'webfront', 'region': 'eu-west-1',
        ...   'lifecycle_hooks': {
        ...     'pre_buildimage': u'',
        ...     'post_buildimage': b64encode_utf8(u'echo Custom post-buildimage script')
        ...   }
        ... }
        >>> job = {"_id" : "012345678901234567890123"}
        >>> log_file = StringIO()
        >>> _config = None
        >>> _db = None

        >>> ImageBuilder(app, job, _db, log_file, _config)._generate_buildimage_hook('pre_buildimage')
        '/ghost/AppName/prod/webfront/hook-pre_buildimage'
        >>> with io.open('/ghost/AppName/prod/webfront/hook-pre_buildimage', encoding='utf-8') as f:
        ...   f.read()
        u'echo No pre_buildimage script'

        >>> ImageBuilder(app, job, _db, log_file, _config)._generate_buildimage_hook('post_buildimage')
        '/ghost/AppName/prod/webfront/hook-post_buildimage'
        >>> with io.open('/ghost/AppName/prod/webfront/hook-post_buildimage', encoding='utf-8') as f:
        ...   f.read()
        u'echo Custom post-buildimage script'

        """
        log("Create '%s' script for Packer" % hook_name, self._log_file)
        lfc_hooks = self._app.get('lifecycle_hooks', None)
        if not lfc_hooks or not lfc_hooks.get(hook_name, None):
            hook_source = u"echo No {hook_name} script".format(
                hook_name=hook_name)
        else:
            hook_source = b64decode_utf8(
                self._app['lifecycle_hooks'][hook_name])
        app_path = "/ghost/{name}/{env}/{role}".format(name=self._app['name'],
                                                       env=self._app['env'],
                                                       role=self._app['role'])
        if not os.path.exists(app_path):
            os.makedirs(app_path)
        hook_file_path = "{app_path}/hook-{hook_name}".format(
            app_path=app_path, hook_name=hook_name)
        with io.open(hook_file_path, mode='w', encoding='utf-8') as f:
            f.write(hook_source)
        return hook_file_path
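
The doctest above also imports `b64encode_utf8`. Assuming it mirrors the decoding helper sketched under Example 1, its counterpart would look like this (again a hypothetical reconstruction):

import base64

def b64encode_utf8(source):
    # Hypothetical counterpart: UTF-8 encode the text, then base64 it.
    return base64.b64encode(source.encode('utf-8')).decode('utf-8')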
Example 4
    def _refresh_lifecycle_hook_script(self, lifecycle_hook, lifecycle_hooks,
                                       bucket, prefix):
        key_name = '{prefix}/{lifecycle_hook}'.format(prefix=prefix, lifecycle_hook=lifecycle_hook)
        lifecycle_hook_source = lifecycle_hooks.get(lifecycle_hook, None) if lifecycle_hooks else None
        if lifecycle_hook_source:
            lifecycle_hook_source = b64decode_utf8(lifecycle_hook_source)
            k = bucket.new_key(key_name)
            k.set_contents_from_string(lifecycle_hook_source)
            k.close()
            log('INFO: uploaded {key}'.format(key=key_name), self._log_file)
        else:
            bucket.delete_key(key_name)
            log('INFO: deleted {key}'.format(key=key_name), self._log_file)
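
Example 4 follows an upload-or-delete convention: a configured hook is pushed to S3, an absent one has its stale key removed. A standalone sketch of the same convention against boto 2's S3 API (the bucket and key names below are hypothetical):

import boto

def sync_hook_key(bucket, key_name, source):
    # Upload source under key_name, or drop the key when there is no source.
    if source:
        k = bucket.new_key(key_name)
        k.set_contents_from_string(source)
    else:
        bucket.delete_key(key_name)

conn = boto.connect_s3()
bucket = conn.get_bucket('ghost-packages')  # hypothetical bucket name
sync_hook_key(bucket, 'ghost/AppName/prod/webfront/post_bootstrap', u'echo hi')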
Example 5
    def _execute_swap_hook(online_app, to_deploy_app, script_name,
                           script_message, log_file):
        for status, app in (('active', online_app), ('inactive', to_deploy_app)):
            script = app.get('blue_green', {}).get('hooks', {}).get(script_name, None)
            if script:
                script_path = os.path.join(get_path_from_app_with_color(app),
                                           script_name)
                with open(script_path, 'w') as f:
                    f.write(b64decode_utf8(script))

                script_env = os.environ.copy()
                script_env.update(get_ghost_env_variables(app))

                gcall('bash {}'.format(script_path),
                      '{}: Execute'.format(
                          script_message.format(status=status)),
                      log_file,
                      env=script_env)
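
Example 5 only runs a hook when one is present under `blue_green.hooks` in the app document. A minimal hypothetical fragment showing the shape it expects, using the `b64encode_utf8` helper sketched under Example 3 (only the fields the method reads are included):

# Hypothetical app documents; pre_swap runs for online_app only,
# since to_deploy_app carries no hooks.
online_app = {
    'blue_green': {
        'hooks': {
            'pre_swap': b64encode_utf8(u'echo preparing swap'),
        },
    },
}
to_deploy_app = {'blue_green': {'hooks': {}}}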
Example 6
def check_app_b64_scripts(updates):
    """
    Trigger a base64 decode on every script given to the API in order to verify their validity
    :param updates: Modules configurations
    """
    if 'modules' in updates:
        for mod in updates['modules']:
            for script in [
                    'build_pack', 'pre_deploy', 'post_deploy',
                    'after_all_deploy'
            ]:
                if script in mod:
                    try:
                        b64decode_utf8(mod[script])
                    except (binascii.Error, UnicodeDecodeError):
                        raise GhostAPIInputError(
                            'Error decoding script "{s}" in module: "{m}"'.
                            format(s=script, m=mod["name"]))
    if 'lifecycle_hooks' in updates:
        for script in [
                'pre_buildimage', 'post_buildimage', 'pre_bootstrap',
                'post_bootstrap'
        ]:
            if script in updates['lifecycle_hooks']:
                try:
                    b64decode_utf8(updates['lifecycle_hooks'][script])
                except (binascii.Error, UnicodeDecodeError):
                    raise GhostAPIInputError(
                        'Error decoding a script in lifecycle hook: {h}'.
                        format(h=script))
    if 'blue_green' in updates and 'hooks' in updates['blue_green']:
        for script in ['pre_swap', 'post_swap']:
            if script in updates['blue_green']['hooks']:
                try:
                    b64decode_utf8(updates['blue_green']['hooks'][script])
                except (binascii.Error, UnicodeDecodeError):
                    raise GhostAPIInputError(
                        'Error decoding a script in blue/green hook: {h}'.
                        format(h=script))
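
To see this validation reject a payload, feed it a module whose script is not valid base64; per the except clauses above, `b64decode_utf8` is expected to raise `binascii.Error` or `UnicodeDecodeError` on bad input. The module name and payload here are hypothetical, and `check_app_b64_scripts` and `GhostAPIInputError` are assumed importable from the module shown above:

updates = {
    'modules': [
        # 'not base64!!' cannot be decoded: the remaining alphabet
        # characters do not form a valid base64 quantum.
        {'name': 'web', 'pre_deploy': 'not base64!!'},
    ],
}
try:
    check_app_b64_scripts(updates)
except GhostAPIInputError as e:
    print(e)  # Error decoding script "pre_deploy" in module: "web"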
Example 7
    def _execute_deploy(self, module, fabric_execution_strategy,
                        safe_deployment_strategy):
        """
        Returns the deployment id
        """

        now = datetime.datetime.utcnow()
        ts = calendar.timegm(now.timetuple())

        git_repo, clone_path, revision, commit, commit_message = self._get_module_sources(
            module)

        # Store predeploy script in tarball
        if 'pre_deploy' in module:
            log("Create pre_deploy script for inclusion in target package",
                self._log_file)
            predeploy_source = b64decode_utf8(module['pre_deploy'])
            with io.open(clone_path + '/predeploy', mode='w',
                         encoding='utf-8') as f:
                f.write(predeploy_source)
            gcall('du -hs .', 'Display current build directory disk usage',
                  self._log_file)

        # Execute buildpack
        execute_module_script_on_ghost(self._app, module, 'build_pack',
                                       'Buildpack', clone_path, self._log_file,
                                       self._job, self._config)

        # Store postdeploy script in tarball
        if 'post_deploy' in module:
            log("Create post_deploy script for inclusion in target package",
                self._log_file)
            postdeploy_source = b64decode_utf8(module['post_deploy'])
            with io.open(clone_path + '/postdeploy',
                         mode='w',
                         encoding='utf-8') as f:
                f.write(postdeploy_source)
            gcall('du -hs .', 'Display current build directory disk usage',
                  self._log_file)

        # Store after_all_deploy script in tarball
        if 'after_all_deploy' in module:
            log(
                "Create after_all_deploy script for inclusion in target package",
                self._log_file)
            afteralldeploy_source = b64decode_utf8(module['after_all_deploy'])
            with io.open(clone_path + '/after_all_deploy',
                         mode='w',
                         encoding='utf-8') as f:
                f.write(afteralldeploy_source)
            gcall('du -hs .', 'Display current build directory disk usage',
                  self._log_file)

        # Store module metadata in tarball
        log("Create metadata file for inclusion in target package",
            self._log_file)
        module_metadata = u"""
#!/bin/bash

GHOST_MODULE_REPO="{repo}"
GHOST_MODULE_REV="{rev}"
GHOST_MODULE_COMMIT="{commit}"
GHOST_MODULE_COMMIT_MESSAGE="{commitmsg}"
GHOST_MODULE_USER="{user}"

"""
        metavars = {
            "repo": git_repo,
            "rev": revision,
            "commit": commit,
            "commitmsg": commit_message,
            "user": self._job['user']
        }
        module_metadata = module_metadata.format(**metavars)
        custom_env_vars = self._app.get('env_vars', None)
        if custom_env_vars and len(custom_env_vars):
            module_metadata = module_metadata + u''.join([
                u'export {key}="{val}" \n'.format(
                    key=env_var['var_key'], val=env_var.get('var_value', ''))
                for env_var in custom_env_vars
            ])
        with io.open(clone_path + '/.ghost-metadata',
                     mode='w',
                     encoding='utf-8') as f:
            f.write(module_metadata)
        gcall('du -hs .', 'Display current build directory disk usage',
              self._log_file)

        # Create tar archive
        pkg_name = self._package_module(module, ts, commit)

        before_update_manifest = update_app_manifest(self._app, self._config,
                                                     module, pkg_name,
                                                     self._log_file)
        try:
            all_app_modules_list = get_app_module_name_list(
                self._app['modules'])
            clean_local_module_workspace(
                get_path_from_app_with_color(self._app), all_app_modules_list,
                self._log_file)
            self._deploy_module(module, fabric_execution_strategy,
                                safe_deployment_strategy)
        except GCallException as e:
            log("Deploy error occurred, app manifest will be restored to its previous state",
                self._log_file)
            rollback_app_manifest(self._app, self._config,
                                  before_update_manifest, self._log_file)
            raise e

        if 'after_all_deploy' in module:
            log("After all deploy script found for '{0}'. Executing it.".format(module['name']),
                self._log_file)
            execute_module_script_on_ghost(self._app, module,
                                           'after_all_deploy',
                                           'After all deploy', clone_path,
                                           self._log_file, self._job,
                                           self._config)

        now = datetime.datetime.utcnow()
        deployment = {
            'app_id': self._app['_id'],
            'job_id': self._job['_id'],
            'module': module['name'],
            'revision': revision,
            'commit': commit,
            'commit_message': commit_message,
            'timestamp': ts,
            'package': pkg_name,
            'module_path': module['path'],
            '_created': now,
            '_updated': now,
        }
        return self._worker._db.deploy_histories.insert(deployment)
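
The metadata block above is plain `str.format` templating over the module's git information. A self-contained sketch with hypothetical values shows the shell fragment that ends up in `.ghost-metadata`:

metadata_template = u'\n'.join([
    u'#!/bin/bash',
    u'',
    u'GHOST_MODULE_REPO="{repo}"',
    u'GHOST_MODULE_REV="{rev}"',
    u'GHOST_MODULE_COMMIT="{commit}"',
    u'GHOST_MODULE_COMMIT_MESSAGE="{commitmsg}"',
    u'GHOST_MODULE_USER="{user}"',
])
# Hypothetical metavars, for illustration only
print(metadata_template.format(repo='git@example.com:org/app.git',
                               rev='master',
                               commit='a1b2c3d',
                               commitmsg='Fix login redirect',
                               user='deployer'))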
Example 8
    def _execute_deploy(self, module, fabric_execution_strategy, safe_deployment_strategy):
        """
        Returns the deployment id
        """

        now = datetime.datetime.utcnow()
        ts = calendar.timegm(now.timetuple())

        git_repo = module['git_repo'].strip()
        mirror_path = get_mirror_path_from_module(module)
        clone_path = get_buildpack_clone_path_from_module(self._app, module)
        lock_path = get_lock_path_from_repo(git_repo)
        revision = self._get_module_revision(module['name'])

        try:
            git_acquire_lock(lock_path, self._log_file)

            if not os.path.exists(mirror_path):
                gcall('git --no-pager clone --bare --mirror {r} {m}'.format(r=git_repo, m=mirror_path),
                      'Create local git mirror for remote {r}'.format(r=git_repo),
                      self._log_file)

            # Update existing git mirror
            os.chdir(mirror_path)
            gcall('git --no-pager gc --auto',
                  'Cleanup local mirror before update {r}'.format(r=git_repo),
                  self._log_file)
            gcall('git --no-pager fetch --all --tags --prune',
                  'Update local git mirror from remote {r}'.format(r=git_repo),
                  self._log_file)
        finally:
            git_release_lock(lock_path, self._log_file)

        # Resolve HEAD symbolic reference to identify the default branch
        head = git('--no-pager', 'symbolic-ref', '--short', 'HEAD', _tty_out=False).strip()

        # If revision is HEAD, replace it by the default branch
        if revision == 'HEAD':
            revision = head

        # If revision is a commit hash, a full intermediate clone is required before getting a shallow clone
        if self._is_commit_hash(revision):
            # Create intermediate clone from the local git mirror, chdir into it and fetch all commits
            source_path = get_intermediate_clone_path_from_module(self._app, module)
            if os.path.exists(source_path):
                gcall('chmod -R u+rwx {p}'.format(p=source_path), 'Update rights on previous intermediate clone', self._log_file)
                gcall('rm -rf {p}'.format(p=source_path), 'Removing previous intermediate clone', self._log_file)
            os.makedirs(source_path)
            os.chdir(source_path)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
            gcall('git --no-pager init', 'Git init intermediate clone', self._log_file)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
            gcall('git --no-pager remote add origin file://{m}'.format(m=mirror_path), 'Git add local mirror as origin for intermediate clone', self._log_file)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
            gcall('git --no-pager fetch origin', 'Git fetch all commits from origin', self._log_file)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
            gcall('git --no-pager checkout {r}'.format(r=revision), 'Git checkout revision into intermediate clone: {r}'.format(r=revision), self._log_file)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)

            # Create shallow clone from the intermediate clone, chdir into it and retrieve submodules
            if os.path.exists(clone_path):
                gcall('chmod -R u+rwx {p}'.format(p=clone_path), 'Update rights on previous clone', self._log_file)
                gcall('rm -rf {p}'.format(p=clone_path), 'Removing previous clone', self._log_file)
            os.makedirs(clone_path)
            os.chdir(clone_path)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
            gcall('git --no-pager clone file://{s} .'.format(s=source_path), 'Git clone from intermediate clone', self._log_file)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
            gcall('git --no-pager submodule update --init --recursive', 'Git update submodules', self._log_file)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)

            # Destroy intermediate clone
            gcall('chmod -R u+rwx {p}'.format(p=source_path), 'Update rights on previous intermediate clone', self._log_file)
            gcall('rm -rf {p}'.format(p=source_path), 'Removing intermediate clone', self._log_file)
        else:
            # Create clone from the local git mirror, chdir into it, fetch requested revision and retrieve submodules
            if os.path.exists(clone_path):
                gcall('chmod -R u+rwx {p}'.format(p=clone_path), 'Update rights on previous clone', self._log_file)
                gcall('rm -rf {p}'.format(p=clone_path), 'Removing previous clone', self._log_file)
            os.makedirs(clone_path)
            os.chdir(clone_path)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
            gcall('git --no-pager clone --depth=10 file://{m} -b {r} .'.format(m=mirror_path, r=revision), 'Git clone from local mirror with depth limited to 10 from a specific revision: {r}'.format(r=revision), self._log_file)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)
            gcall('git --no-pager submodule update --init --recursive', 'Git update submodules', self._log_file)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)

        # Extract commit information
        commit = git('--no-pager', 'rev-parse', '--short', 'HEAD', _tty_out=False).strip()
        commit_message = git('--no-pager', 'log', '--max-count=1', '--format=%s', 'HEAD', _tty_out=False).strip()

        # At last, reset remote origin URL
        gcall('git --no-pager remote set-url origin {r}'.format(r=git_repo), 'Git reset remote origin to {r}'.format(r=git_repo), self._log_file)

        # Store predeploy script in tarball
        if 'pre_deploy' in module:
            log("Create pre_deploy script for inclusion in target package", self._log_file)
            predeploy_source = b64decode_utf8(module['pre_deploy'])
            with io.open(clone_path + '/predeploy', mode='w', encoding='utf-8') as f:
                f.write(predeploy_source)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)

        # Execute buildpack
        execute_module_script_on_ghost(self._app, module, 'build_pack', 'Buildpack', clone_path,
                                       self._log_file, self._job, self._config)

        # Store postdeploy script in tarball
        if 'post_deploy' in module:
            log("Create post_deploy script for inclusion in target package", self._log_file)
            postdeploy_source = b64decode_utf8(module['post_deploy'])
            with io.open(clone_path + '/postdeploy', mode='w', encoding='utf-8') as f:
                f.write(postdeploy_source)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)

        # Store after_all_deploy script in tarball
        if 'after_all_deploy' in module:
            log("Create after_all_deploy script for inclusion in target package", self._log_file)
            afteralldeploy_source = b64decode_utf8(module['after_all_deploy'])
            with io.open(clone_path + '/after_all_deploy', mode='w', encoding='utf-8') as f:
                f.write(afteralldeploy_source)
            gcall('du -hs .', 'Display current build directory disk usage', self._log_file)

        # Store module metadata in tarball
        log("Create metadata file for inclusion in target package", self._log_file)
        module_metadata = u"""
#!/bin/bash

GHOST_MODULE_REPO="{repo}"
GHOST_MODULE_REV="{rev}"
GHOST_MODULE_COMMIT="{commit}"
GHOST_MODULE_COMMIT_MESSAGE="{commitmsg}"
GHOST_MODULE_USER="{user}"

"""
        metavars = {
            "repo": git_repo,
            "rev": revision,
            "commit": commit,
            "commitmsg": commit_message,
            "user": self._job['user']
        }
        module_metadata = module_metadata.format(**metavars)
        custom_env_vars = self._app.get('env_vars', None)
        if custom_env_vars and len(custom_env_vars):
            module_metadata = module_metadata + u''.join([u'export {key}="{val}" \n'.format(key=env_var['var_key'], val=env_var.get('var_value', '')) for env_var in custom_env_vars])
        with io.open(clone_path + '/.ghost-metadata', mode='w', encoding='utf-8') as f:
            f.write(module_metadata)
        gcall('du -hs .', 'Display current build directory disk usage', self._log_file)

        # Create tar archive
        pkg_name = self._package_module(module, ts, commit)

        before_update_manifest = update_app_manifest(self._app, self._config, module, pkg_name, self._log_file)
        try:
            all_app_modules_list = get_app_module_name_list(self._app['modules'])
            clean_local_module_workspace(get_path_from_app_with_color(self._app), all_app_modules_list, self._log_file)
            self._deploy_module(module, fabric_execution_strategy, safe_deployment_strategy)
        except GCallException as e:
            log("Deploy error occured, app manifest will be restored to its previous state", self._log_file)
            rollback_app_manifest(self._app, self._config, before_update_manifest, self._log_file)
            raise e

        if 'after_all_deploy' in module:
            log("After all deploy script found for '{0}'. Executing it.".format(module['name']), self._log_file)
            execute_module_script_on_ghost(self._app, module, 'after_all_deploy', 'After all deploy', clone_path,
                                           self._log_file, self._job, self._config)

        now = datetime.datetime.utcnow()
        deployment = {
            'app_id': self._app['_id'],
            'job_id': self._job['_id'],
            'module': module['name'],
            'revision': revision,
            'commit': commit,
            'commit_message': commit_message,
            'timestamp': ts,
            'package': pkg_name,
            'module_path': module['path'],
            '_created': now,
            '_updated': now,
        }
        return self._worker._db.deploy_histories.insert(deployment)
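
The git plumbing in Example 8 (maintain a bare local mirror, then take a shallow, single-branch clone from it over the file:// transport) can be distilled into a short standalone sketch. Paths here are hypothetical, and the project-specific locking, logging, and commit-hash handling are omitted:

import os
import subprocess

def refresh_mirror_and_clone(git_repo, mirror_path, clone_path, revision):
    # Create the bare mirror once, then keep it fresh on every deploy.
    if not os.path.exists(mirror_path):
        subprocess.check_call(['git', 'clone', '--mirror', git_repo, mirror_path])
    subprocess.check_call(['git', '--git-dir', mirror_path,
                           'fetch', '--all', '--tags', '--prune'])
    # Shallow clone of one branch or tag, as in Example 8's non-hash case;
    # the file:// URL is what allows a shallow clone from a local mirror.
    subprocess.check_call(['git', 'clone', '--depth=10', '-b', revision,
                           'file://' + mirror_path, clone_path])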