def deploy_worker(c, app):
    """Queue and run the rsync deployment commands for every target in
    ``c.runstate.push_targets`` using a map task per target.

    In dry-run mode the commands are only logged; otherwise ``app`` is run
    and a completion message is logged.

    :param c: the top-level configuration object.
    :param app: the build app to which map tasks are added.
    """
    pconf = c.system.files.data.push
    pconf = dict_from_list('target', pconf)

    cmds = []
    for target in c.runstate.push_targets:
        d = Deploy(c)

        target_pconf = pconf[target]

        # 'publication' is a legacy alias for the 'production' environment.
        if target_pconf['env'] == 'publication':
            target_pconf['env'] = 'production'

        d.load(target_pconf)

        # Materialize the commands once. The original called
        # d.deploy_commands() twice (once for the map task, once for the
        # dry-run log), regenerating the command list and yielding nothing
        # the second time if it ever returns a generator.
        commands = list(d.deploy_commands())

        map_task = app.add('map')
        map_task.iter = commands
        map_task.job = verbose_command

        cmds.extend(commands)

    if c.runstate.dry_run is True:
        for i in cmds:
            logger.info('dry run: {0}'.format(' '.join(i)))
    else:
        app.run()
        logger.info('completed deploy for: {0}'.format(' '.join(c.runstate.push_targets)))
def deploy_worker(c, app):
    """Queue one task per rsync deployment command for every target in
    ``c.runstate.push_targets`` and run them, unless this is a dry run,
    in which case the commands are only logged.

    :param c: the top-level configuration object.
    :param app: the build app to which tasks are added and which is run
        (unless ``c.runstate.dry_run`` is set).
    """
    pconf = c.system.files.data.push
    pconf = dict_from_list('target', pconf)

    # NOTE: the original declared an unused ``backgrounds = []`` local;
    # it was never read or appended to, so it has been removed.
    for target in c.runstate.push_targets:
        d = Deploy(c)

        target_pconf = pconf[target]

        # 'publication' is a legacy alias for the 'production' environment.
        if target_pconf['env'] == 'publication':
            target_pconf['env'] = 'production'

        d.load(target_pconf)

        for cmd in d.deploy_commands():
            task = app.add('task')
            task.args = ' '.join(cmd)
            task.job = deploy_target
            task.target = ""
            task.depends = os.path.join(c.paths.projectroot,
                                        c.paths.public_site_output)

            if c.runstate.dry_run is True:
                logger.info('dry run: {0}'.format(' '.join(cmd)))

    if c.runstate.dry_run is False:
        app.run()

    logger.info('completed deploy for: {0}'.format(' '.join(c.runstate.push_targets)))
def create_package(target, conf):
    """Build a gzipped tarball of the rendered output for ``target`` and
    write it to the build-archive directory.

    The tarball contains the rendered build output, a dump of the current
    configuration, and any configured static paths that exist on disk.

    :param target: the name of a push target, or None to use the first
        entry in the push configuration.
    :param conf: the top-level configuration object.
    """
    if target is None:
        pconf = conf.system.files.data.push[0]
        target = pconf['target']
    else:
        # Fixed: dict_from_list() takes the key name as its first argument,
        # consistent with every other push-config lookup in this file; the
        # original call omitted 'target'.
        pconf = dict_from_list('target', conf.system.files.data.push)[target]

    logger.info('creating package for target "{0}"'.format(target))

    conf_dump_path = dump_config(conf)

    arc_path = os.path.join(conf.paths.projectroot, conf.paths.buildarchive)
    arc_fn = package_filename(arc_path, target, conf)
    if not os.path.exists(arc_path):
        os.makedirs(arc_path)

    input_path = os.path.join(conf.paths.projectroot,
                              conf.paths.output,
                              pconf['paths']['local'])
    output_path_name = conf.git.branches.current

    if conf.project.branched is True:
        # Branched projects render into a per-branch subdirectory.
        input_path = os.path.join(input_path, conf.git.branches.current)
    else:
        output_path_name = os.path.split(pconf['paths']['local'])[-1]

    # ready to write the tarball
    with tarfile.open(arc_fn, 'w:gz') as t:
        t.add(name=input_path, arcname=output_path_name)

        t.add(conf_dump_path, arcname=os.path.basename(conf_dump_path))

        if 'static' in pconf['paths']:
            for path in pconf['paths']['static']:
                rendered_path = os.path.join(conf.paths.projectroot,
                                             conf.paths.public, path)
                # Static paths are best-effort: skip any that were not built.
                if os.path.exists(rendered_path):
                    t.add(name=rendered_path, arcname=path)

    logger.info('wrote build package to: {0}'.format(arc_fn))
def ingest(self, input_obj):
    # Load a replacement-token specification into this object.
    #
    # ``input_obj`` may be a dict, or a list: a single-element list holding
    # a dict is unwrapped, while a multi-document list is converted to a
    # dict keyed by each document's 'edition' field.
    if isinstance(input_obj, list):
        if len(input_obj) == 1 and isinstance(input_obj[0], dict):
            input_obj = input_obj[0]
        else:
            try:
                input_obj = dict_from_list('edition', input_obj)
            except KeyError:
                # A document in the list lacked an 'edition' key; abort
                # without updating any tokens.
                logger.error(
                    "replacement specification is malformed. documents need editions"
                )
                return

    if self.conf.project.edition == self.conf.project.name:
        # Single-edition project: tokens may be nested under the project
        # name, or the input may itself be the token mapping.
        if self.conf.project.name in input_obj:
            self._update_tokens(input_obj[self.conf.project.name])
        else:
            if self._validate_tokens(input_obj) is True:
                self._update_tokens(input_obj)
    else:
        # Multi-edition project: pull tokens for the current edition only.
        if self.conf.project.edition in input_obj:
            self._update_tokens(input_obj[self.conf.project.edition])
def create_package(target, conf):
    """Collect the rendered output for ``target``, any configured static
    paths that exist on disk, and a dump of the current configuration, and
    write them into a single archive.

    :param target: the name of a push target, or None to use the first
        entry in the push configuration.
    :param conf: the top-level configuration object.
    """
    logger.info('creating package for target "{0}"'.format(target))

    if target is None:
        pconf = conf.system.files.data.push[0]
        target = pconf['target']
    else:
        pconf = dict_from_list('target', conf.system.files.data.push)[target]

    # (source path on disk, name inside the archive) pairs.
    files_to_archive = []

    if conf.project.branched is True:
        # NOTE(review): unlike the non-branched case, this path is not
        # joined with conf.paths.projectroot -- confirm that is intentional.
        branch = conf.git.branches.current
        files_to_archive.append((os.path.join(conf.paths.output, branch),
                                 branch))
    else:
        local = pconf['paths']['local']
        files_to_archive.append(
            (os.path.join(conf.paths.projectroot, conf.paths.output, local),
             os.path.split(local)[-1]))

    if 'static' in pconf['paths']:
        # Static paths are best-effort: include only those that were built.
        for static_path in pconf['paths']['static']:
            rendered = os.path.join(conf.paths.projectroot,
                                    conf.paths.public, static_path)
            if os.path.exists(rendered):
                files_to_archive.append((rendered, static_path))

    conf_dump_path = dump_config(conf)
    archive_fn = package_filename(target, conf)
    files_to_archive.append((conf_dump_path,
                             os.path.basename(conf_dump_path)))

    create_archive(files_to_archive, archive_fn)

    logger.info('wrote build package to: {0}'.format(archive_fn))
def deploy_tasks(c, app):
    """
    Deploys the build. The logic for generating the rsync commands is in
    ``giza.deploy``, and the configuration data is typically in
    ``config/push``. This function glues the config with the rsync command
    creation and then runs the commands.
    """
    push_config = dict_from_list('target', c.system.files.data.push)

    for target in c.runstate.push_targets:
        deployer = Deploy(c)

        target_config = push_config[target]
        # 'publication' is a legacy alias for the 'production' environment.
        if target_config['env'] == 'publication':
            target_config['env'] = 'production'

        deployer.load(target_config)

        for command in deployer.deploy_commands():
            command_text = ' '.join(command)

            t = app.add('task')
            t.args = command_text
            t.job = deploy_target
            t.target = ""
            t.depends = os.path.join(c.paths.projectroot,
                                     c.paths.public_site_output)

            if c.runstate.dry_run is True:
                logger.info('dry run: {0}'.format(command_text))

    logger.info('completed deploy for: {0}'.format(' '.join(
        c.runstate.push_targets)))
def run_make_operations(targets, conf):
    """
    :param list targets: A list of tuples in the form of ``(<action>,
        [option, option])`` that define build targets.

    :param Configuration conf: The top level configuration object.

    Parses the ``targets`` list and runs tasks defined, including all
    specified sphinx targets, all ``push`` deployment targets, and will
    create the ``env`` packages. Noteworthy behavior:

    - The order of options *except* for the action in the first option
      is not important.

    - If you run ``push`` target with the ``deploy`` option (i.e.
      ``push-deploy`` or ``push-<edition>-deploy``), ``giza`` will *not*
      run the ``publish`` Sphinx build.

    - This interface assumes that all deployment targets (defined in each
      project begin with ``push-`` or ``stage-``.) If you have a project
      with different deployment targets, you will need to call ``giza
      deploy`` directly.

    - The ``env`` cache targets take the same options as the Sphinx
      builders and package the environment for only those builders. If you
      specify ``env`` after a Sphinx target, ``giza`` will build the cache
      for only that package.
    """
    # Shared, mutable option dicts. The same dict objects are appended to
    # ``tasks`` and later tested with ``in tasks`` (dict equality), so
    # mutations made while parsing ``targets`` are visible at run time.
    sphinx_opts = {
        "worker": sphinx_publication,
        "languages": set(),
        "editions": set(),
        "builders": set()
    }
    push_opts = {"worker": deploy_tasks, "targets": set(), "type": None}
    packaging_opts = {}

    sphinx_builders = avalible_sphinx_builders()
    deploy_configs = dict_from_list('target', conf.system.files.data.push)

    tasks = []
    for action, options in targets:
        if action in sphinx_builders:
            tasks.append(sphinx_opts)
            add_sphinx_build_options(sphinx_opts, action, options, conf)
        elif action in ('stage', 'push'):
            tasks.append(push_opts)
            push_opts['type'] = action

            # Without an explicit 'deploy' option, also schedule the
            # 'publish' Sphinx build before deploying.
            if 'deploy' not in options:
                sphinx_opts['builders'].add('publish')
                tasks.append(sphinx_opts)
                add_sphinx_build_options(sphinx_opts, action, options, conf)
                conf.runstate.fast = False

            if action in deploy_configs:
                push_opts['targets'].add(action)

            # Options may name a deploy target directly, or combine with
            # the action as '<action>-<option>'.
            for build_option in options:
                deploy_target_name = hyph_concat(action, build_option)

                if build_option in deploy_configs:
                    push_opts['targets'].add(build_option)
                elif deploy_target_name in deploy_configs:
                    push_opts['targets'].add(deploy_target_name)
        elif action.startswith('env'):
            # NOTE(review): this copies sphinx_opts only when
            # packaging_opts is already populated (i.e. on a second 'env'
            # target); a first 'env' target never inherits the Sphinx
            # options. That looks inverted -- confirm intent.
            if len(packaging_opts) > 0:
                packaging_opts = copy.copy(sphinx_opts)

            packaging_opts['worker'] = env_package_worker
            tasks.append(packaging_opts)
            add_sphinx_build_options(packaging_opts, False, options, conf)
        else:
            logger.error(
                'target: {0} not defined in the make interface'.format(action))

    with BuildApp.context(conf) as app:
        if sphinx_opts in tasks:
            # Promote the accumulated Sphinx options onto the runstate
            # before invoking the Sphinx worker.
            conf.runstate.languages_to_build = list(sphinx_opts['languages'])
            conf.runstate.editions_to_build = list(sphinx_opts['editions'])
            conf.runstate.builder = list(sphinx_opts['builders'])

            if 'publish' in conf.runstate.builder:
                conf.runstate.fast = False

            derive_command('sphinx', conf)

            sphinx_opts['worker'](conf, conf.runstate, app)

        if push_opts in tasks:
            # No explicit targets: synthesize one per (language, edition)
            # combination as '<type>[-edition][-lang]'.
            if len(push_opts['targets']) == 0:
                for lang, edition in itertools.product(
                        conf.runstate.languages_to_build,
                        conf.runstate.editions_to_build):
                    push_target_name = [push_opts['type']]
                    for opt in (edition, lang):
                        if opt is not None:
                            push_target_name.append(opt)
                    push_target_name = '-'.join(push_target_name)
                    push_opts['targets'].add(push_target_name)

            conf.runstate.push_targets = list(push_opts['targets'])
            push_opts['worker'](conf, app)
            # NOTE(review): derive_command runs *after* the deploy worker
            # here, unlike the sphinx branch above -- confirm ordering.
            derive_command('deploy', conf)

        if packaging_opts in tasks:
            derive_command('env', conf)

            task = app.add('task')
            task.job = env_package_worker
            task.args = (conf.runstate, conf)
            task.target = False
            task.dependency = False