def _generate_images(cmd, dpi, width, target, source):
    command(cmd.format(cmd=_get_inkscape_cmd(), dpi=dpi, width=width,
                       target=target, source=source))
    logger.debug('generated image file {0}'.format(target))

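# A minimal sketch of calling _generate_images(): 'cmd' must be a format
# string with {cmd}, {dpi}, {width}, {target}, and {source} placeholders.
# The template below is a hypothetical Inkscape invocation, not necessarily
# the one this build system uses:
#   tmpl = '{cmd} -z -d {dpi} -w {width} -e {target} {source}'
#   _generate_images(tmpl, 96, 1000, 'build/figure.png', 'source/images/figure.svg')
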
def json_output(conf):
    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_list_file = os.path.join(conf.paths.public_site_output,
                                    'json', '.file_list')

    cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'

    json_dst = os.path.join(conf.paths.public_site_output, 'json')

    if not os.path.exists(json_dst):
        logger.debug('created directories for {0}'.format(json_dst))
        os.makedirs(json_dst)

    builder = 'json'
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        try:
            builder += '-' + conf.project.edition
        except Exception:
            # edition may be unset or not a string; fall back to plain 'json'
            pass

    command(cmd.format(src=os.path.join(conf.paths.branch_output, builder) + '/',
                       dst=json_dst))

    copy_if_needed(list_file, public_list_file)
    logger.info('deployed json files to local staging.')

def _generate_images(cmd, dpi, width, target, source):
    command(cmd.format(cmd=_get_inkscape_cmd(), dpi=dpi, width=width,
                       target=target, source=source))
    print('[image]: generated image file {0}'.format(target))

def transfer_source(conf):
    target = os.path.join(conf.paths.projectroot, conf.paths.branch_output)

    if not os.path.exists(target):
        os.makedirs(target)
        print('[sphinx-prep]: created ' + target)
    elif not os.path.isdir(target):
        raise InvalidFile('[sphinx-prep]: {0} exists and is not a directory'.format(target))

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    command('rsync --checksum --recursive --delete {0} {1}'.format(source_dir, target))

    print('[sphinx-prep]: updated source in {0}'.format(target))

def transfer_source(sconf, conf):
    target = os.path.join(conf.paths.projectroot, conf.paths.branch_source)

    if not os.path.exists(target):
        os.makedirs(target)
        logger.debug('created directory for sphinx build: {0}'.format(target))
    elif not os.path.isdir(target):
        msg = '"{0}" exists and is not a directory'.format(target)
        logger.error(msg)
        raise InvalidFile(msg)

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    # the trailing slash makes rsync copy the *contents* of source_dir into target
    command('rsync --checksum --recursive --delete {0}/ {1}'.format(source_dir, target))

    logger.info('prepared source for sphinx build in {0}'.format(target))

def include_files(conf, files=None):
    if files is not None:
        return files
    else:
        source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
        grep = command('grep -R ".. include:: /" {0} || exit 0'.format(source_dir),
                       capture=True).out

        rx = re.compile(source_dir + r'(.*):.*\.\. include:: (.*)')

        s = [m.groups()
             for m in [rx.match(d) for d in grep.split('\n')]
             if m is not None]

        def tuple_sort(k):
            return k[1]
        s.sort(key=tuple_sort)

        files = dict()

        for i in groupby(s, itemgetter(1)):
            files[i[0]] = set()
            for src in i[1]:
                if not src[0].endswith('~') and not src[0].endswith('overview.rst'):
                    files[i[0]].add(src[0])
            files[i[0]] = list(files[i[0]])
            files[i[0]].sort()

        files.update(generated_includes(conf))

        return files

def sphinx_build_worker(target, conf, sconf, do_post=True):
    sconf = sconf[target]

    dirpath = os.path.join(sconf.root, target)
    if not os.path.exists(dirpath):
        os.makedirs(dirpath)
        print('[{0}]: created {1}/{0}'.format(target, sconf.root))

    print('[sphinx] [{0}]: starting build at {1}'.format(target, timestamp()))

    tags = ' '.join(['-t ' + i for i in sconf.tags])

    cmd = 'sphinx-build -b {builder} {tags} -q -d {root}/doctrees-{builder} -c {config_path} {sphinx_args} {source} {root}/{builder}'

    sphinx_cmd = cmd.format(builder=sconf.target,
                            tags=tags,
                            root=sconf.root,
                            config_path=conf.paths.projectroot,
                            sphinx_args=get_sphinx_args(sconf.tags),
                            source=sconf.source)

    out = command(sphinx_cmd, capture=True)

    output_sphinx_stream('\n'.join([out.err, out.out]), target, conf)

    print('[sphinx] [{0}]: completed build at {1}'.format(target, timestamp()))

    finalize(target, sconf, conf)

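# With illustrative values (builder 'html', tags ['website'], root 'build'),
# sphinx_cmd above expands to roughly:
#   sphinx-build -b html -t website -q -d build/doctrees-html \
#       -c <projectroot> <sphinx args> <source> build/html
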
def prereq(sconf, conf=None):
    conf = lazy_config(conf)

    jobs = itertools.chain()

    job_count = runner(jobs)
    print('[sphinx]: processed {0} build prerequisite jobs'.format(job_count))

    generate_source(sconf, conf)

    dep_count = refresh_dependencies(conf)
    print('[sphinx]: refreshed {0} dependencies'.format(dep_count))

    command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'))

    dump_file_hashes(conf.system.dependency_cache, conf)

def include_files(files=None, conf=None):
    if files is not None:
        return files
    else:
        conf = lazy_conf(conf)

        source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
        grep = command('grep -R ".. include:: /" {0} || exit 0'.format(source_dir),
                       capture=True).out

        rx = re.compile(source_dir + r'(.*):.*\.\. include:: (.*)')

        s = [m.groups()
             for m in [rx.match(d) for d in grep.split('\n')]
             if m is not None]

        def tuple_sort(k):
            return k[1]
        s.sort(key=tuple_sort)

        files = dict()

        for i in groupby(s, itemgetter(1)):
            files[i[0]] = set()
            for src in i[1]:
                if not src[0].endswith('~'):
                    files[i[0]].add(src[0])
            files[i[0]] = list(files[i[0]])

        files.update(generated_includes(conf))

        return files

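# Sketch of what include_files() returns, assuming grep emits lines such as
# '<source_dir>/tutorial/install.txt:.. include:: /includes/steps.rst':
# the regex groups these into a mapping from each include target to the
# files that depend on it, e.g.
#   {'/includes/steps.rst': ['/tutorial/install.txt', ...], ...}
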
def finalize_dirhtml_build(builder, conf):
    pjoin = os.path.join

    error_pages(conf)

    single_html_dir = get_single_html_dir(conf)
    search_page = pjoin(conf.paths.branch_output, builder, 'index.html')

    if conf.project.name == 'mms' and mms_should_migrate(builder, conf) is False:
        return False

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=pjoin(single_html_dir, 'search.html'))

    dest = pjoin(conf.paths.projectroot, conf.paths.public_site_output)
    command('rsync -a {source}/ {destination}'.format(source=pjoin(conf.paths.projectroot,
                                                                   conf.paths.branch_output,
                                                                   builder),
                                                      destination=dest))

    print('[{0}]: migrated build to {1}'.format(builder, dest))

    if conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)

        if sitemap_exists is True:
            copy_if_needed(source_file=pjoin(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             'sitemap.xml.gz'),
                           target_file=pjoin(conf.paths.projectroot,
                                             conf.paths.public_site_output,
                                             'sitemap.xml.gz'))

    sconf = BuildConfiguration('sphinx.yaml', pjoin(conf.paths.projectroot,
                                                    conf.paths.builddata))

    if 'dirhtml' in sconf and 'excluded_files' in sconf.dirhtml:
        fns = [pjoin(conf.paths.projectroot, conf.paths.public_site_output, fn)
               for fn in sconf.dirhtml.excluded_files]

        cleaner(fns)
        print('[dirhtml] [clean]: removed excluded files from output directory')

def json_output(conf=None):
    conf = lazy_conf(conf)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')
    public_list_file = os.path.join(conf.paths.public_site_output,
                                    'json', '.file_list')

    cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'

    json_dst = os.path.join(conf.paths.public_site_output, 'json')

    if not os.path.exists(json_dst):
        os.makedirs(json_dst)

    command(cmd.format(src=os.path.join(conf.paths.branch_output, 'json') + '/',
                       dst=json_dst))

    copy_if_needed(list_file, public_list_file)
    print('[json]: deployed json files to local staging.')

def finalize_dirhtml_build(builder, conf):
    pjoin = os.path.join

    single_html_dir = get_single_html_dir(conf)
    search_page = pjoin(conf.paths.branch_output, builder, 'index.html')

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=pjoin(single_html_dir, 'search.html'))

    dest = pjoin(conf.paths.projectroot, conf.paths.public_site_output)
    command('rsync -a {source}/ {destination}'.format(source=pjoin(conf.paths.projectroot,
                                                                   conf.paths.branch_output,
                                                                   builder),
                                                      destination=dest))

    logger.info('{0}: migrated build to {1}'.format(builder, dest))

    if 'published' in conf.git.branches and conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)

        if sitemap_exists is True:
            copy_if_needed(source_file=pjoin(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             'sitemap.xml.gz'),
                           target_file=pjoin(conf.paths.projectroot,
                                             conf.paths.public_site_output,
                                             'sitemap.xml.gz'))

    sconf = BuildConfiguration('sphinx.yaml', pjoin(conf.paths.projectroot,
                                                    conf.paths.builddata))

    if 'dirhtml' in sconf and 'excluded_files' in sconf.dirhtml:
        fns = [pjoin(conf.paths.projectroot, conf.paths.public_site_output, fn)
               for fn in sconf.dirhtml.excluded_files]

        cleaner(fns)
        logger.info('removed excluded files from dirhtml output directory')

def build_job_prerequsites(sync, sconf, conf):
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        if conf.project.name != 'mms':
            if sync.satisfied('transfered_source') is False:
                transfer_source(sconf, conf)
                sync.transfered_source = True
            cond_toc = "build_toc"
        else:
            cond_name = 'transfered_' + sconf.edition
            cond_toc = 'build_toc_' + sconf.edition

            if sync.satisfied(cond_name) is False:
                cmd = 'make -C {0} {1}-source-dir={0}{2}{3} EDITION={1} generate-source-{1}'
                cmd = cmd.format(conf.paths.projectroot, sconf.edition,
                                 os.path.sep, conf.paths.branch_source)

                o = command(cmd, capture=True)
                if len(o.out.strip()) > 0:
                    print(o.out)

                sync[cond_name] = True

        if sync.satisfied(cond_toc) is False:
            # this has to go here so that MMS can generate different toc trees for
            # each edition.
            runner(toc_jobs(conf), parallel='process')
            sync[cond_toc] = True

    with update_deps_lock:
        if sync.satisfied('updated_deps') is False:
            print('[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)')
            dep_count = refresh_dependencies(conf)
            print('[sphinx-prep]: bumped timestamps of {0} files'.format(dep_count))
            sync.updated_deps = True

            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'),
                    ignore=True)
            print('[sphinx-prep]: INFO - Build in progress past critical phase ({0})'.format(conf.paths.branch_source))

            dump_file_hashes(conf)

    print('[sphinx-prep]: build environment prepared for sphinx.')

def generate_source(sconf, conf=None):
    conf = lazy_config(conf)

    target = sconf.build_source

    if not os.path.exists(target):
        os.makedirs(target)
        print('[sphinx-prep]: created ' + target)
    elif not os.path.isdir(sconf.build_source):
        abort('[sphinx-prep]: {0} exists and is not a directory'.format(target))

    command('rsync --checksum --recursive --delete {0}/ {1}'.format(sconf.source, target),
            capture=True)

    print('[sphinx]: updated source in {0}'.format(target))

def setup_config_remote(branch_name, conf):
    remotes = command('git remote', capture=True).out.split('\n')

    if 'config-upstream' not in remotes:
        if conf.git.remote.upstream.startswith('10gen'):
            git_url = 'git@github.com:'
        else:
            git_url = 'git://github.com/'

        command('git remote add config-upstream {0}{1}.git'.format(git_url,
                                                                   conf.git.remote.upstream))

    command('git fetch config-upstream')

    if branch_name not in command('git branch', capture=True).out.split('\n'):
        command('git branch {0} config-upstream/{0}'.format(branch_name))

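# Usage sketch (hypothetical branch name): make an upstream branch available
# locally without switching to it, e.g. before reading a file from it with
# get_file_from_branch() (defined below):
#   setup_config_remote('v2.4', conf)
#   data = get_file_from_branch('source/about.txt', branch='v2.4')
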
def build_prerequisites(conf):
    jobs = itertools.chain(build_prereq_jobs(conf),
                           manpage_jobs(conf),
                           table_jobs(conf),
                           api_jobs(conf),
                           toc_jobs(conf),
                           option_jobs(conf),
                           steps_jobs(conf),
                           release_jobs(conf),
                           intersphinx_jobs(conf),
                           image_jobs(conf))

    try:
        res = runner(jobs, parallel='process')
        print('[sphinx-prep]: built {0} pieces of content'.format(len(res)))
    except PoolResultsError:
        print('[WARNING]: sphinx prerequisites encountered errors. '
              'See output above. Continuing as a temporary measure.')

    runner(external_jobs(conf), parallel='thread')

    buildinfo_hash(conf)
    if conf.project.name != 'mms':
        # we copy source manually for mms in makefile.mms, avoiding this
        # operation to clarify the artifacts directory
        transfer_source(conf)

    print('[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)')
    dep_count = refresh_dependencies(conf)
    print('[sphinx-prep]: bumped timestamps of {0} files'.format(dep_count))

    command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'),
            ignore=True)
    print('[sphinx-prep]: INFO - Build in progress past critical phase.')

    dump_file_hashes(conf.system.dependency_cache, conf)

    print('[sphinx-prep]: build environment prepared for sphinx.')

def render_versions(conf=None):
    if is_processed('versions', conf):
        return conf
    else:
        conf = lazy_conf(conf)

        if 'branches' not in conf.git:
            conf.git.branches = AttributeDict()

        version_config_file = os.path.join(conf.paths.builddata,
                                           'published_branches.yaml')

        try:
            vconf_data = get_file_from_branch(version_config_file, 'master')
        except CommandError:
            if get_branch() == 'master':
                return conf

            remotes = command('git remote', capture=True).out.split('\n')
            if 'origin' in remotes:
                return conf

            if 'config-upstream' not in remotes:
                command('git remote add config-upstream git://github.com/{0}.git'.format(conf.git.remote.upstream))

            command('git fetch config-upstream')

            if 'master' not in command('git branch', capture=True).out.split('\n'):
                command('git branch master config-upstream/master')

            # retry once after setting up the remote; give up if it still fails
            try:
                vconf_data = get_file_from_branch(version_config_file, 'master')
            except CommandError:
                return conf

        vconf = AttributeDict(yaml.load(vconf_data))

        conf.version.update(vconf.version)
        conf.git.branches.update(vconf.git.branches)

        conf.system.processed.versions = True

        return conf

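# render_versions() expects published_branches.yaml to carry top-level
# 'version' and 'git.branches' mappings that merge into conf. A hypothetical
# minimal file (the key names inside each mapping are illustrative):
#   version:
#     published: ['2.6', '2.4']
#   git:
#     branches:
#       published: ['master', 'v2.4']
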
def build_job_prerequsites(sync, sconf, conf):
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        cond_toc = "build_toc"
        cond_name = 'transfered_source'
        cond_dep = 'updated_deps'
        cond_steps = 'build_step_files'

        if conf.project.name in ['mms', 'training']:
            cond_name += '_' + sconf.edition
            cond_toc += '_' + sconf.edition
            cond_dep += '_' + sconf.edition
            cond_steps += '_' + sconf.edition

        if sync.satisfied(cond_name) is False:
            transfer_source(sconf, conf)
            sync[cond_name] = True

        if 'excluded' in sconf:
            logger.info('removing excluded files')
            for fn in sconf.excluded:
                fqfn = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_source,
                                    fn[1:])
                if os.path.exists(fqfn):
                    if os.path.isdir(fqfn):
                        rmtree(fqfn)
                    else:
                        os.remove(fqfn)
                    logger.debug('removed {0}'.format(fqfn))
            logger.info('removed {0} files'.format(len(sconf.excluded)))

        with ProcessPool() as p:
            # these must run here so that MMS can generate different toc/steps/etc for
            # each edition.
            if sync.satisfied(cond_toc) is False:
                # even if this fails we don't want it to run more than once
                sync[cond_toc] = True
                tr = p.runner(toc_jobs(conf))
                logger.info('generated {0} toc files'.format(len(tr)))

            if sync.satisfied(cond_steps) is False:
                sync[cond_steps] = True
                sr = p.runner(steps_jobs(conf))
                logger.info('generated {0} step files'.format(len(sr)))

        if sync.satisfied(cond_dep) is False:
            logger.debug('using update deps lock.')
            logger.info('resolving all intra-source dependencies for the sphinx build now. (takes several seconds)')
            dep_count = refresh_dependencies(conf)
            logger.info('bumped {0} dependencies'.format(dep_count))
            sync[cond_dep] = True

            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'),
                    ignore=True)
            logger.info('sphinx build in progress past critical phase ({0})'.format(conf.paths.branch_source))

            dump_file_hashes(conf)
        else:
            logger.debug('dependencies already updated, lock unneeded.')

        logger.debug('releasing dependency update lock.')

    logger.info('build environment prepared for sphinx build {0}.'.format(sconf.builder))

def get_commit(path=None):
    return command('git rev-parse --verify HEAD', capture=True).out

def cmd(self, *args):
    args = ' '.join(args)

    return command(command='cd {0} ; git {1}'.format(self.path, args), capture=True)

def get_file_from_branch(path, branch='master'):
    cmd = 'git show {branch}:{path}'.format(branch=branch, path=path)

    return command(command=cmd, capture=True, ignore=False).out

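# For example (illustrative path), render_versions() above uses this to read
# the published-branches config from master without checking it out:
#   vconf_data = get_file_from_branch(
#       os.path.join(conf.paths.builddata, 'published_branches.yaml'), 'master')
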
def checkout_file(path, branch='master'):
    return command(command='git checkout {0} -- {1}'.format(branch, path),
                   capture=False, ignore=False)

def get_branch(path=None):
    # 'git symbolic-ref HEAD' prints 'refs/heads/<branch>'; return the branch name
    return command('git symbolic-ref HEAD', capture=True).out.split('/')[2]

def dirhtml_migration(conf):
    cmd = 'rsync -a {source}/ {destination}'

    command(cmd.format(source=os.path.join(conf.paths.projectroot,
                                           conf.paths.output,
                                           'dirhtml'),
                       destination=os.path.join(conf.paths.projectroot,
                                                conf.paths.public)))

def main():
    user = user_input()

    setup_logging(user)

    if user.repo == 'git@github.com:mongodb/docs.git' and user.project != 'manual':
        msg = '[test]: project and repo are not correctly matched'
        logger.error(msg)
        exit(1)

    if not os.path.exists('build'):
        os.makedirs('build')
    elif not os.path.isdir('build'):
        logger.warning('build exists but is not a directory. please investigate.')
        os.remove('build')

    root_path = os.path.abspath(os.getcwd())
    build_path = os.path.join('build', user.project)

    if os.path.exists(build_path):
        logger.info('build directory exists. continuing with quasi-incremental build.')
    else:
        logger.info('cloning repository')
        command('git clone {0} {1}'.format(user.repo, build_path))
        logger.info('cloned repository')

    os.chdir(build_path)
    logger.debug('script working directory is now {0}'.format(os.getcwd()))

    command('git fetch --all')
    logger.debug('git fetched testing repository {0} in {1}'.format(user.project, build_path))

    try:
        command('git checkout {0}'.format(user.branch))
    except CommandError:
        # the branch may not exist locally yet; if creating it also fails,
        # the branch is missing from the remote.
        try:
            command('git checkout -b {0} origin/{0}'.format(user.branch))
        except CommandError:
            logger.error('branch name {0} does not exist in remote'.format(user.branch))
            exit(1)

    command('git reset --hard HEAD~12')
    logger.debug('re-set testing head 12 commits')
    command('git pull')
    logger.debug('ran "git pull" on testing repo')

    bootstrapped_tools_path = os.path.join('build', 'docs-tools')

    if not os.path.exists(bootstrapped_tools_path):
        logger.debug("{0} does not exist".format(bootstrapped_tools_path))
        symlink(name=bootstrapped_tools_path, target=root_path)
        logger.debug('created tools symlink')
    elif os.path.islink(bootstrapped_tools_path):
        logger.debug("{0} is a link. continuing.".format(bootstrapped_tools_path))
    elif os.path.isdir(bootstrapped_tools_path) and not os.path.islink(bootstrapped_tools_path):
        logger.warning('a tools directory currently exists, removing.')
        shutil.rmtree(bootstrapped_tools_path)
        symlink(name=bootstrapped_tools_path, target=root_path)
        logger.debug('created tools symlink')

    logger.info('bootstrapping.')
    command('python bootstrap.py safe')
    logger.info('moving on to build the publish target.')

    build_task = command('make publish', capture=True, ignore=True)
    logger.info('completed build task, moving to printing output')

    print_build_output(build_task)
    log_and_propogate_task_return(build_task)

def main():
    user = user_input()

    setup_logging(user)

    if user.repo == 'git@github.com:mongodb/docs.git' and user.project != 'manual':
        msg = '[test]: project and repo are not correctly matched'
        logger.error(msg)
        exit(1)

    if not os.path.exists('build'):
        os.makedirs('build')
    elif not os.path.isdir('build'):
        logger.warning('build exists but is not a directory. please investigate.')
        os.remove('build')

    root_path = os.path.abspath(os.getcwd())
    build_path = os.path.join('build', user.project)

    command('git remote add origin git@github.com:mongodb/docs-tools.git', ignore=True)
    command('git branch master origin/master', ignore=True)

    if os.path.exists(build_path):
        logger.info('build directory exists. continuing with quasi-incremental build.')
    else:
        logger.info('cloning repository')
        command('git clone {0} {1}'.format(user.repo, build_path))
        logger.info('cloned repository')

    if user.toolchain == 'giza':
        os.chdir('giza')
        # retry the install up to three times before declaring failure
        try:
            command('python setup.py install')
        except CommandError:
            try:
                command('python setup.py install')
            except CommandError:
                try:
                    command('python setup.py install')
                except CommandError:
                    logger.critical('giza installation failed after three attempts. declaring failure.')
                    exit(1)

        os.chdir(root_path)
        logger.info('installed giza')

    os.chdir(build_path)
    logger.debug('script working directory is now {0}'.format(os.getcwd()))

    command('git fetch --all')
    logger.debug('git fetched testing repository {0} in {1}'.format(user.project, build_path))

    try:
        command('git checkout {0}'.format(user.branch))
    except CommandError:
        # the branch may not exist locally yet; if creating it also fails,
        # the branch is missing from the remote.
        try:
            command('git checkout -b {0} origin/{0}'.format(user.branch))
        except CommandError:
            logger.error('branch name {0} does not exist in remote'.format(user.branch))
            exit(1)

    command('git reset --hard HEAD~3')
    logger.debug('re-set testing head 3 commits')
    command('git pull')
    logger.debug('ran "git pull" on testing repo')

    bootstrapped_tools_path = os.path.join('build', 'docs-tools')

    if user.toolchain == 'giza':
        if os.path.exists(bootstrapped_tools_path):
            try:
                os.remove(bootstrapped_tools_path)
            except OSError:
                shutil.rmtree(bootstrapped_tools_path)
    else:
        if not os.path.exists(bootstrapped_tools_path):
            os.makedirs('build')
            logger.debug("{0} does not exist".format(bootstrapped_tools_path))
            symlink(name=bootstrapped_tools_path, target=root_path)
            logger.debug('created tools symlink')
        elif os.path.islink(bootstrapped_tools_path):
            logger.debug("{0} is a link. continuing.".format(bootstrapped_tools_path))
        elif os.path.isdir(bootstrapped_tools_path) and not os.path.islink(bootstrapped_tools_path):
            logger.warning('a tools directory currently exists, removing.')
            shutil.rmtree(bootstrapped_tools_path)
            symlink(name=bootstrapped_tools_path, target=root_path)
            logger.debug('created tools symlink')

    logger.info('bootstrapping.')
    command('python bootstrap.py safe')
    logger.info('moving on to build the publish target.')

    if user.toolchain == 'giza':
        build_task = command('make giza-publish', capture=True, ignore=True)
    elif user.toolchain == 'legacy':
        build_task = command('make publish', capture=True, ignore=True)

    logger.info('completed build task, moving to printing output')

    print_build_output(build_task)
    log_and_propogate_task_return(build_task)