def migrate_legacy_apiarg(task, fn, conf, silent=False):
    legacy_data = ingest_yaml_list(fn)

    # strip the branch output prefix from the file name before transforming
    new_data, meta = transform_data(task, legacy_data,
                                    fn[len(os.path.join(conf.paths.projectroot,
                                                        conf.paths.branch_output)) + 1:],
                                    silent, conf)

    old_base = os.path.basename(fn)

    if not old_base.startswith(meta['operation']):
        meta['operation'] = old_base[:-5].split('-', 1)[0]

    # the tag is whatever follows the operation in the file name, with the
    # '.yaml' extension stripped
    tag = old_base[:-5][len(meta['operation']) + 1:]

    if tag.startswith('-'):
        tag = tag[1:]

    if tag == 'fields':
        tag = 'field'

    new_fn_base = hyph_concat('apiargs', meta['interface'], meta['operation'], tag)
    new_fn_base = new_fn_base + '.yaml'

    if task == 'source':
        new_fn = os.path.join(conf.paths.projectroot, conf.paths.includes, new_fn_base)
    elif task == 'branch':
        new_fn = os.path.join(conf.paths.projectroot, conf.paths.branch_includes, new_fn_base)

    return new_data, new_fn
def _resolve_config_data(self, fn, basename):
    logger.debug('resolving config data from file ' + fn)

    if fn is None:
        return []
    else:
        data = ingest_yaml_list(fn)

        mapping = {
            'sphinx_local': SphinxLocalConfig,
            'sphinx-local': SphinxLocalConfig,
            'manpages': ManpageConfig,
            'pdfs': PdfConfig,
            'intersphinx': IntersphinxConfig,
            'corpora': CorporaConfig,
        }

        # recur_mapping for config objects that subclass RecursiveConfigurationBase
        recur_mapping = {
            'translate': TranslateConfig,
        }

        if basename in mapping:
            data = [mapping[basename](doc) for doc in data]
        elif basename in recur_mapping:
            data = [recur_mapping[basename](doc, self.conf) for doc in data]
        elif basename == 'htaccess':
            l = HtaccessData()
            l.conf = self.conf
            l.extend(data)
            data = l

        if len(data) == 1 and basename not in self._always_list_configs:
            return data[0]
        else:
            return data
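# Illustrative note (not from the original source): the method above dispatches
# on the config file's basename, so a hypothetical call might look like the
# following, wrapping each YAML document from 'pdfs.yaml' in a PdfConfig object
# and unwrapping the list when the file holds a single document.
#
#   data = config_loader._resolve_config_data('config/pdfs.yaml', 'pdfs')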
def ingest(self, src):
    if not isinstance(src, list) and os.path.isfile(src):
        src = ingest_yaml_list(src)

    for doc in src:
        self.add(doc)

    if self.collection is None:
        m = 'all examples must have a collection'
        logger.error(m)
        raise InheritableContentError(m)
def populate_external_param(fn, basename, projectdir, sourcedir):
    if fn.startswith('/'):
        fn = os.path.join(sourcedir, fn[1:])

    # fall back through candidate locations: the path as given, then relative to
    # the basename, then relative to the project source directory. (The original
    # used two sibling "except OSError" clauses, which left the second handler
    # unreachable; nesting restores the intended cascade.)
    try:
        ext_param = ingest_yaml_list(fn)
    except OSError:
        try:
            ext_param = ingest_yaml_list(os.path.join(basename, fn))
            fn = os.path.join(basename, fn)
        except OSError:
            fn = os.path.join(projectdir, sourcedir, fn)
            ext_param = ingest_yaml_list(fn)

    o = {}

    for param in ext_param:
        # leaving the object sub-document unmodified; if we use it at some point,
        # we might need to modify here.
        o[param['name']] = param

    return fn, o
def ingest(self, fn):
    if self.source_dirname is None:
        self.source_dirname = os.path.dirname(os.path.abspath(fn))

    input_sources = ingest_yaml_list(os.path.join(self.source_dirname,
                                                  os.path.basename(fn)))

    self.cache[fn] = dict()
    self.source_files.append(input_sources)

    for option in input_sources:
        opt = Option(option)
        self.cache_option(opt, fn)

    self.resolve(fn)
def ingest(self, fn):
    if self.source_dirname is None:
        self.source_dirname = os.path.dirname(os.path.abspath(fn))

    input_fn = os.path.join(self.source_dirname, os.path.basename(fn))
    input_sources = ingest_yaml_list(input_fn)

    self.cache[fn] = dict()
    self.source_files.append(input_sources)

    for option in input_sources:
        opt = Option(option)
        opt.source_filenames = [input_fn]
        self.cache_option(opt, fn)

    self.resolve(fn)
def generated_includes(conf):
    toc_spec_files = []
    step_files = []

    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        base = os.path.basename(fn)

        if base.startswith('toc-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('steps'):
            step_files.append(fn)
        elif base.startswith('example'):
            # example files, for the purpose of this, have the same structure as
            # steps, so we can just use that:
            step_files.append(fn)

    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]
    mapping = {}

    for spec_file in toc_spec_files:
        if os.path.exists(spec_file):
            data = ingest_yaml_doc(spec_file)
        else:
            continue

        deps = [os.path.join(path_prefix, i) for i in data['sources']]

        mapping[spec_file[maskl:]] = deps

    for step_def in step_files:
        data = ingest_yaml_list(step_def)

        deps = []
        for step in data:
            if 'source' in step:
                deps.append(step['source']['file'])

        if len(deps) != 0:
            deps = [os.path.join(path_prefix, i) for i in deps]

            mapping[step_def[maskl:]] = deps

    return mapping
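# Hedged sketch of the return value (paths below are hypothetical):
# generated_includes maps each spec or step file, expressed relative to the
# source directory, to the include files it depends on, e.g.:
#
#   {'/includes/toc-spec-programs.yaml': ['/includes/toc-programs.yaml'],
#    '/includes/steps-install.yaml': ['/includes/steps-shared.yaml']}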
def primer_migration_tasks(conf, app):
    "Migrates all manual files to primer according to the spec, as needed."

    migration_paths = get_migration_specifications(conf)

    if len(migration_paths) == 0:
        return False
    else:
        migrations = ingest_yaml_list(*migration_paths)

        munge_jobs = []
        for page in migrations:
            if 'sources' in page:
                migrations.extend(convert_multi_source(page))
                continue

            page = fix_migration_paths(page)

            fq_target, fq_source = resolve_page_path(page, conf)

            if page['source'].endswith('/'):
                migrations.extend(directory_expansion(fq_source, page, conf))
                continue

            page = trim_leading_slash_from_pages(page)

            prev = build_migration_task(fq_target, fq_source, app)

            if 'truncate' in page:
                build_truncate_task(page['truncate'], fq_target, fq_source, app)

            if 'transform' in page:
                prev.job = copy_always
                munge_jobs.append(build_transform_task(page['transform'], fq_target))

            if 'append' in page:
                prev.job = copy_always
                build_append_task(page, fq_target, migration_paths, app)

        post_process_tasks(app=app, tasks=munge_jobs)

        msg = 'added {0} migration jobs'.format(len(migrations))
        logger.info(msg)

        return True
def clean(conf):
    "Removes all migrated primer files according to the current spec."

    migration_paths = get_migration_specifications(conf)
    migrations = ingest_yaml_list(*migration_paths)

    targets = []
    for page in migrations:
        if 'sources' in page:
            migrations.extend(convert_multi_source(page))
            continue

        page = fix_migration_paths(page)

        targets.append(os.path.join(conf.paths.projectroot,
                                    conf.paths.source,
                                    page['target']))

    # iterate explicitly rather than using map() so the removals run eagerly
    # under Python 3 as well
    for target in targets:
        verbose_remove(target)

    logger.info('clean: removed {0} files'.format(len(targets)))
def generated_includes(conf):
    toc_spec_files = []
    step_files = []

    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        base = os.path.basename(fn)

        if base.startswith('toc-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('steps'):
            step_files.append(fn)
        elif base.startswith('example'):
            # example files, for the purpose of this, have the same structure as
            # steps, so we can just use that:
            step_files.append(fn)

    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]
    mapping = {}

    for spec_file in toc_spec_files:
        if os.path.exists(spec_file):
            data = ingest_yaml_doc(spec_file)
        else:
            continue

        deps = [os.path.join(path_prefix, i) for i in data['sources']]

        mapping[spec_file[maskl:]] = deps

    for step_def in step_files:
        data = ingest_yaml_list(step_def)

        deps = []
        for step in data:
            if 'source' in step:
                deps.append(step['source']['file'])

        if len(deps) != 0:
            deps = [os.path.join(path_prefix, i) for i in deps]

            mapping[step_def[maskl:]] = deps

    return mapping
def primer_migration_tasks(conf, app):
    "Migrates all manual files to primer according to the spec, as needed."

    migration_paths = get_migration_specifications(conf)

    if len(migration_paths) == 0:
        return False
    else:
        migrations = ingest_yaml_list(*migration_paths)

        munge_jobs = []
        for page in migrations:
            if 'sources' in page:
                migrations.extend(convert_multi_source(page))
                continue

            page = fix_migration_paths(page)

            fq_target, fq_source = resolve_page_path(page, conf)

            if page['source'].endswith('/'):
                migrations.extend(directory_expansion(fq_source, page, conf))
                continue

            page = trim_leading_slash_from_pages(page)

            prev = build_migration_task(fq_target, fq_source, app)

            if 'truncate' in page:
                build_truncate_task(page['truncate'], fq_target, app)

            if 'transform' in page:
                prev.job = copy_always
                munge_jobs.append(build_transform_task(page['transform'], fq_target))

            if 'append' in page:
                prev.job = copy_always
                build_append_task(page, fq_target, migration_paths, app)

        post_process_tasks(app=app, tasks=munge_jobs)

        msg = 'added {0} migration jobs'.format(len(migrations))
        logger.info(msg)

        return True
def clean(conf, app):
    "Removes all migrated primer files according to the current spec."

    migration_paths = get_migration_specifications(conf)
    migrations = ingest_yaml_list(*migration_paths)

    targets = []
    for page in migrations:
        if 'sources' in page:
            migrations.extend(convert_multi_source(page))
            continue

        page = fix_migration_paths(page)

        targets.append(os.path.join(conf.paths.projectroot,
                                    conf.paths.source,
                                    page['target']))

    t = app.add('map')
    t.job = verbose_remove
    t.iter = targets
    t.description = 'clean primer migrations'

    logger.info('clean: removed {0} files'.format(len(targets)))
def populate_external_param(fn, basename, projectdir, sourcedir):
    if fn.startswith('/'):
        fn = os.path.join(sourcedir, fn[1:])

    fns = [fn,
           os.path.join(basename, fn),
           os.path.join(projectdir, fn),
           os.path.join(projectdir, sourcedir, fn)]

    ext_param = []
    for pfn in fns:
        if os.path.isfile(pfn):
            ext_param = ingest_yaml_list(pfn)

    o = {}

    for param in ext_param:
        # leaving the object sub-document unmodified; if we use it at some point,
        # we might need to modify here.
        o[param['name']] = param

    return fn, o
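# Minimal usage sketch (the file name and parameter names are hypothetical): each
# document in the ingested YAML list is expected to carry a 'name' field, and the
# function returns the path it started from plus a name-keyed lookup table.
#
#   fn, params = populate_external_param('/includes/apiargs-connect.yaml',
#                                        'source', '/srv/project', 'source')
#   params['session']  # full parameter document for the "session" argument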
def _resolve_config_data(self, fn, basename):
    logger.debug('resolving config data from file ' + fn)

    if fn is None:
        return []
    else:
        data = ingest_yaml_list(fn)

        mapping = {
            'sphinx_local': SphinxLocalConfig,
            'sphinx-local': SphinxLocalConfig,
            'manpages': ManpageConfig,
            'pdfs': PdfConfig,
            'intersphinx': IntersphinxConfig,
            'corpora': CorporaConfig,
        }

        # recur_mapping for config objects that subclass RecursiveConfigurationBase
        recur_mapping = {
            'translate': TranslateConfig,
        }

        if basename in mapping:
            data = [mapping[basename](doc) for doc in data]
        elif basename in recur_mapping:
            data = [recur_mapping[basename](doc, self.conf) for doc in data]
        elif basename == 'htaccess':
            l = HtaccessData()
            l.conf = self.conf
            l.extend(data)
            data = l
        elif basename == 'replacement':
            data = ReplacementData(data, self.conf)
            return data

        if len(data) == 1 and basename not in self._always_list_configs:
            return data[0]
        else:
            return data
def populate_external_param(fn, basename, projectdir, sourcedir):
    if fn.startswith('/'):
        fn = os.path.join(sourcedir, fn[1:])

    # the original used a bare Python 2 print statement here; log the lookup
    # context through the module logger instead
    logger.debug('resolving external params: ' +
                 ', '.join((fn, basename, projectdir, sourcedir)))

    fns = [fn,
           os.path.join(basename, fn),
           os.path.join(projectdir, fn),
           os.path.join(projectdir, sourcedir, fn)]

    ext_param = []
    for pfn in fns:
        if os.path.isfile(pfn):
            ext_param = ingest_yaml_list(pfn)

    o = {}

    for param in ext_param:
        # leaving the object sub-document unmodified; if we use it at some point,
        # we might need to modify here.
        o[param['name']] = param

    return fn, o
def __init__(self, fn, cache=None):
    if cache is None:
        cache = dict()

    self.source_fn = fn
    self.agg_sources = cache
    self.source_list = ingest_yaml_list(self.source_fn)
    self.source_dir = os.path.dirname(self.source_fn)
    self.source = dict()

    sort_needed = False

    for idx, step in enumerate(self.source_list):
        if 'stepnum' not in step:
            step['stepnum'] = idx + 1
        else:
            sort_needed = True

        if 'source' in step or 'inherit' in step:
            if 'source' in step:
                source_file = step['source']['file']
                source_ref = step['source']['ref']
            elif 'inherit' in step:
                source_file = step['inherit']['file']
                source_ref = step['inherit']['ref']

            if source_file in self.agg_sources:
                current_step = self.agg_sources[source_file].get_step(source_ref)
                msg = 'resolved ref "{0}" from file "{1}" using step cache'
                logger.debug(msg.format(source_ref, source_file))
            else:
                msg = 'could *not* resolve ref "{0}" from file "{1}" with step cache'
                logger.debug(msg.format(source_ref, source_file))

                if not os.path.exists(fn):
                    msg = 'file {0} does not exist'.format(fn)
                    logger.error(msg)
                    raise InvalidStep(msg)
                elif fn in self.agg_sources or source_file in self.agg_sources:
                    msg = 'hitting recursion issue on {0}'.format(fn)
                    logger.error(msg)
                    raise InvalidStep(msg)
                else:
                    msg = 'reading and caching step {0} from {1}'
                    logger.debug(msg.format(source_ref, source_file))

                    if source_file == os.path.basename(self.source_fn):
                        current_step = self.get_step(source_ref)
                    else:
                        steps = Steps(os.path.join(self.source_dir, source_file),
                                      self.agg_sources)
                        current_step = steps.get_step(source_ref)

                        self.agg_sources[source_file] = steps
                        self.agg_sources.update(steps.agg_sources)

                        logger.debug('successfully cached {0}'.format(source_file))

            if current_step is None:
                msg = 'Missing ref for {0}:"{1}" in step file "{2}"'.format(
                    source_file, source_ref, os.path.basename(self.source_fn))
                logger.error(msg)
                raise InvalidStep(msg)

            current_step.update(step)

            self._validate_step(current_step, ['ref', 'title'])
            self.source[source_ref] = current_step
            self.source_list[idx] = current_step
        else:
            self._validate_step(step, ['ref', 'title'])
            self.source[step['ref']] = step

    if sort_needed is True:
        self.source_list.sort(key=lambda k: k['stepnum'])
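# Illustrative sketch, assuming hypothetical file names and refs: a step that
# names a 'source' (or 'inherit') file and ref is resolved against that file,
# cached in agg_sources, and merged with the local overrides before validation.
#
#   # steps-install.yaml
#   # - ref: download
#   #   title: Download the package
#   # - source:
#   #     file: steps-shared.yaml
#   #     ref: verify
#
#   steps = Steps('steps-install.yaml')
#   steps.source_list   # resolved, stepnum-ordered list of step documents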
def add_file(self, fn):
    if fn not in self.cache or self.cache[fn] == []:
        data = ingest_yaml_list(fn)
        self.cache[fn] = self.content_class(data, self, self.conf)
    else:
        logger.info('populated file {0} exists in the cache'.format(fn))
def ingest(self, src):
    if not isinstance(src, list) and os.path.exists(src):
        src = ingest_yaml_list(src)

    for doc in src:
        self.add(doc)
def __init__(self, fn, cache=None):
    if cache is None:
        cache = dict()

    self.source_fn = fn
    self.agg_sources = cache
    self.source_list = ingest_yaml_list(self.source_fn)
    self.source_dir = os.path.dirname(self.source_fn)
    self.source = dict()

    sort_needed = False

    for idx, step in enumerate(self.source_list):
        if 'stepnum' not in step:
            step['stepnum'] = idx + 1
        else:
            sort_needed = True

        if 'source' in step or 'inherit' in step:
            if 'source' in step:
                source_file = step['source']['file']
                source_ref = step['source']['ref']
            elif 'inherit' in step:
                source_file = step['inherit']['file']
                source_ref = step['inherit']['ref']

            if source_file in self.agg_sources:
                current_step = self.agg_sources[source_file].get_step(source_ref)
                msg = 'resolved ref "{0}" from file "{1}" using step cache'
                logger.debug(msg.format(source_ref, source_file))
            else:
                msg = 'could *not* resolve ref "{0}" from file "{1}" with step cache'
                logger.debug(msg.format(source_ref, source_file))

                if not os.path.exists(fn):
                    msg = 'file {0} does not exist'.format(fn)
                    logger.error(msg)
                    raise InvalidStep(msg)
                elif fn in self.agg_sources or source_file in self.agg_sources:
                    msg = 'hitting recursion issue on {0}'.format(fn)
                    logger.error(msg)
                    raise InvalidStep(msg)
                else:
                    msg = 'reading and caching step {0} from {1}'
                    logger.debug(msg.format(source_ref, source_file))

                    if source_file == os.path.basename(self.source_fn):
                        current_step = self.get_step(source_ref)
                    else:
                        steps = Steps(os.path.join(self.source_dir, source_file),
                                      self.agg_sources)
                        current_step = steps.get_step(source_ref)

                        self.agg_sources[source_file] = steps
                        self.agg_sources.update(steps.agg_sources)

                        logger.debug('successfully cached {0}'.format(source_file))

            if current_step is None:
                msg = 'Missing ref for {0}:"{1}" in step file "{2}"'.format(
                    source_file, source_ref, os.path.basename(self.source_fn))
                logger.error(msg)
                raise InvalidStep(msg)

            current_step.update(step)

            self._validate_step(current_step, ['ref', 'title'])
            self.source[source_ref] = current_step
            self.source_list[idx] = current_step
        else:
            self._validate_step(step, ['ref', 'title'])
            self.source[step['ref']] = step

    if sort_needed is True:
        self.source_list.sort(key=lambda k: k['stepnum'])
def _generate_api_param(source, target, conf):
    r = generate_params(ingest_yaml_list(source), source, conf)
    r.write(target)

    logger.info('rebuilt {0}'.format(target))