def go_tests():
    go_root = join(AMPIFY_ROOT, 'src', 'amp')
    chdir(go_root)
    run_command([MAKE, 'nuke'])
    _, retval = run_command(
        [MAKE, 'install', 'test'], retcode=True, redirect_stderr=False,
        redirect_stdout=False
        )
    if retval:
        sys.exit(retval)
def delete(self, prop, value_regex=None, section=None, all=None):
    if self.scm == 'git':
        if all:
            args = ['--unset-all', prop]
            if value_regex:
                args.append(value_regex)
        elif section:
            args = ['--remove-section', prop]
        else:
            args = ['--unset', prop]
            if value_regex:
                args.append(value_regex)
        run_command(['git', 'config'] + args)
        self._config_cache.clear()
def set(self, prop, value):
    if self.scm == 'git':
        _, error = run_command(['git', 'config', prop, value], retcode=True)
        if error:
            raise IOError("Couldn't set: git config %s %s" % (prop, value))
        self._config_cache[prop] = value
def root(self):
    if not self._root:
        if self.scm == 'git':
            self._root = abspath(
                run_command(['git', 'rev-parse', '--show-cdup']).strip()
                )
    return self._root
def get_git_info(filename):
    """Extract info from the Git repository."""

    environ['TZ'] = 'UTC'
    git_info = run_command(['git', 'log', '--pretty=raw', '--', filename])
    info = {'__git__': False}
    if (not git_info) or git_info.startswith('fatal:'):
        info['__updated__'] = datetime.utcfromtimestamp(
            stat(filename).st_mtime
            )
        return info
    info['__git__'] = True
    for line in git_info.splitlines():
        if line.startswith('author'):
            email, timestamp, tz = line.split()[-3:]
            email = email.lstrip('<').rstrip('>')
            if '(' in email:
                email = email.split('(')[0].strip()
            info['__by__'] = email
            info['__updated__'] = datetime.utcfromtimestamp(float(timestamp))
            break
    return info
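# Illustrative usage only -- the filename below is hypothetical. ``get_git_info``
# returns a dict whose ``__git__`` key reports whether Git history was found for
# the file, falling back to the filesystem mtime otherwise:
#
#   info = get_git_info('README')
#   if info['__git__']:
#       print info['__by__'], info['__updated__']  # author email, UTC datetime
#   else:
#       print info['__updated__']                  # mtime fallback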
def main(argv=None, show_help=False):

    argv = argv or sys.argv[1:]

    # Set the script name to ``amp`` so that OptionParser error messages don't
    # display a meaningless ``main.py`` to end users.
    sys.argv[0] = 'amp'

    usage = ("""Usage: amp <command> [options]

Commands:

%s
 version  show the version number and exit

See `amp help <command>` for more info on a specific command.""" % '\n'.join(
        " %-8s %s" % (cmd, COMMANDS[cmd].help) for cmd in sorted(COMMANDS)
        ))

    autocomplete(
        OptionParser(add_help_option=False),
        ListCompleter(AUTOCOMPLETE_COMMANDS.keys()),
        subcommands=AUTOCOMPLETE_COMMANDS
        )

    if not argv:
        show_help = True
    else:
        command = argv[0]
        argv = argv[1:]
        if command in ['-h', '--help']:
            show_help = True
        elif command == 'help':
            if argv:
                command = argv[0]
                argv = ['--help']
            else:
                show_help = True
        if command in ['-v', '--version', 'version']:
            print('amp version %s' % ampify.__release__)
            sys.exit()

    if show_help:
        print(usage)
        sys.exit(1)

    if command in COMMANDS:
        return COMMANDS[command](argv)

    # We support git-command like behaviour. That is, if there's an external
    # binary named ``amp-foo`` available on the ``$PATH``, then running
    # ``amp foo`` will automatically delegate to it.
    try:
        output, retcode = run_command(
            ['amp-%s' % command] + argv, retcode=True, redirect_stdout=False,
            redirect_stderr=False
            )
    except CommandNotFound:
        exit("ERROR: Unknown command %r" % command)

    if retcode:
        sys.exit(retcode)
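# For instance (hypothetical command name): if no built-in ``deploy`` command is
# registered in COMMANDS, then ``amp deploy staging`` falls through to the
# external-binary branch above and runs ``amp-deploy staging`` when an
# ``amp-deploy`` executable is found on the $PATH, mirroring git's
# ``git-<command>`` convention.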
def do(*cmd, **kwargs):
    if 'redirect_stdout' not in kwargs:
        kwargs['redirect_stdout'] = False
    if 'redirect_stderr' not in kwargs:
        kwargs['redirect_stderr'] = False
    if 'exit_on_error' not in kwargs:
        kwargs['exit_on_error'] = True
    return run_command(cmd, **kwargs)
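# A minimal sketch of calling ``do`` -- the commands are hypothetical and the
# keyword behaviour assumes run_command's redirect_*/exit_on_error semantics:
#
#   do('git', 'status')                      # output streams to the terminal
#   head = do('git', 'rev-parse', 'HEAD',    # capture the output instead
#             redirect_stdout=True)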
def init_build_recipes():

    if RECIPES_INITIALISED:
        return

    # Try getting a lock to avoid concurrent builds.
    lock(BUILD_LOCK)

    mkdir(RECEIPTS)

    for recipe in BUILD_RECIPES:
        execfile(recipe, BUILTINS)

    for package in list(RECIPES):
        recipes = RECIPES[package]
        versions = []
        data = {}
        for recipe in recipes:
            recipe_type = recipe.get('type')
            if recipe_type == 'submodule':
                path = join(ROOT, recipe['path'])
                version = run_command(
                    ['git', 'rev-parse', 'HEAD'], cwd=path, exit_on_error=True
                    ).strip()
            elif recipe_type == 'makelike':
                contents = []
                latest = 0
                for pattern in recipe['depends']:
                    for file in glob(pattern):
                        dep_file = open(file, 'rb')
                        contents.append(dep_file.read())
                        dep_file.close()
                        dep_mtime = stat(file)[ST_MTIME]
                        if dep_mtime > latest:
                            latest = dep_mtime
                generate = 0
                for pattern in recipe['outputs']:
                    files = glob(pattern)
                    if not files:
                        generate = 1
                        break
                    for file in files:
                        if not isfile(file):
                            generate = 1
                            break
                        if stat(file)[ST_MTIME] <= latest:
                            generate = 1
                            break
                    if generate:
                        break
                if generate:
                    for file in listdir(RECEIPTS):
                        if file.startswith(package + '-'):
                            remove(join(RECEIPTS, file))
                version = sha1(''.join(contents)).hexdigest()
            else:
                version = recipe['version']
            versions.append(version)
            data[version] = recipe
        RECIPES[package] = data
        PACKAGES[package] = versions

    RECIPES_INITIALISED.append(1)
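# Hedged illustration of the recipe shapes the loop above understands -- the
# package name and values are made up; only the keys ('type', 'path', 'depends',
# 'outputs', 'version') come from the code:
#
#   RECIPES['example'] = [
#       {'type': 'submodule', 'path': 'third_party/example'},
#       {'type': 'makelike', 'depends': ['src/example/*.c'],
#        'outputs': ['build/example']},
#       {'version': '1.0'},   # fallback type: version given explicitly
#   ]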
def get(self, prop, default=None):
    if prop in self._config_cache:
        return self._config_cache[prop]
    if self.scm == 'git':
        value, error = run_command(['git', 'config', prop], retcode=True)
        if error:
            value = default
        else:
            value = value.strip()
        return self._config_cache.setdefault(prop, value)
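# Hypothetical round-trip through the get/set/delete methods shown in this file
# (the property name is made up; SCMConfig is the class these methods are
# assumed to belong to, as used by the yatiblog snippet further below):
#
#   config = SCMConfig()
#   config.set('ampify.role', 'default')
#   role = config.get('ampify.role', 'default')
#   config.delete('ampify.role')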
def is_mercurial():
    """Return whether the current directory is inside a Mercurial repo."""

    try:
        _, error = run_command(["hg", "root"], retcode=True)
    except CommandNotFound:
        return
    if not error:
        return True
def is_git():
    """Return whether the current directory is inside a Git repo."""

    try:
        _, error = run_command(
            ["git", "rev-parse", "--is-inside-work-tree"], retcode=True
            )
    except CommandNotFound:
        return
    if not error:
        return True
def js_tests(verbose=None):
    js_root = join(AMPIFY_ROOT, 'src', 'jsutil')
    chdir(js_root)
    if verbose:
        command = ['vows', '--spec']
    else:
        command = ['vows']
    _, retval = run_command(
        command, retcode=True, redirect_stderr=False, redirect_stdout=False
        )
    if retval:
        sys.exit(retval)
def build(argv=None, completer=None):
    op = OptionParser(usage="Usage: amp build [options]",
                      add_help_option=False)
    op.add_option('--role', dest='role', default='default',
                  help="specify a non-default role to build")
    options, args = parse_options(op, argv, completer)
    load_role(options.role)
    install_packages()
    _, retcode = run_command(
        [sys.executable, join(AMPIFY_ROOT, 'environ', 'assetgen')],
        retcode=True, redirect_stdout=False, redirect_stderr=False
        )
    if retcode:
        sys.exit(retcode)
    Extension('greenlet', ['greenlet/greenlet.c'], include_dirs=['greenlet'])
    ]

# ------------------------------------------------------------------------------
# run setup
# ------------------------------------------------------------------------------

if not sys.argv[1:]:
    sys.argv.extend(['build_ext', '-i'])

setup(
    name="pylibs",
    version="git",
    description="A collection of Python libraries",
    ext_modules=extensions,
    )

pylibs_path = dirname(realpath(__file__))
packages_path = ['pycrypto']

if sys.version_info < (2, 6):
    packages_path.append('pyssl')

for path in packages_path:
    path = join_path(pylibs_path, path)
    run_command(
        [sys.executable, join_path(path, 'setup.py')] + sys.argv[1:],
        exit_on_error=True, cwd=path, redirect_stdout=False,
        redirect_stderr=False
        )
Extension("simplejson._speedups", ["simplejson/_speedups.c"]), ] # ------------------------------------------------------------------------------ # run setup # ------------------------------------------------------------------------------ if not sys.argv[1:]: sys.argv.extend(['build_ext', '-i']) setup( name="pylibs", version="git", description="A collection of Python libraries", ext_modules=extensions, ) pylibs_path = dirname(realpath(__file__)) packages_path = ['pycrypto'] if sys.version_info < (2, 6): packages_path.append('pyssl') for path in packages_path: path = join_path(pylibs_path, path) run_command([sys.executable, join_path(path, 'setup.py')] + sys.argv[1:], exit_on_error=True, cwd=path, redirect_stdout=False, redirect_stderr=False)
extensions = [
    Extension("simplejson._speedups", ["simplejson/_speedups.c"]),
    Extension('greenlet', ['greenlet/greenlet.c'], include_dirs=['greenlet'])
    ]

# ------------------------------------------------------------------------------
# run setup
# ------------------------------------------------------------------------------

if not sys.argv[1:]:
    sys.argv.extend(['build_ext', '-i'])

setup(
    name="pylibs",
    version="git",
    description="A collection of Python libraries",
    ext_modules=extensions,
    )

pylibs_path = dirname(realpath(__file__))
ssl_path = join_path(pylibs_path, 'pyssl')

if sys.version_info < (2, 6):
    for path in [ssl_path]:
        run_command(
            [sys.executable, join_path(path, 'setup.py'), 'build_ext', '-i'],
            exit_on_error=True, cwd=join_path(path), redirect_stdout=False,
            redirect_stderr=False
            )
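# Typical invocations of this setup script -- the in-place extension build is
# what the default sys.argv extension above produces when no arguments are
# given:
#
#   $ python setup.py               # same as: python setup.py build_ext -i
#   $ python setup.py build_ext -i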
def main(argv=None):

    argv = argv or sys.argv[1:]

    op = OptionParser(
        usage="Usage: %prog [options] [path/to/source/directory]"
        )

    op.add_option('-d', dest='data_file', default='.articlestore',
                  help="Set the path for a data file (default: .articlestore)")

    op.add_option('-o', dest='output_directory', default='website',
                  help="Set the output directory for files (default: website)")

    op.add_option('-p', dest='package', default='',
                  help="Generate documentation for a Python package (optional)")

    op.add_option('--clean', dest='clean', default=False, action='store_true',
                  help="Flag to remove all generated output files")

    op.add_option('--force', dest='force', default=False, action='store_true',
                  help="Flag to force regeneration of all files")

    op.add_option('--quiet', dest='quiet', default=False, action='store_true',
                  help="Flag to suppress output")

    try:
        options, args = op.parse_args(argv)
    except SystemExit:
        return

    # Normalise various options and load from the config file.
    if args:
        source_directory = args[0]
        source_directory_specified = True
    else:
        source_directory = getcwd()
        source_directory_specified = False

    source_directory = abspath(source_directory)
    chdir(source_directory)

    if not isdir(source_directory):
        raise IOError("%r is not a directory!" % source_directory)

    config_file = join_path(source_directory, 'yatiblog.conf')

    if isfile(config_file):
        config_file_obj = open(config_file, 'rb')
        config_data = config_file_obj.read()
        config_file_obj.close()
        config = load_yaml(config_data)
    elif not source_directory_specified:
        raise IOError("Couldn't find: %s" % config_file)
    else:
        config = {}

    index_pages = config.pop('index_pages', [])
    if not isinstance(index_pages, list):
        raise ValueError("The 'index_pages' config value is not a list!")

    index_pages = dict(
        (index_page.keys()[0], index_page.values()[0])
        for index_page in index_pages
        )

    output_directory = join_path(source_directory,
                                 options.output_directory.rstrip('/'))

    if not isdir(output_directory):
        if not exists(output_directory):
            mkdir(output_directory)
        else:
            raise IOError("%r is not a directory!" % output_directory)

    code_pages = config.pop('code_pages', {})

    if code_pages:
        code_layout = code_pages['layout']
        code_paths = code_pages['paths']
        code_files = {}
        git_root = realpath(SCMConfig().root)
        for output_filename, input_pattern in code_paths.items():
            files = run_command(['git', 'ls-files', input_pattern],
                                cwd=git_root)
            files = filter(None, files.splitlines())
            if '%' in output_filename:
                output_pattern = True
            else:
                output_pattern = False
            for file in files:
                directory = basename(dirname(file))
                filename, ext = splitext(basename(file))
                if output_pattern:
                    dest = output_filename % {
                        'dir': directory, 'filename': filename, 'ext': ext
                        }
                else:
                    dest = output_filename
                code_files[
                    join_path(output_directory, dest + '.html')
                    ] = join_path(git_root, file)
    else:
        code_files = {}
        code_layout = None

    verbose = not options.quiet

    # See if there's a persistent data file to read from.
    data_file = join_path(source_directory, options.data_file)

    if isfile(data_file):
        data_file_obj = open(data_file, 'rb')
        data_dict = load_pickle(data_file_obj)
        data_file_obj.close()
    else:
        data_dict = {}

    # Persist the data file to disk.
    def persist_data_file():
        if data_file:
            data_file_obj = open(data_file, 'wb')
            dump_pickle(data_dict, data_file_obj)
            data_file_obj.close()

    atexit.register(persist_data_file)

    # Figure out what the generated files would be.
    source_files = [
        file for file in listfiles(source_directory) if file.endswith('.txt')
        ]

    generated_files = [
        join_path(output_directory, splitext(file)[0] + '.html')
        for file in source_files
        ]

    index_files = [join_path(output_directory, index) for index in index_pages]

    # Handle --clean support.
    if options.clean:
        for file in (generated_files + index_files + [data_file] +
                     code_files.keys()):
            if isfile(file):
                if verbose:
                    print "Removing: %s" % file
                rm(file)
        data_dict.clear()
        sys.exit()

    # Figure out layout dependencies for the source .txt files.
    layouts = {}
    sources = {}

    def init_rst_source(source_file, destname=None):

        source_path = join_path(source_directory, source_file)
        source_file_obj = open(source_path, 'rb')
        content = source_file_obj.read()
        source_file_obj.close()

        if not content.startswith('---'):
            return

        filebase, filetype = splitext(source_file)
        filebase = filebase.lower()

        env = load_yaml(match_yaml_frontmatter(content).group(1))
        layout = env.pop('layout')

        if layout not in layouts:
            load_layout(layout, source_directory, layouts)

        content = replace_yaml_frontmatter('', content)

        if MORE_LINE in content:
            lead = content.split(MORE_LINE)[0]
            content = content.replace(MORE_LINE, '')
        else:
            lead = content

        if destname:
            destname = join_path(output_directory, destname)
        else:
            destname = join_path(output_directory, filebase + '.html')

        sources[source_file] = {
            '__content__': content,
            '__deps__': find_include_refs(content),
            '__env__': env,
            '__genfile__': destname,
            '__id__': source_file,
            '__layout__': layout,
            '__lead__': lead,
            '__mtime__': stat(source_path).st_mtime,
            '__name__': basename(destname), # filebase,
            '__outdir__': output_directory,
            '__path__': source_path,
            '__rst__': True,
            '__type__': 'text',
            '__filetype__': filetype
            }

    for source_file in source_files:
        init_rst_source(source_file)

    # And likewise for any source code files.
    def init_rst_source_code(source_path, destname):

        source_file_obj = open(source_path, 'rb')
        content = source_file_obj.read()
        source_file_obj.close()

        filebase, filetype = splitext(basename(source_path))
        filebase = filebase.lower()

        sources[source_path] = {
            '__content__': content,
            '__deps__': [],
            '__env__': {'title': filebase},
            '__genfile__': destname,
            '__id__': source_path,
            '__layout__': code_layout,
            '__lead__': '',
            '__mtime__': stat(source_path).st_mtime,
            '__name__': basename(destname), # filebase,
            '__outdir__': output_directory,
            '__path__': source_path,
            '__rst__': True,
            '__type__': 'code',
            '__filetype__': filetype
            }

    if code_layout and code_layout not in layouts:
        load_layout(code_layout, source_directory, layouts)

    for destname, source_path in code_files.items():
        init_rst_source_code(source_path, destname)

    # And likewise for the ``index_pages``.
    render_last = set()

    for index_page, index_source in index_pages.items():
        layout, filetype = splitext(index_source)
        if filetype == '.genshi':
            if layout not in layouts:
                load_layout(layout, source_directory, layouts)
            source_path = join_path(source_directory, '_layouts', index_source)
            sources[index_source] = {
                '__content__': '',
                '__deps__': [],
                '__env__': {},
                '__genfile__': join_path(output_directory, index_page),
                '__id__': index_source,
                '__layout__': layout,
                '__lead__': '',
                '__mtime__': stat(source_path).st_mtime,
                '__name__': basename(index_page),
                '__outdir__': output_directory,
                '__path__': source_path,
                '__rst__': False,
                '__type__': 'index',
                '__filetype__': 'genshi'
                }
        else:
            init_rst_source(index_source, index_page)
            render_last.add(index_source)

    # Update the envs for all the source files.
    for source in sources:
        info = sources[source]
        layout = info['__layout__']
        layout_info = layouts[layout]
        if layout_info['__deps__']:
            for dep_layout in reversed(layout_info['__deps__']):
                info.update(layouts[dep_layout]['__env__'])
        info.update(layouts[layout]['__env__'])
        info.update(get_git_info(info['__path__']))
        info.update(info.pop('__env__'))

    # Figure out which files to regenerate.
    if not options.force:

        no_regen = set()
        for source in sources:

            info = sources[source]

            try:
                gen_mtime = stat(info['__genfile__']).st_mtime
            except:
                continue

            dirty = False
            if gen_mtime < info['__mtime__']:
                dirty = True

            layout = info['__layout__']
            layout_info = layouts[layout]
            if layout_info['__deps__']:
                layout_chain = [layout] + layout_info['__deps__']
            else:
                layout_chain = [layout]

            for layout in layout_chain:
                if gen_mtime < layouts[layout]['__mtime__']:
                    dirty = True
                    break

            for dep in info['__deps__']:
                dep_mtime = stat(join_path(source_directory, dep)).st_mtime
                if gen_mtime < dep_mtime:
                    dirty = True
                    break

            if not dirty:
                no_regen.add(source)

        for source in no_regen:
            if source in render_last:
                continue
            del sources[source]

        remaining = set(sources.keys())
        if remaining == render_last:
            for source in remaining.intersection(no_regen):
                del sources[source]

    # Regenerate!
    items = sorted(sources.items(), key=lambda x: x[1]['__rst__'] == False)

    for source, source_info in items:

        info = config.copy()
        info.update(source_info)

        if verbose:
            print
            print LINE
            print 'Converting: [%s] %s' % (info['__type__'], info['__path__'])
            print LINE
            print

        if info['__type__'] == 'code':

            content = info['__content__']
            conf = PROGLANGS[info['__filetype__']]
            if conf[2]:
                content = conf[2](content)
            comment_matcher = conf[3]

            lines = content.split('\n')
            include_section = None

            if lines and lines[0].startswith('#!'):
                lines.pop(0)

            sections = []; new_section = sections.append
            docs_text = []; docs_out = docs_text.append
            code_text = []; code_out = code_text.append

            for line in lines:
                if comment_matcher.match(line):
                    line = comment_matcher.sub('', line)
                    if line == '<yatiblog.comment>':
                        include_section = 1
                    else:
                        docs_out(line)
                else:
                    if not line.strip():
                        if docs_text and not include_section:
                            last_line = docs_text[-1].strip()
                            if last_line:
                                last_line_char = last_line[0]
                                for char in last_line:
                                    if char != last_line_char:
                                        break
                                else:
                                    include_section = 1
                        else:
                            if docs_text:
                                include_section = 1
                    if docs_text:
                        if include_section:
                            new_section({
                                'docs_text': '\n'.join(docs_text) + '\n',
                                'code_text': '\n'.join(code_text)
                                })
                            docs_text[:] = []
                            code_text[:] = []
                            include_section = None
                        else:
                            docs_text[:] = []
                        code_out(line)
                    else:
                        code_out(line)

            new_section({'docs_text': '', 'code_text': '\n'.join(code_text)})

            docs = conf[6].join(part['docs_text'] for part in sections)
            code = conf[4].join(part['code_text'] for part in sections)

            docs_html, props = render_rst(docs, with_props=1)
            if ('title' in props) and props['title']:
                info['title'] = props['title']

            code = code.replace('\t', ' ')
            code_html = highlight(code, get_lexer_by_name(conf[0]),
                                  SYNTAX_FORMATTER)

            docs_split = conf[7].split(docs_html)
            code_split = conf[5].split(code_html)

            output = info['__output__'] = []
            out = output.append

            if docs_split and docs_split[0]:
                diff = 0
                docs_split.insert(0, u'')
            else:
                diff = 1

            last = len(docs_split) - 2

            for i in range(last + 1):
                code = code_split[i+diff].split(u'<br/>')
                while (code and code[0] == ''):
                    code.pop(0)
                while (code and code[-1] == ''):
                    code.pop()
                code = u'<br />'.join(code)
                if code:
                    if i == last:
                        code = u'<div class="syntax"><pre>' + code
                    else:
                        code = (u'<div class="syntax"><pre>' + code +
                                u'</pre></div>')
                out((docs_split[i], code))

        elif info['__rst__']:
            with_props = info.get('with_props', False)
            if with_props:
                output, props = render_rst(info['__content__'], with_props=1)
                if ('title' in props) and props['title']:
                    info['title'] = props['title']
                info['__output__'] = output
            else:
                output = info['__output__'] = render_rst(info['__content__'])
            if info['__lead__'] == info['__content__']:
                info['__lead_output__'] = info['__output__']
            else:
                info['__lead_output__'] = render_rst(info['__lead__'])

        else:
            output = ''

        layout = info['__layout__']
        layout_info = layouts[layout]
        if layout_info['__deps__']:
            layout_chain = [layout] + layout_info['__deps__']
        else:
            layout_chain = [layout]

        for layout in layout_chain:
            template = layouts[layout]['__template__']
            output = template.generate(
                content=output,
                yatidb=data_dict,
                **info
                ).render('xhtml', encoding=None)

        if isinstance(output, unicode):
            output = output.encode('utf-8')

        data_dict[info['__name__']] = info

        output_file = open(info['__genfile__'], 'wb')
        output_file.write(output)
        output_file.close()

    if verbose:
        print 'Done!'

    sys.exit()