def handle_projects(args):
    """Run the selected projects sub-command with an authenticated API client.

    Exits the process with the error as the exit message if the token
    cannot be read or the API client cannot be constructed.
    """
    try:
        token = read_token()
        api = XTMCloudAPI(token)
    except Exception as err:
        sys.exit(err)
    with create_source(args.source_dir, cached=True) as fs:
        args.projects_func(args, api, fs)
def test_create_source(tmpdir):
    """Create a hierarchy of multi-sources testing different config options."""
    one = tmpdir.mkdir('one')
    two = tmpdir.mkdir('two')
    three = tmpdir.mkdir('three')
    four = two.mkdir('four')

    # "one" includes "two" (absolute path) and "three" (relative path);
    # "two" includes "four"; "four" has an empty [paths] section.
    one_ap = '\n {}\n {}'.format(two.strpath, '../three')
    one.join('settings.ini').write(PATHS_FRAGMENT_TEMPLATE.format(one_ap))
    two.join('settings.ini').write(PATHS_FRAGMENT_TEMPLATE.format('four'))
    four.join('settings.ini').write('[paths]')

    # Each directory carries a marker file named after itself.
    markers = ['one', 'two', 'three', 'four']
    for directory, marker in zip([one, two, three, four], markers):
        directory.join(marker).write('')

    source = create_source(one.strpath)
    assert all(source.has_file(marker) for marker in markers)
description= 'CMS development server created to test pages locally and on-the-fly') parser.add_argument('path', nargs='?', default=os.curdir) parser.add_argument( '-a', '--address', default='localhost', help='Address of the interface the server will listen on') parser.add_argument('-p', '--port', type=int, default=5000, help='TCP port the server will listen on') args = parser.parse_args() source = create_source(args.path) address = args.address port = args.port try: from werkzeug.serving import ThreadedWSGIServer, run_simple # see https://github.com/mitsuhiko/werkzeug/pull/770 ThreadedWSGIServer.daemon_threads = True def run(*args, **kwargs): # The werkzeug logger must be configured before the # root logger. Also we must prevent it from propagating # messages, otherwise messages are logged twice. import logging logger = logging.getLogger('werkzeug')
def generate_pages(repo, output_dir, relative=False):
    """Render the site from *repo* into *output_dir*.

    Generates processed pages, localizable files and static assets, then
    removes anything under ``output_dir`` that this run did not produce.

    Parameters:
        repo: handed to ``create_source`` -- presumably a repository
            path; confirm against ``create_source``.
        output_dir: root directory receiving the generated files.
        relative: forwarded to ``process_page``; presumably controls
            whether generated links are relative -- confirm there.
    """
    # Absolute paths of every output file produced (or intentionally
    # skipped as already up to date); consulted by remove_unknown().
    known_files = set()

    def write_file(path_parts, contents, binary=False):
        # Write one output file; path_parts are joined under output_dir.
        encoding = None if binary else 'utf-8'
        outfile = os.path.join(output_dir, *path_parts)
        # Two inputs mapping to the same output path: keep the first,
        # warn about the rest.
        if outfile in known_files:
            logging.warning('File %s has multiple sources', outfile)
            return
        known_files.add(outfile)

        # Skip the write when the previous version already matches.
        # NOTE(review): exact comparison semantics live in
        # is_in_previous_version -- confirm there.
        if is_in_previous_version(outfile, contents, encoding):
            return

        ensure_dirs(output_dir, path_parts[:-1])
        with codecs.open(outfile, 'wb', encoding=encoding) as handle:
            handle.write(contents)

    with create_source(repo, cached=True) as source:
        config = source.read_config()
        defaultlocale = config.get('general', 'defaultlocale')
        locales = list(source.list_locales())
        # The default locale is always generated, even if it is not
        # listed among the translation locales.
        if defaultlocale not in locales:
            locales.append(defaultlocale)

        # First pass: compile the list of pages with given translation level
        def get_locale_file(page):
            # A page may take its locale files from another page via the
            # [locale_overrides] config section; fall back to the page
            # name itself.
            try:
                return config.get('locale_overrides', page)
            except ConfigParser.Error:
                return page

        pagelist = set()   # (locale, page) pairs to generate
        blacklist = set()  # (locale, locale_file) pairs below the threshold
        for page, format in source.list_pages():
            for locale in locales:
                if locale == defaultlocale:
                    # The default locale needs no translation-ratio check.
                    pagelist.add((locale, page))
                else:
                    params = get_page_params(source, locale, page, format)
                    if params['translation_ratio'] >= MIN_TRANSLATED:
                        pagelist.add((locale, page))
                    else:
                        blacklist.add((locale, get_locale_file(page)))

        # Override existence check to avoid linking to pages we don't generate
        orig_has_locale = source.has_locale

        def has_locale(locale, page):
            page = get_locale_file(page)
            if (locale, page) in blacklist:
                return False
            return orig_has_locale(locale, page)
        source.has_locale = has_locale
        # resolve_link may hold results cached against the original
        # has_locale, so drop them before the second pass.
        source.resolve_link.cache_clear()

        # Second pass: actually generate pages this time
        for locale, page in pagelist:
            pagedata = process_page(source, locale, page, relative=relative)

            # Make sure links to static files are versioned
            pagedata = re.sub(r'(<script\s[^<>]*\bsrc="/[^"<>]+)',
                              r'\1?%s' % source.version, pagedata)
            pagedata = re.sub(r'(<link\s[^<>]*\bhref="/[^"<>]+)',
                              r'\1?%s' % source.version, pagedata)
            pagedata = re.sub(r'(<img\s[^<>]*\bsrc="/[^"<>]+)',
                              r'\1?%s' % source.version, pagedata)

            write_file([locale] + page.split('/'), pagedata)

        # Localizable files are copied verbatim for every locale that
        # provides them.
        for filename in source.list_localizable_files():
            for locale in locales:
                if source.has_localizable_file(locale, filename):
                    filedata = source.read_localizable_file(locale, filename)
                    write_file([locale] + filename.split('/'), filedata,
                               binary=True)

        # Static assets are locale-independent.
        for filename in source.list_static():
            write_file(filename.split('/'), source.read_static(filename),
                       binary=True)

    def remove_unknown(dir):
        # Recursively delete files this run did not produce, then prune
        # directories left empty as a result.
        files = os.listdir(dir)
        for filename in files:
            path = os.path.join(dir, filename)
            if os.path.isfile(path) and path not in known_files:
                os.remove(path)
            elif os.path.isdir(path):
                remove_unknown(path)
                if not os.listdir(path):
                    os.rmdir(path)
    remove_unknown(output_dir)
def __init__(self, host, port, source_dir):
    """Store the endpoint details and open the content source.

    The base URL is derived from *host* and *port*; the source is
    created from *source_dir* via ``create_source``.
    """
    self.host = host
    self.port = port
    self.full_url = 'http://{0}:{1}'.format(host, port)
    self.source = create_source(source_dir)