Example #1
def existing_directories():
    directories = set()
    for r in config.all_repos():
        for search_path, depth in DIR_DEPTHS.items():
            directories.update(
                _recurse_subdirs(os.path.join(r, search_path), depth))
    return directories
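The _recurse_subdirs helper is not shown on this page. A minimal sketch of a depth-limited directory walk that would satisfy this call site, written as an assumption rather than the actual Pegleg implementation (it relies only on os, which the snippets here already use), could look like:

def _recurse_subdirs(search_path, depth):
    # Hypothetical helper: gather directories under search_path down to
    # 'depth' levels, ignoring paths that do not exist.
    directories = set()
    try:
        for entry in os.listdir(search_path):
            joined = os.path.join(search_path, entry)
            if os.path.isdir(joined):
                if depth == 1:
                    directories.add(joined)
                else:
                    directories.update(_recurse_subdirs(joined, depth - 1))
    except FileNotFoundError:
        pass
    return directories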
Example #2
def get_deployment_data_doc(site_name):
    stanzas = {
        files.path_leaf(repo): _get_repo_deployment_data_stanza(repo)
        for repo in config.all_repos()
    }
    basedeployment_data = OrderedDict([
        ("schema", "pegleg/DeploymentData/v1"),
        ("metadata",
         OrderedDict([
             ("schema", "metadata/Document/v1"),
             ("name", "deployment-version"),
             ("layeringDefinition",
              OrderedDict([("abstract", False), ("layer", "global")])),
             ("storagePolicy", "cleartext"),
         ])), ("data", OrderedDict([("documents", stanzas)]))
    ])
    try:
        data = util.definition.load(site_name)
        basedeployment_data['data'].update(
            {'site_type': data['data']['site_type']})
    except Exception as ex:
        LOG.debug(
            "Unable to get the site definition data for"
            " site: %s, Exception :%s", site_name, ex)
    try:
        basedeployment_data['data'].update(
            {'version': data['data']['repositories']['global']['revision']})
    except Exception as ex:
        LOG.debug(
            "Unable to get the site revision data for global in"
            " site: %s, Exception :%s", site_name, ex)
    return basedeployment_data
Example #3
def directory_for(*, path):
    for r in config.all_repos():
        if path.startswith(r):
            partial_path = path[len(r):]
            parts = os.path.normpath(partial_path).split(os.sep)
            depth = DIR_DEPTHS.get(parts[0])
            if depth is not None:
                return os.path.join(r, *parts[:depth + 1])
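A hedged usage illustration; the repo path, site name, and the assumption that DIR_DEPTHS maps 'site' to a depth of 1 are illustrative, not taken from this page:

# Hypothetical call: resolve a document path back to the directory that
# owns it within the repo, provided '/tmp/repo' is among config.all_repos().
path = '/tmp/repo/site/demo-site/secrets/passphrase.yaml'
directory_for(path=path)
# With depth 1 for 'site', this returns '/tmp/repo/site/demo-site'.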
Example #4
def directories_for(*, site_name, site_type):
    library_list = [
        _global_root_path(),
        _site_type_root_path(site_type),
        _site_path(site_name),
    ]

    return [
        os.path.join(b, l) for b in config.all_repos() for l in library_list
    ]
Example #5
def _run_precommand_decrypt(site_name):
    if config.get_decrypt_repos():
        LOG.info('Executing pre-command repository decryption...')
        repo_list = config.all_repos()
        for repo in repo_list:
            secrets_path = os.path.join(repo.rstrip(os.path.sep), 'site',
                                        site_name, 'secrets')
            if os.path.exists(secrets_path):
                LOG.info('Decrypting %s', secrets_path)
                run_decrypt(True, secrets_path, None, site_name)
    else:
        LOG.debug('Skipping pre-command repository decryption.')
Example #6
def get_deployment_data_doc():
    stanzas = {
        files.path_leaf(repo): _get_repo_deployment_data_stanza(repo)
        for repo in config.all_repos()
    }
    return OrderedDict([("schema", "pegleg/DeploymentData/v1"),
                        ("metadata",
                         OrderedDict([
                             ("schema", "metadata/Document/v1"),
                             ("name", "deployment-version"),
                             ("layeringDefinition",
                              OrderedDict([("abstract", False),
                                           ("layer", "global")])),
                             ("storagePolicy", "cleartext"),
                         ])), ("data", OrderedDict([("documents", stanzas)]))])
Example #7
def directories_for_each_repo(*, site_name, site_type):
    """Provide directories for each repo.

    When producing bucketized output files, the documents collected
    must be collated by repo. Provide the list of source directories
    by repo.
    """
    library_list = [
        _global_root_path(),
        _site_type_root_path(site_type),
        _site_path(site_name),
    ]

    dir_map = dict()
    for r in config.all_repos():
        dir_map[r] = [os.path.join(r, l) for l in library_list]

    return dir_map
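A sketch of the returned mapping, under the assumption (not confirmed by this page) that the three path helpers resolve to 'global', 'type/<site_type>' and 'site/<site_name>', with a single hypothetical repo at /tmp/repo:

dir_map = directories_for_each_repo(site_name='demo-site', site_type='foundry')
# Assumed shape:
# {
#     '/tmp/repo': [
#         '/tmp/repo/global',
#         '/tmp/repo/type/foundry',
#         '/tmp/repo/site/demo-site',
#     ]
# }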
Example #8
def decrypt_repos(site_name):
    repo_list = config.all_repos()
    for repo in repo_list:
        pegleg_main.run_decrypt(True, repo, None, site_name)
Example #9
def _expected_layer(filename):
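    # Return the first path component of filename relative to the repo that
    # contains it, i.e. the top-level layer directory the file sits under.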
    for r in config.all_repos():
        if filename.startswith(r):
            partial_name = filename[len(r):]
            parts = os.path.normpath(partial_name).split(os.sep)
            return parts[0]
Example #10
def all():
    return search([
        os.path.join(r, k) for r in config.all_repos()
        for k in DIR_DEPTHS.keys()
    ])
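Every snippet above depends on config.all_repos() returning the list of locally cloned repository roots. A minimal test-style sketch of exercising one of them against a stubbed config (the paths and the patching approach are illustrative assumptions):

from unittest import mock

# Hypothetical harness: pretend two local checkouts are configured and
# collect every searchable directory across both of them.
with mock.patch.object(config, 'all_repos',
                       return_value=['/tmp/site-repo', '/tmp/global-repo']):
    print(sorted(existing_directories()))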