Example #1
0
 def _extract_mergers(self, payload, headers):
     """Return ``(parsed_payload, mergers)`` for a cloud-config part.

     The merge strategy is chosen from the payload YAML first, then from
     the merge headers, and finally falls back to ``DEF_MERGERS`` when
     neither source provides any.
     """
     # Prefer MERGE_HEADER; fall back to its "X-" prefixed variant.
     header_value = ""
     for name in (MERGE_HEADER, "X-%s" % (MERGE_HEADER)):
         candidate = headers.get(name, "")
         if candidate:
             header_value = candidate
             break
     parsed = util.load_yaml(payload)
     # Mergers named in the content win over header-provided ones;
     # both lists are concatenated in that priority order.
     combined = []
     combined.extend(mergers.dict_extract_mergers(parsed))
     combined.extend(mergers.string_extract_mergers(header_value))
     # Empty selection means neither source specified anything usable.
     return (parsed, combined or DEF_MERGERS)
 def _extract_mergers(self, payload, headers):
     """Parse *payload* as YAML and determine which mergers apply.

     Selection order: mergers declared inside the payload, then mergers
     named by the merge headers, then the module default ``DEF_MERGERS``
     if both of the former are empty.
     """
     # First non-empty merge header (plain, then "X-" prefixed) wins.
     merge_header_headers = next(
         (value
          for value in (headers.get(h, '')
                        for h in (MERGE_HEADER, 'X-%s' % (MERGE_HEADER)))
          if value),
         '',
     )
     payload_yaml = util.load_yaml(payload)
     # Content-declared mergers take precedence over header-declared ones.
     selected = (mergers.dict_extract_mergers(payload_yaml)
                 + mergers.string_extract_mergers(merge_header_headers))
     if not selected:
         selected = DEF_MERGERS
     return (payload_yaml, selected)
Example #3
0
# file and then finally that file was loaded as one big yaml file we need
# to mimic that behavior by altering the default strategy to be replacing
# keys of prior merges.
#
#
# For example
# #file 1
# a: 3
# #file 2
# a: 22
# #combined file (comments not included)
# a: 3
# a: 22
#
# This gets loaded into yaml with final result {'a': 22}
# Default merge strategy (per the note above): dict keys from later parts
# replace earlier ones; lists and strings use their default merger behavior.
DEF_MERGERS = mergers.string_extract_mergers("dict(replace)+list()+str()")
# First-line sentinels that mark a user-data part as cloud-config content
# (plain YAML vs. JSON-patch style, respectively).
CLOUD_PREFIX = "#cloud-config"
JSONP_PREFIX = "#cloud-config-jsonp"


class CloudConfigPartHandler(handlers.Handler):
    """Part handler for cloud-config and cloud-config-jsonp payloads."""

    # The content prefixes this handler understands.
    prefixes = [CLOUD_PREFIX, JSONP_PREFIX]

    def __init__(self, paths, **_kwargs):
        """Register for PER_ALWAYS handling (handler API version 3).

        paths: object exposing get_ipath() to resolve instance paths.
        _kwargs: an optional "cloud_config_path" entry overrides the
            default "cloud_config" instance-path key.
        """
        handlers.Handler.__init__(self, PER_ALWAYS, version=3)
        # Buffered cloud-config state; starts unset (None) — presumably
        # populated as parts are handled (confirm in handle_part).
        self.cloud_buf = None
        # Default output path, then replaced if the caller supplied one.
        self.cloud_fn = paths.get_ipath("cloud_config")
        if "cloud_config_path" in _kwargs:
            self.cloud_fn = paths.get_ipath(_kwargs["cloud_config_path"])
# file and then finally that file was loaded as one big yaml file we need
# to mimic that behavior by altering the default strategy to be replacing
# keys of prior merges.
#
#
# For example
# #file 1
# a: 3
# #file 2
# a: 22
# #combined file (comments not included)
# a: 3
# a: 22
#
# This gets loaded into yaml with final result {'a': 22}
# Default merge chain implementing the replace-on-conflict behavior
# described above; lists and strings keep their stock merger semantics.
DEF_MERGERS = mergers.string_extract_mergers('dict(replace)+list()+str()')
# Sentinel first lines identifying cloud-config parts (YAML form and
# JSON-patch form).
CLOUD_PREFIX = "#cloud-config"
JSONP_PREFIX = "#cloud-config-jsonp"


class CloudConfigPartHandler(handlers.Handler):
    """Handles user-data parts whose first line is a cloud-config prefix."""

    # The content prefixes this handler understands.
    prefixes = [CLOUD_PREFIX, JSONP_PREFIX]

    def __init__(self, paths, **_kwargs):
        """Set up the handler (PER_ALWAYS frequency, handler version 3).

        paths: provides get_ipath() for resolving instance file paths.
        _kwargs: "cloud_config_path", when present, selects an alternate
            instance-path key instead of "cloud_config".
        """
        handlers.Handler.__init__(self, PER_ALWAYS, version=3)
        # Accumulator for handled config; None initially — likely filled
        # elsewhere in the class (verify against handle_part).
        self.cloud_buf = None
        # Resolve the default path first; an explicit kwarg then wins.
        self.cloud_fn = paths.get_ipath("cloud_config")
        if 'cloud_config_path' in _kwargs:
            self.cloud_fn = paths.get_ipath(_kwargs["cloud_config_path"])