def get_file_contents(from_data, files, base_url=None,
                      ignore_if=None, recurse_if=None):

    if recurse_if and recurse_if(from_data):
        if isinstance(from_data, dict):
            recurse_data = six.itervalues(from_data)
        else:
            recurse_data = from_data
        for value in recurse_data:
            get_file_contents(value, files, base_url, ignore_if, recurse_if)

    if isinstance(from_data, dict):
        for key, value in iter(from_data.items()):
            if ignore_if and ignore_if(key, value):
                continue

            if base_url and not base_url.endswith('/'):
                base_url = base_url + '/'

            str_url = parse.urljoin(base_url, value)
            if str_url not in files:
                file_content = utils.read_url_content(str_url)
                if is_template(file_content):
                    template = get_template_contents(
                        template_url=str_url, files=files)[1]
                    file_content = jsonutils.dumps(template)
                files[str_url] = file_content
            # replace the data value with the normalised absolute URL
            from_data[key] = str_url
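# A hypothetical usage sketch (not from the module above): one plausible way
# to wire the ignore_if/recurse_if callbacks so that only string-valued
# 'get_file' and nested-template 'type' references are resolved. It assumes
# the module-level imports used by get_file_contents (six et al.) are in
# scope; the wrapper and callback names are illustrative only.
def _example_resolve_get_files(template, files, template_base_url):
    def ignore_if(key, value):
        # Only follow string 'get_file' values and '.yaml'/'.template' types.
        if key not in ('get_file', 'type'):
            return True
        if not isinstance(value, six.string_types):
            return True
        if key == 'type' and not value.endswith(('.yaml', '.template')):
            return True
        return False

    def recurse_if(value):
        # Keep walking through nested dicts and lists of the parsed template.
        return isinstance(value, (dict, list))

    get_file_contents(template, files, template_base_url,
                      ignore_if, recurse_if)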
def _get_file_contents(from_data, files):
    if not isinstance(from_data, (dict, list)):
        return

    if isinstance(from_data, dict):
        recurse_data = six.itervalues(from_data)
        for key, value in six.iteritems(from_data):
            if _ignore_if(key, value):
                continue

            if not value.startswith(('http://', 'https://')):
                raise exceptions.GetFileError(value, 'get_file')
            if value not in files:
                file_content = heat_utils.read_url_content(value)
                if template_utils.is_template(file_content):
                    template = get_template_files(template_url=value)[1]
                    file_content = jsonutils.dumps(template)
                files[value] = file_content
    else:
        recurse_data = from_data

    for value in recurse_data:
        _get_file_contents(value, files)
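# A minimal, illustrative driver for the recursive helper above. The sample
# template and the behaviour assumed for _ignore_if (not shown here) are not
# from the source: assuming _ignore_if skips everything except string
# 'get_file' values, walking this parsed template fetches the referenced
# script and records it in the shared files mapping keyed by its absolute URL.
def _example_collect_files():
    parsed_template = {
        'resources': {
            'config': {
                'properties': {
                    'config': {'get_file': 'http://example.com/setup.sh'},
                },
            },
        },
    }
    files = {}
    _get_file_contents(parsed_template, files)
    # files should now map 'http://example.com/setup.sh' to its content
    # (note: this performs a real HTTP request via read_url_content).
    return files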
def read_url_content(url):
    '''DEPRECATED! Use 'utils.read_url_content' instead.'''
    return utils.read_url_content(url)