def Run(self, test_file_path, path, style, suffix, exception=None):
  """Renders a markdown testdata file and compares against golden output.

  Args:
    test_file_path: The base path used to locate testdata files.
    path: [str], The command path parts used to derive the testdata file
      name and the rendered document title.
    style: str, The render_document output style.
    suffix: str, The suffix of the golden output file to compare against.
    exception: str, The expected exception message, or None if no exception
      is expected.
  """
  file_base = '_'.join(path)
  markdown_path = self.GetTestdataPath(test_file_path, file_base + '.src.md')
  try:
    markdown_data = console_attr.Decode(
        pkg_resources.GetResourceFromFile(markdown_path))
  except IOError:
    # Fall back to the gcloud-prefixed markdown testdata layout.
    file_base = '_'.join(['gcloud'] + path)
    markdown_path = self.GetTestdataPath(test_file_path, 'markdown',
                                         file_base + '.md')
    markdown_data = console_attr.Decode(
        pkg_resources.GetResourceFromFile(markdown_path))
  f = io.StringIO(markdown_data)
  # BUG FIX: the original did `except Exception as e: e = e.message`.
  # In Python 3 the name bound by `as e` is deleted when the handler exits,
  # so the comparison below raised NameError, and Exception has no `.message`
  # attribute. Capture the message into a separate variable instead.
  caught = None
  try:
    render_document.RenderDocument(style, fin=f, out=log.out,
                                   notes='A special note.',
                                   title=' '.join(['gcloud'] + path))
  except Exception as e:  # pylint: disable=broad-except
    caught = str(e)
  if caught != exception:
    if not exception:
      self.fail('Exception not expected but [%s] caught.' % caught)
    else:
      self.fail('Exception [%s] expected but caught [%s].' %
                (exception, caught))
  self.AssertOutputIsGolden(test_file_path, file_base + suffix)
  self.ClearOutput()
def testListPackageResources(self):
  """Checks ListPackageResources output as a directory is populated."""
  with files.TemporaryDirectory() as tmp_dir:

    def Listed():
      # Sorted for a deterministic comparison.
      return sorted(pkg_resources.ListPackageResources(tmp_dir))

    self.assertEqual([], Listed())
    Touch(os.path.join(tmp_dir, 'foo.py'), '"""Foo module."""')
    self.assertEqual(['foo.py'], Listed())
    Touch(os.path.join(tmp_dir, '__init__.py'), '"""Package marker."""')
    self.assertEqual(['__init__.py', 'foo.py'], Listed())
    os.makedirs(os.path.join(tmp_dir, 'pkg'))
    self.assertEqual(['__init__.py', 'foo.py', 'pkg' + os.sep], Listed())
    Touch(os.path.join(tmp_dir, 'pkg', '__init__.py'), '"""Package marker."""')
    self.assertEqual(['__init__.py', 'foo.py', 'pkg' + os.sep], Listed())
    Touch(os.path.join(tmp_dir, 'bar'), 'BAR')
    self.assertEqual(['__init__.py', 'bar', 'foo.py', 'pkg' + os.sep],
                     Listed())
    self.assertEqual(
        b'BAR',
        pkg_resources.GetResourceFromFile(os.path.join(tmp_dir, 'bar')))
    with self.assertRaises(IOError):
      pkg_resources.GetResourceFromFile(
          os.path.join(tmp_dir, 'non_existant_file'))
def testCommonSubstitutions(self):
  """Verifies !COMMON and _COMMON_ imports from a sibling __init__.yaml."""
  self.Touch(self.temp_path, '__init__.yaml', contents=self.source_data)
  impl_path = self.Touch(self.temp_path, 'main.yaml', contents="""\
x: !COMMON foo.bar
y: !COMMON foo.bar.a
s:
- _COMMON_: foo.bar
  i: j
  a: override
- i: j
t:
- _COMMON_: foo.bar,foo.baz
- i: j
u:
- _COMMON_list
- i: j
v:
- _COMMON_list,list2
- i: j
""")
  loader = command_loading.CreateYamlLoader(impl_path)
  actual = loader.load(pkg_resources.GetResourceFromFile(impl_path))
  self.assertEqual(actual, self.answers)
def testRefSubstitutions(self):
  """Verifies !REF and _REF_ imports from an arbitrary module path."""
  self.Touch(self.temp_path, '__init__.yaml', contents=self.source_data)
  impl_path = self.Touch(self.temp_path, 'main.yaml', contents="""\
x: !REF "{mod}:foo.bar"
y: !REF "{mod}:foo.bar.a"
s:
- _REF_: "{mod}:foo.bar"
  i: j
  a: override
- i: j
t:
- _REF_: "{mod}:foo.bar,{mod}:foo.baz"
- i: j
u:
- "_REF_{mod}:list"
- i: j
v:
- "_REF_{mod}:list,{mod}:list2"
- i: j
""".format(mod=self._TestResourcesModule()))
  loader = command_loading.CreateYamlLoader(impl_path)
  actual = loader.load(pkg_resources.GetResourceFromFile(impl_path))
  self.assertEqual(actual, self.answers)
class _GlobalDataHolder(object):
  """A global data holder for the auto-generated tests.

  This is not best practice, but is the only reasonable way we can have a
  test generated for each YAML command we want to validate. The general
  approach is to search the surface tree for all implementations and compile
  a list of all those that exist while this module is being loaded. That
  allows us to use that list of commands as the seed to parameterize the
  test in the next class.

  We can't do this during test run time, because once tests are running,
  pytest has already loaded the test suite and further modifying it does not
  have any effect. It must be modified during module load time.
  """

  # The yaml command schema, loaded once at module import time.
  SCHEMA = yaml.load(
      pkg_resources.GetResourceFromFile(
          os.path.join(os.path.dirname(yaml_command_schema.__file__),
                       'yaml_command_schema.yaml')))

  # (path, file_path) tuples for every YAML command implementation found
  # under the surface tree; used to parameterize the generated tests.
  COMMANDS = []
  # NOTE: rebinds the module-level `surface` name to a directory path within
  # this class namespace.
  surface = os.path.dirname(surface.__file__)
  prefix_len = len(surface)
  for root, dirs, files in os.walk(six.text_type(surface)):
    for f in files:
      if f.endswith('.yaml') and f != '__init__.yaml':
        file_path = os.path.join(root, f)
        # Chop off the surface directory (plus extra '/') and the .yaml
        # extension.
        path = (file_path[prefix_len + 1:-5].replace('/', '.').replace(
            '\\', '.').split('.'))
        COMMANDS.append((path, file_path))
def _FilterYaml(parsed_yaml, schema_path):
  """Filter out fields from the yaml that are not in the schema.

  Args:
    parsed_yaml: yaml to filter
    schema_path: Path to schema, relative to schemas directory.
  """
  full_schema_path = os.path.join(SCHEMA_DIR, schema_path)
  schema = yaml.load(pkg_resources.GetResourceFromFile(full_schema_path))
  schema_dir = os.path.join(SCHEMA_DIR, os.path.dirname(schema_path))
  validator = _GetValidator(schema, schema_dir)
  has_warnings = False
  for validation_error in validator.iter_errors(parsed_yaml):
    # Disallowed-field errors are the only ones acted on: there is no way to
    # distinguish disallowed fields from unrecognized ones. Anything else
    # (e.g. a missing required field, or an unrecognized value for a
    # recognized field such as a new enum value) is logged as a warning,
    # since the exported data may not round-trip through the import command
    # until that command is updated.
    if _IsDisallowedPropertiesError(validation_error):
      _ClearFields(_ParseProperties(validation_error.message),
                   validation_error.path, parsed_yaml)
    else:
      log.warning(validation_error.message)
      has_warnings = True
  if has_warnings:
    log.warning(
        'The import command may need to be updated to handle the export data.'
    )
def _GetRefData(self, path):
  """Loads YAML data referenced as 'package.module:attr(.attr)*'.

  The module portion names a .yaml file relative to the googlecloudsdk
  root; the attribute portion is a dotted path extracted from that file.

  Args:
    path: str, The path of the YAML file to import. It must be in the form
      of: package.module:attribute.attribute, where the module path is
      separated from the sub attributes within the YAML by a ':'.

  Raises:
    LayoutException: If the given module or attribute cannot be loaded.

  Returns:
    The referenced YAML data.
  """
  parts = path.split(':')
  if len(parts) != 2:
    raise LayoutException(
        'Invalid Yaml reference: [{}]. References must be in the format: '
        'path(.path)+:attribute(.attribute)*'.format(path))
  sdk_root = os.path.dirname(os.path.dirname(googlecloudsdk.__file__))
  yaml_path = os.path.join(sdk_root, *parts[0].split('.')) + '.yaml'
  try:
    data = yaml.safe_load(pkg_resources.GetResourceFromFile(yaml_path))
  except IOError as e:
    raise LayoutException(
        'Failed to load Yaml reference file [{}]: {}'.format(yaml_path, e))
  return self._GetAttribute(data, parts[1], yaml_path)
def testValidateTemplate(self):
  """The scenario template must validate against the scenario schema."""
  raw = pkg_resources.GetResourceFromFile(SCENARIO_TEMPLATE_PATH)
  parsed = yaml.load(raw, version=yaml.VERSION_1_2)
  try:
    scenario_schema.Validator(parsed).Validate()
  except scenario_schema.ValidationError as e:
    self.fail(e)
def testValidateCreateExample(self):
  """The create example must validate against the scenario schema."""
  parsed = yaml.load(
      pkg_resources.GetResourceFromFile(CREATE_EXAMPLE_PATH),
      version=yaml.VERSION_1_2)
  try:
    scenario_schema.Validator(parsed).Validate()
  except scenario_schema.ValidationError as e:
    self.fail(e)
def testMissingRef(self):
  """A !REF without a ':' separator raises LayoutException."""
  impl_path = self.Touch(self.temp_path, 'main.yaml', contents="""\
x: !REF asdf
""")
  with self.assertRaisesRegex(command_loading.LayoutException,
                              r'Invalid Yaml reference: \[asdf\].'):
    command_loading.CreateYamlLoader(impl_path).load(
        pkg_resources.GetResourceFromFile(impl_path))
def resolve_remote(ref):
  """pkg_resources $ref override -- schema_dir closure needed here."""
  # NOTE(review): `schema_dir` and `self` are free names here -- this
  # function only works as a closure inside an enclosing method (see the
  # RefResolver definition in __init__); confirm it is never called
  # standalone.
  path = os.path.join(schema_dir, ref)
  data = pkg_resources.GetResourceFromFile(path)
  try:
    schema = yaml.load(data)
  except Exception as e:  # pylint: disable=broad-except, avoid crash
    # Any parse failure is surfaced as an invalid-schema error rather than
    # crashing the resolver.
    raise InvalidSchemaError(e)
  self.ValidateSchemaVersion(schema, path)
  return schema
def testMissingCommon(self):
  """!COMMON fails cleanly when no sibling __init__.yaml exists."""
  impl_path = self.Touch(self.temp_path, 'main.yaml', contents="""\
x: !COMMON foo.bar
""")
  with self.assertRaisesRegex(
      command_loading.LayoutException,
      r'references \[common command\] data but it does not exist'):
    command_loading.CreateYamlLoader(impl_path).load(
        pkg_resources.GetResourceFromFile(impl_path))
def testMissingRefFile(self):
  """!REF to a nonexistent yaml file raises LayoutException."""
  impl_path = self.Touch(self.temp_path, 'main.yaml', contents="""\
x: !REF a:foo
""")
  with self.assertRaisesRegex(
      command_loading.LayoutException,
      r'Failed to load Yaml reference file \[.*a\.yaml\]'):
    command_loading.CreateYamlLoader(impl_path).load(
        pkg_resources.GetResourceFromFile(impl_path))
def testMissingRefAttribute(self):
  """!REF to a missing attribute in an existing file raises."""
  impl_path = self.Touch(self.temp_path, 'main.yaml', contents="""\
x: !REF {mod}:asdf.asdf
""".format(mod=self._TestResourcesModule()))
  with self.assertRaisesRegex(
      command_loading.LayoutException,
      r'references \[.*test_data\.yaml\] data attribute \[asdf\] in path \['
      r'asdf.asdf\] but it does not exist.'):
    command_loading.CreateYamlLoader(impl_path).load(
        pkg_resources.GetResourceFromFile(impl_path))
def testMissingAttributeInCommon(self):
  """!COMMON to a missing attribute in __init__.yaml raises."""
  self.Touch(self.temp_path, '__init__.yaml', contents=self.source_data)
  impl_path = self.Touch(self.temp_path, 'main.yaml', contents="""\
x: !COMMON asdf.asdf
""")
  with self.assertRaisesRegex(
      command_loading.LayoutException,
      r'references \[common command\] data attribute \[asdf\] in path '
      r'\[asdf.asdf\] but it does not exist.'):
    command_loading.CreateYamlLoader(impl_path).load(
        pkg_resources.GetResourceFromFile(impl_path))
def _ValidateYaml(parsed_yaml, schema_path):
  """Validate yaml against schema.

  Args:
    parsed_yaml: yaml to validate
    schema_path: Path to schema, relative to schemas directory.

  Raises:
    ValidationError: if the template doesn't obey the schema.
    SchemaError: if the schema is invalid.
  """
  full_path = os.path.join(SCHEMA_DIR, schema_path)
  schema = yaml.load(pkg_resources.GetResourceFromFile(full_path))
  validators.validate(parsed_yaml, schema)
def _GetAllImplementations(impl_paths, path, construction_id, is_command,
                           yaml_command_translator):
  """Gets all the release track command implementations.

  Can load both python and yaml modules.

  Args:
    impl_paths: [str], A list of file paths to the command implementation for
      this group or command.
    path: [str], A list of group names that got us down to this command group
      with respect to the CLI itself. This path should be used for things
      like error reporting when a specific element in the tree needs to be
      referenced.
    construction_id: str, A unique identifier for the CLILoader that is
      being constructed.
    is_command: bool, True if we are loading a command, False to load a group.
    yaml_command_translator: YamlCommandTranslator, An instance of a
      translator to use to load the yaml data.

  Raises:
    CommandLoadFailure: If the command is invalid and cannot be loaded.

  Returns:
    [(func->base._Common, [base.ReleaseTrack])], A list of tuples that can be
    passed to _ExtractReleaseTrackImplementation. Each item in this list
    represents a command implementation. The first element is a function
    that returns the implementation, and the second element is a list of
    release tracks it is valid for.
  """
  implementations = []
  for impl_file in impl_paths:
    if not impl_file.endswith('.yaml'):
      # A python implementation: load the module and harvest its classes.
      module = _GetModuleFromPath(impl_file, path, construction_id)
      implementations.extend(
          _ImplementationsFromModule(module.__file__,
                                     list(module.__dict__.values()),
                                     is_command=is_command))
      continue
    # A yaml implementation: only commands (not groups) may use yaml.
    if not is_command:
      raise CommandLoadFailure(
          '.'.join(path),
          Exception('Command groups cannot be implemented in yaml'))
    data = CreateYamlLoader(impl_file).load(
        pkg_resources.GetResourceFromFile(impl_file))
    implementations.extend(
        _ImplementationsFromYaml(path, data, yaml_command_translator))
  return implementations
def __init__(self, schema_path):
  """Initializes the schema and validator for schema_path.

  The validator resolves references to all other schemas in the directory
  of schema_path.

  Yes, it's really this ugly defining a validator with a resolver to
  pkg_resources resources.

  Raises:
    IOError: if schema not found in installed resources.
    files.Error: if schema file not found.
    SchemaError: if the schema is invalid.

  Args:
    schema_path: JSON schema file path.

  Returns:
    The schema to validate and the validator.
  """
  schema_dir = os.path.dirname(schema_path)

  class RefResolver(jsonschema.RefResolver):
    """$ref: resolver that consults pkg_resources."""

    @staticmethod
    def resolve_remote(ref):
      """pkg_resources $ref override -- schema_dir closure needed here."""
      # `schema_dir` and `self` are closed over from the enclosing
      # __init__, which is why this must be defined inline here.
      path = os.path.join(schema_dir, ref)
      data = pkg_resources.GetResourceFromFile(path)
      try:
        schema = yaml.load(data)
      except Exception as e:  # pylint: disable=broad-except, avoid crash
        raise InvalidSchemaError(e)
      self.ValidateSchemaVersion(schema, path)
      return schema

  try:
    schema = yaml.load(pkg_resources.GetResourceFromFile(schema_path))
  except Exception as e:  # pylint: disable=broad-except, avoid crash
    raise InvalidSchemaError(e)
  self.ValidateSchemaVersion(schema, schema_path)
  resolver = RefResolver.from_schema(schema)
  # Pick the validator class matching the schema's declared dialect and wire
  # in the pkg_resources-aware resolver.
  self._validator = jsonschema.validators.validator_for(schema)(
      schema, resolver=resolver)
  self._validate = self._validator.validate
def _GetValidator(schema, schema_dir):
  """Construct a validator that uses the given schema.

  The validator is able to resolve references to all other schemas in the
  same directory.

  Args:
    schema: The schema to validate against.
    schema_dir: The full path to the directory containing the schema.

  Returns:
    A validator.
  """
  validator = validators.validator_for(schema)(schema)
  store = validator.resolver.store
  # Preload every sibling schema into the resolver store so local $refs
  # resolve without touching the filesystem at validation time.
  for resource in pkg_resources.ListPackageResources(schema_dir):
    store[resource] = yaml.load(
        pkg_resources.GetResourceFromFile(os.path.join(schema_dir, resource)))
  return validator
def _LoadSurveyContent(self):
  """Reads this survey's YAML definition and returns the parsed data."""
  content_path = os.path.join(_GetSurveyContentDirectory(),
                              self.name + '.yaml')
  return yaml.load(pkg_resources.GetResourceFromFile(content_path))
def CreateYamlLoader(impl_path):
  """Creates a custom yaml loader that handles includes from common data.

  Args:
    impl_path: str, The path to the file we are loading data from.

  Returns:
    yaml.Loader, A yaml loader to use.
  """
  # TODO(b/64147277) Allow for importing from other places.
  common_file_path = os.path.join(os.path.dirname(impl_path), '__init__.yaml')
  common_data = None
  if os.path.exists(common_file_path):
    common_data = yaml.safe_load(
        pkg_resources.GetResourceFromFile(common_file_path))

  class Constructor(yaml.Constructor):
    """A custom yaml constructor.

    It adds 2 different import capabilities. Assuming __init__.yaml has the
    contents:

    foo:
      a: b
      c: d

    baz:
      - e: f
      - g: h

    The first uses a custom constructor to insert data into your current
    file, so:

    bar: !COMMON foo.a

    results in:

    bar: b

    The second mechanism overrides construct_mapping and construct_sequence
    to post process the data and replace the merge macro with keys from the
    other file. We can't use the custom constructor for this as well because
    the merge key type in yaml is processed before custom constructors which
    makes importing and merging not possible. So:

    bar:
      _COMMON_: foo
      i: j

    results in:

    bar:
      a: b
      c: d
      i: j

    This can also be used to merge list contexts, so:

    bar:
      - _COMMON_baz
      - i: j

    results in:

    bar:
      - e: f
      - g: h
      - i: j

    You may also use the !REF and _REF_ directives in the same way. Instead
    of pulling from the common file, they can pull from an arbitrary yaml
    file somewhere in the googlecloudsdk tree. The syntax looks like:

    bar: !REF googlecloudsdk.foo.bar:a.b.c

    This will load googlecloudsdk/foo/bar.yaml and from that file return the
    a.b.c nested attribute.
    """

    INCLUDE_COMMON_MACRO = '!COMMON'
    MERGE_COMMON_MACRO = '_COMMON_'
    INCLUDE_REF_MACRO = '!REF'
    MERGE_REF_MACRO = '_REF_'

    def construct_mapping(self, *args, **kwargs):
      # Resolve _COMMON_ merges first, then _REF_ merges, on every mapping.
      data = super(Constructor, self).construct_mapping(*args, **kwargs)
      data = self._ConstructMappingHelper(Constructor.MERGE_COMMON_MACRO,
                                          self._GetCommonData, data)
      return self._ConstructMappingHelper(Constructor.MERGE_REF_MACRO,
                                          self._GetRefData, data)

    def _ConstructMappingHelper(self, macro, source_func, data):
      # Merge the referenced mapping(s) into this one; the macro key itself
      # is removed from the result.
      attribute_path = data.pop(macro, None)
      if not attribute_path:
        return data

      modified_data = {}
      for path in attribute_path.split(','):
        modified_data.update(source_func(path))
      # Add the explicit data last so it can override the imports.
      modified_data.update(data)
      return modified_data

    def construct_sequence(self, *args, **kwargs):
      # Same two-pass resolution as construct_mapping, for list contexts.
      data = super(Constructor, self).construct_sequence(*args, **kwargs)
      data = self._ConstructSequenceHelper(
          Constructor.MERGE_COMMON_MACRO, self._GetCommonData, data)
      return self._ConstructSequenceHelper(Constructor.MERGE_REF_MACRO,
                                           self._GetRefData, data)

    def _ConstructSequenceHelper(self, macro, source_func, data):
      # Splice referenced list items in place of any string element that
      # starts with the merge macro; other elements pass through unchanged.
      new_list = []
      for i in data:
        if isinstance(i, six.string_types) and i.startswith(macro):
          attribute_path = i[len(macro):]
          for path in attribute_path.split(','):
            new_list.extend(source_func(path))
        else:
          new_list.append(i)
      return new_list

    def IncludeCommon(self, node):
      # Handler for the !COMMON scalar tag.
      attribute_path = self.construct_scalar(node)
      return self._GetCommonData(attribute_path)

    def IncludeRef(self, node):
      # Handler for the !REF scalar tag.
      attribute_path = self.construct_scalar(node)
      return self._GetRefData(attribute_path)

    def _GetCommonData(self, attribute_path):
      if not common_data:
        raise LayoutException(
            'Command [{}] references [common command] data but it does not '
            'exist.'.format(impl_path))
      return self._GetAttribute(common_data, attribute_path, 'common command')

    def _GetRefData(self, path):
      """Loads the YAML data from the given reference.

      A YAML reference must refer to a YAML file and an attribute within
      that file to extract.

      Args:
        path: str, The path of the YAML file to import. It must be in the
          form of: package.module:attribute.attribute, where the module path
          is separated from the sub attributes within the YAML by a ':'.

      Raises:
        LayoutException: If the given module or attribute cannot be loaded.

      Returns:
        The referenced YAML data.
      """
      root = os.path.dirname(os.path.dirname(googlecloudsdk.__file__))
      parts = path.split(':')
      if len(parts) != 2:
        raise LayoutException(
            'Invalid Yaml reference: [{}]. References must be in the format: '
            'path(.path)+:attribute(.attribute)*'.format(path))
      yaml_path = os.path.join(root, *parts[0].split('.'))
      yaml_path += '.yaml'
      try:
        data = yaml.safe_load(
            pkg_resources.GetResourceFromFile(yaml_path))
      except IOError as e:
        raise LayoutException(
            'Failed to load Yaml reference file [{}]: {}'.format(
                yaml_path, e))
      return self._GetAttribute(data, parts[1], yaml_path)

    def _GetAttribute(self, data, attribute_path, location):
      # Walk the dotted attribute path one component at a time.
      value = data
      for attribute in attribute_path.split('.'):
        # NOTE(review): a falsy value (0, '', {}) at any step is treated as
        # missing, and a non-mapping intermediate would raise AttributeError
        # on .get -- presumably all referenced attributes are non-empty
        # mappings; confirm.
        value = value.get(attribute, None)
        if not value:
          raise LayoutException(
              'Command [{}] references [{}] data attribute [{}] in '
              'path [{}] but it does not exist.'.format(
                  impl_path, location, attribute, attribute_path))
      return value

  loader = yaml.YAML()
  loader.Constructor = Constructor
  loader.constructor.add_constructor(Constructor.INCLUDE_COMMON_MACRO,
                                     Constructor.IncludeCommon)
  loader.constructor.add_constructor(Constructor.INCLUDE_REF_MACRO,
                                     Constructor.IncludeRef)
  return loader
def __init__(self, test_data):
  """Holds the parsed scenario schema plus the data to validate."""
  self.test_data = test_data
  raw_schema = pkg_resources.GetResourceFromFile(_SCENARIO_SCHEMA_PATH)
  self.schema = yaml.load(raw_schema)
def testGetResourceFromFile(self):
  """GetResourceFromFile returns this test module's own source bytes."""
  module_base, _ = os.path.splitext(__file__)
  contents = pkg_resources.GetResourceFromFile(module_base + '.py')
  self.assertIn(b'This is the string I am looking for', contents)
def _CustomLoadYamlFile(path):
  """Loads the YAML file at path using the include-aware custom loader."""
  loader = CreateYamlLoader(path)
  return loader.load(pkg_resources.GetResourceFromFile(path))
def _SafeLoadYamlFile(path):
  """Reads the file at path and parses it with the safe YAML loader."""
  contents = pkg_resources.GetResourceFromFile(path)
  return yaml.safe_load(contents)