Ejemplo n.º 1
0
def _parseOverrides(overrides):
  """
  Parse a list of "setting_key:setting_value" strings into a settings dict.

  Each key is validated against the PyRadiomics parameter schema, and its
  value converted to the type declared there (str/int/float/bool, scalar or
  comma-separated list). Unrecognized or unparsable overrides are skipped
  with a warning.

  :param overrides: list of strings formatted as "setting_key:setting_value"
  :return: dict mapping recognized setting keys to their converted values
  """
  global scriptlogger
  setting_overrides = {}

  # parse overrides
  if len(overrides) == 0:
    scriptlogger.debug('No overrides found')
    return setting_overrides

  scriptlogger.debug('Reading parameter schema')
  schemaFile, schemaFuncs = radiomics.getParameterValidationFiles()
  with open(schemaFile) as schema:
    # NOTE(review): yaml.load without an explicit Loader is deprecated/unsafe
    # for untrusted input; the schema file ships with the package, so this is
    # presumably trusted -- confirm before switching to safe_load.
    settingsSchema = yaml.load(schema)['mapping']['setting']['mapping']

  # parse single value function
  def parse_value(value, value_type):
    if value_type == 'str':
      return value  # no conversion
    elif value_type == 'int':
      return int(value)
    elif value_type == 'float':
      return float(value)
    elif value_type == 'bool':
      return value == '1' or value.lower() == 'true'
    else:
      raise ValueError('Cannot understand value_type %s' % value_type)

  for setting in overrides:  # setting = "setting_key:setting_value"
    if ':' not in setting:
      scriptlogger.warning('Incorrect format for override setting "%s", missing ":"', setting)
      # BUGFIX: without this `continue`, the split below raised an uncaught
      # ValueError for overrides that do not contain ':'.
      continue
    # split into key and value
    # BUGFIX: maxsplit must be 1 (not 2); with maxsplit=2 a value containing
    # ':' produced three parts and an uncaught unpacking ValueError. With
    # maxsplit=1 everything after the first ':' is kept as the value.
    setting_key, setting_value = setting.split(':', 1)

    # Check if it is a valid PyRadiomics Setting
    if setting_key not in settingsSchema:
      scriptlogger.warning('Did not recognize override %s, skipping...', setting_key)
      continue

    # Try to parse the value by looking up its type in the settingsSchema
    try:
      setting_def = settingsSchema[setting_key]
      setting_type = 'str'  # If type is omitted in the schema, treat it as string (no conversion)
      if 'seq' in setting_def:
        # Multivalued setting
        if len(setting_def['seq']) > 0 and 'type' in setting_def['seq'][0]:
          setting_type = setting_def['seq'][0]['type']

        setting_overrides[setting_key] = [parse_value(val, setting_type) for val in setting_value.split(',')]
        scriptlogger.debug('Parsed "%s" as list (element type "%s"); value: %s',
                           setting_key, setting_type, setting_overrides[setting_key])
      else:
        if 'type' in setting_def:
          setting_type = setting_def['type']
        setting_overrides[setting_key] = parse_value(setting_value, setting_type)
        scriptlogger.debug('Parsed "%s" as type "%s"; value: %s', setting_key, setting_type, setting_overrides[setting_key])

    except Exception:
      scriptlogger.warning('Could not parse value %s for setting %s, skipping...', setting_value, setting_key)

  return setting_overrides
Ejemplo n.º 2
0
    def test_files_with_unicode_content_failing(self, tmpdir):
        """
        These tests should fail with the specified exception
        """
        # To trigger schema exception we must pass in a source file
        fail_data_2f_yaml = {
            'schema': {
                'type': 'map',
                'mapping': {
                    'msg': {
                        'type': 'int',
                    },
                }
            },
            'data': {
                'msg': 'Foobar',
            },
            'errors': ["Value 'Foobar' is not of type 'int'. Path: '/msg'"]
        }

        source_f = tmpdir.join(u"2få.json")
        source_f.write(yaml.safe_dump(fail_data_2f_yaml, allow_unicode=True))

        _fail_tests = [
            # Test mapping with unicode key and value but wrong type
            (u"1f.yaml", SchemaError),
            # Test unicode filename with validation errors.
            # It is not possible to package a file with unicode characters
            # like åäö in the filename in some python versions.
            # Mock a file with åäö during testing to properly simulate this again.
            (unicode(source_f), SchemaError),
            # Test unicode data inside seq but wrong type
            (u"3f.yaml", SchemaError),
        ]

        for failing_test, exception_type in _fail_tests:
            f = self.f(failing_test)

            with open(f, "r") as stream:
                yaml_data = yaml.load(stream)
                data = yaml_data["data"]
                schema = yaml_data["schema"]
                errors = yaml_data["errors"]

            try:
                print(u"Running test files: {0}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
            except exception_type:
                pass  # OK
            else:
                # BUGFIX: the message has three placeholders ({0},{1},{2}) but
                # only two arguments were supplied, so this line raised an
                # IndexError instead of the intended AssertionError.
                raise AssertionError(
                    u"Exception {0} not raised as expected... FILES: {1} : {2}"
                    .format(exception_type, failing_test, f))

            compare(sorted(c.validation_errors),
                    sorted(errors),
                    prefix=u"Wrong validation errors when parsing files : {0}".
                    format(f))
Ejemplo n.º 3
0
    def test_files_with_unicode_content_success(self, tmpdir):
        """
        These tests should pass with no exception raised
        """
        # Schema/data pair that validates cleanly; written to disk so that a
        # unicode filename can be exercised as well.
        fail_data_2s_yaml = {
            'schema': {'type': 'map', 'mapping': {'msg': {'type': 'int'}}},
            'data': {'msg': 123},
            'errors': [],
        }

        source_f = tmpdir.join(u"2så.json")
        source_f.write(yaml.safe_dump(fail_data_2s_yaml, allow_unicode=True))

        _pass_tests = [
            # Test mapping with unicode key and value
            u"1s.yaml",
            # # Test unicode filename.
            # It is not possible to package a file with unicode characters
            # like åäö in the filename in some python versions, so the file
            # created above is used to simulate that case.
            unicode(source_f),
            # Test sequence with unicode keys
            u"3s.yaml",
        ]

        for test_entry in _pass_tests:
            f = self.f(test_entry)

            with open(f, "r") as stream:
                yaml_data = yaml.load(stream)

            data = yaml_data["data"]
            schema = yaml_data["schema"]

            try:
                print(u"Running test files: {0}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
                compare(c.validation_errors, [],
                        prefix="No validation errors should exist...")
            except Exception as e:
                print(u"ERROR RUNNING FILES: {0}".format(f))
                raise e

            # This serve as an extra schema validation that tests more complex structures then testrule.py do
            compare(
                c.root_rule.schema_str,
                schema,
                prefix=
                u"Parsed rules is not correct, something have changed... files : {0}"
                .format(f))
Ejemplo n.º 4
0
    def test_files_with_unicode_content_failing(self, tmpdir):
        """
        These tests should fail with the specified exception
        """
        # To trigger schema exception we must pass in a source file
        fail_data_2f_yaml = {
            'schema': {
                'type': 'map',
                'mapping': {
                    'msg': {
                        'type': 'int',
                    },
                }
            },
            'data': {
                'msg': 'Foobar',
            },
            'errors': ["Value 'Foobar' is not of type 'int'. Path: '/msg'"]
        }

        source_f = tmpdir.join(u"2få.json")
        source_f.write(yaml.safe_dump(fail_data_2f_yaml, allow_unicode=True))

        _fail_tests = [
            # Test mapping with unicode key and value but wrong type
            (u"1f.yaml", SchemaError),
            # Test unicode filename with validation errors.
            # It is not possible to package a file with unicode characters
            # like åäö in the filename in some python versions.
            # Mock a file with åäö during testing to properly simulate this again.
            (unicode(source_f), SchemaError),
            # Test unicode data inside seq but wrong type
            (u"3f.yaml", SchemaError),
        ]

        for failing_test, exception_type in _fail_tests:
            f = self.f(failing_test)

            with open(f, "r") as stream:
                yaml_data = yaml.load(stream)
                data = yaml_data["data"]
                schema = yaml_data["schema"]
                errors = yaml_data["errors"]

            try:
                print(u"Running test files: {}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
            except exception_type:
                pass  # OK
            else:
                # BUGFIX: three '{}' placeholders but only two format args --
                # this raised IndexError instead of the intended AssertionError.
                raise AssertionError(u"Exception {} not raised as expected... FILES: {} : {}".format(exception_type, failing_test, f))

            compare(sorted(c.validation_errors), sorted(errors), prefix=u"Wrong validation errors when parsing files : {}".format(f))
Ejemplo n.º 5
0
    def test_files_with_unicode_content_success(self, tmpdir):
        """
        These tests should pass with no exception raised
        """
        # Schema/data pair that validates cleanly; written to disk so that a
        # unicode filename can be exercised as well.
        fail_data_2s_yaml = {
            'schema': {'type': 'map', 'mapping': {'msg': {'type': 'int'}}},
            'data': {'msg': 123},
            'errors': [],
        }

        source_f = tmpdir.join(u"2så.json")
        source_f.write(yaml.safe_dump(fail_data_2s_yaml, allow_unicode=True))

        _pass_tests = [
            # Test mapping with unicode key and value
            u"1s.yaml",
            # # Test unicode filename.
            # It is not possible to package a file with unicode characters
            # like åäö in the filename in some python versions, so the file
            # created above is used to simulate that case.
            unicode(source_f),
            # Test sequence with unicode keys
            u"3s.yaml",
        ]

        for test_entry in _pass_tests:
            f = self.f(test_entry)

            with open(f, "r") as stream:
                yaml_data = yaml.load(stream)

            data = yaml_data["data"]
            schema = yaml_data["schema"]

            try:
                print(u"Running test files: {}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
                compare(c.validation_errors, [], prefix="No validation errors should exist...")
            except Exception as e:
                print(u"ERROR RUNNING FILES: {}".format(f))
                raise e

            # This serve as an extra schema validation that tests more complex structures then testrule.py do
            compare(c.root_rule.schema_str, schema, prefix=u"Parsed rules is not correct, something have changed... files : {}".format(f))
Ejemplo n.º 6
0
    def __init__(self, source_file=None, schema_files=None, source_data=None, schema_data=None, extensions=None):
        """
        :param source_file: path to a .json/.yaml/.yml source data file
        :param schema_files: list of paths to schema files; merged into one schema
        :param source_data: source data structure, used when source_file is None
        :param schema_data: schema data structure, used when no schema files are given
        :param extensions:
            List of paths to python files that should be imported and available via 'func' keywork.
            This list of extensions can be set manually or they should be provided by the `--extension`
            flag from the cli. This list should not contain files specified by the `extensions` list keyword
            that can be defined at the top level of the schema.
        """
        # BUGFIX: the defaults were mutable ([]); a single shared list was
        # reused across every Core() call, and the extensions append below
        # permanently polluted it. Use None sentinels instead.
        if schema_files is None:
            schema_files = []
        if extensions is None:
            extensions = []

        log.debug(u"source_file: %s", source_file)
        log.debug(u"schema_file: %s", schema_files)
        log.debug(u"source_data: %s", source_data)
        log.debug(u"schema_data: %s", schema_data)
        log.debug(u"extension files: %s", extensions)

        self.source = None
        self.schema = None
        self.validation_errors = None
        self.validation_errors_exceptions = None
        self.root_rule = None
        self.extensions = extensions
        self.errors = []

        if source_file is not None:
            if not os.path.exists(source_file):
                raise CoreError(u"Provided source_file do not exists on disk: {}".format(source_file))

            with open(source_file, "r") as stream:
                if source_file.endswith(".json"):
                    try:
                        self.source = json.load(stream)
                    except Exception:
                        raise CoreError(u"Unable to load any data from source json file")
                elif source_file.endswith(".yaml") or source_file.endswith('.yml'):
                    try:
                        self.source = yaml.load(stream)
                    except Exception:
                        raise CoreError(u"Unable to load any data from source yaml file")
                else:
                    raise CoreError(u"Unable to load source_file. Unknown file format of specified file path: {}".format(source_file))

        if not isinstance(schema_files, list):
            raise CoreError(u"schema_files must be of list type")

        # Merge all schema files into one single file for easy parsing
        if len(schema_files) > 0:
            schema_data = {}
            for f in schema_files:
                if not os.path.exists(f):
                    raise CoreError(u"Provided source_file do not exists on disk : {0}".format(f))

                with open(f, "r") as stream:
                    if f.endswith(".json"):
                        try:
                            data = json.load(stream)
                        except Exception:
                            raise CoreError(u"No data loaded from file : {}".format(f))
                    elif f.endswith(".yaml") or f.endswith(".yml"):
                        data = yaml.load(stream)
                        if not data:
                            raise CoreError(u"No data loaded from file : {}".format(f))
                    else:
                        # BUGFIX: the '{}' placeholder was never filled in
                        # (missing .format(f)), so the message showed a
                        # literal '{}' instead of the file path.
                        raise CoreError(u"Unable to load file : {} : Unknown file format. Supported file endings is [.json, .yaml, .yml]".format(f))

                    for key in data.keys():
                        if key in schema_data.keys():
                            raise CoreError(u"Parsed key : {} : two times in schema files...".format(key))

                    schema_data = dict(schema_data, **data)

            self.schema = schema_data

        # Nothing was loaded so try the source_data variable
        if self.source is None:
            log.debug(u"No source file loaded, trying source data variable")
            self.source = source_data
        if self.schema is None:
            log.debug(u"No schema file loaded, trying schema data variable")
            self.schema = schema_data

        # Test if anything was loaded
        if self.source is None:
            raise CoreError(u"No source file/data was loaded")
        if self.schema is None:
            raise CoreError(u"No schema file/data was loaded")

        # Merge any extensions defined in the schema with the provided list of extensions from the cli
        for f in self.schema.get('extensions', []):
            self.extensions.append(f)

        # BUGFIX: the original condition was 'not isinstance(...) and all(...)',
        # which could never trigger for a list containing non-strings and
        # short-circuited away the intended non-list check. NOTE(review): on
        # Python 2 this now also rejects `unicode` paths -- confirm whether
        # callers pass unicode extension paths before backporting.
        if not isinstance(self.extensions, list) or not all(isinstance(e, str) for e in self.extensions):
            raise CoreError(u"Specified extensions must be a list of file paths")

        self._load_extensions()
Ejemplo n.º 7
0
  def _parseOverrides(self):
    """
    Parse the command-line setting overrides (self.args.setting) into a dict.

    Each override ("setting_key:setting_value") is validated against the
    PyRadiomics parameter schema and its value converted to the declared type
    (str/int/float/bool, scalar or comma-separated list). Invalid or
    unrecognized overrides are skipped with a warning. The deprecated
    ``--label`` argument, when given, is folded in as the 'label' setting.

    :return: dict mapping recognized setting keys to their converted values
    """
    setting_overrides = {}

    # parse overrides
    if len(self.args.setting) == 0:
      self.logger.debug('No overrides found')
      return setting_overrides

    self.logger.debug('Reading parameter schema')
    schemaFile, schemaFuncs = radiomics.getParameterValidationFiles()
    with open(schemaFile) as schema:
      # NOTE(review): yaml.load without an explicit Loader is deprecated; the
      # schema ships with the package, so presumably trusted -- confirm before
      # switching to safe_load.
      settingsSchema = yaml.load(schema)['mapping']['setting']['mapping']

    # parse single value function
    def parse_value(value, value_type):
      if value_type == 'str':
        return value  # no conversion
      elif value_type == 'int':
        return int(value)
      elif value_type == 'float':
        return float(value)
      elif value_type == 'bool':
        return value == '1' or value.lower() == 'true'
      else:
        raise ValueError('Cannot understand value_type "%s"' % value_type)

    for setting in self.args.setting:  # setting = "setting_key:setting_value"
      if ':' not in setting:
        self.logger.warning('Incorrect format for override setting "%s", missing ":"', setting)
        continue
      # split into key and value
      # BUGFIX: maxsplit must be 1 (not 2); with maxsplit=2 a value containing
      # ':' yielded three parts and an uncaught unpacking ValueError (this line
      # is outside the try block below). With maxsplit=1 everything after the
      # first ':' is kept as the value.
      setting_key, setting_value = setting.split(':', 1)

      # Check if it is a valid PyRadiomics Setting
      if setting_key not in settingsSchema:
        self.logger.warning('Did not recognize override "%s", skipping...', setting_key)
        continue

      # Try to parse the value by looking up its type in the settingsSchema
      try:
        setting_def = settingsSchema[setting_key]
        setting_type = 'str'  # If type is omitted in the schema, treat it as string (no conversion)
        if 'seq' in setting_def:
          # Multivalued setting
          if len(setting_def['seq']) > 0 and 'type' in setting_def['seq'][0]:
            setting_type = setting_def['seq'][0]['type']

          setting_overrides[setting_key] = [parse_value(val, setting_type) for val in setting_value.split(',')]
          self.logger.debug('Parsed "%s" as list (element type "%s"); value: %s',
                            setting_key, setting_type, setting_overrides[setting_key])
        else:
          if 'type' in setting_def:
            setting_type = setting_def['type']
          setting_overrides[setting_key] = parse_value(setting_value, setting_type)
          self.logger.debug('Parsed "%s" as type "%s"; value: %s', setting_key, setting_type,
                            setting_overrides[setting_key])

      except (KeyboardInterrupt, SystemExit):
        raise
      except Exception:
        self.logger.warning('Could not parse value "%s" for setting "%s", skipping...', setting_value, setting_key)

    # Section for deprecated argument label
    if self.args.label is not None:
      self.logger.warning(
        'Argument "label" is deprecated. To specify a custom label, use argument "setting" as follows:'
        '"--setting=label:N", where N is the a label value.')
      setting_overrides['label'] = self.args.label
    # End deprecated section

    return setting_overrides
Ejemplo n.º 8
0
    def test_core_files(self):
        """
        Run every bundled success/fail fixture file through Core.validate().

        Success fixtures must validate without errors; fail fixtures must
        raise the paired exception and report the expected validation errors.
        """
        # These tests should pass with no exception raised
        pass_tests = [
            # Test sequence with only string values
            "1s.yaml",
            # Test sequence where the only valid items is integers
            "2s.yaml",
            # Test sequence with only booleans
            "3s.yaml",
            # Test mapping with different types of data and some extra conditions
            "4s.yaml",
            # Test sequence with mapping with valid mapping
            "5s.yaml",
            # Test mapping with sequence with mapping and valid data
            "6s.yaml",
            # Test most of the implemented functions
            "7s.yaml",
            # This will test the unique constraint
            "8s.yaml",
            #
            "9s.yaml",
            #
            "10s.yaml",
            #
            "11s.yaml",
            # This tests number validation rule
            "12s.yaml",
            # This test the text validation rule
            "13s.yaml",
            # This test the any validation rule
            "14s.yaml",
            #
            "15s.yaml",
            #
            # TODO: Currently slightly broken
            # # "16s.yaml",
            # This test that a regex that will compile
            "17s.yaml",
            # Test that type can be set to 'None' and it will validate ok
            "18s.yaml",
            # Test that range validates with map
            "19s.yaml",
            # Test that range validates with seq
            "20s.yaml",
            # Test that 'seq' can use seq instead of 'sequence'
            "21s.yaml",
            # Test that 'map' can be used instead of 'mapping'
            "22s.yaml",
            # Test that 're' can be used instead of 'regex'
            "23s.yaml",
            # Test that 'req' can be used instead of 'required'
            "24s.yaml",
            # Test that there is no need to specify 'type: seq' or 'type: map'
            "25s.yaml",
            # Test that the different types of timestamps can be validated
            "26s.yaml",
            # Test that multiple sequence values is supported
            "27s.yaml",
            # Test that multiple sequence values with matching 'all' is supported
            "28s.yaml",
            # Test that multiple sequence values with matching '*' is supported
            "29s.yaml",
            # Test that multiple sequence values with nested data structures work
            "30s.yaml",
            # Test that multiple sequence vlaues with nested lists works
            "31s.yaml",
            # Test Complex tree with many different structures
            "32s.yaml",
            # Test float range
            "33s.yaml",
            # Test float range with negative boundary
            "34s.yaml",
            # Test keyword regex default matching-rule any
            "35s.yaml",
            # Test keyword regex declared matching-rule any
            "36s.yaml",
            # Test keyword regex declared matching-rule all
            "37s.yaml",
            # Test mixed keyword regex and normal keyword
            "38s.yaml",
        ]

        _fail_tests = [
            # Test sequence with defined string content type but data only has integers
            ("1f.yaml", SchemaError),
            # Test sequence with defined string content type but data only has booleans
            ("2f.yaml", SchemaError),
            # Test sequence with defined booleans but with one integer
            ("3f.yaml", SchemaError),
            # Test sequence with strings and and lenght on each string
            ("4f.yaml", SchemaError),
            # Test mapping that do not work
            ("5f.yaml", SchemaError),
            # Test sequence with mapping with missing required key
            ("6f.yaml", SchemaError),
            # Test mapping with sequence with mapping and invalid data
            ("7f.yaml", SchemaError),
            #
            ("8f.yaml", SchemaError),
            # TODO: The reverse unique do not currently work proper # This will test the unique constraint but should fail
            ("9f.yaml", SchemaError),
            # This tests number validation rule with wrong data
            ("10f.yaml", SchemaError),
            # This test the text validation rule with wrong data
            ("11f.yaml", SchemaError),
            # This test that typechecking works when value in map is None
            ("12f.yaml", SchemaError),
            # Test that range validates on 'map' raise correct error
            ("13f.yaml", SchemaError),
            # Test that range validates on 'seq' raise correct error
            ("14f.yaml", SchemaError),
            # Test timestamps that should throw errors
            ("15f.yaml", SchemaError),
            # Test multiple sequence values with wrong sub type and 'any' matching rule
            ("16f.yaml", SchemaError),
            # Test multiple sequence values with wrong sub type and 'all' matching rule
            ("17f.yaml", SchemaError),
            # Test multiple nested sequence values with error in level 2 with 'any' matching rule
            ("18f.yaml", SchemaError),
            # Test float range value out of range
            ("19f.yaml", SchemaError),
            # Test float range value out of range (min-ex)
            ("20f.yaml", SchemaError),
            # Test keyword regex using default matching-rule 'any'
            ("21f.yaml", SchemaError),
            # Test keyword regex using declared matching-rule 'any'
            ("22f.yaml", SchemaError),
            # Test keyword regex using declared matching-rule 'all'
            ("23f.yaml", SchemaError),
            # Test that True/False is not valid integers
            ("24f.yaml", SchemaError),
        ]

        # Add override magic to make it easier to test a specific file
        if "S" in os.environ:
            pass_tests = [os.environ["S"]]
            _fail_tests = []
        elif "F" in os.environ:
            pass_tests = []
            _fail_tests = [(os.environ["F"], SchemaError)]

        for passing_test_file in pass_tests:
            f = self.f(os.path.join("success", passing_test_file))
            with open(f, "r") as stream:
                yaml_data = yaml.load(stream)
                data = yaml_data["data"]
                schema = yaml_data["schema"]

            try:
                print("Running test files: {}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
                compare(c.validation_errors, [], prefix="No validation errors should exist...")
            except Exception as e:
                print("ERROR RUNNING FILES: {}".format(f))
                raise e

            # This serve as an extra schema validation that tests more complex structures then testrule.py do
            compare(c.root_rule.schema_str, schema, prefix="Parsed rules is not correct, something have changed... files : {}".format(f))

        for failing_test, exception_type in _fail_tests:
            f = self.f(os.path.join("fail", failing_test))
            with open(f, "r") as stream:
                yaml_data = yaml.load(stream)
                data = yaml_data["data"]
                schema = yaml_data["schema"]
                errors = yaml_data["errors"]

            try:
                print("Running test files: {}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
            except exception_type:
                pass  # OK
            else:
                # BUGFIX: three '{}' placeholders but only two format args --
                # this raised IndexError instead of the intended AssertionError.
                raise AssertionError("Exception {} not raised as expected... FILES: {} : {}".format(exception_type, failing_test, f))

            compare(sorted(c.validation_errors), sorted(errors), prefix="Wrong validation errors when parsing files : {}".format(f))
Ejemplo n.º 9
0
    def __init__(self, source_file=None, schema_files=None, source_data=None, schema_data=None, extensions=None, strict_rule_validation=False,
                 fix_ruby_style_regex=False, allow_assertions=False,):
        """
        :param extensions:
            List of paths to python files that should be imported and available via 'func' keywork.
            This list of extensions can be set manually or they should be provided by the `--extension`
            flag from the cli. This list should not contain files specified by the `extensions` list keyword
            that can be defined at the top level of the schema.
        """
        if schema_files is None:
            schema_files = []
        if extensions is None:
            extensions = []

        log.debug(u"source_file: %s", source_file)
        log.debug(u"schema_file: %s", schema_files)
        log.debug(u"source_data: %s", source_data)
        log.debug(u"schema_data: %s", schema_data)
        log.debug(u"extension files: %s", extensions)

        self.source = None
        self.schema = None
        self.validation_errors = None
        self.validation_errors_exceptions = None
        self.root_rule = None
        self.extensions = extensions
        self.errors = []
        self.strict_rule_validation = strict_rule_validation
        self.fix_ruby_style_regex = fix_ruby_style_regex
        self.allow_assertions = allow_assertions

        if source_file is not None:
            if not os.path.exists(source_file):
                raise CoreError(u"Provided source_file do not exists on disk: {0}".format(source_file))

            with open(source_file, "r") as stream:
                if source_file.endswith(".json"):
                    try:
                        self.source = json.load(stream)
                    except Exception:
                        raise CoreError(u"Unable to load any data from source json file")
                elif source_file.endswith(".yaml") or source_file.endswith('.yml'):
                    try:
                        self.source = yaml.load(stream)
                    except Exception:
                        raise CoreError(u"Unable to load any data from source yaml file")
                else:
                    raise CoreError(u"Unable to load source_file. Unknown file format of specified file path: {0}".format(source_file))

        if not isinstance(schema_files, list):
            raise CoreError(u"schema_files must be of list type")

        # Merge all schema files into one single file for easy parsing
        if len(schema_files) > 0:
            schema_data = {}
            for f in schema_files:
                if not os.path.exists(f):
                    raise CoreError(u"Provided source_file do not exists on disk : {0}".format(f))

                with open(f, "r") as stream:
                    if f.endswith(".json"):
                        try:
                            data = json.load(stream)
                        except Exception:
                            raise CoreError(u"No data loaded from file : {0}".format(f))
                    elif f.endswith(".yaml") or f.endswith(".yml"):
                        data = yaml.load(stream)
                        if not data:
                            raise CoreError(u"No data loaded from file : {0}".format(f))
                    else:
                        raise CoreError(u"Unable to load file : {0} : Unknown file format. Supported file endings is [.json, .yaml, .yml]")

                    for key in data.keys():
                        if key in schema_data.keys():
                            raise CoreError(u"Parsed key : {0} : two times in schema files...".format(key))

                    schema_data = dict(schema_data, **data)

            self.schema = schema_data

        # Nothing was loaded so try the source_data variable
        if self.source is None:
            log.debug(u"No source file loaded, trying source data variable")
            self.source = source_data
        if self.schema is None:
            log.debug(u"No schema file loaded, trying schema data variable")
            self.schema = schema_data

        # Test if anything was loaded
        if self.source is None:
            raise CoreError(u"No source file/data was loaded")
        if self.schema is None:
            raise CoreError(u"No schema file/data was loaded")

        # Merge any extensions defined in the schema with the provided list of extensions from the cli
        for f in self.schema.get('extensions', []):
            self.extensions.append(f)

        if not isinstance(self.extensions, list) and all(isinstance(e, str) for e in self.extensions):
            raise CoreError(u"Specified extensions must be a list of file paths")

        self._load_extensions()

        if self.strict_rule_validation:
            log.info("Using strict rule keywords validation...")