예제 #1
0
 def setUp(self):
     """Import the single-file and multi-file mock repex configs."""
     super(TestSingleFile, self).setUp()
     single_cfg = rpx.import_config(MOCK_SINGLE_FILE)
     multi_cfg = rpx.import_config(MOCK_MULTIPLE_FILES)
     self.single_file_config = single_cfg
     self.single_file_output_file = single_cfg['paths'][0]['to_file']
     self.multi_file_config = multi_cfg
     self.multi_file_excluded_dirs = multi_cfg['paths'][0]['excluded']
예제 #2
0
 def setUp(self):
     """Load the mock single-file and multi-file repex configs."""
     super(TestSingleFile, self).setUp()
     self.single_file_config = rpx.import_config(MOCK_SINGLE_FILE)
     # 'to_file' — presumably the output path the replacement writes to;
     # verify against the mock config fixture.
     self.single_file_output_file = \
         self.single_file_config['paths'][0]['to_file']
     self.multi_file_config = rpx.import_config(MOCK_MULTIPLE_FILES)
     # 'excluded' — the directory list the multi-file run should skip.
     self.multi_file_excluded_dirs = \
         self.multi_file_config['paths'][0]['excluded']
예제 #3
0
    def setUp(self):
        """Build the version-file fixtures for the get_all_files tests.

        Loads the multi-file mock config, then collects every
        ``mock_VERSION`` file under MULTIPLE_DIR, split into the files
        that survive exclusion and the excluded ones.
        """
        super(TestGetAllFiles, self).setUp()
        self.multi_file_config = rpx.import_config(MOCK_MULTIPLE_FILES)
        self.multi_file_excluded_dirs = \
            self.multi_file_config['paths'][0]['excluded']
        self.base_dir = self.multi_file_config['paths'][0]['base_directory']

        # Collect every mock_VERSION file in the tree in one pass.
        self.version_files = [
            os.path.join(root, f)
            for root, _, files in os.walk(MULTIPLE_DIR)
            for f in files
            if f == 'mock_VERSION'
        ]
        self.version_files_without_excluded = \
            [f for f in self.version_files if f != EXCLUDED_FILE]
        # The original also pre-assigned self.excluded_files from
        # TEST_FILE_NAME, but that value was unconditionally overwritten
        # here, so the dead assignment has been removed.
        self.excluded_files = [f for f in self.version_files if f not
                               in self.version_files_without_excluded]
예제 #4
0
    def setUp(self):
        """Prepare excluded/included version-file fixtures for the tests."""
        super(TestGetAllFiles, self).setUp()
        config = rpx.import_config(MOCK_MULTIPLE_FILES)
        self.multi_file_config = config
        self.multi_file_excluded_dirs = config['paths'][0]['excluded']
        self.base_dir = config['paths'][0]['base_directory']

        # Gather every mock_VERSION file below MULTIPLE_DIR.
        self.version_files = []
        for root, _, names in os.walk(MULTIPLE_DIR):
            self.version_files.extend(
                os.path.join(root, name)
                for name in names if name == 'mock_VERSION')
        self.version_files_without_excluded = [
            f for f in self.version_files if f != EXCLUDED_FILE]
        # The earlier TEST_FILE_NAME-based assignment to
        # self.excluded_files was dead code (immediately overwritten by
        # this one), so it has been dropped.
        self.excluded_files = [
            f for f in self.version_files
            if f not in self.version_files_without_excluded
        ]
예제 #5
0
def _compute_versions(plugins_version, core_version, prerelease):
    """Derive the five per-file-type version strings as a dict.

    The VERSION-file version is normalized to three dot-separated parts
    ('x.y' -> 'x.y.0'). With a prerelease tag, python-style versions
    replace 'm' with 'a' while yaml-style versions keep the raw tag.
    """
    three_part_core = core_version if core_version.count('.') == 2 \
        else core_version + '.0'
    if prerelease:
        return {
            'version_version': '{0}-{1}'.format(three_part_core, prerelease),
            'python_plugins_version': '{0}{1}'.format(
                plugins_version, prerelease).replace('m', 'a'),
            'python_core_version': '{0}{1}'.format(
                core_version, prerelease).replace('m', 'a'),
            'yaml_plugins_version': '{0}{1}'.format(
                plugins_version, prerelease),
            'yaml_core_version': '{0}{1}'.format(core_version, prerelease),
        }
    return {
        'version_version': three_part_core,
        'python_plugins_version': plugins_version,
        'python_core_version': core_version,
        'yaml_plugins_version': plugins_version,
        'yaml_core_version': core_version,
    }


def execute(plugins_version,
            core_version,
            configf,
            base_dir,
            prerelease=None,
            validate=True,
            verbose=False):
    """Render version strings into every file configured in the repex YAML.

    :param plugins_version: plugins version string to inject.
    :param core_version: core version string; '.0' is appended when it
        contains only a single dot.
    :param configf: path to the repex YAML config ('~' is expanded).
    :param base_dir: base directory joined with each configured `path`.
    :param prerelease: optional prerelease tag; when set, python-style
        versions have 'm' replaced with 'a'.
    :param validate: when True, run do_validate_files on each handled file.
    :param verbose: forwarded to repex for verbose output.
    :raises VCError: if no paths are configured, a single file is missing,
        or `type` is combined with a path to a single file.
    :raises RuntimeError: if `variables` is not a dict.
    """
    config = rpx.import_config(os.path.expanduser(configf))
    paths = config.get('paths')
    if not paths:
        raise VCError('no paths configured in config yaml')
    variables = config.get('variables', {})

    versions = _compute_versions(plugins_version, core_version, prerelease)

    for key in ('version_version', 'python_plugins_version',
                'python_core_version', 'yaml_plugins_version',
                'yaml_core_version'):
        lgr.info(key + ':' + versions[key])

    # validate that the versions are matching the allowed pattern
    v = ValidateVersions()
    v.validate_version_file_version(versions['version_version'])
    v.validate_python_version(versions['python_plugins_version'])
    v.validate_python_version(versions['python_core_version'])
    v.validate_yaml_version(versions['yaml_plugins_version'])
    v.validate_yaml_version(versions['yaml_core_version'])

    # expose the versions as variables for the different types of files
    variables.update(versions)

    # This check is loop-invariant, so it is performed once here instead
    # of on every iteration as the original did.
    if not variables:
        variables = {}
    if not isinstance(variables, dict):
        raise RuntimeError('variables must be of type dict')

    # the reason for using the handle_file method instead of handle_path is
    # that we want to be able to run the do_validate function on every file
    # after it is processed.
    for p in paths:
        var_expander = rpx.VarHandler(verbose)
        p = var_expander.expand(variables, p)

        p['base_directory'] = base_dir
        path_to_handle = os.path.join(p['base_directory'], p['path'])
        if not p.get('type'):
            # No `type`: `path` must point at a single existing file.
            if not os.path.isfile(path_to_handle):
                raise VCError('file not found: {0}'.format(path_to_handle))
            p['path'] = path_to_handle
            rpx.handle_file(p, variables, verbose=verbose)
            if validate:
                do_validate_files(os.path.basename(p['path']), p['path'])
        else:
            if os.path.isfile(path_to_handle):
                raise VCError('if `type` is specified, `path` must not be a '
                              'path to a single file.')
            files = rpx.get_all_files(p['type'], p['path'], base_dir,
                                      p.get('excluded', []), verbose)
            # p is reused (mutated) for each matched file.
            for f in files:
                p['path'] = f
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(p['type'], f)
예제 #6
0
 def setUp(self):
     """Load the single-file mock config and remember its output file."""
     super(TestValidator, self).setUp()
     cfg = rpx.import_config(MOCK_SINGLE_FILE)
     self.single_file_config = cfg
     self.single_file_output_file = cfg['paths'][0]['to_file']
예제 #7
0
 def test_import_config_file(self):
     """import_config should return a dict containing a 'paths' key."""
     outcome = rpx.import_config(MOCK_SINGLE_FILE)
     # assertIsInstance replaces the deprecated assertEquals/type() check;
     # `in outcome` replaces the redundant .keys() call.
     self.assertIsInstance(outcome, dict)
     self.assertIn('paths', outcome)
예제 #8
0
File: test_repex.py  Project: nir0s/repex
 def test_import_bad_config_file(self):
     """A malformed YAML config should surface a mapping error."""
     try:
         import_config(BAD_CONFIG_FILE)
     except Exception as ex:
         self.assertIn('mapping values are not allowed here', str(ex))
     else:
         # The original passed vacuously when no exception was raised.
         self.fail('expected import_config to raise on a bad config file')
예제 #9
0
File: test_repex.py  Project: nir0s/repex
 def test_fail_import_config_file(self):
     """An empty config path should raise 'cannot access config file'."""
     try:
         import_config('')
     except RuntimeError as ex:
         # assertEqual replaces the deprecated assertEquals alias.
         self.assertEqual(str(ex), 'cannot access config file')
     else:
         # The original passed vacuously when no exception was raised.
         self.fail('expected RuntimeError for an inaccessible config file')
예제 #10
0
 def setUp(self):
     """Load the single-file mock config used by the validator tests."""
     super(TestValidator, self).setUp()
     self.single_file_config = rpx.import_config(MOCK_SINGLE_FILE)
     # 'to_file' — presumably the output path the validator tests check;
     # verify against the mock config fixture.
     self.single_file_output_file = \
         self.single_file_config['paths'][0]['to_file']
예제 #11
0
 def test_import_config_file(self):
     """import_config should return a dict containing a 'paths' key."""
     outcome = rpx.import_config(MOCK_SINGLE_FILE)
     # assertIsInstance replaces the deprecated assertEquals/type() check;
     # `in outcome` replaces the redundant .keys() call.
     self.assertIsInstance(outcome, dict)
     self.assertIn('paths', outcome)
예제 #12
0
def execute(plugins_version, core_version,
            configf, base_dir, prerelease=None,
            validate=True, verbose=False):
    """Render version strings into every file configured in the repex YAML.

    :param plugins_version: plugins version string to inject.
    :param core_version: core version string; '.0' is appended when it
        contains only a single dot.
    :param configf: path to the repex YAML config ('~' is expanded).
    :param base_dir: base directory joined with each configured `path`.
    :param prerelease: optional prerelease tag; when set, python-style
        versions have 'm' replaced with 'a'.
    :param validate: when True, run do_validate_files on each handled file.
    :param verbose: forwarded to repex for verbose output.
    :raises VCError: if no paths are configured, a single file is missing,
        or `type` is combined with a path to a single file.
    :raises RuntimeError: if `variables` is not a dict.
    """
    config = rpx.import_config(os.path.expanduser(configf))
    paths = config.get('paths')
    if not paths:
        raise VCError('no paths configured in config yaml')
    variables = config.get('variables', {})

    # if it's a prerelease, restructure the version pattern
    if prerelease:
        # VERSION-file version is normalized to three dot-separated
        # parts before the prerelease tag is appended with a dash.
        version_version = '{0}-{1}'.format(
            core_version if core_version.count('.') == 2
            else core_version + '.0', prerelease)
        # python versions: the 'm' tag presumably maps to a PEP 440
        # alpha ('a') -- TODO confirm against the release scheme.
        python_plugins_version = '{0}{1}'.format(
            plugins_version, prerelease).replace('m', 'a')
        python_core_version = '{0}{1}'.format(
            core_version, prerelease).replace('m', 'a')
        # yaml versions keep the raw prerelease tag.
        yaml_plugins_version = '{0}{1}'.format(
            plugins_version, prerelease)
        yaml_core_version = '{0}{1}'.format(
            core_version, prerelease)
    else:
        version_version = core_version if core_version.count('.') == 2 \
            else core_version + '.0'
        python_plugins_version = plugins_version
        python_core_version = core_version
        yaml_plugins_version = plugins_version
        yaml_core_version = core_version

    lgr.info('version_version:' + version_version)
    lgr.info('python_plugins_version:' + python_plugins_version)
    lgr.info('python_core_version:' + python_core_version)
    lgr.info('yaml_plugins_version:' + yaml_plugins_version)
    lgr.info('yaml_core_version:' + yaml_core_version)

    # validate that the versions are matching the allowed pattern
    v = ValidateVersions()
    v.validate_version_file_version(version_version)
    v.validate_python_version(python_plugins_version)
    v.validate_python_version(python_core_version)
    v.validate_yaml_version(yaml_plugins_version)
    v.validate_yaml_version(yaml_core_version)

    versions = {}

    # create variables for the different types of files
    versions['version_version'] = version_version
    versions['python_plugins_version'] = python_plugins_version
    versions['python_core_version'] = python_core_version
    versions['yaml_plugins_version'] = yaml_plugins_version
    versions['yaml_core_version'] = yaml_core_version
    variables.update(versions)

    # the reason for using the handle_file method instead of handle_path is
    # that we want to be able to run the do_validate function on every file
    # after it is processed.
    for p in paths:
        # NOTE(review): this normalization and type check are
        # loop-invariant and could be hoisted above the loop.
        variables = variables if variables else {}
        if type(variables) is not dict:
            raise RuntimeError('variables must be of type dict')
        var_expander = rpx.VarHandler(verbose)
        p = var_expander.expand(variables, p)

        p['base_directory'] = base_dir
        path_to_handle = os.path.join(p['base_directory'], p['path'])
        if not p.get('type'):
            # No `type`: `path` must point at a single existing file.
            if os.path.isfile(path_to_handle):
                p['path'] = path_to_handle
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(os.path.basename(p['path']), p['path'])
            else:
                raise VCError('file not found: {0}'.format(path_to_handle))
        else:
            if os.path.isfile(path_to_handle):
                raise VCError('if `type` is specified, `path` must not be a '
                              'path to a single file.')
            files = rpx.get_all_files(
                p['type'], p['path'], base_dir, p.get('excluded', []), verbose)
            # p is reused (mutated) for each matched file.
            for f in files:
                p['path'] = f
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(p['type'], f)