Example #1
def test_get_all_regex_files_with_exclusion(self):
    mock_yaml_files = [os.path.join('single', 'mock_VERSION')]
    files = rpx.get_all_files(
        'mock.*', TEST_RESOURCES_DIR_PATTERN, TEST_RESOURCES_DIR,
        ['multiple'], True, '.*yaml')
    self.assertEqual(len(mock_yaml_files), len(files))
    for f in mock_yaml_files:
        self.assertIn(os.path.join(TEST_RESOURCES_DIR, f), files)
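A rough reading of the call above: the first argument is a file-name regex, followed by a directory regex, the base directory to walk, excluded path fragments, a verbose flag and, last, a regex of file names to exclude. The sketch below is a minimal, hypothetical reimplementation of that behavior for illustration only; the parameter names are assumptions and this is not repex's actual get_all_files.

import os
import re


def get_all_files(filename_regex, path_regex, base_dir,
                  excluded_paths=None, verbose=False,
                  exclude_filename_regex=None):
    # Hypothetical sketch: walk base_dir, keep files whose directory matches
    # path_regex and whose name matches filename_regex, skipping any path
    # containing an excluded fragment and any name matching the exclusion
    # regex. `verbose` is accepted only to mirror the calls above.
    excluded_paths = excluded_paths or []
    matches = []
    for root, _, filenames in os.walk(base_dir):
        if not re.search(path_regex, root):
            continue
        if any(excluded in root for excluded in excluded_paths):
            continue
        for filename in filenames:
            if not re.match(filename_regex, filename):
                continue
            if exclude_filename_regex and re.match(exclude_filename_regex,
                                                   filename):
                continue
            matches.append(os.path.join(root, filename))
    return matches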
Example #2
def test_get_all_files_with_dir_exclusion(self):
    files = rpx.get_all_files(
        TEST_FILE_NAME, TEST_RESOURCES_DIR_PATTERN, TEST_RESOURCES_DIR,
        self.multi_file_excluded_dirs)
    for version_file in self.version_files_without_excluded:
        self.assertIn(version_file, files)
    for f in self.excluded_files:
        self.assertNotIn(os.path.join(self.base_dir, f), files)
Example #3
def test_get_all_regex_files(self):
    mock_yaml_files = [f for f in os.listdir(TEST_RESOURCES_DIR)
                       if (f.startswith('mock') and f.endswith('yaml'))]
    files = rpx.get_all_files(
        r'mock.*\.yaml', TEST_RESOURCES_DIR_PATTERN, TEST_RESOURCES_DIR)
    self.assertEqual(len(mock_yaml_files), len(files))
    for f in mock_yaml_files:
        self.assertIn(os.path.join(TEST_RESOURCES_DIR, f), files)
Example #4
def test_get_all_files_with_dir_exclusion(self):
    files = rpx.get_all_files(TEST_FILE_NAME, TEST_RESOURCES_DIR_PATTERN,
                              TEST_RESOURCES_DIR,
                              self.multi_file_excluded_dirs)
    for version_file in self.version_files_without_excluded:
        self.assertIn(version_file, files)
    for f in self.excluded_files:
        self.assertNotIn(os.path.join(self.base_dir, f), files)
Example #5
def test_get_all_regex_files(self):
    mock_yaml_files = [
        f for f in os.listdir(TEST_RESOURCES_DIR)
        if (f.startswith('mock') and f.endswith('yaml'))
    ]
    files = rpx.get_all_files(r'mock.*\.yaml', TEST_RESOURCES_DIR_PATTERN,
                              TEST_RESOURCES_DIR)
    self.assertEqual(len(mock_yaml_files), len(files))
    for f in mock_yaml_files:
        self.assertIn(os.path.join(TEST_RESOURCES_DIR, f), files)
Example #6
def test_get_all_regex_files_with_exclusion(self):
    mock_yaml_files = [os.path.join('single', 'mock_VERSION')]
    files = rpx.get_all_files(
        'mock.*',
        TEST_RESOURCES_DIR_PATTERN,
        TEST_RESOURCES_DIR,
        ['multiple'],
        True,
        '.*yaml',
    )
    self.assertEqual(len(mock_yaml_files), len(files))
    for f in mock_yaml_files:
        self.assertIn(os.path.join(TEST_RESOURCES_DIR, f), files)
Example #7
def test_iterate_multiple_files(self):
    v = {
        'preversion': '3.1.0-m2',
        'version': '3.1.0-m3'
    }
    iterate(MOCK_CONFIG_MULTIPLE_FILES, v)
    files = get_all_files(
        'mock_VERSION', TEST_RESOURCES_DIR_PATTERN, TEST_RESOURCES_DIR)
    for fl in files:
        with open(fl) as f:
            self.assertIn('3.1.0-m3', f.read())
    v['preversion'] = '3.1.0-m3'
    v['version'] = '3.1.0-m2'
    iterate(MOCK_CONFIG_MULTIPLE_FILES, v)
    for fl in files:
        with open(fl) as f:
            self.assertIn('3.1.0-m2', f.read())
Example #8
def execute(plugins_version,
            core_version,
            configf,
            base_dir,
            prerelease=None,
            validate=True,
            verbose=False):
    config = rpx.import_config(os.path.expanduser(configf))
    paths = config.get('paths')
    if not paths:
        raise VCError('no paths configured in config yaml')
    variables = config.get('variables', {})

    # if it's a prerelease, restructure the version pattern
    if prerelease:
        version_version = '{0}-{1}'.format(
            core_version if core_version.count('.') == 2 else core_version +
            '.0', prerelease)
        python_plugins_version = '{0}{1}'.format(plugins_version,
                                                 prerelease).replace('m', 'a')
        python_core_version = '{0}{1}'.format(core_version,
                                              prerelease).replace('m', 'a')
        yaml_plugins_version = '{0}{1}'.format(plugins_version, prerelease)
        yaml_core_version = '{0}{1}'.format(core_version, prerelease)
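        # Worked example (values assumed for illustration): with
        # core_version='3.1', plugins_version='1.2' and prerelease='m3',
        # version_version becomes '3.1.0-m3', python_core_version '3.1a3',
        # python_plugins_version '1.2a3', yaml_core_version '3.1m3' and
        # yaml_plugins_version '1.2m3'.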
    else:
        version_version = core_version if core_version.count('.') == 2 \
            else core_version + '.0'
        python_plugins_version = plugins_version
        python_core_version = core_version
        yaml_plugins_version = plugins_version
        yaml_core_version = core_version

    lgr.info('version_version:' + version_version)
    lgr.info('python_plugins_version:' + python_plugins_version)
    lgr.info('python_core_version:' + python_core_version)
    lgr.info('yaml_plugins_version:' + yaml_plugins_version)
    lgr.info('yaml_core_version:' + yaml_core_version)

    # validate that the versions are matching the allowed pattern
    v = ValidateVersions()
    v.validate_version_file_version(version_version)
    v.validate_python_version(python_plugins_version)
    v.validate_python_version(python_core_version)
    v.validate_yaml_version(yaml_plugins_version)
    v.validate_yaml_version(yaml_core_version)

    versions = {}

    # create variables for the different types of files
    versions['version_version'] = version_version
    versions['python_plugins_version'] = python_plugins_version
    versions['python_core_version'] = python_core_version
    versions['yaml_plugins_version'] = yaml_plugins_version
    versions['yaml_core_version'] = yaml_core_version
    variables.update(versions)

    # the reason for using the handle_file method instead of handle_path is
    # that we want to be able to run the do_validate function on every file
    # after it is processed.
    for p in paths:
        variables = variables if variables else {}
        if type(variables) is not dict:
            raise RuntimeError('variables must be of type dict')
        var_expander = rpx.VarHandler(verbose)
        p = var_expander.expand(variables, p)

        p['base_directory'] = base_dir
        path_to_handle = os.path.join(p['base_directory'], p['path'])
        if not p.get('type'):
            if os.path.isfile(path_to_handle):
                p['path'] = path_to_handle
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(os.path.basename(p['path']), p['path'])
            else:
                raise VCError('file not found: {0}'.format(path_to_handle))
        else:
            if os.path.isfile(path_to_handle):
                raise VCError('if `type` is specified, `path` must not be a '
                              'path to a single file.')
            files = rpx.get_all_files(p['type'], p['path'], base_dir,
                                      p.get('excluded', []), verbose)
            for f in files:
                p['path'] = f
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(p['type'], f)
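
A usage sketch with hypothetical file names and versions; the config layout is inferred from how execute reads it above (a 'paths' list whose entries carry a 'path' plus optional 'type' and 'excluded', and an optional 'variables' mapping):

# ~/version-config.yaml (hypothetical):
#   variables:
#     some_var: some_value
#   paths:
#     - path: setup.py
#     - type: VERSION
#       path: resources
#       excluded:
#         - resources/legacy

execute(plugins_version='1.2',
        core_version='3.1.0',
        configf='~/version-config.yaml',
        base_dir='.',
        prerelease='m3',
        validate=True,
        verbose=False)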
Example #9
def test_get_all_files_no_exclusion(self):
    files = rpx.get_all_files(
        TEST_FILE_NAME, TEST_RESOURCES_DIR_PATTERN, TEST_RESOURCES_DIR)
    for version_file in self.version_files:
        self.assertIn(version_file, files)
Example #10
def test_get_all_files_no_exclusion(self):
    files = rpx.get_all_files(TEST_FILE_NAME, TEST_RESOURCES_DIR_PATTERN,
                              TEST_RESOURCES_DIR)
    for version_file in self.version_files:
        self.assertIn(version_file, files)
Example #11
def execute(plugins_version, core_version,
            configf, base_dir, prerelease=None,
            validate=True, verbose=False):
    config = rpx.import_config(os.path.expanduser(configf))
    paths = config.get('paths')
    if not paths:
        raise VCError('no paths configured in config yaml')
    variables = config.get('variables', {})

    # if it's a prerelease, restructure the version pattern
    if prerelease:
        version_version = '{0}-{1}'.format(
            core_version if core_version.count('.') == 2
            else core_version + '.0', prerelease)
        python_plugins_version = '{0}{1}'.format(
            plugins_version, prerelease).replace('m', 'a')
        python_core_version = '{0}{1}'.format(
            core_version, prerelease).replace('m', 'a')
        yaml_plugins_version = '{0}{1}'.format(
            plugins_version, prerelease)
        yaml_core_version = '{0}{1}'.format(
            core_version, prerelease)
    else:
        version_version = core_version if core_version.count('.') == 2 \
            else core_version + '.0'
        python_plugins_version = plugins_version
        python_core_version = core_version
        yaml_plugins_version = plugins_version
        yaml_core_version = core_version

    lgr.info('version_version:' + version_version)
    lgr.info('python_plugins_version:' + python_plugins_version)
    lgr.info('python_core_version:' + python_core_version)
    lgr.info('yaml_plugins_version:' + yaml_plugins_version)
    lgr.info('yaml_core_version:' + yaml_core_version)

    # validate that the versions are matching the allowed pattern
    v = ValidateVersions()
    v.validate_version_file_version(version_version)
    v.validate_python_version(python_plugins_version)
    v.validate_python_version(python_core_version)
    v.validate_yaml_version(yaml_plugins_version)
    v.validate_yaml_version(yaml_core_version)

    versions = {}

    # create variables for the different types of files
    versions['version_version'] = version_version
    versions['python_plugins_version'] = python_plugins_version
    versions['python_core_version'] = python_core_version
    versions['yaml_plugins_version'] = yaml_plugins_version
    versions['yaml_core_version'] = yaml_core_version
    variables.update(versions)

    # the reason for using the handle_file method instead of handle_path is
    # that we want to be able to run the do_validate function on every file
    # after it is processed.
    for p in paths:
        variables = variables if variables else {}
        if type(variables) is not dict:
            raise RuntimeError('variables must be of type dict')
        var_expander = rpx.VarHandler(verbose)
        p = var_expander.expand(variables, p)

        p['base_directory'] = base_dir
        path_to_handle = os.path.join(p['base_directory'], p['path'])
        if not p.get('type'):
            if os.path.isfile(path_to_handle):
                p['path'] = path_to_handle
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(os.path.basename(p['path']), p['path'])
            else:
                raise VCError('file not found: {0}'.format(path_to_handle))
        else:
            if os.path.isfile(path_to_handle):
                raise VCError('if `type` is specified, `path` must not be a '
                              'path to a single file.')
            files = rpx.get_all_files(
                p['type'], p['path'], base_dir, p.get('excluded', []), verbose)
            for f in files:
                p['path'] = f
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(p['type'], f)