def test_file_no_permissions_to_write_to_file(self):
    """Writing the output to an unwritable path must raise IOError.

    Uses ``assertRaises`` so the test FAILS when no exception is
    raised; the previous try/except silently passed in that case.
    ``file`` was also renamed to avoid shadowing the builtin.
    """
    file_desc = {
        'path': MOCK_TEST_FILE,
        'match': '3.1.0-m2',
        'replace': '3.1.0-m2',
        'with': '3.1.0-m3',
        # destination at filesystem root - not writable for the test user
        'to_file': '/mock.test'
    }
    with self.assertRaises(IOError) as ctx:
        rpx.handle_file(file_desc, verbose=True)
    self.assertIn('Permission denied', str(ctx.exception))
def test_file_validation_failed(self):
    """Prevalidation must raise RepexError when the match is absent.

    Uses ``assertRaises`` so the test FAILS when no exception is
    raised; the previous try/except silently passed in that case.
    """
    file_desc = {
        'path': MOCK_TEST_FILE,
        # neither pattern exists in the file, so prevalidation fails
        'match': 'MISSING_MATCH',
        'replace': 'MISSING_PATTERN',
        'with': '',
        'to_file': MOCK_TEST_FILE + '.test',
        'validate_before': True
    }
    with self.assertRaises(RepexError) as ctx:
        handle_file(file_desc, verbose=True)
    self.assertEqual(str(ctx.exception), 'prevalidation failed')
def test_file_validation_failed(self):
    """Prevalidation failure must exit with the mapped error code.

    Uses ``assertRaises`` so the test FAILS when no exception is
    raised; the previous try/except silently passed in that case.
    """
    file_desc = {
        'path': MOCK_TEST_FILE,
        # neither pattern exists in the file, so prevalidation fails
        'match': 'MISSING_MATCH',
        'replace': 'MISSING_PATTERN',
        'with': '',
        'to_file': 'VERSION.test',
        'validate_before': True
    }
    with self.assertRaises(SystemExit) as ctx:
        rpx.handle_file(file_desc, verbose=True)
    self.assertEqual(
        str(codes.mapping['prevalidation_failed']), str(ctx.exception))
def test_file_validation_failed(self):
    """Prevalidation failure must exit with the mapped error code.

    Uses ``assertRaises`` so the test FAILS when no exception is
    raised; the previous try/except silently passed in that case.
    """
    file_desc = {
        'path': MOCK_TEST_FILE,
        # neither pattern exists in the file, so prevalidation fails
        'match': 'MISSING_MATCH',
        'replace': 'MISSING_PATTERN',
        'with': '',
        'to_file': 'VERSION.test',
        'validate_before': True
    }
    with self.assertRaises(SystemExit) as ctx:
        rpx.handle_file(file_desc, verbose=True)
    self.assertEqual(
        str(codes.mapping['prevalidation_failed']), str(ctx.exception))
def test_file_must_include_missing(self):
    """A missing `must_include` string must exit with the mapped code.

    Uses ``assertRaises`` so the test FAILS when no exception is
    raised; the previous try/except silently passed in that case.
    """
    file_desc = {
        'path': MOCK_TEST_FILE,
        'match': '3.1.0-m2',
        'replace': '3.1.0',
        'with': '',
        'to_file': 'VERSION.test',
        'validate_before': True,
        # this string is not in the file, so prevalidation must fail
        'must_include': ['MISSING_INCLUSION']
    }
    with self.assertRaises(SystemExit) as ctx:
        rpx.handle_file(file_desc, verbose=True)
    self.assertEqual(
        str(codes.mapping['prevalidation_failed']), str(ctx.exception))
def test_file_must_include_missing(self):
    """A missing `must_include` string must raise RepexError.

    Uses ``assertRaises`` so the test FAILS when no exception is
    raised; the previous try/except silently passed in that case.
    """
    file_desc = {
        'path': MOCK_TEST_FILE,
        'match': '3.1.0-m2',
        'replace': '3.1.0',
        'with': '',
        'to_file': MOCK_TEST_FILE + '.test',
        'validate_before': True,
        # this string is not in the file, so prevalidation must fail
        'must_include': ['MISSING_INCLUSION']
    }
    with self.assertRaises(RepexError) as ctx:
        handle_file(file_desc, verbose=True)
    self.assertEqual(str(ctx.exception), 'prevalidation failed')
def test_file_must_include_missing(self):
    """A missing `must_include` string must exit with the mapped code.

    Uses ``assertRaises`` so the test FAILS when no exception is
    raised; the previous try/except silently passed in that case.
    """
    file_desc = {
        'path': MOCK_TEST_FILE,
        'match': '3.1.0-m2',
        'replace': '3.1.0',
        'with': '',
        'to_file': 'VERSION.test',
        'validate_before': True,
        # this string is not in the file, so prevalidation must fail
        'must_include': ['MISSING_INCLUSION']
    }
    with self.assertRaises(SystemExit) as ctx:
        rpx.handle_file(file_desc, verbose=True)
    self.assertEqual(
        str(codes.mapping['prevalidation_failed']), str(ctx.exception))
def test_file_does_not_exist(self):
    """handle_file must return a falsy result for a nonexistent path."""
    missing_file = {
        'path': 'MISSING_FILE',
        'match': '3.1.0-m2',
        'replace': '3.1.0',
        'with': '',
        'validate_before': True
    }
    self.assertFalse(rpx.handle_file(missing_file, verbose=True))
def execute(plugins_version, core_version, configf, base_dir,
            prerelease=None, validate=True, verbose=False):
    """Expand version variables into every configured path via repex.

    Builds the per-file-type version strings (optionally restructured
    for a prerelease), validates them, merges them into the config's
    variables and runs repex's ``handle_file`` on each configured path.

    :param plugins_version: plugins version string (e.g. '3.1')
    :param core_version: core version string (e.g. '3.1' or '3.1.0')
    :param configf: path to the repex YAML config (may contain '~')
    :param base_dir: base directory relative paths are joined against
    :param prerelease: optional milestone tag (e.g. 'm4') to append
    :param validate: if True, run do_validate_files after each file
    :param verbose: propagate verbosity to the repex helpers
    :raises VCError: when no paths are configured, a single-file path
        is missing, or `type` is combined with a single-file path
    :raises RuntimeError: when the config's `variables` is not a dict
    """
    config = rpx.import_config(os.path.expanduser(configf))
    paths = config.get('paths')
    if not paths:
        raise VCError('no paths configured in config yaml')
    variables = config.get('variables', {})

    # if it's a prerelease, restructure the version pattern
    if prerelease:
        # VERSION-file form is X.Y.Z-<prerelease>; pad X.Y to X.Y.0
        version_version = '{0}-{1}'.format(
            core_version if core_version.count('.') == 2
            else core_version + '.0',
            prerelease)
        # python (PEP 440) versions use 'a' (alpha) where milestones use 'm'
        python_plugins_version = '{0}{1}'.format(
            plugins_version, prerelease).replace('m', 'a')
        python_core_version = '{0}{1}'.format(
            core_version, prerelease).replace('m', 'a')
        yaml_plugins_version = '{0}{1}'.format(plugins_version, prerelease)
        yaml_core_version = '{0}{1}'.format(core_version, prerelease)
    else:
        version_version = core_version if core_version.count('.') == 2 \
            else core_version + '.0'
        python_plugins_version = plugins_version
        python_core_version = core_version
        yaml_plugins_version = plugins_version
        yaml_core_version = core_version

    lgr.info('version_version:' + version_version)
    lgr.info('python_plugins_version:' + python_plugins_version)
    lgr.info('python_core_version:' + python_core_version)
    lgr.info('yaml_plugins_version:' + yaml_plugins_version)
    lgr.info('yaml_core_version:' + yaml_core_version)

    # validate that the versions are matching the allowed pattern
    v = ValidateVersions()
    v.validate_version_file_version(version_version)
    v.validate_python_version(python_plugins_version)
    v.validate_python_version(python_core_version)
    v.validate_yaml_version(yaml_plugins_version)
    v.validate_yaml_version(yaml_core_version)

    # create variables for the different types of files
    versions = {
        'version_version': version_version,
        'python_plugins_version': python_plugins_version,
        'python_core_version': python_core_version,
        'yaml_plugins_version': yaml_plugins_version,
        'yaml_core_version': yaml_core_version,
    }
    variables.update(versions)

    # hoisted out of the loop: `variables` never changes per path, and
    # `paths` is guaranteed non-empty by the check above, so the checks
    # still run exactly when they did before. isinstance replaces the
    # non-idiomatic `type(...) is not dict` test.
    variables = variables or {}
    if not isinstance(variables, dict):
        raise RuntimeError('variables must be of type dict')

    # the reason for using the handle_file method instead of handle_path is
    # that we want to be able to run the do_validate function on every file
    # after it is processed.
    for p in paths:
        var_expander = rpx.VarHandler(verbose)
        p = var_expander.expand(variables, p)
        p['base_directory'] = base_dir
        path_to_handle = os.path.join(p['base_directory'], p['path'])
        if not p.get('type'):
            # single-file entry: the file must exist
            if not os.path.isfile(path_to_handle):
                raise VCError('file not found: {0}'.format(path_to_handle))
            p['path'] = path_to_handle
            rpx.handle_file(p, variables, verbose=verbose)
            if validate:
                do_validate_files(os.path.basename(p['path']), p['path'])
        else:
            # filename-type entry: expand to every matching file
            if os.path.isfile(path_to_handle):
                raise VCError('if `type` is specified, `path` must not be a '
                              'path to a single file.')
            files = rpx.get_all_files(
                p['type'], p['path'], base_dir, p.get('excluded', []),
                verbose)
            for f in files:
                p['path'] = f
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(p['type'], f)
def execute(plugins_version, core_version, configf, base_dir,
            prerelease=None, validate=True, verbose=False):
    """Expand version variables into every configured path via repex.

    Builds the per-file-type version strings (optionally restructured
    for a prerelease), validates them, merges them into the config's
    variables and runs repex's ``handle_file`` on each configured path.

    :param plugins_version: plugins version string (e.g. '3.1')
    :param core_version: core version string (e.g. '3.1' or '3.1.0')
    :param configf: path to the repex YAML config (may contain '~')
    :param base_dir: base directory relative paths are joined against
    :param prerelease: optional milestone tag (e.g. 'm4') to append
    :param validate: if True, run do_validate_files after each file
    :param verbose: propagate verbosity to the repex helpers
    :raises VCError: when no paths are configured, a single-file path
        is missing, or `type` is combined with a single-file path
    :raises RuntimeError: when the config's `variables` is not a dict
    """
    config = rpx.import_config(os.path.expanduser(configf))
    paths = config.get('paths')
    if not paths:
        raise VCError('no paths configured in config yaml')
    variables = config.get('variables', {})

    # if it's a prerelease, restructure the version pattern
    if prerelease:
        # VERSION-file form is X.Y.Z-<prerelease>; pad X.Y to X.Y.0
        version_version = '{0}-{1}'.format(
            core_version if core_version.count('.') == 2
            else core_version + '.0',
            prerelease)
        # python (PEP 440) versions use 'a' (alpha) where milestones use 'm'
        python_plugins_version = '{0}{1}'.format(
            plugins_version, prerelease).replace('m', 'a')
        python_core_version = '{0}{1}'.format(
            core_version, prerelease).replace('m', 'a')
        yaml_plugins_version = '{0}{1}'.format(
            plugins_version, prerelease)
        yaml_core_version = '{0}{1}'.format(
            core_version, prerelease)
    else:
        version_version = core_version if core_version.count('.') == 2 \
            else core_version + '.0'
        python_plugins_version = plugins_version
        python_core_version = core_version
        yaml_plugins_version = plugins_version
        yaml_core_version = core_version

    lgr.info('version_version:' + version_version)
    lgr.info('python_plugins_version:' + python_plugins_version)
    lgr.info('python_core_version:' + python_core_version)
    lgr.info('yaml_plugins_version:' + yaml_plugins_version)
    lgr.info('yaml_core_version:' + yaml_core_version)

    # validate that the versions are matching the allowed pattern
    v = ValidateVersions()
    v.validate_version_file_version(version_version)
    v.validate_python_version(python_plugins_version)
    v.validate_python_version(python_core_version)
    v.validate_yaml_version(yaml_plugins_version)
    v.validate_yaml_version(yaml_core_version)

    # create variables for the different types of files
    versions = {
        'version_version': version_version,
        'python_plugins_version': python_plugins_version,
        'python_core_version': python_core_version,
        'yaml_plugins_version': yaml_plugins_version,
        'yaml_core_version': yaml_core_version,
    }
    variables.update(versions)

    # hoisted out of the loop: `variables` never changes per path, and
    # `paths` is guaranteed non-empty by the check above, so the checks
    # still run exactly when they did before. isinstance replaces the
    # non-idiomatic `type(...) is not dict` test.
    variables = variables or {}
    if not isinstance(variables, dict):
        raise RuntimeError('variables must be of type dict')

    # the reason for using the handle_file method instead of handle_path is
    # that we want to be able to run the do_validate function on every file
    # after it is processed.
    for p in paths:
        var_expander = rpx.VarHandler(verbose)
        p = var_expander.expand(variables, p)
        p['base_directory'] = base_dir
        path_to_handle = os.path.join(p['base_directory'], p['path'])
        if not p.get('type'):
            if os.path.isfile(path_to_handle):
                p['path'] = path_to_handle
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(os.path.basename(p['path']), p['path'])
            else:
                raise VCError('file not found: {0}'.format(path_to_handle))
        else:
            # filename-type entry: expand to every matching file
            if os.path.isfile(path_to_handle):
                raise VCError('if `type` is specified, `path` must not be a '
                              'path to a single file.')
            files = rpx.get_all_files(
                p['type'], p['path'], base_dir, p.get('excluded', []),
                verbose)
            for f in files:
                p['path'] = f
                rpx.handle_file(p, variables, verbose=verbose)
                if validate:
                    do_validate_files(p['type'], f)