def main(log_printer=None, section: Section = None):
    """
    Delete all ``.orig`` backup files found under the coala config directory.

    :param log_printer: LogPrinter to use for output; a console-backed one
                        is created when ``None`` is given.
    :param section:     Section used to locate the configuration directory.
    :return:            255 when no config directory (``.coafile``) could be
                        found, 0 otherwise.
    """
    start_path = get_config_directory(section)
    log_printer = log_printer or LogPrinter(ConsolePrinter())

    if start_path is None:
        log_printer.err("Can only delete .orig files if .coafile is found")
        return 255

    orig_files = glob(os.path.abspath(
        os.path.join(start_path, '**', '*.orig')))

    not_deleted = 0
    for ofile in orig_files:
        log_printer.info("Deleting old backup file... "
                         + os.path.relpath(ofile))
        try:
            os.remove(ofile)
        # Narrowed from a bare ``except:``: only removal failures
        # (permissions, races) should be counted, not e.g. KeyboardInterrupt.
        except OSError:
            not_deleted += 1
            log_printer.warn("Couldn't delete... " + os.path.relpath(ofile))

    if not_deleted:
        # Bugfix: ``not_deleted`` is an int; the original ``int + str``
        # concatenation raised TypeError whenever any deletion failed.
        log_printer.warn(str(not_deleted) +
                         " .orig backup files could not be"
                         " deleted, possibly because you lack the permission"
                         " to do so. coala may not be able to create"
                         " backup files when patches are applied.")
    return 0
def main(log_printer=None, section: Section = None):
    """
    Delete all ``.orig`` backup files found under the coala config directory.

    :param log_printer: LogPrinter to use for output; a console-backed one
                        is created when ``None`` is given.
    :param section:     Section used to locate the configuration directory.
    :return:            255 when no config directory (``.coafile``) could be
                        found, 0 otherwise.
    """
    start_path = get_config_directory(section)
    log_printer = log_printer or LogPrinter(ConsolePrinter())

    if start_path is None:
        log_printer.err("Can only delete .orig files if .coafile is found")
        return 255

    orig_files = glob(os.path.abspath(
        os.path.join(start_path, '**', '*.orig')))

    not_deleted = 0
    for ofile in orig_files:
        log_printer.info("Deleting old backup file... "
                         + os.path.relpath(ofile))
        try:
            os.remove(ofile)
        # Narrowed from a bare ``except:``: only removal failures
        # (permissions, races) should be counted, not e.g. KeyboardInterrupt.
        except OSError:
            not_deleted += 1
            log_printer.warn("Couldn't delete... " + os.path.relpath(ofile))

    if not_deleted:
        # Bugfix: ``not_deleted`` is an int; the original ``int + str``
        # concatenation raised TypeError whenever any deletion failed.
        log_printer.warn(str(not_deleted) +
                         " .orig backup files could not be"
                         " deleted, possibly because you lack the permission"
                         " to do so. coala may not be able to create"
                         " backup files when patches are applied.")
    return 0
def test_no_dirname(self):
    """Glob a bare '*Dir?' pattern against os.curdir and compare matches."""
    saved_curdir = os.curdir
    os.curdir = TestFiles.glob_test_dir
    actual = sorted(os.path.normcase(os.path.join(os.curdir, match))
                    for match in glob("*Dir?"))
    expected = sorted(os.path.normcase(name)
                      for name in [TestFiles.dir1, TestFiles.dir2])
    self.assertEqual(actual, expected)
    os.curdir = saved_curdir
def test_no_dirname(self):
    """Check that globbing '*Dir?' relative to os.curdir finds both dirs."""
    previous_curdir = os.curdir
    os.curdir = TestFiles.glob_test_dir
    matches = glob('*Dir?')
    results = sorted(os.path.normcase(os.path.join(os.curdir, m))
                     for m in matches)
    expected = sorted(os.path.normcase(d)
                      for d in (TestFiles.dir1, TestFiles.dir2))
    self.assertEqual(results, expected)
    os.curdir = previous_curdir
def test_no_dirname_recursive(self):
    """Recursive '**' glob from os.curdir should list every fixture entry."""
    saved = os.curdir
    os.curdir = TestFiles.glob_test_dir
    expected_entries = (TestFiles.file1, TestFiles.file2, TestFiles.file3,
                        TestFiles.file11, TestFiles.file12,
                        TestFiles.file_paren, TestFiles.file_brack,
                        TestFiles.dir1, TestFiles.dir2)
    found = sorted(os.path.normcase(os.path.join(os.curdir, entry))
                   for entry in glob('**'))
    self.assertEqual(found,
                     sorted(os.path.normcase(e) for e in expected_entries))
    os.curdir = saved
def retrieve_files(file_globs, directory):
    """
    Return the filenames matched by the given file globs inside *directory*.

    Directories that happen to match a glob are filtered out; only plain
    files are returned.

    :param file_globs: Iterable of glob patterns, resolved relative to
                       *directory*.
    :param directory:  Directory to resolve the patterns in.
    :return:           List of matched file paths, relative to *directory*.
    """
    matches = []
    cwd = os.getcwd()
    os.chdir(directory)
    try:
        for pattern in file_globs:
            matches += glob(pattern)
        # Globs may match directories too; keep only regular files.
        return [match for match in matches if not os.path.isdir(match)]
    finally:
        # Bugfix: always restore the working directory, even when glob()
        # raises — the original leaked the chdir on any exception.
        os.chdir(cwd)
def test_no_dirname_recursive(self):
    """'**' glob results, minus __pycache__/.pyc noise, match the fixtures."""
    saved = os.curdir
    os.curdir = TestFiles.glob_test_dir
    expected = sorted(os.path.normcase(f) for f in (
        TestFiles.file1, TestFiles.file2, TestFiles.file3,
        TestFiles.file11, TestFiles.file12, TestFiles.file_paren,
        TestFiles.file_brack, TestFiles.dir1, TestFiles.dir2))
    artifact = re.compile(r"(__pycache__|\.pyc)")
    found = sorted(os.path.normcase(os.path.join(os.curdir, g))
                   for g in glob('**'))
    self.assertEqual([entry for entry in found
                      if artifact.search(entry) is None],
                     expected)
    os.curdir = saved
def test_no_dirname_recursive(self):
    """Recursive glob of '**' inside the test dir yields all fixture paths."""
    original_curdir = os.curdir
    os.curdir = TestFiles.glob_test_dir
    wanted = [TestFiles.file1, TestFiles.file2, TestFiles.file3,
              TestFiles.file11, TestFiles.file12, TestFiles.file_paren,
              TestFiles.file_brack, TestFiles.dir1, TestFiles.dir2]
    normalized_wanted = sorted(map(os.path.normcase, wanted))
    globbed = [os.path.normcase(os.path.join(os.curdir, hit))
               for hit in glob('**')]
    self.assertEqual(sorted(globbed), normalized_wanted)
    os.curdir = original_curdir
def test_no_dirname_recursive(self):
    """'**' glob matches all fixtures once __pycache__ entries are dropped."""
    saved_curdir = os.curdir
    os.curdir = TestFiles.glob_test_dir
    fixtures = [TestFiles.file1, TestFiles.file2, TestFiles.file3,
                TestFiles.file11, TestFiles.file12, TestFiles.file_paren,
                TestFiles.file_brack, TestFiles.dir1, TestFiles.dir2]
    hits = sorted(os.path.normcase(os.path.join(os.curdir, hit))
                  for hit in glob("**"))
    cleaned = [hit for hit in hits if "__pycache__" not in hit]
    self.assertEqual(cleaned,
                     sorted(os.path.normcase(f) for f in fixtures))
    os.curdir = saved_curdir
def test_no_dirname_recursive(self):
    """Compare '**' glob output (without bytecode artifacts) to fixtures."""
    prior = os.curdir
    os.curdir = TestFiles.glob_test_dir
    names = [TestFiles.file1, TestFiles.file2, TestFiles.file3,
             TestFiles.file11, TestFiles.file12, TestFiles.file_paren,
             TestFiles.file_brack, TestFiles.dir1, TestFiles.dir2]
    observed = sorted(os.path.normcase(os.path.join(os.curdir, entry))
                      for entry in glob('**'))
    filtered = [entry for entry in observed
                if not re.search(r"(__pycache__|\.pyc)", entry)]
    self.assertEqual(filtered, sorted(os.path.normcase(n) for n in names))
    os.curdir = prior
def _test_glob(self, pattern, file_list):
    """Assert that globbing *pattern* yields exactly *file_list* (normcased)."""
    expected = sorted(map(os.path.normcase, file_list))
    actual = sorted(map(os.path.normcase, glob(pattern)))
    self.assertEqual(actual, expected)
def _test_glob(self, pattern, file_list):
    """Check glob(pattern) against *file_list*, ignoring __pycache__ hits."""
    matched = sorted(os.path.normcase(hit) for hit in glob(pattern))
    relevant = [hit for hit in matched if "__pycache__" not in hit]
    self.assertEqual(relevant,
                     sorted(os.path.normcase(f) for f in file_list))
def _test_glob(self, pattern, file_list):
    """Check glob(pattern) against *file_list*, skipping __pycache__/.pyc."""
    results = sorted(map(os.path.normcase, glob(pattern)))
    expected = sorted(map(os.path.normcase, file_list))
    junk = re.compile(r"(__pycache__|\.pyc)")
    self.assertEqual([r for r in results if junk.search(r) is None],
                     expected)
def _test_glob(self, pattern, file_list):
    """Glob *pattern* and compare to *file_list*, dropping bytecode noise."""
    kept = []
    for match in sorted(os.path.normcase(m) for m in glob(pattern)):
        if re.search(r"(__pycache__|\.pyc)", match) is None:
            kept.append(match)
    self.assertEqual(kept, sorted(os.path.normcase(f) for f in file_list))
def main():
    """
    Build a PyPI package skeleton under bears/upload for every bear found
    beneath bears/, then optionally register and/or upload each package
    according to the parsed command-line arguments.
    """
    args = create_upload_parser().parse_args()

    # Operate relative to the repository root (one level above this script).
    os.chdir(os.path.join(os.path.dirname(__file__), '..'))
    upload_root = os.path.join('bears', 'upload')
    os.makedirs(upload_root, exist_ok=True)

    # Time-stamp the version so every run produces a unique dist version.
    timestamp = str(int(time.time()))
    bear_version = VERSION
    if 'dev' in bear_version:
        bear_version = bear_version[:bear_version.find('dev')] + timestamp
    else:
        bear_version = repr(bear_version) + '.' + timestamp

    for bear_path in sorted(set(glob('bears/**/*Bear.py'))):
        bear_object = next(
            iimport_objects(bear_path, attributes='kind', local=True),
            None)
        if not bear_object:
            continue

        bear_name = os.path.splitext(os.path.basename(bear_path))[0]
        package_dir = os.path.join(upload_root, bear_name)
        create_file_structure_for_packages(upload_root, bear_path, bear_name)

        requirements_path = os.path.join(package_dir, 'requirements.txt')
        if bear_object.REQUIREMENTS:
            for requirement in bear_object.REQUIREMENTS:
                if isinstance(requirement, PipRequirement):
                    with open(requirements_path, 'a') as reqtxt:
                        reqtxt.write(requirement.package + '==' +
                                     requirement.version + '\n')

        # A requirements file implies the package needs a MANIFEST.in
        # so the requirements ship inside the sdist.
        if os.path.exists(requirements_path):
            with open(os.path.join(package_dir,
                                   'MANIFEST.in'), 'w') as manifest:
                manifest.write('include requirements.txt')

        substitution_dict = {
            'NAME': repr(bear_name),
            'VERSION': bear_version,
            'AUTHORS': str(bear_object.AUTHORS),
            'AUTHORS_EMAILS': str(bear_object.AUTHORS_EMAILS),
            'MAINTAINERS': str(bear_object.maintainers),
            'MAINTAINERS_EMAILS': str(bear_object.maintainers_emails),
            'PLATFORMS': str(bear_object.PLATFORMS),
            'LICENSE': str(bear_object.LICENSE),
            'LONG_DESCRIPTION': str(bear_object.__doc__),
            'BEAR_NAME': bear_name,
            'ENTRY': 'coala' + bear_name,
        }
        create_file_from_template(os.path.join('bears', 'setup.py.in'),
                                  os.path.join(package_dir, 'setup.py'),
                                  substitution_dict)

        if args.register:
            perform_register(package_dir, bear_name + '-' + bear_version)
        if args.upload:
            perform_upload(package_dir)
def main():
    """
    Build a PyPI package skeleton under bears/upload for every bear found
    beneath bears/, then optionally register and/or upload each one based
    on the command-line arguments (including the bears dir and template).
    """
    args = create_upload_parser().parse_args()

    # Work from the parent of the configured bears directory.
    os.chdir(os.path.join(args.bears, '..'))
    upload_root = os.path.join('bears', 'upload')
    os.makedirs(upload_root, exist_ok=True)

    # Stamp the version with the current epoch so each run is unique.
    stamp = str(int(time.time()))
    bear_version = VERSION
    if 'dev' in bear_version:
        bear_version = bear_version[:bear_version.find('dev')] + stamp
    else:
        bear_version = repr(bear_version) + '.' + stamp

    for bear_file in sorted(set(glob('bears/**/*Bear.py'))):
        bear_object = next(
            iimport_objects(bear_file, attributes='kind', local=True),
            None)
        if not bear_object:
            continue

        bear_name, _ = os.path.splitext(os.path.basename(bear_file))
        package_dir = os.path.join(upload_root, bear_name)
        create_file_structure_for_packages(upload_root, bear_file, bear_name)

        req_file = os.path.join(package_dir, 'requirements.txt')
        if bear_object.REQUIREMENTS:
            pip_reqs = (r for r in bear_object.REQUIREMENTS
                        if isinstance(r, PipRequirement))
            for requirement in pip_reqs:
                with open(req_file, 'a') as reqtxt:
                    reqtxt.write(requirement.package + '==' +
                                 requirement.version + '\n')

        # Ship the requirements file in the sdist when one was written.
        if os.path.exists(req_file):
            with open(os.path.join(package_dir,
                                   'MANIFEST.in'), 'w') as manifest:
                manifest.write('include requirements.txt')

        substitution_dict = {
            'NAME': repr(bear_name),
            'VERSION': bear_version,
            'AUTHORS': str(bear_object.AUTHORS),
            'AUTHORS_EMAILS': str(bear_object.AUTHORS_EMAILS),
            'MAINTAINERS': str(bear_object.maintainers),
            'MAINTAINERS_EMAILS': str(bear_object.maintainers_emails),
            'PLATFORMS': str(bear_object.PLATFORMS),
            'LICENSE': str(bear_object.LICENSE),
            'LONG_DESCRIPTION': str(bear_object.__doc__),
            'BEAR_NAME': bear_name,
            'ENTRY': 'coala' + bear_name,
        }
        create_file_from_template(args.template,
                                  os.path.join(package_dir, 'setup.py'),
                                  substitution_dict)

        if args.register:
            perform_register(package_dir, bear_name + '-' + bear_version)
        if args.upload:
            perform_upload(package_dir)