def modelignore_matcher(directory):
    """Return a predicate for the directory's ``.modelignore`` file.

    When the file exists it is parsed with gitignore semantics; otherwise a
    predicate that matches nothing is returned, so callers can always call
    the result unconditionally.
    """
    ignore_file = Path(directory / ".modelignore").absolute()
    if not ignore_file.exists():
        return lambda fn: False
    return gitignore_parser.parse_gitignore(ignore_file, base_dir=directory)
def main():
    """Build and publish a modpack release.

    Reads the GitHub token and pack version, regenerates the manifest and
    changelog, downloads ModListCreator to refresh the modlist, builds the
    CurseForge and server zips, and uploads everything to GitHub.
    """
    # GitHub token for the final upload step.
    with open('tokens.json') as file:
        token = json.loads(file.read())['github']
    commit = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode('utf-8').strip()
    with open('pack.json') as file:
        manifest = json.loads(file.read())
    print(f'Prepare Release v{manifest["version"]} on commit {commit}')
    print('Delete old build data')
    if os.path.isdir('build'):
        shutil.rmtree('build')
    os.makedirs('build')
    print('Read gitignore')
    gitignore = gitignore_parser.parse_gitignore('.gitignore', '.')
    print('Generate manifest')
    modlist.generateManifest()
    # NOTE: `manifest` is intentionally re-read here — generateManifest()
    # writes manifest.json, which supersedes the pack.json data loaded above.
    with open('manifest.json') as file:
        manifest = json.loads(file.read())
    print('Creating changelog')
    changelog_creator.main(manifest)
    print('Download ModListCreator')
    request = Request(
        f'https://github.com/MelanX/ModListCreator/releases/download/v{MOD_LIST_CREATOR_VERSION}/ModListCreator-{MOD_LIST_CREATOR_VERSION}.jar'
    )
    response = urlopen(request)
    with open(os.path.join('build', 'ModListCreator.jar'), mode='wb') as file:
        file.write(response.read())
    print('Update root directory modlist.')
    subprocess.check_call([
        'java', '-jar',
        os.path.join('build', 'ModListCreator.jar'),
        '--md', '--manifest', 'manifest.json', '--output', '.', '--detailed'
    ])
    print('Create empty overrides folder.')
    target_dir = os.path.join('build', 'overrides')
    if os.path.isdir(target_dir):
        shutil.rmtree(target_dir)
    os.makedirs(target_dir)
    print('Prepare CurseForge pack.')
    createModpackZip(manifest, gitignore)
    print('Prepare Server zip.')
    createServerZip(manifest, gitignore)
    print('Uploading to GitHub')
    uploadToGithub(token, manifest)
    print('Done')
def snapshot(self, root=None):
    """Recursively snapshot the directory tree rooted at *root*.

    Returns a nested dict ``{"dirs": {name: subtree}, "files": {name: hash}}``,
    or ``None`` when *root* is ignored (via .gitignore, being a ``.git``
    directory, or an invalid path). Defaults to ``self._root_path``.
    """
    result = {"dirs": {}, "files": {}}
    root = root or self._root_path
    if os.path.isdir(root) and os.path.split(root)[-1] == ".git":
        # Auto ignore .git directory (implicit `return None`)
        return
    # NOTE(review): the root .gitignore is re-parsed on every recursive call —
    # hoisting/caching the matcher would avoid repeated file reads; confirm
    # before changing, as nested snapshots may rely on fresh parses.
    gitignore_path = os.path.join(self._root_path, ".gitignore")
    if os.path.isfile(gitignore_path):
        matches = gitignore_parser.parse_gitignore(gitignore_path)
        match = False
        try:
            match = matches(root)
        except ValueError:
            # Matcher rejects paths outside its base dir (or otherwise invalid)
            utils.logger.info("[%s] Ignore invalid path %s" % (self.__class__.__name__, root))
            return None
        if match:
            utils.logger.debug("[%s] Path %s ignored" % (self.__class__.__name__, root))
            return None
    root_dir = Directory(root)
    for subdir in root_dir.get_dirs():
        # Only record subtrees that produced content (falsy results dropped)
        res = self.snapshot(subdir.path)
        if res:
            result["dirs"][subdir.name] = res
    for file in root_dir.get_files():
        if file.name.endswith(".pyc"):
            # Auto ignore .pyc files
            continue
        result["files"][file.name] = file.hash
    return result
def __init__(self,
             config_object: dict,
             search_path: str,
             use_gitignore: bool,
             print_results=VERBOSE_OUTPUT,
             write_results=SAVE_ON_COMPLETE,
             output_path=DEFAULT_OUTPUT_PATH,
             user_filters: list = None):
    """Configure a secret scanner rooted at *search_path*.

    Args:
        config_object: Parsed configuration (blacklists, signatures, limits).
        search_path: Directory tree to scan.
        use_gitignore: When True, honour a ``.gitignore`` in *search_path*.
        print_results: Echo findings as they are matched.
        write_results: Persist findings to *output_path* on completion.
        output_path: Destination for saved results.
        user_filters: Extra signature filters; ``None`` means none.
    """
    # BUG FIX: the old default `user_filters: list = []` was a mutable
    # default shared across all instances; use a None sentinel instead.
    if user_filters is None:
        user_filters = []
    self.search_path = search_path
    self.use_gitignore = use_gitignore
    # ROBUSTNESS: always define the attribute so later code can test it
    # rather than hitting AttributeError when no .gitignore exists.
    self.gitignore_matcher = None
    if use_gitignore:
        gitignore_file = os.path.join(search_path, '.gitignore')
        if os.path.exists(gitignore_file):
            module_logger.debug(f'Using gitignore file: {gitignore_file}')
            self.gitignore_matcher = parse_gitignore(gitignore_file)
    self.blacklisted_extensions = config_object.get(
        'blacklisted_extensions', [])
    # Blacklisted paths are stored with the platform's separator substituted.
    self.blacklisted_paths = [
        path.format(sep=os.path.sep)
        for path in config_object['blacklisted_paths']
    ]
    self.red_flag_extensions = config_object.get('red_flag_extensions', [])
    self.max_file_size = config_object.get('max_file_size', MAX_FILE_SIZE)
    self.whitelisted_strings = config_object.get('whitelisted_strings', [])
    self.write_results = write_results
    self.print_results = print_results
    self.matched_signatures = []
    self.output_path = output_path
    # Build Signature configuration objects from the config.
    self.signatures: List[Signature] = self.load_signatures(
        config_object.get('signatures', {}), user_filters)
    module_logger.info(
        f'Secret Sniffer Initialised For Path: {search_path}')
def _all_non_ignored_files() -> Iterable[Path]:
    """Yield every source file (Python, C, or C#) not excluded by .gitignore."""
    is_ignored = parse_gitignore(GC_PATH / ".gitignore")

    def keep_dir(dir_path):
        # '.git' should be implicit in gitignore, but parse_gitignore
        # doesn't handle that, so exclude it explicitly.
        if dir_path.name == ".git":
            return False
        return not is_ignored(dir_path)

    return walk_files_recursive(GC_PATH, filter_dir=keep_dir)
def _add_files_to_zipfile(zipf):
    """Add every non-ignored file under the CWD to the open ZipFile *zipf*.

    Prefers ``.ebignore`` over ``.gitignore`` for exclusion rules; with
    neither present, nothing is excluded.
    """
    ignore_file = None
    if os.path.isfile(".ebignore"):
        ignore_file = ".ebignore"
    elif os.path.isfile(".gitignore"):
        ignore_file = ".gitignore"
    if ignore_file is not None:
        logging.info("Using %s", ignore_file)
        ignore_matches = gitignore_parser.parse_gitignore(ignore_file)
    else:
        logging.info("Couldn't find .ebignore or .gitignore")
        ignore_matches = lambda x: False
    base_dir = os.getcwd()
    # We use an absolute path for scandir so the entry.path field is absolute
    for entry in _scantree(base_dir):
        relative_path = entry.path[len(base_dir) + 1:]
        if ignore_matches(entry.path):
            # BUG FIX: previously this always said ".ebignore" even when the
            # rules actually came from .gitignore.
            logging.debug("Ignoring %s based on %s", relative_path, ignore_file)
            continue
        logging.debug("Adding %s to zipfile", relative_path)
        zipf.write(relative_path)
    logging.info("Created zipfile with %s items", len(zipf.namelist()))
def _get_docker_mounts(name):
    """Build the {host path: container path} mount map for the app.

    Always mounts the build target directory; every top-level entry not
    matched by the project's (or default) .gitignore is mounted under
    /root/<app_name>/ as well.
    """
    mounts = {'target/' + name.lower(): 'target'}
    ignore_file = path('.gitignore')
    if not exists(ignore_file):
        ignore_file = _defaults.path('.gitignore')
    ignored = parse_gitignore(ignore_file, base_dir=path('.'))
    for entry in listdir(path('.')):
        if not ignored(path(entry)):
            mounts[entry] = entry

    def in_container(p):
        return '/root/%s/%s' % (SETTINGS['app_name'], p)

    return {path(src): in_container(dst) for src, dst in mounts.items()}
def get_structure_yamls(
        path: str,
        recursive: bool) -> Tuple[bool, List[Tuple[str, Dict[str, Any]]]]:
    """Collect structure-definition YAML files under *path*.

    Args:
        path: Project directory to scan.
        recursive: When False, only *path* itself is considered.

    Returns:
        ``(True, [(subpath, parsed_yaml_dict), ...])``

    Raises:
        FileNotFoundError: No structure file exists in the project directory.
        FileExistsError: A structure file could not be opened or parsed.
    """
    global FILE_EXT_PPTSTRUCT
    global FILE_EXT_PPTIGNORE
    pattern_create = FILE_EXT_PPTSTRUCT
    regex_create = re.compile(pattern_create)
    pattern_ignore = FILE_EXT_PPTIGNORE
    regex_ignore = re.compile(pattern_ignore)
    match_ignore = None
    # extract ignore file (if one exists)
    for fname in os.listdir(path):
        if regex_ignore.match(fname):
            # BUG FIX: the ignore file lives inside *path*; passing the bare
            # fname only resolved correctly when the CWD happened to be *path*.
            match_ignore = parse_gitignore(
                os.path.join(path, fname), base_dir=path)
            break
    structures = []
    for subpath, _, files in os.walk(path):
        if not recursive and not (subpath == path):
            continue
        if not (match_ignore is None) and match_ignore(subpath):
            continue
        files = [fname for fname in files if regex_create.match(fname)]
        if len(files) == 0:
            if subpath == path:
                raise FileNotFoundError(
                    'No file matching the pattern \'{}\' could be found in the project directory!'
                    .format(pattern_create))
        else:
            # Only the first matching file per directory is used.
            fname = files[0]
            # extract instruction for structure from yml file:
            fname_full = fname
            try:
                fname_full = os.path.join(subpath, fname)
                with open(fname_full, 'r') as fp:
                    struct = load(fp, Loader=FullLoader)
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; chain the cause for debugging.
            except Exception as exc:
                raise FileExistsError(
                    'Could not open file `{}`.'.format(fname_full)) from exc
            force_ignore = get_dict_value(struct, 'ignore', typ=bool, default=False)
            if force_ignore == True or (not subpath == path
                                        and force_ignore == 'backwards'):
                continue
            structures.append((subpath, struct))
        if not recursive and (subpath == path):
            break
    return True, structures
def read_gitignore() -> Callable:
    """Reads the gitignore file and returns a method that can check if a path
    matches the globs.

    Returns a match-nothing predicate when no ignore file exists.
    """
    _gitignore_path = PROJECT_ROOT / ".gitignore"
    if not _gitignore_path.is_file():
        # BUG FIX: Logger.warn is a deprecated alias; use warning().
        logger.warning(
            "Could not find an ignore file at: {}".format(_gitignore_path))
        return lambda x: False
    return parse_gitignore(_gitignore_path)
def validate_yaml_dir(path, schema_spec, ssl_cert='',continue_on_error=True):
    """Validate every file under *path* against *schema_spec*.

    Honours an optional .nlignore file in *path*; raises ValueError if any
    file failed validation.
    """
    ignore_file = os.path.join(path, '.nlignore')
    matcher = None
    if os.path.exists(ignore_file):
        matcher = parse_gitignore(ignore_file)
    extensions = ['yml','yaml','json']
    # (any_errs, first_time_check) accumulator threaded through every file.
    state = (False, True)
    for root, _dirs, files in os.walk(path):
        for name in files:
            state = validate_yaml_dir_file(
                os.path.join(root, name), schema_spec, extensions, matcher,
                state[0], state[1], continue_on_error, ssl_cert)
    if state[0]:
        raise ValueError('One or more errors in files underneath this directory.')
def zip_dir(path):
    """Zip the tree at *path*, honouring an optional .nlignore file.

    Returns an open NamedTemporaryFile positioned at the start of the
    zip data.
    """
    ignore_file = os.path.join(path, '.nlignore')
    matcher = parse_gitignore(ignore_file) if os.path.exists(ignore_file) else None
    archive = tempfile.NamedTemporaryFile('w+b')
    with zipfile.ZipFile(archive, 'x', zipfile.ZIP_DEFLATED) as zout:
        for root, _dirs, files in os.walk(path):
            for name in files:
                full = os.path.join(root, name)
                if not is_not_to_be_included(full, matcher):
                    # Archive names are the path with the root prefix stripped.
                    zout.write(full, full.replace(str(path), ''))
    archive.seek(0)
    return archive
def _copy_files(self, workspace):
    """Mirror the current directory to *workspace* on the remote host via SFTP.

    Files are uploaded only when their local mtime is newer than the remote
    copy's; paths matched by a local .gitignore and anything under .git are
    skipped. Remote directories are created on demand.
    """
    # Ensure the remote workspace directory exists before chdir'ing into it.
    self.ssh_client.exec_command(f'[ ! -d "{workspace}" ] && mkdir -p "{workspace}"')
    sftp_client = self.ssh_client.open_sftp()
    sftp_client.chdir(workspace)
    self._print_log('>> Copy local files to remote')
    if os.path.isfile('.gitignore'):
        should_ignore = parse_gitignore('.gitignore')
    else:
        should_ignore = None
    for dirpath, _, filenames in os.walk('.', topdown=True):
        dirpath = os.path.normpath(dirpath)
        if should_ignore and should_ignore(dirpath):
            continue
        # Skip the .git tree (normpath turns './.git/...' into '.git/...').
        if len(dirpath) >= 4 and dirpath[0:4] == '.git':
            continue
        try:
            remote_file_list = sftp_client.listdir_attr(dirpath)
        except IOError:
            # Directory missing on the remote — create it, then list (empty).
            sftp_client.mkdir(dirpath)
            remote_file_list = sftp_client.listdir_attr(dirpath)
        # Get make time of the remote files
        remote_mtimes = {}
        for file_stat in remote_file_list:
            remote_mtimes[file_stat.filename] = file_stat.st_mtime
        for filename in filenames:
            filepath = os.path.normpath(os.path.join(dirpath, filename))
            if should_ignore and should_ignore(filepath):
                continue
            local_mtime = os.stat(filepath).st_mtime
            # If local mtime greater than remote's, it means file changed.
            # (-1 default forces upload of files absent on the remote.)
            if local_mtime > remote_mtimes.get(filename, -1):
                self._print_log(f'./{filepath}...', end=' ')
                sftp_client.put(filepath, filepath)
                self._print_log('uploaded')
            else:
                self._print_log(f'./{filepath} skipped')
    sftp_client.close()
def on_created(self, event):
    """Watchdog handler for create events.

    A newly created ignore file is parsed and registered for its parent
    directory; any other new path is checked against the matchers already
    registered for that directory.
    """
    logging.debug("Created %s: %s",
                  'directory' if event.is_directory else 'file',
                  event.src_path)
    fullpath = Path(event.src_path)
    parent_dir = fullpath.parent
    if not event.is_directory and IGNORE_FILENAME_RE.match(event.src_path):
        logging.debug("Found new ignore file at %s", event.src_path)
        matcher = parse_gitignore(event.src_path)
        self.ignorefiles_by_dir[parent_dir][fullpath] = matcher
    else:
        dir_matchers = self.ignorefiles_by_dir.get(parent_dir)
        # BUG FIX: .get() returns None for unknown directories; the old
        # `len(dir_matchers) == 0` check raised TypeError on None.
        if not dir_matchers:
            return
        matches_any = any(m(str(fullpath)) for m in dir_matchers.values())
        # BUG FIX: only log a match when one actually occurred (the old code
        # computed matches_any and then logged unconditionally).
        if matches_any:
            logging.debug("New file matches ignore rule!")
def __init__(self,
             path: Path,
             include: Optional[List[str]] = None,
             exclude: Optional[List[str]] = None,
             excludeFile: Optional[str] = None,
             useGitignore: bool = False) -> None:
    """Collect file-selection settings for *path*.

    Merges the packaged .sanitizer.json excludes with caller-supplied
    patterns; include defaults to everything; a gitignore matcher is built
    only when requested.
    """
    config_path = Path(__file__).parent.parent.parent / ".sanitizer.json"
    config = json.loads(config_path.read_text())
    self.path = path
    self.workspace = Files._findWorkspace(path)
    extra_excludes = exclude if exclude is not None else []
    self.exclude = Filter(config.get("exclude", []) + extra_excludes)
    self.excludeFile = excludeFile
    self.excludeFileCache: Dict[Path, Optional[Filter]] = {}
    self.include = Filter(include if include is not None else ["**"])
    if useGitignore:
        self.gitignoreMatches = parse_gitignore(self.workspace / ".gitignore")
    else:
        self.gitignoreMatches = None
def zip_dir(path, save):
    """Zip the tree at *path*, honouring an optional .nlignore file.

    Args:
        path: Directory to archive.
        save: Optional destination path; must end in ``.zip`` when given.

    Returns:
        An open binary stream over the zip data, positioned at the start.

    Raises:
        cli_exception.CliException: *save* was given but does not end in .zip.
    """
    # validate save parameter, if provided, is a zip
    if save is not None:
        save = os.path.abspath(save)
        if not save.endswith(".zip"):
            raise cli_exception.CliException(
                'If you specify where to save the zip file, it must end with .zip'
            )
    # find and load .nlignore
    ignore_file = os.path.join(path, '.nlignore')
    nl_ignore_matcher = parse_gitignore(ignore_file) if os.path.exists(
        ignore_file) else None
    # always work in a separate temp file; delete=False so it can be moved
    temp_zip = tempfile.NamedTemporaryFile('w+b', delete=False)
    ziph = zipfile.ZipFile(temp_zip, 'x', zipfile.ZIP_DEFLATED)
    for root, dirs, files in os.walk(path):
        for file in files:
            file_path = os.path.join(root, file)
            if not is_not_to_be_included(file_path, nl_ignore_matcher):
                ziph.write(file_path, file_path.replace(str(path), ''))
    ziph.close()
    # by default we return the temp file
    file_stream = temp_zip
    # if save file specified, move temp to the specified save file
    if save is not None:
        temp_zip.close()
        shutil.move(temp_zip.name, save)
        # BUG FIX: reopen in binary mode — the old `open(save)` used text
        # mode, so reading the zip bytes back raised decode errors.
        file_stream = open(save, 'rb')
    # always ensure that stream starts at beginning
    file_stream.seek(0)
    return file_stream
def walk_dir_filtered_by_ignored(tree_to_walk, ignore_file, pkgroot):
    """Return every directory under *tree_to_walk* that contains files and
    is not matched by *ignore_file* (gitignore syntax).

    The ignore file is temporarily copied into *pkgroot* so its patterns
    resolve relative to the package root.
    """
    push_dir(pkgroot)
    ignored = None
    if ignore_file is not None:  # idiom fix: was `!= None`
        try:
            # Temporarily make a copy of the ignore file in the package root
            tmpname = os.path.join(pkgroot, TEMP_IGNORE_FILE_NAME)
            shutil.copy(ignore_file, tmpname)
            ignored = parse_gitignore(tmpname)
            # BUG FIX: remove the exact path we created; the old bare-name
            # remove only worked when the CWD happened to be pkgroot.
            os.remove(tmpname)
        except Exception:
            # BUG FIX: was a bare `except:` (also caught SystemExit /
            # KeyboardInterrupt); best effort — fall back to no filtering.
            ignored = None
    result = []  # renamed from `list`, which shadowed the builtin
    for root, dirs, files in os.walk(tree_to_walk):
        if files:
            if ignored is None or not ignored(root):
                result.append(standardize_dir_sep(root))
    pop_dir()
    return result
def _copy_from_uri(uri: str, project_path: Path, output_path) -> Optional[Path]: # TODO: git uri src_path = Path(uri) gitignore_path = src_path / ".gitignore" if gitignore_path.exists(): matches = parse_gitignore(gitignore_path.as_posix()) def ignore(src, names): return [ f for f in names if matches(os.path.join(src, f)) or (f == ".git") ] else: ignore = None shutil.copytree(src_path, project_path, ignore=ignore, dirs_exist_ok=True) src_zip = Path( shutil.make_archive(output_path / "src", "zip", project_path)) return src_zip
def __calculate_repo_size(self): """ Calculates size of the repo. Returns: int -- size of repository in bytes int -- number of files in repository """ # getting gitignore matcher gitignore_path = os.path.join(self.path, '.gitignore') if not os.path.exists(gitignore_path): raise GitIgnoreNotExist in_gitignore = parse_gitignore(gitignore_path) # list files in root # with prepended repo path files = [ os.path.join(self.target_dir, file) for file in os.listdir(self.target_dir) ] count = 0 repo_size = 0 for file in files: if in_gitignore(file): continue if os.path.isdir(file): # save files in the folder # prepending with folder path files.extend( os.path.join(file, inner_file) for inner_file in os.listdir(file)) else: count += 1 repo_size += os.path.getsize(file) return repo_size, count
def isignored(abspath):
    """Return True when *abspath* is matched by the wereader-chrome
    .gitignore under the current user's Desktop."""
    gitignore = os.path.join(os.path.expanduser("~"),
                             r'Desktop\wereader-chrome\.gitignore')
    return parse_gitignore(gitignore)(abspath)
def __init__(self, gitignore_file):
    """Wrap *gitignore_file* in a matcher, remembering the file and the
    real directory it lives in."""
    self._gitignore_file = gitignore_file
    self._source_dir = path.realpath(path.dirname(gitignore_file))
    self._matcher = parse_gitignore(gitignore_file)
    self._logger = logging.getLogger(self.__class__.__name__)
from gitignore_parser import parse_gitignore

# Walk a directory tree and collect every path matched by the per-directory
# .gitignore files, pruning ignored directories from further descent.
parser = argparse.ArgumentParser()
parser.add_argument('top', default=os.getcwd(), nargs='?',
                    help='Start from directory.')
args = parser.parse_args()
top = args.top.rstrip('/')
ignore = []
for root, dirs, files in tqdm(os.walk(top, topdown=True, followlinks=False)):
    # NOTE(review): this `try` has no visible `except` clause in this view —
    # it appears truncated; parse_gitignore raises when root has no .gitignore,
    # so the missing handler presumably skips such directories. Confirm.
    try:
        matches = parse_gitignore(root + '/.gitignore')
        for f in files:
            fpath = root + '/' + f
            if matches(fpath):
                ignore.append(fpath)
        newdirs = []
        for d in dirs:
            fpath = root + '/' + d
            if matches(fpath):
                ignore.append(fpath)
            else:
                newdirs.append(d)
        # Prune ignored directories in place so os.walk skips them.
        dirs[:] = newdirs
def _parse_gitignore_string(data: str, fake_base_dir: str = None):
    """Parse gitignore rules from an in-memory string.

    Mocks builtins.open so parse_gitignore reads *data* instead of a real
    file rooted at *fake_base_dir*.
    """
    with patch('builtins.open', mock_open(read_data=data)):
        matcher = parse_gitignore(f'{fake_base_dir}/.gitignore', fake_base_dir)
        return matcher
def _parse_gitignore_string(s, fake_base_dir=None):
    """Materialize *s* as a temporary file and hand it to parse_gitignore."""
    with NamedTemporaryFile('w') as handle:
        handle.write(s)
        # seek() flushes the buffered write so parse_gitignore sees the data.
        handle.seek(0)
        return parse_gitignore(handle.name, fake_base_dir)
def exec_potodo(
    path: Path,
    exclude: List[Path],
    above: int,
    below: int,
    only_fuzzy: bool,
    offline: bool,
    hide_reserved: bool,
    counts: bool,
    json_format: bool,
    exclude_fuzzy: bool,
    exclude_reserved: bool,
    only_reserved: bool,
    show_reservation_dates: bool,
    no_cache: bool,
    is_interactive: bool,
    matching_files: bool,
) -> None:
    """
    Will run everything based on the given parameters

    :param path: The path to search into
    :param exclude: folders or files to be ignored
    :param above: The above threshold
    :param below: The below threshold
    :param only_fuzzy: Should only fuzzies be printed
    :param offline: Will not connect to internet
    :param hide_reserved: Will not show the reserved files
    :param counts: Render list with counts not percentage
    :param json_format: Format output as JSON.
    :param exclude_fuzzy: Will exclude files with fuzzies in output.
    :param exclude_reserved: Will print out only files that aren't reserved
    :param only_reserved: Will print only reserved files
    :param show_reservation_dates: Will show the reservation dates
    :param no_cache: Disables cache (Cache is disabled when files are modified)
    :param is_interactive: Switches output to an interactive CLI menu
    :param matching_files: Should the file paths be printed instead of normal output
    """
    # Bundle the parameters used as the cache key / cache invalidation input.
    cache_args = {
        "path": path,
        "exclude": exclude,
        "above": above,
        "below": below,
        "only_fuzzy": only_fuzzy,
        "offline": offline,
        "hide_reserved": hide_reserved,
        "counts": counts,
        "json_format": json_format,
        "exclude_fuzzy": exclude_fuzzy,
        "exclude_reserved": exclude_reserved,
        "only_reserved": only_reserved,
        "show_reservation_dates": show_reservation_dates,
        "no_cache": no_cache,
        "is_interactive": is_interactive,
    }
    try:
        ignore_matches = parse_gitignore(".potodoignore", base_dir=path)
    except FileNotFoundError:
        # No .potodoignore: parse an empty file to get a match-nothing matcher.
        # NOTE(review): /dev/null is POSIX-only — confirm Windows is out of scope.
        ignore_matches = parse_gitignore("/dev/null")
    # Initialize the arguments
    issue_reservations = get_issue_reservations(offline, hide_reserved, path)
    dir_stats: List[Any] = []
    if is_interactive:
        directory_options = get_dir_list(
            repo_path=path, exclude=exclude, ignore_matches=ignore_matches)
        # Menu loop: pick a directory, then a file, then confirm; the last
        # entry of each menu is the "exit" option.
        while True:
            selected_dir = _directory_list_menu(directory_options)
            if selected_dir == (len(directory_options) - 1):
                exit(0)
            directory = directory_options[selected_dir]
            file_options = get_files_from_dir(
                directory=directory, repo_path=path, exclude=exclude)
            # TODO: Add stats on files and also add reservations
            selected_file = _file_list_menu(directory, file_options)
            if selected_file == (len(file_options) + 1):
                exit(0)
            elif selected_file == len(file_options):
                continue
            file = file_options[selected_file]
            final_choice = _confirmation_menu(file, directory)
            if final_choice == 3:
                exit(0)
            elif final_choice == 2:
                continue
            else:
                break
        if final_choice == 0:
            # Open a pre-filled reservation issue for the chosen file.
            webbrowser.open(
                f"https://github.com/python/python-docs-fr/issues/new?title=Je%20travaille%20sur%20"
                f"{directory}/{file}"
                f"&body=%0A%0A%0A---%0AThis+issue+was+created+using+potodo+interactive+mode."
            )
        else:
            exit()
    else:
        po_files_and_dirs = get_po_stats_from_repo_or_cache(
            path, exclude, cache_args, ignore_matches, no_cache)
        for directory_name, po_files in sorted(po_files_and_dirs.items()):
            # For each directory and files in this directory
            buffer: List[Any] = []
            folder_stats: Dict[str, int] = {"translated": 0, "total": 0}
            printed_list: List[bool] = []
            for po_file in sorted(po_files):
                # For each file in those files from that directory
                if not only_fuzzy or po_file.fuzzy_entries:
                    if exclude_fuzzy and po_file.fuzzy_entries:
                        continue
                    buffer_add(
                        buffer,
                        folder_stats,
                        printed_list,
                        po_file,
                        issue_reservations,
                        above,
                        below,
                        counts,
                        json_format,
                        exclude_reserved,
                        only_reserved,
                        show_reservation_dates,
                        matching_files,
                    )
            # Once all files have been processed, print the dir and the files
            # or store them into a dict to print them once all directories have
            # been processed.
            if json_format:
                add_dir_stats(directory_name, buffer, folder_stats,
                              printed_list, dir_stats)
            else:
                print_dir_stats(directory_name, buffer, folder_stats,
                                printed_list)
        if json_format:
            print(
                json.dumps(
                    dir_stats,
                    indent=4,
                    separators=(",", ": "),
                    sort_keys=False,
                    default=json_dateconv,
                ))
def load_gitignore(self):
    """Register this package's .gitignore (if present) with Package_Class."""
    entry = self._contents.get('.gitignore')
    if entry is not None:
        Package_Class.add_gitignore(parse_gitignore(entry.location))
# Archive the current directory, skipping hidden directories and gitignored paths.
with tarfile.open(archive_path, 'w:gz') as archive:
    # NOTE(review): TarFile.add's `exclude=` parameter is deprecated in favour
    # of `filter=` — confirm the targeted Python version still supports it.
    archive.add('./', exclude=exclude_select_dirs)


def exclude_select_dirs(path):
    # True when the path is a hidden directory or matched by .gitignore.
    # NOTE(review): the IndexError guard presumably covers an empty basename
    # in is_hidden_dir (path ending in a separator) — confirm.
    try:
        return is_hidden_dir(path) or is_in_gitignore(Path(os.getcwd(), path))
    except IndexError:
        return False


def is_hidden_dir(path):
    # A directory whose basename starts with '.'
    return os.path.isdir(path) and os.path.split(path)[1][0] == '.'


# Matcher built once at module level, rooted at the CWD's .gitignore.
is_in_gitignore = parse_gitignore('.gitignore', base_dir=os.getcwd())


def copy_app_to_remote(c):
    # Upload the local archive into the remote app directory.
    c.put(str(archive_path), str(app_target_dir))


def unzip_app_on_remote(c):
    # Extract the uploaded archive on the remote host, then delete it.
    remote_archive_path = app_target_dir / os.path.split(archive_path)[1]
    print(remote_archive_path)
    c.run(f'tar -xzf {remote_archive_path} --directory {app_target_dir}')
    c.run(f'rm {remote_archive_path}')


def create_run_output_dir(c, run_output_dirname):
    # Ensure the run's output directory exists on the remote host.
    c.run(f'mkdir -p {training_output_dir / run_output_dirname}')
def include_matches(self):
    """Return a matcher over .autorestart_includes, or a match-everything
    predicate when the file is absent."""
    includes_path = "%s/.autorestart_includes" % self.working_dir
    if not os.path.exists(includes_path):
        return lambda x: True
    return parse_gitignore(includes_path)
#!/usr/bin/env python3 import os import shutil import logging import argparse from gitignore_parser import parse_gitignore from git import Repo BACKUP_IGNORE_FILE = ".backupignore" TIMESHIFT_FOLDER_SRC = "/home/daniel/data/timeshift" TIMESHIFT_FOLDER_DST = "/media/daniel/backup-ext4" REGULAR_BACKUP_FOLDER_SRC = "/home/daniel/data" REGULAR_BACKUP_FOLDER_DST = "/media/daniel/backup" REPOS_FOLDER = "/home/daniel/data/meus-repositorios" should_ignore = parse_gitignore(BACKUP_IGNORE_FILE) def backup_file(full_path_src, path_dst): full_path_dst = os.path.join(path_dst, os.path.basename(full_path_src)) if os.path.exists(full_path_dst): logging.debug(f'deleting {full_path_dst}') os.remove(full_path_dst) logging.debug(f'copying {full_path_src} to {full_path_dst}') shutil.copy2(full_path_src, path_dst) def ignore(src, names): return [name for name in names if should_ignore(name)]