def _get_repo_url(self, repo: str, remote: RepoClonerRemoteConfig):
    """Build the clone URL for *repo* on the given remote.

    Resolves username and host from the global config (when the remote
    declares config entries for them), picks a path prefix (a per-repo
    entry in ``custom_prefix_map`` overrides the remote-wide default),
    and substitutes everything into the remote's URL template.

    Raises:
        Exception: when a required config value or the URL template is missing.
    """
    username = host = prefix = ''
    if remote.username_cfg_entry:
        username = get_config().get(
            *remote.username_cfg_entry.rsplit('.', 1))
        if not username:
            raise Exception(
                f'Missing username for {remote.name} (config: core.{remote.username_cfg_entry})'
            )
    if remote.host_cfg_entry:
        host = get_config().get(*remote.host_cfg_entry.rsplit('.', 1))
        if not host:
            # BUG FIX: this message previously said "Missing username"
            # (copy-paste) and lacked the space before "(config:".
            raise Exception(
                f'Missing host for {remote.name} (config: core.{remote.host_cfg_entry})'
            )
    if remote.prefix:
        prefix = remote.prefix
    # A repo-specific prefix wins over the remote-wide one.
    for r, v in remote.custom_prefix_map.items():
        if r == repo:
            prefix = v
            break
    if not remote.url_template:
        raise Exception(
            f'Missing URL template for {remote.name} in clone-repo.yaml')
    url = remote.url_template.format(username=username,
                                     host=host,
                                     prefix=prefix,
                                     repo=repo)
    log_debug('Prepared URL', repo=repo, remote=remote.name, url=url)
    return url
def log_debug_dict(c: dict, level=0):
    """Log every key of *c*, recursing into nested mappings.

    Nested mappings are printed as ``key:`` followed by their own
    entries indented two further spaces per nesting level.
    """
    indent = ' ' * level
    for key, value in c.items():
        if isinstance(value, collections.abc.Mapping):
            log_debug(f'{indent}{key}:')
            log_debug_dict(value, level + 2)
        else:
            log_debug(f'{indent}{key}: {value}')
def sum_of_day(
        self, entry: Entry
) -> typing.Tuple[time.struct_time, int, int, int, int]:
    """Sum the worked time of *entry*.

    Returns a tuple of (date, total seconds, whole hours, remaining
    minutes, negated shortfall against an 8-hour day).
    """
    log_debug(f'Summarize date: {entry.date}')

    def create_dt(s: str) -> datetime.datetime:
        log_debug(f'Create datetime from string: {s}')
        parts = list(map(int, s.split(':')))
        return datetime.datetime(entry.date.tm_year, entry.date.tm_mon,
                                 entry.date.tm_mday, *parts)

    seconds = 0
    for i in entry.intervals:
        interval = tuple(i)
        # An open interval (no end time yet) is counted up to "now".
        if not interval[1]:
            interval = (interval[0],
                        datetime.datetime.now().strftime('%H:%M:%S'))
        log_debug(f'Processing interval: {interval}')
        start = create_dt(interval[0])
        end = create_dt(interval[1])
        seconds += (end - start).seconds
    seconds = int(seconds)
    hours = seconds // 3600
    minutes = (seconds % 3600) // 60
    diff_from_required = 8 * 3600 - seconds
    return (entry.date, seconds, hours, minutes, -diff_from_required)
def _create_project(name: typing.Optional[str] = None,
                    *,
                    branch: typing.Optional[str] = None,
                    upstream_remote: typing.Optional[str] = None,
                    upstream_branch: typing.Optional[str] = None,
                    ticket_id: typing.Optional[str] = None) -> Project:
    """Resolve (or infer) the project name, ensure its directory exists,
    run the project updaters, and return the Project handle.

    With no explicit *name* (or '.'), the project is inferred from the
    nearest project config file, falling back to the first path
    component of the cwd under the project directory.

    Raises:
        ProjectError: when the current directory belongs to no project.
    """
    project_dir = app_config().projectdir
    if not name or name == '.':
        fname = find_file_recursively(Project.CFG_FILE)
        if not fname and os.getcwd().startswith(project_dir):
            relative = os.getcwd()[len(project_dir) + len(os.sep):]
            fname = os.path.join(project_dir,
                                 relative.split(os.sep, 1)[0],
                                 Project.CFG_FILE)
        if not fname:
            raise ProjectError(
                f"Current directory is not part of any project; directory={os.getcwd()}"
            )
        name = os.path.basename(os.path.dirname(fname))
    log_debug('Creating project directory if not exists',
              dict(directory=os.path.join(project_dir, name)))
    os.makedirs(os.path.join(project_dir, name), exist_ok=True)
    update_project(name,
                   branch=branch,
                   upstream_remote=upstream_remote,
                   upstream_branch=upstream_branch,
                   ticket_id=ticket_id)
    return Project(name)
def update_project(project_name: str,
                   *,
                   branch: typing.Optional[str] = None,
                   upstream_remote: typing.Optional[str] = None,
                   upstream_branch: typing.Optional[str] = None,
                   ticket_id: typing.Optional[str] = None):
    """Run all registered core and custom updaters for *project_name*.

    Updaters are selected by the project's current core/custom config
    versions and applied in a fixed-point loop: after each round the
    versions are re-read and the loop repeats until they stop changing.
    A brand-new project (no config file yet) starts at INITIAL_VERSION.
    """
    project_dir = f'{app_config().projectdir}/{project_name}'
    if not os.path.exists(os.path.join(project_dir, Project.CFG_FILE)):
        log_debug('Initial, still non-existing project config')
        core_version = INITIAL_VERSION
        custom_version = INITIAL_VERSION
    else:
        log_debug('Selecting version specific Updater')
        core_version, custom_version = _get_core_and_custom_version(
            project_dir)
    while True:
        # Core updaters for this exact version plus the version-independent
        # ones. NOTE(review): assumes ANY_VERSION is always a key of
        # _registered_core_updaters — TODO confirm at registration site.
        updaters = []
        if core_version in _registered_core_updaters:
            updaters += _registered_core_updaters[core_version]
        updaters += _registered_core_updaters[ANY_VERSION]
        if updaters:
            MultiUpdater(project_dir,
                         branch=branch,
                         upstream_remote=upstream_remote,
                         upstream_branch=upstream_branch,
                         ticket_id=ticket_id,
                         updaters=updaters).update()
        while True:
            # Inner loop: run custom updaters until the custom version
            # stops changing (an updater may bump it).
            updaters = []
            if custom_version in _registered_updaters:
                updaters += _registered_updaters[custom_version]
            updaters += _registered_updaters[ANY_VERSION]
            if not updaters:
                break
            MultiUpdater(project_dir,
                         branch=branch,
                         upstream_remote=upstream_remote,
                         upstream_branch=upstream_branch,
                         ticket_id=ticket_id,
                         updaters=updaters).update()
            _, new_custom_version = _get_core_and_custom_version(project_dir)
            if new_custom_version == custom_version:
                break
            else:
                custom_version = new_custom_version
        # Repeat the whole cycle if a custom updater bumped the core version.
        new_core_version, _ = _get_core_and_custom_version(project_dir)
        if new_core_version == core_version:
            break
        else:
            core_version = new_core_version
def _fetch_archives(self):
    """Fetch (or verify) the archived detail pages for every day in range."""
    for fetch_date in self._each_day():
        date = fetch_date.strftime('%Y-%m-%d')
        log_debug('Fetching/Checking historical details', dict(date=date))
        # NOTE(review): the original also built the archived-URL file name
        # here but never used it; the dead local was removed.
        directory = os.path.join(self.archived_directory, date)
        _Fetcher(directory, self.timestamp, historical_mode=True).fetch()
def register_config_directory(self, directory: str):
    """Track *directory* as a configuration source.

    Returns False when the directory is already registered, True once
    it has been appended and handed to the environment config.
    """
    already_known = directory in self.config_directories
    if already_known:
        log_debug('Not registering duplicated config directory',
                  dict(name=directory))
        return False
    log_debug('Registering config directory', dict(name=directory))
    self.config_directories.append(directory)
    self.env_config.add_from_directory(directory)
    return True
def _entries(self):
    """Yield an Entry per line of ``self.filename``.

    Each line is ``<date> <from-to> <from-to> ...``; every interval
    token is split on '-' into a tuple.
    """

    def create_tuple(s: str) -> typing.Tuple[str, str]:
        return tuple(s.split('-'))

    with open(self.filename) as f:
        for line in f:
            line = line.strip()
            log_debug(f'Reading line: {line}')
            # str.split() with no argument already collapses runs of
            # tabs/spaces, so the former re.sub("[\t ]+", ' ', ...)
            # normalization pass was redundant and has been dropped.
            parts = line.split()
            log_debug(f'Split: {parts}')
            yield Entry(parts[0], list(map(create_tuple, parts[1:])))
def _load(self, path: str):
    """Import the environment file at *path* as a standalone module."""
    # Module name = file basename without ".py"; the separator
    # replacements are a belt-and-braces normalization (a basename
    # normally contains no path separators).
    module = os.path.basename(path)[:-3].replace('/', '.').replace('\\', '.')
    log_debug('Loading environment file',
              dict(filename=path,
                   module_name=module,
                   current_env=self._current_env))
    spec = importlib.util.spec_from_file_location(module, path)
    loaded_module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(loaded_module)
def _create_dir_if_missing(self,
                           directory: str,
                           *,
                           with_dot_keep_file: bool = False):
    """Ensure the project subdirectory exists, optionally with a .keep file."""
    target = os.path.join(self._project_dir, directory)
    if not os.path.exists(target):
        log_debug('Creating project subdirectory', dict(directory=target))
        os.makedirs(target, exist_ok=True)
    if not with_dot_keep_file:
        return
    keep_file = os.path.join(target, '.keep')
    if not os.path.exists(keep_file):
        log_debug('Add missing .keep file', dict(directory=target))
        pathlib.Path(keep_file).touch()
def _fetch_urls(self):
    """Download the daily URL-info files that are not yet on disk."""
    # exist_ok replaces the former exists()-then-makedirs race.
    os.makedirs(self.archived_url_directory, exist_ok=True)
    for fetch_date in self._each_day():
        # Hoisted: the original re-ran strftime('%Y-%m-%d') three times
        # per iteration.
        date = fetch_date.strftime('%Y-%m-%d')
        log_debug('Fetching/Checking historical URL info', dict(date=date))
        filename = os.path.join(
            self.archived_url_directory,
            self.ARCHIVED_URL_FILENAME.format(date=date))
        if not os.path.exists(filename):
            self.url_fetcher.fetch_to_file(
                self.URL_FORMAT_STRING.format(
                    date=fetch_date.strftime('%Y%m%d')), filename)
def r(ctx: click.Context, *_, **kwargs):
    """Click command callback: record invocation context, then run the command.

    NOTE(review): ``c`` is taken from the enclosing scope (not visible in
    this block) — presumably the command class being wired up; confirm at
    the registration site.
    """
    # Remember the raw CLI parameters for this (sub)command.
    app_context.add_cmd_args(ctx.info_name, ctx.params)
    app_context.command_names.current = get_invoked_subommand(
        ctx.parent)
    app_context.command_names.invoked_subcommand = get_invoked_subommand(
        ctx)
    app_context.command_names.invoked_subcommand_primary_name = ctx.invoked_subcommand
    app_context.current_args = Node.create_from(ctx.params)
    # Mirror the keyword arguments into the shared context object.
    for k, v in kwargs.items():
        ctx.obj.add_arg(k, v)
    log_debug('Starting command', name=c.name)
    res = c().run(ctx.obj)
    # Leaf command, or a truthy result from a group: exit with it.
    if ctx.invoked_subcommand is None or res:
        ctx.exit(res)
def _add_remote(self,
                repo: str,
                repo_directory: str,
                remote: RepoClonerRemoteConfig,
                remotes: typing.Optional[typing.List[str]] = None):
    """Add and fetch *remote* in *repo_directory*.

    Skipped (with a log line) when the repo excludes the remote, the
    optional *remotes* filter does not list it, or git already knows it.
    """
    selected = not remotes or remote.name in remotes
    if (repo not in remote.excluded and selected
            and not self._git.is_existing_remote(remote.name,
                                                 cwd=repo_directory)):
        log_debug(f'Fetching remote: {remote.name} @ {repo}')
        self._git.run([
            'remote', 'add', '-f', remote.name,
            self._get_repo_url(repo, remote)
        ], cwd=repo_directory)
    else:
        log_debug(f'Ignoring remote: {remote.name} @ {repo}')
def _process_change(self, changed_file: str) -> bool:
    """Return True when *changed_file* should be skipped by this watcher."""
    if self._skip_based_on_basename(os.path.basename(changed_file)):
        log_debug(f'{self.__class__.__name__}: skip file by basename',
                  filename=changed_file)
        return True
    # Only directories that actually prefix the file are consulted.
    candidates = (d for d in self.directories
                  if changed_file.startswith(d))
    for directory in candidates:
        relative_path = changed_file[len(directory) + 1:]
        if self._skip_based_on_path(relative_path):
            log_debug(f'{self.__class__.__name__}: skip file by path',
                      filename=changed_file)
            return True
    return False
def sync(self, path: str, entry: DetailedEntry):
    """Mirror *path* into the remote tree, or remove the mirrored copy
    when the local path no longer exists."""
    log_debug('Sync entry', path=path)
    remote_name = os.path.join(self.remote_root_directory,
                               entry.remote_name)
    if not self.fs.exists(path):
        # Source vanished: drop the remote counterpart as well.
        self.fs.remove(remote_name)
        return
    if self.fs.is_dir(path):
        self.fs.makedir(remote_name)
        return
    flags = entry.entry.flags
    if FileSyncFlags.RECURSIVE in flags:
        # Recursive entries may target a not-yet-existing parent dir.
        self.fs.makedir(os.path.dirname(remote_name))
    self.fs.copy(path, remote_name)
    if FileSyncFlags.WITHOUT_CHMOD not in flags:
        self.fs.chown(remote_name, entry.entry.owner, entry.entry.group)
        self.fs.chmod(remote_name, entry.entry.permissions)
def _fetch_deceased_nth(self, n: int, entries: list) -> int:
    """Fetch the n-th deceased-listing page, append one dict per table
    row to *entries*, archive the raw HTML, and return the index of the
    last row processed (presumably the lowest index on the page if the
    site lists newest first — TODO confirm).
    """
    log_debug('Fetching deceased pages', dict(page_idx=n))
    # Page 0 is the bare listing URL; subsequent pages use ?page=N.
    suffix = '' if n < 1 else f'?page={n}'
    raw_page = self._fetch_url(
        f'{self.BASE_URL}/elhunytak/{suffix}').decode('UTF-8')
    parser = etree.HTMLParser()
    root = etree.parse(StringIO(raw_page), parser)
    # Data rows are striped with odd/even CSS classes.
    rows = root.xpath(
        "//tr[contains(@class, 'odd') or contains(@class, 'even')]")
    first, last = 0, 0
    for row in rows:
        # Columns: index, gender, age, diseases (as free text).
        index, gender, age, deseases = row[0].text, row[1].text, row[
            2].text, row[3].text
        try:
            age = int(age.strip())
        except ValueError:
            age = -1  # unparsable age is recorded as -1
        data = dict(index=int(index.strip()),
                    gender=self._map_gender(gender.strip()),
                    age=age)
        data['deseases'] = [x.strip() for x in deseases.split(',')]
        entries.append(data)
        # `last` latches the first row's index; `first` tracks the most
        # recent one, so after the loop it holds the final row's index.
        if not last:
            last = data['index']
        first = data['index']
    # Archive the raw page under a name spanning the index range seen.
    with open(os.path.join(
            self.directory,
            self.deceased_file_template.format(first=first, last=last)),
              'wt',
              encoding='UTF-8') as f:
        f.write(raw_page)
    return first
def run_modules(config: Config, messages: Messages):
    """Instantiate and run every registered module in order."""
    for module_class in modules:
        module = module_class(config, messages)
        # BUG FIX: the log field name was misspelled "classs_name".
        log_debug('Run module', dict(class_name=module_class.__name__))
        module.run()
def create_dt(s: str) -> datetime.datetime:
    """Combine the closure's ``entry.date`` with a colon-separated time
    string into a full datetime."""
    log_debug(f'Create datetime from string: {s}')
    time_parts = [int(part) for part in s.split(':')]
    return datetime.datetime(entry.date.tm_year, entry.date.tm_mon,
                             entry.date.tm_mday, *time_parts)
commit.commit_date = commit_date commit.committer = committer commit.subject = subject commit.distance = distance return commit class Git: def run(self, args: typing.List[str], /, *, cwd: typing.Optional[str] = None, env: typing.Optional[dict] = None): log_debug( f'Running git command: {args}; cwd={cwd if cwd else "current dir"}' ) subprocess.run(['git'] + args, check=True, cwd=cwd, env=env) def run_output( self, args: typing.List[str], /, *, cwd: typing.Optional[str] = None, env: typing.Optional[dict] = None, strip: bool = True, ) -> str: log_debug( f'Running git command with output: {args}; cwd={cwd if cwd else "current dir"}' )
def register_watcher(self, watcher: ChangeWatcher):
    """Append *watcher* to this dispatcher's list of change watchers."""
    watcher_class = type(watcher).__name__
    log_debug('Registering change watcher', {'class': watcher_class})
    self._watchers.append(watcher)