class handle_exe(object):
    'A class to deal with windows handle.exe program.'

    _log = logger('handle')

    @classmethod
    def _find_handle_exe(clazz):
        'Find the handle.exe'
        # Extra Visual Studio installer locations where handle.exe is known to live.
        # NOTE(review): the '#' in "Microsoft.Visua#lStudio" looks like a typo in
        # the original path — left untouched; verify against a real installation.
        extra_path = [
            r'c:\Program Files (x86)\Microsoft Visual Studio\Installer\resources\app\layout',
            r'c:\Program Files (x86)\Microsoft Visual Studio\Installer\resources\app\ServiceHub\Services\Microsoft.Visua#lStudio.Setup.Service',
        ]
        handle_exe_name = 'handle.exe'
        exe = which.which(handle_exe_name, extra_path=extra_path)
        if exe:
            return exe
        raise handle_error('Failed to find {}'.format(handle_exe_name))

    @classmethod
    def call_handle(clazz, args, raise_error=True):
        '''Invoke handle.exe with the given arguments.

        args: either a sequence of arguments or a single command line string.
        raise_error: passed through to execute.execute.
        Returns the execute result; raises handle_error if handle.exe is not found.
        '''
        command_line.check_args_type(args)
        check.check_bool(raise_error)

        if isinstance(args, (list, tuple)):
            parsed_args = list(args)
        else:
            parsed_args = command_line.parse_args(args)
        handle_exe = clazz._find_handle_exe()
        # BUG FIX: the original concatenated the raw "args" (which may be a
        # string) instead of the normalized "parsed_args" it just computed.
        cmd = [handle_exe, '-nobanner'] + parsed_args
        return execute.execute(cmd, raise_error=raise_error)
class pip_project_config(object):
    'Class to manager a pip project config.'

    _log = logger('pip')

    def __init__(self, filename):
        check.check_string(filename)

        options = simple_config_options(key_check_type=simple_config_options.KEY_CHECK_ANY)
        self._config = simple_config_editor(filename, options=options)

    # BUG FIX: the "python_exe" getter was defined twice with identical bodies;
    # the first definition was dead code and has been removed.
    @property
    def python_exe(self):
        # Returns None when the key has never been set.
        return self._config.get_value_with_default('pip_project', 'python_exe', None)

    @python_exe.setter
    def python_exe(self, python_exe):
        return self._config.set_value('pip_project', 'python_exe', python_exe)
class docker_cleanup(object):
    'Class to deal with cleaning up docker side effects.'

    _logger = logger('docker')
    _lock = multiprocessing.Lock()

    @classmethod
    def cleanup(clazz, untagged_images=True, exited_containers=True, running_containers=False):
        'Clean up docker leftovers, serialized by a process-wide lock.'
        with clazz._lock:
            clazz._cleanup_i(untagged_images, exited_containers, running_containers)

    @classmethod
    def _cleanup_i(clazz, untagged_images, exited_containers, running_containers):
        'Lock-free implementation of cleanup().'
        if running_containers:
            docker_container.remove_all_running()
        if exited_containers:
            docker_container.remove_all_exited()
        if untagged_images:
            docker_images.remove_all_untagged()
class file_cache_item(file_cache_item_base):
    'A cache item backed by a file on disk, keyed by its sha256 checksum.'

    _log = logger('file_cache')

    def __init__(self, filename):
        super(file_cache_item, self).__init__()
        self.filename = path.abspath(path.normpath(filename))
        self._checksum = file_util.checksum('sha256', self.filename)
        self._log.log_d('file_cache_item:__init__: filename={} checksum={}'.format(self.filename, self._checksum))

    def __str__(self):
        return '{}:{}'.format(self.filename, self._checksum)

    def save(self, info):
        'Copy the file and its checksum into the cache locations described by info.'
        assert path.isfile(self.filename)
        self._log.log_d('file_cache_item:save: filename={} info={}'.format(self.filename, info))
        file_util.copy(self.filename, info.cached_filename)
        checksum_content = self._checksum + line_break.DEFAULT_LINE_BREAK
        file_util.save(info.checksum_filename, checksum_content)

    def checksum(self):
        'The sha256 checksum computed at construction time.'
        return self._checksum

    def name(self):
        'The normalized absolute filename.'
        return self.filename
class vmware_server_controller(object):
    'Controls the lifecycle of a single vmware_server instance.'

    _log = logger('vmware_server_controller')

    def __init__(self):
        self._server = None
        self._reset()

    def start(self, *args, **kargs):
        'Start the server; a no-op if it is already running.'
        if self._server:
            return
        self._log.log_i('controller start: args=%s; kargs=%s' % (str(args), str(kargs)))
        server = vmware_server(*args, **kargs)
        self._server = server
        self._log.log_i('starting server.')
        server.start()
        self.address = server.address
        self.pid = server.pid
        self.version = server.version
        self._log.log_i('server started on %s' % (str(self.address)))

    def stop(self):
        'Stop the server; a no-op if it is not running.'
        if not self._server:
            return
        self._log.log_i('stopping server.')
        self._server.stop()
        self._reset()

    def _reset(self):
        'Clear all server bookkeeping back to the not-running state.'
        self.address = None
        self.pid = None
        self.version = None
        self._server = None
class vfs_config(namedtuple('vfs_config', 'fs_type, values')):
    'Filesystem configuration.'

    log = logger('vfs_config')

    def __new__(clazz, fs_type, values):
        check.check_string(fs_type)
        check.check_dict(values, check.STRING_TYPES, check.STRING_TYPES)

        # Delegate construction to the underlying namedtuple base.
        return clazz.__bases__[0].__new__(clazz, fs_type, values)

    @classmethod
    def load(clazz, config_filename):
        'Load a vfs_config from a simple_config file with one "fsconfig" section.'
        if not path.isfile(config_filename):
            raise vfs_error('config_filename not found: {}'.format(config_filename))
        config = simple_config.from_file(config_filename)
        sections = config.find_all_sections('fsconfig')
        num_sections = len(sections)
        if num_sections == 0:
            raise vfs_error('no fsconfig section found: {}'.format(config_filename))
        if num_sections != 1:
            raise vfs_error('only one fsconfig section should be given: {}'.format(config_filename))
        values = sections[0].to_key_value_list(resolve_env_vars=True).to_dict()
        # pop() both reads and removes the type key from the remaining values.
        fs_type = values.pop('vfs_type', None)
        if fs_type is None:
            raise vfs_error('no fs_type found in fsconfig: {}'.format(config_filename))
        return vfs_config(fs_type, values)
class pipenv_exe(object):
    'Class to deal with the pipenv executable.'

    _log = logger('pipenv')

    _PIPENV_VERSION_PATTERN = r'^pipenv,\s+version\s+(.+)$'

    @classmethod
    def version(clazz, pipenv_exe):
        'Return the version info of a pipenv executable'
        check.check_string(pipenv_exe)

        rv = execute.execute([pipenv_exe, '--version'])
        matches = re.findall(clazz._PIPENV_VERSION_PATTERN, rv.stdout.strip())
        # Exactly one match is required; zero or many means unparseable output.
        if len(matches) != 1:
            raise pipenv_error('not a valid pipenv version for {}: "{}"'.format(pipenv_exe, rv.stdout))
        return matches[0]
class dir_combine(object):
    'A class to combine directories'

    _log = logger('dir_combine')

    @classmethod
    def combine(clazz, files, options=None):
        'Move files into one destination dir, then prune any dirs left empty.'
        check.check_string_seq(files)
        check.check_dir_combine_options(options, allow_none=True)

        options = options or dir_combine_options()
        info = clazz.combine_info(files, options=options)
        info.items.move_files(options.dup_file_timestamp, options.dup_file_count)
        for empty_candidate in info.resolved_files.root_dirs():
            file_find.remove_empty_dirs(empty_candidate)

    _combine_info_result = namedtuple('_combine_info_result', 'items, resolved_files')
    @classmethod
    def combine_info(clazz, files, options=None):
        'Compute the move operations needed to combine files, without moving anything.'
        check.check_string_seq(files)
        check.check_dir_combine_options(options, allow_none=True)

        options = options or dir_combine_options()
        if options.ignore_empty:
            # Keep only entries that exist and are non-empty dirs.
            files = [f for f in files if path.exists(f) and not dir_util.is_empty(f)]
        resolved_files = clazz._resolve_files(files, options.recursive)
        if not resolved_files:
            return clazz._combine_info_result(dir_operation_item_list(), resolved_files)
        # Default destination is the dir of the first (deepest) resolved file.
        destination_dir_abs = path.abspath(options.destination_dir or resolved_files[0].dirname)
        items = dir_operation_item_list()
        for resolved in resolved_files:
            src = resolved.filename_abs
            dst_basename = path.basename(src) if options.flatten else resolved.filename
            items.append(dir_operation_item(src, path.join(destination_dir_abs, dst_basename)))
        return clazz._combine_info_result(items, resolved_files)

    @classmethod
    def _resolve_files(clazz, files, recursive):
        'Resolve files depth sorted, deepest first.'
        resolver_options = file_resolver_options(sort_order='depth',
                                                 sort_reverse=True,
                                                 recursive=recursive)
        return file_resolver.resolve_files(files, options=resolver_options)
def _test_log_func_stdout(args):
    'Emit one message at every log level using the very_brief format.'
    log = logger('foo')
    log.configure('format=very_brief')
    for emit, message in ((log.log_c, 'critical'),
                          (log.log_d, 'debug'),
                          (log.log_e, 'error'),
                          (log.log_i, 'info'),
                          (log.log_w, 'warning')):
        emit(message)
def _func(args):
    'Reset the logger, then emit one message at every log level.'
    log = logger('foo')
    log.reset()
    log.configure('format=very_brief')
    for emit, message in ((log.log_c, 'critical'),
                          (log.log_d, 'debug'),
                          (log.log_e, 'error'),
                          (log.log_i, 'info'),
                          (log.log_w, 'warning')):
        emit(message)
class vmware_util(object):
    'Miscellaneous vmware utilities.'

    _log = logger('vmware_util')

    @classmethod
    def killall_vmrest(clazz):
        'Kill any running vmrest processes; log only when something was killed.'
        result = execute.execute('killall vmrest', raise_error=False)
        if result.exit_code == 0:
            clazz._log.log_i('killed some vmrest')
def _test_log_func_stdout_and_output_filename(args):
    'Log at every level with output also routed to the filename in args[0].'
    inner_tmp_log = args[0]
    log = logger('foo')
    log.configure('format=very_brief')
    log.configure('output=file:{}'.format(inner_tmp_log))
    for emit, message in ((log.log_c, 'critical'),
                          (log.log_d, 'debug'),
                          (log.log_e, 'error'),
                          (log.log_i, 'info'),
                          (log.log_w, 'warning')):
        emit(message)
class handle(object):
    'A class to abstract some handle operations.'

    _log = logger('handle')

    @classmethod
    def open_handles(clazz, pid):
        'Return the parsed open handles for process pid.'
        check.check_int(pid)

        rv = handle_exe.call_handle(['-p', str(pid)])
        return handle_output_parser.parse_handle_output(rv.stdout)
class git_projects_cli_handler(cli_command_handler):
    'CLI handler for showing the status of multiple git projects.'

    _log = logger('git_status')

    def __init__(self, cli_args):
        super(git_projects_cli_handler, self).__init__(cli_args, options_class=git_repo_status_options)
        check.check_git_repo_status_options(self.options)
        self._log.log_d('options={}'.format(self.options))

    def status(self, dirs):
        'Print the status of every git repo found under dirs.  Returns 0.'
        check.check_string_seq(dirs)

        git_dirs = git_dir._resolve_git_dirs(dirs)
        repos = [git_repo(d) for d in git_dirs]
        for item in git_status_getter.get_status(repos, options=self.options):
            self._print_status2(item.repo, item.status, self.options)
        return 0

    @classmethod
    def _print_status2(clazz, repo, status, options):
        'Print one repo blurb when it has changes, branch drift, or force_show is set.'
        # FIX: first argument renamed from the misleading "self" to "clazz"
        # (this is a classmethod; file convention uses "clazz").
        check.check_git_repo(repo)
        check.check_git_repo_status(status)
        check.check_git_repo_status_options(options)

        branch_status = status.branch_status
        has_changes = len(status.change_status) > 0
        has_branch_status = branch_status.behind != 0 or branch_status.ahead != 0
        # FIX: "True in [ ... ]" replaced with the idiomatic any().
        if any([has_changes, has_branch_status, options.force_show]):
            blurb = [
                repo.root.replace(path.expanduser('~'), '~'),
                repo.remote_get_url()
            ]
            blurb.append(status.last_commit.commit_hash_short)
            branch_status_blurb = ['*' + status.active_branch]
            if branch_status.ahead != 0:
                branch_status_blurb.append('ahead %d' % (branch_status.ahead))
            if branch_status.behind != 0:
                branch_status_blurb.append('behind %d' % (branch_status.behind))
            if branch_status_blurb:
                blurb.append('[%s]' % (', '.join(branch_status_blurb)))
            print('%s:' % (' '.join(blurb)))
            for item in status.change_status:
                print('  %3s %s' % (item.action.value, item.filename))
            print('')
class python_pip_exe(object):
    'Class to call pip --version and parse the output.'

    _log = logger('python')

    _PIP_VERSION_PATTERN = r'^pip\s+([\d\.]+)\s+from\s+(.+)\s+\(python\s+(\d+\.\d+)\)$'

    _pip_version_info = namedtuple('_pip_version_info', 'version, where, python_version')
    @classmethod
    def version_info(clazz, pip_exe, pythonpath=None):
        'Return the version info of a pip executable'
        check.check_string(pip_exe)
        check.check_string_seq(pythonpath, allow_none=True)

        cmd = [pip_exe, '--version']
        env_PYTHONPATH = os.pathsep.join(pythonpath or [])
        with env_override(env={'PYTHONPATH': env_PYTHONPATH}) as env:
            clazz._log.log_d('version_info: pip_exe={} PYTHONPATH={}'.format(pip_exe, env_PYTHONPATH))
            try:
                raw_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
                output = codecs.decode(raw_output, 'utf-8').strip()
            except subprocess.CalledProcessError as ex:
                # pip itself failed; surface its combined output in the error.
                output = codecs.decode(ex.output, 'utf-8').strip()
                msg = 'version_info: Failed to run: "{}" - {}'.format(' '.join(cmd), output)
                clazz._log.log_w(msg)
                raise python_error(msg, status_code=ex.returncode)
            clazz._log.log_d('version_info: output="{}"'.format(output))
            found = re.findall(clazz._PIP_VERSION_PATTERN, output)
            clazz._log.log_d('version_info: f="{}"'.format(found))
            # One 3-group match is required for a parseable version line.
            if not found or len(found[0]) != 3:
                raise python_error('version_info: not a valid pip version for {}: "{}"'.format(pip_exe, output))
            version, where, python_version = found[0]
            return clazz._pip_version_info(version, where, python_version)
class command_line_tools(object):
    'Class to deal with the command_line_tools executable.'

    _log = logger('command_line_tools')

    @classmethod
    def installed(clazz, verbose=False):
        'Return True of command line tools are installed.'
        exe = which.which('xcode-select')
        if not exe:
            return False
        cmd = [exe, '--print-path']
        rv = execute.execute(cmd, raise_error=False)
        if rv.exit_code != 0:
            if verbose:
                print('not installed')
            return False
        # BUG FIX: "installed" used to print unconditionally; the "not installed"
        # branch honored verbose, so this one must too.
        if verbose:
            print('installed')
        return True

    @classmethod
    def install(clazz, verbose):
        'Install the command line tools.'
        installed = clazz.installed(False)
        clazz._log.log_i('install: installed={}'.format(installed))
        if installed:
            raise command_line_tools_error('command line tools already installed.')
        with command_line_tools_force(force=True) as force:
            available_updates = softwareupdater.available()
            clazz._log.log_i('install: available_updates={}'.format(available_updates))
            for next_update in available_updates:
                clazz._log.log_i('install: next_update={}'.format(next_update))
                if 'command line tools' in next_update.title.lower():
                    print('installing: {}'.format(next_update.label))
                    softwareupdater.install(next_update.label, verbose)

    @classmethod
    def ensure(clazz, verbose):
        'Ensure that the command line tools are installed.'
        if clazz.installed(False):
            return
        clazz.install(verbose)
class lsof(object):
    'A class to abstract some lsof operations.'

    _log = logger('lsof')

    @classmethod
    def lsof(clazz, pid=None):
        'Return a list of lsof items for all process or a specific process'
        check.check_int(pid, allow_none=True)

        # Restrict to one process only when a (truthy) pid is given.
        args = ['-p', str(pid)] if pid else []
        rv = lsof_command.call_command(args)
        return lsof_output_parser.parse_lsof_output(rv.stdout)
class docker_tag(object):
    'Class to deal with docker tag.'

    _logger = logger('docker')

    @classmethod
    def tag(clazz, source_repo, source_tag, target_repo, target_tag, non_blocking=False):
        'Tag source_repo:source_tag as target_repo:target_tag.'
        check.check_string(source_repo)
        check.check_string(source_tag, allow_none=True)
        check.check_string(target_repo)
        check.check_string(target_tag, allow_none=True)

        if source_repo == target_repo:
            raise docker_error('source_repo and target_repo are the same.')
        tag_args = [
            'tag',
            docker_util.make_tagged_image_name(source_repo, source_tag),
            docker_util.make_tagged_image_name(target_repo, target_tag),
        ]
        rv = docker_exe.call_docker(tag_args, non_blocking=non_blocking)
        if rv.exit_code != 0:
            raise docker_error('failed to tag: {}'.format(' '.join(tag_args)))

    @classmethod
    def tag_image(clazz, image_id, repo, tag):
        'Tag the image with image_id as repo:tag.'
        check.check_string(image_id)
        check.check_string(repo)
        check.check_string(tag)

        if not docker_images.has_image(image_id):
            raise docker_error('image not found: {}'.format(image_id))
        tag_args = ['tag', image_id, docker_util.make_tagged_image_name(repo, tag)]
        rv = docker_exe.call_docker(tag_args)
        if rv.exit_code != 0:
            raise docker_error('failed to tag: {}'.format(' '.join(tag_args)))
class refactor_reindent(object):
    'Reindent source files using the reindent tool.'

    _log = logger('reindent')

    @classmethod
    def reindent_file(clazz, filename, indent, backup):
        'Reindent filename to the given indent width, optionally keeping a backup.'
        check.check_string(filename)
        check.check_int(indent)
        check.check_bool(backup)
        file_check.check_file(filename)

        clazz._log.log_method_d()
        args = ['--indent', str(indent)]
        if not backup:
            args.append('--nobackup')
        args.append(filename)
        reindent_main(args)
class refactor_project(object):
    'High level refactor operations over a set of files.'

    _log = logger('refactor')

    _rename_item = namedtuple('_rename_item', 'src, dst')
    @classmethod
    def rename(clazz, files, src_pattern, dst_pattern, options=None):
        'Rename both the filenames and the text content matching src_pattern.'
        check.check_string(src_pattern)
        check.check_string(dst_pattern)
        check.check_refactor_options(options, allow_none=True)

        clazz._log.log_method_d()
        refactor_files.rename_files(files, src_pattern, dst_pattern, options=options)
        clazz.replace_text(files, src_pattern, dst_pattern, options=options)

    @classmethod
    def replace_text(clazz, files, src_pattern, dst_pattern, options=None):
        'Replace src_pattern with dst_pattern inside matching text files.'
        check.check_string(src_pattern)
        check.check_string(dst_pattern)
        check.check_refactor_options(options, allow_none=True)

        clazz._log.log_method_d()
        options = options or refactor_options()
        text_files = refactor_files.resolve_text_files(files)
        matching_files = refactor_files.match_files(text_files, src_pattern, options=options)
        for next_filename in matching_files:
            file_replace.replace(next_filename,
                                 { src_pattern: dst_pattern },
                                 backup=False,
                                 word_boundary=options.word_boundary,
                                 word_boundary_chars=options.word_boundary_chars)

    @classmethod
    def copy(clazz, files, src_pattern, dst_pattern, copy_dirs, options=None):
        'Copy files matching src_pattern to dst_pattern names, then fix their text.'
        check.check_string(src_pattern)
        check.check_string(dst_pattern)
        check.check_refactor_options(options, allow_none=True)
        check.check_bool(copy_dirs)

        clazz._log.log_method_d()
        copied_items = refactor_files.copy_files(files, src_pattern, dst_pattern,
                                                 copy_dirs, options=options)
        copied_files = sorted([item.dst for item in copied_items])
        clazz.replace_text(copied_files, src_pattern, dst_pattern, options=options)
class dim_task_processor(object):
    'Runs dim task descriptors either serially or with a process pool.'

    # Roughly half the cores minus one, rounded to nearest.
    _DEFAULT_NUM_PROCESSES = int(((multiprocessing.cpu_count() / 2.0) - 1.0) + 0.5)

    log = logger('processor')

    def __init__(self, num_processes=None):
        self._num_processes = num_processes or self._DEFAULT_NUM_PROCESSES

    def run(self, descriptors):
        check.check_dim_task_descriptor_seq(descriptors)
        return self.run_single(descriptors)
        #return self.run_multi(descriptors)

    def run_single(self, descriptors):
        'Run every descriptor in-process, one at a time, returning their results.'
        check.check_dim_task_descriptor_seq(descriptors)
        return [descriptor.function(descriptor) for descriptor in descriptors]

    def run_multi(self, descriptors):
        'Run the descriptors in a process pool and return the async results.'
        check.check_dim_task_descriptor_seq(descriptors)
        with multiprocessing.Pool(self._num_processes) as pool:
            async_results = []
            for descriptor in descriptors:
                self.log.log_d('run: adding task {}'.format(descriptor))
                async_result = pool.apply_async(descriptor.function, args=(descriptor, ))
                #, callback = task_callback)
                async_results.append(async_result)
            self.log.log_d('run: closing pool')
            pool.close()
            self.log.log_d('run: joining')
            pool.join()
            self.log.log_d('run: join returns')
        for index, async_result in enumerate(async_results):
            self.log.log_d('run: {}: {}'.format(index, async_result.get()))
        self.log.log_d('run: ends')
        return async_results
class vmware_cli_handler(cli_command_handler):
    'vmware cli handler.'

    _log = logger('vmware_cli_handler')

    def __init__(self, cli_args):
        super(vmware_cli_handler, self).__init__(cli_args,
                                                 options_class=vmware_options,
                                                 delegate=self._comand_handler_delegate)
        check.check_vmware_options(self.options)
        self._vmware = vmware(self.options)

    # Commands that additionally accept vmware_run_program_options keyword args.
    _COMMANDS_WITH_RUN_PROGRAM_OPTIONS = (
        'vm_can_run_programs',
        'vm_run_package',
        'vm_run_program',
        'vm_run_script',
        'vm_run_script_file',
    )

    def _comand_handler_delegate(self, command_name, options, *args, **kwargs):
        'Dispatch command_name to the vmware object and normalize the result to an exit code.'
        check.check_string(command_name)
        check.check_vmware_options(options)
        check.check_tuple(args)
        check.check_dict(kwargs)

        if command_name in self._COMMANDS_WITH_RUN_PROGRAM_OPTIONS:
            options, left_over_args = self.make_options(vmware_run_program_options, kwargs)
            function_args = left_over_args
            function_args['run_program_options'] = options
        else:
            function_args = kwargs
        func = getattr(self._vmware, command_name)
        rv = func(*args, **function_args)
        # BUG FIX: "rv == None" replaced with "rv is None" so results with a
        # custom __eq__ cannot be mistaken for None.
        if rv is None:
            return 0
        # bool must be tested before int since bool is an int subclass.
        if isinstance(rv, bool):
            return 0 if rv else 1
        if isinstance(rv, int):
            return rv
        if hasattr(rv, 'exit_code'):
            return getattr(rv, 'exit_code')
        return 0
class docker_pull(object):
    'Class to deal with docker pull.'

    _logger = logger('docker')

    @classmethod
    def pull(clazz, image_name, image_tag, non_blocking=False):
        'Pull image_name:image_tag from the registry.'
        check.check_string(image_name)
        check.check_string(image_tag, allow_none=True)

        pull_args = [
            'pull',
            docker_util.make_tagged_image_name(image_name, image_tag),
        ]
        rv = docker_exe.call_docker(pull_args, non_blocking=non_blocking)
        if rv.exit_code != 0:
            raise docker_error('failed to pull: {}'.format(' '.join(pull_args)))
class pyinstaller_exe(object):
    'Class to deal with the pyinstaller executable.'

    _log = logger('pyinstaller')

    @classmethod
    def call_pyinstaller(clazz, args, build_dir=None, replace_env=None):
        'Run pyinstaller with args, optionally rooting its dist/work/spec dirs in build_dir.'
        check.check_string_seq(args)
        check.check_string(build_dir, allow_none=True)
        # BUG FIX: replace_env defaults to None, so the dict check must allow
        # None (matching the allow_none convention used for build_dir above).
        check.check_dict(replace_env, check.STRING_TYPES, check.STRING_TYPES, allow_none=True)

        cmd = command_line.parse_args(args)
        replace_env = replace_env or {}
        env = os_env.clone_current_env(d={})
        env.update(replace_env)
        clazz._log.log_d('using env: {}'.format(pprint.pformat(env)))
        clazz._log.log_d('calling pyinstaller: {}'.format(' '.join(cmd)))
        if build_dir:
            file_util.mkdir(build_dir)
            dist_dir = path.join(build_dir, 'dist')
            work_dir = path.join(build_dir, 'work')
            spec_dir = path.join(build_dir, 'spec')
            # Copy before mutating so the caller's list is untouched.
            args = args[:]
            args.extend(['--distpath', dist_dir])
            args.extend(['--workpath', work_dir])
            args.extend(['--specpath', spec_dir])
        try:
            with env_override(env=env) as _:
                PyInstaller_run(pyi_args=args, pyi_config=None)
        except Exception as ex:
            raise pyinstaller_error(str(ex))

    @classmethod
    def find_exe(clazz, raise_error=False):
        'Return the pyinstaller executable or None if not found'
        return which.which('pyinstaller', raise_error=raise_error)

    @classmethod
    def exe_version(clazz, pyinstaller_exe):
        'Return the pyinstaller executable version'
        check.check_string(pyinstaller_exe)
        return PyInstaller_version
class port_probe(object):
    'Probe whether a TCP port is open.'

    _log = logger('port_probe')

    @classmethod
    def is_open(clazz, address):
        'Return True if a TCP connection to address (host, port) succeeds.'
        check.check_tuple(address)

        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # BUG FIX: the socket was never closed, leaking a descriptor per probe.
        try:
            try:
                rv = sock.connect_ex(address)
            except Exception as ex:
                clazz._log.log_d('caught: {}'.format(str(ex)))
                return False
            if rv != 0:
                # BUG FIX: errorcode.get() avoids a KeyError on platform-specific
                # codes missing from errno.errorcode.
                clazz._log.log_d('not open: {} - {} - {}'.format(
                    address, rv, errno.errorcode.get(rv, 'unknown')))
            return rv == 0
        finally:
            sock.close()
class scutil(object):
    'Class to deal with the macos scutil program.'

    _log = logger('scutil')

    @classmethod
    def get_value(clazz, key):
        'Get a value.'
        check.check_string(key)
        host.check_is_macos()

        cmd = ['scutil', '--get', key]
        rv = execute.execute(cmd, raise_error=False)
        # scutil exits with 1 when the key is "not set"
        if rv.exit_code == 1:
            return None
        if rv.exit_code != 0:
            clazz._raise_scutil_error(cmd, rv)
        return rv.stdout.strip()

    @classmethod
    def set_value(clazz, key, value):
        'Set a value.'
        check.check_string(key)
        check.check_string(value)
        host.check_is_macos()

        cmd = ['scutil', '--set', key, value]
        rv = execute.execute(cmd, raise_error=False)
        if rv.exit_code != 0:
            clazz._raise_scutil_error(cmd, rv)

    @classmethod
    def _raise_scutil_error(clazz, cmd, rv):
        'Raise a scutil_error describing a failed scutil invocation.'
        cmd_flat = ' '.join(cmd)
        msg = 'scutil command failed: {} - {}\n{}'.format(cmd_flat, rv.exit_code, rv.stdout)
        raise scutil_error(msg, status_code=rv.exit_code)
class python_exe_filename(object):
    'Class to deal with the python exe filename.'

    _log = logger('python')

    @classmethod
    def name(clazz, filename):
        'Return the exe name for filename.'
        check.check_string(filename)

        return python_source.exe_name(filename)

    @classmethod
    def version(clazz, exe_type, filename):
        'Return the version suffix after exe_type in the exe name, or None.'
        check.check_string(exe_type)
        check.check_string(filename)

        name = python_source.exe_name(filename)
        # FIX: escape exe_type so regex metacharacters in it cannot corrupt
        # the pattern; plain names like "python" are unaffected.
        f = re.findall(r'^{}(\d.*)$'.format(re.escape(exe_type)), name)
        if not f:
            return None
        return f[0]
class command_line_tools_force(object):
    'Context manager that forces softwareupdate to list the xcode command line tools.'

    _log = logger('command_line_tools')

    # touching this file forces softwareupdate to list the xcode command line tools
    _FORCE_COMMAND_LINE_TOOLS_FILE = '/tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress'

    def __init__(self, force=True):
        # When force is False the context manager is a no-op.
        self._force = force
        self._log.log_i('command_line_tools_force: __init__()')

    def __enter__(self):
        'Create the marker file when forcing is enabled.'
        self._log.log_i('command_line_tools_force: __enter__()')
        if self._force:
            file_util.save(self._FORCE_COMMAND_LINE_TOOLS_FILE)
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        'Remove the marker file when forcing is enabled.'
        self._log.log_i('command_line_tools_force: __exit__()')
        if self._force:
            file_util.remove(self._FORCE_COMMAND_LINE_TOOLS_FILE)
class git_download_cli_handler(cli_command_handler):
    'CLI handler for downloading archives from git repositories.'

    _log = logger('git_download')

    def __init__(self, cli_args):
        super(git_download_cli_handler, self).__init__(cli_args, options_class=git_download_options)
        check.check_git_download_options(self.options)
        self._log.log_d('options={}'.format(self.options))

    def fetch(self, address, revision, output_filename):
        'Download revision of address to output_filename.  Returns 0.'
        check.check_string(address)
        check.check_string(revision)
        check.check_string(output_filename, allow_none=True)

        result_filename = git_download.download(address,
                                                revision,
                                                output_filename=output_filename,
                                                base_name=None,
                                                download_options=self.options)
        if self.options.verbose:
            print('Downloaded {}/{} to {}'.format(address, revision, result_filename))
        return 0

    def available(self, address, prefix, limit, sort_type, reverse):
        'Print the remote tags available at address.  Returns 0.'
        check.check_string(address)
        check.check_int(limit, allow_none=True)
        check.check_string(prefix, allow_none=True)
        check.check_string(sort_type, allow_none=True)
        check.check_bool(reverse)

        tags = git.list_remote_tags_for_address(address,
                                                sort_type=sort_type,
                                                reverse=reverse,
                                                limit=limit,
                                                prefix=prefix)
        tags.output('brief', output_filename=None)
        return 0
class string_list_parser(object):
    'State-machine parser that turns lexed text into individual strings.'

    _log = logger('string_list_parser')

    def __init__(self, options=0):
        self._options = options
        self.STATE_EXPECTING_STRING = _state_expecting_string(self)
        self.STATE_DONE = _state_done(self)
        self.state = self.STATE_EXPECTING_STRING

    def _run(self, text):
        'Tokenize text and yield each parsed string.'
        self._log.log_d('_run(%s)' % (text))

        for token in string_lexer.tokenize(text, 'string_list_parser', options=self._options):
            strings = self.state.handle_token(token)
            if not strings:
                continue
            for one_string in strings:
                self._log.log_i('parse: new string: \"%s\"' % (one_string))
                yield one_string
        # The lexer's end-of-input token must have driven us to STATE_DONE.
        assert self.state == self.STATE_DONE

    def _change_state(self, new_state, msg):
        assert new_state
        if new_state == self.state:
            return
        self._log.log_d('transition: %20s -> %-20s; %s' % (self.state.__class__.__name__,
                                                           new_state.__class__.__name__,
                                                           msg))
        self.state = new_state

    @classmethod
    def parse(clazz, text, options=0):
        'Return a generator of parsed strings for text.'
        return clazz(options=options)._run(text)

    @classmethod
    def parse_to_list(clazz, text, options=0):
        'Return the parsed strings for text as a list.'
        return list(clazz.parse(text, options=options))