def __init__(self, combined_stats):
    """Initialize the containers for tag statistics.

    :param combined_stats: Statistics created based on combined tag
        patterns. NOTE(review): the original comment called this a
        dictionary, but the sibling ``__iter__`` in this file concatenates
        it with a list -- confirm the actual type.
    """
    #: Dictionary, where key is the name of the tag as a string and value
    #: is an instance of :class:`~robot.model.stats.TagStat`.
    self.tags = NormalizedDict(ignore=['_'])
    #: Statistics created based on combined tag patterns.
    self.combined = combined_stats
def __init__(self, critical_stats, non_critical_stats, combined_stats):
    """Initialize the containers for tag statistics.

    :param critical_stats: Statistics of critical tags.
    :param non_critical_stats: Statistics of non-critical tags.
    :param combined_stats: Statistics created based on combined tag
        patterns.
    """
    #: Dictionary, where key is the name of the tag as a string and value
    #: is an instance of :class:`~robot.model.stats.TagStat`.
    self.tags = NormalizedDict(ignore='_')
    #: List of :class:`~robot.model.stats.CriticalTagStat` objects.
    self.critical = critical_stats
    #: List of :class:`~robot.model.stats.CriticalTagStat` objects.
    self.non_critical = non_critical_stats
    #: List of :class:`~robot.model.stats.CombinedTagStat` objects.
    self.combined = combined_stats
def register_run_keyword(self, libname, keyword, args_to_process=None):
    """Register *keyword* from library *libname* as a run keyword.

    ``args_to_process`` tells how many arguments should be processed
    normally; when omitted it is inspected from the method itself.
    """
    arg_count = (self._get_args_from_method(keyword)
                 if args_to_process is None else args_to_process)
    name = keyword.__name__
    try:
        keywords = self._libs[libname]
    except KeyError:
        keywords = self._libs[libname] = NormalizedDict(ignore=['_'])
    keywords[name] = int(arg_count)
class TagStatistics(object):
    """Container for tag statistics."""

    def __init__(self, critical_stats, non_critical_stats, combined_stats):
        #: Dictionary mapping tag names (strings) to
        #: :class:`~robot.model.stats.TagStat` instances.
        self.tags = NormalizedDict(ignore='_')
        #: List of :class:`~robot.model.stats.CriticalTagStat` objects.
        self.critical = critical_stats
        #: List of :class:`~robot.model.stats.CriticalTagStat` objects.
        self.non_critical = non_critical_stats
        #: List of :class:`~robot.model.stats.CombinedTagStat` objects.
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        # Tags already represented by critical/non-critical stats are not
        # repeated among the plain tag stats.
        shadowed = self._get_critical_and_non_critical_matcher()
        plain = [stat for stat in self.tags.values()
                 if stat.name not in shadowed]
        everything = chain(self.critical, self.non_critical, self.combined,
                           plain)
        return iter(sorted(everything))

    def _get_critical_and_non_critical_matcher(self):
        # Only single-tag patterns can shadow a plain tag stat directly.
        singles = [stat for stat in self.critical + self.non_critical
                   if isinstance(stat.pattern, SingleTagPattern)]
        pairs = [(unicode(stat.pattern), None) for stat in singles]
        return NormalizedDict(pairs, ignore='_')
class EmptyFinder(object):
    """Finds the built-in empty variables ``${EMPTY}``, ``@{EMPTY}`` and
    ``&{EMPTY}``.

    ``find`` returns the corresponding empty value and raises ``KeyError``
    for any other name.
    """
    identifiers = '$@&'
    find = NormalizedDict(
        [('${EMPTY}', ''), ('@{EMPTY}', ()), ('&{EMPTY}', {})],
        ignore='_'
    ).__getitem__
class ForLoop(_WithSteps):
    """The parsed representation of a for-loop.

    :param list declaration: The literal cell values that declare the loop
                             (excluding ":FOR").
    :param str comment: A comment, default None.
    :ivar str flavor: The value of the 'IN' item, uppercased.
                      Typically 'IN', 'IN RANGE', 'IN ZIP', or
                      'IN ENUMERATE'.
    :ivar list vars: Variables set per-iteration by this loop.
    :ivar list items: Loop values that come after the 'IN' item.
    :ivar str comment: A comment, or None.
    :ivar list steps: A list of steps in the loop.
    """
    # Canonical separator spellings.
    flavors = {'IN', 'IN RANGE', 'IN ZIP', 'IN ENUMERATE'}
    # Maps deprecated spellings (case/space/underscore-insensitive, via
    # NormalizedDict) back to the canonical flavor.
    normalized_flavors = NormalizedDict((f, f) for f in flavors)

    def __init__(self, parent, declaration, comment=None):
        self.parent = parent
        self.flavor, index = self._get_flavor_and_index(declaration)
        # Everything before the separator is loop variables; everything
        # after it is loop values.
        self.vars = declaration[:index]
        self.items = declaration[index + 1:]
        self.comment = Comment(comment)
        self.steps = []

    def _get_flavor_and_index(self, declaration):
        # Search order matters: exact canonical match first, then a
        # deprecated spelling (reported as a warning), then anything that
        # normalizes to a string starting with 'in' (uppercased as-is).
        for index, item in enumerate(declaration):
            if item in self.flavors:
                return item, index
            if item in self.normalized_flavors:
                correct = self.normalized_flavors[item]
                self._report_deprecated_flavor_syntax(item, correct)
                return correct, index
            if normalize(item).startswith('in'):
                return item.upper(), index
        # No separator found at all: treat the whole declaration as
        # variables with an implicit 'IN' at the end.
        return 'IN', len(declaration)

    def _report_deprecated_flavor_syntax(self, deprecated, correct):
        self.parent.report_invalid_syntax(
            "Using '%s' as a FOR loop separator is deprecated. "
            "Use '%s' instead." % (deprecated, correct), level='WARN')

    def is_comment(self):
        return False

    def is_for_loop(self):
        return True

    def as_list(self, indent=False, include_comment=True):
        # Reconstructs the declaration row as a list of cell values.
        comments = self.comment.as_list() if include_comment else []
        return ['FOR'] + self.vars + [self.flavor] + self.items + comments

    def __iter__(self):
        return iter(self.steps)

    def is_set(self):
        return True
def register_run_keyword(self, libname, keyword, args_to_process=None,
                         deprecation_warning=True):
    """Register *keyword* from library *libname* as a run keyword.

    ``args_to_process`` tells how many arguments should be processed
    normally; when omitted it is inspected from the method itself.
    A deprecation warning is emitted unless explicitly disabled.
    """
    if deprecation_warning:
        warnings.warn(self._deprecation_warning(), UserWarning)
    arg_count = args_to_process
    if arg_count is None:
        arg_count = self._get_args_from_method(keyword)
    name = keyword.__name__
    if libname not in self._libs:
        self._libs[libname] = NormalizedDict(ignore=['_'])
    self._libs[libname][name] = int(arg_count)
class TagStatistics(object):
    """Container for tag statistics."""

    def __init__(self, combined_stats):
        #: Dictionary, where key is the name of the tag as a string and value
        #: is an instance of :class:`~robot.model.stats.TagStat`.
        self.tags = NormalizedDict(ignore=['_'])
        #: Statistics created based on combined tag patterns.
        #: NOTE(review): concatenated with a list in ``__iter__`` below, so
        #: this behaves as a list despite the original "dictionary" wording
        #: -- confirm against callers.
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        # NOTE(review): ``dict.values() + list`` only works on Python 2,
        # where values() returns a list.
        return iter(sorted(self.tags.values() + self.combined))
def start_suite(self, suite):
    """Prepare execution of *suite* and notify output/listeners.

    Creates the result object for the suite, links it into the result
    model, sets up the suite status, namespace and execution context,
    resolves suite documentation and metadata, and runs the suite setup.
    """
    self._output.library_listeners.new_suite_scope()
    result = TestSuite(source=suite.source,
                       name=suite.name,
                       doc=suite.doc,
                       metadata=suite.metadata,
                       starttime=get_timestamp(),
                       rpa=self._settings.rpa)
    if not self.result:
        # First suite seen: it becomes the root of the result model.
        result.set_criticality(self._settings.critical_tags,
                               self._settings.non_critical_tags)
        self.result = Result(root_suite=result, rpa=self._settings.rpa)
        self.result.configure(status_rc=self._settings.status_rc,
                              stat_config=self._settings.statistics_config)
    else:
        # Nested suite: attach under the currently executing suite.
        self._suite.suites.append(result)
    self._suite = result
    self._suite_status = SuiteStatus(self._suite_status,
                                     self._settings.exit_on_failure,
                                     self._settings.exit_on_error,
                                     self._settings.skip_teardown_on_exit)
    ns = Namespace(self._variables, result, suite.resource)
    ns.start_suite()
    ns.variables.set_from_variable_table(suite.resource.variables)
    EXECUTION_CONTEXTS.start_suite(result, ns, self._output,
                                   self._settings.dry_run)
    self._context.set_suite_variables(result)
    if not self._suite_status.failures:
        # Imports and delayed variable resolution are skipped once the
        # suite status already contains failures.
        ns.handle_imports()
        ns.variables.resolve_delayed()
    # Resolve possible variables in suite documentation and metadata.
    result.doc = self._resolve_setting(result.doc)
    result.metadata = [(self._resolve_setting(n), self._resolve_setting(v))
                       for n, v in result.metadata.items()]
    self._context.set_suite_variables(result)
    self._output.start_suite(
        ModelCombiner(suite, result,
                      tests=suite.tests,
                      suites=suite.suites,
                      test_count=suite.test_count))
    self._output.register_error_listener(self._suite_status.error_occurred)
    self._run_setup(suite.keywords.setup, self._suite_status)
    # Registry of test names executed within this suite; '_' and case are
    # not significant in lookups.
    self._executed_tests = NormalizedDict(ignore='_')
def _normalize(self, tags):
    """Return a tuple of unique tag names.

    Names are deduplicated using NormalizedDict semantics (underscores
    ignored); the empty tag and the reserved 'NONE' are dropped.
    """
    unique = NormalizedDict(ignore='_')
    for tag in tags:
        unique[unic(tag)] = 1
    for reserved in ('', 'NONE'):
        if reserved in unique:
            unique.pop(reserved)
    return tuple(unique)
def __setitem__(self, key, value):
    """Store *value* under *key*; non-strings are converted with unic()."""
    key = key if is_string(key) else unic(key)
    value = value if is_string(value) else unic(value)
    NormalizedDict.__setitem__(self, key, value)
def __init__(self, initial=None):
    """Create the dictionary, optionally from *initial* items.

    Keys are normalized by :class:`NormalizedDict` with underscores
    ignored.
    """
    NormalizedDict.__init__(self, initial, ignore='_')
def __init__(self, parent=None, source=None):
    """Initialize a node with optional *parent* and *source* path.

    ``source`` is stored as an absolute path, or None when not given.
    """
    self.parent = parent
    if source:
        self.source = abspath(source)
    else:
        self.source = None
    self.children = []
    self._tables = NormalizedDict(self._get_tables())
def __init__(self, variables):
    """Create an empty variable store.

    :param variables: Owning variables object; used when resolving
        delayed values.
    """
    # Maps variable names (without ${}/@{}/&{} decoration) to values;
    # '_' is ignored when normalizing names.
    self.data = NormalizedDict(ignore='_')
    self._variables = variables
class HandlerStore(object):
    """Stores keyword handlers from one source (library or resource/test
    case file).

    Normal handlers are kept in a normalized name -> handler mapping;
    handlers with embedded arguments are kept in a list and matched by
    name pattern.
    """

    def __init__(self, source):
        self._source = source
        # Normal handlers by name; '_' and case are not significant.
        self._normal = NormalizedDict(ignore='_')
        # Embedded-argument handler templates.
        self._embedded = []

    def add(self, handler, embedded=False):
        if embedded:
            self._embedded.append(handler)
        else:
            self._normal[handler.name] = handler

    def remove(self, name):
        # Removes the normal handler with this exact (normalized) name and
        # every embedded template matching it.
        if name in self._normal:
            self._normal.pop(name)
        self._embedded = [e for e in self._embedded if not e.matches(name)]

    def __iter__(self):
        # NOTE(review): ``dict.values() + list`` only works on Python 2,
        # where values() returns a list.
        return iter(sorted(self._normal.values() + self._embedded,
                           key=attrgetter('name')))

    def __len__(self):
        return len(self._normal) + len(self._embedded)

    def __contains__(self, name):
        if name in self._normal:
            return True
        return any(template.matches(name) for template in self._embedded)

    def __getitem__(self, name):
        # Exact (normalized) names take precedence over embedded matches.
        try:
            return self._normal[name]
        except KeyError:
            return self._find_embedded(name)

    def _find_embedded(self, name):
        # Exactly one matching template is required; zero or many matches
        # raise a DataError via _raise_no_single_match.
        embedded = [template.create(name) for template in self._embedded
                    if template.matches(name)]
        if len(embedded) == 1:
            return embedded[0]
        self._raise_no_single_match(name, embedded)

    def _raise_no_single_match(self, name, found):
        if self._source is None:
            where = "Test case file"
        elif self._is_resource(self._source):
            where = "Resource file '%s'" % self._source
        else:
            where = "Test library '%s'" % self._source
        if not found:
            raise DataError("%s contains no keywords matching name '%s'."
                            % (where, name))
        error = ["%s contains multiple keywords matching name '%s':"
                 % (where, name)]
        names = sorted(handler.orig_name for handler in found)
        raise DataError('\n '.join(error + names))

    def _is_resource(self, source):
        extension = splitext(source)[1][1:].lower()
        return extension in RESOURCE_EXTENSIONS
def __init__(self, source):
    """Create an empty handler store for keywords from *source*."""
    self._source = source
    # Normal handlers by name; '_' and case are not significant in
    # lookups.
    self._normal = NormalizedDict(ignore='_')
    # Handlers with embedded arguments, kept as a list.
    self._embedded = []
def __init__(self, source, source_type):
    """Create an empty handler store.

    :param source: Name or path of the source the handlers come from.
    :param source_type: One of the source type constants (test library,
        test case file, resource file).
    """
    self.source = source
    self.source_type = source_type
    # Normal handlers by name; '_' and case are not significant in
    # lookups.
    self._normal = NormalizedDict(ignore='_')
    # Handlers with embedded arguments, kept as a list.
    self._embedded = []
class VariableStore(object):
    """Stores variables and resolves delayed (variable table) values
    lazily on first access."""

    def __init__(self, variables):
        # Maps undecorated variable names to values; values may be
        # VariableTableValueBase instances resolved on first access.
        self.data = NormalizedDict(ignore='_')
        self._variables = variables

    def resolve_delayed(self):
        # Eagerly resolve everything. Individual failures are reported
        # inside _resolve_delayed; the DataError is swallowed here so one
        # bad variable does not stop the rest.
        for name, value in self.data.items():
            try:
                self._resolve_delayed(name, value)
            except DataError:
                pass

    def _resolve_delayed(self, name, value):
        # Already-resolved values pass straight through.
        if not isinstance(value, VariableTableValueBase):
            return value
        try:
            self.data[name] = value.resolve(self._variables)
        except DataError as err:
            # Recursive resolving may have already removed variable.
            if name in self:
                self.remove(name)
                value.report_error(err)
            # variable_not_found raises, so the return below is reached
            # only on successful resolution.
            variable_not_found('${%s}' % name, self.data,
                               "Variable '${%s}' not found." % name)
        return self.data[name]

    def __getitem__(self, name):
        return self._resolve_delayed(name, self.data[name])

    def update(self, store):
        self.data.update(store.data)

    def clear(self):
        self.data.clear()

    def add(self, name, value, overwrite=True, decorated=True):
        # With decorated=True, ``name`` is like '${var}' and is validated
        # and stripped; list/dict values are type-checked per decoration.
        if decorated:
            name, value = self._undecorate(name, value)
        if overwrite or name not in self.data:
            self.data[name] = value

    def _undecorate(self, name, value):
        validate_var(name)
        if name[0] == '@':
            if not is_list_like(value):
                self._raise_cannot_set_type(name, value, 'list')
            value = list(value)
        if name[0] == '&':
            if not is_dict_like(value):
                self._raise_cannot_set_type(name, value, 'dictionary')
            value = DotDict(value)
        # Strip the '${', '@{' or '&{' prefix and the trailing '}'.
        return name[2:-1], value

    def _raise_cannot_set_type(self, name, value, expected):
        raise DataError("Cannot set variable '%s': Expected %s-like value, "
                        "got %s." % (name, expected, type_name(value)))

    def remove(self, name):
        if name in self.data:
            self.data.pop(name)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __contains__(self, name):
        return name in self.data

    def as_dict(self, decoration=True):
        # Resolving via self[name] forces delayed values when decorating.
        if decoration:
            variables = (self._decorate(name, self[name]) for name in self)
        else:
            variables = self.data
        return NormalizedDict(variables, ignore='_')

    def _decorate(self, name, value):
        # Dict-like wins over list-like; everything else is scalar.
        if is_dict_like(value):
            name = '&{%s}' % name
        elif is_list_like(value):
            name = '@{%s}' % name
        else:
            name = '${%s}' % name
        return name, value
def start_suite(self):
    """Push a new suite variable scope.

    The first (root) suite starts from an empty scope; child suites
    inherit a copy of their parent's scope.
    """
    if self._scopes:
        self._suite = self._scopes[-1].copy()
    else:
        self._suite = NormalizedDict(ignore='_')
    self._scopes.append(self._suite)
class HandlerStore(object):
    """Stores keyword handlers from one source.

    Normal handlers live in a normalized name -> handler mapping; handlers
    with embedded arguments are kept in a list and matched by pattern.
    """
    TEST_LIBRARY_TYPE = 'Test library'
    TEST_CASE_FILE_TYPE = 'Test case file'
    RESOURCE_FILE_TYPE = 'Resource file'

    def __init__(self, source, source_type):
        self.source = source
        self.source_type = source_type
        self._normal = NormalizedDict(ignore='_')
        self._embedded = []

    def add(self, handler, embedded=False):
        if embedded:
            self._embedded.append(handler)
            return
        if handler.name in self._normal:
            # Duplicate definition: replace with an error handler and
            # propagate the error.
            error = DataError('Keyword with same name defined multiple times.')
            self._normal[handler.name] = UserErrorHandler(error, handler.name,
                                                          handler.libname)
            raise error
        self._normal[handler.name] = handler

    def __iter__(self):
        combined = list(self._normal.values()) + self._embedded
        return iter(sorted(combined, key=attrgetter('name')))

    def __len__(self):
        return len(self._normal) + len(self._embedded)

    def __contains__(self, name):
        return (name in self._normal or
                any(template.matches(name) for template in self._embedded))

    def create_runner(self, name):
        return self[name].create_runner(name)

    def __getitem__(self, name):
        # Exact (normalized) names take precedence over embedded matches.
        try:
            return self._normal[name]
        except KeyError:
            return self._find_embedded(name)

    def _find_embedded(self, name):
        matches = [template for template in self._embedded
                   if template.matches(name)]
        if len(matches) != 1:
            self._raise_no_single_match(name, matches)
        return matches[0]

    def _raise_no_single_match(self, name, found):
        if self.source_type == self.TEST_CASE_FILE_TYPE:
            where = self.source_type
        else:
            where = "%s '%s'" % (self.source_type, self.source)
        if not found:
            raise KeywordError("%s contains no keywords matching name '%s'."
                               % (where, name))
        lines = ["%s contains multiple keywords matching name '%s':"
                 % (where, name)]
        lines.extend(sorted(handler.name for handler in found))
        raise KeywordError('\n '.join(lines))
class HandlerStore(object):
    """Stores keyword handlers from one source (library, test case file or
    resource file).

    Normal handlers are kept in a normalized name -> handler mapping;
    handlers with embedded arguments are kept in a list and matched by
    name pattern.
    """
    TEST_LIBRARY_TYPE = 'Test library'
    TEST_CASE_FILE_TYPE = 'Test case file'
    RESOURCE_FILE_TYPE = 'Resource file'

    def __init__(self, source, source_type):
        self.source = source
        self.source_type = source_type
        # Normal handlers by name; '_' and case are not significant.
        self._normal = NormalizedDict(ignore='_')
        # Embedded-argument handler templates.
        self._embedded = []

    def add(self, handler, embedded=False):
        # A duplicate normal name is replaced with an error handler and
        # the error is also raised to the caller.
        if embedded:
            self._embedded.append(handler)
        elif handler.name not in self._normal:
            self._normal[handler.name] = handler
        else:
            error = DataError('Keyword with same name defined multiple times.')
            self._normal[handler.name] = UserErrorHandler(error, handler.name,
                                                          handler.libname)
            raise error

    def __iter__(self):
        handlers = list(self._normal.values()) + self._embedded
        return iter(sorted(handlers, key=attrgetter('name')))

    def __len__(self):
        return len(self._normal) + len(self._embedded)

    def __contains__(self, name):
        if name in self._normal:
            return True
        return any(template.matches(name) for template in self._embedded)

    def create_runner(self, name):
        return self[name].create_runner(name)

    def __getitem__(self, name):
        # Exact (normalized) names take precedence over embedded matches.
        try:
            return self._normal[name]
        except KeyError:
            return self._find_embedded(name)

    def _find_embedded(self, name):
        # Exactly one matching template is required; zero or many raise a
        # KeywordError via _raise_no_single_match.
        embedded = [template for template in self._embedded
                    if template.matches(name)]
        if len(embedded) == 1:
            return embedded[0]
        self._raise_no_single_match(name, embedded)

    def _raise_no_single_match(self, name, found):
        if self.source_type == self.TEST_CASE_FILE_TYPE:
            source = self.source_type
        else:
            source = "%s '%s'" % (self.source_type, self.source)
        if not found:
            raise KeywordError("%s contains no keywords matching name '%s'."
                               % (source, name))
        error = ["%s contains multiple keywords matching name '%s':"
                 % (source, name)]
        names = sorted(handler.name for handler in found)
        raise KeywordError('\n '.join(error + names))
def as_dict(self, decoration=True):
    """Return the stored variables as a new NormalizedDict.

    With *decoration* enabled, names get their '${}', '@{}' or '&{}'
    syntax back and values are fetched via ``self[name]``.
    """
    if not decoration:
        return NormalizedDict(self.data, ignore='_')
    decorated = [self._decorate(name, self[name]) for name in self]
    return NormalizedDict(decorated, ignore='_')
def _escape_and_encode_targets(self, targets):
    """Return targets with HTML-escaped names and URI-encoded values."""
    escaped = [(html_escape(name), self._encode_uri_component(target))
               for name, target in targets.items()]
    return NormalizedDict(escaped)
def _get_critical_and_non_critical_matcher(self):
    """Return a NormalizedDict keyed by the single-tag patterns used by
    critical and non-critical stats; values are irrelevant (None)."""
    pairs = []
    for stat in self.critical + self.non_critical:
        if isinstance(stat.pattern, SingleTagPattern):
            pairs.append((unicode(stat.pattern), None))
    return NormalizedDict(pairs, ignore='_')