Example #1
class TagStatistics:
    def __init__(self,
                 include=None,
                 exclude=None,
                 combine=None,
                 docs=None,
                 links=None):
        self.stats = NormalizedDict(ignore=['_'])
        self._include = MultiMatcher(include, ignore=['_'])
        self._exclude = MultiMatcher(exclude, ignore=['_'])
        self._combine = combine or []
        info = TagStatInfo(docs or [], links or [])
        self._get_doc = info.get_doc
        self._get_links = info.get_links

    def add_test(self, test, critical):
        self._add_tags_statistics(test, critical)
        self._add_combined_statistics(test)

    def _add_tags_statistics(self, test, critical):
        for tag in test.tags:
            if not self._is_included(tag):
                continue
            if tag not in self.stats:
                self.stats[tag] = TagStat(tag, self._get_doc(tag),
                                          self._get_links(tag),
                                          critical.is_critical(tag),
                                          critical.is_non_critical(tag))
            self.stats[tag].add_test(test)

    def _is_included(self, tag):
        if self._include and not self._include.match(tag):
            return False
        return not self._exclude.match(tag)

    def _add_combined_statistics(self, test):
        for pattern, name in self._combine:
            name = name or pattern
            if name not in self.stats:
                self.stats[name] = TagStat(name,
                                           self._get_doc(name),
                                           self._get_links(name),
                                           combined=pattern)
            if TagPatterns(pattern).match(test.tags):
                self.stats[name].add_test(test)

    def serialize(self, serializer):
        serializer.start_tag_stats(self)
        for stat in sorted(self.stats.values()):
            stat.serialize(serializer)
        serializer.end_tag_stats(self)

    def sort(self):
        for stat in self.stats.values():
            stat.tests.sort()

class TagStatistics:

    def __init__(self, include=None, exclude=None, combine=None, docs=None,
                 links=None):
        self.stats = NormalizedDict(ignore=['_'])
        self._include = MultiMatcher(include, ignore=['_'])
        self._exclude = MultiMatcher(exclude, ignore=['_'])
        self._combine = combine or []
        info = TagStatInfo(docs or [], links or [])
        self._get_doc = info.get_doc
        self._get_links = info.get_links

    def add_test(self, test, critical):
        self._add_tags_statistics(test, critical)
        self._add_combined_statistics(test)

    def _add_tags_statistics(self, test, critical):
        for tag in test.tags:
            if not self._is_included(tag):
                continue
            if tag not in self.stats:
                self.stats[tag] = TagStat(tag, self._get_doc(tag),
                                          self._get_links(tag),
                                          critical.is_critical(tag),
                                          critical.is_non_critical(tag))
            self.stats[tag].add_test(test)

    def _is_included(self, tag):
        if self._include and not self._include.match(tag):
            return False
        return not self._exclude.match(tag)

    def _add_combined_statistics(self, test):
        for pattern, name in self._combine:
            name = name or pattern
            if name not in self.stats:
                self.stats[name] = TagStat(name, self._get_doc(name),
                                           self._get_links(name),
                                           combined=pattern)
            if TagPatterns(pattern).match(test.tags):
                self.stats[name].add_test(test)

    def serialize(self, serializer):
        serializer.start_tag_stats(self)
        for stat in sorted(self.stats.values()):
            stat.serialize(serializer)
        serializer.end_tag_stats(self)

    def sort(self):
        for stat in self.stats.values():
            stat.tests.sort()
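
The TagStatistics examples above hinge on NormalizedDict's key normalization: with ignore=['_'] (or ignore='_'), tag names that differ only in case, spaces, or underscores resolve to the same entry. A minimal sketch of that behaviour, assuming robot.utils.NormalizedDict is importable; the tag name and value are illustrative only:

from robot.utils import NormalizedDict

stats = NormalizedDict(ignore=['_'])
stats['Smoke_Test'] = 'TagStat placeholder'   # illustrative value, not a real TagStat
print('smoke test' in stats)    # True: case, spaces and '_' are ignored when matching keys
print(stats['SMOKETEST'])       # same entry reached via a differently written key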
Example #3
class DiffResults(object):

    def __init__(self):
        self._stats = NormalizedDict()
        self.column_names = []

    @property
    def rows(self):
        return (RowStatus(name, statuses)
                for name, statuses in sorted(self._stats.items()))

    def add_output(self, path, column=None):
        self._add_suite(ExecutionResult(path).suite)
        self.column_names.append(column or path)
        for stats in self._stats.values():
            self._add_missing_statuses(stats)

    def _add_suite(self, suite):
        self._add_to_stats(suite)
        for sub_suite in suite.suites:
            self._add_suite(sub_suite)
        for test in suite.tests:
            self._add_to_stats(test)

    def _add_to_stats(self, item):
        stats = self._stats.setdefault(item.longname, [])
        self._add_missing_statuses(stats)
        stats.append(ItemStatus(item))

    def _add_missing_statuses(self, stats):
        while len(stats) < len(self.column_names):
            stats.append(MissingStatus())

class DiffResults(object):
    def __init__(self):
        self._stats = NormalizedDict()
        self.column_names = []

    @property
    def rows(self):
        return (RowStatus(name, statuses)
                for name, statuses in sorted(self._stats.items()))

    def add_output(self, path, column=None):
        self._add_suite(ExecutionResult(path).suite)
        self.column_names.append(column or path)
        for stats in self._stats.values():
            self._add_missing_statuses(stats)

    def _add_suite(self, suite):
        self._add_to_stats(suite)
        for sub_suite in suite.suites:
            self._add_suite(sub_suite)
        for test in suite.tests:
            self._add_to_stats(test)

    def _add_to_stats(self, item):
        stats = self._stats.setdefault(item.longname, [])
        self._add_missing_statuses(stats)
        stats.append(ItemStatus(item))

    def _add_missing_statuses(self, stats):
        while len(stats) < len(self.column_names):
            stats.append(MissingStatus())
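
DiffResults keeps one status per output column for every suite and test; _add_missing_statuses pads each row so items that first appear in a later output still line up with the earlier columns. A dependency-free sketch of that padding idea; the filler string stands in for MissingStatus() and is illustrative only:

column_names = ['run1.xml', 'run2.xml', 'run3.xml']   # assumed output names

def pad(statuses, n_columns, filler='N/A'):
    # mirrors _add_missing_statuses: fill the gap before a new status is appended
    while len(statuses) < n_columns:
        statuses.append(filler)
    return statuses

row = ['PASS']                       # item seen only in the first output
print(pad(row, len(column_names)))   # ['PASS', 'N/A', 'N/A']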
def test_itervalues_and_values(self):
    nd = NormalizedDict({'A': 1, 'b': 3, 'C': 2})
    iterator = nd.itervalues()
    assert_false(isinstance(iterator, list))
    assert_equals(list(iterator), [1, 3, 2])
    assert_equals(list(iterator), [])
    assert_equals(list(nd.itervalues()), nd.values())

class HandlerStore(object):

    def __init__(self, source):
        self._source = source
        self._normal = NormalizedDict(ignore='_')
        self._embedded = []

    def add(self, handler, embedded=False):
        if embedded:
            self._embedded.append(handler)
        elif handler.name not in self._normal:
            self._normal[handler.name] = handler
        else:
            error = 'Keyword with same name defined multiple times.'
            self._normal[handler.name] = UserErrorHandler(handler.name, error)
            raise DataError(error)

    def __iter__(self):
        return iter(sorted(self._normal.values() + self._embedded,
                           key=attrgetter('name')))

    def __len__(self):
        return len(self._normal) + len(self._embedded)

    def __contains__(self, name):
        if name in self._normal:
            return True
        return any(template.matches(name) for template in self._embedded)

    def __getitem__(self, name):
        try:
            return self._normal[name]
        except KeyError:
            return self._find_embedded(name)

    def _find_embedded(self, name):
        embedded = [template.create(name) for template in self._embedded
                    if template.matches(name)]
        if len(embedded) == 1:
            return embedded[0]
        self._raise_no_single_match(name, embedded)

    def _raise_no_single_match(self, name, found):
        if self._source is None:
            where = "Test case file"
        elif self._is_resource(self._source):
            where = "Resource file '%s'" % self._source
        else:
            where = "Test library '%s'" % self._source
        if not found:
            raise DataError("%s contains no keywords matching name '%s'."
                            % (where, name))
        error = ["%s contains multiple keywords matching name '%s':"
                 % (where, name)]
        names = sorted(handler.orig_name for handler in found)
        raise DataError('\n    '.join(error + names))

    def _is_resource(self, source):
        extension = splitext(source)[1][1:].lower()
        return extension in RESOURCE_EXTENSIONS
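
HandlerStore.__getitem__ above tries an exact (normalized) name first and only then falls back to embedded-argument templates, raising an error unless exactly one template matches. A toy, dependency-free model of that lookup order; EmbeddedTemplate and the regular expression below are illustrative stand-ins, not Robot Framework classes:

import re

class EmbeddedTemplate:
    def __init__(self, pattern):
        self._regexp = re.compile(pattern, re.IGNORECASE)

    def matches(self, name):
        return self._regexp.match(name) is not None

normal = {'open browser': 'open browser handler'}       # exact-name handlers
embedded = [EmbeddedTemplate(r'user "(.+)" logs in$')]   # embedded-argument handlers

def lookup(name):
    try:
        return normal[name.lower()]                      # exact match first
    except KeyError:
        matching = [t for t in embedded if t.matches(name)]
        if len(matching) == 1:
            return matching[0]
        raise KeyError("no single keyword matching %r" % name)

print(lookup('Open Browser'))             # found by normalized exact name
print(lookup('User "Alice" logs in'))     # found via the embedded template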
def test_keys_values_and_items_are_returned_in_same_order(self):
    nd = NormalizedDict()
    for i, c in enumerate('abcdefghijklmnopqrstuvwxyz0123456789!"#%&/()=?'):
        nd[c.upper()] = i
        nd[c+str(i)] = 1
    assert_equals(nd.items(), zip(nd.keys(), nd.values()))
    assert_equals(list(nd.iteritems()), zip(nd.iterkeys(), nd.itervalues()))

def test_itervalues_and_values(self):
    nd = NormalizedDict({'A': 1, 'b': 3, 'C': 2})
    iterator = nd.itervalues()
    assert_false(isinstance(iterator, list))
    assert_equal(list(iterator), [1, 3, 2])
    assert_equal(list(iterator), [])
    assert_equal(list(nd.itervalues()), nd.values())
Example #9
class TagStatistics(object):
    """Container for tag statistics."""

    def __init__(self, critical_stats, non_critical_stats, combined_stats):
        #: Dictionary, where key is the name of the tag as a string and value
        #: is an instance of :class:`~robot.model.stats.TagStat`.
        self.tags = NormalizedDict(ignore='_')
        #: List of :class:`~robot.model.stats.CriticalTagStat` objects.
        self.critical = critical_stats
        #: List of :class:`~robot.model.stats.CriticalTagStat` objects.
        self.non_critical = non_critical_stats
        #: List of :class:`~robot.model.stats.CombinedTagStat` objects.
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        crits = self._get_critical_and_non_critical_matcher()
        tags = [t for t in self.tags.values() if t.name not in crits]
        return iter(sorted(chain(self.critical, self.non_critical,
                                 self.combined, tags)))

    def _get_critical_and_non_critical_matcher(self):
        crits = [stat for stat in self.critical + self.non_critical
                 if isinstance(stat.pattern, SingleTagPattern)]
        return NormalizedDict([(unicode(stat.pattern), None) for stat in crits],
                              ignore='_')
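
_get_critical_and_non_critical_matcher builds a NormalizedDict whose values are all None, using it purely as a normalized membership set so a plain tag stat is dropped when its name matches a critical or non-critical pattern in any spelling. A minimal sketch, again assuming robot.utils.NormalizedDict; the tag names are made up:

from robot.utils import NormalizedDict

matcher = NormalizedDict([('Smoke_Tag', None), ('Regression', None)], ignore='_')
print('smoke tag' in matcher)     # True despite case and underscore differences
print('performance' in matcher)   # False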
Example #10
class TagStatistics(object):
    """Container for tag statistics."""
    def __init__(self, critical_stats, non_critical_stats, combined_stats):
        #: Dictionary, where key is the name of the tag as a string and value
        #: is an instance of :class:`~robot.model.stats.TagStat`.
        self.tags = NormalizedDict(ignore='_')
        #: List of :class:`~robot.model.stats.CriticalTagStat` objects.
        self.critical = critical_stats
        #: List of :class:`~robot.model.stats.CriticalTagStat` objects.
        self.non_critical = non_critical_stats
        #: List of :class:`~robot.model.stats.CombinedTagStat` objects.
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        crits = self._get_critical_and_non_critical_matcher()
        tags = [t for t in self.tags.values() if t.name not in crits]
        return iter(
            sorted(chain(self.critical, self.non_critical, self.combined,
                         tags)))

    def _get_critical_and_non_critical_matcher(self):
        crits = [
            stat for stat in self.critical + self.non_critical
            if isinstance(stat.pattern, SingleTagPattern)
        ]
        return NormalizedDict([(unicode(stat.pattern), None)
                               for stat in crits],
                              ignore='_')
def test_itervalues_and_values(self):
    nd = NormalizedDict({"A": 1, "b": 3, "C": 2})
    iterator = nd.itervalues()
    assert_false(isinstance(iterator, list))
    assert_equals(list(iterator), [1, 3, 2])
    assert_equals(list(iterator), [])
    assert_equals(list(nd.itervalues()), nd.values())

def test_keys_values_and_items_are_returned_in_same_order(self):
    nd = NormalizedDict()
    for i, c in enumerate('abcdefghijklmnopqrstuvwxyz0123456789!"#%&/()=?'):
        nd[c.upper()] = i
        nd[c+str(i)] = 1
    assert_equals(nd.items(), zip(nd.keys(), nd.values()))
    assert_equals(list(nd.iteritems()), zip(nd.iterkeys(), nd.itervalues()))
Example #13
class HandlerStore(object):
    TEST_LIBRARY_TYPE = 'Test library'
    TEST_CASE_FILE_TYPE = 'Test case file'
    RESOURCE_FILE_TYPE = 'Resource file'

    def __init__(self, source, source_type):
        self.source = source
        self.source_type = source_type
        self._normal = NormalizedDict(ignore='_')
        self._embedded = []

    def add(self, handler, embedded=False):
        if embedded:
            self._embedded.append(handler)
        elif handler.name not in self._normal:
            self._normal[handler.name] = handler
        else:
            error = 'Keyword with same name defined multiple times.'
            self._normal[handler.name] = UserErrorHandler(handler.name, error,
                                                          handler.libname)
            raise DataError(error)

    def __iter__(self):
        return iter(sorted(self._normal.values() + self._embedded,
                           key=attrgetter('name')))

    def __len__(self):
        return len(self._normal) + len(self._embedded)

    def __contains__(self, name):
        if name in self._normal:
            return True
        return any(template.matches(name) for template in self._embedded)

    def __getitem__(self, name):
        try:
            return self._normal[name]
        except KeyError:
            return self._find_embedded(name)

    def _find_embedded(self, name):
        embedded = [template.create(name) for template in self._embedded
                    if template.matches(name)]
        if len(embedded) == 1:
            return embedded[0]
        self._raise_no_single_match(name, embedded)

    def _raise_no_single_match(self, name, found):
        if self.source_type == self.TEST_CASE_FILE_TYPE:
            source = self.source_type
        else:
            source = "%s '%s'" % (self.source_type, self.source)
        if not found:
            raise DataError("%s contains no keywords matching name '%s'."
                            % (source, name))
        error = ["%s contains multiple keywords matching name '%s':"
                 % (source, name)]
        names = sorted(handler.orig_name for handler in found)
        raise DataError('\n    '.join(error + names))

class TagStatistics(object):
    def __init__(self, combined_stats):
        self.tags = NormalizedDict(ignore=['_'])
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        return iter(sorted(self.tags.values() + self.combined))
Example #15
class TagStatistics(object):

    def __init__(self, combined_stats):
        self.tags = NormalizedDict(ignore=['_'])
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        return iter(sorted(self.tags.values() + self.combined))

class TagStatistics(object):
    """Container for tag statistics."""
    def __init__(self, combined_stats):
        #: Dictionary, where key is the name of the tag as a string and value
        #: is an instance of :class:`~robot.model.stats.TagStat`.
        self.tags = NormalizedDict(ignore='_')
        #: List of :class:`~robot.model.stats.CombinedTagStat` objects.
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        return iter(sorted(chain(self.combined, self.tags.values())))
Example #17
class TagStatistics(object):
    """Container for tag statistics."""

    def __init__(self, combined_stats):
        #: Dictionary, where key is the name of the tag as a string and value
        #: is an instance of :class:`~robot.model.stats.TagStat`.
        self.tags = NormalizedDict(ignore=['_'])
        #: List of :class:`~robot.model.stats.CombinedTagStat` objects.
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        stats = list(self.tags.values()) + self.combined
        return iter(sorted(stats))
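
The two variants above differ only in how the tag stats and the combined stats are merged before sorting: this one copies the dict values into a list and concatenates, while the chain-based variant just above it merges the iterables lazily. On Python 3 either approach works, whereas a bare values() + list would fail because dict views do not support +. A small sketch with a plain dict standing in for NormalizedDict and strings standing in for the stat objects:

from itertools import chain

tags = {'smoke': 'TagStat: smoke', 'regression': 'TagStat: regression'}
combined = ['CombinedTagStat: smoke AND regression']

print(sorted(list(tags.values()) + combined))   # explicit list concatenation
print(sorted(chain(combined, tags.values())))   # itertools.chain, no intermediate copy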
Example #18
class HandlerStore(object):
    TEST_LIBRARY_TYPE = 'Test library'
    TEST_CASE_FILE_TYPE = 'Test case file'
    RESOURCE_FILE_TYPE = 'Resource file'

    def __init__(self, source, source_type):
        self.source = source
        self.source_type = source_type
        self._normal = NormalizedDict(ignore='_')
        self._embedded = []

    def add(self, handler, embedded=False):
        if embedded:
            self._embedded.append(handler)
        elif handler.name not in self._normal:
            self._normal[handler.name] = handler
        else:
            error = 'Keyword with same name defined multiple times.'
            self._normal[handler.name] = UserErrorHandler(
                handler.name, error, handler.libname)
            raise DataError(error)

    def __iter__(self):
        handlers = list(self._normal.values()) + self._embedded
        return iter(sorted(handlers, key=attrgetter('name')))

    def __len__(self):
        return len(self._normal) + len(self._embedded)

    def __contains__(self, name):
        if name in self._normal:
            return True
        return any(template.matches(name) for template in self._embedded)

    def create_runner(self, name):
        return self[name].create_runner(name)

    def __getitem__(self, name):
        try:
            return self._normal[name]
        except KeyError:
            return self._find_embedded(name)

    def _find_embedded(self, name):
        embedded = [
            template for template in self._embedded if template.matches(name)
        ]
        if len(embedded) == 1:
            return embedded[0]
        self._raise_no_single_match(name, embedded)

    def _raise_no_single_match(self, name, found):
        if self.source_type == self.TEST_CASE_FILE_TYPE:
            source = self.source_type
        else:
            source = "%s '%s'" % (self.source_type, self.source)
        if not found:
            raise DataError("%s contains no keywords matching name '%s'." %
                            (source, name))
        error = [
            "%s contains multiple keywords matching name '%s':" %
            (source, name)
        ]
        names = sorted(handler.name for handler in found)
        raise DataError('\n    '.join(error + names))
Example #19
class HandlerStore(object):
    def __init__(self, source):
        self._source = source
        self._normal = NormalizedDict(ignore='_')
        self._embedded = []

    def add(self, handler, embedded=False):
        if embedded:
            self._embedded.append(handler)
        else:
            self._normal[handler.name] = handler

    def remove(self, name):
        if name in self._normal:
            self._normal.pop(name)
        self._embedded = [e for e in self._embedded if not e.matches(name)]

    def __iter__(self):
        return iter(
            sorted(self._normal.values() + self._embedded,
                   key=attrgetter('name')))

    def __len__(self):
        return len(self._normal) + len(self._embedded)

    def __contains__(self, name):
        if name in self._normal:
            return True
        return any(template.matches(name) for template in self._embedded)

    def __getitem__(self, name):
        try:
            return self._normal[name]
        except KeyError:
            return self._find_embedded(name)

    def _find_embedded(self, name):
        embedded = [
            template.create(name) for template in self._embedded
            if template.matches(name)
        ]
        if len(embedded) == 1:
            return embedded[0]
        self._raise_no_single_match(name, embedded)

    def _raise_no_single_match(self, name, found):
        if self._source is None:
            where = "Test case file"
        elif self._is_resource(self._source):
            where = "Resource file '%s'" % self._source
        else:
            where = "Test library '%s'" % self._source
        if not found:
            raise DataError("%s contains no keywords matching name '%s'." %
                            (where, name))
        error = [
            "%s contains multiple keywords matching name '%s':" % (where, name)
        ]
        names = sorted(handler.orig_name for handler in found)
        raise DataError('\n    '.join(error + names))

    def _is_resource(self, source):
        extension = splitext(source)[1][1:].lower()
        return extension in RESOURCE_EXTENSIONS
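
_is_resource above classifies the source by its file extension. A standalone sketch of the same check; the RESOURCE_EXTENSIONS tuple below is assumed for illustration and need not match the one Robot Framework actually defines:

from os.path import splitext

RESOURCE_EXTENSIONS = ('resource', 'robot', 'txt', 'tsv', 'rst', 'rest')  # assumed values

def is_resource(source):
    # extension without the leading dot, lower-cased, as in _is_resource
    return splitext(source)[1][1:].lower() in RESOURCE_EXTENSIONS

print(is_resource('common.resource'))   # True
print(is_resource('MyLibrary.py'))      # False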
class Applications:
    _database = DataBasePaths(True).getConnectedFile()

    def __init__(self):
        self._apps = NormalizedDict()
        self._old_apps = NormalizedDict()
        for alias, url in self._get_aliases_and_urls_from_db():
            self._old_apps[alias] = url

    def _get_aliases_and_urls_from_db(self):
        items = []
        for connection in self._read_lines():
            items.append(connection.rsplit('\t', 1))
        return items

    def _read_lines(self):
        if os.path.exists(self._database):
            f = open(self._database, 'rb')
            data = f.read().splitlines()
            f.close()
            return [line for line in data if self._is_valid_connection(line)]
        return []

    def _is_valid_connection(self, line):
        return len(line.rsplit('\t', 1)) == 2

    def add(self, alias, app):
        self._apps[alias] = app
        self._old_apps[alias] = app.rmi_url
        self._store()

    def _store(self):
        data = ['%s\t%s' % (alias, url) for alias, url in self._old_apps.items()]
        data_txt = '\n'.join(data)
        self._write(data_txt)
        print "*TRACE* Stored to connected applications database: \n%s" % data_txt

    def _write(self, data):
        f = open(self._database, 'wb')
        f.write(data)
        f.close()

    def has_connected_to_application(self, alias):
        return self._apps.has_key(alias)

    def get_application(self, alias):
        return self._apps[alias]

    def get_applications(self):
        return self._apps.values()

    def get_aliases(self):
        return self._apps.keys()

    def delete(self, alias):
        del(self._apps[normalize(alias)])
        del(self._old_apps[normalize(alias)])
        self._store()

    def delete_all_connected(self):
        for alias in self._apps.keys():
            self.delete(alias)

    def get_alias_for(self, application):
        for alias, app in self._apps.items():
            if app == application:
                return alias
        return None

    def get_url(self, alias):
        for name, url in self._get_aliases_and_urls_from_db():
            if eq(name, alias):
                return url
        return None

class Applications:
    _database = DataBasePaths(True).getConnectedFile()

    def __init__(self):
        self._apps = NormalizedDict()
        self._old_apps = NormalizedDict()
        for alias, url in self._get_aliases_and_urls_from_db():
            self._old_apps[alias] = url

    def _get_aliases_and_urls_from_db(self):
        items = []
        for connection in self._read_lines():
            items.append(connection.split('\t'))
        return items

    def _read_lines(self):
        if os.path.exists(self._database):
            f = open(self._database, 'rb')
            data = f.read().splitlines()
            f.close()
            return data
        return []

    def add(self, alias, app):
        self._apps[alias] = app
        self._old_apps[alias] = app.rmi_url
        self._store()

    def _store(self):
        data = [
            '%s\t%s' % (alias, url) for alias, url in self._old_apps.items()
        ]
        self._write('\n'.join(data))
        print "*TRACE* Stored to connected applications database: ", data

    def _write(self, data):
        f = open(self._database, 'wb')
        f.write(data)
        f.close()

    def has_connected_to_application(self, alias):
        return self._apps.has_key(alias)

    def get_application(self, alias):
        return self._apps[alias]

    def get_applications(self):
        return self._apps.values()

    def get_aliases(self):
        return self._apps.keys()

    def delete(self, alias):
        del (self._apps[normalize(alias)])
        del (self._old_apps[normalize(alias)])
        self._store()

    def delete_all_connected(self):
        for alias in self._apps.keys():
            self.delete(alias)

    def get_alias_for(self, application):
        for alias, app in self._apps.items():
            if app == application:
                return alias
        return None

    def get_url(self, alias):
        for name, url in self._get_aliases_and_urls_from_db():
            if eq(name, alias):
                return url
        return None
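
Both Applications variants persist connections as one alias<TAB>url pair per line. The first variant parses with rsplit('\t', 1) and checks that exactly two fields come back, which, unlike the plain split('\t') of the second variant, still yields two fields if an alias itself contains a tab. A dependency-free sketch of that parsing; the aliases and URLs are made up:

lines = ['demo app\trmi://localhost:1099/demoApp',
         'second\trmi://localhost:1100/other']

for line in lines:
    parts = line.rsplit('\t', 1)        # split on the last tab only
    if len(parts) == 2:                 # mirrors _is_valid_connection
        alias, url = parts
        print('%s -> %s' % (alias, url))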