Example #1
from argparse import ArgumentParser
from math import factorial

from walrus import Database


def main():
    # Key prefix for factorial results stored in redis
    pattern = 'factorial:'

    # Parse the connection parameters passed on the command line
    ap = ArgumentParser()
    ap.add_argument("-a", "--address", required=True, help="address for connecting to redis")
    ap.add_argument("-p", "--port", required=True, help="port for connecting to redis")
    args = vars(ap.parse_args())

    # Connect to the redis database using the parameters passed
    db = Database(host=args['address'], port=int(args['port']), db=0)
    print("--- Welcome to Factorial Calculator ---")
    while True:
        number = input("Please enter a non-negative integer: ")

        # Reject anything that is not a non-negative integer; int() raises
        # ValueError for non-numeric input, which is caught below
        try:
            if int(number) < 0:
                print("You didn't enter a non-negative integer!")
            else:
                # Check whether the result has already been cached
                if db.get(pattern + number) is None:
                    print("--- Calculating factorial ---")
                    result = factorial(int(number))
                    print(result)
                    # Store the result in the cache
                    db[pattern + number] = result
                else:
                    print("--- Fetching result from cache ---")
                    print(db[pattern + number].decode())
        except ValueError:
            print("You didn't enter a non-negative integer!")
Example #2
def query_cache(key=None, st=None, request=None, project_id=None):
    project_id = project_id or json.loads(request.session['project_id'])

    database = Database()
    ac_key_tag = namespace(project_id)
    # autocomplete() returns a search index, not a list of ids
    autocomplete = database.autocomplete(namespace=ac_key_tag)
    return autocomplete.search(str(st))
Example #3
def redis_connection():
    if redis_pw == "NONE":
        return Database(host=redis_host, port=redis_port, db=0)
    else:
        return Database(host=redis_host,
                        port=redis_port,
                        db=0,
                        password=redis_pw)
Example #4
    def get_consumer_group(self, create=False):
        db = Database(host=REDIS_HOST, port=REDIS_PORT)
        stream_keys = ['all_observations']

        cg = db.time_series('cg-obs', stream_keys)
        if create:
            # Seed each stream so the consumer group can be created on it
            for stream in stream_keys:
                db.xadd(stream, {'data': ''})
            cg.create()
            cg.set_id('$')

        return cg.all_observations
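
For reference, a consumer loop for such a group might look like the sketch below. It uses the plain consumer-group API (as in Examples #13 and #26) rather than the time_series wrapper, and it assumes read() returns (message id, data) pairs; handle_observation() and the count/block values are placeholders:

from walrus import Database

db = Database(host=REDIS_HOST, port=REDIS_PORT)  # REDIS_* as above
cg = db.consumer_group('cg-obs', ['all_observations'])
cg.create()

while True:
    # read() returns new (message id, data) pairs; block briefly while empty
    for message_id, data in cg.all_observations.read(count=10, block=1000):
        handle_observation(data)             # placeholder for real processing
        cg.all_observations.ack(message_id)  # acknowledge so it isn't redelivered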
Example #5
    def __init__(self, service_name, namespace, paths_dict, socket_path, hardfail, consumer_name='CANNON', **kwargs):
        self.db = Database(unix_socket_path=str(socket_path.resolve()))
        self.conf = ConfigManager(namespace, self.db)
        self.ledger = CannonLedger(self, service_name, consumer_name)
        self.paths_dict = paths_dict

        service = self.conf.service.get(service_name)
        if not service:
            print(f'No service named "{service_name}"')
            if hardfail:
                exit(1)

        # Report any required keys that are absent from the service config
        missing_keys = [k for k in service._keys if k not in service._data]
        if missing_keys:
            print(f'Service named "{service_name}" missing {missing_keys}')
            if hardfail:
                exit(1)

        # check for run context class
        if service.shell not in self.paths_dict:
            print(f'No python file named {service.shell}.py')
            if hardfail:
                exit(1)
        elif service.collector not in self.paths_dict:
            print(f'No python file named {service.collector}.py')
            if hardfail:
                exit(1)

        self.service = service
        self.dataframes = BlindDataFrames(self)
Example #6
def get_sim_log(db: Database, tournament_id, left_team,
                right_team) -> List[str]:
    key = get_sim_log_key(tournament_id, left_team, right_team)
    data = db.get(key)
    if data is not None:
        data = json.loads(data)
    return data
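
The corresponding writer isn't shown; a hedged sketch of what it might look like, assuming the same key helper and JSON encoding, with an illustrative TTL:

def set_sim_log(db: Database, tournament_id, left_team, right_team,
                log: List[str], ttl: int = 3600) -> None:
    # Hypothetical counterpart to get_sim_log(); the TTL value is an assumption.
    key = get_sim_log_key(tournament_id, left_team, right_team)
    db.setex(key, ttl, json.dumps(log))  # expire so stale logs don't linger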
Example #7
def polydmon(event, community, sql, redis, consumer_name, quiet):
    db = Database(redis)
    communities = community if 'all' not in community else polyd_communities

    if 'all' in event:
        streams = [f'polyd-{c}-all' for c in communities]
    else:
        streams = []
        for c in communities:
            for e in event:
                streams.append(f'polyd-{c}-{e}')

    c = consumer.EventConsumer(streams, __name__, consumer_name, db)

    if not quiet:
        event_handlers = [print_event]
    else:
        event_handlers = []

    if sql:
        from .models import EventHandler
        sql_handler = EventHandler(sql)
        event_handlers.append(sql_handler.handle_event)

    # this will quit if either thread exits
    for event in c.iter_events():
        for handler in event_handlers:
            handler(event)
Example #8
    def __init__(self, db: Database):
        self.stream: Stream = db.Stream(EVENT_STREAM)
        self.sender_tag: str = fftbg.server.get_name()
        self.last_id = '$'
        # Resume from the most recent message, if the stream has any
        messages = self.stream.revrange('+', '-', 1)
        if messages:
            self.last_id = messages[-1][0]
Example #9
def mock_redis():
    """Monkey patch service cache with mocked redis."""
    from renku.service.cache.base import BaseCache
    from renku.service.cache.models.user import User
    from renku.service.cache.models.job import Job
    from renku.service.cache.models.file import File
    from renku.service.cache.models.project import Project
    from renku.service.jobs.queues import WorkerQueues

    monkey_patch = MonkeyPatch()
    with monkey_patch.context() as m:
        fake_redis = fakeredis.FakeRedis()
        fake_model_db = Database(connection_pool=fake_redis.connection_pool)

        m.setattr(WorkerQueues, 'connection', fake_redis)
        m.setattr(BaseCache, 'cache', fake_redis)
        m.setattr(BaseCache, 'model_db', fake_model_db)

        m.setattr(Job, '__database__', fake_model_db)
        m.setattr(User, '__database__', fake_model_db)
        m.setattr(File, '__database__', fake_model_db)
        m.setattr(Project, '__database__', fake_model_db)

        yield

    monkey_patch.undo()
Example #10
def get_importance(db: Database, tournament_id, left_team,
                   right_team) -> List[dict]:
    key = get_importance_key(tournament_id, left_team, right_team)
    data = db.get(key)
    if data is not None:
        data = json.loads(data)
    return data
Example #11
    def __init__(self, can_channel="can0"):
        self.logger = logging.getLogger()
        self.start_time = None
        self.rdb = redis.Redis(host='localhost', port=6379, db=0)
        self.idb = InfluxDBClient('localhost', 8086, 'USERNAME', 'PASSWORD',
                                  'DATABASE')
        self.can_channel = can_channel
        self.describer = None
        self.init_prettyj1939(pgns=True, spns=True)

        # self.analyse_d = Dict(redis=self.rdb, key='cansnap')
        # self.simple_series = RedisSimpleTimeSeries(client=self.rdb)
        self.wdb = Database(host='localhost', port=6379, db=0)  # Database()
        self.dbstream = None
        self.streamid = None
        self.streamlist = self.wdb.List(f'{self.can_channel}')
        self.sumcount = 0
        # self.ids=Dict(redis=self.rdb, key='cansnap')#self.wdb.Hash('cansnap')
        self.ids = {}
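
As an illustration of feeding the walrus List created above, a hedged helper might look like this (the method and the frame serialization are assumptions, not the author's code):

    # Hypothetical helper: append one received CAN frame to the channel list.
    def record_frame(self, can_id, payload):
        entry = json.dumps({'id': can_id, 'data': payload.hex()})  # assumes json imported
        self.streamlist.append(entry)  # walrus List.append pushes onto the right
        self.sumcount += 1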
Example #12
    def batch_get(self, query=None, cache_ttl=None):
        cache_on = bool(cache_ttl)
        if isinstance(query, Query):
            request_params = query.json(camelify=True)
        elif isinstance(query, dict):
            request_params = _camelify(query)
        else:
            raise InvalidJsonError()
        request_params['viewId'] = self.view_id
        self.request_query = {'reportRequests': request_params}
        execute = self.analytics(body=self.request_query).execute
        if cache_on:
            # Lazily create the walrus cache on first cached call
            if not getattr(self, 'cache', None):
                self.cache = Database(**self.redis_args).cache()
            deco = self.cache.cached(self.key_fn,
                                     timeout=cache_ttl,
                                     metrics=True)
            execute = deco(execute)
        resp = execute()
        return resp
Example #13
    def __init__(self,
                 topics,
                 poll_interval=0.1,
                 start=False,
                 group="test",
                 **kwargs):

        from walrus import Database
        self.consumer = None
        self.topics = topics
        self.group = group
        self.poll_interval = poll_interval
        self.db = Database()
        self.consumer = self.db.consumer_group(self.group, self.topics)
        self.consumer.create()  # Create the consumer group.
        self.consumer.set_id('$')  # Only consume new messages; do not read from the beginning

        super(from_redis, self).__init__(ensure_io_loop=True, **kwargs)
        self.stopped = True
        if start:
            self.start()
Example #14
class StreamHelperOps:
    def __init__(self):
        self.stream_keys = ['all_observations']
        self.db = Database(host=url.hostname, port=url.port)

    def create_push_cg(self):
        self.get_push_cg(create=True)

    def get_push_cg(self, create=False):
        cg = self.db.time_series('cg-push', self.stream_keys)
        if create:
            for stream in self.stream_keys:
                self.db.xadd(stream, {'data': ''})
            cg.create()
            cg.set_id('$')

        return cg

    def create_pull_cg(self):
        self.get_pull_cg(create=True)

    def get_pull_cg(self, create=False):
        cg = self.db.time_series('cg-pull', self.stream_keys)
        if create:
            for stream in self.stream_keys:
                self.db.xadd(stream, {'data': ''})

            cg.create()
            cg.set_id('$')

        return cg
Example #15
class BaseCache:
    """Cache management."""

    config_ = {
        'host': REDIS_HOST,
        'port': REDIS_PORT,
        'db': REDIS_DATABASE,
        'password': REDIS_PASSWORD,
        'retry_on_timeout': True,
        'health_check_interval': int(os.getenv('CACHE_HEALTH_CHECK_INTERVAL', 60)),
    }

    cache = redis.Redis(**config_)
    model_db = Database(**config_)

    def set_record(self, name, key, value):
        """Insert a record to hash set."""
        if isinstance(value, dict):
            value = json.dumps(value)

        self.cache.hset(name, key, value)

    def invalidate_key(self, name, key):
        """Invalidate a cache `key` in users hash set."""
        try:
            self.cache.hdel(name, key)
        except RedisError:
            pass

    def get_record(self, name, key):
        """Return record values from hash set."""
        result = self.cache.hget(name, key)
        if result:
            return json.loads(result.decode('utf-8'))

    def get_all_records(self, name):
        """Return all record values from hash set."""
        return [
            json.loads(record.decode('utf-8'))
            for record in self.cache.hgetall(name).values()
        ]

    def scan_iter(self, pattern):
        """Scan keys to return all user cached elements."""
        return self.cache.scan_iter(match=pattern)

    def hash_table(self, name):
        """Return hash table."""
        return self.cache.hgetall(name)
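
For illustration, a record round trip through BaseCache might look like this sketch (the hash and key names are arbitrary):

cache = BaseCache()
cache.set_record('users', 'u-123', {'name': 'Ada', 'projects': 2})
print(cache.get_record('users', 'u-123'))  # -> {'name': 'Ada', 'projects': 2}
cache.invalidate_key('users', 'u-123')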
Example #16
def event_generator(event, community, redis, retry, quiet):
    db = Database(redis)
    communities = community if 'all' not in community else polyd_communities.keys()

    ws_q = queue.Queue(maxsize=1000)
    ws_threads = [
        WSThread(c, polyd_communities[c], ws_q, retry) for c in communities
    ]

    # lazily create producers
    producers = {
        f'polyd-{c}-all': producer.EventProducer(f'polyd-{c}-all',
                                                 db,
                                                 max_len=20000)
        for c in communities
    }

    for t in ws_threads:
        t.start()

    events = event

    def handler(_, __):
        for t in ws_threads:
            t.stop_thread = True

    signal.signal(signal.SIGINT, handler)

    # this will quit if either thread exits
    for event in iter(ws_q.get, None):
        print(event)
        if event.event in events or 'all' in events:
            stream_name = f'polyd-{event.community}-{event.event}'
            community_stream = f'polyd-{event.community}-all'
            if stream_name not in producers:
                producers[stream_name] = producer.EventProducer(stream_name,
                                                                db,
                                                                max_len=20000)
            producers[stream_name].add_event(event)
            producers[community_stream].add_event(event)

            if not quiet:
                logger.info(str(event))

    for t in ws_threads:
        t.join()
Example #17
    def __init__(self, baseurl, *args, headers=None, cache_ttl=0,
                 logger=None, redis_host='localhost', redis_port=6379,
                 redis_db=0, redis_password=None, redis_socket_timeout=None,
                 **kw):
        self.db = Database(host=redis_host, port=redis_port,
                           db=redis_db, password=redis_password,
                           socket_timeout=redis_socket_timeout)

        super(PrefixedURLSession, self).__init__(*args, **kw)
        self.baseurl = baseurl
        self.cache_ttl = cache_ttl
        self.logger = logger or logging.getLogger()
        if headers:
            self.headers.update(headers)
        self.__post_init__()
Example #18
def polyd_to_s3(community, redis, consumer_name, access_key, secret_key,
                bucket, endpoint, region, expires, psd_key, quiet):
    session = requests.Session()
    session.headers.update({'Authorization': psd_key})
    db = Database(redis)
    communities = community if 'all' not in community else polyd_communities

    streams = [f'polyd-{c}-bounty' for c in communities]

    c = consumer.EventConsumer(streams,
                               'polyd_to_s3',
                               consumer_name,
                               db,
                               consume_from_end=True)

    logger = logging.get_logger()

    if quiet:
        import logging as l
        logger.setLevel(l.WARN)

    # for now, we don't send these to 'all' which is really the websocket events
    producers = {
        c: producer.EventProducer(f'polyd-{c}-downloaded', db, max_len=20000)
        for c in communities
    }
    executor = thread.BoundedExecutor(100, 16)

    for event in c.iter_events():
        logger.info('Processing: %s', event)
        # only process FILE artifacts
        if event.artifact_type != 'FILE':
            continue
        client = transfer.get_client(access_key, secret_key, endpoint, region)
        key = event.uri
        executor.submit(transfer.event_to_s3,
                        event,
                        bucket,
                        key,
                        client,
                        producers[event.community],
                        session,
                        expires=expires)

    executor.shutdown()
Example #19
class Client(object):

    def __init__(self, credentials, view_id, redis_args=None):
        credentials = service_account.Credentials.from_service_account_info(credentials)
        # Build the service object.
        self.redis_args = redis_args or {}
        self.view_id = view_id
        self.cache = None  # lazily created walrus cache, see batch_get()
        self.build(credentials)


    def build(self, credentials):
        self.analytics = build('analyticsreporting', 'v4', credentials=credentials).reports().batchGet


    def batch_get(self, query=None, cache_ttl=None):
        cache_on = bool(cache_ttl)
        if isinstance(query, Query):
            request_params = query.json(camelify=True)
        elif isinstance(query, dict):
            request_params = _camelify(query)
        else:
            raise InvalidJsonError()
        request_params['viewId'] = self.view_id
        self.request_query = {
            'reportRequests': request_params
        }
        execute = self.analytics(body=self.request_query).execute
        if cache_on:
            if not self.cache:
                self.cache = Database(**self.redis_args).cache()
            deco = self.cache.cached(self.key_fn, timeout=cache_ttl, metrics=True)
            execute = deco(execute)
        resp = execute()
        return resp

    def key_fn(self, a, k):
        return hashlib.md5(
            pickle.dumps((self.request_query, a, k))
        ).hexdigest()

    def __getattr__(self, name):
        if name == 'query':
            return Query()
        raise AttributeError(name)
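
Putting the pieces together, a call against this client might look like the following sketch; the credential payload, view id, and query values are placeholders:

client = Client(credentials=SERVICE_ACCOUNT_INFO,  # placeholder dict
                view_id='12345678',
                redis_args={'host': 'localhost', 'port': 6379})
report = client.batch_get(
    query={'date_ranges': [{'start_date': '7daysAgo', 'end_date': 'today'}],
           'metrics': [{'expression': 'ga:sessions'}]},
    cache_ttl=300)  # responses cached in walrus for five minutes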
Example #20
def clamav_scan(community, redis, consumer_name, access_key, secret_key,
                endpoint, region, psd_key, eth_key, clamav_host, threads,
                quiet):
    eth_key = unhexlify(eth_key)
    session = requests.Session()
    session.headers.update({'Authorization': psd_key})
    db = Database(redis)
    communities = community if 'all' not in community else polyd_communities

    streams = [f'polyd-{c}-downloaded' for c in communities]

    c = consumer.EventConsumer(streams,
                               'clamav_scan',
                               consumer_name,
                               db,
                               consume_from_end=True)

    logger = logging.get_logger()

    psd_api = api.PolydAPI(psd_key)

    if quiet:
        import logging as l
        logger.setLevel(l.WARN)

    # for now, we don't produce anything on finish.
    # producers = {c: producer.EventProducer(f'polyd-{c}-downloaded', db) for c in communities}

    executor = thread.BoundedExecutor(10, threads)

    for event in c.iter_events():
        logger.info('Processing: %s, %s', event, event.bounty)

        client = scan.get_client(access_key, secret_key, endpoint, region)

        executor.submit(scan.scan_event, event, client, clamav_host, psd_api,
                        eth_key, c)

    executor.shutdown()
Example #21
from walrus import Database
REDIS_URL = 'redis://redis:6379'
db = Database.from_url(REDIS_URL)
cache = db.cache()


def get_visited_links():
    return cache.get('visited', None) or []


def append_visited_links(link):
    visited = cache.get('visited', None) or []
    cache.set('visited', visited + [link])

    processing = cache.get('processing', None) or []
    cache.set('processing', processing + [link])


def set_item_data(data):
    if data:
        # Don't shadow the argument: fetch the stored list, then append to it
        items = cache.get('data', None) or []
        cache.set('data', items + [data])


def pop_processing():
    pop_link = None
    processing = cache.get('processing', None) or []

    if processing:
        pop_link = processing.pop()
        cache.set('processing', processing)
    return pop_link
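
The get-modify-set pattern above is not atomic under concurrent workers; a hedged alternative sketch using walrus's server-side List type (an assumption, not the author's code) avoids the read-modify-write race:

# Hedged alternative: walrus Lists mutate server-side, so appends and pops
# are single redis operations rather than read-modify-write round trips.
visited = db.List('visited')
processing = db.List('processing')

def append_visited_links_atomic(link):
    visited.append(link)
    processing.append(link)

def pop_processing_atomic():
    return processing.popright()  # returns None when the list is empty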
Example #22
import pickle

import settings
from walrus import Database
from esl_events_config import ESL_EVENT_FAMILY_DICT
from cache import RefreshCache

esl_url_root = 'http://api.esl.tv/v1'
# esl_url_root = 'http://cdn1.api.esl.tv/v1'
facebook_graph_url_root = 'https://graph.facebook.com'
esl_event_url = esl_url_root + '/event/bydomainurl?livedomain={esl_event_domain}&liveurl={esl_event_path}'
esl_channel_url = esl_url_root + '/channel/eventchannels?pid={esl_event_id}&hideservice=web'
facebook_graph_page_url = facebook_graph_url_root + '/{facebook_id}?fields=link,username&access_token={facebook_app_id}|{facebook_app_secret}'
facebook_graph_page_live_videos_url = facebook_graph_url_root + '/{facebook_page_username}/live_videos?access_token={facebook_access_token}'
facebook_stream_fetch_url = 'https://www.facebook.com/video/tahoe/async/{facebook_video_id}/?chain=true&isvideo=true&originalmediaid={facebook_video_id}&playerorigin=permalink&playersuborigin=tahoe&ispermalink=true&numcopyrightmatchedvideoplayedconsecutively=0&dpr=1'  # dpr = device pixel ratio
facebook_video_embed_url = 'https://www.facebook.com/embedvideo/video.php'

db = Database(host=settings.REDIS_HOST,
              port=settings.REDIS_PORT,
              db=settings.REDIS_DB)
cache = RefreshCache(db, name='cache', default_timeout=3600)

esl_event_family_dict = ESL_EVENT_FAMILY_DICT


def set_esl_event_family_dict():
    db['esl_event_family_dict'] = pickle.dumps(esl_event_family_dict,
                                               pickle.HIGHEST_PROTOCOL)


def get_esl_event_family_dict():
    global esl_event_family_dict
    esl_event_family_dict = pickle.loads(db['esl_event_family_dict'])
    return esl_event_family_dict
Example #23
import hashlib
import logging
import string

import requests

try:
    import _pickle as pickle
except ImportError:
    import pickle

from urllib.parse import urljoin
from walrus import Database

db = Database()
cache = db.cache()


class BaseRequest:
    def __init__(self, session, type, path, cache_ttl=None):
        self.cache_ttl = cache_ttl if cache_ttl is not None else session.cache_ttl
        self.cache_on = bool(self.cache_ttl)
        self.session = session
        self.logger = session.logger
        self.type = type
        self.path = path
        self.send = self.method()
        if self.cache_on:
            deco = cache.cached(self.key_fn,
                                timeout=self.cache_ttl,
                                metrics=True)
            self.send = deco(self.send)  # wrap send() with the walrus cache
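
cache.cached also works as a plain decorator; a minimal sketch using the module-level cache above (fetch_page is illustrative):

@cache.cached(timeout=60)
def fetch_page(url):
    # The result is stored in redis and reused for 60 seconds per unique call.
    return requests.get(url).text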
Example #24
import urllib2
import json
import os
import ssl
from collections import OrderedDict
from flask import Flask
from walrus import Database
import default_settings

app = Flask(__name__)
app.config.from_object(default_settings)
if 'BUTLERCAM_SETTINGS_FILE' in os.environ:
    app.config.from_envvar('BUTLERCAM_SETTINGS_FILE')
PIPELINES = app.config['PIPELINES']

db = Database(host=app.config['REDIS_HOST'], db=0)
cache = db.cache(default_timeout=app.config['CACHE_TIMEOUT'])

def get_all_builds(pipeline):
    """
    Given the name of a pipeline, the saved builds for that pipeline are
    retrieved, Jenkins is queried, and the combined results are returned.
    """

    pref_url = PIPELINES[pipeline][1]
    api_url = pref_url + '/api/json'
    context = ssl._create_unverified_context()
    data = urllib2.urlopen(api_url, context=context).read()
    parsed = json.loads(data)

    builds = get_saved_builds(pipeline)
Example #25
def xstream(timeseries=True, name=None):
    db = Database()
    if timeseries:
        return db.time_series('ExecutablePrice', ['xbid', 'xask'])
    else:
        return db.Stream(name)
Example #26
        # create an instance of User v1
        user = UserModel(
            name=random.choice(["Juan", "Peter", "Michael", "Moby", "Kim"]),
            age=random.randint(1, 50),
        )

        msgid = consumer_group.my_stream.add({"message": user.serialize()})
        print(f"Producing message {msgid}")

    print("Producer finished....")
    print("#" * 80)
    sleep(2)


if __name__ == "__main__":
    db = Database()
    stream_name = 'my-stream'
    db.Stream(stream_name)  # Create a new stream instance

    # create the consumer group
    consumer_group = db.consumer_group('my-consumer-group-1', [stream_name])
    consumer_group.create()  # Create the consumer group.
    consumer_group.set_id('$')

    produce(consumer_group)
    consume(consumer_group)
Example #27
from walrus import Database, Model, ListField, SetField, HashField
from config import REDIS_URL

db = Database.from_url(REDIS_URL)
LISTENER_TASK_KEY = 'listener:task_id'


class RBase(Model):
    __database__ = db

    def to_dict(self):
        data = {}
        for name, field in self._fields.items():
            if name in self._data:
                data[name] = field.db_value(self._data[name])
            else:
                if isinstance(field, ListField):
                    type_func = list
                elif isinstance(field, SetField):
                    type_func = set
                elif isinstance(field, HashField):
                    type_func = dict
                else:
                    type_func = lambda x: x
                data[name] = type_func(getattr(self, name))
        return data

    @classmethod
    def get(cls, id):
        try:
            return super().get(cls.id == id)
        except ValueError:
            # walrus raises ValueError when no instance matches; treat as a miss
            return None
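
A hedged sketch of how a model built on RBase might be declared and round-tripped (the Task model and its fields are illustrative, not from the source):

from walrus import TextField, IntegerField

class Task(RBase):
    # Illustrative model; field names are assumptions.
    id = TextField(primary_key=True)
    title = TextField()
    priority = IntegerField(index=True)

task = Task.create(id='t-1', title='index docs', priority=2)
print(Task.get('t-1').to_dict())  # uses the safe get() defined above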
Example #28
import urllib2
from walrus import Database

db = Database()
autocomplete = db.autocomplete(namespace='stocks')

def load_data():
    url = 'http://media.charlesleifer.com/blog/downloads/misc/NYSE.txt'
    contents = urllib2.urlopen(url).read()
    for row in contents.splitlines()[1:]:
        ticker, company = row.split('\t')
        autocomplete.store(
            ticker,
            company,
            {'ticker': ticker, 'company': company})

def search(p, **kwargs):
    return autocomplete.search(p, **kwargs)

if __name__ == '__main__':
    autocomplete.flush()
    print 'Loading data (may take a few seconds...)'
    load_data()

    print 'Search stock data by typing a partial phrase.'
    print 'Examples: "uni sta", "micro", "food", "auto"'
    print 'Type "q" at any time to quit'

    while 1:
        cmd = raw_input('? ')
        if cmd == 'q':
            break
Example #29
import os

import requests
from walrus import Database

from utils.parsers.package_parser import PackageParser

base_url = "https://replicate.npmjs.com"

db = Database(host=os.environ.get('REDIS_HOST'),
              port=int(os.environ.get('REDIS_PORT', 6379)),
              db=int(os.environ.get('REDIS_DB', 0)),
              password=os.environ.get('REDIS_PASSWORD'))

cache = db.cache()

WEEKS_IN_SECONDS = 60 * 60 * 24 * 7


class NodeUtil:
    def parse_package(self, package_path):
        package_parser = PackageParser(package_path)
        package_data = package_parser.parse()
        return package_data

    def fetch_info(self, package_list: list) -> dict:
        info_dict = {}
        for req in package_list:
            info = self.fetch_npm(req)
            info_dict[req] = info
        return info_dict
Example #30
import os
import unittest
from distutils.version import StrictVersion

from walrus import Database

HOST = os.environ.get('WALRUS_REDIS_HOST') or '127.0.0.1'
PORT = os.environ.get('WALRUS_REDIS_PORT') or 6379

db = Database(host=HOST, port=PORT, db=15)

REDIS_VERSION = None


def requires_version(min_version):
    def decorator(fn):
        global REDIS_VERSION
        if REDIS_VERSION is None:
            REDIS_VERSION = db.info()['redis_version']
        too_old = StrictVersion(REDIS_VERSION) < StrictVersion(min_version)
        return unittest.skipIf(too_old,
                               'redis too old, requires %s' % min_version)(fn)

    return decorator


def stream_test(fn):
    test_stream = os.environ.get('TEST_STREAM')
    if not test_stream:
        return requires_version('4.9.101')(fn)
    else:
        # TEST_STREAM is set: run the test unconditionally (assumed completion)
        return fn
Example #31
from walrus import Database, Model, TextField, IntegerField, JSONField

db = Database(host="localhost",
              port=6379,
              charset="utf-8",
              decode_responses=True)

MODES = (('public', 'public'), ('private', 'private'))


class Lobby(Model):
    __database__ = db
    name = TextField(primary_key=True)
    timestamp = IntegerField()
    mode = TextField()
    password = TextField()

    game = IntegerField(index=True)
    setup = IntegerField()
    owner = IntegerField()


class Player(Model):
    __database__ = db
    # primary key is a combination of game, lobby name, and player name
    name = TextField(primary_key=True)
    lobby = TextField(index=True)
    user = IntegerField()

    data = JSONField()
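
For illustration, creating and querying these models might look like the sketch below (all values are placeholders):

# Hedged usage sketch; lobby values are placeholders.
lobby = Lobby.create(name='casual-1', timestamp=1700000000,
                     mode='public', password='', game=1, setup=0, owner=42)

# Indexed fields support expression queries in walrus.
for lobby in Lobby.query(Lobby.game == 1):
    print(lobby.name)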
Example #32
import unittest

from walrus import Database


db = Database(db=15)


class WalrusTestCase(unittest.TestCase):
    def setUp(self):
        db.flushdb()
        db._transaction_local.pipes = []

    def tearDown(self):
        db.flushdb()
        db._transaction_local.pipes = []

    def assertList(self, values, expected):
        values = list(values)
        self.assertEqual(len(values), len(expected))
        for value, item in zip(values, expected):
            self.assertEqual(value, item)