Example No. 1
import imp
import json
import os


def get_redis_lite(*args, **kwargs):
    try:
        imp.find_module('redislite')
        import redislite

        if args:
            settings_file = args[0] + '.settings'

            try:
                remove_settings = False
                with open(settings_file, 'r') as f:
                    settings = json.load(f)
                    if not os.path.exists(settings['unixsocket']):
                        remove_settings = True
            except Exception:
                # settings file is missing or unreadable; nothing to clean up
                pass

            if remove_settings:
                os.remove(settings_file)

        return redislite.StrictRedis(*args, **kwargs)
    except ImportError:
        raise EnvironmentError('Redislite module is not available')
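
A minimal usage sketch (the dbfilename path below is illustrative, and the imports above are assumed to be available):

# hypothetical usage: a positional dbfilename also triggers the stale-settings cleanup above
db = get_redis_lite('/tmp/example.rdb')
db.set('greeting', 'hello')
print(db.get('greeting'))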
Example No. 2
def resolve_host(target):
    """
    :param target: str The host:port pair or path
    :return:
    """
    target = target.strip()
    if target.startswith('redis://') or target.startswith('unix://'):
        return redis.StrictRedis.from_url(target)

    try:
        hostname, port = target.split(':')
        return redis.StrictRedis(host=hostname, port=int(port))
    except ValueError:
        start = time.time()
        while True:
            try:
                redislite.StrictRedis.start_timeout = \
                    REDISLITE_LOAD_WAIT_TIMEOUT
                conn = redislite.StrictRedis(target)
            except BusyLoadingError:
                logging.info('%s loading', target)
                elapsed = time.time() - start
                if elapsed > REDISLITE_LOAD_WAIT_TIMEOUT:
                    raise BusyLoadingError('unable to load rdb %s' % target)
                time.sleep(REDISLITE_LOAD_WAIT_INTERVAL_SECS)
                continue

            if conn.info('persistence').get('loading', 0):
                logging.warning('%s loading', target)
                time.sleep(REDISLITE_LOAD_WAIT_INTERVAL_SECS)
                elapsed = time.time() - start
                if elapsed > REDISLITE_LOAD_WAIT_TIMEOUT:
                    raise BusyLoadingError('unable to load rdb %s' % target)
                continue
            return conn
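
A usage sketch for resolve_host, assuming the module-level REDISLITE_LOAD_WAIT_* constants are defined; the host, port, and path are illustrative:

# a host:port pair resolves to a normal client; a bare rdb path falls through to redislite
remote = resolve_host('localhost:6379')
embedded = resolve_host('/tmp/example.rdb')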
Example No. 3
    def test_shutdown_race_condition(self):
        """
        Test that there is no race condition when a shutdown is run after
        a large data operation.
        """
        if os.path.exists('bug.redis'):
            os.remove('bug.redis')
        db = redislite.StrictRedis('bug.redis')
        for key in range(10000):
            db.hset("h1", os.urandom(32), " " * 65536)
        db.save()
        db._cleanup()
        del db
        db = redislite.StrictRedis('bug.redis')
        db._cleanup()
        if os.path.exists('bug.redis'):
            os.remove('bug.redis')
Example No. 4
def build_redis(port):
    if port is None:
        client = redislite.StrictRedis()
    else:
        client = redis.StrictRedis(port=int(port))

    redpipe.reset()
    redpipe.connect_redis(client)
    return client
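
A short usage sketch of the fallback behaviour (the port value is illustrative):

client = build_redis(None)   # no port: an embedded redislite instance backs redpipe
client = build_redis(6379)   # explicit port: use an already-running Redis server instead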
Example No. 5
def work(identification, graph_name, step_to_do, redis_fn):
    global redis_connection, strict_redis_connection, sparql_server, step, step_graph
    step = step_to_do

    log('work ' + '[' + str(step) + ']')

    #for Collections
    step_graph = ConjunctiveGraph(sparqlstore.SPARQLStore(sparql_uri),
                                  graph_name)

    sparql_server = sparql.SPARQLServer(sparql_uri)
    redis_connection = redislite.Redis(redis_fn)
    strict_redis_connection = redislite.StrictRedis(redis_fn)

    gv_output_file_name = identification + '_' + str(step).zfill(7) + '.gv'

    if list(subjects(RDF.type, kbdbg.frame)) == []:
        log('no frames.' + '[' + str(step) + ']')
        put_last_bindings(step, [])
        return

    if (step == global_start - 1):
        gv_output_file_name = 'dummy'
    try:
        os.unlink(gv_output_file_name)
    except FileNotFoundError:
        pass

    gv_output_file = open(gv_output_file_name, 'w')
    e = Emitter(gv_output_file, step)
    e.generate_gv_image()
    gv_output_file.close()

    if (step == global_start - 1):
        return

    log('convert..' + '[' + str(step) + ']')
    #cmd, args = subprocess.check_output, ("convert", '-regard-warnings', "-extent", '6000x3000',  gv_output_file_name, '-gravity', 'NorthWest', '-background', 'white', gv_output_file_name + '.svg')
    cmd, args = subprocess.check_output, ("dot", '-Tsvg', gv_output_file_name,
                                          '-O')
    try:
        r = cmd(args, stderr=subprocess.STDOUT)
        if r != b"":
            raise RuntimeError('[' + str(step) + '] ' + str(r))
    except subprocess.CalledProcessError as e:
        log('[' + str(step) + ']' + e.output.decode(errors='replace'))
    log('convert done.' + '[' + str(step) + ']')

    if len(stats):
        print('stats:')
        for i in stats:
            print(i)
        #stats.clear()

    redis_connection._cleanup()
    strict_redis_connection._cleanup()
Example No. 6
def _patch_up_redislite():
    import redislite

    global _test_redis_client
    previous_redis_instance = _test_redis_client
    _test_redis_client = redislite.StrictRedis()

    try:
        yield _test_redis_client
    finally:
        _test_redis_client = previous_redis_instance
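
Because the function yields, it is presumably wrapped with contextlib.contextmanager or registered as a fixture; a hypothetical usage under that assumption:

# assumes _patch_up_redislite is decorated with @contextlib.contextmanager
with _patch_up_redislite() as client:
    client.set('key', 'value')
# the previous _test_redis_client is restored on exit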
Example No. 7
    def redis_sync(self, db: int = 0) -> Redis:
        if config.get("redis.use_redislite"):
            REDIS_DB_PATH = os.path.join(
                config.get("redis.redislite.db_path",
                           default_redislite_db_path))
            return redislite.StrictRedis(REDIS_DB_PATH, decode_responses=True)
        self.red = ConsoleMeRedis(
            host=self.host,
            port=self.port,
            db=self.db,
            charset="utf-8",
            decode_responses=True,
        )
        return self.red
Example No. 8
    def setUp(self):
        self.mockRedis = redislite.StrictRedis()
        self.agent = RandomPolicyAgent(self.mockRedis)

        self.actionSchema = {
            "type": "object",
            "properties": {
                "action": {
                    "type": "string",
                    "enum": [
                        "lookLeft",
                        "lookUp",
                        "lookRight",
                        "lookDown",
                        "moveLeft",
                        "moveForward",
                        "moveRight",
                        "moveBack"
                    ]
                },
                "actuator": {
                    "type": "string",
                    "enum": ["look", "move"]
                },
                "direction" : {
                    "type": "string",
                    "enum": ["left", "up", "right", "down", "back", "forward"]
                },
                "factor": {
                    "type" : "number"
                }
            },
            "required": ["action", "actuator", "direction", "factor"]
        }

        self.actionSequenceSchema = {
            "type": "object",
            "properties": {
                "actions": {
                    "type": "array",
                    "items": self.actionSchema,
                    "additionalItems": False
                }
            }
        }
Example No. 9
    def test_redis_log_small_chunks(self):
        r = redislite.StrictRedis()
        lines = r.redis_log_tail(4, width=20)
        self.assertIsInstance(lines, list)
        self.assertEqual(len(lines), 4)
Example No. 10
    def test_redis_log_many_lines(self):
        r = redislite.StrictRedis()
        lines = r.redis_log_tail(lines=99999)
        self.assertIsInstance(lines, list)
Example No. 11
    def test_redis_log_tail(self):
        r = redislite.StrictRedis()
        lines = r.redis_log_tail(2)
        self.assertIsInstance(lines, list)
        self.assertEqual(len(lines), 2)
Example No. 12
    def get_patcher(self):
        import redislite

        connection = redislite.StrictRedis()
        return patch("agir.api.redis._get_auth_redis_client",
                     lambda: connection)
Example No. 13
def work(serialized_graph, input_file_name, step, no_parallel, redis_fn):
    global redis_connection, strict_redis_connection
    strict_redis_connection = redis_fn

    log('work ' + '[' + str(step) + ']')

    redis_connection = redislite.Redis(redis_fn)
    strict_redis_connection = redislite.StrictRedis(redis_fn)

    gv_output_file_name = input_file_name + '_' + str(step).zfill(5) + '.gv'

    log('loads ' + '[' + str(step) + ']')

    g = pickle.loads(serialized_graph)
    #g = Graph(OrderedAndIndexedStore())
    #for i in ujson.loads(serialized_graph):
    #	g.add(i)

    #log('work' + str(id(g)) + ' ' + str(id(g.store)) + ' ' + str(id(g.store.indexes))  + ' ' + str(id(g.store.indexes['ttft']))  + ' ' + str(id(g.store.indexes['ttft'][rdflib.URIRef('http://kbd.bg/Rule1')])))
    g.store.locked = True
    if list(g.subjects(RDF.type, kbdbg.frame)) == []:
        log('no frames.' + '[' + str(step) + ']')
        put_last_bindings(step, [])
        return

    if (step == global_start - 1):
        gv_output_file_name = 'dummy'
    try:
        os.unlink(gv_output_file_name)
    except FileNotFoundError:
        pass

    gv_output_file = open(gv_output_file_name, 'w')
    e = Emitter(g, gv_output_file, step)
    e.generate_gv_image()
    gv_output_file.close()

    if (step == global_start - 1):
        return

    log('convert..' + '[' + str(step) + ']')
    #cmd, args = subprocess.check_output, ("convert", '-regard-warnings', "-extent", '6000x3000',  gv_output_file_name, '-gravity', 'NorthWest', '-background', 'white', gv_output_file_name + '.svg')
    cmd, args = subprocess.check_output, ("dot", '-Tsvg', gv_output_file_name,
                                          '-O')
    if True:
        try:
            r = cmd(args, stderr=subprocess.STDOUT)
            if r != b"":
                raise RuntimeError('[' + str(step) + '] ' + str(r))
        except subprocess.CalledProcessError as e:
            log('[' + str(step) + ']' + e.output.decode(errors='replace'))
        log('convert done.' + '[' + str(step) + ']')
    else:

        def do_or_die(args):
            r = cmd(args, stderr=subprocess.STDOUT)
            if r != b"":
                log(r)
                raise RuntimeError(r)
                #exit()

        futures.append(graphviz_pool.submit(do_or_die, args))

    redis_connection._cleanup()
    strict_redis_connection._cleanup()
Example No. 14
#!/usr/bin/env python
import unittest
import string
import random
import webtest
import os
import shutil
import redislite
import napfs
import falcon
import hashlib
from napfs.helpers import condense_byte_ranges, \
    get_last_contiguous_byte

redis_connection = redislite.StrictRedis(dbfilename='/tmp/test-napfs.db')
NAPFS_DATA_DIR = '/tmp/test-napfs'

NOT_FOUND_BODY = b'{"title": "404 Not Found"}'
NOT_FOUND_BODY_LEN = str(len(NOT_FOUND_BODY))


def create_app():
    if not os.path.exists(NAPFS_DATA_DIR):
        os.mkdir(NAPFS_DATA_DIR)
    return webtest.TestApp(
        napfs.create_app(data_dir=NAPFS_DATA_DIR,
                         redis_connection=redis_connection))


def create_router_app(router):
    if not os.path.exists(NAPFS_DATA_DIR):
Example No. 15
import argparse
from wsgiref.simple_server import make_server  # stdlib WSGI server; assumed source of make_server below

import napfs
import redislite

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='kick off the napfs server')
    parser.add_argument('-p',
                        '--port',
                        default=3035,
                        type=int,
                        help="specify the port to listen to")

    parser.add_argument('-d',
                        '--data-dir',
                        type=str,
                        default="/tmp/napfs",
                        help="specify the directory to use to write the files")

    args = parser.parse_args()
    httpd = make_server(
        '127.0.0.1', args.port,
        napfs.create_app(data_dir=args.data_dir,
                         redis_connection=redislite.StrictRedis()))

    print("starting server on port %s" % args.port)
    print("data-dir %s" % args.data_dir)

    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        print("done")
Example No. 16
#!/usr/bin/env python2
import os
import json
import redis
from flask import Flask, render_template, redirect, request, url_for, make_response
import uuid

if 'VCAP_SERVICES' in os.environ:
    VCAP_SERVICES = json.loads(os.environ['VCAP_SERVICES'])
    CREDENTIALS = VCAP_SERVICES["rediscloud"][0]["credentials"]
    r = redis.Redis(host=CREDENTIALS["hostname"],
                    port=CREDENTIALS["port"],
                    password=CREDENTIALS["password"])
else:
    import redislite
    r = redislite.StrictRedis()

app = Flask(__name__)
my_uuid = str(uuid.uuid1())


@app.route('/')
def mainmenu():

    response = """
	<HTML><BODY><h2>
	<a href="/agenda">Agenda</a><br>
	<a href="/survey">Take Survey</a><br>
	<a href="/dumpsurveys">Survey Results</a><br>
	</h2>
	</BODY>
	"""
Example No. 17
#!/usr/bin/env python
import redpipe
import redislite
import unittest
import redmab

test_conn = redislite.StrictRedis()

redpipe.connect_redis(test_conn, 'test')


def clean():
    test_conn.flushall()


class BasicTestCase(unittest.TestCase):
    def setUp(self):
        clean()

    def tearDown(self):
        clean()

    def mab(self, arms, name='test', alpha=5, beta=5,
            klass=redmab.MultiArmedBandit, expires=3600):
        storage = redmab.create_storage('test', 'test')
        return klass(
            name=name,
            storage=storage,
            arms=arms,
            alpha=alpha,
            beta=beta,
Example No. 18
    def test_redis_log_attribute(self):
        r = redislite.StrictRedis()
        self.assertIn('The server is now ready to accept connections',
                      r.redis_log)
Example No. 19
    parser = argparse.ArgumentParser(description="Speedtest quicktest")
    parser.add_argument('-t', '--host', help='Remote host address')
    parser.add_argument('-p', '--port', help='Remote port')

    args = parser.parse_args()

    if args.host:
        host = args.host

    if args.port:
        port = args.port

    slave = '{} {}'.format(host, port)
    print slave
    #rc = redislite.StrictRedis(serverconfig={'slaveof': slave})
    rc = redislite.StrictRedis(host=host, port=port)
    lst = List(redis=rc, key='speed')

    print "Keys in redis: ", rc.keys()
    print "No of items in redis['speed']: ", len(lst)
    print "Weekly speed\n", processor.average_speed_weekly(lst)

    settings = Dict(redis=rc, key='settings')
    if settings.get('last_test'):
        print 'Last test: ', settings['last_test']
    else:
        print 'No last run'

    settings['last_test'] = datetime.now()

    print "Last hour aggr: ", settings.get('last_hour') if settings.get(
Example No. 20
    # program.  Many of the data structures Redis can store are
    # nearly identical to standard Python built in data types such
    # as lists and dictionaries.
    # We are using the RedisList object from the redis_collections
    # module to access Redis.  This provides us with a python
    # object that works almost exactly like a normal Python list,
    # except the information is stored in Redis.
    # This means the information in the list can persist between
    # program runs or be stored to multiple Redis servers using
    # Redis replication.
    # Unlike SQL databases, the default Redis configuration will
    # do a single disk write operation every 15 minutes instead of
    # every time data is written.  This significantly decreases
    # wear on flash/sd-card.
    # We are going to create two lists in Redis to store our temps,
    # one for celsius and one for fahrenheit.
    redis_connection = redislite.StrictRedis(redisrdb)
    temp_c_list = RedisList(redis=redis_connection, key='temp_c')
    temp_f_list = RedisList(redis=redis_connection, key='temp_f')

    del temp_c_list[:]
    del temp_f_list[:]

    # Now loop through
    while True:
        temp_c_list.append(read_temp_c())
        temp_f_list.append(read_temp_f())
        print('Temp F:', read_temp_f(), 'Hourly Average Temp F:',
              sum(temp_f_list[-3600:]) / len(temp_f_list[-3600:]))
        time.sleep(frequency)
Example No. 21
#!/usr/bin/env python

# std lib
import os
import unittest
from io import StringIO

# 3rd party
import redislite

# our package
import redkeys  # noqa

TEST_DIR = os.path.dirname(__file__)
SRC_RDB = os.path.join(TEST_DIR, '.redis_src.db')
SRC = redislite.StrictRedis(SRC_RDB)


def flush_redis_data(conn):
    if conn is None:
        return

    conn.flushdb()


def clean():
    flush_redis_data(SRC)


class BasicTestCase(unittest.TestCase):
    data = [
Example No. 22
    def test_redis_log_tail_no_log(self):
        r = redislite.StrictRedis()
        if os.path.exists(r.logfile):
            os.remove(r.logfile)
        lines = r.redis_log_tail()
        self.assertEqual(lines, [])
Example No. 23
    def test_redis_log_tail_empty_log(self):
        r = redislite.StrictRedis()
        with open(r.logfile, 'w'):
            pass
        lines = r.redis_log_tail()
        self.assertEqual(lines, [])
Example No. 24
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from redis_collections import Dict
import redislite

# database stored in a file (simplest way)
# TODO: change engine type if needed
db_uri = "sqlite:///database.sql"
engine = create_engine(db_uri)

Base = declarative_base()
Session = sessionmaker(bind=engine)

cache_uri = 'storage.rdb'
redis_connection = redislite.StrictRedis(cache_uri)
Cache = Dict(redis=redis_connection, key='storage')
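
The redis_collections Dict behaves like a regular dict whose entries live in the embedded redislite server, so cached values persist in storage.rdb between runs; a small sketch with illustrative keys:

Cache['last_sync'] = '2024-01-01T00:00:00'
print(Cache.get('last_sync'))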
Example No. 25
from helpers import *
from log import get_logger

MB = 1024.0 * 1024.0
LOCAL_DIR = path.dirname(path.abspath(__file__))
PID_FILE = path.join(LOCAL_DIR, 'speedtest.pid')
DB_FILE = path.join(LOCAL_DIR, 'speedtest.db')
PORT = 8002
MIN_DATETIME = datetime(2016, 1, 1)

logger = get_logger('speedtest')

is_daemon = False

redis = redislite.StrictRedis(DB_FILE, serverconfig={'port': PORT})
data = redis_collections.List(redis=redis, key='speed')
settings = redis_collections.Dict(redis=redis, key='settings')


def get_splitted_output(cmd):
    return subprocess.check_output(cmd).decode().replace('\n', '').replace(
        '\r', '').split(',')


def get_speed():
    headers = get_splitted_output(['speedtest', '--csv-header'])
    try:
        speed = get_splitted_output(['speedtest', '--csv'])
    except Exception as ex:
        if not is_daemon:
Example No. 26
import os
import unittest
from six import StringIO

# 3rd party
import redis
import redislite
import rediscluster
import redislite.patch

# our package
import redisimp  # noqa

TEST_DIR = os.path.dirname(__file__)
SRC_RDB = os.path.join(TEST_DIR, '.redis_src.db')
SRC = redislite.StrictRedis(SRC_RDB)
SRC_ALT_RDB = os.path.join(TEST_DIR, '.redis_src_alt.db')
SRC_ALT = redislite.StrictRedis(SRC_ALT_RDB)
DST_RDB = os.path.join(TEST_DIR, '.redis_dst.db')
DST = redislite.StrictRedis(DST_RDB)


def flush_redis_data(conn):
    if conn is None:
        return

    if rediscluster and isinstance(conn, rediscluster.RedisCluster):
        conns = [
            redis.StrictRedis(host=node['host'], port=node['port'])
            for node in conn.connection_pool.nodes.nodes.values()
            if node.get('server_type', None) == 'master'
Example No. 27
def get_redis_connection(host):
    if redislite and host.startswith("redislite://"):
        return redislite.StrictRedis(host[12:])
    else:
        return redis.StrictRedis.from_url(host)
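
A usage sketch of the prefix dispatch (the path and URL are illustrative):

embedded = get_redis_connection('redislite:///tmp/example.rdb')  # embedded server backed by the given file
remote = get_redis_connection('redis://localhost:6379/0')        # standard redis URL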
Example No. 28
    def test_redis_log_attribute(self):
        r = redislite.StrictRedis()
        self.assertIn('Server started, Redis version', r.redis_log)
Example No. 29
        {'code': 'es'},
        {'code': 'ru'},
        {'code': 'uk'},
    ),
    'default': {
        'fallback': 'en',
        'hide_untranslated': False,
    }
}


LOGIN_REDIRECT_URL = 'dashboard'
LOGIN_URL = 'login'
LOGOUT_URL = 'logout'


AUTHENTICATION_BACKENDS = [
    'django.contrib.auth.backends.ModelBackend',
    'account.authentication.EmailAuthBackend',
]


#from redislite import StrictRedis #Redis
import redislite

# Create a Redis instance using redislite
REDIS_DB_PATH = os.path.join('/tmp/shop_redis.db')
RDB = redislite.StrictRedis(REDIS_DB_PATH)
REDIS_SOCKET_PATH = 'redis+socket://%s' % (RDB.socket_file, )
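
REDIS_SOCKET_PATH is formatted as a broker-style URL; other code in the same process can also reach the embedded server directly through its unix socket, for example (a sketch, not part of the original settings):

# hypothetical direct connection over the redislite unix socket
import redis
r = redis.Redis(unix_socket_path=RDB.socket_file)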

Example No. 30
def work(identification, graph_name, _range_start, _range_end, redis_fn):
    global redis_connection, strict_redis_connection, sparql_server, current_step, range_start, range_end, ss, just_unbound_bindings, frames_done_count, frame_templates, bnode_strings
    range_start, range_end = _range_start, _range_end
    sparql_server = sparql.SPARQLServer(sparql_uri)
    redis_connection = redislite.Redis(redis_fn)
    strict_redis_connection = redislite.StrictRedis(redis_fn)
    frame_templates = redis_collections.Dict(key='frames',
                                             redis=strict_redis_connection,
                                             writeback=True)
    bnode_strings = redis_collections.Dict(key='bnodes',
                                           redis=strict_redis_connection,
                                           writeback=True)

    raw = defaultdict(list)
    #todo limit queries with range_start, then uncomment this
    #if range_start != 0:
    #	raw = redis_load('checkpoint'+str(range_start - 1))

    raw['frames'] += list(
        query(('frame', 'parent', 'is_for_rule', 'step_finished',
               'step_created'), """WHERE
	{
		""" + frame_query() + """
		OPTIONAL {?frame kbdbg:has_parent ?parent}.
		?frame kbdbg:is_for_rule ?is_for_rule. 
	}"""))

    raw['bnodes'] += list(
        query(('bnode', 'frame', 'items', 'step_created', 'step_finished'),
              """WHERE
		{
		?bnode kbdbg:has_items ?items.
		?bnode kbdbg:has_parent ?frame.
		GRAPH ?g_created {?bnode rdf:type kbdbg:bnode}.
		""" + step_magic('_created') + """
		OPTIONAL {
			GRAPH ?g_finished{?frame kbdbg:is_finished true}.
			""" + step_bind('_finished') + """
		}
		}"""))

    raw['results'] += list(
        query(('uri', 'value', 'step_unbound'), """WHERE {GRAPH ?g_created 
			{
				?uri rdf:type kbdbg:result.
				?uri rdf:value ?value.
			}.""" + step_magic('_created') + """
			OPTIONAL {GRAPH ?g_unbound {?uri kbdbg:was_ubound true}.}.""" +
              step_bind('_unbound') + '}'))

    raw['bindings'] += list(
        query(('x', 'source', 'target', 'source_frame', 'target_frame',
               'source_is_bnode', 'target_is_bnode', 'source_term_idx',
               'target_term_idx', 'source_is_in_head', 'target_is_in_head',
               'source_arg_idx', 'target_arg_idx', 'stepbinding_unbound',
               'stepbinding_failed', 'stepbinding_created'), """WHERE 
		{
		GRAPH ?gbinding_created {?x rdf:type kbdbg:binding.}.
		""" + step_magic('binding_created') + """
		OPTIONAL {GRAPH ?gbinding_unbound {?x kbdbg:was_unbound true}.
		""" + step_bind('binding_unbound') + """
		}.
		OPTIONAL {GRAPH ?gbinding_failed  {?x kbdbg:failed true}.
		""" + step_bind('binding_failed') + """
		}.
		?x kbdbg:has_source ?source.
		?x kbdbg:has_target ?target.
		?source kbdbg:has_frame ?source_frame.
		?target kbdbg:has_frame ?target_frame.
		OPTIONAL {?source kbdbg:is_bnode ?source_is_bnode.}.
		OPTIONAL {?target kbdbg:is_bnode ?target_is_bnode.}.
		?source kbdbg:term_idx ?source_term_idx.
		?target kbdbg:term_idx ?target_term_idx.
		OPTIONAL {?source kbdbg:is_in_head ?source_is_in_head.}.
		OPTIONAL {?target kbdbg:is_in_head ?target_is_in_head.}.
		OPTIONAL {?source kbdbg:arg_idx ?source_arg_idx.}.
		OPTIONAL {?target kbdbg:arg_idx ?target_arg_idx.}.
		}"""))

    current_step = range_end
    redis_save('checkpoint' + str(range_end), filter_out_irrelevant_stuff(raw))
    current_step = '666'

    last_bindings = raw['bindings'][:]
    for i in range(range_start, range_end + 1):
        current_step = i
        ss = '[' + str(current_step) + ']'
        info('work ' + ss)
        state = filter_out_irrelevant_stuff(raw)
        if len(state['frames']) == 0:
            info('no frames.' + ss)
            continue

    #won't work with euler-style steps
        #if last_bindings == state['bindings']:
        #	return 'end'

        #todo make emitter save data to output, not to file
        e = Emitter()
        e.gv("digraph frame" + str(current_step) +
             "{  ")  #splines=ortho;#gv("pack=true")
        e.do_frames(state['frames'])
        e.do_bnodes(state['bnodes'])
        e.do_results(state['results'])
        e.do_bindings(state['bindings'])
        e.gv("}")
        info('}..' + ss)

        args = (ss, identification + '_' + str(current_step).zfill(7) + '.gv',
                e.output)
        while len(graphviz_futures) > 10000:
            info('sleeping')
            time.sleep(10)
            check_futures2(graphviz_futures)

        graphviz_futures.append(graphviz_pool.submit(output, *args))
        check_futures2(graphviz_futures)

    #print_stats()
    redis_connection._cleanup()
    strict_redis_connection._cleanup()