Example #1
 def setUp(self):
     # get a Redis connection for the caches
     # TODO: allow overriding these with env vars
     self.url_cache = redis_collections.Dict(
         key='test-url-cache-key-delete-me')
     self.hash_cache = redis_collections.Dict(
         key='test-hash-cache-key-delete-me')
     self.url_cache['https://example.edu'] = {
         'If-None-Match': 'nice etag',
         'If-Modified-Since': 'since test val'
     }
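A minimal sketch of why these fixtures matter (assuming a Redis server on localhost:6379, which redis_collections.Dict uses by default when no client is passed): entries persist under the Dict's Redis key until explicitly cleared.

import redis_collections

# Sketch: a Dict built with only a key talks to a default local Redis,
# and its entries persist under that key until cleared.
cache = redis_collections.Dict(key='test-url-cache-key-delete-me')
cache['https://example.edu'] = {'If-None-Match': 'nice etag'}
assert 'https://example.edu' in cache  # stored in Redis, not local memory
cache.clear()  # remove the entries so the 'delete-me' key leaves no residue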
Example #2
def run(architecture_file, model_file, num_batches=10, pattern="centralModel-*"):
    print "evaluating Q values..."
    redisInstance = Redis(host='localhost', port=6379, db=0)
    model_keys = redisInstance.keys(pattern)
    results = {}

    net = BaristaNet(architecture_file, model_file, None)
    replay_dataset = ReplayDataset("temp-q-converge-dset.hdf5",
                                   net.state[0].shape,
                                   dset_size=1000,
                                   overwrite=True)
    net.add_dataset(replay_dataset)

    game = SnakeGame()
    preprocessor = generate_preprocessor(net.state.shape[2:], gray_scale)
    exp_gain = ExpGain(net, ['w', 'a', 's', 'd'], preprocessor, game.cpu_play,
                       replay_dataset, game.encode_state())

    print "Generating new experiences..."
    for _ in xrange(100):
        exp_gain.generate_experience(1e5)
    print "Done"

    for key in model_keys:
        key = key.decode() if isinstance(key, bytes) else key  # redis-py returns bytes keys
        print("Evaluating model:", key)
        model = dict(redisC.Dict(key=key, redis=redisInstance))
        q_avg = evaluate_model(net, model, num_batches)
        results[key] = q_avg

    for key in sorted(results.keys()):
        print(key.ljust(25) + "%0.4f" % results[key])
Example #3
def saveSnapshot(snapshot_name, model):
    print "saving snapshot"
    snapshot = redisC.Dict(redis=redisInstance, key=snapshot_name)
    for key in model:
        snapshot[key] = model[key]
    filename = snapshot_name + str(datetime.now())
    with open(filename, 'wb') as f:
        pickle.dump(dict(model), f)
    print "[SNAPSHOT] Model snapshot saved:", snapshot_name
Example #4
 def __init__(self,
              cdb=None,
              url_couchdb=None,
              couchdb_name=None,
              couch_view=COUCHDB_VIEW,
              bucket_bases=BUCKET_BASES,
              object_auth=None,
              get_if_object=False,
              ignore_content_type=False,
              url_cache=None,
              hash_cache=None,
              harvested_object_cache=None):
     self._config = config()
     if cdb:
         self._couchdb = cdb
     else:
         if not url_couchdb:
             url_couchdb = self._config['couchdb_url']
         self._couchdb = get_couchdb(url=url_couchdb, dbname=couchdb_name)
     self._bucket_bases = bucket_bases
     self._view = couch_view
     # auth is a tuple of username, password
     self._auth = object_auth
     self.get_if_object = get_if_object  # if object field exists, get
     self.ignore_content_type = ignore_content_type  # Don't check content-type in headers
     self._redis = Redis(
         host=self._config['redis_host'],
         port=self._config['redis_port'],
         password=self._config['redis_password'],
         socket_connect_timeout=self._config['redis_connect_timeout'])
     self._url_cache = url_cache if url_cache is not None else \
         redis_collections.Dict(key='ucldc-image-url-cache',
                                redis=self._redis)
     self._hash_cache = hash_cache if hash_cache is not None else \
         redis_collections.Dict(key='ucldc-image-hash-cache',
                                redis=self._redis)
     self._object_cache = harvested_object_cache if harvested_object_cache is not None \
         else redis_collections.Dict(
             key='ucldc:harvester:harvested-images',
             redis=self._redis)
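The caches above fall back to Redis-backed Dicts only when nothing is injected; a small sketch of that idiom (make_cache is a hypothetical name, assuming a local Redis):

import redis_collections
from redis import Redis

# Hypothetical helper illustrating the injection idiom used above: fall
# back to a Redis-backed Dict only when no cache is supplied, so tests
# can pass a plain dict and stay off the production keys.
def make_cache(injected=None, key='demo-cache'):
    if injected is not None:
        return injected
    return redis_collections.Dict(key=key, redis=Redis())

test_cache = make_cache({})  # plain dict, no Redis traffic
prod_cache = make_cache()    # Redis-backed, shared across processes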
Example #5
def initParams(solver_filename, reset=True):
    global redisInstance
    redisInstance = Redis(host='localhost', port=6379, db=0)
    averageReward = redisC.Dict(redis=redisInstance, key="averageReward")

    if reset:
        # Remove all previously saved snapshots from redis
        for name in redisInstance.keys(MODEL_NAME + "-*"):
            snapshot = redisC.Dict(redis=redisInstance, key=name)
            snapshot.clear()

        centralModel.clear()
        rmsprop.clear()
        adagrad.clear()
        averageReward.clear()
        redisInstance.set("iteration", 0)

        # Instantiate model parameters according to initialization
        # scheme specified in .prototxt file
        solver = SGDSolver(solver_filename)
        for name in solver.net.params:
            if is_tracked_param(name):
                parameters = solver.net.params[name]
                init = []
                for i in range(len(parameters)):
                    init.append(np.array(parameters[i].data, dtype='float32'))
                centralModel[name] = init

        print()
        print("[Redis Collection]: Initialized the following parameters:")
        for key in centralModel.keys():
            print("  - " + key + ' (%d parameters)' % len(centralModel[key]))

    else:
        solver = SGDSolver(solver_filename)
        for name in solver.net.params:
            parameters = solver.net.params[name]
            assert name in centralModel and \
                len(centralModel[name]) == len(parameters), \
                "Model in Redis database does not match specified solver."
Example #6
 def __init__(self, app, schedule=None, max_interval=None,
              Publisher=None, lazy=False, sync_every_tasks=None, **kwargs):
     #self.schedule_filename = kwargs.get('schedule_filename')
     redis_uri = app.conf.CELERY_SCHEDULE_BACKEND
     # set up the Redis manager
     self.manager = RedisManager(redis_uri)
     #keys = self.manager.inspect(pattern='*', debug=False)
     
     self._prefix = app.conf.CELERY_REDIS_SCHEDULER_KEY_PREFIX
     
     self._schedule = redis_collections.Dict(key=self._prefix, redis=self.manager.conn)
     Scheduler.__init__(self, app, schedule=schedule, 
                        max_interval=max_interval, Publisher=Publisher, 
                        lazy=lazy, sync_every_tasks=sync_every_tasks, **kwargs)
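The scheduler reads two custom settings from the Celery app config; hypothetical values for illustration:

# Hypothetical Celery settings the scheduler above expects (values illustrative):
CELERY_SCHEDULE_BACKEND = 'redis://localhost:6379/0'        # consumed by RedisManager
CELERY_REDIS_SCHEDULER_KEY_PREFIX = 'celery:beat:schedule'  # Redis key backing the Dict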
Example #7
 def __init__(s, g, gv_output_file, step):
     s.g, s.gv_output_file = g, gv_output_file
     s.step = step
     s.frame_templates = redis_collections.Dict(
         redis=strict_redis_connection)
Example #8
'''one-time script to populate redis with harvested image object data'''
from harvester.config import config
from harvester.couchdb_init import get_couchdb
from harvester.couchdb_pager import couchdb_pager
from redis import Redis
import redis_collections

_config = config()

_redis = Redis(host=_config['redis_host'],
               port=_config['redis_port'],
               password=_config['redis_password'],
               socket_connect_timeout=_config['redis_connect_timeout'])

object_cache = redis_collections.Dict(key='ucldc:harvester:harvested-images',
                                      redis=_redis)

_couchdb = get_couchdb(url=_config['couchdb_url'], dbname='ucldc')
v = couchdb_pager(_couchdb, include_docs='true')
for r in v:
    doc = r.doc
    if 'object' in doc:
        did = doc['_id']
        if 'object_dimensions' not in doc:
            print "NO DIMS for {} -- not caching".format(did)
        else:
            object_cache[did] = [doc['object'], doc['object_dimensions']]
            print "OBJECT CACHE : {} === {}".format(did, object_cache[did])
Example #9
import subprocess
from datetime import datetime
from os import path

import redislite
import redis_collections

from log import get_logger

MB = 1024.0 * 1024.0
LOCAL_DIR = path.dirname(path.abspath(__file__))
PID_FILE = path.join(LOCAL_DIR, 'speedtest.pid')
DB_FILE = path.join(LOCAL_DIR, 'speedtest.db')
PORT = 8002
MIN_DATETIME = datetime(2016, 1, 1)

logger = get_logger('speedtest')

is_daemon = False

redis = redislite.StrictRedis(DB_FILE, serverconfig={'port': PORT})
data = redis_collections.List(redis=redis, key='speed')
settings = redis_collections.Dict(redis=redis, key='settings')


def get_splitted_output(cmd):
    return subprocess.check_output(cmd).decode().replace('\n', '').replace(
        '\r', '').split(',')


def get_speed():
    headers = get_splitted_output(['speedtest', '--csv-header'])
    try:
        speed = get_splitted_output(['speedtest', '--csv'])
    except Exception as ex:
        if not is_daemon:
            print(ex)
        logger.error(ex)
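    # The fragment ends before the parsed row is stored; a sketch (not the
    # source's code) of the presumable continuation, reusing names from above:
    else:
        record = dict(zip(headers, speed))  # pair CSV header names with fields
        data.append(record)  # the redis_collections.List persists via redislite
        return record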
Example #10
 def __init__(self, options, *args, **kwargs):
     super(StorageObject, self).__init__(options, *args, **kwargs)
     self.redis = redis.Redis(host=self.options.redis_host,
                              port=self.options.redis_port)
     self.storage = redis_collections.Dict(key=self.options.redis_key,
                                           redis=self.redis)
Example #11
def saveAverageReward():
    print "saving all rewards to disk"
    averageReward = redisC.Dict(redis=redisInstance, key='averageReward')
    filename = "averageReward" + str(datetime.now())
    with open(filename, 'wb') as f:
        pickle.dump(dict(averageReward), f)
Example #12
def work(identification, graph_name, _range_start, _range_end, redis_fn):
    global redis_connection, strict_redis_connection, sparql_server, current_step, range_start, range_end, ss, just_unbound_bindings, frames_done_count, frame_templates, bnode_strings
    range_start, range_end = _range_start, _range_end
    sparql_server = sparql.SPARQLServer(sparql_uri)
    redis_connection = redislite.Redis(redis_fn)
    strict_redis_connection = redislite.StrictRedis(redis_fn)
    frame_templates = redis_collections.Dict(key='frames',
                                             redis=strict_redis_connection,
                                             writeback=True)
    bnode_strings = redis_collections.Dict(key='bnodes',
                                           redis=strict_redis_connection,
                                           writeback=True)

    raw = defaultdict(list)
    # TODO: limit queries with range_start, then uncomment this
    #if range_start != 0:
    #	raw = redis_load('checkpoint'+str(range_start - 1))

    raw['frames'] += list(
        query(('frame', 'parent', 'is_for_rule', 'step_finished',
               'step_created'), """WHERE
	{
		""" + frame_query() + """
		OPTIONAL {?frame kbdbg:has_parent ?parent}.
		?frame kbdbg:is_for_rule ?is_for_rule. 
	}"""))

    raw['bnodes'] += list(
        query(('bnode', 'frame', 'items', 'step_created', 'step_finished'),
              """WHERE
		{
		?bnode kbdbg:has_items ?items.
		?bnode kbdbg:has_parent ?frame.
		GRAPH ?g_created {?bnode rdf:type kbdbg:bnode}.
		""" + step_magic('_created') + """
		OPTIONAL {
			GRAPH ?g_finished{?frame kbdbg:is_finished true}.
			""" + step_bind('_finished') + """
		}
		}"""))

    raw['results'] += list(
        query(('uri', 'value', 'step_unbound'), """WHERE {GRAPH ?g_created 
			{
				?uri rdf:type kbdbg:result.
				?uri rdf:value ?value.
			}.""" + step_magic('_created') + """
			OPTIONAL {GRAPH ?g_unbound {?uri kbdbg:was_unbound true}.}.""" +
              step_bind('_unbound') + '}'))

    raw['bindings'] += list(
        query(('x', 'source', 'target', 'source_frame', 'target_frame',
               'source_is_bnode', 'target_is_bnode', 'source_term_idx',
               'target_term_idx', 'source_is_in_head', 'target_is_in_head',
               'source_arg_idx', 'target_arg_idx', 'stepbinding_unbound',
               'stepbinding_failed', 'stepbinding_created'), """WHERE 
		{
		GRAPH ?gbinding_created {?x rdf:type kbdbg:binding.}.
		""" + step_magic('binding_created') + """
		OPTIONAL {GRAPH ?gbinding_unbound {?x kbdbg:was_unbound true}.
		""" + step_bind('binding_unbound') + """
		}.
		OPTIONAL {GRAPH ?gbinding_failed  {?x kbdbg:failed true}.
		""" + step_bind('binding_failed') + """
		}.
		?x kbdbg:has_source ?source.
		?x kbdbg:has_target ?target.
		?source kbdbg:has_frame ?source_frame.
		?target kbdbg:has_frame ?target_frame.
		OPTIONAL {?source kbdbg:is_bnode ?source_is_bnode.}.
		OPTIONAL {?target kbdbg:is_bnode ?target_is_bnode.}.
		?source kbdbg:term_idx ?source_term_idx.
		?target kbdbg:term_idx ?target_term_idx.
		OPTIONAL {?source kbdbg:is_in_head ?source_is_in_head.}.
		OPTIONAL {?target kbdbg:is_in_head ?target_is_in_head.}.
		OPTIONAL {?source kbdbg:arg_idx ?source_arg_idx.}.
		OPTIONAL {?target kbdbg:arg_idx ?target_arg_idx.}.
		}"""))

    current_step = range_end
    redis_save('checkpoint' + str(range_end), filter_out_irrelevant_stuff(raw))
    current_step = '666'

    last_bindings = raw['bindings'][:]
    for i in range(range_start, range_end + 1):
        current_step = i
        ss = '[' + str(current_step) + ']'
        info('work ' + ss)
        state = filter_out_irrelevant_stuff(raw)
        if len(state['frames']) == 0:
            info('no frames.' + ss)
            continue

        # won't work with euler-style steps
        #if last_bindings == state['bindings']:
        #	return 'end'

        # TODO: make the emitter save data to output, not to a file
        e = Emitter()
        e.gv("digraph frame" + str(current_step) +
             "{  ")  #splines=ortho;#gv("pack=true")
        e.do_frames(state['frames'])
        e.do_bnodes(state['bnodes'])
        e.do_results(state['results'])
        e.do_bindings(state['bindings'])
        e.gv("}")
        info('}..' + ss)

        args = (ss, identification + '_' + str(current_step).zfill(7) + '.gv',
                e.output)
        while len(graphviz_futures) > 10000:
            info('sleeping')
            time.sleep(10)
            check_futures2(graphviz_futures)

        graphviz_futures.append(graphviz_pool.submit(output, *args))
        check_futures2(graphviz_futures)

    #print_stats()
    redis_connection._cleanup()
    strict_redis_connection._cleanup()
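The writeback=True flag used for frame_templates and bnode_strings mirrors shelve: mutations to retrieved values land in a local cache and reach Redis only on sync(). A minimal sketch, assuming redislite as above:

import redislite
import redis_collections

# Sketch of writeback semantics: in-place mutations of stored values are
# cached locally and flushed to Redis only when sync() is called.
r = redislite.StrictRedis('/tmp/demo.db')
frames = redis_collections.Dict(key='frames-demo', redis=r, writeback=True)
frames['f1'] = []
frames['f1'].append('node')  # mutates the locally cached list, not Redis yet
frames.sync()                # flush cached mutations to Redis
assert frames['f1'] == ['node']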