def update_graph_context(updates, update_count=False):
    """Write unit-normalized context vectors back to the graph store.

    For every item in *updates*, the ``updated_context`` vector is scaled
    to unit length and stored under the ``context`` hash field of that
    item's ``connection_key``.  All writes are queued on one pipeline and
    flushed with a single ``execute()`` round trip.

    Args:
        updates: iterable of dicts, each with ``connection_key`` (hash key
            in ``collection``) and ``updated_context`` (numpy vector).
        update_count: when True, also atomically increment the
            ``update_count`` hash field for each key.
    """
    # The original duplicated the whole pipeline/loop for the counted and
    # uncounted cases; a single loop with the flag checked per item queues
    # the exact same commands in the same order.
    with collection.pipeline() as pipe:
        for x in updates:
            context_vector = x['updated_context']
            # Store unit-length vectors so downstream similarity reduces to
            # a dot product.  NOTE(review): a zero vector would divide by
            # zero here — confirm callers never pass one.
            unit_context_vector = context_vector / np.linalg.norm(context_vector)
            pipe.hset(x['connection_key'], 'context', unit_context_vector.tobytes())
            if update_count:
                pipe.hincrby(x['connection_key'], 'update_count', 1)
        pipe.execute()
def update_graph_nodes(updates):
    """Persist unit-normalized update and weight vectors for each node.

    For every item in *updates*, the ``update_vector`` and
    ``weight_vector`` are each scaled to unit length and written to the
    correspondingly named hash fields of the item's ``connection_key``.
    All writes go through one pipeline and a single ``execute()`` call.

    Args:
        updates: iterable of dicts, each with ``connection_key`` (hash key
            in ``collection``), ``update_vector`` and ``weight_vector``
            (numpy vectors).
    """
    # Dead commented-out context-vector handling removed; that path lives
    # in update_graph_context.
    with collection.pipeline() as pipe:
        for x in updates:
            update_vector = x['update_vector']
            weight_vector = x['weight_vector']
            # NOTE(review): zero-length vectors would divide by zero here —
            # confirm callers never pass one.
            unit_update_vector = update_vector / np.linalg.norm(update_vector)
            unit_weight_vector = weight_vector / np.linalg.norm(weight_vector)
            pipe.hset(x['connection_key'], 'update_vector', unit_update_vector.tobytes())
            pipe.hset(x['connection_key'], 'weight_vector', unit_weight_vector.tobytes())
        pipe.execute()
def release_db_lock(unlock=False):
    """Clear the ``lock`` hash field on every key in the collection.

    A no-op unless *unlock* is explicitly True, so an accidental bare call
    cannot release locks.  When enabled, sets the ``lock`` field of every
    key to 0 via one pipelined ``execute()``.

    Args:
        unlock: must be True for any unlocking to happen.
    """
    # Guard clause: bail out early on the default (safe) path.
    if not unlock:
        return
    with collection.pipeline() as pipe:
        for key in collection.keys():
            pipe.hset(key, b'lock', 0)
        pipe.execute()