def remove_block(self, block):
    # Delete `block` from the experiment: recursively remove its sub-scope
    # children, unbind inputs that other blocks took from it, and purge all
    # related redis keys through one transactional pipeline.
    r = get_redis_instance()
    block_key = ExpKeys.get_block_key(block.uuid)
    pipe = r.pipeline(transaction=True)
    if block.create_new_scope:
        # Meta-block: remove its children first, then unregister the scope
        # it created from the scope-creator hash.
        for sub_block_uuid in block.children_blocks:
            sub_block = self.get_block(sub_block_uuid)
            self.remove_block(sub_block)
        pipe.hdel(ExpKeys.get_scope_creating_block_uuid_keys(self.pk),
                  block.sub_scope_name)
    block.on_remove(exp=self)
    # find all bound variables, that provided by this block
    for other_uuid, other_block in self.get_blocks(self.get_all_block_uuids()):
        if other_uuid == block.uuid:
            continue
        for f_name, bound_var in other_block.bound_inputs.items():
            if bound_var.block_uuid == block.uuid:
                # Drop the stale binding and persist the affected block.
                other_block.bound_inputs.pop(f_name)
                self.store_block(other_block)
    # Remove information related to block from redis
    pipe.lrem(ExpKeys.get_exp_blocks_list_key(self.pk), 0, block.uuid)
    pipe.srem(ExpKeys.get_all_exp_keys_key(self.pk), block_key)
    pipe.hdel(ExpKeys.get_blocks_uuid_by_alias(self.pk), block.base_name)
    scope = Scope(self, block.scope_name)
    scope.remove_vars_from_block(block)
    # All queued deletions are applied atomically here.
    pipe.execute()
def remove_block(self, block):
    # Delete `block` from the experiment: recursively remove children of a
    # scope-creating block, unbind variables other blocks consumed from it,
    # then purge its redis bookkeeping in a single transactional pipeline.
    r = get_redis_instance()
    block_key = ExpKeys.get_block_key(block.uuid)
    pipe = r.pipeline(transaction=True)
    if block.create_new_scope:
        for sub_block_uuid in block.children_blocks:
            sub_block = self.get_block(sub_block_uuid)
            self.remove_block(sub_block)
        # Unregister the sub-scope this block created.
        pipe.hdel(ExpKeys.get_scope_creating_block_uuid_keys(self.pk),
                  block.sub_scope_name)
    block.on_remove(exp=self)
    # find all bound variables, that provided by this block
    for other_uuid, other_block in self.get_blocks(
            self.get_all_block_uuids()):
        if other_uuid == block.uuid:
            continue
        for f_name, bound_var in other_block.bound_inputs.items():
            if bound_var.block_uuid == block.uuid:
                # Drop the stale binding and persist the affected block.
                other_block.bound_inputs.pop(f_name)
                self.store_block(other_block)
    # Remove information related to block from redis
    pipe.lrem(ExpKeys.get_exp_blocks_list_key(self.pk), 0, block.uuid)
    pipe.srem(ExpKeys.get_all_exp_keys_key(self.pk), block_key)
    pipe.hdel(ExpKeys.get_blocks_uuid_by_alias(self.pk), block.base_name)
    scope = Scope(self, block.scope_name)
    scope.remove_vars_from_block(block)
    # All queued deletions are applied atomically here.
    pipe.execute()
def get_all_scopes_with_block_uuids(self, redis_instance=None):
    """Return the hash mapping scope names to the uuid of the block
    that created each scope.

    @param redis_instance: optional Redis client to reuse
    @return: dict of {scope_name: block_uuid}
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    return r.hgetall(ExpKeys.get_scope_creating_block_uuid_keys(self.pk))
def delete_exp(exp):
    """
    We need to clean 3 areas:
        - keys in redis storage
        - uploaded and created files
        - delete exp object through ORM

    @param exp: Instance of Experiment to be deleted
    @return: None
    """
    # redis
    r = get_redis_instance()
    all_exp_keys = ExpKeys.get_all_exp_keys_key(exp.pk)
    keys_to_delete = r.smembers(all_exp_keys)
    # BUG FIX: `set.update(str)` iterates the string and used to add every
    # single *character* of the key; `add` inserts the whole key name.
    keys_to_delete.add(all_exp_keys)
    if keys_to_delete:
        # BUG FIX: redis-py `delete(*names)` takes key names as positional
        # arguments; passing the set as one argument does not delete the
        # intended keys.
        r.delete(*keys_to_delete)

    # uploaded data
    data_files = UploadedData.objects.filter(exp=exp)
    for f in data_files:
        try:
            os.remove(f.data.path)
        except (OSError, ValueError):
            # Best effort: the file may already be gone, or the FileField
            # may have no file attached (`.path` raises ValueError then).
            pass
        f.delete()
    try:
        shutil.rmtree(exp.get_data_folder())
    except OSError:
        # Best effort: data folder may never have been created.
        pass

    # deleting an experiment
    exp.delete()
def send(self):
    """Publish this notification, JSON-encoded, on the experiment's
    redis notify channel and log the payload."""
    msg = self.to_dict()
    r = get_redis_instance()
    # Serialize once and reuse for both publish and logging (the original
    # called json.dumps twice on the same message).
    payload = json.dumps(msg)
    r.publish(ExpKeys.get_exp_notify_publish_key(self.exp_id), payload)
    log.debug("Sent notification: %s", payload)
def save_file_input(self, exp, field_name, file_obj, multiple=False, upload_meta=None):
    # Store an uploaded file into the block field `field_name`.
    # A single upload replaces the field value; with `multiple` the upload
    # is added to a dict-like collection on the block under a redis lock.
    if upload_meta is None:
        upload_meta = {}
    if not hasattr(self, field_name):
        raise Exception("Block doesn't have field: %s" % field_name)
    orig_name = file_obj.name
    # Prefix with block uuid and field name to avoid on-disk collisions.
    local_filename = "%s_%s_%s" % (self.uuid[:8], field_name, file_obj.name)
    if not multiple:
        log.debug("Storing single upload to field: %s", field_name)
        ud, is_created = UploadedData.objects.get_or_create(
            exp=exp, block_uuid=self.uuid, var_name=field_name)
        file_obj.name = local_filename
        ud.data = file_obj
        ud.save()
        ufw = UploadedFileWrapper(ud.pk)
        ufw.orig_name = orig_name
        setattr(self, field_name, ufw)
        exp.store_block(self)
    else:
        log.debug("Adding upload to field: %s", field_name)
        # `filename` is part of the lookup so each uploaded file gets its
        # own UploadedData row within the same field.
        ud, is_created = UploadedData.objects.get_or_create(
            exp=exp, block_uuid=self.uuid, var_name=field_name,
            filename=orig_name)
        file_obj.name = local_filename
        ud.data = file_obj
        ud.filename = orig_name
        ud.save()
        ufw = UploadedFileWrapper(ud.pk)
        ufw.orig_name = orig_name
        r = get_redis_instance()
        # The lock prevents concurrent uploads to the same block from
        # losing updates: re-fetch the block, mutate, store it back.
        with redis_lock.Lock(
                r, ExpKeys.get_block_global_lock_key(self.exp_id, self.uuid)):
            log.debug("Enter lock, file: %s", orig_name)
            block = exp.get_block(self.uuid)
            attr = getattr(block, field_name)
            attr[orig_name] = ufw
            log.debug("Added upload `%s` to collection: %s",
                      orig_name, attr.keys())
            exp.store_block(block)
            log.debug("Exit lock, file: %s", orig_name)
def store(self, redis_instance=None):
    """Pickle this scope's variables and save them under the scope key.

    @param redis_instance: optional Redis client to reuse
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    scope_key = ExpKeys.get_scope_key(self.exp.pk, scope_name=self.name)
    r.set(scope_key, pickle.dumps(self.scope_vars))
    log.debug("Scope `%s` was saved to storage", self.name)
def auto_exec_task(exp, scope_name, is_init=False): r = get_redis_instance() lock_key = ExpKeys.get_auto_exec_task_lock_key(exp.pk, scope_name) with redis_lock.Lock(r, lock_key): try: sr = ScopeRunner(exp, scope_name) sr.execute(is_init) except Exception, e: log.exception(e)
def change_block_alias(self, block, new_base_name):
    """Rename a block's alias: swap the entry in the alias hash via a
    pipeline, then update the block object and re-store it."""
    r = get_redis_instance()
    alias_key = ExpKeys.get_blocks_uuid_by_alias(self.pk)
    pipe = r.pipeline()
    pipe.hdel(alias_key, block.base_name)
    pipe.hset(alias_key, new_base_name, block.uuid)
    pipe.execute()
    block.base_name = new_base_name
    self.store_block(block, redis_instance=r)
def get_meta_block_by_sub_scope(self, scope_name, redis_instance=None):
    """Return the meta-block that created sub-scope `scope_name`.

    @param redis_instance: optional Redis client to reuse
    @raise KeyError: when no block registered that scope name
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    creators_key = ExpKeys.get_scope_creating_block_uuid_keys(self.pk)
    block_uuid = r.hget(creators_key, scope_name)
    if not block_uuid:
        raise KeyError("Doesn't have a scope with name %s" % scope_name)
    return self.get_block(block_uuid, r)
def auto_exec_task(exp, scope_name, is_init=False): r = get_redis_instance() lock_key = ExpKeys.get_auto_exec_task_lock_key(exp.pk, scope_name) with redis_lock.Lock(r, lock_key): try: sr = ScopeRunner(exp, scope_name) sr.execute(is_init) exp.log("root", "Scope %s finished" % scope_name, "INFO") except Exception, e: exp.error() exp.log("root", e.message, "CRITICAL") log.exception(e)
def get_all_block_uuids(self, redis_instance=None):
    """
    @param redis_instance: Redis client
    @return: list of block uuids
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    blocks_list_key = ExpKeys.get_exp_blocks_list_key(self.pk)
    return r.lrange(blocks_list_key, 0, -1) or []
def save_file_input(self, exp, field_name, file_obj, multiple=False, upload_meta=None):
    # Store an uploaded file into the block field `field_name`, mirroring
    # progress into both the experiment log and the module logger.
    # Single uploads overwrite the field; `multiple` uploads are added to a
    # dict-like collection on the block under a per-block redis lock.
    if upload_meta is None:
        upload_meta = {}
    if not hasattr(self, field_name):
        raise Exception("Block doesn't have field: %s" % field_name)
    orig_name = file_obj.name
    # Prefix with block uuid and field name to avoid on-disk collisions.
    local_filename = "%s_%s_%s" % (self.uuid[:8], field_name, file_obj.name)
    if not multiple:
        exp.log(self.uuid, "Storing single upload to field: %s" % field_name)
        log.debug("Storing single upload to field: %s", field_name)
        ud, is_created = UploadedData.objects.get_or_create(
            exp=exp, block_uuid=self.uuid, var_name=field_name)
        file_obj.name = local_filename
        ud.data = file_obj
        ud.save()
        ufw = UploadedFileWrapper(ud.pk)
        ufw.orig_name = orig_name
        setattr(self, field_name, ufw)
        exp.store_block(self)
    else:
        exp.log(self.uuid, "Adding upload to field: %s" % field_name)
        log.debug("Adding upload to field: %s", field_name)
        # `filename` is part of the lookup so each uploaded file gets its
        # own UploadedData row within the same field.
        ud, is_created = UploadedData.objects.get_or_create(
            exp=exp, block_uuid=self.uuid, var_name=field_name,
            filename=orig_name)
        file_obj.name = local_filename
        ud.data = file_obj
        ud.filename = orig_name
        ud.save()
        ufw = UploadedFileWrapper(ud.pk)
        ufw.orig_name = orig_name
        r = get_redis_instance()
        # The lock prevents concurrent uploads to the same block from
        # losing updates: re-fetch the block, mutate, store it back.
        with redis_lock.Lock(r, ExpKeys.get_block_global_lock_key(self.exp_id,
                                                                  self.uuid)):
            exp.log(self.uuid, "Enter lock, file: %s" % orig_name)
            log.debug("Enter lock, file: %s", orig_name)
            block = exp.get_block(self.uuid)
            attr = getattr(block, field_name)
            attr[orig_name] = ufw
            exp.log(self.uuid, "Added upload `%s` to collection: %s" %
                    (orig_name, attr.keys()))
            log.debug("Added upload `%s` to collection: %s",
                      orig_name, attr.keys())
            exp.store_block(block)
            exp.log(self.uuid, "Exit lock, file: %s" % orig_name)
            log.debug("Exit lock, file: %s", orig_name)
def get_ctx(self, redis_instance=None):
    """Load and unpickle the stored experiment context.

    @param redis_instance: optional Redis client to reuse
    @raise KeyError: when no context is stored for this experiment
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    pickled_ctx = r.get(ExpKeys.get_context_store_key(self.pk))
    if pickled_ctx is None:
        raise KeyError("Context wasn't found for exp_id: %s" % self.pk)
    return pickle.loads(pickled_ctx)
def load(self, redis_instance=None):
    """Populate self.scope_vars from redis; a missing key leaves the
    current variables untouched."""
    r = redis_instance if redis_instance is not None else get_redis_instance()
    raw = r.get(ExpKeys.get_scope_key(self.exp.pk, scope_name=self.name))
    if raw is None:
        return
    self.scope_vars = pickle.loads(raw)
    # TODO: set scope name during scope_var creation
    for scope_var in self.scope_vars:
        scope_var.scope_name = self.name
def get_block_aliases_map(self, redis_instance=None):
    """
    @param redis_instance: Redis
    @return: Map { uuid -> alias }
    @rtype: dict
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    alias_to_uuid = r.hgetall(ExpKeys.get_blocks_uuid_by_alias(self.pk))
    # Invert the stored alias -> uuid hash.
    return dict((uuid, alias) for alias, uuid in alias_to_uuid.iteritems())
def get_all_block_uuids(self, redis_instance=None):
    """
    @param redis_instance: Redis client
    @return: list of block uuids (empty list when none are stored)
    """
    if redis_instance is None:
        r = get_redis_instance()
    else:
        r = redis_instance
    return r.lrange(ExpKeys.get_exp_blocks_list_key(self.pk), 0, -1) or []
def get_block(block_uuid, redis_instance=None):
    """
    @type block_uuid: str
    @param block_uuid: Block instance identifier
    @type redis_instance: Redis
    @param redis_instance: Instance of redis client
    @rtype: GenericBlock
    @return: Block instance
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    raw = r.get(ExpKeys.get_block_key(block_uuid))
    return pickle.loads(raw)
def get_block_aliases_map(self, redis_instance=None):
    """
    @param redis_instance: Redis
    @return: Map { uuid -> alias }
    @rtype: dict
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    stored = r.hgetall(ExpKeys.get_blocks_uuid_by_alias(self.pk))
    # The hash is stored alias -> uuid; callers want the inverse view.
    return {uuid: alias for alias, uuid in stored.iteritems()}
def get_block_by_alias(self, alias, redis_instance=None):
    """
    @type alias: str
    @param alias: Human readable block name, can be altered
    @type redis_instance: Redis
    @param redis_instance: Instance of redis client
    @rtype: GenericBlock
    @return: Block instance
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    block_uuid = r.hget(ExpKeys.get_blocks_uuid_by_alias(self.pk), alias)
    return self.get_block(block_uuid, r)
def get_blocks(block_uuid_list, redis_instance=None):
    """
    @type block_uuid_list: list
    @param block_uuid_list: List of Block instance identifier
    @type redis_instance: Redis
    @param redis_instance: Instance of redis client
    @rtype: list of (str, GenericBlock)
    @return: (uuid, block) pairs in input order
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    log.debug("getting_blocks: %s", block_uuid_list)
    result = []
    for uuid in block_uuid_list:
        raw = r.get(ExpKeys.get_block_key(uuid))
        result.append((uuid, pickle.loads(raw)))
    return result
def get_blocks(block_uuid_list, redis_instance=None):
    """
    @type block_uuid_list: list
    @param block_uuid_list: List of Block instance identifier
    @type redis_instance: Redis
    @param redis_instance: Instance of redis client
    @rtype: list of (str, GenericBlock)
    @return: (uuid, block) pairs in input order
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    pairs = []
    for uuid in block_uuid_list:
        pickled = r.get(ExpKeys.get_block_key(uuid))
        pairs.append((uuid, pickle.loads(pickled)))
    return pairs
def halt_execution_task(exp, scope_name): log.debug("halt execution invoked") r = get_redis_instance() lock_key = ExpKeys.get_auto_exec_task_lock_key(exp.pk, scope_name) with redis_lock.Lock(r, lock_key): try: if scope_name == "root": AllUpdated( exp.pk, comment=u"An error occurred during experiment execution", silent=False, mode=NotifyMode.ERROR).send() else: block = exp.get_meta_block_by_sub_scope(scope_name) block.do_action("error", exp) except Exception, e: log.exception(e)
def halt_execution_task(exp, scope_name): log.debug("halt execution invoked") r = get_redis_instance() lock_key = ExpKeys.get_auto_exec_task_lock_key(exp.pk, scope_name) with redis_lock.Lock(r, lock_key): try: if scope_name == "root": AllUpdated( exp.pk, comment=u"An error occurred during experiment execution", silent=False, mode=NotifyMode.ERROR ).send() else: block = exp.get_meta_block_by_sub_scope(scope_name) block.do_action("error", exp) except Exception, e: log.exception(e)
def load(self, redis_instance=None):
    # Load scope variables from redis, handling legacy pickle layouts:
    # a list wraps the scope-var set in its first element (no temp vars
    # were stored); a plain set is the current format, for which temp vars
    # are loaded separately and normalised to a dict.
    if redis_instance is None:
        r = get_redis_instance()
    else:
        r = redis_instance
    key = ExpKeys.get_scope_key(self.exp.pk, scope_name=self.name)
    raw = r.get(key)
    if raw is not None:
        vars_p = pickle.loads(raw)
        if isinstance(vars_p, list):
            # Legacy format: [scope_vars]; reset temp vars.
            self.scope_vars = vars_p[0]
            self.temp_vars = dict()
        elif isinstance(vars_p, set):
            self.scope_vars = vars_p
            self.load_temp()
            if isinstance(self.temp_vars, list):
                # Legacy temp storage: replace with an empty mapping.
                self.temp_vars = dict()
        # TODO: set scope name during scope_var creation
        for scope_var in self.scope_vars:
            scope_var.scope_name = self.name
def store_block(self, block, new_block=False, redis_instance=None,
                dont_execute_pipe=False):
    # Serialize `block` into redis. For a new block, additionally register
    # it in the experiment's bookkeeping (block list, key set, alias hash,
    # scope-creator hash) and append it to its parent meta-block's children.
    # `dont_execute_pipe` supports the re-entrant parent update: the nested
    # call queues onto the caller's pipeline instead of executing its own.
    if redis_instance is None:
        r = get_redis_instance()
    else:
        r = redis_instance
    # Reuse an existing pipeline when one was passed in re-entrantly.
    if not isinstance(r, StrictPipeline):
        pipe = r.pipeline()
    else:
        pipe = r
    block_key = ExpKeys.get_block_key(block.uuid)
    if new_block:
        pipe.rpush(ExpKeys.get_exp_blocks_list_key(self.pk), block.uuid)
        pipe.sadd(ExpKeys.get_all_exp_keys_key(self.pk), block_key)
        pipe.hset(ExpKeys.get_blocks_uuid_by_alias(self.pk),
                  block.base_name, block.uuid)
        if block.create_new_scope:
            pipe.hset(ExpKeys.get_scope_creating_block_uuid_keys(self.pk),
                      block.sub_scope_name, block.uuid)
        if block.scope_name != "root":
            # need to register in parent block
            parent_uuid = r.hget(
                ExpKeys.get_scope_creating_block_uuid_keys(self.pk),
                block.scope_name)
            parent = self.get_block(parent_uuid, r)
            # TODO: remove code dependency here
            parent.children_blocks.append(block.uuid)
            self.store_block(parent, new_block=False,
                             redis_instance=pipe, dont_execute_pipe=True)
    pipe.set(block_key, pickle.dumps(block))
    if not dont_execute_pipe:
        pipe.execute()
    log.info("block %s was stored with state: %s [uuid: %s]",
             block.base_name, block.state, block.uuid)
def post_init(self, redis_instance=None):
    ## TODO: RENAME TO init experiment and invoke on first save
    """Initialise redis bookkeeping for a fresh experiment: seed the root
    scope entry and register the experiment's core keys in its key set.

    @param redis_instance: optional Redis client to reuse
    """
    if redis_instance is None:
        r = get_redis_instance()
    else:
        r = redis_instance
    pipe = r.pipeline()
    pipe.hset(ExpKeys.get_scope_creating_block_uuid_keys(self.pk),
              "root", None)
    # BUG FIX: redis-py `sadd(name, *values)` takes members as positional
    # arguments; the original passed a Python list as a single member,
    # which is not a valid redis value. Unpack the keys instead.
    pipe.sadd(
        ExpKeys.get_all_exp_keys_key(self.pk),
        ExpKeys.get_exp_blocks_list_key(self.pk),
        ExpKeys.get_blocks_uuid_by_alias(self.pk),
        ExpKeys.get_scope_creating_block_uuid_keys(self.pk),
        ExpKeys.get_scope_key(self.pk, "root"))
    pipe.execute()
def on_sub_scope_done(self, exp, *args, **kwargs):
    """
    @type exp: Experiment

    This action should be called by ScopeRunner
    when all blocks in sub-scope have exec status == done
    """
    r = get_redis_instance()
    # The lock guards the result sequence against concurrent fold updates.
    with redis_lock.Lock(
            r, ExpKeys.get_block_global_lock_key(self.exp_id, self.uuid)):
        # `cell` collects this fold's outputs, one entry per bound var.
        cell = self.res_seq.sequence[self.inner_output_manager.iterator]
        for name, scope_var in self.collector_spec.bound.iteritems():
            var = exp.get_scope_var_value(scope_var)
            log.debug("Collected %s from %s", var, scope_var.title)
            if var is not None:
                if hasattr(var, "clone"):
                    # Clone with a per-iteration suffix so fold results
                    # don't alias the same underlying object.
                    cell[name] = var.clone(
                        "%s_%s" % (self.uuid,
                                   self.inner_output_manager.iterator))
                else:
                    cell[name] = deepcopy(var)
        self.res_seq.sequence[self.inner_output_manager.iterator] = cell
        exp.store_block(self)
        if len(cell) < len(self.res_seq.fields):
            # Not every collector field is gathered yet for this fold.
            self.do_action("continue_collecting_sub_scope", exp)
        else:
            try:
                # Advance to the next fold and re-run the sub-scope.
                self.inner_output_manager.next()
                self.do_action("run_sub_scope", exp)
            except StopIteration, e:
                # All folds was processed without errors
                self.build_result_collection(exp)
                self.do_action("success", exp)
def on_sub_scope_done(self, exp, *args, **kwargs):
    """
    @type exp: Experiment

    This action should be called by ScopeRunner
    when all blocks in sub-scope have exec status == done
    """
    r = get_redis_instance()
    # The lock guards the result sequence against concurrent fold updates.
    with redis_lock.Lock(r, ExpKeys.get_block_global_lock_key(self.exp_id,
                                                              self.uuid)):
        # `cell` collects this fold's outputs, one entry per bound var.
        cell = self.res_seq.sequence[self.inner_output_manager.iterator]
        for name, scope_var in self.collector_spec.bound.iteritems():
            var = exp.get_scope_var_value(scope_var)
            exp.log(self.uuid, "Collected %s from %s" % (var, scope_var.title),
                    severity="CRITICAL")
            log.debug("Collected %s from %s", var, scope_var.title)
            if var is not None:
                if hasattr(var, "clone"):
                    # Clone with a per-iteration suffix so fold results
                    # don't alias the same underlying object.
                    cell[name] = var.clone(
                        "%s_%s" % (self.uuid,
                                   self.inner_output_manager.iterator))
                else:
                    cell[name] = deepcopy(var)
        self.res_seq.sequence[self.inner_output_manager.iterator] = cell
        exp.store_block(self)
        if len(cell) < len(self.res_seq.fields):
            # Not every collector field is gathered yet for this fold.
            self.do_action("continue_collecting_sub_scope", exp)
        else:
            try:
                # Advance to the next fold and re-run the sub-scope.
                self.inner_output_manager.next()
                self.do_action("run_sub_scope", exp)
            except StopIteration, e:
                # All folds were processed without errors
                self.build_result_collection(exp)
                self.do_action("success", exp)
def get_blocks(block_uuid_list, redis_instance=None):
    """
    @type block_uuid_list: list
    @param block_uuid_list: List of Block instance identifier
    @type redis_instance: Redis
    @param redis_instance: Instance of redis client
    @rtype: list of (str, GenericBlock)
    @return: (uuid, block) pairs in input order
    """
    r = redis_instance if redis_instance is not None else get_redis_instance()
    log.debug("getting_blocks: %s", block_uuid_list)
    loaded = []
    for uuid in block_uuid_list:
        pickled_block = r.get(ExpKeys.get_block_key(uuid))
        loaded.append((uuid, pickle.loads(pickled_block)))
    return loaded
def store_block(self, block, new_block=False, redis_instance=None,
                dont_execute_pipe=False):
    # Persist `block` into redis. New blocks are also registered in the
    # experiment bookkeeping (block list, key set, alias hash and, for
    # scope-creating blocks, the scope-creator hash) and appended to their
    # parent meta-block's children list. The `dont_execute_pipe` flag lets
    # the recursive parent update queue onto the caller's pipeline.
    if redis_instance is None:
        r = get_redis_instance()
    else:
        r = redis_instance
    # Reuse an existing pipeline when one was passed in re-entrantly.
    if not isinstance(r, StrictPipeline):
        pipe = r.pipeline()
    else:
        pipe = r
    block_key = ExpKeys.get_block_key(block.uuid)
    if new_block:
        pipe.rpush(ExpKeys.get_exp_blocks_list_key(self.pk), block.uuid)
        pipe.sadd(ExpKeys.get_all_exp_keys_key(self.pk), block_key)
        pipe.hset(ExpKeys.get_blocks_uuid_by_alias(self.pk),
                  block.base_name, block.uuid)
        if block.create_new_scope:
            pipe.hset(ExpKeys.get_scope_creating_block_uuid_keys(self.pk),
                      block.sub_scope_name, block.uuid)
        if block.scope_name != "root":
            # need to register in parent block
            parent_uuid = r.hget(
                ExpKeys.get_scope_creating_block_uuid_keys(self.pk),
                block.scope_name)
            parent = self.get_block(parent_uuid, r)
            # TODO: remove code dependency here
            parent.children_blocks.append(block.uuid)
            self.store_block(parent, new_block=False,
                             redis_instance=pipe, dont_execute_pipe=True)
    pipe.set(block_key, pickle.dumps(block))
    if not dont_execute_pipe:
        pipe.execute()
    log.info("block %s was stored with state: %s [uuid: %s]",
             block.base_name, block.state, block.uuid)