def run_pipeline(p, **kwargs):
    """Run each processor of the pipeline in order, then the encoder, and return the vectors."""
    todo_list = []
    if not isinstance(p, Pipeline):
        raise PipelineCheckError("check pipeline with error",
                                 "%s is not a Pipeline" % p)
    # Resolve every processor to a running operator instance.
    for processor in p.processors:
        op = operator_detail(processor["name"])
        ins = op.inspect_instance(processor["instance"])
        todo_list.append(ins)
    # The encoder instance always runs last.
    op = operator_detail(p.encoder["name"])
    ins = op.inspect_instance(p.encoder["instance"])
    todo_list.append(ins)

    def runner(todo_list):
        vectors = []
        urls = [kwargs['url']] if kwargs.get('url') else []
        datas = [kwargs['data']] if kwargs.get('data') else []
        try:
            for num, instance in enumerate(todo_list):
                # The last step is the encoder; it yields the vectors we return.
                if num == len(todo_list) - 1:
                    vectors, _ = execute(instance, urls=urls, datas=datas)
                    return vectors
                # Intermediate processors yield metadata that feeds the next step.
                _, metadatas = execute(instance, urls=urls, datas=datas)
                urls = [x.url for x in metadatas]
                datas = [x.data for x in metadatas]
            return vectors
        except Exception as e:
            raise RPCExecError("Execute with error", e)

    try:
        return runner(todo_list)
    except Exception as e:
        logger.error(e)
        raise e
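# A minimal usage sketch (not part of the original module): it assumes a
# pipeline document named "example_pipeline" already exists and that the
# operator instances it references are running; the image URL is hypothetical.
def _example_run_pipeline():
    pipe = pipeline_detail("example_pipeline")
    return run_pipeline(pipe, url="https://example.com/cat.jpg", data=None)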
def create_pipeline(name, processors=None, encoder=None, description=None):
    """Create a pipeline, verify it, and persist it to MongoDB."""
    try:
        p = MongoIns.search_by_name(PIPELINE_COLLECTION_NAME, name)
        if p:
            raise ExistError(f"pipeline <{name}> already exists", "")
        pro = []
        encoder_res = {}
        for processor in processors:
            # Build a fresh dict per processor so earlier entries are not overwritten.
            processor_res = {}
            pr = operator_detail(processor["name"])
            processor_res["operator"] = pr.to_dict()
            processor_res["instance"] = pr.inspect_instance(processor["instance"])
            pro.append(processor_res)
        encoder_info = operator_detail(encoder["name"])
        encoder_res["operator"] = encoder_info.to_dict()
        encoder_res["instance"] = encoder_info.inspect_instance(encoder["instance"])
        pipe = Pipeline(name, description, pro, encoder_res)
        pipe.metadata = pipe._metadata()
        if pipeline_illegal(pipe):
            raise PipelineIllegalError("Pipeline illegal check error", "")
        MongoIns.insert_documents(PIPELINE_COLLECTION_NAME, pipe.to_dict())
        return pipe
    except Exception as e:
        logger.error(e, exc_info=True)
        raise e
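# A sketch of the expected argument shape, inferred from how create_pipeline
# reads the processor and encoder dicts above; the operator and instance names
# are hypothetical.
def _example_create_pipeline():
    return create_pipeline(
        name="example_pipeline",
        processors=[{"name": "face-detector", "instance": "ins1"}],
        encoder={"name": "vgg16-encoder", "instance": "ins1"},
        description="detect faces, then encode them into vectors",
    )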
def new_pipeline(name, input, index_file_size, processors, encoder, description=None):
    """Create a pipeline together with its Milvus collection and save it."""
    try:
        encoder = operator_detail(encoder)
        pipe = Pipeline(name=name, input=input, output=encoder.output,
                        dimension=encoder.dimension,
                        index_file_size=index_file_size,
                        metric_type=encoder.metric_type,
                        description=description,
                        processors=processors.split(","),
                        encoder=encoder.name)
        if pipeline_illegal(pipe):
            raise PipelineIllegalError("Pipeline illegal check error", "")
        milvus_collection_name = f"{name}_{encoder.name}"
        MilvusIns.new_milvus_collection(milvus_collection_name,
                                        encoder.dimension,
                                        index_file_size,
                                        encoder.metric_type)
        return pipe.save()
    except Exception as e:
        logger.error(e)
        raise e
def create_operator_instance_api(name):
    args = reqparse.RequestParser(). \
        add_argument("instanceName", type=str, required=True). \
        parse_args()
    args = from_view_dict(args)
    ins_name = args['instance_name']
    op = operator_detail(name)
    return op.new_instance(ins_name)
def create_milvus_collections_by_fields(app):
    # Create one Milvus collection per pipeline-typed field of the application.
    for field in search_fields(app.fields):
        if field.type == "pipeline":
            pipe = pipeline_detail(field.value)
            name = pipe.encoder.get("name")
            instance_name = pipe.encoder.get("instance")
            encoder = operator_detail(name)
            instance = encoder.inspect_instance(instance_name)
            # Ask the running encoder instance for its output dimension.
            ei = identity(instance.endpoint)
            MilvusIns.new_milvus_collection(
                f"{app.name}_{name}_{instance_name}",
                int(ei["dimension"]), 1024, "l2")
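# A usage sketch (assumes "example_app" was registered earlier and that one of
# its fields references an existing pipeline; application_detail is an assumed
# lookup helper, not shown in this section).
def _example_create_collections_for_app():
    app = application_detail("example_app")
    create_milvus_collections_by_fields(app)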
def start_operator_instance_api(name, ins_name):
    op = operator_detail(name)
    return op.start_instance(ins_name)
def delete_operator_instance_api(name, ins_name):
    op = operator_detail(name)
    return op.delete_instance(ins_name)
def operator_instance_list_api(name):
    op = operator_detail(name)
    return op.list_instances()
def operator_detail_api(name):
    return operator_detail(name)
def wrapper(*args, **kwargs):
    # operator_name, name and func come from the enclosing scope: spin up a
    # fresh operator instance, run the wrapped function, then clean it up.
    operator = operator_detail(operator_name)
    operator.new_instance(name)
    func(*args, **kwargs)
    operator.delete_instance(name)
def create_operator_instance_api(name, ins_name):
    op = operator_detail(name)
    return op.new_instance(ins_name)