Example #1
0
def perf_tuning(self, temp_json, input_params):
    """
    Celery task that runs the OLive perf-tuning pipeline.

    Args:
        temp_json: Path to the JSON spec describing the perf-tuning job.
        input_params: JSON object with the raw arguments from the frontend user.
    Return:
        dict with run status, logs and (when available) tuning results.
    """
    response = {'status': 'success'}

    pipeline = onnxpipeline.Pipeline()

    # Every run writes into its own timestamped result directory.
    result_dir = os.path.join(app_config.PERF_RES_DIR, get_timestamp())
    os.makedirs(result_dir, exist_ok=True)

    run = pipeline.perf_tuning(input_json=temp_json, result=result_dir)
    try:
        tuning = pipeline.get_result(run)
    except RuntimeError:
        # Best effort: the run may have produced no parsable result.
        pass
    else:
        response['result'] = json.dumps(tuning.latency)
        response['profiling'] = tuning.profiling_ops

    response['logs'] = pipeline.output
    # NOTE(review): result_dir is intentionally left on disk here; only the
    # shared temp artifacts under the app root are removed.
    clean(app.root_path)
    return response
Example #2
0
def main():
    """Convert the given model to ONNX, then run perf tuning (GPU if available)."""
    args = get_args()
    pipeline = onnxpipeline.Pipeline()
    model = pipeline.convert_model(
        model_type=args.model_type,
        model=pipeline.win_path_to_linux_relative(args.model),
        model_input_shapes=args.model_input_shapes,
        model_inputs_names=args.model_inputs_names,
        model_outputs_names=args.model_outputs_names,
        target_opset=args.target_opset,
        input_json=args.input_json,
        initial_types=args.initial_types,
        caffe_model_prototxt=args.caffe_model_prototxt,
        windows=not args.linux)
    # Query the docker daemon once instead of twice per check.
    docker_info = pipeline.client.info()
    if 'nvidia' in docker_info.get('Runtimes', ()):
        pipeline.perf_tuning(model=model,
                             result=pipeline.win_path_to_linux_relative(
                                 args.result),
                             runtime=args.gpu,
                             windows=not args.linux)
    else:
        if args.gpu:
            print('Not support Nvidia in local machine. Need to be installed.')
        args.gpu = False
        pipeline.perf_tuning(model=model,
                             result=args.result,
                             runtime=args.gpu,
                             windows=not args.linux)
Example #3
0
def convert(self, model_name, temp_json, cur_ts, root_path, input_params):
    """
    Define a celery job which runs OLive pipeline onnx-converter.

    Args:
        model_name: Model path to run onnx-convert.
        temp_json: Input specs for onnx-convert job.
        cur_ts: Unique timestamp for the job.
        root_path: Project app absolute root path.
        input_params: JSON object storing the input arguments from frontend user.
    Return:
        JSON response with run status and results.
    """
    response_object = {'status': 'success'}
    # Initiate pipeline object with targeted directory
    pipeline = onnxpipeline.Pipeline(
        convert_directory=os.path.join(app_config.CONVERT_RES_DIR, cur_ts))
    model = pipeline.convert_model(model=model_name, input_json=temp_json)
    try:
        with open(posixpath.join(pipeline.convert_directory,
                                 'output.json')) as f:
            response_object['output_json'] = json.load(f)
        response_object['logs'] = pipeline.output
        response_object['converted_model'] = model
    except Exception:
        # Conversion failed: leave the result fields unset but keep going so
        # the caller still gets a response. (Was a bare `except:`, which also
        # swallowed SystemExit/KeyboardInterrupt.)
        pass

    target_dir = app_config.DOWNLOAD_DIR
    input_root = os.path.join(root_path, target_dir)
    # compress the generated test-data directory for download
    compress_path = os.path.join(pipeline.convert_directory,
                                 app_config.TEST_DATA_DIR)
    input_path = os.path.join(input_root, cur_ts)
    os.makedirs(input_path, exist_ok=True)
    # Context manager guarantees the archive is closed on every code path.
    with tarfile.open(os.path.join(input_path, app_config.COMPRESS_NAME),
                      "w:gz") as tar:
        try:
            tar.add(compress_path, arcname=app_config.TEST_DATA_DIR)
        except Exception:
            # Test data was not generated; ship an empty archive.
            pass

    # copy converted onnx model
    if os.path.exists(pipeline.convert_path):
        copyfile(pipeline.convert_path,
                 os.path.join(input_root, cur_ts, pipeline.convert_name))

    response_object['input_path'] = posixpath.join(target_dir, cur_ts,
                                                   app_config.COMPRESS_NAME)
    response_object['model_path'] = posixpath.join(target_dir, cur_ts,
                                                   pipeline.convert_name)

    clean(root_path)

    return response_object
Example #4
0
 def test_constructor_fail(self):
     """Pipeline must raise RuntimeError for a nonexistent directory.

     The previous version always passed: the bare `except` also caught the
     AssertionError raised by `self.fail`, and `self.assertRaises(RuntimeError)`
     without a callable / `with` block asserts nothing on its own.
     """
     directory_name = 'not_exist_directory'
     with self.assertRaises(RuntimeError):
         onnxpipeline.Pipeline(directory_name)
Example #5
0
 def test_cntk_pass(self):
     """A CNTK ResNet50 model converts successfully end to end."""
     pipeline = onnxpipeline.Pipeline(
         self.deep_dir['cntk'],
         convert_directory=self.convert_dir_pass,
         print_logs=self.print_logs)
     model = pipeline.convert_model(model_type='cntk',
                                    model='ResNet50_ImageNet_Caffe.model')
     output_json = osp.join(
         pipeline.path, pipeline.convert_directory, config.OUTPUT_JSON)
     self.check_json_staus(['SUCCESS', 'SUCCESS'],
                           self.check_converted_json(output_json))
Example #6
0
 def test_tensorflow_pass(self):
     """A TensorFlow model in the fixture directory converts successfully."""
     pipeline = onnxpipeline.Pipeline(
         self.deep_dir['tensorflow'],
         convert_directory=self.convert_dir_pass,
         print_logs=self.print_logs)
     model = pipeline.convert_model(model_type='tensorflow')
     output_json = osp.join(
         pipeline.path, pipeline.convert_directory, config.OUTPUT_JSON)
     self.check_json_staus(['SUCCESS', 'SUCCESS'],
                           self.check_converted_json(output_json))
Example #7
0
    def test_pytorch_pass(self):
        """A PyTorch model with explicit input shapes converts successfully."""
        pipeline = onnxpipeline.Pipeline(
            self.deep_dir['pytorch'],
            convert_directory=self.convert_dir_pass,
            print_logs=self.print_logs)
        model = pipeline.convert_model(
            model_type='pytorch',
            model='saved_model.pb',
            model_input_shapes='(1,3,224,224)')
        output_json = osp.join(
            pipeline.path, pipeline.convert_directory, config.OUTPUT_JSON)
        self.check_json_staus(['SUCCESS', 'SUCCESS'],
                              self.check_converted_json(output_json))
Example #8
0
    def test_convert_from_onnx(self):
        """Converting an already-ONNX model succeeds with conversion skipped."""
        pipeline = onnxpipeline.Pipeline(
            self.deep_dir['onnx'], convert_directory=self.convert_dir_pass)
        model = pipeline.convert_model(
            model_type='onnx',
            model='model.onnx',
            model_input_shapes='(1,3,224,224)')
        output_json = osp.join(
            pipeline.path, pipeline.convert_directory, config.OUTPUT_JSON)
        self.check_json_staus(['SUCCESS', 'SKIPPED'],
                              self.check_converted_json(output_json))
Example #9
0
    def test_cntk_fail(self):
        """CNTK conversion must fail when no model file is given."""
        pipeline = onnxpipeline.Pipeline(
            self.deep_dir['cntk'],
            convert_directory=self.convert_dir_fail,
            print_logs=self.print_logs)
        model = pipeline.convert_model(model_type='cntk')
        output_json = osp.join(
            pipeline.path, pipeline.convert_directory, config.OUTPUT_JSON)
        self.check_json_staus(['FAILED', 'FAILED'],
                              self.check_converted_json(output_json))
Example #10
0
    def test_tensorflow_fail(self):
        """Conversion must fail when the TensorFlow model path does not exist."""
        pipeline = onnxpipeline.Pipeline(
            self.deep_dir['tensorflow'],
            convert_directory=self.convert_dir_fail)
        model = pipeline.convert_model(model_type='tensorflow',
                                       model='not_exist_path')
        output_json = osp.join(
            pipeline.path, pipeline.convert_directory, config.OUTPUT_JSON)
        self.check_json_staus(['FAILED', 'FAILED'],
                              self.check_converted_json(output_json))
Example #11
0
def convert():
    """
    Flask endpoint that runs the OLive onnx-converter pipeline on the
    uploaded model and packages the conversion outputs for download.

    Return:
        JSON response with run status, logs, converted model path and
        the compressed input-data path.
    """
    response_object = {'status': 'success'}

    pipeline = onnxpipeline.Pipeline()

    # Start from a clean conversion directory.
    # (may not work on Unix because of directory permissions)
    if os.path.exists(pipeline.convert_directory):
        rmtree(pipeline.convert_directory)
    os.mkdir(pipeline.convert_directory)

    model_name, temp_json = get_params(request, pipeline.convert_directory)

    model = pipeline.convert_model(model=model_name, input_json=temp_json)
    try:
        with open(posixpath.join(pipeline.convert_directory,
                                 'output.json')) as f:
            response_object['output_json'] = json.load(f)
        response_object['logs'] = pipeline.output
        response_object['converted_model'] = model
    except Exception:
        # Conversion failed: keep going so the client still gets a response.
        # (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
        pass

    target_dir = app_config.DOWNLOAD_DIR
    input_root = os.path.join(app.root_path, app_config.STATIC_DIR, target_dir)
    # compress input directory
    compress_path = os.path.join(pipeline.convert_directory,
                                 app_config.INPUT_DIR)
    os.makedirs(input_root, exist_ok=True)
    # Context manager guarantees the archive is closed on every code path.
    with tarfile.open(os.path.join(input_root, app_config.COMPRESS_NAME),
                      "w:gz") as tar:
        try:
            tar.add(compress_path, arcname=app_config.INPUT_DIR)
        except Exception:
            # Input data was not generated; ship an empty archive.
            pass

    # copy converted onnx model
    if os.path.exists(pipeline.convert_path):
        copyfile(pipeline.convert_path,
                 os.path.join(input_root, pipeline.convert_name))

    response_object['input_path'] = posixpath.join(target_dir,
                                                   app_config.COMPRESS_NAME)
    response_object['model_path'] = posixpath.join(target_dir,
                                                   pipeline.convert_name)

    return jsonify(response_object)
Example #12
0
def convert(self, model_name, temp_json, cur_ts, root_path, input_params):
    """
    Celery job that runs the OLive onnx-converter pipeline and packages
    the conversion outputs for download.

    Args:
        model_name: Model path to run onnx-convert.
        temp_json: Input specs for onnx-convert job.
        cur_ts: Unique timestamp for the job.
        root_path: Project app absolute root path.
        input_params: JSON object storing the input arguments from frontend user.
    Return:
        JSON response with run status and results.
    """
    response_object = {'status': 'success'}
    # Initiate pipeline object with targeted directory
    pipeline = onnxpipeline.Pipeline(
        convert_directory=os.path.join(app_config.CONVERT_RES_DIR, cur_ts))
    model = pipeline.convert_model(model=model_name, input_json=temp_json)
    try:
        with open(posixpath.join(pipeline.convert_directory,
                                 'output.json')) as f:
            response_object['output_json'] = json.load(f)
        response_object['logs'] = pipeline.output
        response_object['converted_model'] = model
    except Exception:
        # Conversion failed: leave result fields unset but keep going so the
        # caller still gets a response. (Was a bare `except:`, which also
        # swallowed SystemExit/KeyboardInterrupt.)
        pass

    target_dir = app_config.DOWNLOAD_DIR
    input_root = os.path.join(root_path, target_dir)
    # compress the generated test-data directory for download
    compress_path = os.path.join(pipeline.convert_directory,
                                 app_config.TEST_DATA_DIR)
    input_path = os.path.join(input_root, cur_ts)
    os.makedirs(input_path, exist_ok=True)
    # Context manager guarantees the archive is closed on every code path.
    with tarfile.open(os.path.join(input_path, app_config.COMPRESS_NAME),
                      "w:gz") as tar:
        try:
            tar.add(compress_path, arcname=app_config.TEST_DATA_DIR)
        except Exception:
            # Test data was not generated; ship an empty archive.
            pass

    # copy converted onnx model
    if os.path.exists(pipeline.convert_path):
        copyfile(pipeline.convert_path,
                 os.path.join(input_root, cur_ts, pipeline.convert_name))

    response_object['input_path'] = posixpath.join(target_dir, cur_ts,
                                                   app_config.COMPRESS_NAME)
    response_object['model_path'] = posixpath.join(target_dir, cur_ts,
                                                   pipeline.convert_name)

    clean(root_path)

    return response_object
Example #13
0
    def test_pytorch_fail(self):
        """PyTorch conversion must fail without input shapes or without a model."""
        pipeline = onnxpipeline.Pipeline(
            self.deep_dir['pytorch'], convert_directory=self.convert_dir_fail)

        # Case 1: model given but no input shapes.
        model = pipeline.convert_model(model_type='pytorch',
                                       model='saved_model.pb')
        output_json = osp.join(
            pipeline.path, pipeline.convert_directory, config.OUTPUT_JSON)
        self.check_json_staus(['FAILED', 'FAILED'],
                              self.check_converted_json(output_json))

        # Case 2: input shapes given but no model.
        model = pipeline.convert_model(model_type='pytorch',
                                       model_input_shapes='(1,3,224,224)')
        output_json = osp.join(
            pipeline.path, pipeline.convert_directory, config.OUTPUT_JSON)
        self.check_json_staus(['FAILED', 'FAILED'],
                              self.check_converted_json(output_json))
Example #14
0
def perf_tuning():
    """Flask endpoint that runs the OLive perf-tuning pipeline."""
    response = {'status': 'success'}

    pipeline = onnxpipeline.Pipeline()

    # An uploaded file goes to the reserved input path; otherwise fall
    # back to the bundled ./test directory.
    target = RESERVED_INPUT_PATH if 'file' in request.files else './test'
    _, temp_json = get_params(request, target)

    run = pipeline.perf_tuning(input_json=temp_json)

    response['logs'] = pipeline.output
    try:
        tuning = pipeline.get_result(run)
    except RuntimeError:
        # Best effort: no parsable result — return logs only.
        pass
    else:
        response['result'] = json.dumps(tuning.latency)
        response['profiling'] = tuning.profiling_ops

    return jsonify(response)
Example #15
0
def perf_tuning(self, temp_json, input_params):
    """
    Celery task that runs the OLive perf-tuning pipeline.

    Args:
        temp_json: Path to the JSON spec describing the perf-tuning job.
        input_params: JSON object with the raw arguments from the frontend user.
    Return:
        dict with run status, logs and (when available) tuning results.
    """
    response_object = {'status': 'success'}

    pipeline = onnxpipeline.Pipeline()

    # create result dir (one per run, timestamped)
    result_dir = os.path.join(app_config.PERF_RES_DIR, get_timestamp())
    os.makedirs(result_dir, exist_ok=True)
    try:
        result = pipeline.perf_tuning(input_json=temp_json, result=result_dir)
        try:
            r = pipeline.get_result(result)
            response_object['result'] = json.dumps(r.latency)
            response_object['profiling'] = r.profiling_ops
        except RuntimeError:
            # Best effort: the run may have produced no parsable result.
            pass
        response_object['logs'] = pipeline.output
    finally:
        # Always remove the per-run result dir — previously an unexpected
        # exception would skip cleanup and leak disk space.
        if os.path.exists(result_dir):
            rmtree(result_dir)
    clean(app.root_path)
    return response_object
Example #16
0
 def test_constructor_pass(self):
     """Pipeline resolves a relative directory against the current working dir."""
     directory_name = self.deep_dir['pytorch']
     pipeline = onnxpipeline.Pipeline(directory_name, print_logs=self.print_logs)
     # assertEquals is a deprecated alias (removed in Python 3.12).
     self.assertEqual(osp.join(os.getcwd(), directory_name), pipeline.path)