コード例 #1
0
def build_engine_from_onnx(onnx_path, engine_name, batch_size, TRT_LOGGER):
    """Parse an ONNX file, build a TensorRT engine for it, and save it.

    The engine is serialized to `engine_name` and also returned.
    """
    # Load the ONNX protobuf so the input tensor's shape can be read.
    onnx_model = ModelProto()
    with open(onnx_path, "rb") as model_file:
        onnx_model.ParseFromString(model_file.read())

    # Dims 1..3 of the first graph input; dim 0 is the batch axis, which
    # is replaced by the explicit batch_size below.
    input_dims = onnx_model.graph.input[0].type.tensor_type.shape.dim[1:4]
    shape = [batch_size] + [dim.dim_value for dim in input_dims]

    engine = eng.build_engine(TRT_LOGGER, onnx_path, shape=shape)
    eng.save_engine(engine, engine_name)
    return engine
コード例 #2
0
def main(args):
    """Convert args.onnx_file into a TensorRT plan saved at args.plan_file."""
    plan_path = args.plan_file
    onnx_file = args.onnx_file
    batch_size = 1  # engine is built for a fixed batch of one

    # Parse the ONNX protobuf to recover the declared input dimensions.
    onnx_model = ModelProto()
    with open(onnx_file, "rb") as fh:
        onnx_model.ParseFromString(fh.read())

    # Dims 1..3 of the first input; dim 0 (batch) is overridden explicitly.
    tensor_dims = onnx_model.graph.input[0].type.tensor_type.shape.dim
    shape = [batch_size, tensor_dims[1].dim_value,
             tensor_dims[2].dim_value, tensor_dims[3].dim_value]

    eng.save_engine(eng.build_engine(onnx_file, shape=shape), plan_path)
コード例 #3
0
def create_engine(onnx_path, engine_output_path):
    """Build a TensorRT engine from an ONNX model and save it to disk.

    Args:
        onnx_path: Path to the input .onnx model file.
        engine_output_path: Path where the serialized engine (.plan) is written.
    """
    batch_size = 1  # engine is built for a fixed batch of one

    # Parse the ONNX protobuf so the input tensor's shape can be read.
    model = ModelProto()
    with open(onnx_path, "rb") as f:
        model.ParseFromString(f.read())
    print('ONNX model loaded...')  # fixed typo: was 'laoded'

    print('Creating engine from this onnx file, ', onnx_path)
    # Dims 1..3 of the first graph input; dim 0 is the batch axis, which
    # is replaced by the explicit batch_size.
    dims = model.graph.input[0].type.tensor_type.shape.dim[1:4]
    shape = [batch_size] + [d.dim_value for d in dims]
    engine = eng.build_engine(onnx_path, shape=shape)
    eng.save_engine(engine, engine_output_path)
    print('TRT engine created and saved at, ', engine_output_path)
コード例 #4
0
def main(args):
    """Build a TensorRT engine for an ONNX model with a 5-D feature input."""
    plan_path = args.plan_file
    onnx_file = args.onnx_file
    batch_size = config.batch_size

    # Parse the ONNX protobuf to recover the declared input dimensions.
    onnx_model = ModelProto()
    with open(onnx_file, "rb") as fh:
        print("parsing")
        onnx_model.ParseFromString(fh.read())

    # Dims 1..5 of the first input (dim 0 is the batch axis) — five feature
    # axes for the CNN-LSTM model.
    feature_dims = onnx_model.graph.input[0].type.tensor_type.shape.dim[1:6]
    shape = [batch_size] + [dim.dim_value for dim in feature_dims]
    print("data shape required", shape)

    engine = eng.build_engine(onnx_file, shape=shape)
    eng.save_engine(engine, plan_path)
コード例 #5
0
import argparse

import engine as eng
import tensorrt as trt
from onnx import ModelProto

# Target plan file and source ONNX model.
engine_name = "yolo_pedestrian.plan"
onnx_path = "model.onnx"
batch_size = 1  # engine is built for a single image per batch

# Parse the ONNX protobuf to read the network's declared input dimensions.
model = ModelProto()
with open(onnx_path, "rb") as model_file:
    model.ParseFromString(model_file.read())

# Dims 1..3 of the first input (presumably C, H, W — confirm against the
# model); dim 0 is the batch axis, replaced by the explicit batch_size.
input_dims = model.graph.input[0].type.tensor_type.shape.dim[1:4]
shape = [batch_size] + [d.dim_value for d in input_dims]

# Build the engine and serialize it to disk.
engine = eng.build_engine(onnx_path, shape=shape)
eng.save_engine(engine, engine_name)
コード例 #6
0
ファイル: message.py プロジェクト: priestd09/steve
def latest():
  """Return every row of the messages table wrapped as Message objects.

  NOTE(review): select([" * FROM messages"]) abuses SQLAlchemy's select()
  to emit raw SQL ("SELECT * FROM messages"); a text() query or the mapped
  Table would be cleaner — confirm before changing.
  """
  # Close the connection when done: the original leaked it (no conn.close()).
  with engine.build_engine().connect() as conn:
    rows = conn.execute(select([" * FROM messages"])).fetchall()
  return [build_message(r) for r in rows]
コード例 #7
0
ファイル: message.py プロジェクト: priestd09/steve
def build_message(result):
  """Map a positional `messages` row onto a Message object."""
  # Field order matches the table's column order; explicit indexing via
  # enumerate preserves the original IndexError on short rows.
  keys = ("id", "recipient", "sender", "who_from", "subject", "body_plain",
          "stripped_text", "timestamp", "signature", "message_headers")
  fields = {key: result[i] for i, key in enumerate(keys)}
  fields["id"] = int(fields["id"])
  return Message(fields)

if __name__ == "__main__":
  # Running this module directly sets up the `messages` table schema.
  e = engine.build_engine()

  # NOTE(review): MetaData(bind=...) only works on legacy SQLAlchemy
  # (removed in 1.4/2.0) — confirm the pinned dependency version.
  metadata = MetaData(bind=e)
  # Column order matches the positional indexing used in build_message():
  # id, recipient, sender, who_from, subject, body_plain, stripped_text,
  # timestamp, signature, message_headers.
  messages_table = Table('messages', metadata,
    Column('id', Integer, primary_key=True),
    Column('recipient', String(255)),
    Column('sender', String(255)),
    Column('who_from', String(255)),
    Column('subject', String(255)),
    Column('body_plain', Text),
    Column('stripped_text', Text),
    Column('timestamp', Integer),
    Column('signature', String(255)),
    Column('message_headers', Text),
  )
コード例 #8
0
import argparse

import engine as eng
import tensorrt as trt
from onnx import ModelProto

# Single-dash long options; argparse exposes them as args.engine_name
# and args.onnx_path.
parser = argparse.ArgumentParser()
parser.add_argument('-engine-name')
parser.add_argument('-onnx-path')
args = parser.parse_args()

batch_size = 1  # build the engine for a single-sample batch

model = ModelProto()
with open(args.onnx_path, "rb") as f:
    model.ParseFromString(f.read())  # load the model to get the input shape

# Dims 1..3 of the first graph input; dim 0 is the batch axis.
shape = [batch_size]
for dim in model.graph.input[0].type.tensor_type.shape.dim[1:4]:
    shape.append(dim.dim_value)
print(shape)

# build and save the engine
engine = eng.build_engine(args.onnx_path, shape)
eng.save_engine(engine, args.engine_name)
コード例 #9
0
ファイル: onnx_to_trt.py プロジェクト: yunlong12/aXeleRate
import engine as eng
import argparse
import os
from onnx import ModelProto
import tensorrt as trt

batch_size = 1  # engines are built for a fixed batch of one

parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--onnx', help='File path to .onnx model', required=True)
parser.add_argument('--precision', help='FP32 or FP16', required=True)
args = parser.parse_args()

# Derive the output plan path from the ONNX path.
# BUG FIX: the original concatenated dirname + basename with no path
# separator ("models/net.onnx" -> "modelsnet.plan"); os.path.splitext keeps
# the directory and replaces only the extension, and also handles dotted
# filenames ("net.v2.onnx" -> "net.v2.plan") correctly.
engine_name = os.path.splitext(args.onnx)[0] + '.plan'

# Parse the ONNX protobuf so the input tensor's shape can be read.
model = ModelProto()
with open(args.onnx, "rb") as f:
    model.ParseFromString(f.read())

# Dims 1..3 of the first graph input; dim 0 is the batch axis, which is
# replaced by the explicit batch_size.
dims = model.graph.input[0].type.tensor_type.shape.dim[1:4]
shape = [batch_size] + [d.dim_value for d in dims]

engine = eng.build_engine(args.onnx, shape=shape, precision=args.precision)
eng.save_engine(engine, engine_name)