def ResNet50(framework: Framework, version: str = '1', enable_trt=False):
    """Export, generate model family and register ResNet50.

    Arguments:
        framework (Framework): Framework name.
        version (str): Model version.
        enable_trt (bool): Flag for enabling TRT conversion.
    """
    if framework == Framework.TENSORFLOW:
        model = tf.keras.applications.ResNet50()
        # When TRT conversion is not requested, export a TensorFlow-Serving
        # SavedModel and register the zipped artifact; otherwise register the
        # Keras model directly and let `register_model` handle conversion.
        if not enable_trt:
            tfs_dir = generate_path(model_name='ResNet50',
                                    framework=framework,
                                    task=Task.IMAGE_CLASSIFICATION,
                                    engine=Engine.TFS,
                                    version=str(version))
            TFSConverter.from_tf_model(model, tfs_dir)
            model = str(tfs_dir.with_suffix('.zip'))
        register_model(
            model,
            dataset='imagenet',
            metric={Metric.ACC: 0.76},
            task=Task.IMAGE_CLASSIFICATION,
            inputs=[
                IOShape([-1, 224, 224, 3], dtype=float, name='input_1',
                        format=ModelInputFormat.FORMAT_NHWC)
            ],
            outputs=[IOShape([-1, 1000], dtype=float, name='probs')],
            architecture='ResNet50',
            framework=framework,
            version=ModelVersion(version),
            convert=enable_trt,
        )
    elif framework == Framework.PYTORCH:
        model = models.resnet50(pretrained=True)
        register_model(
            model,
            dataset='imagenet',
            metric={Metric.ACC: 0.76},
            task=Task.IMAGE_CLASSIFICATION,
            inputs=[
                IOShape([-1, 3, 224, 224], dtype=float, name='INPUT__0',
                        format=ModelInputFormat.FORMAT_NCHW)
            ],
            outputs=[IOShape([-1, 1000], dtype=float, name='probs')],
            architecture='ResNet50',
            framework=framework,
            version=ModelVersion(version),
        )
    else:
        raise ValueError('Framework not supported.')

def ResNet101(framework: Framework, version: str = "1"):
    """Export, generate model family and register ResNet101.

    Arguments:
        framework (Framework): Framework name.
        version (str): Model version.
    """
    if framework == Framework.TENSORFLOW:
        model = tf.keras.applications.ResNet101()
        register_model(
            model,
            dataset='imagenet',
            acc=...,  # TODO: to be filled
            task='image classification',
            inputs=[
                IOShape([-1, 224, 224, 3], dtype=float, name='input_1',
                        format=ModelInputFormat.FORMAT_NHWC)
            ],
            outputs=[IOShape([-1, 1000], dtype=float, name='probs')],
            architecture='ResNet101',
            framework=framework,
            version=ModelVersion(version))
    elif framework == Framework.PYTORCH:
        model = models.resnet101(pretrained=True)
        register_model(
            model,
            dataset='imagenet',
            acc=...,  # TODO
            task='image classification',
            inputs=[
                IOShape([-1, 3, 224, 224], dtype=float, name='INPUT__0',
                        format=ModelInputFormat.FORMAT_NCHW)
            ],
            outputs=[IOShape([-1, 1000], dtype=float, name='probs')],
            architecture='ResNet101',
            framework=framework,
            version=ModelVersion(version))
    else:
        raise ValueError('Framework not supported.')

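# A minimal usage sketch (not part of the original module): the exporters above are plain
# functions, so seeding the model hub for both backends could look like the following. The
# `Framework` members and the `enable_trt` flag come from the signatures above; the version
# strings are illustrative.
if __name__ == '__main__':
    ResNet50(Framework.TENSORFLOW, version='1', enable_trt=False)
    ResNet50(Framework.PYTORCH, version='1')
    ResNet101(Framework.PYTORCH, version='1')
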
def test_register_model():
    model = ModelBO(
        'ResNet50',
        framework=Framework.PYTORCH,
        engine=Engine.TRT,
        version=ModelVersion(1),
        dataset='ImageNet',
        acc=0.8,
        task='image classification',
        inputs=[IOShape([-1, 3, 224, 224], dtype=float, format=ModelInputFormat.FORMAT_NCHW)],
        outputs=[IOShape([-1, 1000], dtype=int)],
        weight=Weight(bytes([123])))

    assert ModelService.post_model(model)

def test_register_model():
    model = ModelBO(
        'ResNet50',
        framework=Framework.PYTORCH,
        engine=Engine.TRT,
        version=ModelVersion(1),
        dataset='ImageNet',
        metric={Metric.ACC: 0.80},
        task=Task.IMAGE_CLASSIFICATION,
        inputs=[IOShape([-1, 3, 224, 224], dtype=float, format=ModelInputFormat.FORMAT_NCHW)],
        outputs=[IOShape([-1, 1000], dtype=int)],
        weight=Weight(bytes([123])))

    assert ModelService.post_model(model)

def generate_path(model_name: str, framework: Framework, engine: Engine,
                  version: Union[ModelVersion, str, int]):
    """Generate saved path from model."""
    model_name = str(model_name)
    if not isinstance(framework, Framework):
        raise ValueError(f'Expecting framework type to be `Framework`, but got {type(framework)}')
    if not isinstance(engine, Engine):
        raise ValueError(f'Expecting engine type to be `Engine`, but got {type(engine)}')
    if not isinstance(version, ModelVersion):
        version = ModelVersion(str(version))

    return (
        Path.home() / '.modelci' / model_name
        / f'{framework.name.lower()}-{engine.name.lower()}'
        / str(version)
    )

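# A hedged usage sketch for `generate_path`; the concrete home directory is illustrative.
#
#   >>> generate_path('ResNet50', Framework.PYTORCH, Engine.TRT, version=1)
#   PosixPath('/home/<user>/.modelci/ResNet50/pytorch-trt/1')
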
def parse_path(path: Path):
    """Obtain filename, framework and engine from saved path."""
    if re.match(r'^.*?[!/]*/[a-z]+-[a-z]+/\d+$', str(path.with_suffix(''))):
        filename = path.name
        architecture = path.parent.parent.stem
        info = path.parent.name.split('-')
        framework = Framework[info[0].upper()]
        engine = Engine[info[1].upper()]
        version = ModelVersion(Path(filename).stem)
        return {
            'architecture': architecture,
            'framework': framework,
            'engine': engine,
            'version': version,
            'filename': filename,
            'base_dir': path.parent,
        }
    else:
        raise ValueError('Incorrect model path pattern')

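# A hedged round-trip sketch: a path produced by `generate_path` parses back into its
# components (the `.pt` weight suffix is illustrative).
#
#   >>> saved = generate_path('ResNet50', Framework.PYTORCH, Engine.TRT, version=1)
#   >>> info = parse_path(saved.with_suffix('.pt'))
#   >>> info['architecture'], info['framework'].name, info['engine'].name
#   ('ResNet50', 'PYTORCH', 'TRT')
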
def register_model_from_yaml(file_path: Union[Path, str]):
    def convert_ioshape_plain_to_ioshape(ioshape_plain):
        """Convert IOShape-like dictionary to IOShape."""
        # unpack
        i, ioshape_plain = ioshape_plain

        assert isinstance(ioshape_plain['shape'], Iterable), \
            f'inputs[{i}].shape expected to be iterable, but got {ioshape_plain["shape"]}'
        assert isinstance(ioshape_plain['dtype'], str), \
            f'inputs[{i}].dtype expected to be a `DataType`, but got {ioshape_plain["dtype"]}.'
        ioshape_plain['dtype'] = DataType[ioshape_plain['dtype']]

        return IOShape(**ioshape_plain)

    # check if the file exists
    file_path = Path(file_path)
    assert file_path.exists(), \
        f'Model definition file at {str(file_path)} does not exist'

    # read yaml
    with open(file_path) as f:
        model_config = yaml.safe_load(f)

    model_weight_path = model_config['weight']
    origin_model = os.path.expanduser(model_weight_path)
    dataset = model_config['dataset']
    metric = model_config['metric']
    inputs_plain = model_config['inputs']
    outputs_plain = model_config['outputs']
    parent_model_id = model_config.get('parent_model_id', '')
    model_input = model_config.get('model_input', None)
    architecture = model_config.get('architecture', None)
    task = model_config.get('task', None)
    framework = model_config.get('framework', None)
    engine = model_config.get('engine', None)
    model_status = model_config.get('model_status', None)
    version = model_config.get('version', None)
    convert = model_config.get('convert', True)

    # convert inputs and outputs
    inputs = list(map(convert_ioshape_plain_to_ioshape, enumerate(inputs_plain)))
    outputs = list(map(convert_ioshape_plain_to_ioshape, enumerate(outputs_plain)))

    # wrap POJO
    if task is not None:
        task = Task[task.upper()]
    if metric is not None:
        metric = {Metric[key.upper()]: val for key, val in metric[0].items()}
    if framework is not None:
        framework = Framework[framework.upper()]
    if engine is not None:
        engine = Engine[engine.upper()]
    if model_status is not None:
        model_status = [ModelStatus[item.upper()] for item in model_status]
    if version is not None:
        version = ModelVersion(version)

    register_model(
        origin_model=origin_model,
        dataset=dataset,
        metric=metric,
        task=task,
        parent_model_id=parent_model_id,
        inputs=inputs,
        outputs=outputs,
        model_input=model_input,
        architecture=architecture,
        framework=framework,
        engine=engine,
        version=version,
        model_status=model_status,
        convert=convert,
    )

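# A hedged sketch of a model definition file accepted by `register_model_from_yaml`,
# embedded as a Python string so the example stays in one language. The keys mirror the
# fields read above; the concrete values, the weight path, and the `TYPE_FP32` dtype
# member name are assumptions, not documented spellings.
example_yaml = """
weight: ~/.modelci/ResNet50/pytorch-pytorch/1.pth
dataset: imagenet
metric:
  - acc: 0.76
task: image_classification
architecture: ResNet50
framework: pytorch
engine: pytorch
version: 1
convert: true
inputs:
  - name: input
    shape: [-1, 3, 224, 224]
    dtype: TYPE_FP32
outputs:
  - name: probs
    shape: [-1, 1000]
    dtype: TYPE_FP32
"""

# Example usage:
#   with open('resnet50.yml', 'w') as f:
#       f.write(example_yaml)
#   register_model_from_yaml('resnet50.yml')
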
def update_finetune_model_as_new(id: str, updated_layer: Structure, dry_run: bool = False):  # noqa
    """Temporary function for fine-tuning CV models. Its functionality overlaps with
    `update_model_structure_as_new`; please use `update_model_structure_as_new` from the
    next release.

    Examples:
        Fine-tune the model by modifying the layer named 'fc' (the last layer). The layer
        has a changed argument out_features = 10. op_='M' indicates the operation on this
        layer ('fc') is 'Modify'. There are no changes to layer connections. Therefore,
        the structure change summary is [M] fc: (...) out_features=10

        >>> from collections import OrderedDict
        >>> structure_data = {
        ...     'layer': OrderedDict({'fc': {'out_features': 10, 'op_': 'M', 'type_': 'torch.nn.Linear'}})
        ... }
        >>> update_finetune_model_as_new(id=..., updated_layer=Structure.parse_obj(structure_data))

    Args:
        id (str): ID of the model to be updated.
        updated_layer (Structure): Contains layers to be fine-tuned.
        dry_run (bool): Test run to verify that the provided parameters (i.e. the model
            specified by `id` and the updated layers) are valid.

    Returns:
        A dict containing the ID of the newly registered draft model, or ``True`` when
        `updated_layer` contains no layers to update. In dry-run mode no model is registered.
    """
    if len(updated_layer.layer.items()) == 0:
        return True

    model = ModelService.get_model_by_id(id)
    if model.engine != Engine.PYTORCH:
        raise ValueError(f'model {id} is not supported for editing. '
                         f'Currently only support model with engine=PYTORCH')

    # download model as local cache
    cache_path = get_remote_model_weight(model=model)
    net = torch.load(cache_path)

    for layer_name, layer_param in updated_layer.layer.items():
        layer_op = getattr(layer_param, 'op_')

        # update layer
        if layer_op == Operation.MODIFY:
            # check if the layer name exists
            # TODO check if layer path exists eg."layer1.0.conv1"
            if not hasattr(net, layer_name):
                raise ModelStructureError(
                    f'Structure layer name `{layer_name}` not found in model {id}.')
            net_layer = getattr(net, layer_name)

            # check if the provided type matches the original type
            layer_type = type(net_layer)
            layer_type_provided = eval(layer_param.type_.value)  # nosec
            if layer_type is not layer_type_provided:
                raise ModelStructureError(
                    f'Expect `{layer_name}.type_` to be {layer_type}, '
                    f'but got {layer_type_provided}')

            # get layer parameters
            layer_param_old = layer_param.parse_layer_obj(net_layer)
            layer_param_data = layer_param_old.dict(exclude_none=True, exclude={'type_', 'op_'})

            layer_param_update_data = layer_param.dict(exclude_none=True, exclude={'type_', 'op_'})
            # replace 'null' with None. See reason :class:`ModelLayer`.
            for k, v in layer_param_update_data.items():
                if v == 'null':
                    layer_param_update_data[k] = None

            # update the layer parameters
            layer_param_data.update(layer_param_update_data)
            layer = layer_type(**layer_param_data)
            setattr(net, layer_name, layer)
        else:
            # if layer_op is Operation.ADD,
            #     1. check that the layer name does not exist
            #     2. add a layer
            #     3. change the `forward` function according to the connections
            # if layer_op is Operation.DELETE,
            #     1. check that the layer exists
            #     2. delete the layer
            #     3. change the `forward` function
            raise ValueError(
                'Operation not permitted. Please use `update_model_structure_as_new`.')

    # build dummy input tensors from the recorded input shapes
    input_tensors = list()
    bs = 1
    for input_ in model.inputs:
        input_tensor = torch.rand(bs, *input_.shape[1:]).type(
            model_data_type_to_torch(input_.dtype))
        input_tensors.append(input_tensor)

    # parse output tensors
    output_shapes = list()
    output_tensors = net(*input_tensors)
    if not isinstance(output_tensors, (list, tuple)):
        output_tensors = (output_tensors,)
    for output_tensor in output_tensors:
        output_shape = IOShape(shape=[bs, *output_tensor.shape[1:]],
                               dtype=type_to_data_type(output_tensor.dtype))
        output_shapes.append(output_shape)

    if not dry_run:
        # TODO return validation result for dry_run mode
        # TODO apply Semantic Versioning https://semver.org/
        # TODO resolve duplicate model version problem in a more efficient way
        version = ModelVersion(model.version.ver + 1)
        previous_models = ModelService.get_models(
            architecture=model.architecture,
            task=model.task,
            framework=model.framework,
            engine=Engine.NONE)
        if len(previous_models):
            last_version = max(previous_models, key=lambda k: k.version.ver).version.ver
            version = ModelVersion(last_version + 1)

        saved_path = generate_path_plain(
            architecture=model.architecture,
            task=model.task,
            framework=model.framework,
            engine=Engine.NONE,
            version=version)
        saved_path.parent.mkdir(parents=True, exist_ok=True)
        # save the updated network (not the original model business object)
        torch.save(net, saved_path.with_suffix('.pt'))

        mlmodelin = MLModel(
            dataset='',
            metric={key: 0 for key in model.metric.keys()},
            task=model.task,
            inputs=model.inputs,
            outputs=output_shapes,
            architecture=model.name,
            framework=model.framework,
            engine=Engine.NONE,
            model_status=[ModelStatus.DRAFT],
            parent_model_id=model.id,
            version=version,
            weight=saved_path)
        register_model(mlmodelin, convert=False, profile=False)

        model_bo = ModelService.get_models(
            architecture=model.architecture,
            task=model.task,
            framework=model.framework,
            engine=Engine.NONE,
            version=version)[0]

        return {'id': model_bo.id}

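# A hedged usage sketch for the function above: validate a structural edit with
# `dry_run=True` before committing it. The model id is illustrative; the payload follows
# the docstring example.
#
#   >>> from collections import OrderedDict
#   >>> payload = Structure.parse_obj(
#   ...     {'layer': OrderedDict({'fc': {'out_features': 10, 'op_': 'M', 'type_': 'torch.nn.Linear'}})})
#   >>> update_finetune_model_as_new(id='<model-id>', updated_layer=payload, dry_run=True)
#   >>> update_finetune_model_as_new(id='<model-id>', updated_layer=payload)
#   {'id': '<new-model-id>'}
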
def register_model_from_yaml(file_path: Union[Path, str]):
    def convert_ioshape_plain_to_ioshape(ioshape_plain):
        """Convert IOShape-like dictionary to IOShape."""
        # unpack
        i, ioshape_plain = ioshape_plain

        assert isinstance(ioshape_plain['shape'], Iterable), \
            f'inputs[{i}].shape expected to be iterable, but got {ioshape_plain["shape"]}'
        assert isinstance(ioshape_plain['dtype'], str), \
            f'inputs[{i}].dtype expected to be a `DataType`, but got {ioshape_plain["dtype"]}.'
        ioshape_plain['dtype'] = DataType[ioshape_plain['dtype']]

        return IOShape(**ioshape_plain)

    # check if the file exists
    file_path = Path(file_path)
    assert file_path.exists(), \
        f'Model definition file at {str(file_path)} does not exist'

    # read yaml
    with open(file_path) as f:
        model_config = yaml.safe_load(f)

    origin_model = model_config['weight']
    dataset = model_config['dataset']
    acc = model_config['acc']
    task = model_config['task']
    inputs_plain = model_config['inputs']
    outputs_plain = model_config['outputs']
    architecture = model_config.get('architecture', None)
    framework = model_config.get('framework', None)
    engine = model_config.get('engine', None)
    version = model_config.get('version', None)
    convert = model_config.get('convert', True)

    # convert inputs and outputs
    inputs = list(map(convert_ioshape_plain_to_ioshape, enumerate(inputs_plain)))
    outputs = list(map(convert_ioshape_plain_to_ioshape, enumerate(outputs_plain)))

    # wrap POJO
    if framework is not None:
        framework = Framework[framework.upper()]
    if engine is not None:
        engine = Engine[engine.upper()]
    if version is not None:
        version = ModelVersion(version)

    register_model(
        origin_model=origin_model,
        dataset=dataset,
        acc=acc,
        task=task,
        inputs=inputs,
        outputs=outputs,
        architecture=architecture,
        framework=framework,
        engine=engine,
        version=version,
        convert=convert,
    )