def main():
    init_logging('webcamInferenceOnnx.log')

    args = parse_args()
    modelFilePath = args.modelFilePath

    inference(modelFilePath)
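# The inference() helper called above is defined elsewhere in the repository.
# A minimal sketch of ONNX model inference with onnxruntime, assuming an NHWC
# float32 input; the preprocessing here is a placeholder, not the repo's code:
import numpy as np
import onnxruntime as ort

def run_onnx_inference(modelFilePath, image):
    session = ort.InferenceSession(modelFilePath)
    inputName = session.get_inputs()[0].name
    batch = np.expand_dims(image.astype(np.float32), axis=0)  # batch of one image
    # run() returns a list of outputs; a single segmentation mask is assumed here
    return session.run(None, {inputName: batch})[0]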
def main():
    init_logging('activeLearning.log')

    args = parse_args()

    datasetDir = args.datasetDir
    trainingDir = args.trainingDir if args.trainingDir is not None else datasetDir
    learningRate = args.learningRate
    checkpointFilePath = args.checkpointFilePath
    modelEncoder = args.modelEncoder

    # efficientNet sizes: 224, 240, 260, 300, 380, 456, 528, 600
    # efficientNet optimizer: RMSProp, decay 0.9 and momentum 0.9; batch norm momentum 0.99, weight decay 1e-5; initial learning rate 0.256 that decays by 0.97 every 2.4 epochs
    # efficientNet special strategy: stochastic depth with survival probability 0.8, model size dependent dropout
    optimizer = tf.keras.optimizers.Adam(
        learning_rate=learningRate,
        # beta_1=0.9,
        # beta_2=0.999,
        # epsilon=1e-07,
        # amsgrad=False,
        # name='Adam'
    )
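    # Hedged sketch of the efficientNet recipe described in the comments above
    # (not used by default): RMSprop with momentum plus an exponential
    # learning-rate decay. stepsPerEpoch is a hypothetical value; it depends on
    # dataset size and batch size.
    # stepsPerEpoch = 1000
    # lrSchedule = tf.keras.optimizers.schedules.ExponentialDecay(
    #     initial_learning_rate=0.256,
    #     decay_steps=int(2.4 * stepsPerEpoch),
    #     decay_rate=0.97,
    #     staircase=True)
    # optimizer = tf.keras.optimizers.RMSprop(
    #     learning_rate=lrSchedule, rho=0.9, momentum=0.9)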

    model, preprocess_input = makeModel(optimizer, modelEncoder)
    if checkpointFilePath is not None:
        model.load_weights(checkpointFilePath)
        logger.info('model weights from %s are loaded', checkpointFilePath)

    humanDataset, activeLearningDataset, nonHumanDataset, valHumanDataset, valNonHumanDataset = openSegmentationDatasets(
        datasetDir)

    explicitTrain(model, preprocess_input, humanDataset, activeLearningDataset,
                  nonHumanDataset, valHumanDataset, valNonHumanDataset,
                  trainingDir, modelEncoder, args.batchSize, args.epochs,
                  args.startEpoch)
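# parse_args() is defined elsewhere in the repo; only the attribute names read
# above are known. A hypothetical reconstruction with argparse (flag spellings,
# defaults and types are assumptions):
import argparse

def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('--datasetDir', required=True)
    parser.add_argument('--trainingDir', default=None)
    parser.add_argument('--learningRate', type=float, default=1e-4)
    parser.add_argument('--checkpointFilePath', default=None)
    parser.add_argument('--modelEncoder', default=None)
    parser.add_argument('--batchSize', type=int, default=8)
    parser.add_argument('--epochs', type=int, default=100)
    parser.add_argument('--startEpoch', type=int, default=0)
    return parser.parse_args()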
Example #3
def main():
    init_logging('saveModel.log')

    args = parse_args()
    checkpointFilePath = args.checkpointFilePath
    modelFilePath = args.modelFilePath

    model, preprocess_input = makeModel()
    model.load_weights(checkpointFilePath)
    logger.info('model weights from %s are loaded', checkpointFilePath)
    model.save(modelFilePath)
    logger.info('model saved to %s', modelFilePath)
def main():
    init_logging('training.log')

    logger.debug('gc enabled: {}'.format(gc.isenabled()))
    # gc.set_debug(gc.DEBUG_LEAK)

    args = parse_args()
    datasetDir = args.datasetDir
    trainingDir = args.trainingDir if args.trainingDir is not None else datasetDir
    learningRate = args.learningRate
    checkpointFilePath = args.checkpointFilePath
    modelEncoder = args.modelEncoder

    # efficientNet sizes: 224, 240, 260, 300, 380, 456, 528, 600
    # efficientNet optimizer: RMSprop, decay 0.9 and momentum 0.9; batch norm momentum 0.99, weight decay 1e-5; initial learning rate 0.256 that decays by 0.97 every 2.4 epochs
    # efficientNet special strategy: stochastic depth with survival probability 0.8, model size dependent dropout
    # if modelEncoder == 'resnet18':
    # optimizer = tfa.keras.optimizers.AdamW(
    optimizer = tf.keras.optimizers.Adam(
        learning_rate=learningRate,
        # beta_1=0.9,
        # beta_2=0.999,
        # epsilon=1e-07,
        # amsgrad=False,
        # name='Adam'
    )
    # else: #modelEncoder == 'efficientNet3':
    #     optimizer = tf.keras.optimizers.RMSprop(
    #         lr=learningRate,
    #         momentum=0.9
    #     )

    model, preprocess_input = makeModel(optimizer, modelEncoder)
    if checkpointFilePath is not None:
        model.load_weights(checkpointFilePath)
        # model = tf.saved_model.load(checkpointFilePath)
        logger.info('model weights from %s are loaded', checkpointFilePath)

    humanDataset, nonHumanDataset, valHumanDataset, valNonHumanDataset = openSegmentationDatasets(
        datasetDir)  #,train_nh_number=0)
    explicitTrain(model,
                  preprocess_input,
                  humanDataset,
                  nonHumanDataset,
                  valHumanDataset,
                  valNonHumanDataset,
                  trainingDir,
                  modelEncoder,
                  imageSize=320,
                  batchSize=args.batchSize,
                  epochs=args.epochs,
                  startEpoch=args.startEpoch)
Example #5
def main():
    init_logging('quantizeOnnx.log')

    args = parse_args()
    modelFilePath = args.modelFilePath
    quantizedModelFilePath = args.quantizedModelFilePath
    datasetDir = args.datasetDir

    # dr = ModelCalibrationDataReader(datasetDir, modelFilePath)
    # quantize_static(modelFilePath, quantizedModelFilePath, dr)
    quantize_dynamic(modelFilePath,
                     quantizedModelFilePath,
                     weight_type=QuantType.QInt8)
    print('Calibrated and quantized model saved.')
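# The commented-out static path above needs a calibration data reader. A minimal
# sketch of one, assuming a single NHWC image input; the file listing and the
# all-zeros "preprocessing" are placeholders, not the repo's
# ModelCalibrationDataReader:
import os
import numpy as np
import onnxruntime
from onnxruntime.quantization import CalibrationDataReader

class SimpleCalibrationDataReader(CalibrationDataReader):
    def __init__(self, datasetDir, modelFilePath, imageSize=320, limit=100):
        session = onnxruntime.InferenceSession(modelFilePath)
        self.inputName = session.get_inputs()[0].name
        self.files = sorted(os.listdir(datasetDir))[:limit]
        self.imageSize = imageSize
        self.index = 0

    def get_next(self):
        # one {input_name: batch} dict per call, then None when data is exhausted
        if self.index >= len(self.files):
            return None
        self.index += 1
        # real code would load and normalize self.files[self.index - 1] here
        batch = np.zeros((1, self.imageSize, self.imageSize, 3), dtype=np.float32)
        return {self.inputName: batch}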
Example #6
def main():
    init_logging('webcamInference.log')

    args = parse_args()
    modelFilePath = args.modelFilePath

    model = tf.keras.models.load_model(
        modelFilePath,
        compile=False,
        custom_objects={'iou_score': sm.metrics.iou_score}
    )

    # model = tf.saved_model.load(modelFilePath)

    print("model summary\n{}".format(model.summary()))

    inference(model, imageSize=320)
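# inference() above is defined elsewhere in the repository; its exact behavior
# is not shown here. A rough, hypothetical sketch of such a webcam segmentation
# loop with OpenCV, assuming the model takes a normalized imageSize x imageSize
# image batch and returns a single-channel mask:
import cv2
import numpy as np

def webcam_inference_sketch(model, imageSize=320):
    cap = cv2.VideoCapture(0)  # default webcam
    while True:
        ok, frame = cap.read()
        if not ok:
            break
        resized = cv2.resize(frame, (imageSize, imageSize))
        batch = np.expand_dims(resized.astype(np.float32) / 255.0, axis=0)
        mask = model.predict(batch)[0, :, :, 0] > 0.5  # assumed (1, H, W, 1) output
        resized[mask] = (0, 255, 0)  # highlight predicted person pixels
        cv2.imshow('segmentation', resized)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    cap.release()
    cv2.destroyAllWindows()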
def main():
    init_logging('tf2onnx.log')

    args = parse_args()
    checkpointFilePath = args.checkpointFilePath
    modelFilePath = args.modelFilePath
    onnxFilePath = args.onnxFilePath

    if checkpointFilePath is not None:
        model, preprocess_input = makeModel()
        model.load_weights(checkpointFilePath)
        logger.info('model weights from %s are loaded', checkpointFilePath)
        model.save(modelFilePath)
        logger.info('model saved to %s', modelFilePath)

    # python -m tf2onnx.convert --saved-model tensorflow-model-path --opset 11 --output model.onnx
    subprocess.run([
        "python3.8", "-m", "tf2onnx.convert", "--saved-model", modelFilePath,
        "--opset", "11", "--output", onnxFilePath
    ], check=True)
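    # Alternative sketch: the same conversion in-process via the tf2onnx Python
    # API (assumes tf2onnx >= 1.9 and that `model` was built above, i.e. a
    # checkpoint was given):
    # import tf2onnx
    # tf2onnx.convert.from_keras(model, opset=11, output_path=onnxFilePath)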
Example #8
def main():
    args = parse_args()
    datasetDir = args.datasetDir
    # datasetDir = '../data' # for debug assign your datafolder

    init_logging('prepare.log')

    humanDataset = CocoDatasetBuilder(
        os.path.join(datasetDir, 'train2017'),
        os.path.join(datasetDir,
                     'annotations/instances_train2017.json')).addClasses(
                         [1]).build()
    CocoDataset.save(humanDataset,
                     os.path.join(datasetDir,
                                  'human'))  #, startIndex=(61600 + 2515))

    nonHumanDataset = CocoDatasetBuilder(
        os.path.join(datasetDir, 'train2017'),
        os.path.join(datasetDir, 'annotations/instances_train2017.json')
    ).selectAll().filterNonClasses([1]).build(shuffle=True)
    CocoDataset.save(nonHumanDataset,
                     os.path.join(datasetDir,
                                  'nonHuman'))  #, startIndex=28288)

    valHumanDataset = CocoDatasetBuilder(
        os.path.join(datasetDir, 'val2017'),
        os.path.join(datasetDir,
                     'annotations/instances_val2017.json')).addClasses(
                         [1]).build()
    CocoDataset.save(valHumanDataset, os.path.join(datasetDir, 'valHuman'))

    valNonHumanDataset = CocoDatasetBuilder(
        os.path.join(datasetDir, 'val2017'),
        os.path.join(datasetDir, 'annotations/instances_val2017.json')
    ).selectAll().filterNonClasses([1]).build(shuffle=True)
    CocoDataset.save(valNonHumanDataset, os.path.join(datasetDir,
                                                      'valNonHuman'))
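# Expected COCO 2017 layout under datasetDir (inferred from the paths above):
#   train2017/                            - training images
#   val2017/                              - validation images
#   annotations/instances_train2017.json  - training annotations
#   annotations/instances_val2017.json    - validation annotations
# The prepared splits are written to human/, nonHuman/, valHuman/ and valNonHuman/.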