state = {
        "model": {
            "name": crnn.__class__.__name__,
            "args": "",
            "kwargs": crnn_kwargs,
            "state_dict": crnn.state_dict()
        },
        "optimizer": {
            "name": optimizer.__class__.__name__,
            "args": "",
            "kwargs": optim_kwargs,
            "state_dict": optimizer.state_dict()
        },
        "pooling_time_ratio": pooling_time_ratio,
        "scaler": scaler.state_dict(),
        "many_hot_encoder": many_hot_encoder.state_dict()
    }

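    # Callback keeping the checkpoint with the best validation score ("sup" selects the maximum)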
    save_best_cb = SaveBest("sup")

    # Eval 2018
    eval_2018_df = dataset.initialize_and_get_df(cfg.eval2018,
                                                 reduced_number_of_data)
    eval_2018 = DataLoadDf(eval_2018_df,
                           dataset.get_feature_file,
                           many_hot_encoder.encode_strong_df,
                           transform=transforms_valid)

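    # Move the model to GPU if one is available, then run the training epochs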
    [crnn] = to_cuda_if_available([crnn])
    for epoch in range(cfg.n_epoch):
        crnn = crnn.train()
Example #2
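    # The validation subset is sized from the least-represented class (ten times its example count)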
    print("min classes examples: " + str(min_length))
    number = min_length * 10
    print("number valid: " + str(number))

    test_triplets = DataLoadDfTripletLabeledExhaustif(test_df_fr,
                                                      encode_function_label,
                                                      transform=Compose(trans_fr_sc_embed),
                                                      number=min(cfg.number_test, len(test_df_fr.dropna())))
    # #########
    # End of DATA
    # #########

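    # When training from scratch, add the feature scaler and label encoder to the state that will be checkpointed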
    if resume_training is None:
        state.update({
            "scaler": scaler.state_dict(),
            "many_hot_encoder": many_hot_encoder.state_dict()
        })

    model_directory, log_directory = get_dirs("pretrained_bs_{}adam".format(batch_size))

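    # Encode the temporal resolution in the run parameters: frames per second, "seg" for segment-level input, otherwise unknown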
    if frames_in_sec is not None:
        fr = frames_in_sec
    elif segment:
        fr = "seg"
    else:
        fr = "unknown"
    params_name = {
        "early_stopping": cfg.early_stopping,
        "conv_dropout": cfg.conv_dropout,
        "frames": fr,
    }
Example #3
    if not args.segment:
        trans_emb.append(Unsqueeze(0))

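    # Wrap each split in a DataLoadDf with the weak-label encoder and the shared transform pipeline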
    train_set_emb = DataLoadDf(train_weak_df,
                               many_hot_encoder.encode_weak,
                               transform=Compose(trans_emb))
    valid_set_val = DataLoadDf(valid_weak_df,
                               many_hot_encoder.encode_weak,
                               transform=Compose(trans_emb))
    test_set_val = DataLoadDf(test_df,
                              many_hot_encoder.encode_weak,
                              transform=Compose(trans_emb))

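    # State of the embedding model: feature scaler and label encoder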
    emb_state = {
        "scaler": scaler.state_dict(),
        "many_hot_encoder": many_hot_encoder.state_dict()
    }
    emb_model, emb_state = get_model(emb_state, args)
    emb_model = to_cuda_if_available(emb_model)
    # Classif_model
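    # Run one forward pass to log the input and embedding shapes feeding the classifier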
    if args.segment:
        X, y = train_set[0]
    else:
        X, y = next(iter(train_load))
    X = to_cuda_if_available(X)
    emb = emb_model(X)
    LOG.info("shape input CNN: x {}, y {}".format(X.shape, y.shape))
    LOG.info("shape after CNN: {}".format(emb.shape))

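    # Layer sizes of the classifier head built on top of the embeddings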
    if args.n_layers_classif == 2:
        dimensions = [32, 16]