Example #1
def compute_memory_usage(config, training_set_metadata) -> int:
    # Estimate training memory as: parameter count * bytes per weight
    # (weights plus optimizer state) * batch size.
    update_config_with_metadata(config, training_set_metadata)
    lm = LudwigModel.create_model(config)
    model_size = lm.get_model_size()  # number of parameters in the model
    batch_size = config[TRAINER][BATCH_SIZE]
    return model_size * (BYTES_PER_WEIGHT +
                         BYTES_OPTIMIZER_PER_WEIGHT) * batch_size
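
A minimal usage sketch, assuming the caller wants to compare the estimate against available host RAM; the fits_in_memory wrapper is hypothetical and psutil is used only to read free memory, neither is part of the Ludwig API.

import psutil

def fits_in_memory(config, training_set_metadata) -> bool:
    # Hypothetical caller: accept the configuration only if the estimated
    # training footprint fits in the RAM currently available on this host.
    estimated_bytes = compute_memory_usage(config, training_set_metadata)
    available_bytes = psutil.virtual_memory().available
    return estimated_bytes <= available_bytes
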
Example #2
def setup_model_scaffolding(
        raw_df,
        input_features,
        output_features
):
    # set up input and output features for testing
    config = {'input_features': input_features,
              'output_features': output_features}

    # set up model scaffolding for testing
    model = LudwigModel(config)
    training_set, _, _, training_set_metadata = preprocess_for_training(
        config,
        training_set=raw_df,
        skip_save_processed_input=True
    )
    model.training_set_metadata = training_set_metadata
    update_config_with_metadata(
        model.config,
        training_set_metadata
    )
    model.model = model.create_model(model.config)

    # set up a batcher to iterate over the synthetic data, then hand the
    # model and batcher to the caller (e.g., a pytest fixture or context manager)
    with training_set.initialize_batcher() as batcher:
        yield model, batcher
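
Because setup_model_scaffolding yields instead of returning, it is meant to be consumed as a pytest fixture or wrapped as a context manager. A minimal sketch under that assumption; count_batches is a hypothetical driver, and the next_batch/last_batch calls follow Ludwig's batcher interface as used in its training loop.

import contextlib

scaffold = contextlib.contextmanager(setup_model_scaffolding)

def count_batches(raw_df, input_features, output_features) -> int:
    # Hypothetical driver: walk the batcher once over the synthetic data.
    n_batches = 0
    with scaffold(raw_df, input_features, output_features) as (model, batcher):
        while not batcher.last_batch():
            batch = batcher.next_batch()  # model is available here for forward passes
            n_batches += 1
    return n_batches
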
Example #3
def compute_memory_usage(config, training_set_metadata) -> int:
    # Estimate training memory by summing weight-tensor element counts,
    # scaled by batch size and bytes per element.
    update_config_with_metadata(config, training_set_metadata)
    lm = LudwigModel.create_model(config)
    model_tensors = lm.collect_weights()  # (name, tensor) pairs
    total_size = 0
    batch_size = config[TRAINING][BATCH_SIZE]
    for _, tensor in model_tensors:
        total_size += tensor.numpy().size * batch_size
    total_bytes = total_size * 4  # 4 bytes per element at 32-bit precision
    return total_bytes
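
The 32-bit assumption can also be read from the weights themselves rather than hard-coded. A minimal variant sketch reusing the same Ludwig calls as the example above; the name compute_memory_usage_by_dtype is hypothetical, and numpy's itemsize supplies the bytes per element.

def compute_memory_usage_by_dtype(config, training_set_metadata) -> int:
    # Variant: derive bytes per element from each weight tensor's dtype.
    update_config_with_metadata(config, training_set_metadata)
    lm = LudwigModel.create_model(config)
    batch_size = config[TRAINING][BATCH_SIZE]
    total_bytes = 0
    for _, tensor in lm.collect_weights():
        weights = tensor.numpy()
        total_bytes += weights.size * weights.itemsize * batch_size
    return total_bytes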