def main():
    """Train, checkpoint, save, and evaluate the bitrate model end to end."""
    args = parse_arguments()
    csv_files = list(Path(args.data_path).glob("*.csv"))

    width, height = parse_resolution(args.resolution)
    limits = MaxBitrateParameter(w_max=width, h_max=height)

    # Wrap the training and validation splits read from the CSV files.
    train_wrapper = DataFrameWrapper(
        read_df_by_keys(csv_files, KEYS.TRAINING_SET), limits
    )
    val_wrapper = DataFrameWrapper(
        read_df_by_keys(csv_files, KEYS.VALIDATION_SET), limits
    )

    x_train = train_wrapper.get_ml_input()
    x_val = val_wrapper.get_ml_input()
    y_train = train_wrapper.get_ml_output()
    y_val = val_wrapper.get_ml_output()

    # Scalers are fitted on the train and validation sets combined.
    x_scaler = StandardScaler().fit(np.concatenate([x_train, x_val], axis=0))
    y_scaler = StandardScaler().fit(np.concatenate([y_train, y_val], axis=0))

    train_db = create_db(x_train, y_train, x_scaler, y_scaler, args.batch_size)
    val_db = create_db(x_val, y_val, x_scaler, y_scaler, args.batch_size)

    model = create_model(args.model, args.learning_rate)
    train(model, train_db, val_db, args.epochs)

    # Reload the checkpoint written during training before persisting it,
    # so the saved/validated model is the checkpointed one, not the last epoch.
    model = tf.keras.models.load_model(MODEL_CHECKPOINT)
    save_model(model, args.batch_size, args.learning_rate, args.epochs)

    validate(model, train_wrapper, x_scaler, y_scaler, "training")
    validate(model, val_wrapper, x_scaler, y_scaler, "validation")

    # The test split is only loaded after training, for the final evaluation.
    test_wrapper = DataFrameWrapper(
        read_df_by_keys(csv_files, KEYS.TEST_SET), limits
    )
    validate(model, test_wrapper, x_scaler, y_scaler)
def gcf(k_size, l, m, o):  # noqa: E741
    """Return ``l * (k_size_min / k_size) ** m + o``.

    A shifted power-law term in ``k_size``, anchored at the project-wide
    minimum ``MaxBitrateParameter().k_size_min``.
    """
    ratio = MaxBitrateParameter().k_size_min / k_size
    return l * ratio**m + o
def rcf(width, j):
    """Return ``(width / w_max) ** j``.

    A power-law term in ``width``, normalized by the project-wide maximum
    ``MaxBitrateParameter().w_max``.
    """
    normalized = width / MaxBitrateParameter().w_max
    return normalized**j
def ncf(gop, c, d, e):
    """Return ``c * (gop_min / gop) ** d + e``.

    A shifted power-law term in ``gop``, anchored at the project-wide
    minimum ``MaxBitrateParameter().gop_min``.
    """
    ratio = MaxBitrateParameter().gop_min / gop
    return c * ratio**d + e
def tcf(rate, b):
    """Return ``(rate / f_max) ** b``.

    A power-law term in ``rate``, normalized by the project-wide maximum
    ``MaxBitrateParameter().f_max``.
    """
    normalized = rate / MaxBitrateParameter().f_max
    return normalized**b
def scf(qp, a):
    """Return ``(qp / qp_min) ** (-a)``.

    An inverse power-law term in ``qp``, normalized by the project-wide
    minimum ``MaxBitrateParameter().qp_min``.
    """
    normalized = qp / MaxBitrateParameter().qp_min
    return normalized ** (-a)
def sdcf(sigma, t, u, v):
    """Return ``t * (sigma_min / sigma) ** u + v``.

    A shifted power-law term in ``sigma``, anchored at the project-wide
    minimum ``MaxBitrateParameter().sigma_min``.
    """
    ratio = MaxBitrateParameter().sigma_min / sigma
    return t * ratio**u + v