Example no. 1
def main():
    # Parse the input once; a file handle can only be consumed once, so the
    # parsed data is reused for both fuel calculations.
    with open('input') as input_file:
        data = prepare_data(input_file)

    basic_fuel_mass = calculate_mass_fuel(data, CalculationMethods.SIMPLE)
    total_fuel_mass = calculate_mass_fuel(data, CalculationMethods.TOTAL)

    print(f"{basic_fuel_mass=}")
    print(f"{total_fuel_mass=}")
Example no. 2
def main():
    geo_data, field_data = prepare_data(sys.argv[1:], chosen_times = ['0.0', '12.0'])
    conv = calc_convergence(geo_data, field_data, '12.0', solution)

    plot_data = []
    norm = 'L2'
    field = 'psi'

    stat_file = open('moving_vort_conv.txt', 'w')
    save_conv(geo_data, conv, 'Moving vortices', norm, field, stat_file)
    stat_file.close()

    for opt in conv.keys():
        nys, errs = zip(*sorted(conv[opt][norm][field].items()))
        plot_data.append((nys, errs, opt))

    ord_data = []
    ord2 = lambda n : 2e-0 * (n / 24.) ** (-2)
    ny2 = np.array([300, 900])
    nyt = 450
    ord_data.append((ny2, ord2(ny2), nyt, ord2(nyt+10), '2nd order', -94 - 180 / np.pi * np.arctan(-2)))

    ord3 = lambda n : 1e-1 * (n / 24.) ** (-3)
    ny3 = np.array([300, 900])
    nyt = 480
    ord_data.append((ny3, ord3(ny3), nyt, ord3(nyt+20), '3rd order', -113 - 180 / np.pi * np.arctan(-3)))

    conv_plot(plot_data, ord_data, fname = 'moving_vort_conv.pdf')

    panel_plot(geo_data, field_data, opt = 'nug|abs|div_2nd|div_3rd', time = '12.0', ny = 192, fname = 'moving_vort_panel.pdf')
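Each ord_data entry above appears to bundle the two endpoints of a reference-slope line with a label position, label text and rotation angle; the project's conv_plot itself is not shown in this listing. The following free-standing matplotlib sketch only illustrates how such an entry can be drawn, and all names and values in it are assumptions.

import numpy as np
import matplotlib.pyplot as plt

# Hypothetical single entry: (x endpoints, y endpoints, label x, label y,
# label text, label rotation in degrees).
ord2 = lambda n: 2e-0 * (n / 24.) ** (-2)
xs = np.array([300, 900])
entry = (xs, ord2(xs), 450, ord2(460), '2nd order',
         -94 - 180 / np.pi * np.arctan(-2))

x, y, label_x, label_y, text, angle = entry
plt.loglog(x, y, 'k--')  # dashed reference line of the given order
plt.text(label_x, label_y, text, rotation=angle, rotation_mode='anchor')
plt.xlabel('ny')
plt.ylabel('L2 error')
plt.savefig('reference_slope_sketch.pdf')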
Example no. 3
def main():
    geo_data, field_data = prepare_data(sys.argv[1:])
    conv = calc_convergence(geo_data, field_data, '1.0', solution)

    plot_data = []
    norm = 'L2'
    field = 'psi'

    stat_file = open('manufactured_3d_conv.txt', 'w')
    save_conv(geo_data, conv, 'Manufactured solution in 3D', norm, field,
              stat_file)
    stat_file.close()

    for opt in conv.keys():
        nys, errs = zip(*sorted(conv[opt][norm][field].items()))
        plot_data.append((nys, errs, opt))

    ord_data = []
    ord2 = lambda n: 3e-2 * (n / float(9))**(-2)
    ny2 = np.array([40, 140])
    nyt = 70
    ord_data.append((ny2, ord2(ny2), nyt, ord2(nyt + 4), '2nd order',
                     -92 - 180 / np.pi * np.arctan(-2)))

    ord3 = lambda n: 16e-3 * (n / float(9))**(-3)
    ny3 = np.array([40, 140])
    nyt = 70
    ord_data.append((ny3, ord3(ny3), nyt, ord3(nyt + 4), '3rd order',
                     -110 - 180 / np.pi * np.arctan(-3)))

    conv_plot(plot_data, ord_data, fname='manufactured_3d_conv.pdf')
Example no. 4
def main():
    geo_data, field_data = prepare_data(sys.argv[1:])
    conv = calc_convergence(geo_data, field_data, '1.0', solution)

    plot_data = []
    norm = 'L2'
    field = 'psi'

    stat_file = open('manufactured_3d_conv.txt', 'w')
    save_conv(geo_data, conv, 'Manufactured solution in 3D', norm, field, stat_file)
    stat_file.close()

    for opt in conv.keys():
        nys, errs = zip(*sorted(conv[opt][norm][field].items()))
        plot_data.append((nys, errs, opt))

    ord_data = []
    ord2 = lambda n : 3e-2 * (n / float(9)) ** (-2)
    ny2 = np.array([40, 140])
    nyt = 70
    ord_data.append((ny2, ord2(ny2), nyt, ord2(nyt+4), '2nd order', -92 - 180 / np.pi * np.arctan(-2)))

    ord3 = lambda n : 16e-3 * (n / float(9)) ** (-3)
    ny3 = np.array([40, 140])
    nyt = 70
    ord_data.append((ny3, ord3(ny3), nyt, ord3(nyt+4), '3rd order', -110 - 180 / np.pi * np.arctan(-3)))

    conv_plot(plot_data, ord_data, fname = 'manufactured_3d_conv.pdf')
Example no. 5
def main():
    geo_data, field_data = prepare_data(sys.argv[1:])
    conv = calc_convergence(geo_data, field_data, '5.0', solution)

    nys = [120, 240]

    mixing_diags = calc_mixing_diags(geo_data, field_data, nys)
    filament_diags = calc_filament_diags(geo_data, field_data, nys)

    for ny in nys:
        for opt in conv.keys():
            write_stats(conv, mixing_diags, filament_diags, opt, ny)
Example no. 6
def main():
    geo_data, field_data = prepare_data(sys.argv[1:])
    conv = calc_convergence(geo_data, field_data, '5.0', solution)

    nys = [120, 240]

    mixing_diags = calc_mixing_diags(geo_data, field_data, nys)
    filament_diags = calc_filament_diags(geo_data, field_data, nys)

    for ny in nys:
        for opt in conv.keys():
            write_stats(conv, mixing_diags, filament_diags, opt, ny)
Example no. 7
def main():
    geo_data, field_data = prepare_data(sys.argv[1:],
                                        chosen_times=['0.0', '2.5', '5.0'])
    conv = calc_convergence(geo_data, field_data, '5.0', solution)

    plot_data = []
    norm = 'L2'
    field = 'gh'

    for opt in conv.keys():
        nys, errs = zip(*sorted(conv[opt][norm][field].items()))
        plot_data.append((nys, errs, opt))

    stat_file = open('reversing_deform_conv.txt', 'w')
    save_conv(geo_data, conv, 'Reversing deformational flow', norm, field,
              stat_file)
    stat_file.close()

    ord_data = []
    ord2 = lambda n: 1.5e-1 * (n / 120.)**(-2)
    ny2 = np.array([400, 1000])
    nyt = 580
    ord_data.append((ny2, ord2(ny2), nyt, ord2(nyt + 10), '2nd order',
                     -95 - 180 / np.pi * np.arctan(-2)))

    ord3 = lambda n: 1e-1 * (n / 120.)**(-3)
    ny3 = np.array([400, 1000])
    nyt = 640
    ord_data.append((ny3, ord3(ny3), nyt, ord3(nyt + 20), '3rd order',
                     -115 - 180 / np.pi * np.arctan(-3)))

    conv_plot(plot_data, ord_data, fname='reversing_deform_conv.pdf')

    panel_plot(field_data[120]['nug|iga|div_2nd|div_3rd|fct']['2.5'],
               fname='reversing_deform_panel.pdf')

    mixing_ny = 240
    mixing_diags = calc_mixing_diags(geo_data, field_data, nys=[mixing_ny])

    stat_file = open('reversing_deform_mixing.txt', 'w')
    save_mixing_diags(mixing_diags, mixing_ny, stat_file)
    stat_file.close()

    plot_mixing(field_data,
                mixing_diags,
                mixing_ny,
                fname='reversing_deform_mixing.pdf')
Example no. 8
def main():
    geo_data, field_data = prepare_data(sys.argv[1:], chosen_times = ['0.0', '2.5', '5.0'])
    conv = calc_convergence(geo_data, field_data, '5.0', solution)

    plot_data = []
    norm = 'L2'
    field = 'gh'

    for opt in conv.keys():
        nys, errs = zip(*sorted(conv[opt][norm][field].items()))
        plot_data.append((nys, errs, opt))

    stat_file = open('reversing_deform_conv.txt', 'w')
    save_conv(geo_data, conv, 'Reversing deformational flow', norm, field, stat_file)
    stat_file.close()

    ord_data = []
    ord2 = lambda n : 1.5e-1 * (n / 120.) ** (-2)
    ny2 = np.array([400, 1000])
    nyt = 580
    ord_data.append((ny2, ord2(ny2), nyt, ord2(nyt+10), '2nd order', -95 - 180 / np.pi * np.arctan(-2)))

    ord3 = lambda n : 1e-1 * (n / 120.) ** (-3)
    ny3 = np.array([400, 1000])
    nyt = 640
    ord_data.append((ny3, ord3(ny3), nyt, ord3(nyt+20), '3rd order', -115 - 180 / np.pi * np.arctan(-3)))

    conv_plot(plot_data, ord_data, fname = 'reversing_deform_conv.pdf')

    panel_plot(field_data[120]['nug|iga|div_2nd|div_3rd|fct']['2.5'], fname = 'reversing_deform_panel.pdf')

    mixing_ny = 240
    mixing_diags = calc_mixing_diags(geo_data, field_data, nys = [mixing_ny])

    stat_file = open('reversing_deform_mixing.txt', 'w')
    save_mixing_diags(mixing_diags, mixing_ny, stat_file)
    stat_file.close()

    plot_mixing(field_data, mixing_diags, mixing_ny, fname = 'reversing_deform_mixing.pdf')
Example no. 9
def main():
    geo_data, field_data = prepare_data(sys.argv[1:],
                                        chosen_times=['0.0', '12.0'])
    conv = calc_convergence(geo_data, field_data, '12.0', solution)

    plot_data = []
    norm = 'L2'
    field = 'psi'

    stat_file = open('moving_vort_conv.txt', 'w')
    save_conv(geo_data, conv, 'Moving vortices', norm, field, stat_file)
    stat_file.close()

    for opt in conv.keys():
        nys, errs = zip(*sorted(conv[opt][norm][field].items()))
        plot_data.append((nys, errs, opt))

    ord_data = []
    ord2 = lambda n: 2e-0 * (n / 24.)**(-2)
    ny2 = np.array([300, 900])
    nyt = 450
    ord_data.append((ny2, ord2(ny2), nyt, ord2(nyt + 10), '2nd order',
                     -94 - 180 / np.pi * np.arctan(-2)))

    ord3 = lambda n: 1e-1 * (n / 24.)**(-3)
    ny3 = np.array([300, 900])
    nyt = 480
    ord_data.append((ny3, ord3(ny3), nyt, ord3(nyt + 20), '3rd order',
                     -113 - 180 / np.pi * np.arctan(-3)))

    conv_plot(plot_data, ord_data, fname='moving_vort_conv.pdf')

    panel_plot(geo_data,
               field_data,
               opt='nug|abs|div_2nd|div_3rd',
               time='12.0',
               ny=192,
               fname='moving_vort_panel.pdf')
Example no. 10
def main():
    """"""
    hp.download_files(cf.URLS, cf.TMP_DL_DIR)
    hp.decompress_files(cf.TMP_DL_DIR)
    numbers = hp.prepare_data(cf.TMP_DL_DIR)
    hp.load_db(numbers, cf.DB_PATH, cf.DB_NAME)
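The hp and cf modules are project-specific and not shown in this listing. As a purely hypothetical stand-in for the configuration side, only the attribute names below are taken from the snippet; every value is invented for illustration.

# config.py, imported above as cf -- placeholder values only.
URLS = [
    "https://example.org/dump/part1.gz",
    "https://example.org/dump/part2.gz",
]
TMP_DL_DIR = "/tmp/downloads"
DB_PATH = "/var/lib/app"
DB_NAME = "numbers.db"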
Example no. 11
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import cross_val_score
from helpers import plot_feature_importance, plot_partial_dependence, prepare_data
from collections import Counter
import matplotlib.pyplot as plt



path = r'data\dataset_telecom_01.csv'

X, y, df, y_df = prepare_data(path)

clf = GradientBoostingClassifier(n_estimators=200, learning_rate=0.1, subsample=0.5, max_depth=2, random_state=0)
scores = cross_val_score(clf, X, y, cv=5)
print(scores)  # check if model has learned any pattern


clf = GradientBoostingClassifier(n_estimators=200, learning_rate=0.1,
                                 subsample=0.5, max_depth=2, random_state=0).fit(X, y)

plot_feature_importance(clf, df.columns, top_most_important=7)  # show the top features influencing churn


plot_partial_dependence(clf, X, df.columns, ['Factor_03'])  # partial dependence of churn on this feature
plot_partial_dependence(clf, X, df.columns, ['MonthlyCharges'])  # partial dependence of churn on this feature


for col in df.columns:
    plot_partial_dependence(clf, X, df.columns, ['C_03_Electronic check'] + [col])  # dependence of churn on this feature pair
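plot_partial_dependence here is a project helper, not the scikit-learn function of the same name. If the helper is unavailable, a roughly equivalent plot can be produced with plain scikit-learn (1.0 or later); the sketch below simply reuses the names from the snippet above.

from sklearn.inspection import PartialDependenceDisplay

# Partial dependence of the fitted classifier on a single feature.
PartialDependenceDisplay.from_estimator(clf, X, features=['Factor_03'],
                                        feature_names=list(df.columns))
plt.show()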

Example no. 12
# If a pre-trained ResNet is required, load the weights.
# This must be done AFTER the variables are initialized with sess.run(tf.global_variables_initializer())
if init_fn is not None:
    init_fn(sess)

# Load a previous checkpoint if desired
model_checkpoint_name = "checkpoints_1/latest_model_" + args.model + "_" + args.dataset + ".ckpt"
if args.continue_training or args.mode != "train":
    saver.restore(sess, model_checkpoint_name)
    print('Loaded latest model checkpoint')

avg_scores_per_epoch = []

# Load the data
print("Loading the data ...")
train_input_names, train_output_names, val_input_names, val_output_names, test_input_names, test_output_names = helpers.prepare_data(args.dataset)

##-------------------------------------------------------------------------------------------------##
if args.mode == "train":

    print("\n***** Begin training *****")
    print("Dataset -->", args.dataset)
    print("Model -->", args.model)
    print("Crop Height -->", args.crop_height)
    print("Crop Width -->", args.crop_width)
    print("Num Epochs -->", args.num_epochs)
    print("Batch Size -->", args.batch_size)
    print("Num Classes -->", num_classes)
    print("Class Balancing -->", args.class_balancing)
    print("Learning Rate -->", args.learn_rate)
    print("")
Example no. 13
from helpers import prepare_data, graph_overfitting
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_absolute_error
import matplotlib.pyplot as plt

path = r'data\dataset_telecom_01.csv'

X, y, df, _ = prepare_data(path)
months = df.pop("Factor_03")
df['MonthlyCharges'] = np.log(df['MonthlyCharges'] + 1)
X = df.values
y_months = months.values

### Predicting after how many months a client will churn ###
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y_months,
                                                    test_size=0.4,
                                                    random_state=0)

params = {
    'n_estimators': 500,
    'max_depth': 2,
    'learning_rate': 0.01,
    'subsample': 0.5,
    'loss': 'ls'
}
reg_model = GradientBoostingRegressor(**params)

reg_model.fit(X_train, y_train)
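The example stops after fitting. Since mean_absolute_error and the hold-out split are already available in the snippet, a natural follow-up (not part of the original) is to score the regressor on the test set:

# Evaluate the fit on the 40% hold-out split created above.
y_pred = reg_model.predict(X_test)
print("MAE (months):", mean_absolute_error(y_test, y_pred))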
Example no. 14
    ]

    # Encode banking_crisis as 1 (crisis) / 0 (no_crisis)
    crises_df = crises_df.replace(
        {"banking_crisis": {
            "crisis": 1,
            "no_crisis": 0
        }})
    crises_df = crises_df[crises_df["year"] > 1957]

    # Gather all boolean crises from after 1957
    return crises_df[crises_cols + ["cc3"]], crises_df["cc3"].unique()


# Gather our dataframe and the countries we have, minus the Portuguese colonies
crises_df, ccs = prepare_data()
ccs = [cc for cc in ccs if get_colonist(cc) != "PRT"]

# Plot the bar plots that visualise the data the MWU test is performed over
plot_ranks(crises_df, ccs)

# Perform the Mann-Whitney U test for each tail
for alternative in ["less", "greater"]:
    for crisis in crises_df:
        # Skip over country code column
        if crisis == "cc3":
            continue

        fra_sample, gbr_sample = [], []
        # Gather each country's data
        for cc in ccs: