Code Example #1
File: predict35minutes.py  Project: shiji203/DeepRain
import sys

import sample_bundle  # project-local DeepRain module providing load_Sample_Bundle


def generate_Data_5_7(path, sum=7500, invalid_value=80):
    # Collect the yearly sample bundles 2004-2017, clean them by sum and
    # normalize them; returns (trainbundles, testbundle).
    trainbundles = []
    testbundle = None
    print("generiere Samples von 2004-2017 ...")
    for year in range(2004, 2018):
        sb = sample_bundle.load_Sample_Bundle(path.format(year))
        sys.stdout.write(" clear")
        sys.stdout.flush()
        b = sb.clear_by_sum(sum)  # 2017 hat anderen Grenzwert
        #plt.hist(b,bins=100)
        #plt.show()
        #sb.replace_borders(invalid_value)   #entferne Radarfreie Zonen
        sys.stdout.write(" normalize")
        sys.stdout.flush()
        sb.normalize()  # normalisieren zwischen [0;1]

        sys.stdout.write("\r|{}{}|".format('##' * (year - 2003),
                                           '  ' * (13 - (year - 2003))))
        sys.stdout.flush()

        trainbundles.append(sb)

    print("\tfinished")
    return trainbundles, testbundle
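
A minimal usage sketch (not part of the original snippet): generate_Data_5_7 calls path.format(year), so path must be a format string with a "{}" placeholder for the year; also note that testbundle is never assigned inside the loop, so it is always returned as None. The bundle path template below is hypothetical.

# Hypothetical path template; "{}" is filled with each year from 2004 to 2017.
trainbundles, testbundle = generate_Data_5_7(
    "samplebundles/RegenTage{}_5_7", sum=7500, invalid_value=80)
print(len(trainbundles), "year bundles loaded; testbundle =", testbundle)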
Code Example #2
def default_values_UNet64_output_2016():
    learningcurve = [
        0.0115894, 0.00958822, 0.00796455, 0.00732153, 0.00727009, 0.00711318,
        0.00692073, 0.00675846, 0.00665379, 0.00645629, 0.00637198, 0.00650981,
        0.0064181, 0.00635487, 0.00624361, 0.00618663, 0.00612047, 0.00611733,
        0.00620965, 0.00614361, 0.00616507, 0.00612534, 0.00606998, 0.00603275,
        0.00605507, 0.00602762, 0.00608261, 0.00606057, 0.00606904, 0.00604447,
        0.00601669, 0.00601813, 0.00598835, 0.00599038, 0.00603436, 0.00598364,
        0.00598707, 0.00600294, 0.00601596, 0.00600256, 0.0059786, 0.00595138,
        0.00595035, 0.00595839, 0.00597264, 0.00595936, 0.00598617, 0.00596159,
        0.00599627, 0.00592671, 0.00591463, 0.00597174, 0.00601303, 0.00591218,
        0.00599138, 0.00586511, 0.00585967, 0.0058672, 0.00586569, 0.00587476,
        0.00590325, 0.00586857, 0.00587302, 0.00583622, 0.00586059, 0.00585119,
        0.00583102, 0.00580262, 0.00583791, 0.00588642, 0.00589478, 0.00580198,
        0.00578195, 0.00577411, 0.00583903, 0.00592595, 0.00573809, 0.00575651,
        0.00580867, 0.0057976, 0.00574949, 0.00574277, 0.00572543, 0.0057714,
        0.00575325, 0.00593839, 0.00577013, 0.00578963, 0.00575294, 0.005784,
        0.00582843, 0.0057558, 0.00588208, 0.00589379, 0.00601915, 0.00569832,
        0.00571483, 0.00572478, 0.00578767, 0.00569442
    ]

    prediction_loss = [
        1.86470236e-06, 2.8264612e-06, 2.57799939e-06, 2.24289355e-05,
        0.00015157, 0.00026531, 0.00033583, 0.00037115, 0.0004648, 0.0007185,
        0.00069467, 0.00075835, 0.00068323, 0.00084506, 0.00081046, 0.00067252,
        0.00044094, 0.00037288, 0.00024517, 0.00016385, 0.00019954, 0.00014589,
        9.01579817e-05, 2.38962804e-09, 8.98763642e-07, 1.22362092e-05,
        3.10096528e-05, 3.37138407e-06, 2.8856611e-06, 5.61989257e-05,
        1.71629722e-05, 5.08496547e-09, 4.94259982e-06, 3.37298162e-05,
        8.39688232e-05, 0.00017017, 0.00014647, 0.00017797, 0.00022459,
        0.00023736, 0.0002829, 0.00024595, 0.00019492, 5.99015221e-05,
        3.99775451e-05, 4.63488214e-05, 9.46813496e-06, 2.8748552e-05,
        7.44204943e-05, 5.18611243e-06
    ]

    baseline_loss = [
        7.396494136870434e-07, 2.2677575932333715e-06, 3.161344194540561e-06,
        3.26459474721261e-05, 0.0002704300929930796, 0.0005964502835447904,
        0.0006755326977604766, 0.0008103178465013457, 0.001028285395040369,
        0.0014358960495963091, 0.0013702099252691272, 0.0013215620194156094,
        0.0014910693903787005, 0.0014552545895809305, 0.0016509913554882737,
        0.001446596561418685, 0.0009419228782199154, 0.0006850129757785468,
        0.0004638991013071895, 0.000308084630911188, 0.0003598450716070742,
        0.000374457840734333, 0.00034752258746635915, 0.0, 4.8997080449827e-06,
        1.4049584294502115e-05, 8.270181540753557e-05, 7.411512399077278e-06,
        3.195135284505959e-06, 4.742391748366013e-05, 5.453881920415225e-05,
        1.87728277585544e-06, 9.476523452518261e-06, 5.482041162053057e-05,
        0.00013154871323529412, 0.0002496222907054979, 0.00020320459678969626,
        0.00028636822376009233, 0.00028379259179161864, 0.0003579677888312187,
        0.0004275173611111111, 0.00031476400302768166, 0.00024243229767397157,
        8.324622741253364e-05, 5.39718798058439e-05, 8.502964604959632e-05,
        1.7590139609765473e-05, 5.9633764657823906e-05, 0.00017642328070934256,
        8.902074923106497e-06
    ]
    statistical_evaluation_lr_baseline(baseline_loss, prediction_loss,
                                       learningcurve)
    samplebundle = sample_bundle.load_Sample_Bundle("RegenTage2016")
    samplebundle.normalize()
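    # Overlay line segments, each stored as ((x-coordinates), (y-coordinates, colour))
    # and drawn over the prediction image below via plt.plot(line[0], *line[1]).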
    blul = (([0, 63]), ([35, 35], "blueviolet"))
    purl = (([0, 63]), ([41, 41], "purple"))
    skyl = (([3, 32]), ([35, 17], "skyblue"))
    orl = (([32, 32]), ([0, 63], "orange"))

    net_model = load_model(
        "C:\\Users\\TopSecret!\\Documents\\aMSI1\\Teamprojekt\\DeepRain\\NetworkTypes\\UNet64_output3x3\\UNet64_outputexpansed_2016_100.h5",
        verbose=False)
    data, label = samplebundle.get_all_data_label(channels_Last=True)
    prediction = net_model.predict(np.expand_dims(data[14], axis=0))
    plt.imshow(prediction.reshape((64, 64)).T, cmap="gray", vmax=0.85)
    plt.plot(blul[0], *blul[1])
    plt.plot(purl[0], *purl[1])
    plt.plot(skyl[0], *skyl[1])
    plt.plot(orl[0], *orl[1])
    plt.title("prediction")
    plt.show()

    return
Code Example #3
def default_values_UNet64_2016():
    learningcurve = [
        0.0125825, 0.01090752, 0.00996938, 0.00877155, 0.00785926, 0.00749076,
        0.00738954, 0.00710216, 0.00695553, 0.00685954, 0.00685263, 0.00683551,
        0.0067013, 0.00669573, 0.00663494, 0.0065361, 0.00656253, 0.00655892,
        0.00641627, 0.00643257, 0.00642143, 0.00643228, 0.00638859, 0.00635681,
        0.00639316, 0.00631695, 0.0062926, 0.00633417, 0.00622689, 0.00616801,
        0.00625138, 0.00621941, 0.00615199, 0.00620083, 0.00616311, 0.00617193,
        0.00621265, 0.00614376, 0.0061507, 0.00614031, 0.00610141, 0.00613152,
        0.00613031, 0.00610272, 0.00609479, 0.0061553, 0.0061067, 0.00605778,
        0.00606069, 0.00603255, 0.00605208, 0.00601227, 0.00600995, 0.00601041,
        0.00601996, 0.0060082, 0.00602065, 0.00619004, 0.00603388, 0.00603091,
        0.00607781, 0.00611581, 0.00606192, 0.00599001, 0.00598812, 0.00596377,
        0.00595005, 0.00597897, 0.00595829, 0.00594891, 0.00595861, 0.00597969,
        0.00591869, 0.00593133, 0.00592642, 0.00592538, 0.00590671, 0.00598633,
        0.0060014, 0.00590481, 0.00590337, 0.00598106, 0.00590425, 0.00589903,
        0.00587542, 0.00588902, 0.0059049, 0.00590722, 0.00590535, 0.00589348,
        0.00586809, 0.00586199, 0.00587707, 0.00585575, 0.00584643, 0.00583368,
        0.00580768, 0.00581735, 0.00581284, 0.00581762
    ]

    prediction_loss = [
        2.05482041e-06, 2.16494586e-06, 2.3332747e-06, 2.07652893e-05,
        0.00015295, 0.00028465, 0.00031767, 0.00037503, 0.00047629, 0.00070817,
        0.00069556, 0.00071058, 0.00068717, 0.00085751, 0.00080836, 0.00068803,
        0.0004593, 0.00038036, 0.00023775, 0.00016827, 0.00018715, 0.0001683,
        9.01244588e-05, 1.11477099e-08, 6.33360328e-07, 1.02075804e-05,
        3.32653464e-05, 3.32339511e-06, 2.88497277e-06, 5.62943246e-05,
        1.4668768e-05, 5.08214763e-08, 5.38758653e-06, 3.22183944e-05,
        8.69566071e-05, 0.00017487, 0.00013233, 0.00017856, 0.00021881,
        0.0002324, 0.00027731, 0.00024965, 0.00020651, 5.31775661e-05,
        4.04651739e-05, 4.03603105e-05, 1.04818476e-05, 2.91035737e-05,
        7.37178008e-05, 5.13091154e-06
    ]

    baseline_loss = [
        7.396494136870434e-07, 2.2677575932333715e-06, 3.161344194540561e-06,
        3.26459474721261e-05, 0.0002704300929930796, 0.0005964502835447904,
        0.0006755326977604766, 0.0008103178465013457, 0.001028285395040369,
        0.0014358960495963091, 0.0013702099252691272, 0.0013215620194156094,
        0.0014910693903787005, 0.0014552545895809305, 0.0016509913554882737,
        0.001446596561418685, 0.0009419228782199154, 0.0006850129757785468,
        0.0004638991013071895, 0.000308084630911188, 0.0003598450716070742,
        0.000374457840734333, 0.00034752258746635915, 0.0, 4.8997080449827e-06,
        1.4049584294502115e-05, 8.270181540753557e-05, 7.411512399077278e-06,
        3.195135284505959e-06, 4.742391748366013e-05, 5.453881920415225e-05,
        1.87728277585544e-06, 9.476523452518261e-06, 5.482041162053057e-05,
        0.00013154871323529412, 0.0002496222907054979, 0.00020320459678969626,
        0.00028636822376009233, 0.00028379259179161864, 0.0003579677888312187,
        0.0004275173611111111, 0.00031476400302768166, 0.00024243229767397157,
        8.324622741253364e-05, 5.39718798058439e-05, 8.502964604959632e-05,
        1.7590139609765473e-05, 5.9633764657823906e-05, 0.00017642328070934256,
        8.902074923106497e-06
    ]
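
    # NOTE: the lists below re-assign learningcurve, prediction_loss and
    # baseline_loss, overriding the values defined above.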

    learningcurve = [
        0.00876916, 0.00763574, 0.00775013, 0.00728725, 0.00706771, 0.00690446,
        0.00679859, 0.00685016, 0.0083208, 0.00674477, 0.00669052, 0.0065954,
        0.00673142, 0.00698619, 0.00660383, 0.00655915, 0.00652131, 0.00663586,
        0.00654554, 0.00680054, 0.00727, 0.00677988, 0.00748705, 0.00646014,
        0.00644975, 0.00646124, 0.00648091, 0.00649791, 0.0063754, 0.0064263,
        0.00636619, 0.00636916, 0.00642287, 0.00659642, 0.00636527, 0.006555,
        0.00633962, 0.00630069, 0.00631354, 0.0062959, 0.00634053, 0.00628647,
        0.00631652, 0.00621621, 0.00622429, 0.00662466, 0.00626726, 0.00645528,
        0.00620124, 0.00660065, 0.00620791, 0.00617543, 0.00616647, 0.00620594,
        0.00616584, 0.00615409, 0.0061273, 0.0061772, 0.00614339, 0.0061334,
        0.00616816, 0.00613709, 0.00609759, 0.00638806, 0.00613235, 0.00612941,
        0.00612209, 0.00615991, 0.006145, 0.00615401, 0.00635393, 0.00611096,
        0.00615352, 0.0060822, 0.00610717, 0.00606298, 0.00606257, 0.00604071,
        0.00606661, 0.00603798
    ]
    prediction_loss = [
        1.71875397e-06, 1.65552961e-06, 2.51839882e-06, 2.19947249e-05,
        0.00014833, 0.00027089, 0.00031949, 0.00036634, 0.00046509, 0.00072178,
        0.00068672, 0.00071651, 0.00068261, 0.00081278, 0.0008001, 0.00070923,
        0.00047983, 0.00037889, 0.00024699, 0.00016343, 0.00020333, 0.00018502,
        0.00010807, 4.36043915e-07, 6.65378881e-07, 1.43982297e-05,
        3.11619439e-05, 3.33887501e-06, 2.96609177e-06, 5.79021047e-05,
        1.96858796e-05, 1.84167894e-07, 4.46581796e-06, 2.94263092e-05,
        8.62778783e-05, 0.00016357, 0.0001413, 0.00017918, 0.00021462,
        0.00023868, 0.00028542, 0.00024291, 0.00019439, 5.60679626e-05,
        4.105311e-05, 4.99470187e-05, 1.29807418e-05, 3.14061303e-05,
        8.14463202e-05, 5.68513118e-06
    ]
    baseline_loss = [
        7.396494136870434e-07, 2.2677575932333715e-06, 3.161344194540561e-06,
        3.26459474721261e-05, 0.0002704300929930796, 0.0005964502835447904,
        0.0006755326977604766, 0.0008103178465013457, 0.001028285395040369,
        0.0014358960495963091, 0.0013702099252691272, 0.0013215620194156094,
        0.0014910693903787005, 0.0014552545895809305, 0.0016509913554882737,
        0.001446596561418685, 0.0009419228782199154, 0.0006850129757785468,
        0.0004638991013071895, 0.000308084630911188, 0.0003598450716070742,
        0.000374457840734333, 0.00034752258746635915, 0.0, 4.8997080449827e-06,
        1.4049584294502115e-05, 8.270181540753557e-05, 7.411512399077278e-06,
        3.195135284505959e-06, 4.742391748366013e-05, 5.453881920415225e-05,
        1.87728277585544e-06, 9.476523452518261e-06, 5.482041162053057e-05,
        0.00013154871323529412, 0.0002496222907054979, 0.00020320459678969626,
        0.00028636822376009233, 0.00028379259179161864, 0.0003579677888312187,
        0.0004275173611111111, 0.00031476400302768166, 0.00024243229767397157,
        8.324622741253364e-05, 5.39718798058439e-05, 8.502964604959632e-05,
        1.7590139609765473e-05, 5.9633764657823906e-05, 0.00017642328070934256,
        8.902074923106497e-06
    ]

    statistical_evaluation_lr_baseline(baseline_loss, prediction_loss,
                                       learningcurve)
    samplebundle = sample_bundle.load_Sample_Bundle("RegenTage2016")
    samplebundle.normalize()
    blul = (([0, 63]), ([35, 35], "blueviolet"))
    purl = (([0, 63]), ([41, 41], "purple"))
    skyl = (([3, 32]), ([35, 17], "skyblue"))
    orl = (([32, 32]), ([0, 63], "orange"))
    display_one_img(
        samplebundle,
        14, [[], [], [], [], [skyl, purl, blul, orl], [skyl, purl, blul, orl]],
        vmax=0.85)

    # net_model = load_model("C:\\Users\\TopSecret!\\Documents\\aMSI1\\Teamprojekt\\DeepRain\\NetworkTypes\\UNet64\\UNet64_2016_100.h5", verbose=False)
    net_model = load_model(
        "C:\\Users\\TopSecret!\\Documents\\aMSI1\\Teamprojekt\\DeepRain\\NetworkTypes\\UNet64_sigmoid_tanh_2016_80.h5",
        verbose=False)

    data, label = samplebundle.get_all_data_label(channels_Last=True)
    prediction = net_model.predict(np.expand_dims(data[14], axis=0))
    plt.imshow(prediction.reshape((64, 64)).T, cmap="gray", vmax=0.85)
    plt.plot(blul[0], *blul[1])
    plt.plot(purl[0], *purl[1])
    plt.plot(skyl[0], *skyl[1])
    plt.plot(orl[0], *orl[1])
    plt.title("prediction")
    plt.show()
    return
Code Example #4
        plt.legend()
        plt.savefig(logfilepath + '.png')
        plt.show()

    return learning_curve, learning_curve_traindata


if __name__ == '__main__':
    loadAndEval = True

    val_curve, train_curve = eval_trainlogfile(
        "C:\\Users\\TopSecret!\\Documents\\aMSI1\\Teamprojekt\\DeepRain\\Final_Networks\\trainphase.log",
        plot=True)

    if loadAndEval:
        sb = sample_bundle.load_Sample_Bundle("RegenTage2016")
        sb.normalize()
        data, label = sb.get_all_data_label(channels_Last=True,
                                            flatten_output=True)
        test_samples = data[:50]
        test_labels = label[:50]
        train_s = data[50:]
        train_l = label[50:]  # likely intended: labels, not data (snippet had data[50:])
        lcd = data[896]
        lcl = label[896]
        path = "C:\\Users\\TopSecret!\\Documents\\aMSI1\\Teamprojekt\\DeepRain\\NetworkTypes\\UNet64\\UNet64_2016_"
        load_and_eval_network(plotit=True,
                              testdata=test_samples,
                              testlabel=test_labels,
                              learncurvesample=lcd,
                              learncurvelabel=lcl,
Code Example #5
### compare scaling factors from 2016
    #print("starting evaluation:")
    #dataset = ["TestData2016", "TestData2016EDIT", "TestData2016MAL20", "RegenTage2017_5_7_kn_centered"]
    #for d in dataset:
    #    print("\n",d)
    #    sb = sample_bundle.load_Sample_Bundle(d)
    #    #print(sb.info())
    #    all_m = quick_eval(sb, get_all_max=True)
    #    plt.figure(d)
    #    plt.title(d)
    #    plt.hist(all_m, bins=25, log=True)
    #plt.show()


## Single-image evaluation:
    sb = sample_bundle.load_Sample_Bundle("samplebundles\\2008_5in_7out_64x64_without_border")
    print(sb.info())
    b = sb.clear_by_sum(76100)

    dl = sb.get_item_at(1)
    plt.imshow(dl[0][:,:,0])
    plt.show()
    sb.replace_borders(80)

    dl = sb.get_item_at(1)
    plt.imshow(dl[0][:,:,0])
    plt.show()

    #plt.hist(b, bins=100)
    #plt.show()
    print(sb.info())
Code Example #6
    ## UNet to predict 1 timestep (5min)
    #model = tfM.UNet64_sigmoid_tanh(input_shape)
    #sb = sample_bundle.load_Sample_Bundle("C:/Users/TopSecret!/Documents/aMSI1/Teamprojekt/DeepRain/Data/RegenTage2016")
    #print(sb.info())
    #train_realdata(model, sb, n_epoch=80, savename="UNet64_sigmoid_tanh_2016", channelsLast=True, use_logfile=True,
    #               load_last_state=True)
    ## UNet to predict 2 timesteps (10min)

    # two timesteps:
    #model = tfM.UNet64x2(input_shape)
    #sb = sample_bundle.load_Sample_Bundle("C:/Users/TopSecret!/Documents/aMSI1/Teamprojekt/DeepRain/Data/RegenTage2016_5_2")
    # one timestep
    model = tfM.UNet64(input_shape=(64, 64, 5),
                       lossfunction=tfL.mean_squared_error_kopie)
    sb = sample_bundle.load_Sample_Bundle(
        "C:/Users/TopSecret!/Documents/aMSI1/Teamprojekt/DeepRain/Data/RegenTage2016"
    )

    print(sb.info())
    train_realdata(model,
                   sb,
                   n_epoch=10,
                   savename="UNet64",
                   channelsLast=True,
                   use_logfile=True,
                   load_last_state=True,
                   n_testsamples=365,
                   prediction_shape=(64, 64),
                   PREDICTION_IMG_ID=6)
    # eval_trainingsphase(model, n_epoch=100, diffToLabel=DIFF_TO_LABEL, n_train=1000,
    #                    savename="twoUPSamplings", channelsLast=True, n_inputs=N_INPUTS, use_logfile=True, load_last_state=True)