Example #1
def compare_against_rational(self, num_of_games=100):
    bot = self.p1
    rational = self.rational_bot

    bot_win = 0
    bar = progress_tracker(num_of_games, name='Playing against rational bot')

    for i in range(num_of_games):
        self.game.reset()
        done = False
        player = i  # Alternates which side moves first from game to game.

        while not done:
            if player % 2 == 0:
                if i % 2 == 0:
                    a = bot.get_move(self.game.get_board())
                else:
                    a = bot.get_move(self.game.get_inverse_player_board())
            else:
                a = rational.get_move(self.game)

            done = self.game.make_move(a)

            player += 1

        # The counter is odd after the loop iff the trained bot made the
        # final move of the game.
        if player % 2 == 1:
            bot_win += 1

        bar.iterate()

    win_rate = float(bot_win) / float(num_of_games)
    print("From compare: {}".format(win_rate))

    return win_rate
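
Every example on this page drives the same progress_tracker interface: construct it with the total number of iterations and an optional name, then call iterate() once per step. A minimal stand-in written against only that interface (the real implementation may render differently):

import sys

class progress_tracker(object):
    """Minimal sketch of the progress_tracker interface used on this page."""

    def __init__(self, total, name='Progress'):
        self.total = max(int(total), 1)
        self.name = name
        self.count = 0

    def iterate(self):
        # Advance one step and redraw a simple percentage readout.
        self.count += 1
        pct = 100.0 * self.count / self.total
        sys.stdout.write('\r{}: {:.1f}%'.format(self.name, pct))
        if self.count >= self.total:
            sys.stdout.write('\n')
        sys.stdout.flush()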
def calculateStepwise(steps, fileName, mode):
    file_path = os.path.join(get_results_path(), fileName)
    bar = progress_tracker(len(steps), name='Calculating')
    with h5py.File(file_path, 'w') as f:
        f.create_dataset('x', data=steps)
        for x in steps:
            x_group = f.create_group(str(x))
            writeStepToFile(x_group, x, mode)
            bar.iterate()
    return
def main():
    highLevelSteps = np.arange(3, 202, 2, dtype=int)
    lowLevelSteps = np.arange(22, dtype=int)
    bar = progress_tracker(len(highLevelSteps) * len(lowLevelSteps),
                           name='Submitting jobs')
    for highLevel in highLevelSteps:
        for lowLevel in lowLevelSteps:
            #condor_file = write_submit_file(highLevel, lowLevel)
            condor_file = get_commit_file_path(highLevel, lowLevel)
            subprocess.call(["condor_submit", condor_file])
            bar.iterate()
    return
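
The commented-out write_submit_file call above hints that the submit files were originally generated on the fly. A hypothetical sketch of such a helper; the executable name run_step.sh and the file layout are assumptions, only the HTCondor submit-description syntax is standard:

def write_submit_file(high_level, low_level):
    # Hypothetical helper: writes a minimal HTCondor submit description
    # and returns its path. run_step.sh is an assumed job script.
    path = 'job_{}_{}.sub'.format(high_level, low_level)
    with open(path, 'w') as f:
        f.write('executable = run_step.sh\n')
        f.write('arguments = {} {}\n'.format(high_level, low_level))
        f.write('output = job_{0}_{1}.out\n'
                'error = job_{0}_{1}.err\n'
                'log = job_{0}_{1}.log\n'.format(high_level, low_level))
        f.write('queue\n')
    return path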
Example #4
def plot_true_and_calc_partial(net,
                               data_path,
                               path,
                               net_path,
                               batch_size=32,
                               show=False,
                               net_name='N/A',
                               save_file=True):
    d_form = imp.load_source('d_form', net_path)
    with h5py.File(data_path, 'r') as data:
        te_d = data['testing']['test_data']
        te_l = data['testing']['test_labels']
        te_c = data['testing']['test_snr_calculated']

        x_pt_1 = np.zeros(len(te_d))
        x_pt_2 = np.zeros(len(te_d))
        y_pt = np.zeros(len(te_d))

        # One batch per step, including the final (possibly partial) batch.
        steps = int(np.ceil(float(len(te_d)) / batch_size))

        bar = progress_tracker(steps, name='Creating plot')

        for i in range(steps):
            lower = i * batch_size
            upper = (i + 1) * batch_size

            if upper > len(te_d):
                upper = len(te_d)

            for j in range(lower, upper):
                x_pt_1[j] = te_l[j][0]
                x_pt_2[j] = te_c[j]
                cache = net.predict(
                    d_form.format_data_segment(np.array([te_d[j]])))

                if isinstance(cache, list):
                    y_pt[j] = cache[0][0]
                else:
                    y_pt[j] = cache[0]

            bar.iterate()

    _do_plot(net_name,
             x_pt_1,
             x_pt_2,
             y_pt,
             path,
             show=show,
             save_file=save_file)
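
Note that the loop above still calls net.predict on a single sample at a time, so batch_size effectively only sets the progress-bar granularity. A self-contained sketch of a helper that batches the forward pass instead, assuming the prediction function accepts an arbitrary leading batch dimension:

import numpy as np

def predict_in_batches(predict_fn, data, batch_size=32):
    # Run predict_fn over data in chunks and stitch the results together.
    # Sketch only: assumes predict_fn maps (n, ...) arrays to (n, ...)
    # output and copes with a final partial batch.
    data = np.asarray(data)
    chunks = []
    for lower in range(0, len(data), batch_size):
        upper = min(lower + batch_size, len(data))
        chunks.append(np.asarray(predict_fn(data[lower:upper])))
    return np.concatenate(chunks, axis=0)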
def calculateStepwiseCombined(snrSteps, pSteps, fileName, mode):
    file_path = os.path.join(get_results_path(), fileName)
    bar = progress_tracker(len(snrSteps) * len(pSteps),
                           name='Calculating steps')
    with h5py.File(file_path, 'w') as f:
        f.create_dataset('snr', data=snrSteps)
        f.create_dataset('p-score', data=pSteps)
        f.create_dataset('mode', data=str(mode))
        for x in snrSteps:
            x_group = f.create_group(str(x))
            for y in pSteps:
                y_group = x_group.create_group(str(y))
                writeCombinedStepToFile(y_group, x, y, mode)
                bar.iterate()
    return
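
calculateStepwiseCombined nests one HDF5 group per SNR step, each holding one group per p-score step. A short read-back sketch assuming that layout (the contents of each leaf group depend on writeCombinedStepToFile, so only the dataset names are collected here):

import h5py

def read_stepwise_combined(file_path):
    # Walk the snr/p-score group hierarchy written above.
    results = {}
    with h5py.File(file_path, 'r') as f:
        for x in f['snr'][()]:
            for y in f['p-score'][()]:
                group = f[str(x)][str(y)]
                results[(x, y)] = list(group.keys())
    return results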
def calculate_sensitivity(self):
    model = self.model
    y_true = []
    y_pred = []
    if self.verbose:
        bar = progress_tracker(len(self.generator),
                               name='Calculating predictions')
    for i in range(len(self.generator)):
        x, y = self.generator[i]
        y_p = model.predict(x)
        y_true.append(y)
        y_pred.append(y_p)
        if self.verbose:
            bar.iterate()

    y_true_snr, y_true_prob = self._split_snr_p_val(y_true)
    y_pred_snr, y_pred_prob = self._split_snr_p_val(y_pred)

    # Note: these bins are immediately overwritten by _bin_data below.
    snr_bins = np.arange(self.bins[0], self.bins[1], self.bins[2])

    snr_bins, snr_loud = self._bin_data(y_true_prob, y_true_snr, y_pred_snr)
    prob_bins, prob_loud = self._bin_data(y_true_prob, y_true_snr, y_pred_prob)

    return list(snr_bins), float(snr_loud), list(prob_bins), float(prob_loud)
Example #7
def compare_against_random(self, num_of_games=100):
    player_one_won = 0
    bar = progress_tracker(num_of_games, name='Playing against random')

    for i in range(num_of_games):
        done = False
        self.game.reset()
        player = i  # Alternates which side moves first from game to game.
        while not done:
            if player % 2 == 0:
                if i % 2 == 0:
                    a = self.p1.get_move(self.game.get_board())
                else:
                    a = self.p1.get_move(self.game.get_inverse_player_board())
            else:
                a = self.random_bot.get_move(self.game)

            done = self.game.make_move(a)

            player += 1

        # The counter is odd after the loop iff the trained bot made the
        # final move of the game.
        if player % 2 == 1:
            player_one_won += 1

        bar.iterate()

    win_rate = float(player_one_won) / float(num_of_games)
    print("From compare: {}".format(win_rate))

    return win_rate
def generate_template(file_path,
                      num_pure_signals,
                      num_pure_noise,
                      sample_rates=[4096, 2048, 1024, 512, 256, 128, 64],
                      reduced=False,
                      **kwargs):
    # Manually set some defaults.
    if 'seed' not in kwargs:
        kwargs['seed'] = 0

    if 't_len' not in kwargs:
        kwargs['t_len'] = 96.0

    if 'f_lower' not in kwargs:
        kwargs['f_lower'] = 20.0

    if 'detectors' not in kwargs:
        kwargs['detectors'] = ['L1', 'H1']

    if 'no_gw_snr' not in kwargs:
        kwargs['no_gw_snr'] = 4.0

    parameters = generate_parameters(num_pure_signals,
                                     rand_seed=kwargs['seed'],
                                     **kwargs)
    print(parameters)

    for dic in parameters:
        dic['sample_rates'] = sample_rates

    noise_seeds = np.random.randint(0, 10**8,
                                    num_pure_noise * len(kwargs['detectors']))
    noise_seeds = noise_seeds.reshape(
        (num_pure_noise, len(kwargs['detectors'])))

    pool = mp.Pool()

    with h5py.File(file_path, 'w') as FILE:
        if reduced:
            data_shape = (num_pure_signals, 2048, 2 * (len(sample_rates) + 1))
        else:
            data_shape = (num_pure_signals, 4096, 2 * len(sample_rates))
        signals = FILE.create_group('signals')
        signal_data = signals.create_dataset('data',
                                             shape=data_shape,
                                             dtype=np.float64)
        signal_snr = signals.create_dataset('snr',
                                            shape=(num_pure_signals, ),
                                            dtype=np.float64)
        signal_bool = signals.create_dataset('bool',
                                             shape=(num_pure_signals, ),
                                             dtype=np.float64)
        if reduced:
            data_shape = (num_pure_noise, 2048, 2 * (len(sample_rates) + 1))
        else:
            data_shape = (num_pure_noise, 4096, 2 * len(sample_rates))
        noise = FILE.create_group('noise')
        noise_data = noise.create_dataset('data',
                                          shape=data_shape,
                                          dtype=np.float64)
        noise_snr = noise.create_dataset('snr',
                                         shape=(num_pure_noise, ),
                                         dtype=np.float64)
        noise_bool = noise.create_dataset('bool',
                                          shape=(num_pure_noise, ),
                                          dtype=np.float64)
        parameter_space = FILE.create_group('parameter_space')
        if 'snr' not in kwargs:
            kwargs['snr'] = [8.0, 15.0]
        for k, v in kwargs.items():
            parameter_space.create_dataset(str(k),
                                           data=np.array(v),
                                           dtype=np.array(v).dtype)

        bar = progress_tracker(num_pure_signals, name='Generating signals')

        if reduced:
            # The reduced format keeps the first half of every channel and,
            # additionally, the second half of the highest sample-rate
            # channel of each detector.
            X = np.zeros(data_shape[1:]).transpose()
            for i, dat in enumerate(
                    pool.imap_unordered(signal_worker, parameters)):
                tmp_dat = dat[0].transpose()
                for j in range(len(sample_rates) + 1):
                    if j == 0:
                        X[j] = tmp_dat[j][2048:]
                        X[j + len(sample_rates) +
                          1] = tmp_dat[j + len(sample_rates)][2048:]
                    else:
                        X[j] = tmp_dat[j - 1][:2048]
                        X[j + len(sample_rates) +
                          1] = tmp_dat[(j - 1) + len(sample_rates)][:2048]
                signal_data[i] = X.transpose()
                signal_snr[i] = dat[1]
                signal_bool[i] = 1.0

                bar.iterate()
        else:
            for i, dat in enumerate(
                    pool.imap_unordered(signal_worker, parameters)):
                signal_data[i] = dat[0]
                signal_snr[i] = dat[1]
                signal_bool[i] = 1.0

                bar.iterate()

        bar = progress_tracker(num_pure_noise, name='Generating noise')

        if reduced:
            # Same channel rearrangement as for the signals above.
            X = np.zeros(data_shape[1:]).transpose()
            for i, dat in enumerate(
                    pool.imap_unordered(
                        noise_worker,
                        [(kwargs['t_len'], kwargs['f_lower'],
                          1.0 / max(sample_rates), len(kwargs['detectors']),
                          noise_seeds[i], sample_rates)
                         for i in range(num_pure_noise)])):
                tmp_dat = dat[0].transpose()
                for j in range(len(sample_rates) + 1):
                    if j == 0:
                        X[j] = tmp_dat[j][2048:]
                        X[j + len(sample_rates) +
                          1] = tmp_dat[j + len(sample_rates)][2048:]
                    else:
                        X[j] = tmp_dat[j - 1][:2048]
                        X[j + len(sample_rates) +
                          1] = tmp_dat[(j - 1) + len(sample_rates)][:2048]
                noise_data[i] = X.transpose()
                noise_snr[i] = kwargs['no_gw_snr']
                noise_bool[i] = 0.0

                bar.iterate()
        else:
            for i, dat in enumerate(
                    pool.imap_unordered(
                        noise_worker,
                        [(kwargs['t_len'], kwargs['f_lower'],
                          1.0 / max(sample_rates), len(kwargs['detectors']),
                          noise_seeds[i], sample_rates)
                         for i in range(num_pure_noise)])):
                noise_data[i] = dat
                noise_snr[i] = kwargs['no_gw_snr']
                noise_bool[i] = 0.0

                bar.iterate()

    pool.close()
    pool.join()
    return file_path
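
A sketch for loading a file produced by generate_template, assuming the signals/noise/parameter_space layout written above:

import h5py

def load_template_file(file_path):
    # Pull the signal and noise arrays plus the stored parameter space
    # back into plain dictionaries.
    with h5py.File(file_path, 'r') as f:
        signals = {k: f['signals'][k][()] for k in ('data', 'snr', 'bool')}
        noise = {k: f['noise'][k][()] for k in ('data', 'snr', 'bool')}
        params = {k: v[()] for k, v in f['parameter_space'].items()}
    return signals, noise, params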
def create_file(name, **kwargs):
    #print("Hello world!")
    #wav_arg = {}
    #opt_arg = {}

    ##Properties the payload function needs
    ##Properties for the waveform itself
    #wav_arg['approximant'] = "SEOBNRv4_opt"
    #wav_arg['mass1'] = 30.0
    #wav_arg['mass2'] = 30.0
    #wav_arg['delta_t'] = 1.0 / 4096
    #wav_arg['f_lower'] = 20.0
    #wav_arg['coa_phase'] = [0., 2 * np.pi]
    #wav_arg['distance'] = 1.0

    ##Properties for handling the process of generating the waveform
    #wav_arg['snr'] = [6.0, 15.0]
    #wav_arg['gw_prob'] = 1.0
    #wav_arg['random_starting_time'] = True
    #wav_arg['time_offset'] = [-0.5, 0.5]
    #wav_arg['resample_delta_t'] = 1.0 / 1024
    #wav_arg['t_len'] = 64.0
    #wav_arg['resample_t_len'] = 4.0
    #wav_arg['whiten_len'] = 4.0
    #wav_arg['whiten_cutoff'] = 4.0

    ##Skyposition
    #wav_arg['end_time'] = 1337 * 137 * 42
    #wav_arg['declination'] = 0.0
    #wav_arg['right_ascension'] = 0.0
    #wav_arg['polarization'] = 0.0
    #wav_arg['detectors'] = ['L1', 'H1']

    wav_arg, opt_arg = make_template_bank_defaults()
    """
    #These are just here to remember the values each one of these can take
    wav_arg['declination'] = [-np.pi / 2, np.pi / 2]
    wav_arg['right_ascension'] = [-np.pi, np.pi]
    wav_arg['polarization'] = [0.0, np.pi]
    """

    wav_arg, kwargs = filter_keys(wav_arg, kwargs)

    for key, val in wav_arg.items():
        # Collapse list-valued parameters (except mode_array) to their range.
        if key != 'mode_array' and isinstance(val, list):
            wav_arg[key] = [min(val), max(val)]

    #Properties for the generating program
    #opt_arg['num_of_templates'] = 20000
    #opt_arg['seed'] = 12345
    #opt_arg['train_to_test'] = 0.7
    #opt_arg['path'] = ""
    #opt_arg['data_shape'] = (int(1.0 / wav_arg['delta_t']),)
    #opt_arg['label_shape'] = (1,)

    opt_arg, kwargs = filter_keys(opt_arg, kwargs)

    num_of_templates = opt_arg['num_of_templates']

    kwargs.update(wav_arg)

    seed(opt_arg['seed'])

    parameter_list = generate_parameters(num_of_templates, opt_arg['seed'],
                                         **kwargs)

    file_name = os.path.join(opt_arg['path'], name + '.hf5')

    pool = mp.Pool()

    prop_dict = {}
    prop_dict.update(wav_arg)
    prop_dict.update(opt_arg)

    split_index = int(round(opt_arg['train_to_test'] * num_of_templates))
    #train_snr = np.array([pt['snr'] for pt in parameter_list[:split_index]])
    #test_snr = np.array([pt['snr'] for pt in parameter_list[split_index:]])

    tmp_sample = worker(parameter_list[0])

    #print("Pre file")

    #for idx, dat in enumerate(pool.imap_unordered(worker, parameter_list)):
    #print(idx)

    with h5py.File(file_name, 'w') as output:
        training = output.create_group('training')
        testing = output.create_group('testing')
        psd = output.create_group('psd')
        parameter_space = output.create_group('parameter_space')
        train_parameters = training.create_group('parameters')
        test_parameters = testing.create_group('parameters')

        gen_psd = generate_psd(**kwargs)
        #Is np.float64 enough?
        psd.create_dataset('data', data=np.array(gen_psd), dtype=np.float64)
        psd.create_dataset('delta_f', data=gen_psd.delta_f, dtype=np.float64)

        for key, val in prop_dict.items():
            parameter_space.create_dataset(str(key),
                                           data=np.array(val),
                                           dtype=np.array(val).dtype)

        print((tmp_sample[0]).shape)
        print(
            (split_index, (tmp_sample[0]).shape[0], (tmp_sample[0]).shape[1]))
        #Assumes the data to be in shape (time_samples, 1)
        train_data = training.create_dataset('train_data',
                                             shape=(split_index,
                                                    (tmp_sample[0]).shape[0],
                                                    (tmp_sample[0]).shape[1]),
                                             dtype=tmp_sample[0].dtype)

        #Assumes the data to be in shape ()
        train_snr_calculated = training.create_dataset(
            'train_snr_calculated',
            shape=(split_index, ),
            dtype=tmp_sample[2].dtype)

        #Needs the SNR to be a single number. This has to be returned as the
        #second entry and as a numpy array of shape '()'
        train_labels = training.create_dataset('train_labels',
                                               shape=(split_index,
                                                      len(tmp_sample[1])),
                                               dtype=tmp_sample[1].dtype)

        #Assumes the shape () for the provided data
        train_wav_parameters = train_parameters.create_dataset(
            'wav_parameters', shape=(split_index, ), dtype=tmp_sample[3].dtype)
        train_ext_parameters = train_parameters.create_dataset(
            'ext_parameters', shape=(split_index, ), dtype=tmp_sample[4].dtype)

        #Assumes the data to be in shape (time_samples, 1)
        test_data = testing.create_dataset(
            'test_data',
            shape=(num_of_templates - split_index, (tmp_sample[0]).shape[0],
                   (tmp_sample[0]).shape[1]),
            dtype=tmp_sample[0].dtype)

        #Assumes the data to be in shape ()
        test_snr_calculated = testing.create_dataset('test_snr_calculated',
                                                     shape=(num_of_templates -
                                                            split_index, ),
                                                     dtype=tmp_sample[2].dtype)

        #Needs the SNR to be a single number. This has to be returned as the
        #second entry and as a numpy array of shape '()'
        test_labels = testing.create_dataset(
            'test_labels',
            shape=(num_of_templates - split_index, len(tmp_sample[1])),
            dtype=tmp_sample[1].dtype)

        #Assumes the shape () for the provided data
        test_wav_parameters = test_parameters.create_dataset(
            'wav_parameters',
            shape=(num_of_templates - split_index, ),
            dtype=tmp_sample[3].dtype)
        test_ext_parameters = test_parameters.create_dataset(
            'ext_parameters',
            shape=(num_of_templates - split_index, ),
            dtype=tmp_sample[4].dtype)

        #print("Pre pool")

        #pool = mp.Pool()

        #print("Number of workers: {}".format(pool._processes))

        #print("Pre loop")
        #print("Parameter List: {}".format(parameter_list))

        bar = progress_tracker(num_of_templates, name='Generating templates')

        for idx, dat in enumerate(pool.imap_unordered(worker, parameter_list)):
            if idx < split_index:
                #write to training
                i = idx
                train_data[i] = dat[0]
                train_labels[i] = dat[1]
                train_snr_calculated[i] = dat[2]
                train_wav_parameters[i] = dat[3]
                train_ext_parameters[i] = dat[4]
            else:
                # Write to testing: map idx into
                # [0, num_of_templates - split_index).
                i = idx - split_index
                test_data[i] = dat[0]
                test_labels[i] = dat[1]
                test_snr_calculated[i] = dat[2]
                test_wav_parameters[i] = dat[3]
                test_ext_parameters[i] = dat[4]

            bar.iterate()

        #print("Closing parallel")

        pool.close()
        pool.join()
        return
    FILE.create_dataset('threshold_p-value', data=float(bool_threshold))

    hf5_snr_ts = FILE.create_group('snrTimeSeries')
    hf5_snr_ts.create_dataset('data', data=snr_ts.data[:])
    hf5_snr_ts.create_dataset('sample_times', data=snr_ts.sample_times[:])
    hf5_snr_ts.create_dataset('delta_t', data=snr_ts.delta_t)
    hf5_snr_ts.create_dataset('epoch', data=float(snr_ts._epoch))

    hf5_bool_ts = FILE.create_group('p-valueTimeSeries')
    hf5_bool_ts.create_dataset('data', data=bool_ts.data[:])
    hf5_bool_ts.create_dataset('sample_times', data=bool_ts.sample_times[:])
    hf5_bool_ts.create_dataset('delta_t', data=bool_ts.delta_t)
    hf5_bool_ts.create_dataset('epoch', data=float(bool_ts._epoch))

#Compute SNR triggers
bar = progress_tracker(len(snr_ts), name='Generating SNR triggers')
snr_val = []
snr_time = []
for i in range(len(snr_ts)):
    if snr_ts[i] > snr_threshold:
        snr_val.append(snr_ts[i])
        snr_time.append(snr_ts.sample_times[i])
    bar.iterate()

snr_val = np.array(snr_val)
snr_time = np.array(snr_time)
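
The same trigger selection can be done with a single boolean mask instead of the element-wise loop; a sketch using the snr_ts and snr_threshold defined above, assuming data and sample_times expose numpy-compatible arrays:

mask = np.asarray(snr_ts.data) > snr_threshold
snr_val = np.asarray(snr_ts.data)[mask]
snr_time = np.asarray(snr_ts.sample_times)[mask]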

#Compute p-value triggers
bar = progress_tracker(len(bool_ts), name='Generating p-value triggers')
bool_val = []
bool_time = []
Example #11
def evaluate_ts(ts,
                net_path,
                time_step=0.25,
                preemptive_whiten=False,
                whiten_len=4.,
                whiten_crop=4.):
    net = keras.models.load_model(net_path)

    if preemptive_whiten:
        for i in range(len(ts)):
            ts[i] = ts[i].whiten(whiten_len,
                                 whiten_crop,
                                 low_frequency_cutoff=20.0)

    mp_arr = mp.Array(c.c_double, len(ts) * (len(ts[0]) + 2))

    cache = tonumpyarray(mp_arr)

    numpy_array = cache.reshape((len(ts), len(ts[0]) + 2))

    for idx, d in enumerate(ts):
        numpy_array[idx][:len(d)] = d.data[:]
        numpy_array[idx][-2] = d.delta_t
        numpy_array[idx][-1] = d.start_time

    aux_info = mp.Array(c.c_double, 5)

    aux_info[0] = len(ts)
    aux_info[1] = len(ts[0]) + 2
    aux_info[2] = 1 if preemptive_whiten else 0
    aux_info[3] = whiten_len
    aux_info[4] = whiten_crop

    time_shift_back = ts[0].duration - (64.0 if preemptive_whiten else
                                        (64.0 + whiten_crop))

    # Slice start offsets, stepping backwards through the time series.
    indexes = np.arange(time_shift_back, 0.0, -time_step)

    inp = []

    bar = progress_tracker(len(indexes), name='Generating slices')

    with closing(mp.Pool(initializer=init,
                         initargs=(mp_arr, aux_info))) as pool:
        for idx, l in enumerate(pool.imap(get_slice, indexes)):
            inp.append(l)
            bar.iterate()
    pool.join()

    #print("Inp")

    #print(inp)

    inp = np.array(inp)

    inp = inp.transpose((1, 0, 2))

    # The network expects 14 two-channel inputs; each pairs slice i with
    # slice i + 14.
    real_inp = [np.zeros((2, inp.shape[1], inp.shape[2])) for i in range(14)]

    for i in range(14):
        real_inp[i][0] = inp[i]
        real_inp[i][1] = inp[i + 14]
        real_inp[i] = real_inp[i].transpose(1, 2, 0)

    true_pred = net.predict(real_inp, verbose=1)

    snrs = list(true_pred[0].flatten())
    bools = [pt[0] for pt in true_pred[1]]

    snr_ts = TimeSeries(snrs, delta_t=time_step)
    bool_ts = TimeSeries(bools, delta_t=time_step)
    snr_ts.start_time = ts[0].start_time + (64.0 if preemptive_whiten else
                                            (64.0 + whiten_crop / 2.0))
    bool_ts.start_time = ts[0].start_time + (64.0 if preemptive_whiten else
                                             (64.0 + whiten_crop / 2.0))

    print(snr_ts.sample_times)

    return snr_ts.copy(), bool_ts.copy()
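
A hedged usage sketch for evaluate_ts; load_strain is a placeholder for however the input channels are obtained, and the model path is hypothetical:

# Hypothetical usage: `channels` must be an indexable collection of
# equal-length pycbc TimeSeries, as evaluate_ts assumes above.
channels = load_strain()  # placeholder, not defined in this example
snr_ts, bool_ts = evaluate_ts(channels,
                              'networks/best_model.hf5',  # assumed path
                              time_step=0.25,
                              preemptive_whiten=True)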