示例#1
0
def mainloop(refresh=None, selenium=None) -> None:
    """Run one full scrape/plot cycle: fetch fleet positions, update each
    boat's track, then print a summary and regenerate the plot + GeoJSON.

    :param refresh: If true, use only links in config.json.
    :param selenium: Uses legacy selenium backend
    """
    with open('config.json') as json_file:
        CONFIG = json.load(json_file)

    # Legacy selenium backend is kept available as an opt-in fallback.
    marinetraffic_instance = ScrapeSelenium() if selenium else Scrape()

    logger.info(f'starting run {datetime.datetime.now()}')

    handle_rankings()

    with open('src/data.json') as json_file:
        datafile = json.load(json_file)

    # list(CONFIG) replaces the redundant [item for item in CONFIG] copy.
    boats_to_retrieve = list(CONFIG)
    fleet_positions = marinetraffic_instance.request_marinetrafic_url(
        'https://www.marinetraffic.com/en/ais/home/centerx:147.6/centery:-53.4/zoom:6',
        do_req=True)
    for item in boats_to_retrieve:
        # One scraper pass per configured boat.
        scraper = MegaClass(fleet_positions, datafile, CONFIG.get(item),
                            marinetraffic_instance, item, refresh)
        scraper.get_new_positions()

    marinetraffic_instance.cleanup()

    # Re-read data.json: get_new_positions() presumably updated it on disk
    # — TODO confirm against MegaClass.
    with open('src/data.json') as json_file:
        datafile = json.load(json_file)

    print('----------------------')
    print(f'starting run {datetime.datetime.now()}')
    for item in datafile:
        if datafile[item]:
            # Show the most recent known position for each boat.
            print(src.utils.handle_data(item, datafile[item][-1]))
    print('----------------------')
    plot(datafile, 25)
    create_geojson(datafile)
示例#2
0
def main() -> None:
    """Entry point: compute all figures, store them as CSV, and plot them."""
    invoer = gegevens.Gegevens()

    # Derive the complete data set from the input parameters.
    resultaten = bereken_gegevens(invoer)

    # Persist everything as a CSV file.
    csv_pad = Path("hkp.csv")
    io.schrijf_naar_csv(resultaten, csv_pad)

    # Render the matching graph next to it.
    grafiek_pad = Path("hkp.png")
    plot.plot(resultaten,
              grafiek_pad,
              plot_jaren=invoer.looptijd_hypotheek_jaren)

    # Closing summary for the user.
    print("")
    print("*------------------- OUTPUT -------------*")
    print(f"*     Alle data als CSV: {str(csv_pad):15s} *")
    print(f"*      Data als grafiek: {str(grafiek_pad):15s} *")
    print("*----------------------------------------*")
示例#3
0
    mapping=camvid.CamVid.load_mapping(),
    target_size=(360, 480),
    crop_size=crop_size,
    batch_size=batch_size,
    horizontal_flip=True,
    ignored_labels=['Void'],
    y_repeats=1,
)
# Build the data generators from the CamVid wrapper configured above.
generators = camvid11.generators()

# get the next X, y training tuple
X, y = next(generators['train'])
# transform the onehot vector to an image
y = camvid11.unmap(y[0])
# plot the images
_ = plot.plot(X=X[0], y=y[0], order=['X', 'y'])

# build the model for the image shape and number of labels
# NOTE(review): input shape assumes 3-channel (RGB) crops — confirm upstream.
model = aleatoric_tiramisu.aleatoric_tiramisu(
    (*crop_size, 3),
    camvid11.n,
    class_weights=camvid11.class_mask,
    learning_rate=1e-4,
    weights_file=pretrain_weights,
)
model.summary()
callbacks = [
    EarlyStopping(monitor='val_aleatoric_loss', patience=10),
    ModelCheckpoint(weights_file,
                    monitor='val_aleatoric_loss',
                    save_best_only=True,
from src.plot import plot
from src.soms import train
from src.utils import read_img

import numpy
import matplotlib.pyplot as plt
from minisom import MiniSom

if __name__ == '__main__':
    # Load the test image, train the SOM on its pixels, then visualise
    # the original vs. the colour-reduced result with both weight sets.
    src_image, pixel_data = read_img('Images/Testing/Image1.jpeg')
    initial_weights, quantised_image, final_weights = train(src_image, pixel_data)
    plot(src_image, quantised_image, initial_weights, final_weights)
示例#5
0
    # PLOT and LIGHT CURVE CREATION
    # ============================================= #
    elif 'plot' in input:
        f = open('data.txt', 'w+')
        f.write("# OBJ\t\tChi2\tChi2_m\tRobust\tSTD_target\tSTD_Refs:\tSTD_Master:\tInital\tBad_ref\tRefs\tDOF\tA\tA_err\tp-val\tPoss_var\n")
        f.close()
        if input[5:8] == 'all':
          print "Doing all"
          dir_contents = os.listdir('all_data')
          folders = [s for s in dir_contents if s.startswith("OBJ")]
          folders = sorted(folders)
          for num in range(len(folders)):
			try:
			  if num < 9 and num+1:# not in [6]:
				print "\nWORKING WITH: ",datadir+'/'+folders[int(num)-1],folders,"\n"
				plot.plot(datadir+'/'+folders[int(num)],folders)
			  else:
				#sys.exit()
				if num+1 not in par.ignore_objects and num+1 > -45:
				  print "\nWORKING WITH: ",datadir+'/'+folders[int(num)-1],folders,"\n"
				  plot.plot(datadir+'/'+folders[int(num)],folders)
			except IndexError:
			  print u.yellow("Trying again...(IndexError)")
			  if num < 9 and num+1:# not in [6]:
				print "\nWORKING WITH: ",datadir+'/'+folders[int(num)-1],folders,"\n"
				plot.plot(datadir+'/'+folders[int(num)],folders)
			  else:
				#sys.exit()
				if num+1 not in par.ignore_objects and num+1 > -45:
				  print "\nWORKING WITH: ",datadir+'/'+folders[int(num)-1],folders,"\n"
				  plot.plot(datadir+'/'+folders[int(num)],folders)
def main():
    """Parse CLI arguments, run the requested curve-fit optimizations in a
    process pool, plot the best fit per (model, method) pair, and dump all
    results as JSON to the output path.
    """
    args = arguments.parser.parse_args()

    models_args = args.models
    methods = args.methods
    file_path = args.input[0]
    attempts = args.attempts[0]
    output_path = args.output[0]

    df = pd.read_csv(file_path, decimal=',')

    models = [v for k, v in config.ALLOWED_MODELS.items() if k in models_args]
    # One future bucket per (model, method) combination.
    results_map = {(cls, method): [] for cls, method in product(models, methods)}

    result_dict = {}

    # Context manager guarantees the pool is shut down (the original leaked
    # the executor until interpreter exit).
    with ProcessPoolExecutor() as executor:
        for cls, method in product(models, methods):
            # NOTE: the callable must be passed positionally — submit(fn=...)
            # was deprecated in Python 3.8 and removed in 3.9 (TypeError).
            if method == 'PSO':
                # PSO evaluates all attempts as one swarm of starting points.
                params = np.random.rand(attempts, cls.params_scaling().shape[0])
                future_result = executor.submit(psopy.minimize,
                                                fun=goal_function,
                                                x0=params,
                                                args=(df.copy(), cls),
                                                tol=2.5e-3)
                results_map[(cls, method)].append(future_result)
            else:
                # Classic optimizers get one independent run per attempt.
                for _ in range(attempts):
                    params = np.random.rand(cls.params_scaling().shape[0])
                    future_result = executor.submit(scopt.minimize,
                                                    fun=goal_function,
                                                    x0=params,
                                                    args=(df.copy(), cls),
                                                    method=method,
                                                    tol=2.5e-3)
                    results_map[(cls, method)].append(future_result)

        for cls in models:
            method_dict = {}
            for method in methods:
                print(
                    'Waiting for {} optimizations of model {} to complete'.format(
                        method, cls.__name__))
                results = results_map[(cls, method)]
                wait(results, return_when=ALL_COMPLETED)
                results = [future_result.result() for future_result in results]
                # Keep only the best (lowest objective value) results.
                results = sorted(
                    results,
                    key=lambda r: r.fun)[:min(config.MAX_RESULTS, len(results))]

                def result_mapper(result):
                    # Serialize one optimizer result for the JSON report.
                    model = cls(result.x)
                    fitness = result.fun
                    return {
                        'params': model.json,
                        'fitness': fitness,
                        'deviation_percentage': 100.0 * (fitness**0.5),
                        'method': method
                    }

                best_result = results[0].x
                plot(df, cls(best_result),
                     '{}_{}.png'.format(method, cls.__name__))
                method_dict[method] = [result_mapper(r) for r in results]
            result_dict[cls.__name__] = method_dict

    # Distinct handle name avoids shadowing the path with the file object.
    with open(output_path, 'w') as output_file:
        json.dump(result_dict, output_file)
示例#7
0
def evaluate(args):
    """
    Evaluates the performance of ring signatures (sign and verify algorithms)
    under the different parameters provided in args.
    :param args: Object containing the parameters such as ring size, curves
        to evaluate
    :return:
    """
    total_stimes = []
    total_vtimes = []
    num_cpus = os.cpu_count()
    for c in args.curves:
        # Generate key pairs for the largest requested ring once per curve,
        # then reuse prefixes of them for the smaller rings.
        max_size = max(args.ringsizes)
        pair_keys = [utils.generateKeyPair(c) for _ in range(max_size)]
        # Unpack (private, public) pairs directly instead of index loops.
        private_keys = [pair[0] for pair in pair_keys]
        public_keys = [pair[1] for pair in pair_keys]
        used_keys = []
        stimes = []
        vtimes = []
        for rs in args.ringsizes:
            keys = public_keys[:rs]
            # Cryptographically sound random choice of the signer index.
            signer = secrets.randbelow(rs)

            # Simulate signatures and verifications
            it = 64  # Number of signatures crafted/verified in parallel
            parameters = keys, signer, private_keys[signer], args.message, c
            signatures = [None] * it

            # Sign
            t0 = timer()
            signatures = Parallel(n_jobs=num_cpus)(
                delayed(auxSign)(i, parameters, signatures) for i in range(it))
            sign_time = timer() - t0
            stimes.append(sign_time / it)

            # Each parallel job returns a different list.
            # We get a matrix with elements in the diagonal.
            # We apply list comprehension to get a single non-empty list.
            signatures = [signatures[i][i] for i in range(it)]

            # Verify
            t0 = timer()
            Parallel(n_jobs=num_cpus)(
                delayed(auxVerify)(i, parameters, signatures, used_keys)
                for i in range(it))
            verify_time = timer() - t0
            vtimes.append(verify_time / it)

        total_stimes.append(stimes)
        total_vtimes.append(vtimes)
    # Plot signing times
    plot(args.ringsizes,
         'Ring size',
         total_stimes,
         'Time in seconds',
         args.curves,
         'Time to craft a signature',
         'graph',
         save_csv=True)
    # Plot verification times
    plot(args.ringsizes,
         'Ring size',
         total_vtimes,
         'Time in seconds',
         args.curves,
         'Time to verify a signature',
         'graph',
         save_csv=True)
示例#8
0
def main(reporter_file: str = './data/reporter-export.csv'):
    """Load the reporter CSV export, preprocess it, and plot the result."""
    raw = pd.read_csv(reporter_file)
    processed = data_process(raw)
    plot(processed)