Example no. 1
def main():
    if len(sys.argv) > 1:
        args = load_config(sys.argv[1])

        if args is None:
            sys.exit(1)

        out_dir_path = Path(args['COVERAGE']['OUT_DIR'])
        if not out_dir_path.exists():
            print("Out dir doesn't exist. Creating it!")
            out_dir_path.mkdir(parents=True, exist_ok=True)

        tasks = queue.Queue()

        for worker_id in range(args['COVERAGE']['WORKERS']):
            worker = threading.Thread(target=run_drrun,
                                      args=(tasks, worker_id, args))
            worker.daemon = True
            worker.start()

        for f_path in Path(args['COVERAGE']['INP_DIR']).iterdir():
            tasks.put({'input': f_path.as_posix()})

        tasks.join()
    else:
        print(f'Usage: {sys.argv[0]} <config_file>')
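
# A minimal sketch of the worker these threads could run. tasks.join() in
# main() only returns once every queued item has been matched by a
# task_done() call, so the worker must report completion even on failure.
# The drrun command line and the 'TARGET' config key are hypothetical.
import subprocess

def run_drrun(tasks, worker_id, args):
    while True:
        task = tasks.get()
        try:
            subprocess.run(['drrun', '-t', 'drcov', '--',
                            args['COVERAGE']['TARGET'], task['input']],
                           check=False)
        finally:
            tasks.task_done()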
Example no. 2
def main():
    # TODO: find a cleaner way of shutting down
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    config = load_config()
    debounce_sec = config['debounce']
    display_name = config['display_name']
    script_working_dir = config['_script_working_dir']
    dbus_interface_name = config['_dbus']['interface_name']
    dbus_path = config['_dbus']['path']
    dbus_signal_name = config['_dbus']['signal_name']
    connect_scripts = config['on_connect']
    disconnect_scripts = config['on_disconnect']

    app = QCoreApplication(sys.argv)
    loop = DBusQtMainLoop(set_as_default=True)
    bus = dbus.SessionBus()

    signal_runner = SignalRunner(debounce_sec,
                                 display_name,
                                 cwd=script_working_dir)
    for script in connect_scripts:
        signal_runner.add_connect_script(script)
    for script in disconnect_scripts:
        signal_runner.add_disconnect_script(script)

    bus.add_signal_receiver(handler_function=signal_runner.run,
                            dbus_interface=dbus_interface_name,
                            path=dbus_path,
                            signal_name=dbus_signal_name)

    sys.exit(app.exec_())
Example no. 3
    def __init__(self, img_path=None, silent=False):
        self.config = load_config()

        if img_path is not None:
            self.img = cv2.imread(img_path)
        else:
            self.img = cv2.imread(self.config['img_filename'])

        self.silent = silent

        bgr_props = self.config['bgr_properties']
        bgr_mean = (bgr_props['b_mean'], bgr_props['g_mean'],
                    bgr_props['r_mean'])
        bgr_stdev = (bgr_props['b_stdev'], bgr_props['g_stdev'],
                     bgr_props['r_stdev'])

        self.seg_img = self._segment_bgr(
            self.img,
            self._calc_lowerb(bgr_mean, bgr_stdev,
                              bgr_props['lowerb_stdev_scalar']),
            self._calc_upperb(bgr_mean, bgr_stdev,
                              bgr_props['upperb_stdev_scalar']))

        self.filtered_img = cv2.medianBlur(self.seg_img,
                                           self.config['filter_ksize'])

        self.contours = self._get_contours(self.filtered_img)
Example no. 4
def main(argv):
    inputfile = ''
    outputdir = ''
    arch = ''
    lib = ''

    try:
        opts, args = getopt.getopt(argv, "hi:o:a:l:",
                                   ["file=", "output_dir=", "arch=", "lib="])
    except getopt.GetoptError:
        print("generator.py -i <inputfile> -o <outputdir> -a <arch> -l <lib>")
        sys.exit(2)
    for opt, arg in opts:
        if opt == "-h":
            print("generator.py -i <inputfile>")
            sys.exit()
        elif opt in ("-i", "--file"):
            inputfile = arg
        elif opt in ("-o", "--output_dir"):
            outputdir = arg
        elif opt in ("-a", "--arch"):
            arch = arg
        elif opt in ("-l", "--lib"):
            lib = arg

    print("Generate code from: ", inputfile)
    print("Files will be written to: ", outputdir)
    generator = select_generator(arch, lib)
    config = load_config(inputfile)
    generator.set_config(config)

    generator.set_output_dir(os.path.abspath(outputdir))
    generator.generate()
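
# getopt works, but note the -h branch above is only reachable because "h"
# is in the option string; argparse would provide help handling for free.
# An equivalent sketch (same option names, hypothetical defaults):
import argparse

parser = argparse.ArgumentParser(prog='generator.py')
parser.add_argument('-i', '--file', dest='inputfile', required=True)
parser.add_argument('-o', '--output_dir', dest='outputdir', default='')
parser.add_argument('-a', '--arch', default='')
parser.add_argument('-l', '--lib', default='')
opts = parser.parse_args()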
Example no. 5
def main():
    config = config_loader.load_config()
    fifoname = config['clonner_fifo']
    if not os.path.exists(fifoname):
        os.mkfifo(fifoname)
    while True:
        pipein = open(fifoname, 'r')
        instr = pipein.readline()[:-1]
        pipein.close()
        print("Instruction readed: ", instr)
        if (instr == 'clone'):
            clone(config_loader.load_config())
        else:
            pid = os.fork()
            if (pid == 0):
                os.system(instr)
                return
Example no. 6
    def __init__(self, orthomosaic_path=None, silent=False):
        self.silent = silent
        self.config = load_config()

        if orthomosaic_path is None:
            self.orthomosaic_path = self.config['orthomosaic_filename']
        else:
            self.orthomosaic_path = orthomosaic_path
Example no. 7
class LogConfig:
    """Logging related configuration

    Attributes:
        log_level (str): python logging level
        log_location (str): log file output location
    """
    _lcfg: Dict[Any, Any] = load_config('logging')
    log_level: str = _lcfg['log_level']
    log_location: str = _lcfg['log_location']
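
# LogConfig (and DBConfig in example no. 12) assume a load_config that
# returns one named section of the configuration as a dict, evaluated once
# at class-body execution time. A hypothetical implementation that would
# satisfy both, assuming a YAML file named config.yml:
import yaml
from typing import Any, Dict

def load_config(section: str) -> Dict[Any, Any]:
    with open('config.yml') as f:
        return yaml.safe_load(f)[section]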
Example no. 8
def main():
    if len(sys.argv) > 1:
        args = load_config(sys.argv[1])

        if args is None:
            sys.exit(1)

        run_dbg(args)
    else:
        print(f'Usage: {sys.argv[0]} <config_file>')
Example no. 9
def switch_windows():
    """Switch all the windows."""
    config = config_loader.load_config("config.json")
    original_window = xdotool.get_window_active()
    windows = xdotool.search_window_class(config['browser'])
    for window in windows:
        window_name = xdotool.get_window_name(window)
        xdotool.activate_window(window)
        swtich_tabs(window, max_tab=30)
    xdotool.activate_window(original_window)
Example no. 10
def detect_wrong_format(input_file, pattern):
    """Delete entries in ldap older than limit_days_ago before today
        :param input_file: Ldap dumb file to parse
        :param pattern: Pattern to be detected, it must be defined in config.yml!
    """
    cfg = load_config()
    try:
        cfg[pattern]
    except KeyError:
        LOGGER.error("Pattern not found in the config.yml file!")
        sys.exit(1)
    else:
        detect_ldap_problems.detect_wrong_format(cfg[pattern], input_file)
Example no. 11
def get_logger():
    logger = logging.getLogger('Basic model (coordinates) train')
    logger.setLevel(logging.DEBUG)

    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    config = load_config()
    file_handler = logging.FileHandler(config["PATH_TO_DEBUG_LOG"])
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    return logger
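
# One caveat with this pattern: every call to get_logger() attaches a fresh
# FileHandler, so calling it twice duplicates each log line. A sketch of
# the same function with a guard (reusing load_config from this example):
def get_logger_guarded():
    logger = logging.getLogger('Basic model (coordinates) train')
    if not logger.handlers:  # only configure on the first call
        logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        file_handler = logging.FileHandler(load_config()["PATH_TO_DEBUG_LOG"])
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    return logger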
Example no. 12
class DBConfig:
    """Database related configuration

    Attributes:
        mongo_server (str): server host
        mongo_database (str): mongo database name
        mongo_collection (str): mongo collection name
    """
    _lcfg: Dict[Any, Any] = load_config('mongo_db')

    mongo_server: str = _lcfg['mongo_server']
    mongo_database: str = _lcfg['mongo_database']
    mongo_collection: str = _lcfg['mongo_collection']
Example no. 13
def main():
    """Entry point of the program"""

    config, input_spec, coords, dm = config_loader.load_config()

    # Create the data.
    data = create_data(input_spec)
    addresses = data['addresses']
    API_key = data['API_key']
    distance_matrix = create_distance_matrix(data)
    print(distance_matrix)
    with open(config["matrix_file"], "w") as f:
        for row in distance_matrix:
            f.write(",".join([str(e) for e in row]) + "\n")
Example no. 14
def main():
    """Entry point of the program"""

    config, input_data, coords, dm = config_loader.load_config()

    # Create the data.
    data = create_data(input_data)
    coords_vect = create_distance_matrix(data)
    # coords_vect = [[44.4352806, 26.0490164], [44.448984, 26.0561547], [44.4421364, 26.0666876], [44.4386435, 26.0458386], [44.4430103, 26.05114], [44.4447228, 26.0548487], [44.4482361, 26.0429976], [44.4492548, 26.0530481], [44.4384469, 26.038602], [44.4430707, 26.0627016], [44.4468016, 26.0660459], [44.4404178, 26.0629714], [44.4335841, 26.0560158]]
    print(coords_vect)
    with open(config["coords_file"], "w") as f:
        for row in coords_vect:
            f.write(",".join([str(e) for e in row]) + "\n")
Example no. 15
def main():
    config = config_loader.load_config()
    if not os.path.exists(config['menu_fifo']):
        os.mkfifo(config['menu_fifo'])

    pipeout = os.open(config['menu_fifo'], os.O_WRONLY)
    instr = ""
    while instr != "n":
        instr = input(
            "Select action:\n1 - block\n2 - run\n3 - kill\n4 - emulate\n")
        os.write(pipeout, bytearray(instr + '\n', 'utf-8'))
        if instr in ('2', '3'):
            name = input("Enter process name\n")
            os.write(pipeout, bytearray(name + '\n', 'utf-8'))
        instr = input("Add instruction (Y/n)\n")
    os.write(pipeout, b'stop\n')
    os.close(pipeout)
    return 0
Example no. 16
def main():
    if len(sys.argv) > 1:
        args = load_config(sys.argv[1])

        if args is None:
            sys.exit(1)

        crashes_dir = pathlib.Path(args['CRASH_ANALYZER']['INP_DIR'])
        reports_dir = pathlib.Path(args['CRASH_ANALYZER']['REPORTS_DIR'])

        if not crashes_dir.exists():
            print(
                "Crashes directory doesn't exist! Check if the file path is correct!"
            )
            sys.exit(1)

        if not reports_dir.exists():
            print("Out directory doesn't exist! Creating a new one.")
            reports_dir.mkdir(parents=True, exist_ok=True)

        tasks = queue.Queue()

        for worker_id in range(args['CRASH_ANALYZER']['WORKERS']):
            worker = threading.Thread(target=run_bugid,
                                      args=(tasks, worker_id, args))
            worker.daemon = True
            worker.start()

        crash_samples = list(crashes_dir.iterdir())
        total = len(crash_samples)

        for current, crash_path in enumerate(crash_samples):
            tasks.put({
                'crash_path': crash_path.absolute().as_posix(),
                'reports_dir': reports_dir.absolute().as_posix(),
                'current': current,
                'total': total,
                'delete': args['CRASH_ANALYZER']['DELETE_NON_CRASHING']
            })

        tasks.join()
    else:
        print(f'Usage: {sys.argv[0]} <config_file>')
Example no. 17
def create_data_model():
    """Stores the data for the problem."""
    global config, input_data
    data = {}
    config, input_data, coords, dm = config_loader.load_config()
    depots = input_data["depots"]
    issues = input_data["issues"]
    vehicles = input_data["vehicles"]

    data['distance_matrix'] = dm

    data['demands'] = [
        int(d) * input_data["options"]["demand_factor"]
        for d in input_data["demands"]
    ]

    data['num_vehicles'] = len(vehicles)
    data['vehicle_capacities'] = [int(v["capacity"]) for v in vehicles]
    data['vehicle_fuel'] = [int(v["fuel"]) for v in vehicles]

    sum_demands = sum(data['demands'])
    sum_capacities = sum(data['vehicle_capacities'])

    # ratio = sum_demands / sum_capacities
    # if ratio > 1:
    #     data["vehicle_capacities"] = [int(d*ratio) for d in data["vehicle_capacities"]]

    data['depot'] = 0

    # So far, we have assumed that all vehicles start and end at a single location, the depot.
    # You can also set possibly different start and end locations for each vehicle in the problem.
    # To do so, pass two vectors, containing the indices of the start and end locations,
    # as inputs to the RoutingModel method in the main function.
    # Here's how to create the start and end vectors in the data section of the program:

    data['starts'] = [i for i in range(len(depots))]
    data['ends'] = [i for i in range(len(depots))]

    print(data)

    print("sum demands: " + str(sum_demands))
    print("sum capacities: " + str(sum_capacities))

    # quit()
    return data
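
# The comment block above refers to OR-Tools routing. In the current
# ortools API the start and end vectors are passed to the
# RoutingIndexManager (which RoutingModel wraps), not to RoutingModel
# itself. A sketch, assuming the data dict built above:
from ortools.constraint_solver import pywrapcp

def build_routing_model(data):
    manager = pywrapcp.RoutingIndexManager(len(data['distance_matrix']),
                                           data['num_vehicles'],
                                           data['starts'], data['ends'])
    return manager, pywrapcp.RoutingModel(manager)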
Example no. 18
def simplemodel_train(logger, use_backup=False, debug=False):
    config = load_config()
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    if not torch.cuda.is_available():
        logger.error("Cuda is unavailable")

    seq, angles = data_utils.get_embedded_data_angles(config)
    train_data, test_data = data_utils.get_dataset_angles(seq, angles)
    train_dataloader, val_dataloader = data_utils.get_dataloaders(
        train_data, test_data, BATCH_SIZE)

    model = SimpleRNN(MODEL_INPUT_SIZE, MODEL_OUTPUT_SIZE, MODEL_HIDDEN_DIM,
                      N_LAYERS)
    start_epoch, model = train_utils.try_load_model_backup(
        model, MODEL_NAME, use_backup, logger, config)
    model.to(device)

    train_utils.initialize_wandb(model, config, N_LAYERS, BATCH_SIZE,
                                 'simple-model-angles', MODEL_HIDDEN_DIM)

    loss = nn.MSELoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=LEARNING_RATE)

    train_utils.train_model(
        train_dataloader,
        val_dataloader,
        model,
        MODEL_NAME,
        loss,
        optimizer,
        NUM_EPOCHS,
        logger,
        device,
        config,
        metrics_logger=train_utils.angles_metrics_logger,
        start_epoch=start_epoch,
        model_backup_path=config["PATH_TO_SIMPLEMODEL_BACKUP"],
        num_epoch_before_backup=config["NUM_EPOCH_BEFORE_BACKUP"],
        debug=debug)
    train_utils.write_training_epoch(config, 0, MODEL_NAME, logger)
    torch.save(model.state_dict(), os.path.join(wandb.run.dir, 'model.pt'))
Example no. 19
def simplemodel_coord_train(logger, args, use_backup=False, debug=False):
    params = parse_parameters(args)
    save_prefix = args['save_prefix']
    config = load_config()
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    if not torch.cuda.is_available():
        logger.error("Cuda is unavailable")

    seq, coord = data_utils.get_embedded_data_coordinates(config)
    train_data, test_data = data_utils.get_dataset_coordinates(seq, coord, config)
    train_dataloader, val_dataloader = data_utils.get_dataloaders(train_data, test_data, params['batch_size'])

    model = SimpleCharRNN(params['input_size'], params['output_size'], params['hidden_dim'], params['n_layers'], device)
    start_epoch, model = train_utils.try_load_model_backup(model, MODEL_NAME, use_backup, logger, config)
    model.to(device)

    if not debug:
        train_utils.initialize_wandb(model, config, params['n_layers'], params['batch_size'],
                                     'simple-model-coordinates', params['hidden_dim'])

    loss = ComplexLoss(on_cpu=debug)
    optimizer = torch.optim.Adam(model.parameters(), lr=params['learning_rate'])

    scheduler = None
    # scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=100, gamma=0.5)
    # scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=0.0001, max_lr=0.002, step_size_up=100,
    #                                               cycle_momentum=False)

    train_utils.train_model(train_dataloader, val_dataloader, model, MODEL_NAME, loss, optimizer, params['epochs'],
                            logger,
                            device,
                            config,
                            train_utils.coordinates_metrics_logger,
                            scheduler=scheduler,
                            start_epoch=start_epoch, model_backup_path=config["PATH_TO_SIMPLEMODEL_COORD_BACKUP"],
                            num_epoch_before_backup=config["NUM_EPOCH_BEFORE_BACKUP"],
                            debug=debug)
    train_utils.write_training_epoch(config, 0, MODEL_NAME, logger)
    if save_prefix is not None:
        model_path = '../results/' + save_prefix + '.sav'
        torch.save(model.cpu(), model_path)
Example no. 20
def main():
    global user_stats
    global ip_stats

    if len(sys.argv) < 2:
        config_path = "./config.py"
    elif len(sys.argv) == 2:
        config_path = sys.argv[1]
    else:
        print("Usage: mtprotoproxy [path/to/config.py]")
        return

    if not load_config(globals(), config_path):
        return

    try:
        setup_uvloop()
    except Exception:
        pass

    try:
        setup_fd_limit()
    except Exception as exc:
        print_err("Failed to increase file limit:", exc)

    try:
        setup_debug()
    except Exception:
        pass

    init_ip_info()
    print_tg_info()

    user_stats = {}
    ip_stats = {}

    if sys.platform == "win32":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)

    loop = asyncio.get_event_loop()
    loop.set_exception_handler(loop_exception_handler)

    loop.run_until_complete(INIT_HOOK(HookAPI(USERS, user_stats, ip_stats)))

    stats_printer_task = asyncio.Task(stats_printer())
    asyncio.ensure_future(stats_printer_task)

    if USE_MIDDLE_PROXY:
        middle_proxy_updater_task = asyncio.Task(update_middle_proxy_info())
        asyncio.ensure_future(middle_proxy_updater_task)

    reuse_port = hasattr(socket, "SO_REUSEPORT")

    task_v4 = asyncio.start_server(handle_client_wrapper,
                                   '0.0.0.0',
                                   PORT,
                                   limit=TO_TG_BUFSIZE,
                                   reuse_port=reuse_port,
                                   loop=loop)
    server_v4 = loop.run_until_complete(task_v4)

    if socket.has_ipv6:
        task_v6 = asyncio.start_server(handle_client_wrapper,
                                       '::',
                                       PORT,
                                       limit=TO_TG_BUFSIZE,
                                       reuse_port=reuse_port,
                                       loop=loop)
        server_v6 = loop.run_until_complete(task_v6)

    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass

    stats_printer_task.cancel()

    server_v4.close()
    loop.run_until_complete(server_v4.wait_closed())

    if socket.has_ipv6:
        server_v6.close()
        loop.run_until_complete(server_v6.wait_closed())

    loop.close()
Example no. 21
def main():
    if len(sys.argv) > 1:
        args = load_config(sys.argv[1])

        if args is None:
            sys.exit(1)

        sync_path = pathlib.Path(args['COLLECT']['FUZZER_SYNC_DIR'])
        out_dir_path = pathlib.Path(args['COLLECT']['OUT_DIR'])

        crashes_dir = out_dir_path / 'crashes'
        hangs_dir = out_dir_path / 'hangs'

        if not crashes_dir.exists() or not hangs_dir.exists():
            print("Out directories don't exist! Creating new ones.")
            crashes_dir.mkdir(parents=True, exist_ok=True)
            hangs_dir.mkdir(parents=True, exist_ok=True)
        
        crashes = 0
        hangs   = 0
        per_fuzzer_stats = {}  # {'fuzzer0': [crashes, hangs]}
        for fuzzer_path in sync_path.iterdir():
            if fuzzer_path.is_dir():
                for fuzzer_data in fuzzer_path.iterdir():
                    if 'crashes' in fuzzer_data.name and fuzzer_data.is_dir():
                        for crash_path in fuzzer_data.iterdir():
                            if 'README' not in crash_path.name:
                                copy_from = crash_path.as_posix()
                                copy_to   = crashes_dir.as_posix() + '/' + crash_path.name + '_' + str(crashes)
                                if args['COLLECT']['VERBOSITY']:
                                    print('Crash path:   ' + copy_from)
                                    print('Copy to path: ' + copy_to)
                                if not args['COLLECT']['JUST_GET_STATS']:
                                    shutil.copy(copy_from, copy_to)
                                crashes += 1
                                if fuzzer_path.name not in per_fuzzer_stats:
                                    per_fuzzer_stats[fuzzer_path.name] = [0, 0]
                                per_fuzzer_stats[fuzzer_path.name][0] += 1
                    if 'hangs' in fuzzer_data.name and fuzzer_data.is_dir():
                        for hangs_path in fuzzer_data.iterdir():
                            if 'README' not in hangs_path.name:
                                copy_from = hangs_path.as_posix()
                                copy_to   = hangs_dir.as_posix() + '/' + hangs_path.name + '_' + str(hangs)
                                if args['COLLECT']['VERBOSITY']:
                                    print('Hang path:    ' + copy_from)
                                    print('Copy to path: ' + copy_to)
                                if not args['COLLECT']['JUST_GET_STATS']:
                                    shutil.copy(copy_from, copy_to)
                                hangs += 1
                                if fuzzer_path.name not in per_fuzzer_stats:
                                    per_fuzzer_stats[fuzzer_path.name] = [0, 0]
                                per_fuzzer_stats[fuzzer_path.name][1] += 1
        
        print('Total collected:')
        print(f'\tCrashes: {crashes}')
        print(f'\tHangs:   {hangs}')
        print('Per-fuzzer stats:')
        for k, v in per_fuzzer_stats.items():
            print(f'\tFuzzer: {k}. Crashes: {v[0]}. Hangs: {v[1]}.')
    else:
        print(f'Usage: {sys.argv[0]} <config_file>')
Example no. 22
def main():
    # Instantiate the data problem.
    config, input_data, coords, dm = config_loader.load_config()
    depots = input_data["depots"]
    places = input_data["places"]
    vehicles = input_data["vehicles"]
    n_vehicles = len(vehicles)
    options = input_data["options"]
    # just for initial check
    demands = [
        int(d) * options["demand_factor"] for d in input_data["demands"]
    ]
    vehicle_capacities = [int(v["capacity"]) for v in vehicles]
    vehicle_fuel = [int(v["fuel"]) for v in vehicles]

    items = options["items"]

    if options["fixed_vehicles"]:
        n_vehicles = options["n_vehicles"]
        vehicles = [{"id": i + 1} for i in range(n_vehicles)]

    print(items)

    if options["sort_order_msb"]:
        items.sort(key=lambda x: x["weight"], reverse=True)

    print(items)

    sum_demands = sum(demands)
    sum_capacities = sum(vehicle_capacities)

    depot = 0

    # So far, we have assumed that all vehicles start and end at a single location, the depot.
    # You can also set possibly different start and end locations for each vehicle in the problem.
    # To do so, pass two vectors, containing the indices of the start and end locations,
    # as inputs to the RoutingModel method in the main function.
    # Here's how to create the start and end vectors in the data section of the program:

    start_points = [i for i in range(len(depots))]
    end_points = [i for i in range(len(depots))]

    print("sum demands: " + str(sum_demands))
    print("sum capacities: " + str(sum_capacities))

    # compute_geometry.load_config()
    n_iter = options["n_iter"]
    n_epochs = options["n_epochs"]

    if use_external_input:
        df = pd.read_csv('coords_nearby_filtered.csv')

        place_ids = []
        coords = []
        place_ids = df["google_id"]
        coords_lat = [lat for lat in df["lat"]]
        coords_lng = [lng for lng in df["lng"]]
        for i in range(len(coords_lat)):
            coords.append([coords_lat[i], coords_lng[i]])
        place_ids = ["place_id:" + pid for pid in place_ids]
        print(place_ids[0])
        print(coords[0])
        places = place_ids[n_vehicles:]

        if options["fixed_demands"]:
            # only use demand factor
            input_data["demands"] = [1 for p in place_ids]

        place_coords = coords[n_vehicles:]
        compute_geometry.set_coords(coords)

    # quit()

    if options["use_range"]:
        # use demand factor range
        demand_factor_range = options["demand_factor_range"]
        demand_factor_range = range(demand_factor_range[0],
                                    demand_factor_range[1] + 1)
        print("using demand factor range: ", list(demand_factor_range))
    else:
        # use specified demand factor
        demand_factor_range = [options["demand_factor"]]
        print("using default demand factor: ", list(demand_factor_range))

    geometry.init_random(False)

    epoch_results_vect = []

    for epoch in range(n_epochs):
        for i_df, df in enumerate(demand_factor_range):
            demands = [int(d) * df for d in input_data["demands"]]
            demands = demands[n_vehicles:]

            fill_dict = {}

            for i, place in enumerate(places):
                fill_dict[place] = {
                    "place": place,
                    "coords": place_coords[i],
                    "items": [],
                    "item_coords": [],
                    "found": False,
                    "filled": False,
                    "finder": None,
                    "find_index": -1,
                    "total_revisits": 0,
                    "demand": demands[i]
                }

            print(len(fill_dict))

            find_index = 0
            compute_geometry.init_random_walk(vehicles, None)

            for i in range(n_iter):
                if i == 0 and use_initial_depots:
                    distance_matrix = compute_geometry.compute_distance_matrix_wrapper(
                    )
                else:
                    distance_matrix = compute_geometry.get_distance_matrix_with_random_walk(
                    )

                print("epoch: " + str(epoch) + ", iteration: " + str(i) +
                      " with demand factor: " + str(df) + " [" + str(
                          int((i_df * n_iter + i) /
                              (len(demand_factor_range) * n_iter) * 100)) +
                      "%]")

                # each agent covers a given range (treasure scan)
                for i_vehicle, v in enumerate(vehicles):
                    if disp_view:
                        print("vehicle: ", i_vehicle)
                    # check nearby places within range
                    found_places = []
                    for j, d in enumerate(distance_matrix[i_vehicle]):
                        if j >= n_vehicles:
                            # print(j,d)
                            if d <= options["scan_radius"]:
                                if disp_view:
                                    print("found: ", d)
                                place = places[j - n_vehicles]
                                dict_place = fill_dict[place]

                                # check if place was already found by another agent
                                if not dict_place["found"]:
                                    dict_place["found"] = True
                                    dict_place["finder"] = vehicles[i_vehicle][
                                        "id"]
                                    dict_place["find_index"] = find_index
                                    found_places.append(dict_place)
                                    find_index += 1
                                else:
                                    dict_place["total_revisits"] += 1
                                    if disp_view:
                                        print("already found: ", d)

                    # assign random items for each agent and found places
                    n_slots = 0
                    for fps in found_places:
                        # check filled items, get number of free slots
                        if not fps["filled"]:
                            n_slots += fps["demand"]

                    if disp_view:
                        print("n_slots: ", n_slots)

                    # generate free slots
                    slots = [None] * n_slots
                    n_items = []

                    # compute number of slots for each item type
                    for item in items:
                        n_items.append(int(n_slots * item["weight"]))

                    # assign items for free slots
                    slots_index = 0
                    for k, n_item in enumerate(n_items):
                        for n in range(n_item):
                            slots[slots_index] = items[k]["item"]
                            slots_index += 1

                    if disp_view:
                        print("n_items: ", n_items)
                        print("filled: ", slots_index)

                    # check unfilled slots, fill with last item type
                    n_unfilled = n_slots - slots_index
                    if slots_index < n_slots:
                        for n in range(n_unfilled):
                            slots[slots_index + n] = items[-1]["item"]

                    # shuffle items/slots
                    slots = shuffle.fisher_yates_shuffle_improved(slots)

                    if disp_view:
                        print(len(slots))

                    slots_index = 0

                    # assign items to actual places
                    for fps in found_places:
                        for d in range(fps["demand"]):
                            if not fps["filled"]:
                                fps["items"].append(slots[slots_index])
                                fps["item_coords"].append(
                                    geometry.get_random_point_in_radius(
                                        fps["coords"],
                                        options["item_coords"]["min_radius"],
                                        options["item_coords"]["max_radius"]))
                                slots_index += 1
                            else:
                                if disp_view:
                                    print("already filled")

                if check_results(items, places, demands, fill_dict, i, epoch,
                                 False, True)[0]:
                    break

            _, epoch_results, map_geometry = check_results(
                items, places, demands, fill_dict, i, epoch, True, False)
            epoch_results_vect.append(epoch_results)

    fig = compute_geometry.plot_random_walk_record()
    output_str = "epoch,places,demand,T,C,S,A,revisits,iterations\n"
    for epoch_results in epoch_results_vect:
        output_str += epoch_results + "\n"
    print(output_str)
    with open("./data/dms_results.csv", "w") as f:
        f.write(output_str)
    map_geometry_str = json.dumps(map_geometry, indent=2)
    with open("./data/dms_map.json", "w") as f:
        f.write(map_geometry_str)

    fig.savefig("./data/random_walk.png", dpi=300)
Example no. 23
import yaml

from elasticsearch import Elasticsearch
from config_loader import load_config


def projects_from_file(fname):
    with open(fname) as f:
        doc = yaml.safe_load(f)
        commercial = doc['Projects']['Commercial']
        for project, details in commercial.items():
            yield ('commercial', project, details)
        private = doc['Projects']['Private']
        for project, details in private.items():
            yield ('private', project, details)


if __name__ == '__main__':

    fname = "life_tasks.yml"

    elastic_hosts = load_config('elastic-config.yml')
    es = Elasticsearch(hosts=elastic_hosts, verify_certs=False)
    for commercial_or_private, project_name, project_details in projects_from_file(fname):
        project_details['title'] = project_name
        project_details['type'] = commercial_or_private
        es.index(index="searchmybio_luke",
                 doc_type='project',
                 id=project_name,
                 body=project_details)
Example no. 24
import pandas as pd
from transmission_rpc import Client

from telegram import Update, ParseMode
from telegram.ext import CallbackContext

from scraper import search_piratebay
from config_loader import load_config

# Load config file
token, username, password, host, port, chatid = load_config()


def help_command(update: Update, context: CallbackContext) -> None:
    """Send a message when the command /help is issued."""
    help_text = """/piratesearch (n) query - Show first n results for query on Piratebay
    /more - Show 5 more results for Query
    /download idx - Download torrent with idx from previous piratesearch
    /listtorrents - List all torrents in Transmission
    /deletealltorrents - Delete all torrents in Transmission
    /deletetorrent idx - Delete torrent with idx from previous listtorrent
    """
    update.message.reply_text(help_text.replace('    ', ''))


def pirate_search(update, context):
    """Usage /piratesearch n query
    Show first n results for query on Piratebay. Note that n is limited to 30, as only the first page is scraped.
    If no n is provided 10 rows are shown by default.
    """
    if update.effective_chat.id != chatid:
Example no. 25
import redis
import time
from config_loader import load_config
from bot import WallEBot
from handlers.tags import TagsHandler
from handlers.repeat import RepeatHandler
from handlers.morse import MorseCodeHandler
from handlers.mud_emote import MudEmoteHandler


cfg = load_config()

rds = redis.StrictRedis(host=cfg.REDIS_HOST, db=cfg.REDIS_DB)
bot = WallEBot(cfg.TELEGRAM_TOKEN)

bot.add_handlers(
    TagsHandler(bot),
    MorseCodeHandler(bot),
    MudEmoteHandler(bot),
    RepeatHandler(bot)
)


def run():
    bot.message_loop()
    print('Listening...')

    while True:
        time.sleep(10)
Example no. 26
def calc_lowerb(mean, stdev, interval_scalar):
    return (mean[0] - interval_scalar * stdev[0],
            mean[1] - interval_scalar * stdev[1],
            mean[2] - interval_scalar * stdev[2])


def calc_upperb(mean, stdev, interval_scalar):
    return (mean[0] + interval_scalar * stdev[0],
            mean[1] + interval_scalar * stdev[1],
            mean[2] + interval_scalar * stdev[2])


###############################################################
# Main

if __name__ == "__main__":
    config = load_config()

    img = cv2.imread("../input/DJI_0240.JPG")

    bgr_props = config['bgr_properties']
    bgr_mean = (bgr_props['b_mean'], bgr_props['g_mean'], bgr_props['r_mean'])
    bgr_stdev = (bgr_props['b_stdev'], bgr_props['g_stdev'],
                 bgr_props['r_stdev'])

    # # BGR segmentation
    img_seg_bgr = segment_bgr(
        img, calc_lowerb(bgr_mean, bgr_stdev,
                         bgr_props['lowerb_stdev_scalar']),
        calc_upperb(bgr_mean, bgr_stdev, bgr_props['upperb_stdev_scalar']))

    _, cnts, _ = cv2.findContours(img_seg_bgr, cv2.RETR_TREE,
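
# calc_lowerb and calc_upperb apply the same per-channel formula,
# mean -/+ interval_scalar * stdev. A vectorized sketch of both in one
# helper, assuming NumPy is available:
import numpy as np

def calc_bounds(mean, stdev, lower_scalar, upper_scalar):
    mean = np.asarray(mean, dtype=float)
    stdev = np.asarray(stdev, dtype=float)
    return mean - lower_scalar * stdev, mean + upper_scalar * stdev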
Example no. 27
import docopt
import logging
import sys

from blessings import Terminal

# self
import config_loader

# parse docopt
arguments = docopt.docopt(__doc__, options_first=True)

if arguments['--debug']:
    log_level = logging.DEBUG
else:
    log_level = logging.INFO

# Set configuration and logging up first
config_location = "~/.nbssh"
config = config_loader.load_config(config_location)
if config == 0:
    sample_config = """Sample configuration:

[main]
API_ADDRESS = https://netbox.yourdomain.com
LOG_LOCATION = /var/log/nbssh.log
API_TOKEN = Token abc123
NO_OF_RESULTS = 10
    """
    print(sample_config)
    sys.exit()

logging.basicConfig(level=log_level,
                    format='[%(asctime)s][%(levelname)s][%(name)s] \
                    %(message)s',
Example no. 28
#!/usr/bin/env python
from config_loader import load_config
from google.cloud import pubsub_v1

config = load_config("../config.yml")
project_id = config["project_id"]
subscription = config["subscription"]
subscription_name = f"projects/{project_id}/subscriptions/{subscription}"
print(f"Subscription name: {subscription_name}")

subscriber = pubsub_v1.SubscriberClient()

print("Listening for messages on {}".format(subscription_name))
while True:
    response = subscriber.pull(subscription_name, max_messages=5)

    for msg in response.received_messages:
        print("Received message:", msg.message.data)

    ack_ids = [msg.ack_id for msg in response.received_messages]
    if ack_ids:  # acknowledge() rejects an empty ack_ids list
        subscriber.acknowledge(subscription_name, ack_ids)
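
# The same client also supports a streaming pull, which avoids the busy
# polling above. A sketch, assuming the same subscription_name:
def callback(message):
    print("Received message:", message.data)
    message.ack()

streaming_pull_future = subscriber.subscribe(subscription_name,
                                             callback=callback)
streaming_pull_future.result()  # blocks; cancel() or Ctrl+C to stop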
Example no. 29
#!/usr/bin/env python

from tweepy import API

from authenticate import authenticate
from config_loader import load_config
from unfollow import unfollow_stale_friends

auth_config: dict = load_config('auth_config.yml')
config: dict = load_config('config.yml')

api: API = authenticate(consumer_key=auth_config['consumer_key'],
                        consumer_secret=auth_config['consumer_secret'],
                        access_key=auth_config['access_key'],
                        access_secret=auth_config['access_secret'])

unfollow_stale_friends(days=config['days'], api=api)
Example no. 30
import yaml
import csv
from modules import geometry
import config_loader

config, input_spec, coords, dm = config_loader.load_config()


def compute_distance_matrix(coords):
    """build distance matrix from coords"""

    d_mat = []
    for c1 in coords:
        # get distance between current point and all other points
        d_vect = []
        for c2 in coords:
            d = int(geometry.get_distance_from_deg(c1, c2))
            d_vect.append(d)
        d_mat.append(d_vect)

    return d_mat


def compute_distance_matrix_wrapper():
    return compute_distance_matrix(coords)
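
# The double loop in compute_distance_matrix computes each pair twice;
# distances are symmetric with a zero diagonal, so half the geometry calls
# can be skipped. A sketch of the same matrix built that way:
def compute_distance_matrix_symmetric(coords):
    n = len(coords)
    d_mat = [[0] * n for _ in range(n)]
    for i in range(n):
        for j in range(i + 1, n):
            d = int(geometry.get_distance_from_deg(coords[i], coords[j]))
            d_mat[i][j] = d_mat[j][i] = d
    return d_mat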


def compute_random_points(coords):
    """set origin as coords centroid. compute distances relative to origin. update depot coords with random points."""

    new_coords = []
Example no. 31
app.config['TEMPLATES_AUTO_RELOAD'] = True
app.debug = True

LOG_FILENAME = 'app_access_logs.log'

app.logger.setLevel(logging.INFO) # use the native logger of flask

handler = handlers.RotatingFileHandler(
    LOG_FILENAME,
    maxBytes=1024 * 1024 * 100,
    backupCount=20
    )

app.logger.addHandler(handler)

elastic_config = load_config('elastic-config.yml')
dao = ElasticDao(elastic_config)


@app.route("/")
def hello():
    message = "This is front page"
    return render_template("index.html", message=message)


@app.route("/cv")
def cv():
    response = dao.docs_sorted_by_date(innertype='commercial')
    commercial_projects = response['hits']['hits']
    response = dao.docs_sorted_by_date(innertype='private')
    private_projects = response['hits']['hits']