def main(argv):
    """
    Expects command line input: 
        python generate-traffic.py config_file.txt my_ip pkts_per_min
    """
    # Parse command line args
    config_file = argv[1]
    my_ip = argv[2]
    pkts_per_min = int(argv[3])

    # Time to wait between sending packets to satisfy pkts_per_min arg
    time_to_wait = (1.0/pkts_per_min) * 60.0
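    # e.g. pkts_per_min = 120 -> 60/120 = 0.5 seconds between packets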

    # Parse the config file into dictionary
    config_dict = util.parse_config(config_file)

    # Get a list of destination nodes
    dests = config_dict["dests"]
    dests.remove(my_ip)

    # Make UDP socket
    udpPort = 44000
    try:
        udpSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    except socket.error as msg:
        print("Failed to create socket with error: {}".format(msg))
        sys.exit()
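
This example assumes util.parse_config returns a dict whose "dests" entry is a list of destination IPs. A minimal sketch of such a helper, assuming a simple "key = value1,value2" text format (the project's actual util module may differ):

# Hypothetical sketch only -- not the project's real util.parse_config.
def parse_config(path):
    """Parse lines of the form 'key = value1,value2' into a dict of lists."""
    config = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            key, _, value = line.partition('=')
            config[key.strip()] = [v.strip() for v in value.split(',')]
    return config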
Example #2
def main():

    parser = argparse.ArgumentParser()
    arguments = [
        ("preprocess", preprocess_dataset,
         "Preprocess samples - cleaning/filtering of invalid data."),
        ("split", split_dataset,
         "Split dataset in separate folds for training/validation/testing."),
        ("pretrain", prepare_embeddings,
         "Precompute input representations from unlabeled/training data."),
        ("prepare_input", prepare_input,
         "Convert raw inputs to numpy compatible data types."),
        ("train", train, "Train currently selected model."),
        ("test", test, "Run available model on evaluation data.")
        # ("analyse", analyse_dataset),                 # WIP
        # ("extract_embeddings", extract_embeddings),   # WIP
    ]

    for arg, _, description in arguments:
        parser.add_argument('--{}'.format(arg),
                            action='store_true',
                            help=description)

    params = parser.parse_args()
    args = parse_config("config.json")

    setup_logging(args)
    set_random_seed(args)

    for arg, fun, _ in arguments:
        if hasattr(params, arg) and getattr(params, arg):
            logging.info("Performing {} operation..".format(arg))
            fun(args)
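
A quick, self-contained illustration of the dispatch mechanism above (flag names taken from the example, everything else assumed): each selected stage runs in the order it appears in the arguments list, not in the order the flags are given on the command line.

# Minimal sketch of the same flag-to-function dispatch, runnable on its own:
import argparse

parser = argparse.ArgumentParser()
for name in ("preprocess", "train"):
    parser.add_argument('--{}'.format(name), action='store_true')

params = parser.parse_args(['--train', '--preprocess'])
print(params)  # Namespace(preprocess=True, train=True)
# Iterating over the original `arguments` list would still run preprocess before train.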
Example #3
def parse_kiosk_conf():
    kiosk_conf=util.parse_config(ini_file, 'UNIBOX')
    if type(kiosk_conf) is dict:
        extern_conf_file = kiosk_conf['ubkiosk_dir'] + 'Kiosk.conf'
        extern_conf = util.parse_config(extern_conf_file, '*')

        if type(extern_conf) is dict:
            kiosk_conf.update(extern_conf)
        else:
            kiosk_conf['kioskid']=0
            kiosk_conf['ownerid']=0
            log.error(kiosk_conf['kiosk_conf_file']+' config file error')
    else:
        return {}

    return kiosk_conf
Example #4
def reduce_ops(config_path: str,
               enable_type_reduction: bool = False,
               use_cuda: bool = True):
    '''
    Reduce op kernel implementations.
    :param config_path: Path to configuration file that specifies the ops to include
    :param enable_type_reduction: Whether per operator type reduction is enabled
    :param use_cuda: Whether to reduce op kernels for the CUDA provider
    '''
    required_ops, op_type_usage_manager, globally_allowed_types = parse_config(
        config_path, enable_type_reduction)

    if globally_allowed_types is not None:
        globally_allowed_types = _validated_globally_allowed_types(
            globally_allowed_types)

    _process_provider_registrations(ort_root, use_cuda, required_ops,
                                    op_type_usage_manager,
                                    globally_allowed_types)

    if op_type_usage_manager is not None:
        type_control_cpp_code = op_type_usage_manager.get_cpp_entries()
    elif globally_allowed_types is not None:
        type_control_cpp_code = [
            "ORT_SPECIFY_OP_KERNEL_GLOBAL_ALLOWED_TYPES({});".format(", ".join(
                sorted(globally_allowed_types)))
        ]
    else:
        type_control_cpp_code = []

    _insert_type_control_cpp_code(ort_root, type_control_cpp_code)
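
The configuration file read by parse_config here lists the required operators per domain and opset. The exact grammar is defined by the onnxruntime tooling; the lines below are only an assumed illustration of its shape:

# Assumed shape of the ops config consumed by parse_config (illustrative, not authoritative):
example_ops_config = """\
ai.onnx;12;Add,Cast,Concat,Squeeze
ai.onnx.ml;1;LabelEncoder
"""
# With enable_type_reduction=True, parse_config additionally returns per-operator
# type usage information (op_type_usage_manager above).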
Example #5
def reduce_ops(config_path: str,
               build_dir: str,
               enable_type_reduction: bool = False,
               use_cuda: bool = True):
    '''
    Reduce op kernel implementations.
    :param config_path: Path to configuration file that specifies the ops to include
    :param build_dir: Path to the build directory. The op reduction files will be generated under the build directory.
    :param enable_type_reduction: Whether per operator type reduction is enabled
    :param use_cuda: Whether to reduce op kernels for the CUDA provider
    '''
    build_dir = Path(build_dir).resolve()
    build_dir.mkdir(parents=True, exist_ok=True)

    required_ops, op_type_impl_filter = parse_config(config_path,
                                                     enable_type_reduction)

    # delete any existing generated files first
    op_reduction_root = _get_op_reduction_file_path(ORT_ROOT, build_dir)
    if op_reduction_root.is_dir():
        log.info(
            f"Deleting existing op reduction file root directory: {op_reduction_root}"
        )
        shutil.rmtree(op_reduction_root)

    _generate_provider_registrations(ORT_ROOT, build_dir, use_cuda,
                                     required_ops, op_type_impl_filter)

    type_control_cpp_code = op_type_impl_filter.get_cpp_entries(
    ) if op_type_impl_filter is not None else []
    _generate_type_control_overrides(ORT_ROOT, build_dir,
                                     type_control_cpp_code)
Example #6
def parse_kiosk_conf():
    kiosk_conf = util.parse_config(ini_file, 'UNIBOX')
    if type(kiosk_conf) is dict:
        extern_conf_file = kiosk_conf['ubkiosk_dir'] + 'Kiosk.conf'
        extern_conf = util.parse_config(extern_conf_file, '*')

        if type(extern_conf) is dict:
            kiosk_conf.update(extern_conf)
        else:
            kiosk_conf['kioskid'] = 0
            kiosk_conf['ownerid'] = 0
            log.error(kiosk_conf['kiosk_conf_file'] + ' config file error')
    else:
        return {}

    return kiosk_conf
Example #7
    def _load(self):
        config_filename = "config.ini"
        self.config = util.parse_config(config_filename)

        #Set up logging
        self.logger = util.logger(self.config['loglevel'], self.config['logfile'])
        self.logger.info("Loaded config")
Example #8
def exclude_unused_ops_and_types(config_path, enable_type_reduction=False, use_cuda=True):
    required_ops, op_type_usage_manager = parse_config(config_path, enable_type_reduction)

    registration_files = op_registration_utils.get_kernel_registration_files(ort_root, use_cuda)

    _exclude_unused_ops_and_types_in_registrations(required_ops, op_type_usage_manager, registration_files)

    _generate_required_types_cpp_code(ort_root, op_type_usage_manager)
Example #9
    def __init__(self, target_list):
        """Index builder"""

        self.config = util.parse_config('core.cfg')
        self.context = self.__update_context(target_list)
        tfile = util.view_mapper.get('index')
        tlookup = TemplateLookup(directories = ['.'],
                                 output_encoding='utf-8',
                                 encoding_errors='replace')
        self.template = Template(filename = os.path.join('design', tfile),
                                 lookup = tlookup)
Example #10
def main(opt):
    opt = parse_config(opt, TEST)
    pl_logger = logging.getLogger("lightning")
    pl_logger.propagate = False

    # passing `valid_ds` will overwrite `ds`
    if opt[VALID_DATA][INPUT]:
        console.log(
            '[[ WARN ]] Found `valid_ds` in arguments. The value of `ds` will be overwritten by `valid_ds`.'
        )
        opt[DATA] = opt[VALID_DATA]

    ModelClass = parse_model_class(opt[RUNTIME][MODELNAME])

    ckpt = opt[CHECKPOINT_PATH]
    assert ckpt
    model = ModelClass.load_from_checkpoint(ckpt, opt=opt)
    model.opt[IMG_DIRPATH] = model.build_test_res_dir()
    console.log(f'Loading model from: {ckpt}')

    transform = parseAugmentation(opt)
    if opt[AUGMENTATION][CROP]:
        console.log(
            f'[yellow]WARN: You are testing the model but aug.crop is {opt[AUGMENTATION][CROP]}. Ignore the crop? (Y/n) [/yellow]'
        )
        res = input()
        if res == 'n':
            console.log('Testing with cropped data...')
        else:
            console.log('Ignore and set transform=None...')
            transform = torchvision.transforms.ToTensor()

    ds = ImagesDataset(opt, ds_type=DATA, transform=transform)
    dataloader = torch.utils.data.DataLoader(
        ds,
        batch_size=1,
        shuffle=False,
        num_workers=opt[DATALOADER_NUM_WORKER])
    trainer = Trainer(gpus=opt[GPU],
                      distributed_backend='dp',
                      precision=opt[RUNTIME_PRECISION])

    # test.
    beg = time.time()
    trainer.test(model, dataloader)
    console.log(
        f'[ TIMER ] Total time usage: {time.time() - beg}, #Dataset sample num: {len(ds)}'
    )

    console.log('[ PATH ] The results are in :')
    console.log(model.opt[IMG_DIRPATH])
Example #11
def reduce_ops(config_path: str, enable_type_reduction: bool = False, use_cuda: bool = True):
    '''
    Reduce op kernel implementations.
    :param config_path: Path to configuration file that specifies the ops to include
    :param enable_type_reduction: Whether per operator type reduction is enabled
    :param use_cuda: Whether to reduce op kernels for the CUDA provider
    '''
    required_ops, op_type_impl_filter = parse_config(config_path, enable_type_reduction)

    _process_provider_registrations(ort_root, use_cuda, required_ops, op_type_impl_filter)

    if enable_type_reduction:
        type_control_cpp_code = op_type_impl_filter.get_cpp_entries() if op_type_impl_filter is not None else []
        _insert_type_control_cpp_code(ort_root, type_control_cpp_code)
Example #12
    def _load(self):
        config_filename = "config.ini"
        self.config = util.parse_config(config_filename)

        #Set up logging
        self.logger = util.logger(self.config['loglevel'], self.config['logfile'])
        self.logger.info("Loaded config")

        #Read file and store header and content
        with open(self.name, 'r') as f:
            self.raw_header, self.raw_content = f.read().split('---')
        self.logger.info("Read raw header and content")

        self.header = util.parse_header(self.raw_header)
        self.logger.info("Parsed header into a dict")
Example #13
def update_tabsint_plugins(input):
    """ Updates just the tabsint specific plugins in the project """
    ut.log.info("[BUILD]: Updating tabsint plugins")
    config = ut.parse_config()

    if input.tabsint_plugins_src:
        add_tabsint_plugins(config["plugins"], cordova=False, docs=False)
        return

    # remove the tabsint_plugins folder from www
    rm_tabsint_plugins()
    rm_tabsint_plugin_dependencies(config["plugins"])

    add_tabsint_plugins(config["plugins"])
    add_tabsint_plugin_dependencies(config["plugins"])
Example #14
    def _load(self):
        config_filename = "config.ini"
        self.config = util.parse_config(config_filename)

        #Set up logging
        self.logger = util.logger(self.config['loglevel'],
                                  self.config['logfile'])
        self.logger.info("Loaded config")

        #Read file and store header and content
        with open(self.name, 'r') as f:
            self.raw_header, self.raw_content = f.read().split('---')
        self.logger.info("Read raw header and content")

        self.header = util.parse_header(self.raw_header)
        self.logger.info("Parsed header into a dict")
Example #15
    def __init__(self, name, target_list, rss = False):
        """Index builder"""

        self.config = util.parse_config()
        self.name = name
        self.rss = rss
        self.tagprefix = 'tags'
        self.context = self.__update_context(target_list)
        if not rss:
            tfile = util.view_mapper.get('index')
        else:
            tfile = util.view_mapper.get('rss')
        tlookup = TemplateLookup(directories = ['.'],
                                 output_encoding='utf-8',
                                 encoding_errors='replace')
        self.template = Template(filename = os.path.join(self.config.get('designdir', 'design'), tfile),
                                 lookup = tlookup)
Example #16
def get_app_version():
    # generate the version number automatically
    ver=util.parse_config(ini_file, 'UNIBOX')

    if ver and ver['version'] != '':
        return ver['version']
    else:
        try:
            git_tags=os.popen('git tag').read().strip('\n')

            if git_tags:
                # get last tag
                lastest_ver=git_tags.split('\n')[-1]
                # save lastest_ver into extern file
                util.update_config(ini_file, {'version': lastest_ver}, 'UNIBOX')
                return lastest_ver
        except Exception as e:
            # fake initial ver
            return 'v0.0.1'
Example #17
def get_app_version():
    # generate the version number automatically
    ver = util.parse_config(ini_file, 'UNIBOX')

    if ver and ver['version'] != '':
        return ver['version']
    else:
        try:
            git_tags = os.popen('git tag').read().strip('\n')

            if git_tags:
                # get last tag
                lastest_ver = git_tags.split('\n')[-1]
                # save lastest_ver into extern file
                util.update_config(ini_file, {'version': lastest_ver},
                                   'UNIBOX')
                return lastest_ver
        except Exception as e:
            # fake initial ver
            return 'v0.0.1'
Example #18
    def __init__(self, inpath):
        """Read the infile and populate the object"""

        # First parse the config and extract data from infile
        self.inpath = inpath
        self.config = util.parse_config()
        with open(inpath) as infh:
            raw_header, self.content = infh.read().split('\n' + self.config.get('separator', '---') + '\n', 1)

        # Parse the header and populate the context with this
        # information
        self.context = self.__update_context(util.parse_header(raw_header))

        # Get a template ready to write
        tfile = util.view_mapper.get(self.context.get('view', 'single'))
        tlookup = TemplateLookup(directories = ['.'],
                                 output_encoding='utf-8',
                                 encoding_errors='replace')
        self.template = Template(filename = os.path.join(self.config.get('designdir', 'design'), tfile),
                                 lookup = tlookup)
Example #19
def reduce_ops(
    config_path: str,
    build_dir: str,
    enable_type_reduction: bool,
    use_cuda: bool,
    is_extended_minimal_build_or_higher: bool,
):
    """
    Reduce op kernel implementations.
    :param config_path: Path to configuration file that specifies the ops to include
    :param build_dir: Path to the build directory. The op reduction files will be generated under the build directory.
    :param enable_type_reduction: Whether per operator type reduction is enabled
    :param use_cuda: Whether to reduce op kernels for the CUDA provider
    :param is_extended_minimal_build_or_higher: Whether this build has at least the features of an extended minimal
                                                build enabled.
    """
    build_dir = Path(build_dir).resolve()
    build_dir.mkdir(parents=True, exist_ok=True)

    required_ops, op_type_impl_filter = parse_config(config_path,
                                                     enable_type_reduction)
    if is_extended_minimal_build_or_higher:
        required_ops, op_type_impl_filter = _adapt_filters_for_extended_minimal_build(
            required_ops, op_type_impl_filter)

    # delete any existing generated files first
    op_reduction_root = _get_op_reduction_root(build_dir)
    if op_reduction_root.is_dir():
        log.info(
            f"Deleting existing op reduction file root directory: {op_reduction_root}"
        )
        shutil.rmtree(op_reduction_root)

    _generate_provider_registrations(ORT_ROOT, build_dir, use_cuda,
                                     required_ops, op_type_impl_filter)

    type_control_cpp_code = op_type_impl_filter.get_cpp_entries(
    ) if op_type_impl_filter is not None else []
    _generate_type_control_overrides(ORT_ROOT, build_dir,
                                     type_control_cpp_code)
Example #20
def main(args):
    """
    Expects command line input: python traffic-simulator.py config_file.txt my_ip
    """
    config_filename = str(args[1])
    my_ip = str(args[2])

    # First, parse the config file
    config_dict = util.parse_config(config_filename)

    # Instantiate this traffic light
    traffic_light = TrafficLight(config_dict, my_ip)

    dir_to_mac_dict = util.match_MAC_to_direction(traffic_light.router, config_dict)

    # Make separate process to listen for packets
    packet_listener = multiprocessing.Process(target=packet_sniff, \
                    args=(dir_to_mac_dict, traffic_light.north_queue, \
                    traffic_light.east_queue, traffic_light.south_queue, \
                    traffic_light.west_queue))

    packet_listener.start()
    traffic_light.start()
Example #21
def main(config):
    opt = parse_config(config, TRAIN)

    # logging
    console.log('Running config:', opt, log_locals=False)
    pl_logger = logging.getLogger("lightning")
    pl_logger.propagate = False

    # init model:
    ModelClass = parse_model_class(opt[RUNTIME][MODELNAME])
    if opt[CHECKPOINT_PATH]:
        model = ModelClass.load_from_checkpoint(opt[CHECKPOINT_PATH], opt=opt)
        console.log(f'Loading model from: {opt[CHECKPOINT_PATH]}')
    else:
        model = ModelClass(opt)

    # Loading data:
    transform = parseAugmentation(opt)
    dataset = ImagesDataset(opt, ds_type=DATA, transform=transform)
    console.log('Finish loading data.')
    sample_input_batch = dataset[0][INPUT].unsqueeze(0)

    # only support one input argument in model.forward
    dynamic_ax = {'input': {2: 'H', 3: 'W'}, 'output': {2: 'H', 3: 'W'}}
    fpath = f"../onnx/{opt[NAME]}.onnx"
    model.to_onnx(
        fpath,
        sample_input_batch,
        export_params=True,
        verbose=True,
        do_constant_folding=True,
        input_names=['input'],  # the model's input names
        output_names=['output'],  # the model's output names
        opset_version=12,
        dynamic_axes=dynamic_ax)
    console.log(f'[[ DONE ]] ONNX file {fpath} exported.')
Example #22
def check_tabsint_plugins(input):
    """ Checks locally installed cordova plugins against the required list in plugins.json and tabsintConfig.json"""
    ut.log.info("[BUILD]: Checking tabsint plugins")

    # if tabsint plugins directory is not there, then run the full update plugins script
    if not os.path.isdir("../www/tabsint_plugins"):
        ut.log.info("[BUILD]: No tabsint_plugins directory found")
        update_tabsint_plugins(input)
    else:
        config = ut.parse_config()
        tp_dir = set(
            next(os.walk("../www/tabsint_plugins"))[1]
        )  # list the directory contents of tabsint_plugins, only dirs, not files
        tp = set(config["plugins"].keys())

        missing_pl = list(tp - tp_dir)
        extra_pl = list(tp_dir - tp)

        if len(missing_pl) > 0:
            ut.log.info("[BUILD]: Missing the following tabsint plugins: {0}".format(missing_pl))
            update_tabsint_plugins(input)
        elif len(extra_pl) > 0:
            ut.log.info("[BUILD]: Installed tabsint plugins not found in config: {0}".format(extra_pl))
            update_tabsint_plugins(input)
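
The directory check above only compares plugin names against config["plugins"].keys(); a parsed config of roughly this shape (plugin names hypothetical) is all it needs:

# Hypothetical parsed config -- only the keys of "plugins" matter to the set comparison:
config = {"plugins": {"tabsint-plugin-example": {"version": "1.0.0"},
                      "another-plugin": {"version": "0.2.1"}}}
tp = set(config["plugins"].keys())  # {'tabsint-plugin-example', 'another-plugin'}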
Example #23
 def test_parse_config(self):
     config = util.parse_config('core.cfg')
     assert config.get('basedir', None) == "/home/artagnon/dev/rejourn"
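
The assertion above only requires that core.cfg yields a basedir entry; the file syntax itself is not shown, so the sketch below is an assumption:

# Assumed core.cfg contents behind the test above (syntax is a guess):
sample_core_cfg = "basedir = /home/artagnon/dev/rejourn\n"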
Example #24
def main():

    #Parsing level from parameters and parsing main config:

    level_name = None
    world_index = 0
    world = World(WORLDS[world_index])

    user_supplied_level = False

    parse_config()

    getlevel = False
    
    Variables.vdict["devmode"] = False

    if len(sys.argv) > 1:
        for arg in sys.argv:
          if getlevel:
            try:
              level_name = arg
              user_supplied_level = True
              end_trigger = END_NEXT_LEVEL
              menu_choice = MENU_QUIT
            except:
              error_message("Incorrect command line parameters")
              level_name = None
          elif arg == "-l":
            getlevel = True
          elif arg == "-dev":
            Variables.vdict["devmode"] = True
            Variables.vdict["verbose"] = True            
          elif arg == "-v":
            Variables.vdict["verbose"] = True

    #Initializing pygame and screen

    pygame.init()
    screen = pygame.display.set_mode((SCREEN_WIDTH,SCREEN_HEIGHT))
    caption = "Which way is up?"
    if (Variables.vdict["devmode"]):
      caption = caption + " - developer mode"
    pygame.display.set_caption(caption)

    apply_fullscreen_setting(screen)

    if (pygame.joystick.get_count() > 0):
      joystick = pygame.joystick.Joystick(0)
      joystick.init()
    else:
      joystick = None

    score = Score(0)

    done = False

    if not user_supplied_level:
      if (Variables.vdict["unlocked" + WORLDS[0]] == 0): # Nothing unlocked, go straight to the game
        end_trigger = END_NEXT_LEVEL
        menu_choice = MENU_QUIT
        level_name = world.get_level()
      else:                                      # Go to the menu first
        end_trigger = END_MENU
        menu_choice = 0

    bgscreen = None

    #Menu and level changing loop, actual game code is in game.py:

    while not done:
      if end_trigger == END_NEXT_LEVEL:
        if user_supplied_level:
          end_trigger = game.run(screen, level_name, world.index, score, joystick)
          if end_trigger == END_NEXT_LEVEL:
            user_supplied_level = False
            end_trigger = END_WIN
        else:
          end_trigger = game.run(screen, level_name, world.index, score, joystick)
          if end_trigger == END_NEXT_LEVEL:
            if world.is_next_level():
              level_name = world.get_level()
            else:
              end_trigger = END_WIN
          elif end_trigger == END_QUIT:
            display_bg("quit", screen)
            end_trigger = END_MENU
            bgscreen = screen.copy()
      if end_trigger == END_LOSE:
        display_bg("lose", screen)
        end_trigger = END_MENU
        menu_choice = world.index - 1
        bgscreen = screen.copy()
      elif end_trigger == END_WIN:
        display_bg("victory", screen)
        end_trigger = END_MENU
        menu_choice = 0
        bgscreen = screen.copy()
      elif end_trigger == END_QUIT or end_trigger == END_HARD_QUIT:
        done = True
      elif end_trigger == END_MENU:
        prev_score = score.score
        prev_time = score.time
        prev_levels = score.levels
        score = Score(0)
        if prev_score != 0:
          menu = Mainmenu(screen, prev_score, world, bgscreen, prev_time, prev_levels)
        else:
          menu = Mainmenu(screen, None, world, bgscreen)
        menu_choice = menu.run(menu_choice)
        if menu_choice == MENU_QUIT:
          end_trigger = END_QUIT
        elif menu_choice == MENU_SOUND:
          Variables.vdict["sound"] = not Variables.vdict["sound"]
          end_trigger = END_MENU
        elif menu_choice == MENU_DIALOGUE:
          Variables.vdict["dialogue"] = not Variables.vdict["dialogue"]
          end_trigger = END_MENU
        elif menu_choice == MENU_FULLSCREEN:
          Variables.vdict["fullscreen"] = not Variables.vdict["fullscreen"]
          end_trigger = END_MENU
          apply_fullscreen_setting(screen)
        elif menu_choice == MENU_WORLD:
          world_index += 1
          if world_index >= len(WORLDS):
            world_index = 0
          world = World(WORLDS[world_index])
          end_trigger = END_MENU
        else:
          level_name = world.get_level(menu_choice)
          end_trigger = END_NEXT_LEVEL

    write_config()
    write_log()

    return
Example #25
 def test_parse_config(self):
     config = util.parse_config()
     assert config.get("indir", None)
     assert config.get("outdir", None)
Example #26
def main():
    global w_init, w_menu

    w_init = sg.Window(title="Modular Macro Menu", layout=gen_layout_init())

    while True:
        event, val = w_init.Read()

        if event == sg.WIN_CLOSED or event == "Cancel":
            return 0

        elif event == "btn_load":
            w_init["fname_error"].Update(visible=False)

            if not val["fname"]:
                w_init["fname_error"].Update(value="No config selected.")
                w_init["fname_error"].Update(visible=True)

            else:

                try:
                    config = util.parse_config(val["fname"])
                    assert config
                    break

                except:
                    w_init["fname_error"].Update(
                        value="{} was not loaded correctly.".format(
                            os.path.basename(val["fname"])))
                    w_init["fname_error"].Update(visible=True)

    menu_params = config.pop("MenuParams")

    hotkeys = {}

    loaded_macros = []

    for k, v in config.items():
        try:
            x = macro.Macro(**v)

            if x.hotkey not in hotkeys.keys():
                hotkeys[x.hotkey] = x.run_macro
                loaded_macros.append(x)

            else:
                print(
                    "Failed to load Macro '{}'. Reason: WARNING::Duplicate Hotkey {} found!"
                    .format(k, x.hotkey))

        except Exception as e:
            print("Failed to load Macro '{}'. Reason: {}".format(k, str(e)))

    config["MenuParams"] = menu_params

    w_menu = sg.Window(title="Modular Macro Menu",
                       layout=gen_layout_menu(config, loaded_macros),
                       text_justification="center",
                       element_justification="center")

    t = threading.Thread(target=util.establish_keyboard_hook, args=(hotkeys, ))
    t.daemon = True
    t.start()

    w_init.close()

    while True:
        event, val = w_menu.Read()

        if event == sg.WIN_CLOSED or event == "Cancel":
            return 0

        elif event == "event_trigger":
            macro_name = w_menu["event_trigger"].metadata
            try:
                status = macro_name.split(":::")[-1]
                macro_name = ":::".join(macro_name.split(":::")[:-1])

                if status == "running":
                    w_menu[macro_name +
                           ":::status"].Update(background_color=GREEN)
                    w_menu[macro_name + ":::status"].Update(value="Active")
                else:
                    w_menu[macro_name +
                           ":::status"].Update(background_color=RED)
                    w_menu[macro_name + ":::status"].Update(value="Inactive")

            except AttributeError as e:
                print(e)
                pass

        elif event == "Exit":
            return 0

        elif event == "Go Back":
            return -1

        elif event == "Open Docs":
            util.open_help()

        elif event == "Toggle Freeze":
            macro._FROZEN = not macro._FROZEN

            if macro._FROZEN:
                w_menu["menu_title"].Update(value="FROZEN")
                w_menu["menu_title"].Update(text_color=LIGHT_BLUE)

            else:
                w_menu["menu_title"].Update(
                    value=config["MenuParams"]["menu_name"])
                w_menu["menu_title"].Update(text_color=BLACK)

        else:
            elem = event.split(":::")[-1]
            macro_name = ":::".join(event.split(":::")[:-1])

            if elem and macro_name:
                if elem in ["name", "status", "hotkey", "desc", "vs"]:

                    w_menu[macro_name + ":::hotkey"].Update(visible=True)
                    w_menu[macro_name + ":::desc"].Update(visible=True)

                    cur_state = w_menu[macro_name + ":::desc"].metadata

                    if cur_state:
                        w_menu[macro_name + ":::hotkey"].hide_row()
                        w_menu[macro_name + ":::desc"].hide_row()
                    else:
                        w_menu[macro_name + ":::hotkey"].unhide_row()
                        w_menu[macro_name + ":::desc"].unhide_row()

                    cur_state = not cur_state
                    w_menu[macro_name + ":::desc"].metadata = cur_state
Example #27
def main():

    #Parsing level from parameters:

    level = 0

    user_supplied_level = False

    parse_config()

    if len(sys.argv) > 1:
        for arg in sys.argv:
          if level == -1:
            try:
              level = int(arg)
              user_supplied_level = True
            except:
              print "Error: incorrect level number"
              level = 0
          elif arg == "-l":
            level = -1

    #Initializing pygame and screen

    pygame.init()
    print "Which way is up starting up."
    screen = pygame.display.set_mode((SCREEN_WIDTH,SCREEN_HEIGHT))
    pygame.display.set_caption("Which way is up?")

    if (pygame.joystick.get_count() > 0):
      joystick = pygame.joystick.Joystick(0)
      joystick.init()
    else:
      joystick = None

    score = Score(0)

    done = False

    if (Variables.vdict["unlocked"] == 0) or user_supplied_level: # Go straight to the game
      end_trigger = END_NEXT_LEVEL
      menu_choice = -2
    else:                                      # Go to the menu first
      end_trigger = END_MENU
      menu_choice = 0

    #Menu and level changing loop, actual game code is in game.py:

    while not done:
      if end_trigger == END_NEXT_LEVEL:
        if level < TOTAL_LEVELS or user_supplied_level:
          end_trigger = game.run(screen, level, score, joystick)
          level += 1
          if end_trigger == END_QUIT:
            end_trigger = END_MENU
        else:
          end_trigger = END_WIN
      if end_trigger == END_LOSE:
        display_bg("lose", screen)
        end_trigger = END_MENU
      elif end_trigger == END_WIN:
        display_bg("victory", screen)
        end_trigger = END_MENU
      elif end_trigger == END_QUIT or end_trigger == END_HARD_QUIT:
        done = True
      elif end_trigger == END_MENU:
        prev_score = score.score
        score = Score(0)
        if prev_score != 0:
          menu = Menu(screen, prev_score)
        else:
          menu = Menu(screen)
        menu_choice = menu.run(menu_choice)
        if menu_choice == MENU_QUIT:
          end_trigger = END_QUIT
        elif menu_choice == MENU_SOUND:
          Variables.vdict["sound"] = not Variables.vdict["sound"]
          end_trigger = END_MENU
        elif menu_choice == MENU_DIALOGUE:
          Variables.vdict["dialogue"] = not Variables.vdict["dialogue"]
          end_trigger = END_MENU
        else:
          level = menu_choice
          end_trigger = END_NEXT_LEVEL
      else:
        if user_supplied_level:
          user_supplied_level = False
          end_trigger = END_WIN
        else:
          if Variables.vdict["unlocked"] < level:
            Variables.vdict["unlocked"] = level
            print "Unlocked level " + str(Variables.vdict["unlocked"])

    write_config()

    return
Example #28
def main(config):
    opt = parse_config(config, TRAIN)

    # update debug config (if in debug mode)
    if opt[DEBUG]:
        debug_config = {
            DATALOADER_NUM_WORKER: 0,
            NAME: DEBUG,
            LOG_EVERY: 1,
            VALID_EVERY: 1,
            NUM_EPOCH: 2
        }
        opt.update(debug_config)
        console.log(
            '[red]>>>> [[ WARN ]] You are in debug mode, update configs. <<<<[/red]'
        )
        console.log(debug_config)
        console.log(
            '[red]>>>> [[ WARN ]] You are in debug mode, update configs. <<<<[/red]'
        )

    # logging
    console.log('Running config:', opt, log_locals=False)
    opt[LOG_DIRPATH], opt[IMG_DIRPATH] = configLogging(TRAIN, opt)
    pl_logger = logging.getLogger("lightning")
    pl_logger.propagate = False

    # init model:
    ModelClass = parse_model_class(opt[RUNTIME][MODELNAME])
    ckpt = opt[CHECKPOINT_PATH]
    if ckpt:
        model = ModelClass.load_from_checkpoint(ckpt, opt=opt)
        console.log(f'Loading model from: {ckpt}')
    else:
        model = ModelClass(opt)

    # Loading data:
    transform = parseAugmentation(opt)
    training_dataset = ImagesDataset(opt, ds_type=DATA, transform=transform)
    trainloader = torch.utils.data.DataLoader(
        training_dataset,
        batch_size=opt[BATCHSIZE],
        shuffle=True,
        num_workers=opt[DATALOADER_NUM_WORKER],
        drop_last=True)

    valid_loader = None
    if opt[VALID_DATA] and opt[VALID_DATA][INPUT]:
        valid_dataset = ImagesDataset(opt,
                                      ds_type=VALID_DATA,
                                      transform=transform)
        valid_loader = torch.utils.data.DataLoader(
            valid_dataset,
            batch_size=opt[VALID_BATCHSIZE],
            shuffle=False,
            num_workers=opt[DATALOADER_NUM_WORKER])
    console.log('Finish loading data.')

    # callbacks:
    checkpoint_callback = ModelCheckpoint(
        dirpath=opt[LOG_DIRPATH],
        save_last=True,
        save_weights_only=True,
        filename='{epoch:}-{step}',
        save_top_k=10,  # keep the 10 best checkpoints
        monitor=opt[CHECKPOINT_MONITOR],
    )

    # trainer logger:
    mylogger = WandbLogger(name=opt[NAME],
                           project='vielab',
                           notes=None if not opt[COMMENT] else opt[COMMENT],
                           tags=[opt[RUNTIME][MODELNAME], opt[DATA][NAME]],
                           save_dir=ROOT_PATH)

    # init trainer:
    trainer = pl.Trainer(
        gpus=opt[GPU],
        distributed_backend='dp',
        # auto_select_gpus=True,
        max_epochs=opt[NUM_EPOCH],
        logger=mylogger,
        callbacks=[checkpoint_callback],
        precision=opt[RUNTIME_PRECISION],
        check_val_every_n_epoch=opt[VALID_EVERY])

    # training loop
    global OPT
    OPT = copy.deepcopy(opt)
    trainer.fit(model, trainloader, val_dataloaders=valid_loader)
Example #29
    attack_vectors = extract_attack_vectors(
        attack)  # Extract the attack vectors from the attack
    summary = compute_summary(
        attack_vectors
    )  # Compute summary statistics of the attack (e.g. average bps / Bpp / pps)
    # Generate fingerprint
    fingerprint = Fingerprint(target=target,
                              summary=summary,
                              attack_vectors=attack_vectors,
                              show_target=args.show_target)

    if args.summary:  # If the user wants a preview, show the fingerprint in the terminal
        LOGGER.info(str(fingerprint))

    args.output.mkdir(parents=True, exist_ok=True)
    fingerprint.write_to_file(
        args.output /
        (fingerprint.checksum[:16] + '.json'))  # write the fingerprint to disk

    if args.ddosdb:  # Upload the fingerprint to a specified DDoS-DB instance
        fingerprint.upload_to_ddosdb(**parse_config(args.config),
                                     noverify=args.noverify)
    if args.misp:  # Upload the fingerprint to a specified MISP instance
        conf = parse_config(args.config, misp=True)
        misp_instance = MispInstance(host=conf['host'],
                                     token=conf['token'],
                                     protocol=conf['protocol'],
                                     verify_tls=not args.noverify)
        if misp_instance.misp is not None:
            fingerprint.upload_to_misp(misp_instance)
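
Both uploads above pull their credentials from parse_config(args.config): the DDoS-DB call expands the parsed dict as keyword arguments, and the MISP call reads host, token and protocol. A sketch of the kind of file that could back it, with section and key names assumed rather than taken from the tool's documentation:

# Hypothetical layout of args.config consumed by parse_config() above:
sample_upload_config = """\
[ddosdb]
host = https://ddosdb.example.org
token = <api-token>

[misp]
host = https://misp.example.org
token = <api-token>
protocol = https
"""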