Example #1
def main():
    # Read command line arguments
    args = parse_arguments()

    # Setup paths to data
    args = setup_environ(args)

    # Saving configuration to output directory
    write_config(vars(args))

    # Locate and read datum
    datum = read_datum(args)

    if args.pca_to:
        print(f'PCAing to {args.pca_to}')
        datum = run_pca(args, datum)

    # Processing significant electrodes or individual subjects
    if args.sig_elec_file:
        process_sig_electrodes(args, datum)
    else:
        electrode_info = process_subjects(args)
        this_is_where_you_perform_regression(args, electrode_info, datum)

    return
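
The write_config helper called above is not shown in this example; a minimal sketch of what it might look like, assuming JSON output and a hypothetical output_dir argument (neither is confirmed by the source):

import json
import os

def write_config(config_dict, output_dir="results"):  # name and default are assumptions
    # Persist the parsed command-line arguments so the run can be reproduced later.
    os.makedirs(output_dir, exist_ok=True)
    with open(os.path.join(output_dir, "config.json"), "w") as f:
        json.dump(config_dict, f, indent=2, default=str)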
Example #2
def make_landing_patterns(options):
    """Makes .cfg file with all landing patterns for Kramax AutoPilot."""
    print 'Landing patterns are generating'
    landing_patterns = {}

    @_for_all_runways
    def _(loc, runway_num, gs_pt, loc_pt):
        hdg = geometry.heading(gs_pt, loc_pt)
        name = 'Landing {} {:02}'.format(loc.name, int(round(hdg / 10)))
        description = 'Plan for landing to the {} with heading {}°'.format(
            loc.name, int(round(hdg)),
        )
        waypoints = flightplan.make_landing_pattern(gs_pt, loc_pt)
        landing_patterns[name] = [
            ('name', name),
            ('description', description),
            ('planet', 'Kerbin'),
            ('WayPoints', [('WayPoint', waypoint) for waypoint in waypoints]),
        ]

    plans_list = [('FlightPlan', landing_patterns[name]) for name in sorted(landing_patterns)]
    if options.verbose > 1:
        print 'Writing file KerbinSideRunwaysLandingPatterns.cfg'
    with open('KerbinSideRunwaysLandingPatterns.cfg', 'w') as out:
        out.write(CFG_FILE_HEADER)
        utils.write_config(out, _make_kramax_patch(plans_list))
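
Here utils.write_config receives an open file handle and a nested list of (key, value) pairs; its implementation is not part of this example. A sketch of a writer compatible with that structure (purely an assumption about the real helper):

def write_config(out, entries, indent=0):
    # Scalars become "key = value" lines; a list value becomes a "key { ... }" block,
    # which covers the nested FlightPlan/WayPoints structure built above.
    pad = '\t' * indent
    for key, value in entries:
        if isinstance(value, list):
            out.write('{0}{1}\n{0}{{\n'.format(pad, key))
            write_config(out, value, indent + 1)
            out.write('{0}}}\n'.format(pad))
        else:
            out.write('{0}{1} = {2}\n'.format(pad, key, value))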
Example #3
async def language(bot, channel, author, message, server, o_message):
    """

    :param DiscordBot.DiscordBot bot:
    :param discord.Channel channel:
    :param discord.Member author:
    :param discord.Message.content message:
    :param discord.Server server:
    :param discord.Message o_message:
    """
    modrole = discord.utils.get(server.roles, name=bot.modrole_name)
    adminrole = discord.utils.get(server.roles, name=bot.adminrole_name)
    pattern = re.compile(r'^.language (en|fr)')

    if modrole in author.roles or adminrole in author.roles:
        try:
            inputlocale = pattern.findall(message.strip())[0]
            if inputlocale == "":
                inputlocale = "en"
            bot.language = inputlocale

            # Writing the updated value in config file
            write_config("language", inputlocale)
            load_language(inputlocale)

            await bot.client.add_reaction(o_message, '👍')
        except IndexError:
            await bot.say(channel, _('The language is not available.\nAvailable languages: fr, en'))
    else:
        await bot.say(channel, _('You need to be moderator or administrator to do this.'))
Example #4
async def setup(configpath='./config/client.json'):
    config = load_config(configpath)
    async with aiohttp.ClientSession() as http_client:
        async with http_client.get(urljoin(config['server'],
                                           '/setup')) as response:
            newconfig = await response.json(content_type=None)
        if system() == 'Windows':
            plat = 'win'
        else:
            plat = 'nix'
        for filename in newconfig['files']:
            # Get hash
            sha1 = getsha1(filename)
            # Send hash in request
            j = {'os': plat, 'filename': filename, 'hash': sha1}
            async with http_client.request('GET',
                                           urljoin(config['server'],
                                                   '/updates'),
                                           json=j) as response:
                if response.status == 200:
                    with open(str(response.url).split('/')[-1], 'wb') as f:
                        f.write(await response.read())

        del newconfig['files']
        write_config(newconfig, configpath)
        config = newconfig
        async with http_client.post(urljoin(config['server'], '/setup')) as __:
            pass
Example #5
async def language(bot, channel, author, message, server, o_message):
    """

    :param DiscordBot.DiscordBot bot:
    :param discord.Channel channel:
    :param discord.Member author:
    :param discord.Message.content message:
    :param discord.Server server:
    :param discord.Message o_message:
    """
    modrole = discord.utils.get(server.roles, name=bot.modrole_name)
    adminrole = discord.utils.get(server.roles, name=bot.adminrole_name)
    pattern = re.compile(r'^.language (en|fr)')

    if modrole in author.roles or adminrole in author.roles:
        try:
            inputlocale = pattern.findall(message.strip())[0]
            if inputlocale == "":
                inputlocale = "en"
            bot.language = inputlocale

            # Writing the updated value in config file
            write_config("language", inputlocale)
            load_language(inputlocale)

            await bot.client.add_reaction(o_message, '👍')
        except IndexError:
            await bot.say(
                channel,
                _('The language is not available.\nAvailable languages: fr, en'
                  ))
    else:
        await bot.say(
            channel,
            _('You need to be moderator or administrator to do this.'))
Example #6
 def on(self, msg):
     self.msg = msg
     self.lcd.text('Alert', 1)
     self.lcd.text(self.msg, 2)
     self.bz.on()
     config['ALARM_ON'] = True
     write_config()
Example #7
def update_analysis_config(analysis_config, raw_data_dir, bed_file):
    """Method updates analysis config with missing paths.

    :param analysis_config: Path configuration file used in the analysis.
    :type analysis_config: str

    :param raw_data_dir: Full path to raw data directory.
    :type raw_data_dir: str

    :param bed_file: Full path to kit bed file.
    :type bed_file: str
    """
    logger.info(f"Replacing entries in analysis config file:\n"
                f"- bed file:{bed_file}\n"
                f"- raw data dir: {raw_data_dir}")
    # Load config
    config = load_config(analysis_config)

    # Modify bed file path
    target_coverage_report_dict = config['step_config']['ngs_mapping'][
        'target_coverage_report']
    target_coverage_report_dict['path_target_interval_list_mapping'][0][
        'path'] = bed_file
    config['step_config']['ngs_mapping'][
        'target_coverage_report'] = target_coverage_report_dict

    # Modify raw data dir path
    config['data_sets']['mundlos_limb']['search_paths'] = [raw_data_dir]

    # Replace
    write_config(analysis_config, config)
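
load_config and write_config are not shown in this example; given the nested step_config/data_sets structure being edited, a plausible sketch is a YAML round-trip (the file format is an assumption):

import yaml

def load_config(path):
    with open(path) as f:
        return yaml.safe_load(f)

def write_config(path, config):
    # Overwrite the analysis config in place with the patched dictionary.
    with open(path, "w") as f:
        yaml.safe_dump(config, f, default_flow_style=False)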
Example #8
    def _run_daemon(self,
                    options={},
                    silent=False,
                    options_only={},
                    overwrite={}):
        '''Spawn an osquery daemon process'''
        global ARGS, TEMP_DIR, CONFIG
        config = copy.deepcopy(CONFIG)
        if len(options_only.keys()) > 0:
            # Create a temporary config.
            config["options"]["config_path"] = os.path.join(
                TEMP_DIR, "config-%d.json" % (random.randint(1000, 9999)))
        for option in options.keys():
            config["options"][option] = options[option]
        flags = ["--%s=%s" % (k, v) for k, v in config["options"].items()]
        for option in options_only.keys():
            config["options"][option] = options_only[option]
        for key in overwrite:
            config[key] = overwrite[key]
        if len(options_only.keys()) > 0:
            # Write the temporary config.
            utils.write_config(config)
        binary = getLatestOsqueryBinary('osqueryd')

        daemon = ProcRunner("daemon", binary, flags, silent=silent)
        daemon.options = config["options"]
        self.generators.append(daemon)
        return daemon
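
utils.write_config in this test harness is called both with a bare config and, elsewhere, with an explicit path keyword; a sketch consistent with those call sites, assuming JSON on disk and a config_path fallback that the source does not confirm:

import json

def write_config(config, path=None):
    # Fall back to the path the daemon will be told to read its config from.
    path = path or config["options"]["config_path"]
    with open(path, "w") as f:
        json.dump(config, f)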
Example #9
    def _run_daemon(self,
                    options={},
                    silent=False,
                    options_only={},
                    overwrite={}):
        '''Spawn an osquery daemon process'''
        global ARGS, CONFIG_NAME, CONFIG
        config = copy.deepcopy(CONFIG)
        config["options"]["database_path"] += str(random.randint(1000, 9999))
        config["options"]["extensions_socket"] += str(
            random.randint(1000, 9999))
        for option in options.keys():
            config["options"][option] = options[option]
        flags = ["--%s=%s" % (k, v) for k, v in config["options"].items()]
        for option in options_only.keys():
            config["options"][option] = options_only[option]
        for key in overwrite:
            config[key] = overwrite[key]
        utils.write_config(config)
        binary = getLatestOsqueryBinary('osqueryd')

        daemon = ProcRunner("daemon", binary, flags, silent=silent)
        daemon.options = config["options"]
        self.generators.append(daemon)
        return daemon
Example #10
    def _run_daemon(self,
                    options={},
                    silent=False,
                    options_only={},
                    overwrite={}):
        '''Spawn an osquery daemon process'''
        global ARGS, CONFIG_NAME, CONFIG
        config = copy.deepcopy(CONFIG)
        config["options"]["database_path"] += str(random.randint(1000, 9999))
        config["options"]["extensions_socket"] += str(
            random.randint(1000, 9999))
        for option in options.keys():
            config["options"][option] = options[option]
        flags = ["--%s=%s" % (k, v) for k, v in config["options"].items()]
        for option in options_only.keys():
            config["options"][option] = options_only[option]
        for key in overwrite:
            config[key] = overwrite[key]
        utils.write_config(config)
        binary = os.path.join(ARGS.build, "osquery", "osqueryd")

        daemon = ProcRunner("daemon", binary, flags, silent=silent)
        daemon.options = config["options"]
        self.generators.append(daemon)
        return daemon
Example #11
def change_start_date(data):
    message = ""
    try:
        config = read_config(config_path)
    except FileNotFoundError:
        message = f"There was a problem. The configuration file `{config_path}`, could not be found or read."
        logger.exception("", exc_info=True)
    except Exception as error:
        message = f"There was a problem with configuration file: `{config_path}`"
        logger.exception("", exc_info=True)
    else:
        new_start_date = data["text"].split(" ")[4]
        try:
            new_date = date.fromisoformat(new_start_date)
            config["start_date"] = new_start_date
            new_config = toml.dumps(config)
            write_config(new_config, config_path)
            message = f"Date changed to {new_start_date}"
            logger.info(f"Start date changed to {new_start_date}")
        except ValueError as error:
            logger.info(f"User wrote this date as input: {new_start_date}")
            logger.info(f"User wrote this date with error: {error}")
            message = f"Format should be yyyy-mm-dd, i.e. 2019-01-03"
    response = slack_client.chat_postMessage(channel="#general", text=message)
    try:
        assert response["ok"]
    except AssertionError:
        logger.exception("", exc_info=True)
        logger.debug(f"{response}")
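
read_config and write_config are not included with the Slack-bot examples; since the caller serializes with toml.dumps before writing, a minimal sketch could be:

import toml

def read_config(path):
    # Parse the TOML file into a dict; a missing file surfaces as FileNotFoundError,
    # which the caller above handles.
    with open(path) as f:
        return toml.load(f)

def write_config(config_text, path):
    # The caller passes an already-serialized TOML string, so just write it out.
    with open(path, "w") as f:
        f.write(config_text)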
Example #12
 def download_update_files(self, file_name, file_url):
     """
     下载完成在本地改名为-new-xxx
     """
     print("Downloading:{}".format(file_name))
     response = urequests.get(file_url)
     if '.json' in file_name:
         update_profiles = response.json()
         if file_name == 'config.json':
             # If it is config.json, update it in place
             local_profiles = read_config()
             update_profiles["USER_CONF"] = local_profiles["USER_CONF"]
             update_profiles["Ctrl_plan"] = local_profiles["Ctrl_plan"]
             update_profiles["MQTT_CONF"] = local_profiles["MQTT_CONF"]
             update_profiles["Electricity_times"] = local_profiles[
                 "Electricity_times"]
             write_config(update_profiles)
         else:
             with open('-new-' + file_name, 'w') as f:
                 json.dump(update_profiles, f)
     elif '.mpy' in file_name:
         with open('-new-' + file_name, 'wb') as f:  # binary mode for compiled .mpy files
             f.write(response.content)
     else:
         with open('-new-' + file_name, 'w') as f:
             f.write(response.text)
     return response
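
The MicroPython-side read_config/write_config helpers are not shown; a sketch under the assumption that the profiles live in config.json and that ujson is available:

try:
    import ujson as json  # MicroPython
except ImportError:
    import json

CONFIG_FILE = 'config.json'  # assumed to match the file_name checked above

def read_config():
    with open(CONFIG_FILE) as f:
        return json.load(f)

def write_config(profiles):
    with open(CONFIG_FILE, 'w') as f:
        json.dump(profiles, f)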
Example #13
def change_end_date(data):
    message = ""
    try:
        config = read_config(config_path)
    except FileNotFoundError:
        message = f"There was a problem. The configuration file `{config_path}`, could not be found or read."
        logger.exception("", exc_info=True)
    except Exception:
        message = f"There was a problem with configuration file: `{config_path}`"
        logger.exception("", exc_info=True)
    else:
        new_end_date = data["text"].split(" ")[4]
        try:
            new_date = date.fromisoformat(new_end_date)
            config["end_date"] = new_end_date
            new_config = toml.dumps(config)
            write_config(new_config, config_path)
            message = f"Date changed to {new_end_date}"
        # need to write a better error message for this (date is out of range or wrong formatting)
        except ValueError:
            logger.info(f"User entered {new_end_date} for the end date.")
            logger.exception("", exc_info=True)
            message = "Format should be yyyy-mm-dd, e.g. 2019-02-03"
    response = slack_client.chat_postMessage(channel="#general", text=message)
    try:
        assert response["ok"]
    except AssertionError:
        logger.exception("", exc_info=True)
        logger.debug(f"{response}")
Example #14
    def _run_daemon(self, options=None, silent=False, options_only=None,
            overwrite=None):
        '''Spawn an osquery daemon process'''
        if options is None:
            options = {}
        if options_only is None:
            options_only = {}
        if overwrite is None:
            overwrite = {}
        global ARGS, CONFIG_NAME, CONFIG
        config = copy.deepcopy(CONFIG)
        config["options"]["database_path"] += str(random.randint(1000, 9999))
        config["options"]["extensions_socket"] += str(random.randint(1000, 9999))
        for option in options.keys():
            config["options"][option] = options[option]
        flags = ["--{0!s}={1!s}".format(k, v) for k, v in config["options"].items()]
        for option in options_only.keys():
            config["options"][option] = options_only[option]
        for key in overwrite:
            config[key] = overwrite[key]
        utils.write_config(config)
        binary = os.path.join(ARGS.build, "osquery", "osqueryd")

        daemon = ProcRunner("daemon", binary, flags, silent=silent)
        daemon.options = config["options"]
        self.generators.append(daemon)
        return daemon
Example #15
 def off(self):
     self.bz.off()
     self.msg = None
     self.lcd.clear()
     self.lcd.text(config['MOTD'], 1)
     config['ALARM_ON'] = False
     write_config()
Example #16
def make_landing_patterns(options):
    """Makes .cfg file with all landing patterns for Kramax AutoPilot."""
    print 'Landing patterns are generating'
    landing_patterns = {}

    @_for_all_runways
    def _(loc, runway_num, gs_pt, loc_pt):
        hdg = geometry.heading(gs_pt, loc_pt)
        name = 'Landing {} {:02}'.format(loc.name, int(round(hdg / 10)))
        description = 'Plan for landing to the {} with heading {}°'.format(
            loc.name,
            int(round(hdg)),
        )
        waypoints = flightplan.make_landing_pattern(gs_pt, loc_pt)
        landing_patterns[name] = [
            ('name', name),
            ('description', description),
            ('planet', 'Kerbin'),
            ('WayPoints', [('WayPoint', waypoint) for waypoint in waypoints]),
        ]

    plans_list = [('FlightPlan', landing_patterns[name])
                  for name in sorted(landing_patterns)]
    if options.verbose > 1:
        print 'Writing file KerbinSideRunwaysLandingPatterns.cfg'
    with open('KerbinSideRunwaysLandingPatterns.cfg', 'w') as out:
        out.write(CFG_FILE_HEADER)
        utils.write_config(out, _make_kramax_patch(plans_list))
Example #17
async def add_token(message: Message, token: str) -> Optional[str]:
    if not check_access(message.from_id):
        return
    config = read_config()
    config['tokens'].append(token)
    write_config(config)
    return "Токен добавлен"
Example #18
async def add_account(message: Message, login: str,
                      password: str) -> Optional[str]:
    if not check_access(message.from_id):
        return
    config = read_config()
    config['log_pass'].append({'login': login, 'password': password})
    write_config(config)
    return "Логин и пароль добавлен"
Example #19
def network_ready(network):
    logging.info(
        "Openzwave network is ready with %d nodes (%d are sleeping). All nodes are queried, the network is fully functional."
        % (
            network.nodes_count,
            utils.get_sleeping_nodes_count(),
        ))
    utils.write_config()
    save_network_state(network.state)
Example #20
def display_errors():
    try:
        config = get_config()
        if config["ERRORS"]["json_error"]:
            sg.Popup(config["ERRORS"]["json_error"],title="Error")
            config["ERRORS"]["json_error"] = ''
            write_config(config)
    except Exception as e:
        print(e)
Example #21
 def authenticate(self):
     if config['KEYPAD_CODE'] == self.code:
         self.disarm()
         config['ALARM_ON'] = False
         write_config()
     else:
         self.lcd.text('Incorrect!', 2)
         sleep(1)
         self.clear()
Example #22
async def wrapper(message: Message, user_id: int, **kwargs):
    config = read_config()
    if user_id not in config['admin_ids']:
        return "Не списке админов"
    config['admin_ids'].remove(user_id)
    write_config(config)

    new_admin = (await message.api.users.get(user_ids=user_id))[0]

    return f"Пользователь [id{new_admin.id}|{new_admin.first_name} {new_admin.last_name}] теперь не админ"
Example #23
 def save_status(self, device, algo, current_round, position, offset):
     conf = utils.read_config('resume')
     serial = utils.get_partition_info(device)[1]
     conf[serial] = {
         'algo': algo,
         'round': current_round,
         'position': position,
         'offset': offset
     }
     utils.write_config('resume', conf)
Example #24
def make_flight_plans(options):
    """Makes .cfg file with flight plans for Kramax AutoPilot."""
    print 'Flight plans are generating'
    locations_dict = {loc.name: loc for loc in LOCATIONS}
    flight_plans = {}
    distances = []
    for route, contract in ROUTES.iteritems():
        from_loc = locations_dict[route[0]]
        to_loc = locations_dict[route[1]]
        contract.set_locations(from_loc, to_loc)
        if not contract.plane_allowed:
            continue
        name = '{} -> {}'.format(from_loc.name, to_loc.name)
        description = 'Plan for {} flight from the {} to the {}.'.format(
            contract.get_flight_type(),
            from_loc.name,
            to_loc.name,
        )
        waypoints, beacon_distances = flightplan.make_route_waypoints(
            from_loc,
            to_loc,
            contract.flight_level,
            contract.beacons,
        )
        distances.append({
            'name': name,
            'type': contract.get_flight_type(),
            'straight': utils.loc_distance(from_loc, to_loc),
            'max': max(beacon_distances),
            'sum': sum(beacon_distances),
        })
        flight_plans[name] = [
            ('name', name),
            ('description', description),
            ('planet', 'Kerbin'),
            ('WayPoints', [('WayPoint', waypoint) for waypoint in waypoints]),
        ]
    plans_list = [('FlightPlan', flight_plans[name])
                  for name in sorted(flight_plans)]
    if options.verbose > 1:
        print 'Writing file KerbinSideGapFlightPlans.cfg'
    with open('KerbinSideGapFlightPlans.cfg', 'w') as out:
        out.write(CFG_FILE_HEADER)
        utils.write_config(out, _make_kramax_patch(plans_list))

    if options.verbose > 0:
        for info in sorted(distances,
                           key=(lambda info: (info['type'], info['max']))):
            print 'The most distant beacons in {} km, beacons distance is {} km ({:+}% of straight) for {} flight "{}" '.format(
                round(info['max'], 1),
                round(info['sum'], 1),
                round(100 * (info['sum'] / info['straight'] - 1), 1),
                info['type'],
                info['name'],
            )
Example #25
def node_removed(network, node):
	logging.info('A node has been removed from OpenZWave list id:[%s] model:[%s].' % (node.node_id, node.product_name,))
	if node.node_id in globals.not_supported_nodes:
		return
	if network.state >= globals.network.STATE_AWAKED:
		utils.write_config()
		save_node_event(node.node_id, "removed")
	if node.node_id in globals.node_notifications:
		del globals.node_notifications[node.node_id]
	if node.node_id in globals.pending_associations:
		del globals.pending_associations[node.node_id]
Example #26
def node_added(network, node):
	logging.info('A node has been added to OpenZWave list id:[%s] model:[%s].' % (node.node_id, node.product_name,))
	if node.node_id in globals.not_supported_nodes:
		logging.debug('remove fake nodeId: %s' % (node.node_id,))
		node_cleaner = threading.Timer(60.0, network.manager.removeFailedNode, [network.home_id, node.node_id])
		node_cleaner.start()
		return
	node.last_update = time.time()
	if network.state >= globals.network.STATE_AWAKED:
		utils.write_config()
		save_node_event(node.node_id, "added")
Example #27
def train_model(train_config):
    images,labels = image_load.read_12channel_images(train_config['target_path'],train_config['image_resize'])
    kfold_container = image_load.KFoldContainer(images, labels, train_config['Kfold'])
    utils.write_config(train_config,train_config['save_path']+'config.csv')
    confuse_matrix = []
    accuracy = []
    duration = []

    for k in range(train_config['Kfold']):
        train_x, train_y, test_x, test_y = kfold_container.get_fold_k(k)
        train_x,train_y = image_load.augimage(train_x,train_y)

        print('Load Training {n:d} images'.format(n=len(train_x)))
        print('Load Testing {n:d} images'.format(n=len(test_x)))

        train_ds = tf.data.Dataset.from_tensor_slices((train_x,train_y))
        train_ds = train_ds.shuffle(buffer_size=train_config['shuffle_buffer_size']).repeat().batch(train_config['batch_size'])
        test_ds = tf.data.Dataset.from_tensor_slices((test_x,test_y))
        test_ds = test_ds.repeat(1).batch(train_config['batch_size'])

        model = models.model_7_3_12channel(train_config['image_resize'],train_config['l2_factor'])

        model.compile(optimizer=tf.keras.optimizers.Adam(lr=train_config['lr']),
                      loss=tf.keras.losses.SparseCategoricalCrossentropy(),
                      metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])

        save_name = f'{train_config["save_path"]}{train_config["model_name"]}-{k}'

        cbs = [tf.keras.callbacks.EarlyStopping(patience=train_config['early_stop_patience']),
               tf.keras.callbacks.ModelCheckpoint(monitor='val_sparse_categorical_accuracy',filepath=save_name,save_best_only=True,save_weights_only=True,verbose=1),
               utils.HSLRSchedular(train_config['lr'],
                                   watch_value_name=train_config['schedular_watch_name'],
                                   max_reduce_time=train_config['schedular_max_reduce_time'],
                                   reduce_factor=train_config['schedular_reduce_factor'],
                                   restart_factor=train_config['schedular_restart_factor'],
                                   patience=train_config['schedular_patience'],
                                   verbose=0),
               utils.HSTensorboard(log_dir=f'./logs/{save_name}/',embeddings_metadata=test_x)]

        ct = time.time()
        model.fit(train_ds,epochs=train_config['epochs'],steps_per_epoch=train_config['steps_per_epoch'],validation_data=test_ds,callbacks=cbs)
        duration.append(time.time()-ct)

        model.load_weights(save_name)
        logits = model.predict(test_x)
        cm,acc = utils.confusion_matrix(test_y,tf.argmax(logits,axis=1).numpy())
        confuse_matrix.append(cm)
        accuracy.append(acc)
        print(f'finish training. k={k}, accuracy={acc:.2f}')

    sio.savemat(train_config['save_path']+'result.mat',{'cm':np.array(confuse_matrix),
                                                        'accuracy':np.array(accuracy),
                                                        'duration':np.array(duration)})
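
Here utils.write_config(train_config, save_path + 'config.csv') appears to dump the hyper-parameter dict as CSV; its body is not shown, so this is only a sketch of a compatible helper:

import csv

def write_config(config_dict, csv_path):
    # One "key,value" row per hyper-parameter, written next to the saved models.
    with open(csv_path, 'w', newline='') as f:
        writer = csv.writer(f)
        for key, value in config_dict.items():
            writer.writerow([key, value])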
Example #28
    def test_query_packs(self):
        query_pack_path = test_base.CONFIG_DIR + "/test_pack.conf"
        utils.write_config(
            {
                "queries": {
                    "simple_test": {
                        "query": "select * from time",
                        "interval": 60,
                    },
                    "simple_test2": {
                        "query": "select * from time",
                        "interval": 60,
                        "platform": "does_not_exist",
                    }
                }
            },
            path=query_pack_path)

        # Get a daemon process, loaded with the default test configuration.
        # We'll add a config override (overwrite) for the "packs" key.
        # This will point a single pack at the config written above.
        daemon = self._run_daemon({
            "disable_watchdog": True,
        },
                                  overwrite={
                                      "packs": {
                                          "test_pack": query_pack_path
                                      },
                                  })
        self.assertTrue(daemon.isAlive())

        # Introspect into the daemon's query packs.
        client = test_base.EXClient(daemon.options["extensions_socket"])
        test_base.expectTrue(client.try_open)
        self.assertTrue(client.open())
        em = client.getEM()

        # Every query from the pack(s) is added to the packs table.
        def get_packs():
            result = em.query("select * from osquery_packs")
            return len(result.response) == 2

        # Allow the daemon some lag to parse the pack content.
        test_base.expectTrue(get_packs)
        result = em.query("select * from osquery_packs")
        self.assertEqual(len(result.response), 2)

        # Only the applicable queries are added to the schedule.
        # There will be len(pack_queries) - 1 since "simple_test2" is bound
        # to an unknown/non-existing platform.
        result = em.query("select * from osquery_schedule")
        self.assertEqual(len(result.response), 1)
        daemon.kill()
Example #29
    def on_save(self):
        # Write new config
        self.save_general_tab()
        self.save_lock_tab()
        self.save_algorithm_tab()
        utils.write_config('settings', self.conf)
        self.conf = utils.read_config('settings')

        # Reload config
        self.load_general()
        self.load_locked()
        self.load_algorithm()
Example #30
    def test_query_packs(self):
        query_pack_path = test_base.CONFIG_DIR + "/test_pack.conf"
        utils.write_config({
            "queries": {
                "simple_test": {
                    "query": "select * from time",
                    "interval": 60,
                },
                "simple_test2": {
                    "query": "select * from time",
                    "interval": 60,
                    "platform": "does_not_exist",
                }
            }
        }, path=query_pack_path)

        # Get a daemon process, loaded with the default test configuration.
        # We'll add a config override (overwrite) for the "packs" key.
        # This will point a single pack at the config written above.
        daemon = self._run_daemon({
            "disable_watchdog": True,
            },
            overwrite={
            "packs": {
                "test_pack": query_pack_path
            },
        })
        self.assertTrue(daemon.isAlive())

        # Introspect into the daemon's query packs.
        client = test_base.EXClient(daemon.options["extensions_socket"])
        test_base.expectTrue(client.open)
        self.assertTrue(client.open())
        em = client.getEM()

        # Every query from the pack(s) is added to the packs table.
        def get_packs():
            result = em.query("select * from osquery_packs")
            return len(result.response) == 2
        # Allow the daemon some lag to parse the pack content.
        test_base.expectTrue(get_packs)
        result = em.query("select * from osquery_packs")
        self.assertEqual(len(result.response), 2)

        # Only the applicable queries are added to the schedule.
        # There will be len(pack_queries) - 1 since "simple_test2" is bound
        # to an unknown/non-existing platform.
        result = em.query("select * from osquery_schedule")
        self.assertEqual(len(result.response), 1)
        daemon.kill()
Example #31
 def _run_daemon(self, options={}, silent=False, options_only={}):
     '''Spawn an osquery daemon process'''
     global ARGS, CONFIG_NAME, CONFIG
     config = copy.deepcopy(CONFIG)
     for option in options.keys():
         config["options"][option] = options[option]
     for option in options_only.keys():
         config["options"][option] = options_only[option]
     utils.write_config(config)
     binary = os.path.join(ARGS.build, "osquery", "osqueryd")
     flags = ["--%s=%s" % (k, v) for k, v in config["options"].items()]
     daemon = ProcRunner("daemon", binary, flags, silent=silent)
     self.generators.append(daemon)
     return daemon
Example #32
	def save(self):
		tagname = self.ui.lineEdit_5.text()
		if len(tagname) == 0:
			utils.show_message("'Tag' should not be blank !")
			return
		elif '.' in tagname:
			utils.show_message("Please do not use '.' (dot) in tag name !")
			return
		elif '_' in tagname:
			utils.show_message("Please do not use '_' in tag name !")
			return

		if self.assignments not in Process_Settings.param.keys():
			Process_Settings.param[self.assignments] = {}
		Process_Settings.param[self.assignments][str(self.ui.label_9.text())] = str(self.ui.lineEdit_5.text())
		Process_Settings.param[self.assignments][str(self.ui.checkBox_2.text())] = self.ui.checkBox_2.checkState()

		if self.assignments == self.namespace['process_HF']:
			Process_Settings.param[self.assignments][str(self.ui.label.text())] = str(self.ui.lineEdit.text())
			Process_Settings.param[self.assignments][str(self.ui.label_5.text())] = str(self.ui.lineEdit_4.text())
			#Process_Settings.param[self.assignments][str(self.ui.label_2.text())] = str(self.ui.lineEdit_2.text())
			Process_Settings.param[self.assignments][str(self.ui.label_4.text())] = str(self.ui.lineEdit_3.text())
			Process_Settings.param[self.assignments][str(self.ui.label_3.text())] = [self.ui.spinBox.value(), self.ui.spinBox_2.value()]
			Process_Settings.param[self.assignments][str(self.ui.label_6.text())] = self.ui.spinBox_3.value()
			Process_Settings.param[self.assignments][str(self.ui.label_7.text())] = self.ui.spinBox_4.value()
		elif self.assignments == self.namespace['process_FA']:
			Process_Settings.param[self.assignments][str(self.ui.label.text())] = str(self.ui.lineEdit.text())
			Process_Settings.param[self.assignments][str(self.ui.label_5.text())] = str(self.ui.lineEdit_4.text())
			Process_Settings.param[self.assignments][str(self.ui.label_2.text())] = str(self.ui.lineEdit_2.text())
			Process_Settings.param[self.assignments][str(self.ui.label_4.text())] = str(self.ui.lineEdit_3.text())
			Process_Settings.param[self.assignments][str(self.ui.label_3.text())] = [self.ui.spinBox.value(), self.ui.spinBox_2.value()]
		elif self.assignments == self.namespace['process_FAA']:
			Process_Settings.param[self.assignments][str(self.ui.label.text())] = str(self.ui.lineEdit.text())
			Process_Settings.param[self.assignments][str(self.ui.label_5.text())] = str(self.ui.lineEdit_4.text())
			Process_Settings.param[self.assignments][str(self.ui.label_2.text())] = str(self.ui.lineEdit_2.text())
			Process_Settings.param[self.assignments][str(self.ui.label_6.text())] = self.ui.spinBox_3.value()
			#Process_Settings.param[self.assignments][str(self.ui.label_7.text())] = self.ui.spinBox_4.value()
		elif self.assignments == self.namespace['process_AP']:
			Process_Settings.param[self.assignments][str(self.ui.label.text())] = str(self.ui.lineEdit.text())
			Process_Settings.param[self.assignments][str(self.ui.label_5.text())] = str(self.ui.lineEdit_4.text())
			Process_Settings.param[self.assignments][str(self.ui.label_2.text())] = str(self.ui.lineEdit_2.text())
			Process_Settings.param[self.assignments][str(self.ui.label_7.text())] = self.ui.spinBox_4.value()
			Process_Settings.param[self.assignments][str(self.ui.label_8.text())] = self.ui.doubleSpinBox.value()
			Process_Settings.param[self.assignments][str(self.ui.checkBox.text())] = self.ui.checkBox.checkState()

		# save ini
		config_file = self.get_config_path(self.assignments, tagname)
		utils.write_config(config_file, {self.namespace['config_head']:Process_Settings.param[self.assignments]},'w')
		self.close()
Example #33
def run(args=None):
    device = 'cuda' if torch.cuda.is_available() and (not args.no_cuda) else 'cpu'
    num_train, train_loader, test_loader, input_size, input_channel, n_class = get_loaders(args)

    lossFn = nn.CrossEntropyLoss(reduction='none')
    def evalFn(x): return torch.max(x, dim=1)[1]

    ## initialize SpecNet
    dTNet = MyDeepTrunkNet.get_deepTrunk_net(args, device, lossFn, evalFn, input_size, input_channel, n_class)

    ## setup logging and checkpointing
    timestamp = int(time.time())
    model_signature = '%s/%s/%d/%s_%.5f/%d' % (args.dataset, args.exp_name, args.exp_id, args.net, args.train_eps, timestamp)
    model_dir = args.root_dir + 'models_new/%s' % (model_signature)
    args.model_dir = model_dir


    print("Saving model to: %s" % model_dir)
    count_vars(args, dTNet)
    if not os.path.exists(model_dir):
        os.makedirs(model_dir)

    tb_writer = SummaryWriter(model_dir)
    stats = Statistics(len(train_loader), tb_writer, model_dir)
    args_file = os.path.join(model_dir, 'args.json')
    with open(args_file, 'w') as fou:
        json.dump(vars(args), fou, indent=4)
    write_config(args, os.path.join(model_dir, 'run_config.txt'))


    ## main part depending on training mode
    if 'train' in args.train_mode:
        epoch = train_deepTrunk(dTNet, args, device, stats, train_loader, test_loader)
        if args.cert:
            with torch.no_grad():
                cert_deepTrunk_net(dTNet, args, device, test_loader if args.test_set == "test" else train_loader,
                                   stats, log_ind=True, break_on_failure=False, epoch=epoch)
    elif args.train_mode == 'test':
        with torch.no_grad():
            test_deepTrunk_net(dTNet, args, device, test_loader if args.test_set == "test" else train_loader, stats,
                               log_ind=True)
    elif args.train_mode == "cert":
        with torch.no_grad():
            cert_deepTrunk_net(dTNet, args, device, test_loader if args.test_set == "test" else train_loader, stats,
                               log_ind=True, break_on_failure=False)
    else:
        assert False, 'Unknown mode: {}!'.format(args.train_mode)

    exit(0)
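
write_config(args, os.path.join(model_dir, 'run_config.txt')) is called alongside the JSON dump of args; its implementation is not included, so a plain-text sketch is assumed here:

def write_config(args, path):
    # Human-readable companion to args.json: one "key: value" line per argument.
    with open(path, 'w') as f:
        for key, value in sorted(vars(args).items()):
            f.write('{}: {}\n'.format(key, value))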
Example #34
def node_removed(network, node):
    logging.info(
        'A node has been removed from OpenZWave list id:[%s] model:[%s].' % (
            node.node_id,
            node.product_name,
        ))
    if node.node_id in globals.not_supported_nodes:
        return
    if network.state >= globals.network.STATE_AWAKED:
        utils.write_config()
        save_node_event(node.node_id, "removed")
    if node.node_id in globals.node_notifications:
        del globals.node_notifications[node.node_id]
    if node.node_id in globals.pending_associations:
        del globals.pending_associations[node.node_id]
Example #35
def update_config(payload):
    d = json.loads(payload)
    alarm_msg = ''
    for key, value in d.items():
        if key in config and key not in immutable_configs and not key.endswith(
                'PIN'):
            if key == 'ALARM_ON':
                if value == True:
                    hwalert.on(alarm_msg)
                else:
                    print('Alert stopped')
                    hwalert.stop_alert()
            elif key == 'ALARM_MSG':
                alarm_msg = value
            config[key] = value
    write_config()
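
The alarm examples (#6, #15, #21, #35) mutate a shared module-level config dict and then call write_config() with no arguments; a sketch of such a helper, with the path and the stand-in dict being assumptions:

import json

CONFIG_PATH = 'config.json'  # assumed location
config = {'ALARM_ON': False, 'MOTD': 'Ready'}  # stand-in for the shared module-level dict

def write_config():
    # Persist whatever state the handlers above have written into config.
    with open(CONFIG_PATH, 'w') as f:
        json.dump(config, f, indent=2)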
Example #36
def node_added(network, node):
    logging.info(
        'A node has been added to OpenZWave list id:[%s] model:[%s].' % (
            node.node_id,
            node.product_name,
        ))
    if node.node_id in globals.not_supported_nodes:
        logging.debug('remove fake nodeId: %s' % (node.node_id, ))
        node_cleaner = threading.Timer(60.0, network.manager.removeFailedNode,
                                       [network.home_id, node.node_id])
        node_cleaner.start()
        return
    node.last_update = time.time()
    if network.state >= globals.network.STATE_AWAKED:
        utils.write_config()
        save_node_event(node.node_id, "added")
Example #37
def make_flight_plans(options):
    """Makes .cfg file with flight plans for Kramax AutoPilot."""
    print 'Flight plans are generating'
    locations_dict = {loc.name: loc for loc in LOCATIONS}
    flight_plans = {}
    distances = []
    for route, contract in ROUTES.iteritems():
        from_loc = locations_dict[route[0]]
        to_loc = locations_dict[route[1]]
        contract.set_locations(from_loc, to_loc)
        if not contract.plane_allowed:
            continue
        name = '{} -> {}'.format(from_loc.name, to_loc.name)
        description = 'Plan for {} flight from the {} to the {}.'.format(
            contract.get_flight_type(), from_loc.name, to_loc.name,
        )
        waypoints, beacon_distances = flightplan.make_route_waypoints(
            from_loc, to_loc, contract.flight_level, contract.beacons,
        )
        distances.append({
            'name': name,
            'type': contract.get_flight_type(),
            'straight': utils.loc_distance(from_loc, to_loc),
            'max': max(beacon_distances),
            'sum': sum(beacon_distances),
        })
        flight_plans[name] = [
            ('name', name),
            ('description', description),
            ('planet', 'Kerbin'),
            ('WayPoints', [('WayPoint', waypoint) for waypoint in waypoints]),
        ]
    plans_list = [('FlightPlan', flight_plans[name]) for name in sorted(flight_plans)]
    if options.verbose > 1:
        print 'Writing file KerbinSideGapFlightPlans.cfg'
    with open('KerbinSideGapFlightPlans.cfg', 'w') as out:
        out.write(CFG_FILE_HEADER)
        utils.write_config(out, _make_kramax_patch(plans_list))

    if options.verbose > 0:
        for info in sorted(distances, key=(lambda info: (info['type'], info['max']))):
            print 'The most distant beacons in {} km, beacons distance is {} km ({:+}% of straight) for {} flight "{}" '.format(
                round(info['max'], 1),
                round(info['sum'], 1),
                round(100 * (info['sum'] / info['straight'] - 1), 1),
                info['type'], info['name'],
            )
Example #38
def make_locations_runways(options):
    """Makes .rwy file with all runways in NavUtilities format."""
    print 'Runways for locations are generating'
    runways = {}

    @_for_all_runways
    def _(loc, runway_num, gs_pt, loc_pt):
        hdg = geometry.heading(gs_pt, loc_pt)
        hdg_str = '{:02}'.format(int(round(hdg / 10)))
        name = '{} {}'.format(re.sub(r'[^a-zA-Z0-9 ]', '°', loc.name), hdg_str)
        if gs_pt[3] > 0:
            name = '{} gs {}'.format(name, gs_pt[3])
        short_name = '{}{}'.format(
            ''.join(word[0]
                    for word in re.sub(r'([A-Z])', r' \1', loc.name).split()),
            hdg_str,
        )
        runway_key = (loc.name, runway_num)
        runways.setdefault(runway_key, {})[name] = [
            ('body', 'Kerbin'),
            ('ident', name),
            ('shortID', short_name),
            ('hdg', round(hdg, 2)),
            ('altMSL', gs_pt[2]),
            ('gsLatitude', gs_pt[0]),
            ('gsLongitude', gs_pt[1]),
            ('locLatitude', loc_pt[0]),
            ('locLongitude', loc_pt[1]),
            ('outerMarkerDist', 10000),
            ('middleMarkerDist', 2200),
            ('innerMarkerDist', 200),
        ]

    config = []
    for runway_key in sorted(runways):
        loc_runways = runways[runway_key]
        if len(loc_runways) == 2:
            names = loc_runways.keys()
            loc_runways[names[0]].append(('identOfOpposite', names[1]))
            loc_runways[names[1]].append(('identOfOpposite', names[0]))
        config.extend(
            ('Runway', loc_runways[name]) for name in sorted(loc_runways))
    if options.verbose > 1:
        print 'Writing file KerbinSideRunways.rwy'
    with open('KerbinSideRunways.rwy', 'w') as out:
        out.write(CFG_FILE_HEADER)
        utils.write_config(out, config)
Example #39
 def _run_daemon(self, options={}, silent=False):
     """Spawn an osquery daemon process"""
     global ARGS, CONFIG_NAME, CONFIG
     config = copy.deepcopy(CONFIG)
     for option in options.keys():
         config["options"][option] = options[option]
     utils.write_config(config)
     binary = os.path.join(ARGS.build, "osquery", "osqueryd")
     flags = ["--%s=%s" % (k, v) for k, v in config["options"].items()]
     daemon = ProcRunner(
         "daemon",
         binary,
         ["--config_path=%s.conf" % CONFIG_NAME, "--verbose" if ARGS.verbose else ""] + flags,
         silent=silent,
     )
     self.generators.append(daemon)
     return daemon
Example #40
def make_locations_runways(options):
    """Makes .rwy file with all runways in NavUtilities format."""
    print 'Runways for locations are generating'
    runways = {}

    @_for_all_runways
    def _(loc, runway_num, gs_pt, loc_pt):
        hdg = geometry.heading(gs_pt, loc_pt)
        hdg_str = '{:02}'.format(int(round(hdg / 10)))
        name = '{} {}'.format(re.sub(r'[^a-zA-Z0-9 ]', '°', loc.name), hdg_str)
        if gs_pt[3] > 0:
            name = '{} gs {}'.format(name, gs_pt[3])
        short_name = '{}{}'.format(
            ''.join(word[0] for word in re.sub(r'([A-Z])', r' \1', loc.name).split()),
            hdg_str,
        )
        runway_key = (loc.name, runway_num)
        runways.setdefault(runway_key, {})[name] = [
            ('body', 'Kerbin'),
            ('ident', name),
            ('shortID', short_name),
            ('hdg', round(hdg, 2)),
            ('altMSL', gs_pt[2]),
            ('gsLatitude', gs_pt[0]),
            ('gsLongitude', gs_pt[1]),
            ('locLatitude', loc_pt[0]),
            ('locLongitude', loc_pt[1]),
            ('outerMarkerDist', 10000),
            ('middleMarkerDist', 2200),
            ('innerMarkerDist', 200),
        ]

    config = []
    for runway_key in sorted(runways):
        loc_runways = runways[runway_key]
        if len(loc_runways) == 2:
            names = loc_runways.keys()
            loc_runways[names[0]].append(('identOfOpposite', names[1]))
            loc_runways[names[1]].append(('identOfOpposite', names[0]))
        config.extend(('Runway', loc_runways[name]) for name in sorted(loc_runways))
    if options.verbose > 1:
        print 'Writing file KerbinSideRunways.rwy'
    with open('KerbinSideRunways.rwy', 'w') as out:
        out.write(CFG_FILE_HEADER)
        utils.write_config(out, config)
Example #41
    def __init__(self, args, options, config):
        self.config_path = os.path.join(c.CONFIG_DEFAULT_PATH, c.CONFIG_FILE)
        self.config = config

        # Only load the out file from the options when config was called
        # through the actual CLI, rather than when no default config exists
        if options == {}:
            no_configs_found = True
        else:
            no_configs_found = False

            if options.out_file:
                self.config_path = options.out_file

        if no_configs_found:
            self.show_no_config_help()

        out_config = self.ask_questions()
        utils.write_config(self.config_path, out_config)
Example #42
def make_locations_waypoints(options):
    """Makes .cfg file with all waypoints in WaypointManager format."""
    print 'Waypoints for locations are generating'
    waypoints = []
    index = 0
    for loc in LOCATIONS:
        for point_type in ('helipad', 'aircraft_launch', 'aircraft_parking', 'staff_spawn', 'vip_spawn'):
            point = getattr(loc, point_type)
            if point is None or (point_type == 'aircraft_parking' and point == loc.aircraft_launch):
                continue
            index += 1
            waypoints.append(('WAYPOINT', [
                    ('name', '{} > {}'.format(loc.name, point_type)),
                    ('celestialName', 'Kerbin'),
                    ('icon', 'report'),
                    ('index', index),
                ] + utils.point_to_params(point)
            ))
    if options.verbose > 1:
        print 'Writing file CustomWaypoints.cfg'
    with open('CustomWaypoints.cfg', 'w') as out:
        utils.write_config(out, waypoints)
Example #43
def get_oz_config():
	utils.write_config()
	filename = globals.data_folder + "/zwcfg_" + globals.network.home_id_str + ".xml"
	with open(filename, "r") as ins:
		content = ins.read()
	return utils.format_json_result(data=content)
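
In the OpenZWave examples utils.write_config() takes no arguments, and get_oz_config() reads zwcfg_<home_id>.xml immediately afterwards, so a plausible wrapper simply asks OpenZWave to flush that file (an inference, not the source implementation):

def write_config():
    # globals.network is the ZWaveNetwork object the surrounding handlers already use;
    # Manager.writeConfig() dumps the current network state to zwcfg_<home_id>.xml.
    globals.network.manager.writeConfig(globals.network.home_id)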
Example #44
	def get(self):
		try:
			utils.check_apikey(self.get_argument('apikey',''))
			type = self.get_argument('type','')
			node_id = int(self.get_argument('node_id','0'))
			target_id = int(self.get_argument('target_id','0'))
			cc_id = int(self.get_argument('cc_id','0'))
			instance_id = int(self.get_argument('instance_id','0'))
			index = int(self.get_argument('index','0'))
			identical = int(self.get_argument('identical','0'))
			frequency = int(self.get_argument('frequency','0'))
			action = self.get_argument('action','')
			info = self.get_argument('info','')
			utils.check_node_exist(node_id)
			if type == 'action':
				utils.can_execute_command()
				logging.info("node action "+str(action))
				if action in globals.NODE_REST_MAPPING:
					self.write(globals.NODE_REST_MAPPING[action](node_id))
				else:
					self.write(utils.format_json_result())
			elif type == 'info':
				logging.info("node info "+str(info))
				if info in globals.NODE_REST_MAPPING:
					self.write(globals.NODE_REST_MAPPING[info](node_id))
				else:
					self.write(utils.format_json_result())
			elif type == 'refreshClass':
				logging.info('Request values refresh for '+str(node_id)+' on class '+str(cc_id))
				for value_id in globals.network.nodes[node_id].get_values(class_id=cc_id):
					if globals.network.nodes[node_id].values[value_id].id_on_network in globals.pending_configurations:
						del globals.pending_configurations[globals.network.nodes[node_id].values[value_id].id_on_network]
				globals.network.manager.requestAllConfigParams(globals.network.home_id, node_id)
				self.write(utils.format_json_result())
			elif type == 'removeDeviceZWConfig':
				my_node = globals.network.nodes[node_id]
				manufacturer_id = my_node.manufacturer_id
				product_id = my_node.product_id
				product_type = my_node.product_type
				list_to_remove = [node_id]
				if identical != 0:
					for child_id in list(globals.network.nodes):
						node = globals.network.nodes[child_id]
						if child_id != node_id and node.manufacturer_id == manufacturer_id and node.product_id == product_id and node.product_type == product_type:
							list_to_remove.append(child_id)
				globals.network_is_running = False
				globals.network.stop()
				logging.info('ZWave network is now stopped')
				time.sleep(5)
				filename = globals.data_folder + "/zwcfg_" + globals.network.home_id_str + ".xml"
				tree = etree.parse(filename)
				for child_id in list_to_remove:
					logging.info("Remove xml element for node %s" % (child_id,))
					node = tree.find("{http://code.google.com/p/open-zwave/}Node[@id='" + str(child_id) + "']")
					tree.getroot().remove(node)
				working_file = open(filename, "w")
				working_file.write('<?xml version="1.0" encoding="utf-8" ?>\n')
				working_file.writelines(etree.tostring(tree, pretty_print=True))
				working_file.close()
				network_utils.start_network()
				self.write(utils.format_json_result())
			elif type == 'copyConfigurations':
				utils.can_execute_command(0)
				logging.info("copy_configuration from source_id:%s to target_id:%s" % (node_id, target_id,))
				items = 0
				utils.check_node_exist(target_id)
				source = globals.network.nodes[node_id]
				target = globals.network.nodes[target_id]
				if source.manufacturer_id != target.manufacturer_id or source.product_type != target.product_type or source.product_id != target.product_id:
					raise Exception('The two nodes must be with same: manufacturer_id, product_type and product_id')
				for value_id in source.get_values():
					configuration_value = source.values[value_id]
					if configuration_value.genre == 'Config':
						if configuration_value.type == 'Button':
							continue
						if configuration_value.is_write_only:
							continue
						target_value = value_utils.get_value_by_index(target_id, globals.COMMAND_CLASS_CONFIGURATION, 1,configuration_value.index)
						if target_value is not None:
							if configuration_value.type == 'List':
								globals.network.manager.setValue(target_value.value_id, configuration_value.data)
								accepted = True
							else:
								accepted = target.set_config_param(configuration_value.index,configuration_value.data)
							if accepted:
								items += 1
								value_utils.mark_pending_change(target_value, configuration_value.data)
				my_result = items != 0
				self.write(utils.format_json_result())
			elif type == 'refreshData':
				for value_id in globals.network.nodes[node_id].get_values(class_id=cc_id):
					if globals.network.nodes[node_id].values[value_id].instance == instance_id and globals.network.nodes[node_id].values[value_id].index == index:
						globals.network.nodes[node_id].values[value_id].refresh()
						self.write(utils.format_json_result())
						return
				raise Exception('This device does not contain the specified value')
			elif type == 'data':
				logging.debug("get_config for nodeId:%s" % (node_id,))
				config = {}
				for value_id in globals.network.nodes[node_id].values:
					list_values = []
					my_value = globals.network.nodes[node_id].values[value_id]
					if my_value.command_class == cc_id:
						config[globals.network.nodes[node_id].values[value_id].index] = {}
						if my_value.type == "List" and not my_value.is_read_only:
							result_data = globals.network.manager.getValueListSelectionNum(my_value.value_id)
							values = my_value.data_items
							for index_item, value_item in enumerate(values):
								list_values.append(value_item)
								if value_item == my_value.data_as_string:
									result_data = index_item
						elif my_value.type == "Bool" and not my_value.data:
							result_data = 0
						elif my_value.type == "Bool" and my_value.data:
							result_data = 1
						else:
							result_data = my_value.data
						config[my_value.index]['val'] = {'value2': my_value.data, 'value': result_data,'value3': my_value.label, 'value4': sorted(list_values),'updateTime': int(time.time()), 'invalidateTime': 0}
				self.write(utils.format_json_result(data=config))
			elif type == 'setPolling':
				logging.info('set_polling_value for nodeId: '+str(node_id)+' instance: '+str(instance_id)+' cc : '+str(cc_id)+' index : '+str(index)+' at: '+str(frequency))
				for value_id in globals.network.nodes[node_id].get_values(class_id=cc_id):
					if globals.network.nodes[node_id].values[value_id].instance == instance_id:
						my_value = globals.network.nodes[node_id].values[value_id]
						if frequency == 0 and my_value.poll_intensity > 0:
							my_value.disable_poll()
						else:
							if globals.network.nodes[node_id].values[value_id].index == index:
								value_utils.changes_value_polling(frequency, my_value)
							elif my_value.poll_intensity > 0:
									my_value.disable_poll()
				utils.write_config()
				self.write(utils.format_json_result())
			elif type == 'buttonaction':
				logging.info('Button nodeId : '+str(node_id)+' instance: '+str(instance_id)+' cc : '+str(cc_id)+' index : '+str(index)+' : ' +str(action))
				for value_id in globals.network.nodes[node_id].get_values(class_id=cc_id, genre='All', type='All', readonly=False, writeonly='All'):
					if globals.network.nodes[node_id].values[value_id].instance == instance_id and globals.network.nodes[node_id].values[value_id].index == index:
						if action == 'press':
							globals.network.manager.pressButton(globals.network.nodes[node_id].values[value_id].value_id)
						elif action == 'release':
							globals.network.manager.releaseButton(globals.network.nodes[node_id].values[value_id].value_id)
						self.write(utils.format_json_result())
						return
				self.write(utils.format_json_result(success='error', data='Button not found'))
			elif type == 'setRaw':
				slot_id = int(self.get_argument('slot_id','0'))
				value0 = self.get_argument('value0','')
				logging.info("set_user_code2 nodeId:%s slot:%s user code:%s" % (node_id, slot_id, value0,))
				for value_id in globals.network.nodes[node_id].get_values(class_id=globals.COMMAND_CLASS_USER_CODE):
					if globals.network.nodes[node_id].values[value_id].index == slot_id:
						globals.network.nodes[node_id].values[value_id].data = binascii.a2b_hex(value0)
						self.write(utils.format_json_result())
						return
				self.write(utils.format_json_result(success='error', data='Value not found'))
			elif type == 'setconfig':
				size = int(self.get_argument('size','0'))
				value = self.get_argument('value','')
				self.write(utils.format_json_result(data=value_utils.set_config(node_id, index, value, size)))
			elif type == 'setvalue':
				value = self.get_argument('value','')
				self.write(utils.format_json_result(data=commands.send_command_zwave(node_id, cc_id, instance_id, index, value)))
			elif type == 'switchall':
				state = int(self.get_argument('state','0'))
				if state == 0:
					logging.info("SwitchAll Off")
					globals.network.switch_all(False)
				else:
					logging.info("SwitchAll On")
					globals.network.switch_all(True)
				for node_id in globals.network.nodes:
					my_node = globals.network.nodes[node_id]
					if my_node.is_failed:
						continue
					value_ids = my_node.get_switches_all()
					if value_ids is not None and len(value_ids) > 0:
						for value_id in value_ids:
							if my_node.values[value_id].data == "Disabled":
								continue
							elif my_node.values[value_id].data == "On and Off Enabled":
								pass
							if my_node.values[value_id].data == "Off Enabled" and state != 0:
								continue
							if my_node.values[value_id].data == "On Enabled" and state == 0:
								continue
							for switch in my_node.get_switches():
								my_node.values[switch].refresh()
							for dimmer in my_node.get_dimmers():
								my_node.values[dimmer].refresh()
				self.write(utils.format_json_result())
			elif type == 'setDeviceName':
				location = self.get_argument('location','')
				name = self.get_argument('name','')
				is_enable = int(self.get_argument('is_enable','0'))
				logging.info("set_device_name node_id:%s new name ; '%s'. Is enable: %s" % (node_id, name, is_enable,))
				if node_id in globals.disabled_nodes and is_enable:
					globals.disabled_nodes.remove(node_id)
				elif node_id not in globals.disabled_nodes and not is_enable:
					globals.disabled_nodes.append(node_id)
				name = name.encode('utf8')
				name = name.replace('+', ' ')
				globals.network.nodes[node_id].set_field('name', name)
				location = location.encode('utf8')
				location = location.replace('+', ' ')
				globals.network.nodes[node_id].set_field('location', location)
				self.write(utils.format_json_result())
			elif type == 'association':
				group = int(self.get_argument('group','0'))
				self.write(node_utils.add_assoc(node_id, group, target_id, instance_id, action))
			else:
				self.write(utils.format_json_result())
		except Exception as e:
			logging.error('RequestHandler ' + str(e))
			self.write(utils.format_json_result(success="error",data=str(e)))
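The handler above reports its results through utils.format_json_result, which this example does not show. A minimal sketch of what such a helper might look like, assuming it simply serializes a status flag and an optional payload to JSON; the field names and defaults below are illustrative guesses, not the project's actual API:

import json

def format_json_result(success='ok', data=None):
    # Hypothetical helper: package a status flag and optional payload as a JSON
    # string so the request handler can return a uniform response body,
    # e.g. format_json_result(success='error', data='Button not found').
    return json.dumps({'result': success, 'data': data})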
Example #45
0
def network_ready(network):
	logging.info(
		"Openzwave network is ready with %d nodes (%d are sleeping). All nodes are queried, the network is fully functional." % (
		network.nodes_count, utils.get_sleeping_nodes_count(),))
	utils.write_config()
	save_network_state(network.state)
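In this callback utils.write_config() is called with no arguments, presumably to flush the plugin's in-memory settings back to disk once the network is ready. A sketch under that assumption; CONFIG and CONFIG_FILE are stand-ins for whatever the plugin actually uses:

import json

CONFIG = {}                  # stand-in for the plugin's in-memory settings
CONFIG_FILE = 'config.json'  # stand-in path

def write_config():
    # Hypothetical zero-argument variant: persist the current settings as JSON.
    with open(CONFIG_FILE, 'w') as conf_file:
        json.dump(CONFIG, conf_file, indent=4)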
Example #46
0
    def apply_config(self):
        utils.log("Applying settings to {}".format(utils.CONFIG_PATH))
        config = OrderedDict()

        overclock_preset = utils.get_setting('overclock_preset')
        utils.log("Using {} overclock settings".format(overclock_preset))
        if overclock_preset == 'Custom':
            for prop in utils.OVERCLOCK_PRESET_PROPERTIES:
                config[prop] = utils.get_property_setting(prop)
        elif overclock_preset in utils.OVERCLOCK_PRESETS:
            config = OrderedDict(zip(utils.OVERCLOCK_PRESET_PROPERTIES,
                                     utils.OVERCLOCK_PRESETS[overclock_preset]))

        for prop in utils.OTHER_PROPERTIES:
            value = utils.get_property_setting(prop)
            if value is not None:
                config[prop] = value
                
        if ('force_turbo' in config and config['force_turbo'] == 1 and
            'over_voltage' in config and config['over_voltage'] > 0):
            if not xbmcgui.Dialog().yesno("OpenELEC RPi Config WARNING!!",
                                          "Overvolting with dynamic overclock disabled",
                                          "will void your warranty!!",
                                          "Continue, or fix by enabling dynamic overclock?",
                                          "Fix",
                                          "Continue"):
                utils.log("Enabling dynamic overclock") 
                config['force_turbo'] = 0
            else:
                utils.log("Warranty warning was ignored")

        if 'max_usb_current' in config and config['max_usb_current'] == 1:
            if not xbmcgui.Dialog().yesno("OpenELEC RPi Config WARNING!",
                                          "To output 1.2A from the USB ports",
                                          "you will need to use a good 2A power supply.",
                                          "Are you sure you want to set max_usb_current?"):
                config['max_usb_current'] = 0

        updated = False
        if os.path.isfile(utils.CONFIG_PATH):
            with open(utils.CONFIG_PATH, 'r') as f:
                config_txt = f.read()

            config_txt_new = config_txt

            for prop, value in config.iteritems():
                utils.log("==== {} ====".format(prop))
                config_property_re = re.compile(utils.CONFIG_SUB_RE_STR.format(prop), re.MULTILINE)
                match = config_property_re.search(config_txt)
                if match:
                    comment = bool(match.group(1))
                    old_value = match.group(3)
                    if value is None:
                        utils.log("  Commenting out")
                        config_txt_new = config_property_re.sub(utils.comment_out, config_txt_new)
                        updated = True
                    elif comment or str(value) != old_value:
                        utils.log("  Setting to {}".format(value))
                        config_txt_new = config_property_re.sub(partial(utils.replace_value, value),
                                                                config_txt_new)
                        updated = True
                    else:
                        utils.log("  Unchanged ({})".format(value))
                elif value is not None:
                    utils.log("  Appending {}={}".format(prop, value))
                    config_txt_new += utils.property_value_str(prop, value) + '\n'
                    updated = True
        else:
            utils.log("A new {} will be created".format(utils.CONFIG_PATH))
            config_txt_new = utils.add_property_values(config)
            updated = True

        reboot_needed = False
        if updated:
            reboot_needed = True
            with utils.remount():
                try:
                    utils.write_config(config_txt_new)
                except (OSError, IOError) as e:
                    reboot_needed = False
                    utils.write_error(utils.CONFIG_PATH, str(e))
        
        if reboot_needed:
            if utils.restart_countdown("Ready to reboot to apply changes in config.txt"):
                xbmc.restart()
            else:
                utils.log("Cancelled reboot")
        else:
            utils.log("No changes made")
Example #47
0
def new_app(app_name, app_dir):
    app_template = utils.get_template("supervisor.conf")
    app_conf = app_template.render({"app_name": app_name, "app_dir": app_dir})
    dest_config = "%s/%s.conf" % (config.SUPERVISOR_DIR, app_name)
    utils.write_config(dest_config, app_conf)
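In this example write_config takes a destination path and the rendered template text. A sketch of such a two-argument helper, assuming it simply writes the string to the given path:

def write_config(path, contents):
    # Hypothetical two-argument variant: write the rendered supervisor
    # configuration to its destination file.
    with open(path, 'w') as conf_file:
        conf_file.write(contents)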
Example #48
0
def make_routes(options):
    """Makes contract files."""
    print 'Contract files are being generated'
    locations_info = {
        loc.name: {'location': loc, 'incoming': 0, 'outgoing': 0}
        for loc in LOCATIONS
    }
    classes_set = set()
    for route, contract in ROUTES.iteritems():
        # Find and count locations.
        from_loc = locations_info[route[0]]
        to_loc = locations_info[route[1]]
        from_loc['outgoing'] += 1
        to_loc['incoming'] += 1

        # Initialize route and config.
        contract.set_locations(from_loc['location'], to_loc['location'])
        classes_set.add(contract.__class__)
        contract_config = []

        # Add common contract info.
        contract_group = 'KerbinSideGap' + contract.__class__.__name__
        contract_name = ''.join([
            contract.from_loc.alphanum_name,
            contract.to_loc.alphanum_name,
            contract.__class__.__name__,
        ])
        contract_config.extend([
            ('name', contract_name),
            ('group', contract_group),
            ('maxSimultaneous', 1),
            ('targetBody', 'Kerbin'),
            ('prestige', 'Trivial'),
            ('deadline', 3),
        ])
        if hasattr(contract, 'agent'):
            contract_config.append(('agent', contract.agent))

        # Add contract texts.
        flight_title = 'Flight: {} -> {}'.format(contract.from_loc.name, contract.to_loc.name)
        flight_description = contract.get_description()
        flight_generic_description = utils.normalize_flight_description(flight_description)
        flight_synopsis = 'Perform {} flight from the {} to the {}.'.format(
            contract.get_flight_type(), contract.from_loc.name, contract.to_loc.name,
        )
        contract_config.extend([
            ('title', flight_title),
            ('description', flight_description),
        ])
        if flight_generic_description != flight_description:
            contract_config.append(('genericDescription', flight_generic_description))
        contract_config.extend([
            ('synopsis', ' '.join([flight_synopsis] + contract.get_synopsis_notes())),
            ('completedMessage', 'Your flight successfully completed.'),
        ])

        # Add contract reward info.
        advance_funds, reward_funds, reward_reputation, failure_reputation = contract.get_rewards()
        contract_config.extend([
            ('advanceFunds', advance_funds),
            ('failureReputation', failure_reputation),
            ('failureFunds', '{} * Random(0.1, 0.25)'.format(advance_funds)),
            ('rewardReputation', reward_reputation),
            ('rewardFunds', '({} + {}) * Random(1.0, 1.15)'.format(
                reward_funds, contract.refund_amount,
            )),
            ('rewardScience', 0),
        ])

        # Add data nodes.
        contract_config.extend(
            ('DATA', [('type', type), ('hidden', 'true'), (name, definition)])
            for type, name, definition in contract.get_data()
        )

        # Add requirements.
        contract_config.extend(contract.get_requirements())

        # Add behaviours.
        waypoints_config = []
        for wp in contract.get_waypoints():
            attribute_keys = set(el[0] for el in wp)
            point_type = 'RANDOM_WAYPOINT'
            if 'nearIndex' in attribute_keys:
                point_type = 'RANDOM_WAYPOINT_NEAR'
            elif 'latitude' in attribute_keys and 'longitude' in attribute_keys:
                point_type = 'WAYPOINT'
            waypoints_config.append((point_type, wp))
        contract_config.append(('BEHAVIOUR', [
                ('name', 'WaypointGenerator'),
                ('type', 'WaypointGenerator'),
            ] + waypoints_config
        ))
        contract_config.extend(('BEHAVIOUR', beh) for beh in contract.get_additional_behaviours())

        # Add parameters.
        contract_config.extend(contract.get_parameters())

        # Write config.
        if options.verbose > 1:
            print 'Writing file {}.cfg'.format(contract_name)
        with open(contract_name + '.cfg', 'w') as out:
            out.write(CFG_FILE_HEADER)
            utils.write_config(out, [('CONTRACT_TYPE', contract_config)])

    groups_config = [
        ('minVersion', '1.21.0'),
        ('name', 'KerbinSideGapContract'),
        ('displayName', 'Kerbin Side GAP'),
        ('agent', DEFAULT_AGENT),
        ('maxSimultaneous', 8),
    ]
    for contract_class in sorted(classes_set, key=(lambda cls: cls.__name__)):
        group_config = [
            ('minVersion', '1.21.0'),
            ('name', 'KerbinSideGap' + contract_class.__name__),
            ('displayName', 'Perform {} flight'.format(contract_class.get_flight_type())),
            ('agent', getattr(contract_class, 'agent', DEFAULT_AGENT)),
            ('maxSimultaneous', contract_class.max_simultaneous),
        ]
        groups_config.append(('CONTRACT_GROUP', group_config))
    if options.verbose > 1:
        print 'Writing file Groups.cfg'
    with open('Groups.cfg', 'w') as out:
        out.write(CFG_FILE_HEADER)
        utils.write_config(out, [('CONTRACT_GROUP', groups_config)])

    if options.verbose > 0:
        for loc, info in locations_info.iteritems():
            print 'Location "{}": {} incoming, {} outgoing'.format(
                loc, info['incoming'], info['outgoing']
            )