Example #1
0
def generate_dataset(dry_dpath, fx_fpath, output_dpath=None, func=None):
    """Generate a dataset of wet samples.
    Fill a JSON file mapping each dry sample to its wet counterpart.
    """
    if not output_dpath:
        output_dpath = mkrdir()
    elif not pth.__exists(output_dpath):
        pth.__make_dir(output_dpath)

    fx = _read(fx_fpath)

    jsn.init()

    info = dict()
    save_steps = tml.value('json', section='data', subkey='save_steps')

    for idx, dryfpath in enumerate(__list_audio_files(dry_dpath)):
        wet_signal = apply_fx(_read(dryfpath), fx, func)
        wet_fpath = pth.__with_extension(
            rfname(path=output_dpath, prefix='{0}_'.format(idx)), '.wav')
        _save(wet_signal, wet_fpath)

        info[dryfpath] = wet_fpath
        if (idx + 1) % save_steps == 0:
            log.debug("{0} samples processed".format(idx + 1))
            jsn.dump(info)

    jsn.dump(info)
Example #2
0
def _filter(dpath):
    """Filter <dpath> by removing a certain number of files.
    Keep files corresponding to specific instruments and sources, given in the configuration file.
    For now, filtering removes files from the tail.
    """
    log.debug("Filtering \"{0}\"".format(dpath))

    instruments = tml.value('instruments', section='data')
    sources = tml.value('sources', section='data')

    # Resolve predicate functions by name with getattr (safer than eval).
    instr_conds = [getattr(ns, 'is_{0}'.format(instr)) for instr in instruments]
    src_conds = [getattr(ns, 'is_{0}'.format(src)) for src in sources]

    ns.filter_elements(dpath, [instr_conds, src_conds, [ns.is_pitch_63]])

    max_samples = tml.value('max_samples', section='data')
    audio_files = __list_audio_files(dpath)

    if len(audio_files) <= max_samples:
        log.debug("Keeping all remaining files in {0}".format(dpath))
        return

    for afile in audio_files[max_samples:]:
        pth.__remove_file(afile)
Example #3
0
 def train(self, data, labels):
     log.debug("Training model")
     train_data, valid_data = split_valid(data, labels)
     self.model.fit(*train_data,
                    batch_size=self.bsiz,
                    epochs=self.epoc,
                    callbacks=self.cbac,
                    validation_data=valid_data)
Example #4
0
 def write_binary_file(path: Union[str, Path], content, mode=0o0755, symlink: Optional[Union[str, Path]] = None):
     p = Path(path)
     logger.debug(f"Dump to file [{p}]")
     with open(str(p.absolute()), 'wb') as f:
         f.write(content)
     os.chmod(p, mode)
     if symlink:
         os.symlink(p, symlink)
Example #5
0
def _read(fpath):
    """Read content of numpy archive file at <fpath>."""
    if not pth.__is_file(fpath):
        log.critical(
            "Numpy file \"{0}\" not found, cannot read data".format(fpath))

    log.debug("Reading data from \"{0}\"".format(fpath))

    return np.load(fpath)
Example #6
0
 def get_vpn_ip(self, nic: str, lenient=True):
     try:
         logger.log(self.log_lvl, f'Query VPN IPv4 on {nic}...')
         return netifaces.ifaddresses(nic)[netifaces.AF_INET]
     except Exception as err:
         if lenient:
             logger.debug(f'VPN IP not found on {nic}. Error: {err}')
             return None
         raise
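For reference, netifaces.ifaddresses(nic)[netifaces.AF_INET] returns a list of address dicts, so a caller might unwrap the result like this (client and 'tun0' are hypothetical):

addrs = client.get_vpn_ip('tun0')   # e.g. [{'addr': '10.8.0.2', 'netmask': '255.255.255.0'}]
vpn_ip = addrs[0]['addr'] if addrs else None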
Example #7
0
def _dump(fpath, _dict, mode='w', n_indent=4):
    """Dump <_dict> into the json file at <fpath>.
    By default, the mode is set to overwrite previous data,
    and an indentation level is set for readability.
    """
    log.debug("Dumping data into \"{0}\"".format(fpath))

    fjson = pth.__open_file(fpath, _mode=mode)
    json.dump(_dict, fjson, indent=n_indent)
    pth.__close_file(fjson)
Example #8
0
def extract(fpath, dpath):
    """Extract archive at <fpath> into <dpath>."""
    log.debug("Extracting \"{0}\" into \"{1}\"".format(fpath, dpath))

    try:
        unpack_archive(fpath, dpath)
    except ValueError:
        log.critical("\"{0}\" not a valid archive".format(fpath))

    pth.__remove_file(fpath)
Example #9
0
 def decode_base64(value: str, url_safe=False, without_padding=False, lenient=False) -> str:
     # Restore padding only when it is missing: -len(value) % 4 is 0 for aligned input.
     v = value + ("=" * (-len(value) % 4)) if without_padding else value
     try:
         v = base64.urlsafe_b64decode(v) if url_safe else base64.b64decode(v)
         return v.decode(DEFAULT_ENCODING)
     except (TypeError, ValueError, UnicodeError) as err:
         if lenient:
             logger.debug(f'Failed when decoding base64. Value[{value}]. Error[{err}]')
             return value
         raise
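A quick round-trip sketch (assuming DEFAULT_ENCODING is 'utf-8' and a module-level logger):

# Hypothetical round trip: encode without padding, then let decode_base64 restore it.
encoded = base64.urlsafe_b64encode(b'hello').decode().rstrip('=')   # 'aGVsbG8'
assert decode_base64(encoded, url_safe=True, without_padding=True) == 'hello'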
Example #10
0
 def _common_adapt_dnsmasq(self, vpn_service: str):
     identity = self.config.identity
     logger.debug(
         f'Adapt [{identity}] DNS resolver service to be compatible with [dnsmasq] and [{vpn_service}]...'
     )
     FileHelper.mkdirs(self.config.config_dir)
     FileHelper.copy(self.resource_dir.joinpath(f'dnsmasq-{identity}.conf'),
                     self.config.to_fqn_cfg(self.DNSMASQ_TUNED_CFG), True)
     FileHelper.chmod(self.config.to_fqn_cfg(self.DNSMASQ_TUNED_CFG),
                      mode=0o0644)
     return self.config.runtime_resolv
Example #11
0
 def restore_config(self, vpn_service: str, keep_dnsmasq=True):
     if not keep_dnsmasq:
         logger.debug(
             f'Remove dnsmasq vpn hook config [{self._dnsmasq_vpn_hook_cfg}]'
         )
         FileHelper.rm(self._dnsmasq_vpn_hook_cfg)
         logger.debug(
             f'Remove dnsmasq vpn config [{self._dnsmasq_vpn_cfg(vpn_service)}]'
         )
         FileHelper.rm(self._dnsmasq_vpn_cfg(vpn_service))
     if self._resolver:
         self._resolver.restore_config(vpn_service, keep_dnsmasq)
Example #12
0
def parseYamlFile(absoluteFilePath):

    with open(absoluteFilePath) as f:
        try:
            inputConfig = yaml.safe_load(f)
        except yaml.YAMLError as e:
            logger.error(str(e))
            sys.exit(1)

    logger.debug("Parsed config:\n{}".format(json.dumps(inputConfig)))

    return inputConfig
Example #13
0
 def testMysqlSeeder(self):
     logger.info("Initializing mysql integration testing components..")
     ctx = Context(self.conn, self.inputConfig)
     orderInfo, schemaForDatagen = SchemaBuilder(ctx).getSchemaForDataGen()
     logger.debug("Schema for data generation:\n{}".format(json.dumps(schemaForDatagen)))
     logger.debug("Will be worked in order:\n{}".format(json.dumps(orderInfo)))
     writer = Writer(ctx)
     dataGen = DataGen(ctx)
     for results in dataGen.generate(schemaForDatagen, orderInfo):
         logger.info("Writing {} documents into {}..".format(len(results["docs"]), results["table"]))
         writer.doBulkWrite(results["table"], results["docs"])
     logger.info("Finally, Done with it!")
Example #14
0
def write_data():
    _dict = jsn.load()
    if not _dict:
        log.error("{0} is empty".format(tml.value('json', section='data', subkey='fname')))

    data = []
    for key in _dict:
        # Read each wet/dry file pair and convert both arrays to DTYPE.
        data.append(list(map(__convert, map(_read, (_dict[key], key)), repeat(DTYPE))))

    log.debug("{0} couples data/label have been retrieved".format(len(data)))

    npz.write(np.asarray(data))
Example #15
0
def _extract(archive, to, suffix, compression, force_reload, extract_func):
    if isinstance(to, str):
        to = Path(to)
    to.mkdir(parents=True, exist_ok=True)
    # rstrip() strips a character set, not a suffix; slice the suffix off instead.
    filename = archive.name[:-len(suffix)] if archive.name.endswith(suffix) else archive.name
    if (to / filename).exists() and not force_reload:
        logger.debug("Skipping file extraction. Already exists")
    else:
        logger.debug("Extracting %s to %s", filename, to)
        extract_func(archive, to, compression)

    return to / filename
Example #16
0
def _load(fpath):
    """Get content of json file at <fpath>."""
    if not pth.__is_file(fpath):
        log.critical(
            "\"{0}\" doesn\'t exist, can\'t load data from it".format(fpath))

    log.debug("Loading data from \"{0}\"".format(fpath))

    fjson = pth.__open_file(fpath)
    ret = json.load(fjson)
    pth.__close_file(fjson)

    return ret
Example #17
0
    def generate(self, target_label: int):
        logger.info(self.gather_parameters())

        labels: Tensor = next(iter(self._dataloader))[1]
        target_label_vector = torch.tensor(
            [target_label] * labels.shape[0],
            dtype=labels.dtype,
            device=self._device
        )

        # store former classified label
        former_classified_label_vector = torch.tensor(
            [0] * labels.shape[0],
            dtype=labels.dtype,
            device=self._device
        )

        self._model.eval()
        for ep in range(self._epoch):
            # number of images with triggers that inferred as targeted label by models
            attack_success_number = 0
            for inputs, _ in self._dataloader:
                # skip the last batch, owing to inconsistent shape
                if inputs.shape != self._mask.shape:
                    logger.debug("skip last batch")
                    continue

                inputs = inputs.to(self._device)
                self._add_trigger(inputs)
                inputs.requires_grad = True

                self._model.zero_grad()

                outputs = self._model(inputs)
                loss = self._loss_function(outputs, target_label_vector) - \
                    self._loss_function(outputs, former_classified_label_vector)
                loss.backward()

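                # FGSM-style update: step the mask against the sign of the input
                # gradient, then clamp it back into the valid [0, 1] pixel range.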
                self._mask = self._mask - self._lr * inputs.grad.sign()
                self._mask = torch.clamp(self._mask, 0, 1)

                _, former_classified_label_vector = torch.max(outputs, 1)

                attack_success_number += (former_classified_label_vector == target_label_vector).sum().item()

            logger.info(f"epoch: {ep}, attack success number: {attack_success_number}, "
                        f"attack success rate: {attack_success_number / len(self._dataloader.dataset)}")

            if ep % 9 == 0:
                torch.save(self._mask.data, settings.trigger_dir / f"triggers-epoch{ep}")
Example #18
0
def shape(data):
    """Shape <data> to fit input of neural network."""
    log.debug("Shaping data")
    
    if data.ndim != 2:
        log.error("'shape' expects a two-dimensional array: (n_samples, sample_len)")
        return data

    _dtype = 'int{0}'.format(tml.value('bit_depth', section='audio'))
    
    if data.dtype != _dtype:
        log.warning("\'shape\' expects an {0} array".format(_dtype))
        
    data = data.astype('float64')
    for i in range(data.shape[0]):
        data[i] = __pcm2float(data[i].astype(_dtype))

    return data.reshape(*data.shape, 1)    
Example #19
0
def unshape(data):
    """Unshape <data> output of neural network."""
    log.debug("Unshaping data")
    
    if data.ndim != 3:
        log.error("'unshape' expects a three-dimensional array: (n_samples, sample_len, n_channels)")
        return data

    data = data.reshape(*data.shape[:-1])

    if data.dtype.kind != 'f':
        log.warning("\'unshape\' expects a float array")
        data = data.astype('float64')

    for i in range(data.shape[0]):
        data[i] = __float2pcm(data[i])

    return data.astype('int{0}'.format(tml.value('bit_depth', section='audio')))
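The helpers __pcm2float and __float2pcm are referenced above but not shown; here is a minimal sketch of a symmetric pair under the usual scaling convention (divide by 2^(bits-1)), not necessarily the project's exact implementation:

import numpy as np

def __pcm2float(sig):
    """Scale integer PCM samples to floats in [-1.0, 1.0)."""
    scale = np.iinfo(sig.dtype).max + 1   # e.g. 32768 for int16
    return sig.astype('float64') / scale

def __float2pcm(sig, dtype='int16'):
    """Scale floats in [-1.0, 1.0) back to integer PCM, with clipping."""
    info = np.iinfo(dtype)
    return (sig * (info.max + 1)).clip(info.min, info.max).astype(dtype)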
Example #20
0
    def seedFromTableRef(self, table, field, inSerial=True, offset=0, limit=10000):
        cacheKey = "seedFromTableRef___{}___{}".format(table, field)
        hit      = cache.getCacheKey(cacheKey)
        if not hit:
            logger.debug("Queries into table for {}".format(cacheKey))
            self.cursor.execute("SELECT {} FROM {} LIMIT {}, {}".format(field, table, offset, limit))
            hit = [result[field] for result in self.cursor.fetchall()]
            cache.setCacheKey(cacheKey, hit)
        # TODO (3): Following should go in some utility file
        #           Used in seeders/j.py also
        if not inSerial:
            return random.choice(hit)
        k = "MySQL__seedFromTableRef__{}__{}".format(table, field)
        i = cache.getCacheKey(k, 0)
        cache.setCacheKey(k, i + 1)
        hitLen = len(hit)

        return hit[i%hitLen]
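A sketch of the shared round-robin helper the TODO above suggests factoring out (the function name is hypothetical; it reuses the same cache module):

def pick_in_serial(cache_key, values):
    """Return items from <values> in round-robin order, keeping the cursor in cache."""
    i = cache.getCacheKey(cache_key, 0)
    cache.setCacheKey(cache_key, i + 1)
    return values[i % len(values)]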
Example #21
0
def download(furl, dpath='.'):
    """Download the file at <furl> and write it into <dpath>.
    Return the path to the downloaded file.
    """
    log.debug("Downloading file from \"{0}\" into \"{1}\"".format(furl, dpath))

    response = req.get(furl)
    if response.status_code != req.codes.ok:
        if response.status_code == HTTP.NOT_FOUND:
            log.critical(''.join(['File not found at: ', furl]))
        else:
            log.critical(''.join(
                ['Error code: ', str(response.status_code), ', getting: ', furl]))

    fpath = pth.__join_path(dpath, pth.__file_name(furl))
    pth.__write_file(fpath, response.content)

    return fpath
Example #22
0
 def setup(self, vpn_service: str, origin_resolv_conf: Path,
           vpn_resolv_conf: Path, vpn_nameserver_hook_conf: Path):
     if not self._available:
         logger.error('[dnsmasq] is not yet installed or is corrupted')
         sys.exit(ErrorCode.MISSING_REQUIREMENT)
     logger.info('Setup DNS resolver[dnsmasq]...')
     dnsmasq_vpn_cfg = self._dnsmasq_vpn_cfg(vpn_service)
     runtime_resolv_cfg = self.adapt_dnsmasq(origin_resolv_conf,
                                             vpn_service)
     dnsmasq_opts = {
         '{{DNS_RESOLVED_FILE}}': self.__build_dnsmasq_conf('resolv-file', runtime_resolv_cfg),
         '{{PORT}}': self.__build_dnsmasq_conf('port', self.dnsmasq_options().get('port', None)),
         '{{CACHE_SIZE}}': self.__build_dnsmasq_conf('cache-size', self.dnsmasq_options().get('cache_size', None))
     }
     logger.debug(
         f'Add [dnsmasq] config for {vpn_service}[{dnsmasq_vpn_cfg}]...')
     FileHelper.copy(self.resource_dir.joinpath(self.DNSMASQ_CONFIG_TMPL),
                     dnsmasq_vpn_cfg,
                     force=True)
     FileHelper.replace_in_file(dnsmasq_vpn_cfg, dnsmasq_opts, backup='')
     FileHelper.chmod(dnsmasq_vpn_cfg, mode=0o0644)
     logger.debug(
         f'Symlink [dnsmasq] VPN nameserver runtime configuration [{vpn_nameserver_hook_conf}]...'
     )
     FileHelper.create_symlink(vpn_nameserver_hook_conf,
                               self._dnsmasq_vpn_hook_cfg,
                               force=True)
     logger.info('Generating system DNS config file from VPN service...')
     FileHelper.write_file(vpn_resolv_conf,
                           self.__dnsmasq_resolv(vpn_service),
                           mode=0o0644)
     FileHelper.create_symlink(vpn_resolv_conf,
                               DNSResolver.DNS_SYSTEM_FILE,
                               force=True)
     self.service.enable(self.config.identity)
Example #23
0
 def probe(self) -> 'DNSResolver':
     self.kind = next(
         (t for t in DNSResolverType.as_services()
          if self.service.status(t.config.identity).is_enabled()),
         self.kind)
     if self.kind.might_be_command():
         # Fall back to the current kind if no command-based resolver verifies.
         self.kind = next((t for t in DNSResolverType.as_command()
                           if SystemHelper.verify_command(t.config.identity)),
                          self.kind)
     if self.kind.is_unknown():
         logger.warning(
             'Unknown DNS resolver. DNS VPN IP might not be resolved correctly'
         )
     if self.kind not in [DNSResolverType.DNSMASQ, DNSResolverType.UNKNOWN]:
         dnsmasq_name = DNSResolverType.DNSMASQ.config.identity
         self._is_dnsmasq = (self.service.status(dnsmasq_name).is_enabled()
                             or shutil.which(dnsmasq_name) is not None)
     logger.debug(
         f'Current DNS resolver [{self.kind.name}], is dnsmasq available [{self._is_dnsmasq}]'
     )
     return self
Example #24
0
 def copy(file_or_folder: Union[str, Path], dest: Union[str, Path], force=False, skip_if_no_source=True):
     p = Path(file_or_folder)
     t = Path(dest)
     logger.debug(f'Copy [{p}] to [{t}]...')
     if not FileHelper.is_exists(p):
         if skip_if_no_source:
             return
          raise RuntimeError(f'Given path [{file_or_folder}] does not exist')
     if FileHelper.is_dir(t):
         FileHelper.mkdirs(t)
     else:
         if t.exists():
             if not force:
                  raise RuntimeError(f'Destination [{dest}] already exists')
             FileHelper.rm(t)
         FileHelper.mkdirs(t.parent)
     if p.is_dir():
         copy_tree(str(p.absolute()), str(t.absolute()))
     else:
         shutil.copy(p, t)
Example #25
0
 def get_vpn_status(self, vpn_acc: str) -> dict:
     if not vpn_acc:
         return {'connected': False}
     try:
         ss = self.exec_command('AccountStatusGet',
                                params=vpn_acc,
                                silent=True,
                                log_lvl=logger.DEBUG)
         ss_msg = TextHelper.awk(next(
             iter(TextHelper.grep(ss, r'Session Status.+')), None),
                                 sep='|',
                                 pos=1).strip()
         return {
             'connected': ss_msg == 'Connection Completed (Session Established)',
             'msg': ss_msg
         }
     except Exception as err:
         logger.debug(
             f'Something went wrong when getting VPN status. Error[{err}]')
         return {'connected': False}
Example #26
0
def parseOptions(argv):

    # Initialize defaults
    inputFile = None

    try:
        options, args = getopt.getopt(argv, "hi:", ["inputFile="])
    except getopt.GetoptError as e:
        logger.error(str(e))
        sys.exit(1)

    for option, arg in options:
        if option == "-h":
            print(getCliHelpText())
            sys.exit()
        elif option in ("-i", "--inputFile"):
            inputFile = arg

    if not inputFile:
        raise Exception('-i/--inputFile is required')

    logger.debug("Input file: {}".format(inputFile))

    return inputFile
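For comparison, an argparse-based equivalent of the getopt loop above (a sketch; it keeps the same -i/--inputFile contract, and argparse handles -h and the missing-argument error itself):

import argparse

def parseOptionsArgparse(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--inputFile", required=True, help="Path to the YAML input config")
    args = parser.parse_args(argv)
    logger.debug("Input file: {}".format(args.inputFile))
    return args.inputFile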
Example #27
0
def run_neuralnet(data, labels):
    """Run the tool to train the network on <data> and <labels>.
    Predict on the test split and export the results as wave files.
    """
    # Shaping data and splitting them into train and test parts

    log.info("Shaping data")

    data, labels = map(utls.shape, (data, labels))
    train_data, test_data = utls.split_test(data, labels)

    log.debug("Computing initial MSE")

    train_mses, test_mses = utls.mse(*train_data), utls.mse(*test_data)

    avg_train_mse = train_mses.mean()
    avg_test_mse = test_mses.mean()

    log.debug("Average MSE of train dataset: {0}".format(avg_train_mse))
    log.debug("Average MSE of test dataset: {0}".format(avg_test_mse))

    # Building and training model

    mdl_dname = tml.value('dnames', section='neuralnet', subkey='saved_models')
    if not pth.__is_empty(mdl_dname):
        log.warning(
            "Model has already been trained in a previous session, picking up best model from \'{0}\' directory"
            .format(mdl_dname))

        NN = NeuralNetwork(model=utls.load_best_model())
    else:
        log.info("Training model")

        pth.__make_dir(mdl_dname)
        NN = NeuralNetwork()
        NN.compile()
        NN.train(*train_data)

    # Making predictions using model

    log.info("Predicting with model")

    predictions = NN.predict(test_data[0])

    # Exporting predicted data along with expected data

    log.info("Exporting data")

    dnames = tml.value('dnames', section='neuralnet')

    _export(utls.unshape(predictions), dnames['predicted_labels'])
    _export(utls.unshape(test_data[1]), dnames['expected_labels'])
    _export(utls.unshape(test_data[0]), dnames['original_data'])
Example #28
0
 def _reconnect_vpn(_executor: VPNClientExecutor, _default_acc: str,
                    _current_acc: str):
     logger.debug(
         f'UPGRADE::Reconnect VPN previous state: default[{_default_acc}] - current[{_current_acc}]'
     )
     if not _current_acc and not _default_acc:
         return
     if not _current_acc and _default_acc:
         logger.debug('UPGRADE::Enable VPN service but do not connect...')
         return _executor.lease_vpn_service(is_enable=True,
                                            is_restart=False,
                                            is_lease_ip=False)
     if _current_acc == _default_acc:
         logger.debug('UPGRADE::Enable then restart VPN service...')
         return _executor.lease_vpn_service(is_enable=True,
                                            is_restart=True,
                                            is_lease_ip=False)
     logger.debug(
         f'UPGRADE::Start VPN service then connect to previous current acc [{_current_acc}]...'
     )
     _executor.device.unix_service.restart(_executor.vpn_service, delay=0)
     _executor.do_disconnect_current(log_lvl=logger.DEBUG)
     _executor.do_connect(_current_acc)
Example #29
0
 @classmethod  # unittest invokes setUpClass on the class itself
 def setUpClass(self):
     logger.debug("Setting up class..")
     self.inputConfig = {
         "engine":        "mysql",
         "host":          "localhost",
         "user":          "******",
         "database":      "jseeder",
         "password":      "******",
         "port":          3306,
         "includeTables": {
             "users": {
                 "seedSize":        10,
                 "excludeFields":   ["middle_name"],
                 "inclusionPolicy": "all", # "all"/"none" - Include all/ none fields, default - "none"
                 "includeFields":   {
                     "first_name": {
                         "seeder":     "j.fromList",
                         "seederArgs":  {
                             "l": ["jitendra", "kumar", "ojha"],
                             "inSerial": True
                         }
                     },
                     "last_name": {
                         "seeder":     "j.fromList",
                         "seederArgs":  {
                             "l": ["jitendra", "kumar", "ojha"],
                             "inSerial": True
                         }
                     },
                     "fav_num": {
                         "seeder":     "j.fromBetween",
                         "seederArgs":  {
                             "i": 0,
                             "j": 3,
                             "inSerial": False
                         }
                     },
                     "city_id": {
                         "seederArgs": {
                             "inSerial": True,
                             "offset":   3,
                             "limit":    5
                         }
                     }
                 }
             },
             "cities": {
                 "seedSize":        10,
                 "inclusionPolicy": "all",
                 "includeFields":   {
                     "name": {
                         "seeder": "j.fromList",
                         "seederArgs":  {
                             "l": ["Bangalore", "Patna"],
                             "inSerial": True
                         }
                     }
                 }
             }
         }
     }
     logger.info("Using following input config:\n{}".format(json.dumps(self.inputConfig)))
     self.conn = MySQLdb.connect(
         self.inputConfig["host"],
         self.inputConfig["user"],
         self.inputConfig["password"],
         self.inputConfig["database"],
         self.inputConfig["port"]
     )
     logger.info("Creating required test tables..")
     self.cursor = self.conn.cursor()
     sql = """CREATE TABLE cities (
             id INT PRIMARY KEY AUTO_INCREMENT NOT NULL,
             name  VARCHAR(20) NOT NULL)"""
     self.cursor.execute(sql)
     sql = """CREATE TABLE users (
             id INT PRIMARY KEY AUTO_INCREMENT NOT NULL,
             first_name  VARCHAR(20) NOT NULL,
             middle_name VARCHAR(20),
             last_name  VARCHAR(20),
             fav_num INT,
             city_id INT,
             CONSTRAINT fk_users_cities_city_id_id FOREIGN KEY (city_id) REFERENCES cities(id))"""
     self.cursor.execute(sql)
Example #30
0
 @classmethod  # likewise invoked on the class by unittest
 def tearDownClass(self):
     logger.debug("Tearing down class..")
     logger.info("Dropping all test tables..")
     self.cursor.execute("DROP TABLE users")
     self.cursor.execute("DROP TABLE cities")
Example #31
0
def main(argv):
    # Reconstructed opening: the original snippet begins mid-function.
    try:
        inputFile = parseOptions(argv)
    except Exception as e:
        logger.error(str(e))
        sys.exit(1)
    inputConfig = parseYamlFile(inputFile)

    if inputConfig["engine"] == "mysql":
        from src.schema_builders.mysql import MysqlSchemaBuilder as SchemaBuilder
        from src.writers.mysql import MysqlWriter as Writer
        from src.contexts.mysql import MysqlContext as Context
        conn = MySQLdb.connect(
            inputConfig["host"],
            inputConfig["user"],
            inputConfig["password"],
            inputConfig["database"],
            inputConfig["port"]
        )
        ctx = Context(conn, inputConfig)

    else:
        logger.error("Engine - {} not supported".format(inputConfig["engine"]))
        sys.exit(1)

    orderInfo, schemaForDatagen = SchemaBuilder(ctx).getSchemaForDataGen()
    logger.debug("Schema for data generation:\n{}".format(json.dumps(schemaForDatagen)))
    logger.debug("Will be worked in order:\n{}".format(json.dumps(orderInfo)))
    writer = Writer(ctx)
    dataGen = DataGen(ctx)
    for results in dataGen.generate(schemaForDatagen, orderInfo):
        logger.info("Writing {} documents into {}..".format(len(results["docs"]), results["table"]))
        writer.doBulkWrite(results["table"], results["docs"])
    logger.info("Finally, Done with it!")
Example #32
0
def _write(data, fpath):
    """Write <data> into numpy archive file at <fpath>."""
    log.debug("Writing data in \"{0}\"".format(fpath))

    np.savez_compressed(fpath, *data)
Example #33
0
 def evaluate(self, data, labels):
     log.debug("Evaluating model")
     return self.model.evaluate(data,
                                labels,
                                batch_size=self.bsiz,
                                callbacks=self.cbac)
Example #34
0
 def predict(self, data):
     log.debug("Generating predictions")
     return self.model.predict(data, batch_size=self.bsiz)
Example #35
0
 def _error(_path: Union[str, Path], _strict: bool, _err=None):
     if _strict:
         if _err:
             logger.debug(f'Unable to read json file [{_path}]. Error:{_err}')
         raise FileNotFoundError(f'Not found or unreadable file[{_path}]')
     return {}
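A hypothetical caller showing how _error is meant to be used (read_json and its strict flag are assumptions, not shown in the source):

def read_json(path: Union[str, Path], strict=False) -> dict:
    p = Path(path)
    if not p.is_file():
        return _error(p, strict)
    try:
        with open(p) as f:
            return json.load(f)
    except (OSError, ValueError) as err:
        return _error(p, strict, err)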
Example #36
0
 def compile(self):
     log.debug("Compiling model")
     self.model.compile(self.opti, loss=self.loss, metrics=self.metr)
Example #37
0
 def _common_remove_dnsmasq(self, vpn_service: str, keep_dnsmasq: bool):
     if not keep_dnsmasq:
         cfg = self.config.to_fqn_cfg(self.DNSMASQ_TUNED_CFG)
         logger.debug(
             f'Remove [dnsmasq] and [{vpn_service}] plugin[{cfg}]...')
         FileHelper.rm(cfg)