import functools
import logging

logger = logging.getLogger(__name__)

def log_exceptions(func):  # hypothetical name; the original snippet showed only the inner wrapper
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Warning as w:
            logger.warning(str(w), exc_info=True)
        except Exception as e:
            logger.error(str(e), exc_info=True)
    return wrapper
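
# A minimal usage sketch of the decorator above; `log_exceptions` is the
# name assumed in this rewrite, since the snippet only showed the wrapper:
@log_exceptions
def risky_division(a, b):
    return a / b

risky_division(1, 0)  # logs the ZeroDivisionError (with traceback) instead of raising
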
def demo():
    import src.utils.logger as log
    import src.utils.path as pth
    import src.parser.toml as tml
    from src.utils.tools import download, extract
    
    
    # Downloading data from URLs and extracting downloaded files

    dry_url = tml.value('urls', section='demo', subkey='dry')
    fx_url = tml.value('urls', section='demo', subkey='fx')

    dry_dpath = tml.value('dnames', section='demo', subkey='input')
    fx_fname = tml.value('fx_name', section='demo')

    log.info("Downloading and extracting dataset and fx")

    fx_fpath = download(fx_url)
    pth.__rename_file(fx_fpath, fx_fname)
    
    if not pth.__exists(dry_dpath):
        dry_fpath = download(dry_url)
        extract(dry_fpath, dry_dpath)
    else:
        log.warning("\"{0}\" already exists, skipping dataset download".format(dry_dpath))

    run(dry_dpath, fx_fname, tml.value('dnames', section='demo', subkey='output'))
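
# A minimal sketch of what the tml.value helper used above might do,
# assuming the project keeps a single TOML configuration file; the file
# name 'config.toml' and this signature are assumptions for illustration.
import toml

_config = toml.load('config.toml')

def value(key, section, subkey=None):
    """Return <key> from <section> of the config, descending into <subkey> if given."""
    entry = _config[section][key]
    return entry[subkey] if subkey is not None else entry
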
def check_db():
    """Check if the database exists."""
    # Get all databases
    all_dbs_list = InfluxService.db_client.get_list_database()

    # Check whether the current database exists: warn if it does not,
    # otherwise switch to it
    if InfluxService.cnf.INFLUX_DB not in [
            str(x['name']) for x in all_dbs_list
    ]:
        app_logger.warning("Database {0} does not exist".format(
            InfluxService.cnf.INFLUX_DB))
    else:
        try:
            app_logger.info("Using db {0}".format(
                InfluxService.cnf.INFLUX_DB))
            InfluxService.db_client.switch_database(
                InfluxService.cnf.INFLUX_DB)
        except exceptions.InfluxDBClientError as e:
            app_logger.error(str(e))
        except exceptions.InfluxDBServerError as e1:
            app_logger.error(str(e1))
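
# A hedged sketch of the client wiring check_db relies on, using the
# influxdb package's InfluxDBClient; the host and port here are
# illustrative assumptions, not taken from the original service.
from influxdb import InfluxDBClient

db_client = InfluxDBClient(host='localhost', port=8086)
print([db['name'] for db in db_client.get_list_database()])
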
Example #4
def delete_object(obj_info):
    """
    Delete all the edges and vertices associated with a workspace object in RE.
    """
    # Delete documents associated with this KBase object.
    # For now, the ws_object document is simply flagged as deleted.
    wsid = obj_info[6]
    objid = obj_info[0]
    obj_key = f'{wsid}:{objid}'
    results = re_client.get_doc(_OBJ_COLL_NAME, obj_key).get('results')
    if not results:
        logger.warning(
            f"No RE document found with key {obj_key}. Cannot delete.")
        return
    obj_doc = results[0]
    for key in ['updated_at', '_id', '_rev']:
        del obj_doc[key]
    obj_doc['deleted'] = True
    # Delete the unversioned object
    re_client.save(_OBJ_COLL_NAME, obj_doc)
    # Delete all versioned objects
    query = f"""
    FOR doc IN ws_object_version
        FILTER doc.workspace_id == @wsid AND doc.object_id == @objid
        UPDATE {{deleted: true, _key: doc._key}} IN {_OBJ_VER_COLL_NAME}
    """
    re_client.execute_query(query, {'wsid': wsid, 'objid': objid})
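
# For reference, a hedged sketch of the workspace object_info tuple this
# function indexes into; only positions 0 (object id) and 6 (workspace id)
# matter here, and the other fields shown are illustrative:
obj_info = [45, 'my_object', 'Module.Type-1.0', '2020-01-01T00:00:00',
            1, 'username', 12]
# delete_object(obj_info) would then target the RE document keyed "12:45".
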
Example #5
def _read(fpath):
    """Read sound at <fpath> as a pydub AudioSegment."""
    if __is_audio_file(fpath):
        return AudioSegment.from_file(pth.__path(fpath))

    log.warning("{0} is not an audio file".format(fpath))

    return AudioSegment.empty()
from inspect import signature

def n_parameters(func):
    """Return the number of parameters of <func> if it's callable.
    Otherwise return -1.
    """
    if not callable(func):
        log.warning("'{0}' is not a function".format(func))
        return -1

    return len(signature(func).parameters)
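
# A quick usage sketch of n_parameters:
n_parameters(lambda x, y: x + y)  # -> 2
n_parameters("not callable")      # logs a warning and returns -1
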
Example #7
def _load(dpath):
    """Read all sounds at <dpath> into a list of pydub AudioSegments."""
    audio_segments = []
    for fpath in pth.__list_files(dpath):
        if __is_audio_file(fpath):
            audio_segments.append(_read(fpath))

    if len(audio_segments) == 0:
        log.warning("{0} does not contain any audio file".format(dpath))

    return audio_segments
Example #8
def run_neuralnet(data, labels):
    """Run tool to train network on <data> and <labels>.
    Predict on <data> and export them as wave files.
    """
    # Shaping data and splitting them into train and test parts

    log.info("Shaping data")

    data, labels = map(utls.shape, (data, labels))
    train_data, test_data = utls.split_test(data, labels)

    log.debug("Computing initial MSE")

    train_mses, test_mses = utls.mse(*train_data), utls.mse(*test_data)

    avg_train_mse = sum(train_mses) / train_mses.shape[0]
    avg_test_mse = sum(test_mses) / test_mses.shape[0]

    log.debug("Average MSE of train dataset: {0}".format(avg_train_mse))
    log.debug("Average MSE of test dataset: {0}".format(avg_test_mse))

    # Building and training model

    mdl_dname = tml.value('dnames', section='neuralnet', subkey='saved_models')
    if not pth.__is_empty(mdl_dname):
        log.warning(
            "Model has already been trained in a previous session, picking up best model from '{0}' directory"
            .format(mdl_dname))

        NN = NeuralNetwork(model=utls.load_best_model())
    else:
        log.info("Training model")

        pth.__make_dir(mdl_dname)
        NN = NeuralNetwork()
        NN.compile()
        NN.train(*train_data)

    # Making predictions using model

    log.info("Predicting with model")

    predictions = NN.predict(test_data[0])

    # Exporting predicted data along with expected data

    log.info("Exporting data")

    dnames = tml.value('dnames', section='neuralnet')

    _export(utls.unshape(predictions), dnames['predicted_labels'])
    _export(utls.unshape(test_data[1]), dnames['expected_labels'])
    _export(utls.unshape(test_data[0]), dnames['original_data'])
Example #9
def write_predictions_to_file(self, writer: TextIO,
                              test_input_reader: TextIO, preds_list: List):
    example_id = 0
    for line in test_input_reader:
        if line.startswith("-DOCSTART-") or line == "" or line == "\n":
            writer.write(line)
            if not preds_list[example_id]:
                example_id += 1
        elif preds_list[example_id]:
            output_line = line.split()[0] + " " + preds_list[example_id].pop(0) + "\n"
            writer.write(output_line)
        else:
            logger.warning(
                "Maximum sequence length exceeded: No prediction for '%s'.",
                line.split()[0])
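
# A hedged sketch of the CoNLL-style streams this method expects: one token
# per line followed by a space and its label, with blank lines between
# sentences; the data below is illustrative.
from io import StringIO

test_input_reader = StringIO("EU B-ORG\nrejects O\n\n")
preds_list = [["B-ORG", "O"]]
# For each token line, the method writes "<token> <prediction>" and consumes
# one prediction via preds_list[example_id].pop(0).
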
Example #10
def unshape(data):
    """Unshape <data> output of neural network."""
    log.debug("Unshaping data")
    
    if data.ndim != 3:
        log.error("'unshape' expects a three-dimensional array: (n_samples, sample_len, n_channels)")
        return data

    data = data.reshape(*data.shape[:-1])

    if data.dtype.kind != 'f':
        log.warning("'unshape' expects a float array")
        data = data.astype('float64')

    for i in range(data.shape[0]):
        data[i] = __float2pcm(data[i])

    return data.astype('int{0}'.format(tml.value('bit_depth', section='audio')))
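
# A minimal sketch of the __float2pcm conversion unshape relies on,
# assuming floats in [-1.0, 1.0] are rescaled to the signed integer range
# of the configured bit depth (16 bits assumed here for illustration);
# unshape itself casts the result to the int dtype afterwards:
import numpy as np

def float2pcm(sample, bit_depth=16):
    return np.clip(sample, -1.0, 1.0) * (2 ** (bit_depth - 1) - 1)
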
Example #11
def shape(data):
    """Shape <data> to fit input of neural network."""
    log.debug("Shaping data")
    
    if data.ndim != 2:
        log.error("'shape' expects a two-dimensional array: (n_samples, sample_len)")
        return data

    _dtype = 'int{0}'.format(tml.value('bit_depth', section='audio'))

    if data.dtype != _dtype:
        log.warning("'shape' expects an {0} array".format(_dtype))

    data = data.astype('float64')
    for i in range(data.shape[0]):
        data[i] = __pcm2float(data[i].astype(_dtype))

    return data.reshape(*data.shape, 1)
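
# A quick sketch of the shaping contract, assuming a 16-bit audio config:
# an int16 array of shape (n_samples, sample_len) comes back as a float64
# array of shape (n_samples, sample_len, 1) scaled to [-1.0, 1.0].
import numpy as np

pcm = np.zeros((4, 8000), dtype='int16')  # 4 samples of 8000 frames each
# shape(pcm).shape -> (4, 8000, 1), dtype float64
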
def apply_fx(dry, fx, func=convolve):
    """Apply an fx to a dry signal.
    Return the resulting signal.
    """
    if func is None:
        func = convolve

    n_params = n_parameters(func)
    if n_params == -1:
        log.critical("A function is needed to apply fx")
    elif n_params != 2:
        log.critical(
            "'{0}' function doesn't take exactly two arguments, can't apply fx"
            .format(func.__name__))

    if dry.frame_count() == 0:
        log.warning("Applying fx to an empty signal")

    return func(dry, fx)
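
# A hedged usage sketch, assuming dry and fx are pydub AudioSegments and
# convolve is the project's own two-argument convolution helper:
# wet = apply_fx(dry_segment, reverb_impulse)            # default convolution
# wet = apply_fx(dry_segment, fx_segment, func=my_mixer) # any two-arg callable
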
def run_datagen(dry_dpath, fx_fpath, output_dpath=None):
    """Run tool to generate the dataset and write a numpy data file.
    Return a couple (data, labels) to be used for training the network.
    """

    mimetypes.init()

    # Filtering samples and generating dataset

    if output_dpath is not None and pth.__is_dir(output_dpath):
        log.warning(
            "\"{0}\" already exists, skipping dataset generation".format(
                output_dpath))
    else:
        log.info("Filtering directory containing dry samples")

        _filter(dry_dpath)

        log.info("Generating dataset of wet samples")

        generate_dataset(dry_dpath, fx_fpath, output_dpath)

    # Retrieving data and saving them

    npy_fname = tml.value('numpy', section='data', subkey='fname')
    if pth.__is_file(npy_fname):
        log.warning("\"{0}\" already exists, skipping data retrieval".format(
            npy_fname))
    else:
        log.info("Retrieving data and saving it into a numpy file")

        write_data()

    # Reading data from saved file

    log.info("Reading data from numpy file")

    data, labels = read_data()

    return data, labels
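
# A hedged usage sketch of the datagen entry point; the paths below are
# illustrative assumptions, with output_dpath otherwise falling back to
# the configured default:
if __name__ == '__main__':
    data, labels = run_datagen('data/dry', 'data/fx/reverb.wav',
                               output_dpath='data/wet')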