Example #1
    def setup(self):
        self.sink.mkdir(exist_ok=True, parents=True)

        # Create a place to store microcached results
        self.cache = self.sink / ".cache"
        self.cache.mkdir(exist_ok=True)

        # Set up logging and metadata paths
        self.metapath = self.sink / "metadata.json"
        logfile_path = self.sink / "logfile.log"

        # Read in previous metadata if it exists
        try:
            self.prev_meta = read_json(self.metapath)
        except FileNotFoundError:
            self.prev_meta = None

        self.logger.setLevel(logging.DEBUG)

        if logfile_path.exists():
            logfile_path.unlink()

        logfile_handler = logging.FileHandler(logfile_path)
        formatter = logging.Formatter(
            "%(asctime)s : %(levelname)s : %(name)s : %(message)s")
        logfile_handler.setFormatter(formatter)
        self.logger.addHandler(logfile_handler)
        self.logger.debug("Logging initialised")
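Both setup examples on this page rely on read_json (and later write_json) from ftis.common.io. The library's own implementation isn't shown here; as an assumption, such helpers are usually thin wrappers around the standard json module, roughly:

    # Hypothetical sketch of the ftis.common.io helpers (assumption, not library source)
    import json

    def read_json(path):
        # Load a JSON file into the corresponding Python object
        with open(path, "r") as f:
            return json.load(f)

    def write_json(path, data):
        # Serialise data to a human-readable JSON file
        with open(path, "w") as f:
            json.dump(data, f, indent=4)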
Example #2
    def setup(self) -> None:
        self.metadata["time"] = datetime.datetime.now().strftime(
            "%H:%M:%S | %B %d, %Y")
        self.sink.mkdir(exist_ok=True, parents=True)

        # Microcache
        self.cache = self.sink / ".cache"
        self.cache.mkdir(exist_ok=True)

        # Set up logging and metadata paths
        self.metapath = self.sink / "metadata.json"
        logfile_path = self.sink / "logfile.log"

        # Load previous metadata if present
        if self.metapath.is_file():
            self.prev_meta = read_json(self.metapath)
        else:
            self.prev_meta = None

        # Initialise logging
        self.logger.setLevel(logging.DEBUG)

        if logfile_path.exists():
            logfile_path.unlink()

        logfile_handler = logging.FileHandler(logfile_path)
        formatter = logging.Formatter(
            "%(asctime)s : %(levelname)s : %(name)s : %(message)s")
        logfile_handler.setFormatter(formatter)
        self.logger.addHandler(logfile_handler)
        self.logger.debug("Logging initialised")
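Deleting the old logfile before attaching the handler guarantees a fresh log on every run. An equivalent option (an alternative, not what the example does) is to open the FileHandler in write mode, which truncates the file itself:

    # Alternative: let the handler truncate any previous logfile
    logfile_handler = logging.FileHandler(logfile_path, mode="w")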
Example #3
    def update_success(self, status: bool) -> None:
        try:
            existing_metadata = read_json(self.process.metapath)
        except FileNotFoundError:
            existing_metadata = {}

        try:
            success = existing_metadata["success"]  # extract the progress dict
        except KeyError:
            success = {}  # progress doesn't exist yet

        # update the status of this analyser
        success[self.identity["hash"]] = status
        # join any existing data into the metadata
        self.process.metadata["success"] = success  # modify the original
        write_json(self.process.metapath, self.process.metadata)
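After a few analysers have reported in, metadata.json carries a success map keyed by each analyser's identity hash; schematically (hash values shortened for illustration):

    {
        "success": {
            "a1b2c3": true,
            "d4e5f6": false
        }
    }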
Example #4
 def load_cache(self):
     # Restore this analyser's output from its cached JSON dump
     self.output = read_json(self.dump_path)
Example #5
 def load_cache(self):
     # Restore the cached corpus items as a plain list
     d = read_json(self.dump_path)
     self.output = list(d["corpus_items"])
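These load_cache variants only work alongside a matching dump step; a minimal sketch of what the write side presumably looks like (dump_cache is a hypothetical name, not confirmed by the source):

    def dump_cache(self):
        # Persist the output so a later run can restore it via load_cache
        write_json(self.dump_path, {"corpus_items": self.output})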
Example #6
# Description
"""
Analyse each item belonging to a cluster so that the clusters can be ranked in order of a descriptor.
"""

from ftis.analyser import Flux
from ftis.process import FTISProcess as Chain
from ftis.common.io import read_json
import numpy as np

src = "outputs/segments/2_ExplodeAudio"
folder = "outputs/metacluster_analysis"

process = Chain(source=src, folder=folder)

flux = Flux(cache=True)
process.add(flux)

if __name__ == "__main__":
    process.run()
    # print(flux.output)
    clusters = read_json("outputs/classification/3_AGCluster")
    for k, members in clusters.items():
        buf = []
        for v in members:
            buf.extend(flux.output[v])
        print(f"Cluster {k}: {np.median(buf)}")
Example #7
from pydub import AudioSegment
from ftis.common.io import read_json, write_json
from random import choice, uniform

CLUSTERS = read_json("outputs/micro_clustering/5_HDBSCLUSTER.json")
urn = list(CLUSTERS["37"])
num_choices = 10  # how many individual samples to use

# Generate selection pool
selection_pool = []
for x in range(num_choices):
    r = choice(urn)
    selection_pool.append(r)
    urn.remove(r)

# Set up constraints (pydub measures lengths in milliseconds)
max_length = 10 * 1000  # maximum length of the final result
min_length = 2 * 1000  # minimum length of the final result
length = uniform(min_length, max_length)  # random target length between min/max
repeat_chance = 0.1  # ten percent
max_repeats = 4
repeat = 0  # should we repeat?
memory = 4  # window size that stops a sample from looping again too soon
mem = []

# Run many generations; each iteration assembles one audio container
for x in range(100):
    container = AudioSegment.empty()  # the container to append to
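The example is cut off at this point. As a hedged sketch only (the segment paths, end-to-end appending, and the output naming are assumptions, not part of the original), the body of such a loop typically keeps appending randomly chosen pool members until the target length is reached, then exports the result:

    # Hypothetical continuation of the truncated loop body
    while len(container) < length:  # pydub reports length in milliseconds
        segment = AudioSegment.from_file(choice(selection_pool))
        container += segment  # append end-to-end
    container.export(f"generation_{x}.wav", format="wav")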