Example no. 1
def load():

    global GRID
    global CODES
    global STEP

    if len(GRID) == 0:

        # Status
        log.p('LOADING eBIRD GRID DATA...', new_line=False)

        # Load grid data (gzipped JSON or pickle)
        if cfg.EBIRD_MDATA.rsplit('.', 1)[-1] == 'gz':
            with gzip.open(cfg.EBIRD_MDATA, 'rt') as pfile:
                GRID = json.load(pfile)
        else:
            with open(cfg.EBIRD_MDATA, 'rb') as pfile:
                GRID = pickle.load(pfile)

        # Load species codes
        with open(cfg.EBIRD_SPECIES_CODES, 'r') as jfile:
            CODES = json.load(jfile)

        STEP = cfg.GRID_STEP_SIZE

        log.p(('DONE!', len(GRID), 'GRID CELLS'))
Example no. 2
def loadParams(net, params):

    log.p('IMPORTING MODEL PARAMS...', new_line=False)

    l.set_all_param_values(net, params)

    log.p('DONE!')
    
    return net
Example no. 3
def test_function(net, layer_index=-1):

    log.p('COMPILING THEANO TEST FUNCTION...', new_line=False)

    layers = l.get_all_layers(net)
    prediction = l.get_output(layers[layer_index], deterministic=True)
    test_function = theano.function([layers[0].input_var], prediction, allow_input_downcast=True)

    log.p('DONE!')

    return test_function
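
A minimal usage sketch for the compiled function; the wiring through buildNet()/loadParams() and the input shape follow the other examples, and 'params' is assumed to come from a loaded snapshot:

import numpy as np

net = buildNet()
net = loadParams(net, params)  # 'params' from loadSnapshot() (assumption)
predict = test_function(net)

# Dummy batch shaped like the InputLayer in buildNet()
batch = np.zeros((1, cfg.IM_DIM, cfg.IM_SIZE[1], cfg.IM_SIZE[0]), dtype=np.float32)
scores = predict(batch)  # -> (1, number of classes)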
Example no. 4
def setSpeciesList(lat, lon, week):

    if week not in range(1, 49):
        week = -1

    if cfg.USE_EBIRD_CHECKLIST:
        cfg.WHITE_LIST, cfg.BLACK_LIST = grid.getSpeciesLists(
            lat, lon, week, cfg.EBIRD_THRESHOLD)
    else:
        cfg.WHITE_LIST = cfg.CLASSES

    log.p(('SPECIES:', len(cfg.WHITE_LIST)), new_line=False)
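
The week argument follows eBird's 48-week calendar (four pseudo-weeks per month); anything outside 1-48 falls back to -1, i.e. year-round. A sketch of deriving such a week from a date (the exact mapping is an assumption, not taken from this code):

from datetime import date

def ebirdWeek(d):
    # Four pseudo-weeks per month -> values 1..48 (assumed mapping)
    return (d.month - 1) * 4 + min((d.day - 1) // 7, 3) + 1

setSpeciesList(42.48, -76.45, ebirdWeek(date.today()))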
Example no. 5
def loadModel(model_file, config_file):

    global INPUT_LAYER_INDEX
    global OUTPUT_LAYER_INDEX

    log.p('LOADING TF LITE MODEL...', new_line=False)

    # Load TFLite model and allocate tensors.
    interpreter = tflite.Interpreter(model_path=model_file)
    interpreter.allocate_tensors()

    # Get input and output tensors.
    input_details = interpreter.get_input_details()
    output_details = interpreter.get_output_details()

    # Get input tensor index
    INPUT_LAYER_INDEX = input_details[0]['index']
    OUTPUT_LAYER_INDEX = output_details[0]['index']

    # Load model-specific config
    cfg['LOAD'](config_file, [
        'CLASSES', 'SPEC_TYPE', 'MAGNITUDE_SCALE', 'WIN_LEN', 'SAMPLE_RATE',
        'SPEC_FMIN', 'SPEC_FMAX', 'SPEC_LENGTH', 'INPUT_TYPE', 'INPUT_SHAPE'
    ])

    log.p('DONE!')
    log.p(('INPUT LAYER INDEX:', INPUT_LAYER_INDEX))
    log.p(('OUTPUT LAYER INDEX:', OUTPUT_LAYER_INDEX))

    return interpreter
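
A minimal inference sketch with the returned interpreter; set_tensor/invoke/get_tensor are the standard TF Lite calls, while the input shape and the added batch dimension are assumptions based on the loaded config:

import numpy as np

interpreter = loadModel(cfg['MODEL_PATH'], cfg['CONFIG_PATH'])

# Dummy input; real code would pass a spectrogram here. Whether the batch
# dimension is already part of INPUT_SHAPE is an assumption.
sample = np.zeros(cfg['INPUT_SHAPE'], dtype=np.float32)
interpreter.set_tensor(INPUT_LAYER_INDEX, np.expand_dims(sample, 0))
interpreter.invoke()
prediction = interpreter.get_tensor(OUTPUT_LAYER_INDEX)[0]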
Example no. 6
def loadSnapshot(path):

    log.p(('LOADING SNAPSHOT', path.split(os.sep)[-1], '...'), new_line=False)

    with open(path, 'rb') as f:
        try:
            # Python 3: pickles written under Python 2 need latin1 decoding
            model = pickle.load(f, encoding='latin1')
        except TypeError:
            # Python 2: pickle.load() accepts no encoding argument
            model = pickle.load(f)

    cfg.setModelSettings(model)

    log.p('DONE!')
    
    return model
Example no. 7
def parseTestSet(path, file_type='wav'):

    # Find all soundscape files
    dataset = []
    if os.path.isfile(path):
        dataset.append(path)
    else:
        for dirpath, _, filenames in os.walk(path):
            for f in filenames:
                if f.rsplit('.', 1)[-1].lower() == file_type:
                    dataset.append(os.path.abspath(os.path.join(dirpath, f)))

    # Dataset stats
    log.p(('FILES IN DATASET:', len(dataset)))

    return dataset
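
Usage covers both branches, a single file or a recursively walked directory (paths hypothetical):

dataset = parseTestSet('soundscape.wav')
dataset = parseTestSet('/data/soundscapes', file_type='wav')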
Example no. 8
def save(p):

    # Time of detection in UTC
    utc = time.strftime('%H:%M:%S', time.gmtime(p['timestamp']))

    # Log
    for detection in p['detections']:
        log.p((utc, int(p['time_for_prediction'] * 1000) / 1000.0),
              new_line=False)
        log.p((detection['species'], detection['score']), new_line=False)
        log.p('')

    # Save JSON response data
    data = {'prediction': {'0': {}}, 'time': p['time_for_prediction']}
    with open('stream_analysis.json', 'w') as jfile:

        # Keep at most the first 26 detections in the JSON output
        for i, detection in enumerate(p['detections'][:26]):
            data['prediction']['0'][str(i)] = {
                'score': str(detection['score']),
                'species': detection['species']
            }

        json.dump(data, jfile)
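
For reference, the written stream_analysis.json has this shape (all values illustrative):

{
  "prediction": {
    "0": {
      "0": {"score": "0.87", "species": "..."},
      "1": {"score": "0.12", "species": "..."}
    }
  },
  "time": 1.42
}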
Example no. 9
def execute(json_data):

    # Init
    data = {}

    # Parse request fields
    action = json_data['action']
    ip = str(json_data['ip'])

    # Status
    log.p((ip, '|', 'action =', action), discard=True)

    ##########  TEST NEURAL NET  #############
    try:

        if action == 'analysis':

            with open('stream_analysis.json', 'r') as jfile:
                data = json.load(jfile)

    except (IOError, ValueError):
        # Missing or malformed analysis file
        data['response'] = 'error'

    return make_response(data)
Example no. 10
def run():

    # Load model
    interpreter = loadModel(cfg['MODEL_PATH'], cfg['CONFIG_PATH'])

    # Load species list
    getSpeciesList()

    # Start recording
    log.p('STARTING RECORDING WORKER')
    recordWorker = Thread(target=record, args=())
    recordWorker.start()

    # Keep running...
    log.p('STARTING ANALYSIS')
    while not cfg['KILL_ALL']:

        try:

            # Make prediction
            p = analyzeStream(interpreter)

            # Save results
            if p is not None:
                save(p)

                # Sleep if we are too fast
                if 'time_for_prediction' in p:
                    if p['time_for_prediction'] < cfg['SPEC_LENGTH'] - cfg[
                            'SPEC_OVERLAP']:
                        time.sleep((cfg['SPEC_LENGTH'] - cfg['SPEC_OVERLAP']) -
                                   (p['time_for_prediction']))

            else:
                time.sleep(1.0)

        except KeyboardInterrupt:
            cfg['KILL_ALL'] = True
            break

    # Done
    log.p('TERMINATED')
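
The sleep at the end of each successful iteration keeps analysis in step with real time: with, say, cfg['SPEC_LENGTH'] = 3.0 s and cfg['SPEC_OVERLAP'] = 0.5 s, a prediction that takes 1.0 s is followed by a 1.5 s sleep, so each pass consumes exactly one 2.5 s hop of the stream (values illustrative).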
Example no. 11
def process(soundscape, sid, out_dir, out_type, test_function):

    # Time
    start = time.time()
    log.p(('SID:', sid, 'PROCESSING:', soundscape.split(os.sep)[-1]),
          new_line=False)

    # Analyze file
    p = analyzeFile(soundscape, test_function)

    # Generate Raven selection table + Audacity text labels
    stable, dcnt = getRavenSelectionTable(p, soundscape.split(os.sep)[-1])
    atext = getAudacityLabels(p, soundscape.split(os.sep)[-1])
    log.p(('DETECTIONS:', dcnt), new_line=False)

    # Save results
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    base_name = os.path.splitext(soundscape.split(os.sep)[-1])[0]
    if out_type == 'raven':
        out_path = os.path.join(out_dir, base_name + '.BirdNET.selections.txt')
        with open(out_path, 'w') as stfile:
            stfile.write(stable)
    else:
        out_path = os.path.join(out_dir, base_name + '.BirdNET.Audacity_Labels.txt')
        with open(out_path, 'w') as stfile:
            stfile.write(atext)

    # Time
    t = time.time() - start

    # Stats
    log.p(('TIME:', int(t)))
Example no. 12
def showProgress(epoch, done=False):

    global last_update

    # First call?
    bcnt = cfg.STATS.get('batch_count', 0)

    # Calculate number of batches to train
    total_batches = cfg.STATS['sample_count'] // cfg.BATCH_SIZE + 1

    # Current progress
    if not done:
        if bcnt == 0:
            log.p(('EPOCH', epoch, '['), new_line=False)
        else:
            p = bcnt * 100 // total_batches
            if p % 5 == 0 and p != last_update:
                log.p('=', new_line=False)
                last_update = p
    else:
        log.p(']', new_line=False)
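
Since a tick is printed at every new multiple of 5%, a full epoch renders as roughly twenty '=' characters between the opening bracket (first batch) and the closing one printed by the done=True call, e.g.:

EPOCH 1 [====================]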
Example no. 13
def buildNet():

    log.p('BUILDING BirdNET MODEL...', new_line=False)

    # Input layer for images
    net = l.InputLayer((None, cfg.IM_DIM, cfg.IM_SIZE[1], cfg.IM_SIZE[0]))    

    # Pre-processing stage
    #log.p(("\tPRE-PROCESSING STAGE:"))
    net = l.batch_norm(l.Conv2DLayer(net,
                    num_filters=int(FILTERS[0] * RESNET_K),
                    filter_size=(5, 5),
                    pad='same',
                    nonlinearity=nl.rectify))
    
    #log.p(("\t\tFIRST  CONV OUT SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net)) - 1))

    # Max pooling
    net = l.MaxPool2DLayer(net, pool_size=(1, 2))
    #log.p(("\t\tPRE-MAXPOOL OUT SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net)) - 1))
    
    # Residual Stacks
    for i in range(1, len(FILTERS)):
        #log.p(("\tRES STACK", i, ':'))
        net = resblock(net,
                       filters=int(FILTERS[i] * RESNET_K),
                       kernel_size=KERNEL_SIZES[i],
                       stride=2,
                       preactivated=True,
                       block_id=i,
                       name='BLOCK ' + str(i) + '-1')
        
        for j in range(1, RESNET_N):
            net = resblock(net,
                           filters=int(FILTERS[i] * RESNET_K),
                           kernel_size=KERNEL_SIZES[i],
                           preactivated=False,
                           block_id=i+j,
                           name='BLOCK ' + str(i) + '-' + str(j + 1))
        
    # Post Activation
    net = l.batch_norm(net)
    net = l.NonlinearityLayer(net, nonlinearity=nl.rectify)
    
    # Classification branch
    #log.p(("\tCLASS BRANCH:"))
    net = classificationBranch(net, (4, 10))
    #log.p(("\t\tBRANCH OUT SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net)) - 1))

    # Pooling
    net = l.GlobalPoolLayer(net, pool_function=logmeanexp)
    #log.p(("\tGLOBAL POOLING SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net)) - 1))

    # Sigmoid output
    net = l.NonlinearityLayer(net, nonlinearity=nl.sigmoid)

    #log.p(("\tFINAL NET OUT SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net))))
    log.p("DONE!")

    # Model stats
    #log.p(("MODEL HAS", (sum(hasattr(layer, 'W') for layer in l.get_all_layers(net))), "WEIGHTED LAYERS"))
    #log.p(("MODEL HAS", l.count_params(net), "PARAMS"))

    return net
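
logmeanexp is referenced as the global pool function but not defined in this excerpt; it is presumably a smooth pooling between mean and max. A NumPy sketch of the usual log-mean-exp form (the sharpness handling in the actual model is an assumption):

import numpy as np

def logmeanexp_np(x, axis=None, sharpness=1.0):
    # Smooth maximum: tends to the mean as sharpness -> 0, to the max as sharpness -> inf
    m = np.max(x, axis=axis, keepdims=True)  # subtract the max for numerical stability
    y = np.log(np.mean(np.exp(sharpness * (x - m)), axis=axis)) / sharpness
    return y + np.squeeze(m, axis=axis)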
Example no. 14


############################## SERVER ################################
if __name__ == '__main__':

    # RUN SERVER
    log.p('STREAM SERVER UP AND RUNNING!')
    run(host='localhost', port=8080, server='paste')
    #run(host='innonuc.informatik.tu-chemnitz.de', port=8080, server='paste')