Example #1
import aux_fn  # project-local helper module providing the data utilities

def load_data(window_width, file, sample_rate, rolling_step=None):
    # Default to a 50% overlap between consecutive windows.
    if rolling_step is None:
        rolling_step = window_width // 2
    data = aux_fn.get_data_from_files([file], sample_rate, window_width,
                                      rolling_step)
    print(data.shape)
    return data
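
A hypothetical call, for orientation; the file path, sample rate, and window width below are placeholders, not values from the original:

# Placeholder arguments; rolling_step falls back to window_width // 2 = 512.
windows = load_data(window_width=1024, file='audio/mixture_001.wav',
                    sample_rate=16000)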
Example #2
import numpy as np

import aux_fn  # project-local helper module providing the data utilities

def get_response(model, file, file_meta, sample_rate, rolling_step=None):
    # Infer the window width from the model's input layer.
    window_width = model.layers[0].input_shape[1]
    if rolling_step is None:
        rolling_step = window_width // 2
    data = aux_fn.get_data_from_files(
        [file], sample_rate, window_width, rolling_step)
    print(data.shape)

    # Run the autoencoder over all windows in one batch; the extra axis
    # turns each window into a (window_width, 1) feature sequence.
    yhat = model.predict(np.expand_dims(data, axis=2),
                         batch_size=data.shape[0], verbose=1)
    yhat = np.squeeze(yhat)
    # Stitch the overlapping windows back into continuous signals.
    response = aux_fn.unroll(yhat, rolling_step)
    raw_data = aux_fn.unroll(data, rolling_step)

    # If the file contains a labelled event, convert its start/end times
    # from seconds to sample indices, clipped to the signal length.
    anomaly_bounds = None
    if file_meta['event_present']:
        ev_start = round(
            file_meta['event_start_in_mixture_seconds'] * sample_rate)
        ev_end = round(
            ev_start + file_meta['event_length_seconds'] * sample_rate)
        anomaly_bounds = (min(ev_start, len(raw_data)),
                          min(ev_end, len(raw_data)))

    return {'data': raw_data, 'resp': response, 'actual': anomaly_bounds}
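
A sketch of how the returned dictionary might be consumed, assuming the unrolled signals are 1-D and that per-sample reconstruction error serves as the anomaly score (this scoring step is an assumption, not from the original):

result = get_response(model, file, file_meta, sample_rate)
# Per-sample reconstruction error; large values suggest anomalous regions.
error = np.abs(result['data'] - result['resp'])
if result['actual'] is not None:
    ev_start, ev_end = result['actual']
    print('mean error inside event: ', error[ev_start:ev_end].mean())
    print('mean error outside event:',
          np.delete(error, np.s_[ev_start:ev_end]).mean())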
Example #3
import logging
import time

import numpy as np
import tensorflow as tf
from keras.callbacks import Callback

import aux_fn  # project-local helper module
# LSTMAutoencoderV2 is assumed to come from the same project.

# This snippet begins mid-statement in the source; the head of the logging
# setup is reconstructed here, with log_file standing in for the elided
# log filename.
logging.basicConfig(filename=log_file,
                    filemode='w',
                    level=logging.INFO)


class LogHistory(Callback):
    # Keras passes the epoch index here; the original named it `batch`.
    def on_epoch_end(self, epoch, logs=None):
        logging.info(logs)
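
Since LogHistory is a standard Keras Callback, it would normally be attached through the callbacks argument of fit. A minimal sketch, assuming the model exposes the Keras fit API (the training loop below instead drives a raw TensorFlow session):

# Hypothetical fit call; the autoencoder reconstructs its own input.
model.fit(data, data, epochs=epochs, batch_size=batchsize,
          callbacks=[LogHistory()])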


# Build the autoencoder from the encoder/decoder layer specifications.
model = LSTMAutoencoderV2(batchsize, window_width, encoder_layer_list,
                          decoder_layer_list)

files = aux_fn.get_all_files(path)
print('Number of files:', len(files))

data = aux_fn.get_data_from_files(files, sr=sample_rate, ww=window_width)
# Add a channel axis: (num_windows, window_width) -> (num_windows, window_width, 1).
data = np.expand_dims(data, axis=2)

with tf.Session() as sess:
    tf.global_variables_initializer().run()
    tf.local_variables_initializer().run()
    train_writer = tf.summary.FileWriter(logs_path, sess.graph)

    start_time = time.time()
    for epoch in range(epochs):
        epoch_loss = 0
        minibatch = 0
        # Number of full minibatches per epoch; any remainder is dropped.
        iteration = data.shape[0] // batchsize
        for i in range(iteration):
            # Slice bounds of the current minibatch.
            start = minibatch
            end = minibatch + batchsize