Example #1
0
def main():
    """Parse CLI arguments, validate the chosen side, and start the client."""
    args = init_args()
    # Only two peers exist in the beep-beep protocol: side 'a' and side 'b'.
    if args.side not in ('a', 'b'):
        print("Side must be 'a' or 'b'")
        return
    print("Server url:", args.server_url)
    print("You are side", args.side)
    client = DistanceClient(args.server_url, args.side, args)
    client.start()
Example #2
0
        def decode_file():
            """Load the received wave file, demodulate it with FSK, and write
            the decoded text to ../result.txt, printing timing statistics.
            """
            start = time.time_ns()
            get_wave = utils.load_wave(save_base='../receive', file_name='output.wav')
            end = time.time_ns()
            print('读取文件耗时:', (end - start) / 1e6, 'ms')
            # Stereo recordings have shape (samples, channels); keep channel 0.
            if len(get_wave.shape) == 2:
                get_wave = get_wave[:, 0]

            start = time.time_ns()
            packets = FSK.demodulation(utils.init_args(), get_wave)
            count, result = utils.decode_bluetooth_packet(utils.init_args(), packets)
            # Context manager guarantees the file is closed even if write() raises,
            # unlike the previous open()/write()/close() sequence.
            with open('../result.txt', 'w', encoding='utf-8') as f:
                f.write(result)
            end = time.time_ns()
            print('解码文本耗时:', (end - start) / 1e6, 'ms')
            print('蓝牙包成功解码数量:{}\n解码信息:{}\n'.format(count, result))
Example #3
0
# -*- coding: utf-8 -*-
import threading
import time

from flask import Flask, request, jsonify

import FSK
import utils
from distance.beepbeep import calculate_distance

# Serve files from ./static directly at the web root ('' URL prefix).
app = Flask(__name__, static_folder='static', static_url_path='')
# Shared argument object consumed by the FSK / beep-beep helpers.
program_args = utils.init_args()
program_args.beep_beep = True
# Detection threshold for the beep-beep signal; units/scale come from
# calculate_distance's correlation — TODO confirm.
program_args.threshold = 0.72e11

# NOTE(review): appears to hold one pending-file dict per peer side — confirm
# against the upload handlers.
paired_files = [{}, {}]
server_state = {
    "process": []
}
image_list = []


def dump_file_info(file):
    """Summarize an uploaded file record: its name, payload size, and timestamp."""
    summary = {}
    summary["filename"] = file["filename"]
    summary["size"] = len(file["data"])
    summary["time"] = file["time"]
    return summary


def process(file1, file2):
Example #4
0
def main():
    """Train the model until validation loss falls below args.threshold,
    logging per-epoch losses and checkpointing whenever validation improves.
    """

    args = init_args()

    # Prepare the dataloader
    train_dataloaders, validation_dataloaders, test_dataloader, args = initialize_dataloader(
        args, subset=None)

    # Prepare the model
    model, criterion, optimizer = initialize_model(args)

    # Baseline validation loss before any training; logged with epoch=-1.
    best_validation_loss = evaluate(model, validation_dataloaders, criterion)
    log(-1, args, validation_loss=best_validation_loss)

    training_losses = []
    validation_losses = []
    model.train()
    epoch = 0

    # Iterate through the data
    while best_validation_loss > args.threshold:

        # Float counters so averages below stay in float arithmetic.
        epoch += 1.0
        training_loss = 0.0
        n_batches = 0.0

        # Sample one training environment per epoch at random.
        env = np.random.choice(args.training_agents)

        for observations, actions, target in tqdm(train_dataloaders[env]):

            # Zero out the gradient for this round of updates
            optimizer.zero_grad()

            # Conduct a forward pass of the transformer
            prediction = model.forward(observations, actions)

            # Compare the output of the model to the target
            # NOTE(review): torch.Tensor(...) copies its argument and does NOT
            # track gradients; if model.forward returns an autograd tensor this
            # detaches it from the graph and loss.backward() cannot update the
            # model. Presumably forward returns a non-tensor here — confirm.
            prediction = torch.Tensor(prediction.flatten())
            target = torch.Tensor(target.flatten().float())
            loss = criterion(prediction, target)

            # Update the model
            loss.backward()
            optimizer.step()

            training_loss += loss.item()
            n_batches += 1.0

        # Check against the validation dataset
        validation_loss = evaluate(model, validation_dataloaders, criterion)

        # Scale by the batch size
        training_loss = training_loss / n_batches

        # Save the losses
        training_losses.append(training_loss)
        validation_losses.append(validation_loss)
        np.save(f'{args.model_dir}/log/training_losses.npy', training_losses)
        np.save(f'{args.model_dir}/log/validation_losses.npy',
                validation_losses)

        # Update the logs
        log(epoch,
            args,
            validation_loss=validation_loss,
            training_loss=training_loss)

        # Save model only when validation improves; best loss also drives the
        # while-loop stopping condition above.
        if validation_loss < best_validation_loss:
            save_model(model, args, epoch)
            best_validation_loss = validation_loss

    # Apply to test dataset; epoch=-2 marks the final test-set entry in the log.
    test_loss = evaluate(model, test_dataloader, criterion)
    log(-2, args, test_loss=test_loss)
Example #5
0
        dst = th.tensor(sku_info[edge.dst.numpy()[0]]).view(1, 4)
        # (1, dim)
        src = model.query_node_embed(src)
        dst = model.query_node_embed(dst)
        # (1, dim) -> (1, dim) -> (1, )
        logit = th.sigmoid(th.sum(src * dst))
        preds.append(logit.detach().numpy().tolist())
        labels.append(edge.label)

    fpr, tpr, thresholds = metrics.roc_curve(labels, preds, pos_label=1)

    print("Evaluate link prediction AUC: {:.4f}".format(metrics.auc(fpr, tpr)))


if __name__ == "__main__":
    args = utils.init_args()

    valid_sku_raw_ids = utils.get_valid_sku_set(args.item_info_data)

    g, sku_encoder, sku_decoder = utils.construct_graph(
        args.action_data, args.session_interval_sec, valid_sku_raw_ids)

    train_g, test_g = utils.split_train_test_graph(g)

    sku_info_encoder, sku_info_decoder, sku_info = \
        utils.encode_sku_fields(args.item_info_data, sku_encoder, sku_decoder)

    num_skus = len(sku_encoder)
    num_brands = len(sku_info_encoder["brand"])
    num_shops = len(sku_info_encoder["shop"])
    num_cates = len(sku_info_encoder["cate"])
Example #6
0
        train_cbs = ()
        val_cbs = ()

    strategy = Strategy(data_provider=data_provider,
                        train_loader_names=tuple(
                            sampling_config['train']['data_provider'].keys()),
                        val_loader_names=tuple(
                            sampling_config['eval']['data_provider'].keys()),
                        data_sampling_config=sampling_config,
                        loss=criterion,
                        model=model,
                        n_epochs=args.n_epochs,
                        optimizer=optimizer,
                        train_callbacks=train_cbs,
                        val_callbacks=val_cbs,
                        device=torch.device('cuda:{}'.format(args.gpu)),
                        distributed=args.distributed,
                        use_apex=args.use_apex)

    strategy.run()


if __name__ == '__main__':
    """When the file is run"""
    # Record wall-clock start so total runtime can be reported at the end.
    t = time.time()
    # parse arguments
    args = init_args()
    # kick off the main function — presumably the launcher spawns/dispatches
    # worker_process (possibly per GPU given args.distributed) — confirm.
    kick_off_launcher(args, worker_process)
    print('Execution Time ', (time.time() - t), ' Seconds')
Example #7
0
def main():
    packets = FSK.demodulation(utils.init_args(),
                               utils.load_wave("", "_tmp_a_0_6535.wav"))