Example #1
def decode_dataset(data):
    # Wrap the raw (JSON-derived) data so fields are attribute-accessible.
    data = to_structs(data)
    config = data.config

    # Class ids arrive as string keys in the config; convert them to ints.
    classes = [struct(id=int(k), **v) for k, v in config.classes.items()]

    # Decode every image and index it by file name.
    images = {i.image_file: decode_image(i, config) for i in data.images}
    return config, DetectionDataset(classes=classes, images=images)
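These snippets all lean on a small set of helpers (struct, to_structs, Struct) from the surrounding project, which are not shown here. As a rough sketch of the behaviour the examples assume (attribute access over plain dicts, applied recursively to parsed JSON), the helpers could look something like the following; the real implementation may differ:

# Hypothetical stand-ins for the project's struct helpers (not the real code).
class Struct(dict):
    # A dict whose keys can also be read as attributes (entry.step, config.classes, ...).
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError as e:
            raise AttributeError(name) from e

def struct(**kwargs):
    # Build a Struct directly from keyword arguments, e.g. struct(value=1.0, time=0.0).
    return Struct(kwargs)

def to_structs(value):
    # Recursively convert parsed JSON (dicts and lists) into Structs.
    if isinstance(value, dict):
        return Struct({k: to_structs(v) for k, v in value.items()})
    if isinstance(value, list):
        return [to_structs(v) for v in value]
    return value

With helpers along these lines, expressions such as config.classes.items() and i.image_file in decode_dataset read straight off the parsed JSON.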
Example #2
def decode_dataset(data):
    # Wrap the raw (JSON-derived) data so fields are attribute-accessible.
    data = to_structs(data)
    config = data.config

    # Class ids arrive as string keys in the config; convert them to ints.
    classes = [struct(id=int(k), **v) for k, v in config.classes.items()]

    # Decode images, drop any that come back as None, and sort by their start field.
    images = filter_none([decode_image(i, config) for i in data.images])
    images.sort(key=lambda image: image.start)

    return struct(classes=classes, images=images, config=config)
Example #3
import json

def read_log(file):
    # One JSON object per line; parse each into an attribute-accessible struct.
    with open(file, mode="r") as f:
        entries = [to_structs(json.loads(line)) for line in f]

    steps = {}   # step -> {tag: struct(value, time)}
    tags = {}    # tags seen so far (dict used as a set)

    for entry in entries:
        step = steps.get(entry.step) or {}
        step[entry.tag] = struct(value=entry.value, time=entry.time)

        tags[entry.tag] = True
        steps[entry.step] = step

    return struct(tags=tags.keys(), steps={i: Struct(step) for i, step in steps.items()})
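read_log assumes a log with one JSON object per line carrying step, tag, value and time fields; the schema here is inferred from the code above, not documented. A hypothetical run might look like this (the file name and tag names are made up for illustration):

# Hypothetical log lines; field names follow read_log's usage above.
lines = [
    '{"step": 0, "tag": "train_loss", "value": 1.32, "time": 4.1}',
    '{"step": 0, "tag": "AP",         "value": 0.10, "time": 4.2}',
    '{"step": 1, "tag": "train_loss", "value": 0.97, "time": 8.3}',
]

with open("training.json", "w") as f:    # throwaway file just for the example
    f.write("\n".join(lines))

log = read_log("training.json")
print(sorted(log.tags))                  # expected: ['AP', 'train_loss']
print(log.steps[1]["train_loss"].value)  # expected: 0.97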
Example #4
def load_dataset(filename):
    return to_structs(import_json(filename))
Example #5
    def process_command(message):
        nonlocal env

        if message is None:
            print("Server disconnected.")
            raise UserCommand('pause')

        try:
            # Each message is a tagged JSON payload: (tag, data).
            tag, data = split_tagged(to_structs(json.loads(message)))
            if tag == 'command':
                raise UserCommand(data)

            elif tag == 'init':
                config, dataset = init_dataset(data)
                env = initialise(config, dataset, args)

                args.no_load = False  # For subsequent initialisations

                if not args.paused:
                    raise UserCommand('resume')

            elif tag == 'import':
                file, image_data = data

                image = decode_image(image_data, env.config)
                env.dataset.update_image(image)

            elif tag == 'update':
                file, method, image_data = data

                image = decode_image(image_data, env.config)
                env.dataset.update_image(image)

                if image.category == 'validate':
                    env.best.score = 0

                # Reset the pause countdown; resume only if training was fully paused.
                resume = env.pause_time == 0
                env.pause_time = env.args.pause_epochs
                if resume:
                    raise UserCommand('resume')

            elif tag == 'detect':
                reqId, file, annotations, nms_params = data

                if env is not None:
                    # Decode the reviewed annotations (if any) against the live config.
                    review = decode_object_map(
                        annotations, env.config) if len(annotations) > 0 else None

                    results = detect_request(env,
                                             file,
                                             nms_params,
                                             review=review)
                    send_command('detect_request', (reqId, file, results))

                else:
                    send_command('req_error',
                                 [reqId, file, "model not available yet"])

            else:
                send_command('error', "unknown command: " + tag)
                print("unknown command: " + tag)

        except json.JSONDecodeError as err:
            send_command('error', repr(err))
            return None
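process_command never returns a result for 'command', 'init' or 'update'; it raises UserCommand to hand control back to its caller. The surrounding loop is not shown above, so the following is only a hedged sketch of what a driver could look like. The connection object, the train_one_epoch callback and the UserCommand stand-in are assumptions, not the project's actual code:

class UserCommand(Exception):
    # Minimal stand-in; the real UserCommand may store its argument differently.
    def __init__(self, command):
        super().__init__(command)
        self.command = command


def command_loop(connection, process_command, train_one_epoch):
    # Hypothetical driver: catch the UserCommand raised by process_command and
    # use 'pause'/'resume' to gate the training callback.
    paused = True
    while True:
        try:
            process_command(connection.readline() or None)
        except UserCommand as cmd:
            if cmd.command == 'pause':
                paused = True
            elif cmd.command == 'resume':
                paused = False

        if not paused:
            train_one_epoch()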
Example #6
        plt.plot(x, density(x), label=labels[k], color=dataset_colors[k])

    plt.xlabel('object size as percent of image size')
    plt.ylabel('density')

    plt.xlim(0.0, 100.0)

    plt.legend()

    return fig, ax


if __name__ == '__main__':
    figure_path = "/home/oliver/sync/figures/summaries"

    # Load the annotated subsets, then add Pascal VOC and COCO for comparison.
    loaded = load_all(datasets._subset("penguins", "branches", "scallops"),
                      base_path)

    voc = to_structs(import_voc())
    coco = to_structs(import_coco(subsets=[('val2017', 'validate')]))

    loaded = loaded._extend(pascal_voc=voc, coco=coco)
    keys = list(sorted(loaded.keys()))

    fig, ax = plot_sizes_density(loaded,
                                 keys,
                                 labels=dataset_labels._extend(
                                     pascal_voc="pascal vOC", coco="coco"))
    fig.savefig(path.join(figure_path, "sizes_density.pdf"),
                bbox_inches='tight')