Example #1
0
def measure_performance(config, *, student=None, teacher=None):
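    """Evaluate the student or the teacher network on the validation data.

    Returns a dict with either the classification accuracy or the per-sample
    squared-error loss, depending on the configured prediction type.
    """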
    data_loader = get_data_loader(config.data.validation)

    if student:
        model = load_network(
            {
                "model": config.student["path"],
                "input_shape": config.teacher["input_shape"],
            }
        ).as_pytorch(maintain_weights=True)
    elif teacher:
        model = load_network(config.teacher).as_pytorch(maintain_weights=True)
    else:
        raise ValueError(
            "must specify whether to measure student of teacher performance"
        )

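    # Prefer CUDA when it is enabled in the config and available on this machine.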
    device = torch.device("cpu")
    if config.get("cuda", False) and torch.cuda.is_available():
        device = torch.device("cuda")
    model.to(device)

    model.eval()

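    # Accumulate the metric over the whole validation set: accuracy for
    # classification, otherwise the mean squared error per sample.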
    prediction_type = config.get("type", "classification")
    num_correct = 0.0
    num_samples = 0.0
    loss = 0.0
    for i, (idx, _, sx, target) in enumerate(data_loader):
        y = model(sx.to(device)).squeeze()
        target = target.to(device)
        num_samples += target.shape[0]
        if prediction_type == "classification":
            pred = y.argmax(-1)
            correct = pred == target
            num_correct += correct.sum(dtype=torch.float)
            performance = {"accuracy": (num_correct / num_samples).item()}
        else:
            loss += ((y - target) ** 2).sum()
            performance = {"loss": (loss / num_samples).item()}
    return performance
Example #2
0
def main(args):
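    """Evaluate a saved model on the validation/test split, printing the
    running accuracy (classification) or mean squared error after every batch.
    """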
    config = DataConfiguration(toml.load(args.data_config))
    config.config["_STAGE"] = "val_test"
    data_loader = get_data_loader(config)

    model = load_network({
        "model": args.model_path,
        "input_shape": args.input_shape,
        "input_format": args.input_format,
    }).as_pytorch(maintain_weights=True)

    device = torch.device("cpu")
    if args.cuda and torch.cuda.is_available():
        device = torch.device("cuda")
    print("Using device: %s" % device)
    model.to(device)

    model.eval()

    prediction_type = args.prediction_type
    num_correct = 0.0
    num_samples = 0.0
    loss = 0.0
    for i, (idx, _, sx, target) in enumerate(data_loader):
        y = model(sx.to(device)).squeeze()
        target = target.to(device)
        num_samples += target.shape[0]
        if prediction_type == "classification":
            pred = y.argmax(-1)
            correct = pred == target
            num_correct += correct.sum(dtype=torch.float)
            performance = {"accuracy": (num_correct / num_samples).item()}
        else:
            loss += ((y - target)**2).sum()
            performance = {"loss": (loss / num_samples).item()}
        print("%7d: %s" % ((i + 1) * data_loader.batch_size, performance))
Example #3
0
def main(args):
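    """Generate DNNV local-robustness properties for a driving model with
    steering-angle and collision-probability outputs, and index every
    generated property in properties.csv.
    """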
    logger = logging.initialize(__name__, args)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)

    os.makedirs(args.output_dir, exist_ok=True)
    properties_filename = os.path.join(args.output_dir, "properties.csv")
    if not os.path.exists(properties_filename):
        with open(properties_filename, "w+") as prop_file:
            prop_file.write(
                "id,property_filename,image_filename,numpy_filename,steering_angle,collision_prob,steering_angle_lb,steering_angle_ub,collision_prob_lb,collision_prob_ub\n"
            )

    config = DataConfiguration(toml.load(args.data_config))
    config.config["_STAGE"] = "val_test"
    config.config["shuffle"] = True
    config.config["batchsize"] = 1
    data_loader = get_data_loader(config)

    logger.info("Generating properties.")
    steer_count = 0
    coll_count = 0
    for idx, _, sx, target in data_loader:
        if steer_count >= args.num_properties and coll_count >= args.num_properties:
            break
        input_img_path = data_loader.dataset.samples[0][idx][0]
        new_img_path = os.path.join(
            args.output_dir,
            "%s%s" % (idx.item(), os.path.splitext(input_img_path)[-1]))

        property_type = None

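        # Steering-angle property: bound the labelled angle to within
        # +/- gamma degrees, clipped to [-pi/2, pi/2].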
        steering_angle = target[:, 0].item()
        steering_angle_lb, steering_angle_ub = float("nan"), float("nan")
        if not np.isnan(steering_angle) and steer_count < args.num_properties:
            steering_angle_lb = max(-np.pi / 2,
                                    steering_angle - args.gamma * np.pi / 180)
            steering_angle_ub = min(np.pi / 2,
                                    steering_angle + args.gamma * np.pi / 180)
            steer_count += 1
            property_type = "steer"

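        # Collision property: the prediction must stay on the same side of
        # 0.5 as the labelled collision probability.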
        collision_prob = target[:, 1].item()
        collision_prob_lb, collision_prob_ub = float("nan"), float("nan")
        if not np.isnan(collision_prob) and coll_count < args.num_properties:
            collision_prob_lb = 0.5 if collision_prob >= 0.5 else 0.0
            collision_prob_ub = 0.5 if collision_prob < 0.5 else 1.0
            coll_count += 1
            property_type = "collision"

        if property_type is None:
            continue

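        # Keep a copy of the original image and its preprocessed numpy form
        # alongside the generated property.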
        shutil.copy(input_img_path, new_img_path)

        npy_img_path = os.path.join(args.output_dir, "%s.npy" % idx.item())
        img = sx[0].numpy()
        np.save(npy_img_path, img)

        property_path = os.path.join(args.output_dir,
                                     "robustness.%s.py" % idx.item())
        with open(property_path, "w+") as property_file:
            if property_type == "steer":
                property_file.write(
                    "from dnnv.properties import *\n"
                    "import numpy as np\n\n"
                    'N = Network("N")\n'
                    f'x = Image("{npy_img_path}")\n'
                    "input_layer = 0\n"
                    "output_layer = -1\n"
                    "output_select = 0\n\n"
                    f"epsilon = {args.epsilon}\n"
                    f"gamma = {args.gamma} * np.pi / 180\n"
                    "output = N[input_layer:output_layer, output_select](x)\n"
                    "gamma_lb = max(-np.pi / 2, (output - gamma) / 2)\n"
                    "gamma_ub = min(np.pi / 2, (output + gamma) / 2)\n"
                    "Forall(\n"
                    "    x_,\n"
                    "    Implies(\n"
                    "        ((x - epsilon) < x_ < (x + epsilon)),\n"
                    "        (gamma_lb < N[input_layer:output_layer, output_select](x_) < gamma_ub),\n"
                    "    ),\n"
                    ")\n")
            elif property_type == "collision":
                property_file.write(
                    "from dnnv.properties import *\n"
                    'N = Network("N")\n'
                    f'x = Image("{npy_img_path}")\n'
                    "input_layer = 0\n"
                    "output_layer = -2\n"
                    "output_select = 1\n\n"
                    f"epsilon = {args.epsilon}\n"
                    f"gamma_lb = {collision_prob_lb}\n"
                    f"gamma_ub = {collision_prob_ub}\n"
                    "output = N[input_layer:output_layer, output_select](x)\n"
                    "Forall(\n"
                    "    x_,\n"
                    "    Implies(\n"
                    "        ((x - epsilon) < x_ < (x + epsilon)),\n"
                    "        Implies(output < 0.5, N[input_layer:output_layer, output_select](x_) < 0.0)\n"
                    "        & Implies(output > 0.5, N[input_layer:output_layer, output_select](x_) > 0.0),\n"
                    "    ),\n"
                    ")\n")

        with open(properties_filename, "a") as prop_file:
            prop_file.write("%s,%s,%s,%s,%s,%s,%s,%s,%s\n" % (
                idx.item(),
                new_img_path,
                npy_img_path,
                steering_angle,
                collision_prob,
                steering_angle_lb,
                steering_angle_ub,
                collision_prob_lb,
                collision_prob_ub,
            ))
    logger.info("Generated %d steering angle properties.", steer_count)
    logger.info("Generated %d collision probability properties.", coll_count)
Example #4
0
def main(args):
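    """Generate DNNV local-robustness properties for the steering-angle output
    of a driving model, and index them in properties.csv.
    """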
    logger = logging.initialize(__name__, args)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)

    os.makedirs(args.output_dir, exist_ok=True)
    properties_filename = os.path.join(args.output_dir, "properties.csv")
    if not os.path.exists(properties_filename):
        with open(properties_filename, "w+") as prop_file:
            prop_file.write(
                "id,property_filename,image_filename,numpy_filename,steering_angle,steering_angle_lb,steering_angle_ub\n"
            )

    config = DataConfiguration(toml.load(args.data_config))
    config.config["_STAGE"] = "val_test"
    config.config["shuffle"] = True
    config.config["batchsize"] = 1
    data_loader = get_data_loader(config)

    logger.info("Generating properties.")
    for i, (idx, _, sx, target) in enumerate(data_loader):
        if i == args.num_properties:
            break
        input_img_path = data_loader.dataset.samples[0][idx][0]
        new_img_path = os.path.join(
            args.output_dir,
            "%s%s" % (idx.item(), os.path.splitext(input_img_path)[-1]))
        shutil.copy(input_img_path, new_img_path)

        npy_img_path = os.path.join(args.output_dir, "%s.npy" % idx.item())

        img = sx[0].numpy()
        np.save(npy_img_path, img)

        steering_angle = target.item()
        steering_angle_lb = max(-np.pi / 2,
                                steering_angle - args.gamma * np.pi / 180)
        steering_angle_ub = min(np.pi / 2,
                                steering_angle + args.gamma * np.pi / 180)

        property_path = os.path.join(args.output_dir,
                                     "robustness.%s.py" % idx.item())
        with open(property_path, "w+") as property_file:
            property_file.write(
                "from dnnv.properties import *\n"
                "import numpy as np\n\n"
                'N = Network("N")\n'
                f'x = Image("{npy_img_path}")\n'
                "input_layer = 0\n"
                "output_layer = -2\n\n"
                f"epsilon = {args.epsilon}\n"
                f"gamma = {args.gamma} * np.pi / 180\n"
                "output = N[input_layer:](x)\n"
                "gamma_lb = np.tan(max(-np.pi / 2, (output - gamma) / 2))\n"
                "gamma_ub = np.tan(min(np.pi / 2, (output + gamma) / 2))\n"
                "Forall(\n"
                "    x_,\n"
                "    Implies(\n"
                "        ((x - epsilon) < x_ < (x + epsilon)),\n"
                "        (gamma_lb < N[input_layer:output_layer](x_) < gamma_ub),\n"
                "    ),\n"
                ")\n")

        with open(properties_filename, "a") as prop_file:
            prop_file.write("%s,%s,%s,%s,%s,%s,%s\n" % (
                idx.item(),
                property_path,
                new_img_path,
                npy_img_path,
                steering_angle,
                steering_angle_lb,
                steering_angle_ub,
            ))
    logger.info("Generated %d properties.", i)
Example #5
0
def main(args):
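    """Measure performance for a single model file, or for every .onnx
    checkpoint in a directory, tracking and plotting the best iteration for
    each reported metric. User-supplied plugins are loaded first.
    """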
    for plugin_path in args.plugins:
        plugin_name = Path(plugin_path).stem
        spec = importlib.util.spec_from_file_location(plugin_name, plugin_path)
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)

    config = DataConfiguration(toml.load(args.data_config))
    config.config["_STAGE"] = "val_test"
    data_loader = get_data_loader(config)

    device = torch.device("cpu")
    if args.cuda and torch.cuda.is_available():
        device = torch.device("cuda")
    print("Using device: %s" % device)

    if os.path.isfile(args.model_path):
        measure(
            args.model_path,
            args.teacher,
            device,
            data_loader,
            args.input_shape,
            args.input_format,
            args.teacher_input_shape,
            args.teacher_input_format,
            args.loss,
        )
    elif os.path.isdir(args.model_path):
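        # Sweep every .onnx checkpoint in the directory, ordered by the
        # numeric components of its filename.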
        performance_measures = defaultdict(list)
        for model_path in sorted(
                glob.glob(os.path.join(args.model_path, "*.onnx")),
                key=lambda path: tuple(
                    maybe_int(part) for part in path.split(".")),
        ):
            print(model_path)
            model_name = os.path.basename(model_path)
            if not any(
                    isinstance(maybe_int(part), int)
                    for part in model_name.split(".")):
                continue
            performance_measure = measure(
                model_path,
                args.teacher,
                device,
                data_loader,
                args.input_shape,
                args.input_format,
                args.teacher_input_shape,
                args.teacher_input_format,
                args.loss,
            )
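            # Record each metric for this checkpoint and report/plot the
            # running best across the iterations seen so far.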
            measures_seen = set()
            for measure_name in performance_measure.keys():
                measure_name = measure_name.split("_")[-1]
                if measure_name in measures_seen:
                    continue
                measures_seen.add(measure_name)
                performance = {}
                for name, value in performance_measure.items():
                    if not name.endswith(measure_name):
                        continue
                    optimization = "minimum"
                    opt, argopt = np.min, np.argmin
                    if "accuracy" in name:
                        optimization = "maximum"
                        opt, argopt = np.max, np.argmax
                    performance_measures[name].append(value)
                    iteration = len(performance_measures[name]) - 1
                    opt_value = opt(performance_measures[name])
                    opt_iteration = argopt(performance_measures[name])
                    print("%d - Current iteration with %s %s: %d" %
                          (iteration, optimization, name, opt_iteration))
                    print(
                        "%d - Current %s %s: %f" %
                        (iteration, optimization, name, opt_value),
                        flush=True,
                    )
                    performance[name] = performance_measures[name]
                plot_loss_vs_epoch(
                    performance,
                    title=args.model_path,
                    path=os.path.join(args.model_path,
                                      "performance.%s.tmp.png" % measure_name),
                )
        print()
        print("Best Performing Iterations")
        print("==========================")
        measures_seen = set()
        for measure_name in performance_measure.keys():
            measure_name = measure_name.split("_")[-1]
            if measure_name in measures_seen:
                continue
            measures_seen.add(measure_name)
            performance = {}
            for name, value in performance_measure.items():
                if not name.endswith(measure_name):
                    continue
                optimization = "minimum"
                opt, argopt = np.min, np.argmin
                if "accuracy" in name:
                    optimization = "maximum"
                    opt, argopt = np.max, np.argmax
                opt_value = opt(performance_measures[name])
                opt_iteration = argopt(performance_measures[name])
                print("Iteration with %s %s: %d" %
                      (optimization, name, opt_iteration))
                optimization = optimization[0].upper() + optimization[1:]
                print("%s %s: %f\n" % (optimization, name, opt_value),
                      flush=True)
                performance[name] = performance_measures[name]
            plot_loss_vs_epoch(
                performance,
                title=args.model_path,
                path=os.path.join(args.model_path,
                                  "performance.%s.png" % measure_name),
            )
    else:
        print("%s does not exist." % args.model_path)
Example #6
0
def main(args):
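    """Generate DNNV robustness properties for an image classifier: within an
    L-infinity ball of radius epsilon around each input, the predicted class
    (argmax) must not change. Each property is indexed in properties.csv.
    """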
    logger = logging.initialize(__name__, args)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)

    os.makedirs(args.output_dir, exist_ok=True)
    properties_filename = os.path.join(args.output_dir, "properties.csv")
    if not os.path.exists(properties_filename):
        with open(properties_filename, "w+") as prop_file:
            prop_file.write(
                "id,property_filename,image_filename,numpy_filename,target\n"
            )

    config = DataConfiguration(toml.load(args.data_config))
    config.config["_STAGE"] = "val_test"
    config.config["shuffle"] = True
    config.config["batchsize"] = 1
    data_loader = get_data_loader(config)

    logger.info("Generating properties.")
    for i, (idx, _, sx, target) in enumerate(data_loader):
        if i == args.num_properties:
            break
        
        new_img_path = os.path.join(
            args.output_dir, "%s.png" % (idx.item())
        )
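        # Reconstruct the grayscale image for inspection and save the raw
        # array that the property file references.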
        height = config.config['transform']['height']
        width = config.config['transform']['width']
        assert height == width
        img = Image.fromarray(sx.numpy().reshape(height,width), 'L')
        img.save(new_img_path)
        
        npy_img_path = os.path.join(args.output_dir, "%s.npy" % idx.item())
        np.save(npy_img_path, sx.reshape(1,height,width))

        property_path = os.path.join(args.output_dir, "robustness.%s.%s.py" % (idx.item(), args.epsilon))
        with open(property_path, "w+") as property_file:
            property_file.write(
                "from dnnv.properties import *\n"
                "import numpy as np\n\n"
                'N = Network("N")\n'
                f'x = Image("{npy_img_path}")\n'
                "input_layer = 0\n"
                f"epsilon = {args.epsilon}\n"
                "Forall(\n"
                "    x_,\n"
                "    Implies(\n"
                "        ((x - epsilon) < x_ < (x + epsilon)),\n"
                "        argmax(N[input_layer:](x_)) == argmax(N[input_layer:](x)),\n"
                "    ),\n"
                ")\n"
            )

        with open(properties_filename, "a") as prop_file:
            prop_file.write(
                "%s,%s,%s,%s,%s\n"
                % (
                    idx.item(),
                    property_path,
                    new_img_path,
                    npy_img_path,
                    target.item(),
                )
            )
    logger.info("Generated %d properties.", i)