Example #1
    def test_pcd_subsampling_random(self):
        """
        Tests random subsampling.
        """

        pointcloud_original = self.get_pointcloud()

        for subsampling_size in subsampling_sizes:
            pointcloud_subsampled = subsample_pointcloud(
                pointcloud_original, subsampling_size, "random")
            self.assertEqual(len(pointcloud_subsampled), subsampling_size)
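
For context, the tests in this listing exercise a subsample_pointcloud helper that is not shown here. Below is a minimal sketch of what such a helper could look like, assuming an (N, C) NumPy array as input; the actual implementation in the codebase may differ.

import numpy as np

def subsample_pointcloud(pointcloud, target_size, subsampling_method="random"):
    # Illustrative sketch only; the real helper lives elsewhere in the codebase.
    if subsampling_method == "random":
        # Draw target_size points uniformly at random, without replacement.
        indices = np.random.choice(len(pointcloud), target_size, replace=False)
        return pointcloud[indices]
    if subsampling_method == "first":
        # Keep the first target_size points.
        return pointcloud[:target_size]
    if subsampling_method == "sequential_skip":
        # Keep every k-th point, then trim to exactly target_size points.
        skip = max(1, len(pointcloud) // target_size)
        return pointcloud[::skip][:target_size]
    raise ValueError("Unknown subsampling method: {}".format(subsampling_method))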
Example #2
    def process_artifacts(artifacts):

        # Create database connection.
        db_connector = dbutils.connect_to_main_database()

        # Load the model first.
        model = load_model(model_path)
        model_name = model_path.split("/")[-2]

        # Evaluate and create SQL-statements.
        bar = progressbar.ProgressBar(max_value=len(artifacts))
        for artifact_index, artifact in enumerate(artifacts):
            bar.update(artifact_index)

            # Evaluate the artifact and store the result in the database.
            try:
                # Load the artifact and evaluate.
                artifact_id, pcd_path, target_height, qrcode = artifact
                pcd_array = utils.load_pcd_as_ndarray(pcd_path)
                pcd_array = utils.subsample_pointcloud(pcd_array, 10000)
                mse, mae = model.evaluate(np.expand_dims(pcd_array, axis=0),
                                          np.array([target_height]),
                                          verbose=0)
                if qrcode in qrcodes_train:
                    misc = "training"
                else:
                    misc = "nottraining"

                # Create an SQL statement.
                sql_statement = ""
                sql_statement += "INSERT INTO artifact_quality (type, key, value, artifact_id, misc)"
                sql_statement += " VALUES(\'{}\', \'{}\', \'{}\', \'{}\', \'{}\');".format(
                    model_name, "mae", mae, artifact_id, misc)

                # Call database.
                result = db_connector.execute(sql_statement)
            except psycopg2.IntegrityError:
                print("Already in DB. Skipped.", pcd_path)
            except ValueError:
                print("Skipped.", pcd_path)
        bar.finish()
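
The function above relies on a module-level model_path and a qrcodes_train collection that are not part of this snippet. A hypothetical driver is sketched below; the query, table, and paths are assumptions for illustration, not taken from the original module.

# Hypothetical usage; table and column names are assumptions.
model_path = "/models/pointnet-height/version-1/"
qrcodes_train = set()  # QR codes of scans that were used for training.

db_connector = dbutils.connect_to_main_database()
artifacts = db_connector.execute(
    "SELECT id, path, height, qrcode FROM artifact;", fetch_all=True)
process_artifacts(artifacts)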
Example #3
    def test_pcd_subsampling_first(self):
        """
        Tests first subsampling.
        """

        pointcloud_original = self.get_pointcloud()

        # Generate some pointclouds and check if target_size is met.
        pointclouds = []
        for subsampling_size in subsampling_sizes:
            pointcloud_subsampled = subsample_pointcloud(
                pointcloud_original, subsampling_size, "first")
            self.assertEqual(len(pointcloud_subsampled), subsampling_size)
            pointclouds.append(pointcloud_subsampled)

        # See if the pointclouds have the same first n elements.
        for pointcloud1, pointcloud2 in zip(pointclouds, pointclouds[1:]):
            min_length = min(len(pointcloud1), len(pointcloud2))
            self.assertNotEqual(min_length, 0)
            self.assertTrue(
                np.array_equal(pointcloud1[:min_length],
                               pointcloud2[:min_length]))
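
Both subsampling tests assume a get_pointcloud helper and a module-level subsampling_sizes list that the listing omits. A minimal fixture sketch follows; the synthetic data and sizes are chosen purely for illustration.

import unittest
import numpy as np

# Assumed module-level constant; the original sizes are not shown.
subsampling_sizes = [1024, 2048, 4096]

class TestPcdSubsampling(unittest.TestCase):

    def get_pointcloud(self):
        # Synthetic stand-in for loading a real PCD file from disk.
        return np.random.random((30000, 4))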
Example #4
    def process_artifacts(artifacts, process_index):

        # Create database connection.
        db_connector = dbutils.connect_to_main_database()

        # Load the model first.
        model_files = glob.glob(os.path.join(model_path, "*"))
        model_weights_path = [
            x for x in model_files if x.endswith("-model-weights.h5")
        ][0]
        model_details_path = [
            x for x in model_files if x.endswith("-details.p")
        ][0]
        model_name = model_path.split("/")[-1]
        with open(model_details_path, "rb") as details_file:
            model_details = pickle.load(details_file)
        pointcloud_target_size = model_details["dataset_parameters"][
            "pointcloud_target_size"]
        pointcloud_subsampling_method = model_details["dataset_parameters"][
            "pointcloud_subsampling_method"]
        target_key = model_details["dataset_parameters"]["output_targets"][0]
        model = load_model(model_weights_path, pointcloud_target_size)

        # Evaluate and create SQL-statements.
        for artifact_index, artifact in enumerate(
                tqdm(artifacts, position=process_index)):

            # Unpack fields.
            artifact_id, pcd_path = artifact

            # Check if there is already an entry.
            select_sql_statement = ""
            select_sql_statement += "SELECT COUNT(*) FROM artifact_result"
            select_sql_statement += " WHERE artifact_id='{}'".format(
                artifact_id)
            select_sql_statement += " AND model_name='{}'".format(model_name)
            select_sql_statement += " AND target_key='{}'".format(target_key)
            results = db_connector.execute(select_sql_statement,
                                           fetch_one=True)[0]

            # There is already an entry. Skip it.
            if results != 0:
                continue

            # Evaluate the artifact and store the result in the database.
            try:
                # Load the artifact and evaluate.
                pcd_path = pcd_path.replace("/whhdata/qrcode",
                                            "/localssd/qrcode")
                pcd_array = utils.load_pcd_as_ndarray(pcd_path)
                pcd_array = utils.subsample_pointcloud(
                    pcd_array,
                    pointcloud_target_size,
                    subsampling_method=pointcloud_subsampling_method)

                value = model.predict(np.expand_dims(pcd_array, axis=0),
                                      verbose=0)[0][0]

                # Create an SQL statement.
                sql_statement = ""
                sql_statement += "INSERT INTO artifact_result (model_name, target_key, value, artifact_id)"
                sql_statement += " VALUES(\'{}\', \'{}\', \'{}\', \'{}\');".format(
                    model_name, target_key, value, artifact_id)

                # Call database.
                result = db_connector.execute(sql_statement)
            except psycopg2.IntegrityError:
                # Result is already in the database. Skip.
                pass
            except ValueError:
                # Artifact could not be evaluated. Skip.
                pass
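
The process_index argument only feeds tqdm's position parameter, which suggests the artifacts are split across worker processes. A hypothetical parallel driver under that assumption:

import multiprocessing

# Hypothetical driver; the round-robin chunking is an assumption based on
# the process_index / tqdm(position=...) arguments above.
def process_artifacts_in_parallel(artifacts, number_of_processes=4):
    chunks = [artifacts[i::number_of_processes]
              for i in range(number_of_processes)]
    processes = []
    for process_index, chunk in enumerate(chunks):
        process = multiprocessing.Process(target=process_artifacts,
                                          args=(chunk, process_index))
        process.start()
        processes.append(process)
    for process in processes:
        process.join()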
Example #5
def store_results(scan_path, db_connection_file, destination_folder):

    # Get the path to the scan.
    scan_path_split = scan_path.split("/")
    scan_qrcode = scan_path_split[-3]
    scan_timestamp = scan_path_split[-1]

    # Get the paths to the artifacts.
    glob_search_path = os.path.join(scan_path, "pc", "*.pcd")
    pcd_paths = glob.glob(glob_search_path)
    if len(pcd_paths) == 0:
        print("No artifacts found. Aborting...")
        sys.exit(1)

    # Prepare results dictionary.
    results = Bunch()
    results.scan = Bunch()
    results.scan.qrcode = scan_qrcode
    results.scan.timestamp = scan_timestamp
    results.model_results = []

    main_connector = dbutils.connect_to_main_database(db_connection_file)

    # Select models from model table where active=True in json_metadata
    select_models = "SELECT * FROM model WHERE (json_metadata->>'active')::BOOLEAN IS true;"
    models = main_connector.execute(select_models, fetch_all=True)

    # Go through the models retrieved from the database.
    for model_row in models:
        model_name = model_row[0]

        # Locate the weights of the model.
        weights_search_path = os.path.join("/home/smahale/whhdata/models",
                                           model_name, "*")
        weights_paths = [
            x for x in glob.glob(weights_search_path) if "-weights" in x
        ]
        if len(weights_paths) == 0:
            continue
        weights_path = weights_paths[0]
        entry = model_row[3]

        # Get the model parameters.
        input_shape = entry["input_shape"]
        output_size = entry["output_size"]
        hidden_sizes = entry["hidden_sizes"]
        subsampling_method = entry["subsampling_method"]

        # Load the model.
        try:
            model = modelutils.load_pointnet(weights_path, input_shape,
                                             output_size, hidden_sizes)
        except Exception:
            print("Failed to load model weights:", weights_path)
            continue
        #print("Worked!", weights_path)

        # Prepare the pointclouds.
        pointclouds = []
        for pcd_path in pcd_paths:
            pointcloud = utils.load_pcd_as_ndarray(pcd_path)
            pointcloud = utils.subsample_pointcloud(
                pointcloud,
                target_size=input_shape[0],
                subsampling_method="sequential_skip")
            pointclouds.append(pointcloud)
        pointclouds = np.array(pointclouds)

        # Predict.
        predictions = model.predict(pointclouds)

        # Prepare model result.
        model_result = Bunch()
        model_result.model_name = model_name

        # Store measure result.
        model_result.measure_result = Bunch()
        model_result.measure_result.mean = str(np.mean(predictions))
        model_result.measure_result.min = str(np.min(predictions))
        model_result.measure_result.max = str(np.max(predictions))
        model_result.measure_result.std = str(np.std(predictions))

        # Store artifact results.
        model_result.artifact_results = []
        for pcd_path, prediction in zip(pcd_paths, predictions):
            artifact_result = Bunch()
            # Strip the leading directories from the artifact path.
            artifact_result.path = '/'.join(pcd_path.split('/')[4:])
            artifact_result.prediction = str(prediction[0])
            model_result.artifact_results.append(artifact_result)

        results.model_results.append(model_result)

    # Round-trip through JSON to turn the nested Bunch objects into plain dicts.
    results_json_string = json.dumps(results)
    results_json_object = json.loads(results_json_string)

    filename = "{0}/{1}-{2}-{3}-{4}.json".format(destination_folder,
                                                 pcd_paths[0].split('/')[3],
                                                 scan_qrcode, scan_timestamp,
                                                 random.randint(10000, 99999))

    # Write the results to a JSON file in the destination folder.
    with open(filename, 'w') as json_file:
        json.dump(results_json_object, json_file, indent=2)
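
A minimal command-line wrapper for store_results, assuming the arguments arrive in the order scan_path, db_connection_file, destination_folder:

import sys

if __name__ == "__main__":
    # Argument order assumption: scan path, DB connection file, output folder.
    store_results(scan_path=sys.argv[1],
                  db_connection_file=sys.argv[2],
                  destination_folder=sys.argv[3])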