Example #1
def get_pointcloud_values(path):
    number_of_points = 0
    confidence_min = 0.0
    confidence_avg = 0.0
    confidence_std = 0.0
    confidence_max = 0.0
    error = False
    error_message = ""

    try:
        pointcloud = utils.load_pcd_as_ndarray(path)
        number_of_points = len(pointcloud)
        confidence_min = float(np.min(pointcloud[:, 3]))
        confidence_avg = float(np.mean(pointcloud[:, 3]))
        confidence_std = float(np.std(pointcloud[:, 3]))
        confidence_max = float(np.max(pointcloud[:, 3]))
    except Exception as e:
        # A single handler suffices: Exception already covers ValueError,
        # so a second except ValueError clause would be unreachable.
        print("\n", path, e)
        error = True
        error_message = str(e)

    values = {
        "number_of_points": number_of_points,
        "confidence_min": confidence_min,
        "confidence_avg": confidence_avg,
        "confidence_std": confidence_std,
        "confidence_max": confidence_max,
        "error": error,
        "error_message": error_message,
    }
    return values
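A minimal usage sketch, assuming a folder of .pcd files (the scans/ folder and glob pattern are hypothetical):

import glob

stats = [get_pointcloud_values(path) for path in glob.glob("scans/*.pcd")]
failed = [entry for entry in stats if entry["error"]]
print("Processed {} files, {} failed.".format(len(stats), len(failed)))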
Example #2
        def process_ply_entry(entry):
            path, qr_code, height, weight = entry

            print(path)

            if not os.path.exists(path):
                print("\n", "File {} does not exist!".format(path), "\n")
                return

            try:
                pointcloud = utils.load_pcd_as_ndarray(path)
                targets = np.array([height, weight])
                pickle_filename = os.path.basename(path).replace(".ply", ".p")
                qrcode_path = os.path.join(base_path, "ply", qr_code)

                # Create the per-QR-code folder only if it is missing; an
                # unconditional os.mkdir raises FileExistsError on the second
                # artifact of the same QR code.
                if not os.path.exists(qrcode_path):
                    os.mkdir(qrcode_path)

                pickle_output_path = os.path.join(qrcode_path, pickle_filename)
                with open(pickle_output_path, "wb") as pickle_file:
                    pickle.dump((pointcloud, targets), pickle_file)
            except Exception as e:
                print(e)
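Because process_ply_entry takes a single tuple and returns nothing, it maps cleanly onto a worker pool, provided it is defined at module level (multiprocessing cannot pickle nested functions). A sketch, assuming entries is a list of (path, qr_code, height, weight) tuples and the base_path/ply folder already exists:

from multiprocessing import Pool

if __name__ == "__main__":
    with Pool() as pool:
        pool.map(process_ply_entry, entries)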
Example #3
def execute_command_preprocess(preprocess_pcds=True, preprocess_jpgs=False):
    print("Preprocessing data-set...")

    # Create the base-folder.
    datetime_path = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    base_path = os.path.join(preprocessed_root_path, datetime_path)
    os.mkdir(base_path)
    os.mkdir(os.path.join(base_path, "pcd"))
    os.mkdir(os.path.join(base_path, "jpg"))

    # Process the filtered PCDs.
    if preprocess_pcds:
        entries = execute_command_filterpcds()["results"]
        print("Found {} PCDs. Processing...".format(len(entries)))
        bar = progressbar.ProgressBar(max_value=len(entries))
        for index, entry in enumerate(entries):
            bar.update(index)
            path = entry["path"]
            if not os.path.exists(path):
                print("\n", "File {} does not exist!".format(path), "\n")
                continue
            pointcloud = utils.load_pcd_as_ndarray(path)
            targets = np.array(
                [float(value) for value in entry["targets"].split(",")])
            qrcode = entry["qrcode"]
            pickle_filename = entry["id"].replace(".pcd", ".p")
            qrcode_path = os.path.join(base_path, "pcd", qrcode)
            if not os.path.exists(qrcode_path):
                os.mkdir(qrcode_path)
            pickle_output_path = os.path.join(qrcode_path, pickle_filename)
            with open(pickle_output_path, "wb") as pickle_file:
                pickle.dump((pointcloud, targets), pickle_file)
        bar.finish()

    # Process the filtered JPGs.
    if preprocess_jpgs:
        entries = execute_command_filterjpgs()["results"]
        print("Found {} JPGs. Processing...".format(len(entries)))
        bar = progressbar.ProgressBar(max_value=len(entries))
        for index, entry in enumerate(entries):
            bar.update(index)
            path = entry["path"]
            if not os.path.exists(path):
                print("\n", "File {} does not exist!".format(path), "\n")
                continue
            image = cv2.imread(path)
            # cv2.imread returns None instead of raising on unreadable files.
            if image is None:
                print("\n", "File {} could not be read!".format(path), "\n")
                continue
            targets = np.array(
                [float(value) for value in entry["targets"].split(",")])
            qrcode = entry["qrcode"]
            pickle_filename = entry["id"].replace(".jpg", ".p")
            qrcode_path = os.path.join(base_path, "jpg", qrcode)
            if not os.path.exists(qrcode_path):
                os.mkdir(qrcode_path)
            pickle_output_path = os.path.join(qrcode_path, pickle_filename)
            with open(pickle_output_path, "wb") as pickle_file:
                pickle.dump((image, targets), pickle_file)
        bar.finish()
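Each pickle written above holds a (data, targets) tuple, so preprocessed artifacts can be read back directly. A sketch with a hypothetical output path:

import pickle

with open("preprocessed/2018_01_01_12_00_00/pcd/SOME_QRCODE/artifact_001.p", "rb") as f:
    pointcloud, targets = pickle.load(f)
print(pointcloud.shape, targets)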
Example #4
def process_pcd_entry(entry):
    path = entry["artifact_path"]
    if not os.path.exists(path):
        print("\n", "File {} does not exist!".format(path), "\n")
        return
    pointcloud = utils.load_pcd_as_ndarray(path)
    targets = np.array([entry["height"], entry["weight"]])
    qrcode = entry["qr_code"]
    pickle_filename = os.path.basename(entry["artifact_path"]).replace(".pcd", ".p")
    qrcode_path = os.path.join(base_path, "pcd", qrcode)
    if not os.path.exists(qrcode_path):
        os.mkdir(qrcode_path)
    pickle_output_path = os.path.join(qrcode_path, pickle_filename)
    with open(pickle_output_path, "wb") as pickle_file:
        pickle.dump((pointcloud, targets), pickle_file)
Example #5
    def process_artifacts(artifacts):

        # Create database connection.
        db_connector = dbutils.connect_to_main_database()

        # Load the model first.
        model = load_model(model_path)
        model_name = model_path.split("/")[-2]

        # Evaluate and create SQL-statements.
        bar = progressbar.ProgressBar(max_value=len(artifacts))
        for artifact_index, artifact in enumerate(artifacts):
            bar.update(artifact_index)

            # Execute SQL statement.
            try:
                # Load the artifact and evaluate.
                artifact_id, pcd_path, target_height, qrcode = artifact
                pcd_array = utils.load_pcd_as_ndarray(pcd_path)
                pcd_array = utils.subsample_pointcloud(pcd_array, 10000)
                mse, mae = model.evaluate(np.expand_dims(pcd_array, axis=0),
                                          np.array([target_height]),
                                          verbose=0)
                if qrcode in qrcodes_train:
                    misc = "training"
                else:
                    misc = "nottraining"

                # Create an SQL statement. (Plain string formatting, as in the
                # original; see the parameterized sketch after this example.)
                sql_statement = (
                    "INSERT INTO artifact_quality (type, key, value, artifact_id, misc)"
                    " VALUES('{}', '{}', '{}', '{}', '{}');".format(
                        model_name, "mae", mae, artifact_id, misc))

                # Call database.
                db_connector.execute(sql_statement)
            except psycopg2.IntegrityError:
                print("Already in DB. Skipped.", pcd_path)
            except ValueError:
                print("Skipped.", pcd_path)
        bar.finish()
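The INSERT above interpolates values straight into the SQL string, which breaks on values containing quotes and is injection-prone. The db_connector wrapper's API is not shown here, but with raw psycopg2 (whose IntegrityError the code already catches) a parameterized version would look like this sketch (connection parameters hypothetical):

import psycopg2

conn = psycopg2.connect(host="localhost", dbname="cgm", user="cgm", password="secret")
with conn, conn.cursor() as cursor:
    cursor.execute(
        "INSERT INTO artifact_quality (type, key, value, artifact_id, misc)"
        " VALUES (%s, %s, %s, %s, %s);",
        (model_name, "mae", mae, artifact_id, misc))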
Example #6
def execute_command_preprocess():
    print("Preprocessing data-set...")

    # Create the base-folder.
    datetime_path = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    base_path = os.path.join(preprocessed_root_path, datetime_path)
    os.mkdir(base_path)

    entries = execute_command_filterpcds()["results"]
    print("Found {} PCDs. Processing...".format(len(entries)))
    bar = progressbar.ProgressBar(max_value=len(entries))
    for index, entry in enumerate(entries):
        bar.update(index)
        pointcloud = utils.load_pcd_as_ndarray(entry["path"])
        targets = np.array(
            [float(value) for value in entry["targets"].split(",")])
        qrcode = entry["qrcode"]
        pickle_filename = entry["id"].replace(".pcd", ".p")
        qrcode_path = os.path.join(base_path, qrcode)
        if not os.path.exists(qrcode_path):
            os.mkdir(qrcode_path)
        pickle_output_path = os.path.join(qrcode_path, pickle_filename)
        with open(pickle_output_path, "wb") as pickle_file:
            pickle.dump((pointcloud, targets), pickle_file)
    bar.finish()
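One caveat shared by these preprocessing loops: entry["id"].replace(".pcd", ".p") substitutes every occurrence of ".pcd" in the string, not just the extension. An os.path.splitext-based helper avoids this (to_pickle_name is a hypothetical name):

import os

def to_pickle_name(artifact_id):
    # Replace only the final extension, leaving the stem untouched.
    stem, _ext = os.path.splitext(artifact_id)
    return stem + ".p"

print(to_pickle_name("scan.pcd_01.pcd"))  # scan.pcd_01.p, whereas replace() would give scan.p_01.p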
Example #7
    def process_artifacts(artifacts, process_index):

        # Create database connection.
        db_connector = dbutils.connect_to_main_database()

        # Load the model first.
        model_weights_path = [
            x for x in glob.glob(os.path.join(model_path, "*"))
            if x.endswith("-model-weights.h5")
        ][0]
        model_details_path = [
            x for x in glob.glob(os.path.join(model_path, "*"))
            if x.endswith("-details.p")
        ][0]
        model_name = model_path.split("/")[-1]
        with open(model_details_path, "rb") as details_file:
            model_details = pickle.load(details_file)
        pointcloud_target_size = model_details["dataset_parameters"][
            "pointcloud_target_size"]
        pointcloud_subsampling_method = model_details["dataset_parameters"][
            "pointcloud_subsampling_method"]
        target_key = model_details["dataset_parameters"]["output_targets"][0]
        model = load_model(model_weights_path, pointcloud_target_size)

        # Evaluate and create SQL-statements.
        for artifact_index, artifact in enumerate(
                tqdm(artifacts, position=process_index)):

            # Unpack fields.
            artifact_id, pcd_path = artifact

            # Check if there is already an entry.
            select_sql_statement = ""
            select_sql_statement += "SELECT COUNT(*) FROM artifact_result"
            select_sql_statement += " WHERE artifact_id='{}'".format(
                artifact_id)
            select_sql_statement += " AND model_name='{}'".format(model_name)
            select_sql_statement += " AND target_key='{}'".format(target_key)
            results = db_connector.execute(select_sql_statement,
                                           fetch_one=True)[0]

            # There is an entry. Skip
            if results != 0:
                continue

            # Execute SQL statement.
            try:
                # Load the artifact and evaluate.
                pcd_path = pcd_path.replace("/whhdata/qrcode",
                                            "/localssd/qrcode")
                pcd_array = utils.load_pcd_as_ndarray(pcd_path)
                pcd_array = utils.subsample_pointcloud(
                    pcd_array,
                    pointcloud_target_size,
                    subsampling_method=pointcloud_subsampling_method)

                value = model.predict(np.expand_dims(pcd_array, axis=0),
                                      verbose=0)[0][0]

                # Create an SQL statement.
                sql_statement = (
                    "INSERT INTO artifact_result (model_name, target_key, value, artifact_id)"
                    " VALUES('{}', '{}', '{}', '{}');".format(
                        model_name, target_key, value, artifact_id))

                # Call database.
                db_connector.execute(sql_statement)
            except psycopg2.IntegrityError:
                # Result already in the database; skip.
                pass
            except ValueError:
                # Evaluation failed for this artifact; skip.
                pass
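The process_index argument feeds tqdm's position, which suggests process_artifacts is meant to run in parallel workers. A sketch of that fan-out (assuming the function is defined at module level and all_artifacts is a list of (artifact_id, pcd_path) tuples):

from multiprocessing import Process

number_of_processes = 4
# Strided slicing splits the artifacts into roughly equal chunks.
chunks = [all_artifacts[i::number_of_processes] for i in range(number_of_processes)]
processes = [Process(target=process_artifacts, args=(chunk, index))
             for index, chunk in enumerate(chunks)]
for p in processes:
    p.start()
for p in processes:
    p.join()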
Example #8
def store_results(scan_path, db_connection_file, destination_folder):

    # Split the scan path into QR code and timestamp.
    scan_path_split = scan_path.split("/")
    scan_qrcode = scan_path_split[-3]
    scan_timestamp = scan_path_split[-1]

    # Get the paths to the artifacts.
    glob_search_path = os.path.join(scan_path, "pc", "*.pcd")
    pcd_paths = glob.glob(glob_search_path)
    if len(pcd_paths) == 0:
        print("No artifacts found. Aborting...")
        exit(1)

    # Prepare results dictionary.
    results = Bunch()
    results.scan = Bunch()
    results.scan.qrcode = scan_qrcode
    results.scan.timestamp = scan_timestamp
    results.model_results = []

    main_connector = dbutils.connect_to_main_database(db_connection_file)

    # Select models from model table where active=True in json_metadata
    select_models = "SELECT * FROM model WHERE (json_metadata->>'active')::BOOLEAN IS true;"
    models = main_connector.execute(select_models, fetch_all=True)

    # Go through the active models.
    for model in models:
        model_name = model[0]

        # Locate the weights of the model.
        weights_search_path = os.path.join("/home/smahale/whhdata/models",
                                           model_name, "*")
        weights_paths = [
            x for x in glob.glob(weights_search_path) if "-weights" in x
        ]
        if len(weights_paths) == 0:
            continue
        weights_path = weights_paths[0]
        entry = model[3]  # The model's parameter dictionary (presumably the json_metadata column).

        # Get the model parameters.
        input_shape = entry["input_shape"]
        output_size = entry["output_size"]
        hidden_sizes = entry["hidden_sizes"]
        subsampling_method = entry["subsampling_method"]

        # Load the model.
        try:
            model = modelutils.load_pointnet(weights_path, input_shape,
                                             output_size, hidden_sizes)
        except Exception:
            print("Failed!", weights_path)
            continue

        # Prepare the pointclouds.
        pointclouds = []
        for pcd_path in pcd_paths:
            pointcloud = utils.load_pcd_as_ndarray(pcd_path)
            pointcloud = utils.subsample_pointcloud(
                pointcloud,
                target_size=input_shape[0],
                # Note: hardcoded; the metadata's subsampling_method is read above but not used here.
                subsampling_method="sequential_skip")
            pointclouds.append(pointcloud)
        pointclouds = np.array(pointclouds)

        # Predict.
        predictions = model.predict(pointclouds)

        # Prepare model result.
        model_result = Bunch()
        model_result.model_name = model_name

        # Store measure result.
        model_result.measure_result = Bunch()
        model_result.measure_result.mean = str(np.mean(predictions))
        model_result.measure_result.min = str(np.min(predictions))
        model_result.measure_result.max = str(np.max(predictions))
        model_result.measure_result.std = str(np.std(predictions))

        # Store artifact results.
        model_result.artifact_results = []
        for pcd_path, prediction in zip(pcd_paths, predictions):
            artifact_result = Bunch()
            # Strip the absolute prefix from the artifact path.
            artifact_result.path = '/'.join(pcd_path.split('/')[4:])
            artifact_result.prediction = str(prediction[0])
            model_result.artifact_results.append(artifact_result)

        results.model_results.append(model_result)

    filename = "{0}/{1}-{2}-{3}-{4}.json".format(destination_folder,
                                                 pcd_paths[0].split('/')[3],
                                                 scan_qrcode, scan_timestamp,
                                                 random.randint(10000, 99999))

    # Write the results to a JSON file in destination_folder.
    # Bunch subclasses dict, so the nested structure serializes directly.
    with open(filename, 'w') as json_file:
        json.dump(results, json_file, indent=2)
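A hedged usage sketch (all paths hypothetical; scan_path must end in .../<qrcode>/<subfolder>/<timestamp>, since the QR code and timestamp are recovered by position):

store_results(
    scan_path="/whhdata/qrcode/SOME_QRCODE/measure/1584000000000",
    db_connection_file="dbconnection.json",
    destination_folder="/tmp/results")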