Example #1
import os
import subprocess

#Project helpers such as determine_section_week and date_boundary are not shown in this example.
def rasterize(overwrite,src,dest,grid_step,SF):
    print("Rasterizing cloud with " + SF)
    print("")
    
    #Path to the CloudCompare exe file
    cc_path = r"C:\Program Files\CloudCompare\CloudCompare.exe"

    #Create destination folder if it does not already exist
    os.makedirs(dest,exist_ok=True)
  
    dated_sections = determine_section_week(src)
    for pairs in dated_sections:
        section = pairs[0]
        week = pairs[1]
        
        #List of the files for this section and week
        m3c2_files = [ x for x in os.listdir(src) if  "Zone" + section in x and date_boundary(x,week)]
        
        for filename in m3c2_files:
            #Current file path
            cropped_path = src + "\\" + filename
            
            #Destination file path
            new_file_path = dest + "\\" + filename.replace(".bin","Raster_" + SF + ".tif")
            
            #Check if the new file path exists 
            path_exists = os.path.exists(new_file_path)
            if not path_exists or overwrite:
    
                #Performs the M3C2 comparison
                # -SILENT stops a cloud compare console popping up (useful for debug as it will stop the program after completing its task)
                # -O cloud opens the file with path given by cloud
                # -RASTERIZE rasterizes the loaded clouds with -GRID_STEP given by grid_step
                # -OUTPUT_RASTER_Z outputs the result as a geotiff raster (altitudes + all SFs by default)
                subprocess.run([cc_path,"-SILENT", "-O", cropped_path,"-RASTERIZE", "-GRID_STEP", grid_step,"-OUTPUT_RASTER_" + SF], shell = True)
                
                #Filename for the raster
                new_raster_file = [file for file in os.listdir(src) if (filename.replace(".bin","") in file and "RASTER" in file)][0]
                
                #Deletes the old raster if it exists and the overwrite option is on
                if path_exists and overwrite:
                    os.remove(new_file_path)
                
                #Rename the new raster
                os.rename(src + "\\" + new_raster_file, new_file_path)
    
    
        print("Section " + section + " week " + week + " completed.")
        print("")
Example #2
import os
import subprocess
from timeit import default_timer as timer  #assumed source of timer()

#Project helpers such as determine_section_week, date_boundary and shifted_cloud are not shown in this example.
def z_shift_bin(overwrite, src, registration_folder, section, week, cc_path):

    print("Shifting clouds towards the first day of the week")
    print("")

    #Folder containing median values for the z shifts
    registration_errors = registration_folder + "\\Median"

    #First day of the week (week 1 starts on day 13 of August 2018, so week n starts on day 13 + 7*(n - 1))
    first_day = str(13 + (int(week) - 1) * 7)

    #List of files to be shifted, only files of the current section and week, that have not already been shifted and are not the first day of the week
    shift_files = [
        x for x in os.listdir(src)
        if "_Z_Shifted" not in x and "201808" + first_day not in x and "Zone" +
        section in x and date_boundary(x, week)
    ]

    for filename in shift_files:
        #File path to the current file being shifted
        shift_path = src + "\\" + filename

        #Check if the shifted file already exists
        path_exists = os.path.exists(
            shift_path.replace(".bin", "_Z_Shifted.bin"))

        if not path_exists or overwrite:

            #Generates the shifted cloud file and stores the output name given
            time_stamped_name = shifted_cloud(filename, shift_path,
                                              registration_errors, section,
                                              first_day, cc_path)

            #Deletes file if it already exists and the overwrite option is on
            if path_exists and overwrite:
                os.remove(shift_path.replace(".bin", "_Z_Shifted.bin"))

            #Removes the time stamp from the shifted file name and adds "_Z_Shifted" to the filename
            os.rename(shift_path.replace(filename, time_stamped_name),
                      shift_path.replace(".bin", "_Z_Shifted.bin"))
def ply_to_bin(overwrite,src,dest):
    
    print("Converting .ply files to .bin files")
    print("")
    
    dated_sections = determine_section_week(src)
    for pairs in dated_sections:
        section = pairs[0]
        week = pairs[1]
            
        #Path to the CloudCompare exe file
        cc_path =r"C:\Program Files\CloudCompare\CloudCompare.exe"
        start = timer()
        
        #Create destination folders if they don't already exist
        os.makedirs(dest + "\\BIN Files",exist_ok=True)
        os.makedirs(dest + "\\PLY Files",exist_ok=True)
    
        
        #List the ply files in the src directory for this section and week
        ply_files = [x for x in os.listdir(src) if "Zone" + section in x and date_boundary(x,week)]
        
        
        for filename in ply_files:
            
            #Path to the current ply file
            current_file_path = src + "\\"  + filename
            
            #Bin file name
            dest_file_path = dest + "\\BIN Files\\" + filename.replace('.ply','.bin')
            generated_bin_file = src + "\\" + filename.replace(".ply",".bin")
            
            #Check if the bin file already exists
            bin_path_exists = os.path.exists(dest_file_path)
            
            if not bin_path_exists or overwrite:
                
                #Save cloud in bin format
                # -SILENT stops a cloud compare console popping up (useful for debug as it will stop the program after completing its task)
                # -NO_TIMESTAMP prevents the saved files having timestamps in their name
                # -O cloud opens the file with path given by cloud
                # -SAVE_CLOUDS saves all open clouds
                bin_time_start = timer()
                subprocess.run([cc_path, "-SILENT", "-NO_TIMESTAMP","-O", current_file_path, "-SAVE_CLOUDS"], shell = True)
                bin_time_end = timer()
    
                print("Created corresponding BIN file for ")  
                print(current_file_path)
                print("in " + str(round(bin_time_end-bin_time_start)) + " seconds.") 
                print("")
           
                
                
                #Deletes old file if it already exists and the overwrite option was on 
                if bin_path_exists and overwrite:
                    os.remove(dest_file_path)
                #Move bin file to the destination bin folder
                os.rename(generated_bin_file,dest_file_path)
            
            else:
                print("The BIN file for " + filename + " already exists!")
                print("")
            
            #Check if the ply file already exists
            ply_path_exists = os.path.exists(dest + "\\PLY Files\\" + filename)
            
            if not ply_path_exists or overwrite:
                if ply_path_exists and overwrite:
                    #Deletes old file if it already exists and the overwrite option was on
                    os.remove(dest + "\\PLY Files\\" + filename)
                #Move ply file to the ply folder
                os.rename(src + "\\" + filename,dest + "\\PLY Files\\" + filename)
            else:
                #File already exists in its destination folder and overwrite is not on so delete
                os.remove(src + "\\" + filename)
    
    
        end = timer()
        print("Total time elapsed: " + str(round(end-start)) + " seconds.")     
Example #4
import os
import re

import numpy
from osgeo import gdal, ogr

#Project helpers such as determine_section_week, determine_section_week_contour, date_boundary, readFile, writeFile, Plant_Point and Contour_Polygon are not shown in this example.
def volume_calculation(overwrite, src, dest, point_folder):

    gdal.UseExceptions()

    #Create destination folder if it does not already exist
    os.makedirs(dest, exist_ok=True)

    dated_sections = determine_section_week(src)
    for pairs in dated_sections:
        section = pairs[0]
        week = pairs[1]
        #List of shapefiles for the current section and week
        shapefiles = [
            x for x in os.listdir(src) if ".shp" in x and "Zone" +
            section in x and date_boundary(x, week) and not ".lock" in x
        ]

        #Generate polygons for the pallets
        pallet_bounds, pallet_numbers = pallet_dimensions(section, week)

        #Read points file
        try:
            with open(point_folder + "\\" + week + section +
                      ".txt") as points_file:
                plant_points = points_file.readlines()
        except OSError:
            plant_points = []

        pallet_points = [[], [], [], []]
        for point in plant_points:
            #Read pairs of coordinates that are written on lines like: x,y
            if "," in point and not re.search("[a-zA-Z]", point):
                #Generate points
                temp_point = Plant_Point(point, pallet_bounds, pallet_numbers)
                pallet_points[temp_point.pallet] += [temp_point]

        for filename in shapefiles:
            data_file = dest + "\\" + filename.replace("Shapefile", "Data")
            volume_file = dest + "\\" + filename.replace(
                "_Contour_Shapefile.shp", "_Volumes.txt")
            if not os.path.exists(data_file) or overwrite:
                shape_file_path = src + "\\" + filename
                driver = ogr.GetDriverByName("ESRI Shapefile")
                dataSource = driver.Open(shape_file_path, 0)
                contour_layer = dataSource.GetLayer()

                feature_list = []

                #Create list of contours
                for i in range(contour_layer.GetFeatureCount()):
                    feature_list += [
                        Contour_Polygon(i, contour_layer, pallet_bounds,
                                        pallet_points)
                    ]

                #Difference overlapping polygons
                for poly1 in feature_list:
                    for poly2 in feature_list:
                        poly1.differencing(poly2)

                #Calculate total volume below contours
                for poly1 in feature_list:
                    for poly2 in feature_list:
                        poly1.total_volume(poly2)

                #Write estimated volume totals to file
                if not estimated_volume_check(overwrite, volume_file, section,
                                              week):
                    estimated_volumes = [0, 0, 0, 0, 0]
                    for poly in feature_list:
                        estimated_volumes[poly.pallet] += poly.volume
                    with open(volume_file, "a") as file:
                        for i in range(len(pallet_numbers)):
                            file.write("Pallet " + str(pallet_numbers[i]) +
                                       ":" + str(estimated_volumes[i]) +
                                       "m^3 \n")

                #Create output contour file
                outShapefile = src + "\\" + filename.replace("shp", "")
                driver = ogr.GetDriverByName("ESRI Shapefile")
                outDataSource = driver.CreateDataSource(outShapefile + ".shp")

                contour_layer = outDataSource.CreateLayer('contour')

                #Create output fields
                length_field = ogr.FieldDefn("Length", ogr.OFTReal)
                length_field.SetPrecision(8)
                contour_layer.CreateField(length_field)
                area_field = ogr.FieldDefn("Area", ogr.OFTReal)
                area_field.SetPrecision(8)
                contour_layer.CreateField(area_field)
                d_area_field = ogr.FieldDefn("D Area", ogr.OFTReal)
                d_area_field.SetPrecision(8)
                contour_layer.CreateField(d_area_field)
                volume_field = ogr.FieldDefn("Volume", ogr.OFTReal)
                volume_field.SetPrecision(8)
                contour_layer.CreateField(volume_field)
                d_volume_field = ogr.FieldDefn("D Volume", ogr.OFTReal)
                d_volume_field.SetPrecision(8)
                contour_layer.CreateField(d_volume_field)
                m3c2_field = ogr.FieldDefn("M3C2", ogr.OFTReal)
                m3c2_field.SetPrecision(5)
                contour_layer.CreateField(m3c2_field)
                pallet_field = ogr.FieldDefn("Pallet", ogr.OFTInteger)
                contour_layer.CreateField(pallet_field)
                point_list_field = ogr.FieldDefn("Point", ogr.OFTString)
                contour_layer.CreateField(point_list_field)
                contained_field = ogr.FieldDefn("Contained", ogr.OFTInteger)
                contour_layer.CreateField(contained_field)
                within_field = ogr.FieldDefn("Within", ogr.OFTInteger)
                contour_layer.CreateField(within_field)

                #Add contours to file with fields
                for contour in feature_list:
                    featureDefn = contour_layer.GetLayerDefn()
                    feature = ogr.Feature(featureDefn)
                    feature.SetGeometry(contour.feature.GetGeometryRef())
                    feature.SetFID(contour.FID)
                    feature.SetField("Length", contour.polygon.length)
                    feature.SetField("Area", contour.polygon.area)
                    feature.SetField("D Area",
                                     contour.differenced_polygon.area)
                    feature.SetField("Volume", contour.volume_below)
                    feature.SetField("D Volume", contour.volume)
                    feature.SetField("M3C2", contour.M3C2)
                    feature.SetField("Pallet", contour.pallet)
                    feature.SetField("Point", contour.points)
                    feature.SetField("Contained", contour.contained)
                    feature.SetField("Within", contour.within)
                    contour_layer.CreateFeature(feature)
                    feature = None

                #Close contour file
                outDataSource = None
                del outDataSource

                #Delete files if they already exist and the overwrite option is on
                #Rename the new files
                if overwrite and os.path.exists(data_file):
                    os.remove(data_file)
                os.rename(outShapefile + ".shp", data_file)

                if os.path.exists(data_file.replace(".shp", ".dbf")):
                    os.remove(data_file.replace(".shp", ".dbf"))
                os.rename(outShapefile + ".dbf",
                          data_file.replace(".shp", ".dbf"))

                if os.path.exists(data_file.replace(".shp", ".shx")):
                    os.remove(data_file.replace(".shp", ".shx"))
                os.rename(outShapefile + ".shx",
                          data_file.replace(".shp", ".shx"))

                if len(plant_points) != 0:
                    #Create point file
                    outShapefile = src + "\\" + filename.replace("shp",
                                                                 "") + "points"
                    driver = ogr.GetDriverByName("ESRI Shapefile")
                    outDataSource = driver.CreateDataSource(outShapefile +
                                                            ".shp")

                    point_layer = outDataSource.CreateLayer('points')

                    #Create output fields
                    point_pallet_field = ogr.FieldDefn("Pallet",
                                                       ogr.OFTInteger)
                    point_layer.CreateField(point_pallet_field)
                    point_point_list_field = ogr.FieldDefn(
                        "Point", ogr.OFTString)
                    point_layer.CreateField(point_point_list_field)
                    point_within_field = ogr.FieldDefn("Within",
                                                       ogr.OFTInteger)
                    point_layer.CreateField(point_within_field)
                    x_field = ogr.FieldDefn("X", ogr.OFTReal)
                    x_field.SetPrecision(8)
                    point_layer.CreateField(x_field)
                    y_field = ogr.FieldDefn("Y", ogr.OFTReal)
                    y_field.SetPrecision(8)
                    point_layer.CreateField(y_field)

                    #Add points to file with fields
                    for point_list in pallet_points:
                        for point in point_list:
                            featureDefn = point_layer.GetLayerDefn()
                            feature = ogr.Feature(featureDefn)
                            feature.SetGeometry(point.arc_point)
                            feature.SetField("Pallet", point.pallet + 1)
                            feature.SetField("Point", point.id)
                            feature.SetField("Within", point.within)
                            feature.SetField("X", point.x)
                            feature.SetField("Y", point.y)
                            point_layer.CreateFeature(feature)
                            feature = None

                    #Close point file
                    outDataSource = None
                    del outDataSource

                    #Delete files if they already exist and the overwrite option is on
                    #Rename the new files
                    if overwrite and os.path.exists(
                            data_file.replace(".shp", "points.shp")):
                        os.remove(data_file.replace(".shp", "points.shp"))
                    os.rename(outShapefile + ".shp",
                              data_file.replace(".shp", "points.shp"))

                    if os.path.exists(data_file.replace(".shp", "points.dbf")):
                        os.remove(data_file.replace(".shp", "points.dbf"))
                    os.rename(outShapefile + ".dbf",
                              data_file.replace(".shp", "points.dbf"))

                    if os.path.exists(data_file.replace(".shp", "points.shx")):
                        os.remove(data_file.replace(".shp", "points.shx"))
                    os.rename(outShapefile + ".shx",
                              data_file.replace(".shp", "points.shx"))
def generateContours(overwrite, src, dest):
    #Create destination folder if it does not already exist
    os.makedirs(dest, exist_ok=True)

    dated_sections = determine_section_week_contour(src)
    for pairs in dated_sections:
        section = pairs[0]
        week = pairs[1]
        #List of all the rasters in the current week and section that have the M3C2 layer
        all_files = [
            x for x in os.listdir(src) if "Zone" +
            section in x and date_boundary(x, week) and "Raster_Z.tif" in x
        ]

        for filename in all_files:

            #Check if the path already exists
            if overwrite or not os.path.exists(dest + "\\" + filename.replace(
                    ".tif", "_Contour_Shapefile.shp").replace("Raster_Z", "")):
                pathname = src + "\\" + filename
                writefilename = src + "\\" + filename.replace(
                    ".tif", "_temp.tif")

                #Read M3C2 band from raster
                [xsize, ysize, geotransform, geoproj, Z] = readFile(pathname)

                #Set NaN values to 9999
                Z[numpy.isnan(Z)] = 9999

                #Rewrite the M3C2 band to a file
                writeFile(writefilename, geotransform, geoproj, Z)

                print("Contour plotting for: ")
                print(filename)
                print("")
                #Open the raster and select the band to contour. For a DSM this will probably be band 1.
                image = gdal.Open(src + "\\" +
                                  filename.replace(".tif", "_temp.tif"))
                band = image.GetRasterBand(1)

                #Generate shapefile to save Contourlines in
                outShapefile = pathname.replace(".tif", "")
                driver = ogr.GetDriverByName("ESRI Shapefile")

                #Generates new layer in shapefile
                outDataSource = driver.CreateDataSource(outShapefile + ".shp")
                layer = outDataSource.CreateLayer('contour')

                #Add fields to new layer in shapefile.
                #These are shown in the attribute table in ArcGIS.
                #SetPrecision sets the number of decimal places stored for each field
                id_field = ogr.FieldDefn("ID", ogr.OFTInteger)
                layer.CreateField(id_field)
                length_field = ogr.FieldDefn("Length", ogr.OFTReal)
                length_field.SetPrecision(8)
                layer.CreateField(length_field)
                area_field = ogr.FieldDefn("Area", ogr.OFTReal)
                area_field.SetPrecision(8)
                layer.CreateField(area_field)
                m3c2_field = ogr.FieldDefn("M3C2", ogr.OFTReal)
                m3c2_field.SetPrecision(5)
                layer.CreateField(m3c2_field)
                """
                Generate Contourlines. 
                band= band of raster layer to contour- as defined above
                0.003 - contour interval value
                -0.4- first contour value
                [] - List takes priority over the previous two arguments, contours are only at these levels
                0
                0
                layer - the output layer
                0 - the index of the id field
                3 - the index of the elevation (M3C2) field
                """

                gdal.ContourGenerate(band, 0.003, -0.4,
                                     [x / 1000 for x in range(-3000, 0, 1)], 0,
                                     0, layer, 0, 3)

                #gdal.ContourGenerate(band, 0.003, -0.4, [x / 10000 for x in range(-3000,0,1)], 0, 0, layer, 0, 3)

                #Write length and area to each contour, then delete features that are too small.
                for features in layer:
                    geom = features.GetGeometryRef()

                    length = geom.Length()
                    area = geom.Area()

                    features.SetField(
                        "Length", length)  # add length value to each feature
                    features.SetField("Area",
                                      area)  # add area value to each feature
                    layer.SetFeature(features)

                    #Delete contours with length less than 0.2m or area less than 0.001m^2
                    if length < 0.2 or area < 0.001:
                        layer.DeleteFeature(features.GetFID())

                #delete data source at the end. Important to do this otherwise code gets stuck!
                image = None
                del image
                outDataSource = None
                del outDataSource

                #Delete the old output file if it exists and the overwrite option is on
                #Rename the new file
                if overwrite and os.path.exists(dest + "\\" + filename.replace(
                        ".tif", "_Contour_Shapefile.shp").replace(
                            "Raster_Z", "")):
                    os.remove(dest + "\\" + filename.replace(
                        ".tif", "_Contour_Shapefile.shp").replace(
                            "Raster_Z", ""))
                os.rename(
                    outShapefile + ".shp", dest + "\\" +
                    filename.replace(".tif", "_Contour_Shapefile.shp").replace(
                        "Raster_Z", ""))

                if os.path.exists(
                        dest + "\\" +
                        filename.replace(".tif", "_Contour_Shapefile.dbf"
                                         ).replace("Raster_Z", "")):
                    os.remove(dest + "\\" +
                              filename.replace(".tif", "_Contour_Shapefile.dbf"
                                               ).replace("Raster_Z", ""))
                os.rename(
                    outShapefile + ".dbf", dest + "\\" +
                    filename.replace(".tif", "_Contour_Shapefile.dbf").replace(
                        "Raster_Z", ""))

                if os.path.exists(
                        dest + "\\" +
                        filename.replace(".tif", "_Contour_Shapefile.shx"
                                         ).replace("Raster_Z", "")):
                    os.remove(dest + "\\" +
                              filename.replace(".tif", "_Contour_Shapefile.shx"
                                               ).replace("Raster_Z", ""))
                os.rename(
                    outShapefile + ".shx", dest + "\\" +
                    filename.replace(".tif", "_Contour_Shapefile.shx").replace(
                        "Raster_Z", ""))

                os.remove(writefilename)
        print("Section " + section + " Week " + week + " Completed")
        print("")
Example #6
import os
import subprocess

#Project helpers such as determine_section_week, date_boundary, cropping_pallets, pallet_list, diff, shifted_cloud, median_z_shift and median_absolute_deviation_error are not shown in this example.
def crop(overwrite, registration_folder, src, dest):
    #Path to the CloudCompare exe file
    cc_path = r"C:\Program Files\CloudCompare\CloudCompare.exe"

    #Creates the destination folder if it does not already exist
    os.makedirs(dest, exist_ok=True)
    dated_sections = determine_section_week(src)
    for pairs in dated_sections:
        section = pairs[0]
        week = pairs[1]

        #Translate the bin file using the median given from the M3C2 calculation on the wooden edges of the pallet
        z_shift_bin(overwrite, src, registration_folder, section, week,
                    cc_path)

        print("Cropping the clouds")
        print("")

        pallet_dimensions = []

        #Read the cropping dimensions for each of the pallets
        for number in get_pallet_numbers(section, week):
            with open(os.getcwd() +
                      "\\parameter_files\\cropping_dimensions\\" + section +
                      "_" + week + "_pallet_" + number +
                      ".txt") as pallet_dims:
                pallet_dimensions += [pallet_dims.readline().replace("\n", "")]

        #List the bin files in src for this section and week that have either been shifted using the median or are the first day of the week
        bin_files = [
            x for x in os.listdir(src)
            if "Zone" + section in x and date_boundary(x, week) and (
                "Z_Shifted" in x or "201808" + str(13 +
                                                   (int(week) - 1) * 7) in x)
        ]
        #List all the files in the src directory
        all_files = os.listdir(src)

        for filename in bin_files:
            #Path to the current bin file
            bin_path = src + "\\" + filename
            #Checks if the cropped bin file exists
            path_exists = os.path.exists(
                dest + "\\" + filename.replace(".bin", "_Cropped.bin"))

            if not path_exists or overwrite:

                #Crops the four pallets out of the cloud and saves them in individual files
                # -SILENT stops a cloud compare console popping up (useful for debug as it will stop the program after completing its task)
                # -O opens the file listed directly after
                # -CROP crops all loaded clouds with parameters {Xmin:Ymin:Zmin:Xmax:Ymax:Zmax}
                # -CLEAR clears all the loaded clouds
                #cropping_pallets produces the list containing the command-line arguments for any number of pallets (see the sketch after this function)
                subprocess.run([cc_path, "-SILENT"] +
                               cropping_pallets(bin_path, pallet_dimensions),
                               shell=True)

                bin_path = bin_path.replace(".bin", "")
                print("Cropped the clouds for:")
                print(bin_path)
                print("")

                #Lists the cropped pallet files and assigns their file path
                pallet_files = [
                    src + "\\" + x for x in diff(os.listdir(src), all_files)
                ]

                #Merges the four pallets into one cloud
                # -SILENT stops a cloud compare console popping up (useful for debug as it will stop the program after completing its task)
                # -O opens the file listed directly after
                # -MERGE_CLOUDS merges all the loaded clouds
                subprocess.run([cc_path, "-SILENT"] +
                               pallet_list(pallet_files) + ["-MERGE_CLOUDS"],
                               shell=True)

                print("Merged cropped clouds for:")
                print(bin_path)
                print("")

                #Deletes the individually cropped pallet files
                for pallet in pallet_files:
                    os.remove(pallet)

                #Finds the name of the merged file by comparing the list of files from before the cropping with the current list of files in the src directory
                merge_file = diff(os.listdir(src), all_files)[0]

                #Deletes the old merged file if it already exists and the overwrite option was on
                if path_exists and overwrite:
                    os.remove(dest + "\\" +
                              filename.replace(".bin", "_Cropped.bin"))
                #Moves the merged file to the dest folder and adds "_Cropped" to the original filename
                os.rename(
                    src + "\\" + merge_file,
                    dest + "\\" + filename.replace(".bin", "_Cropped.bin"))

        print("Section " + section + " week " + week + " completed.")
        print("")
def m3c2(overwrite, src, dest, m3c2_folder, registration_folder,
         first_day_txt_file):
    #Path to the CloudCompare exe file
    cc_path = r"C:\Program Files\CloudCompare\CloudCompare.exe"

    #Create destination folder if it doesn't already exist
    os.makedirs(dest, exist_ok=True)

    dated_sections = determine_section_week(src)
    for pairs in dated_sections:
        section = pairs[0]
        week = pairs[1]
        #M3C2 parameter file path
        m3c2_parameters = m3c2_folder + "\\m3c2_params_" + section + "_" + week + ".txt"

        #Folder containing the registration error values
        registration_error_files = registration_folder + "\\Median + MAD"

        print("Starting M3C2 comparisons")
        print("Registration error given by Median Absolute Deviation ")
        print("")

        #Set previous_date to a trigger value so registration_update knows the registration error in the M3C2 parameter file has not been updated yet
        previous_date = "00"

        #List of files for the current section and week
        all_files = [
            x for x in os.listdir(src)
            if "Zone" + section in x and date_boundary(x, week)
        ]

        #Works out the date of the file that all other files are being compared against for this section and week
        first_day = determine_first_day(first_day_txt_file, section, week)

        #Finds the filename of the file that all other files are compared against for this section and week
        first_day_file = [x for x in all_files if "201808" + first_day in x][0]

        #File path of the first day after it has been shifted back to its original position
        first_day_shifted = src + "\\" + first_day_file.replace(
            ".bin", "_Origin.bin")

        #List of the remaining files to be compared to
        compare_files = [x for x in all_files if x != first_day_file]

        for filename in compare_files:
            #Finds the date of the current file
            current_date = filename[filename.find('201808') +
                                    6:filename.find('201808') + 8]

            #File path of the current file after it has been shifted to the first_day_shifted file
            compare_shifted = src + "\\" + filename.replace(
                ".bin", "_Median.bin")

            #Output file name
            compared_name = dest + "\\" + filename[
                0:filename.find("Zone") +
                5] + "_M3C2_Projected_Onto_SFM_201808" + first_day + ".bin"

            #Checks if the output file exists
            path_check = os.path.exists(compared_name)
            if not path_check or overwrite:

                #Shifts the current file towards the first_day_file
                z_shift(src, registration_folder, first_day, current_date,
                        first_day_file, filename, section, week, cc_path)

                #Updates the registration error calculated via the median + MAD of the M3C2 comparison of the wooden pallets of the current file and first day file
                registration_update(first_day, current_date, previous_date,
                                    m3c2_parameters, registration_error_files,
                                    section, week)

                #Performs the M3C2 comparison
                # -SILENT stops a cloud compare console popping up (useful for debug as it will stop the program after completing its task)
                # -O cloud opens the file with path given by cloud
                # -M3C2 performs the M3C2 calculation on the first two loaded clouds. If a 3rd cloud is loaded, it will be used as core points.
                # Parameters are given by the txt file at m3c2_parameters
                subprocess.run([
                    cc_path, "-SILENT", "-O", first_day_shifted, "-O",
                    compare_shifted, "-M3C2", m3c2_parameters
                ],
                               shell=True)

                #Deletes old file if it exists and the overwrite option is on
                if overwrite and path_check:
                    os.remove(compared_name)

                #Moves the M3C2 output file to the destination folder and renames it to the name given by compared_name
                os.rename(first_day_shifted.replace('.bin', "_M3C2.bin"),
                          compared_name)

                #Deletes the median shifted current file
                os.remove(compare_shifted)

                #Changes the previous date so the registration error can be changed
                previous_date = current_date

            print("First day of section " + section + " week " + week +
                  " compared with:")
            print(filename)
            print("")

        #If the first_day_shifted file has been created it is deleted
        if os.path.exists(first_day_shifted):
            os.remove(first_day_shifted)

        #Returns the registration error file to the default value
        registration_update(first_day, "99", previous_date, m3c2_parameters,
                            registration_error_files, section, week)
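
#Minimal sketch (not from the original source) of the bare CloudCompare M3C2 call that
#both m3c2() above and pallet_registration() below wrap. The arguments are placeholders;
#the flags are the ones documented in the comments above. The output cloud is written
#next to the first loaded cloud with an "_M3C2" suffix, which is why the callers rename
#"<first cloud>_M3C2.bin" afterwards.
def run_m3c2(cc_path, reference_cloud, compared_cloud, m3c2_parameters):
    subprocess.run([cc_path, "-SILENT",
                    "-O", reference_cloud,      #first loaded cloud (reference)
                    "-O", compared_cloud,       #second loaded cloud (compared)
                    "-M3C2", m3c2_parameters],  #parameter txt file
                   shell=True)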
def pallet_registration(overwrite,src,dest,m3c2Parameters,registration_folder):
    #Runtime: approx five minutes per week per section.
    #Output: median and median + MAD files for every comparison per week per section.
    
    print("Comparing pallets with M3C2")
    print("")
    #Path to the CloudCompare exe file
    cc_path =r"C:\Program Files\CloudCompare\CloudCompare.exe"

    #Creates the destination folder if it does not already exist
    os.makedirs(dest, exist_ok=True)

    print("M3C2 Parameters sourced from: ")
    print(m3c2Parameters)
    print("")
        
    dated_sections = determine_section_week(src)
    for pairs in dated_sections:
        section = pairs[0]
        week = pairs[1]
        
        #Creates the registration error folders if they do not already exist
        os.makedirs(registration_folder + "\\Median",exist_ok = True)
        os.makedirs(registration_folder + "\\Median + MAD",exist_ok=True)
        
        #Lists all the files in the src directory of the current week and section if they are not z shifted 
        allFiles = [x for x in os.listdir(src) if "Zone" + section in x and date_boundary(x, week) and "_Shift" not in x]
        
        #Compares all files in the same week and section with each other projecting on each cloud
        for i in range(len(allFiles)):
            
            #File path for the file that all other files will be compared to using M3C2
            firstDayPath = src + "\\" + allFiles[i]
            
            #Date of the first day
            first_day = allFiles[i][allFiles[i].find("201808") + 6: allFiles[i].find("201808") + 8]
            
            #List of the remaining files
            compareFiles = [allFiles[j] for j in range(len(allFiles)) if j != i]
           
            for filename in compareFiles:
                
                #Output file name for M3C2 comparison
                comparedName = dest + "\\" + filename[0:filename.find("Zone") + 5] + "_M3C2_Projected_Onto_201808" + first_day + ".bin"
                
                #Check if the M3C2 output file exists 
                path_exists = os.path.exists(comparedName)
                
                if not path_exists or overwrite:
                    #Path to the current file
                    comparePath = src + "\\"  + filename
                    
                    #Performs the M3C2 comparison
                    # -SILENT stops a cloud compare console popping up (useful for debug as it will stop the program after completing its task)
                    # -O cloud opens the file with path given by cloud
                    # -M3C2 performs the M3C2 calculation on the first two loaded clouds. If a 3rd cloud is loaded, it will be used as core points.
                    # Parameters are given by the txt file at m3c2_parameters
                    subprocess.run([cc_path,"-SILENT","-O", firstDayPath, "-O", comparePath, "-M3C2", m3c2Parameters] ,shell = True)
                                      
                    #Deletes old output file if it exists and the overwrite option is on
                    if overwrite and path_exists:
                        os.remove(comparedName)
                    
                    #Moves the M3C2 file to the destination folder and renames it using the comparedName
                    os.rename(firstDayPath.replace('.bin',"_M3C2.bin"),comparedName)
                    
                #Checks if the csv file for the M3C2 output file exists
                path_exists = os.path.exists(comparedName.replace('.bin','.csv'))
                
                if not path_exists or overwrite:
                    #Deletes old output file if it already exists and the overwrite option is on
                    if overwrite and path_exists:
                        os.remove(comparedName.replace('.bin','.csv'))
                    
                    #Converts the bin file to a csv file
                    # -SILENT stops a cloud compare console popping up (useful for debug as it will stop the program after completing its task)
                    # -O cloud opens the file with path given by cloud
                    # -NO_TIMESTAMP prevents the saved files having timestamps in their name
                    # -C_EXPORT_FMT  sets the default output format for clouds to the next given (in this case "ASC")
                    # -SEP specifies the separator character as the next given (in this case "SEMICOLON")
                    # -EXT specifies the file extension as the next given (in this case "CSV")
                    # -SAVE_CLOUDS saves all the loaded clouds
                    subprocess.run([cc_path, "-SILENT","-O", comparedName, "-NO_TIMESTAMP", "-C_EXPORT_FMT", "ASC", "-SEP", "SEMICOLON", "-EXT", "CSV", "-SAVE_CLOUDS"], shell=True)
                
                #The date of the current file
                current_date = filename[filename.find("201808") + 6:filename.find("201808") + 8]
                
                #Destination file name for the median of the M3C2 comparison between the current file and the first day file
                dest_file = registration_folder +  "\\Median\\" + first_day + current_date + section + ".txt"
                
                #Write the median M3C2 value to file
                median_z_shift(overwrite,dest_file,comparedName.replace('.bin','.csv'))
                
                #csv file for the M3C2 comparison of the current file and first day after the current file has been shifted towards the first day file
                comparedName = comparedName.replace(".bin","_Shift.csv")
                
                #Checks if the output file exists
                path_exists = os.path.exists(comparedName)
                if overwrite or not path_exists:
                    #Deletes the output file if it exists and the overwrite option is on
                    if overwrite and path_exists:
                        os.remove(comparedName)
                    
                    #Reads the median value of the unshifted M3C2 comparison
                    with open(dest_file) as median_file:
                        median = float(median_file.readlines()[0])
                    
                    #Shifts the current file towards the first day file using the median of the unshifted M3C2 comparison
                    shifted_file = src + "\\" + shifted_cloud(filename,src + "\\" + filename,median,registration_folder,cc_path)
                    
                    #Converts the bin file to a csv file
                    # -SILENT stops a cloud compare console popping up (useful for debug as it will stop the program after completing its task)
                    # -NO_TIMESTAMP prevents the saved files having timestamps in their name
                    # -C_EXPORT_FMT  sets the default output format for clouds to the next given (in this case "ASC")
                    # -SEP specifies the separator character as the next given (in this case "SEMICOLON")
                    # -EXT specifies the file extension as the next given (in this case "CSV")
                    # -O cloud opens the file with path given by cloud
                    # -M3C2 performs the M3C2 calculation on the first two loaded clouds. If a 3rd cloud is loaded, it will be used as core points.
                    # Parameters are given by the txt file at m3c2_parameters
                    subprocess.run([cc_path, "-SILENT","-NO_TIMESTAMP", "-C_EXPORT_FMT", "ASC", "-SEP", "SEMICOLON","-EXT", "CSV", "-O", firstDayPath,"-O", shifted_file, "-M3C2", m3c2Parameters], shell=True)
                    
                    #Check if the shifted file exists
                    path_exists = os.path.exists(src + "\\" + filename.replace(".bin","_Shift.bin"))
                    
                    #Deletes the old shifted bin file if it exists and the overwrite option is on
                    if path_exists and overwrite:
                        os.remove(src + "\\" + filename.replace(".bin","_Shift.bin"))
                        os.rename(shifted_file,src + "\\" + filename.replace(".bin","_Shift.bin"))
                    
                    #Deletes the new shifted bin file if the old one exists and the overwrite option is on
                    if path_exists and not overwrite:
                        os.remove(shifted_file)
                        
                    #Renames the new shifted file
                    if not path_exists:
                        os.rename(shifted_file,src + "\\" + filename.replace(".bin","_Shift.bin"))
                    
                    #Check if shifted M3C2 csv file exists
                    path_exists = os.path.exists(comparedName)
                    
                    #Deletes shifted M3C2 csv file if it exists and the overwrite option is on
                    if path_exists and overwrite:
                        os.remove(comparedName)
                    #Renames the M3C2 csv file 
                    os.rename(firstDayPath.replace(".bin", "_M3C2.csv"), comparedName)
                    
                #Output file name for |Median| + MAD
                dest_file = dest_file.replace(".txt", "Z.txt").replace("\\Median\\", "\\Median + MAD\\")
                
                #Calculate |Median| + MAD and save to dest_file
                median_absolute_deviation_error(overwrite,dest_file,comparedName)
                
            print("Finished comparing with " + allFiles[i])