Example #1
    def update_projectfile(self):
        self.loop_projectfile = export_to_projectfile(self.loop_projectfile,
                                                      self.tmp_path,
                                                      self.output_path,
                                                      self.bbox_3d,
                                                      self.proj_crs)

        print("PROJECTFILE FOUND AT", self.loop_projectfile)
Example #2
    def load_dtm(self, aus=True):

        polygon_ll = self.polygon.to_crs(self.dtm_crs)

        minlong = polygon_ll.total_bounds[0] - self.step_out
        maxlong = polygon_ll.total_bounds[2] + self.step_out
        minlat = polygon_ll.total_bounds[1] - self.step_out
        maxlat = polygon_ll.total_bounds[3] + self.step_out

        print("Fetching DTM... ", end=" bbox:")
        print(minlong, maxlong, minlat, maxlat)
        downloaded = False
        i = 0
        print('Attempt: 0 ', end='')
        local_dtm = False
        geotif_file = 'F:/Loop_Data/BGS/terr50_gagg_gb/terr50_gagg_gb_all.tif'
        while not downloaded and i < 100:
            try:
                if (aus):
                    m2l_utils.get_dtm(self.dtm_file, minlong, maxlong, minlat,
                                      maxlat)
                elif (local_dtm):
                    bbox = [
                        self.bbox_3d["minx"], self.bbox_3d["miny"],
                        self.bbox_3d["maxx"], self.bbox_3d["maxy"]
                    ]
                    m2l_utils.get_local_dtm(self.dtm_path, geotif_file,
                                            self.dtm_crs, bbox)

                else:
                    m2l_utils.get_dtm_hawaii(self.dtm_file, minlong, maxlong,
                                             minlat, maxlat)

                downloaded = True
            except Exception:
                # Wait and retry; give up after 100 attempts
                time.sleep(10)
                i = i + 1
                print(' ', i, end='')
        if not downloaded:
            raise NameError(
                'map2loop error: Could not access DTM server after 100 attempts'
            )
        print('Done.')

        if (not local_dtm):
            geom_rp = m2l_utils.reproject_dtm(self.dtm_file,
                                              self.dtm_reproj_file,
                                              self.dtm_crs, self.proj_crs)

        self.dtm = rasterio.open(self.dtm_reproj_file)

        if self.quiet == 'None':
            plt.imshow(self.dtm.read(1), cmap='terrain', vmin=0, vmax=1000)

            plt.title('DTM')
            plt.show()
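The download loop above retries the DTM fetch with a fixed delay and gives up after 100 attempts. A minimal, generic sketch of that capped-retry pattern (the function and parameter names here are hypothetical, not part of map2loop):

import time

def fetch_with_retries(fetch, max_attempts=100, wait_seconds=10):
    # 'fetch' is any callable that raises on failure and returns on success
    for attempt in range(1, max_attempts + 1):
        try:
            return fetch()
        except Exception:
            print(' ', attempt, end='')
            time.sleep(wait_seconds)
    raise RuntimeError(
        'Could not access server after {} attempts'.format(max_attempts))

# e.g. fetch_with_retries(lambda: m2l_utils.get_dtm(dtm_file, minlong,
#                                                   maxlong, minlat, maxlat))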
Example #3
    def calc_depth_grid(self, dtb):
        dtm = self.dtm

        if dtb == "":
            self.dtb = 0
            self.dtb_null = 0

            print("dtb and dtb_null set to 0")
            return

        # TODO: dtb_grid, dtb_null, cover_map_path, dtb_clip, cover_dip and cover_spacing below still need to be defined; every function call below here that passes a False boolean is referencing the workflow['cover_map'] flag
        # dtb_grid = os.path.join(data_path,'young_cover_grid.tif') #obviously hard-wired for the moment
        # dtb_null = '-2147483648' #obviously hard-wired for the moment
        # cover_map_path = os.path.join(data_path,'Young_Cover_FDS_MGA_clean.shp') #obviously hard-wired for the moment
        # dtb_clip = os.path.join(output_path,'young_cover_grid_clip.tif') #obviously hard-wired for the moment
        # cover_dip = 10 # dip of cover away from contact
        # cover_spacing = 5000 # of contact grid in metres

        dtb_raw = rasterio.open(dtb_grid)

        cover = gpd.read_file(cover_map_path)

        with fiona.open(cover_map_path, "r") as shapefile:
            shapes = [feature["geometry"] for feature in shapefile]

        with rasterio.open(dtb_grid) as src:
            out_image, out_transform = rasterio.mask.mask(src,
                                                          shapes,
                                                          crop=True)
            out_meta = src.meta.copy()

        out_meta.update({
            "driver": "GTiff",
            "height": out_image.shape[1],
            "width": out_image.shape[2],
            "transform": out_transform
        })

        with rasterio.open(dtb_clip, "w", **out_meta) as dest:
            dest.write(out_image)

        dtb = rasterio.open(dtb_clip)

        m2l_geometry.process_cover(output_path,
                                   dtm,
                                   dtb,
                                   dtb_null,
                                   cover,
                                   workflow['cover_map'],
                                   cover_dip,
                                   bbox,
                                   proj_crs,
                                   cover_spacing,
                                   contact_decimate=3,
                                   use_vector=True,
                                   use_grid=True)
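The clip step above is the standard rasterio mask-and-write recipe. A self-contained sketch of just that recipe, with hypothetical file paths standing in for the hard-wired ones:

import fiona
import rasterio
import rasterio.mask

cover_shp = 'cover.shp'        # hypothetical cover polygons
dtb_tif = 'dtb_grid.tif'       # hypothetical depth-to-basement grid
dtb_clip_tif = 'dtb_clip.tif'  # output

with fiona.open(cover_shp, 'r') as shapefile:
    shapes = [feature['geometry'] for feature in shapefile]

with rasterio.open(dtb_tif) as src:
    out_image, out_transform = rasterio.mask.mask(src, shapes, crop=True)
    out_meta = src.meta.copy()

out_meta.update({'driver': 'GTiff',
                 'height': out_image.shape[1],
                 'width': out_image.shape[2],
                 'transform': out_transform})

with rasterio.open(dtb_clip_tif, 'w', **out_meta) as dest:
    dest.write(out_image)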
Example #4
def round_vertices(layer_file, precision, output_file):

    simpledec = re.compile(r"\d*\.\d+")

    def mround(match):
        return "{:.{}f}".format(float(match.group()), precision)

    layer_file.geometry = layer_file.geometry.apply(
        lambda x: loads(re.sub(simpledec, mround, x.wkt)))
    layer_file.to_file(output_file)
    print("rounded file written to ", output_file)
Example #5
    def propagate_contact_dips(self, contact_dip,
                               contact_orientation_decimate):
        print("Propagating dips along contacts...")
        orientations = pd.read_csv(
            os.path.join(self.output_path, 'orientations.csv'), sep=', ')
        # basal_contacts is read from the shapefile because the csv version
        # has no geometry column
        contacts = gpd.read_file(
            os.path.join(self.tmp_path, 'basal_contacts.shp'))
        # contact_dip = -999
        # contact_orientation_decimate = 5
        m2l_geometry.save_basal_contacts_orientations_csv(
            contacts, orientations, self.geol_clip, self.tmp_path,
            self.output_path, self.dtm, self.dtb, self.dtb_null, False,
            contact_orientation_decimate, self.c_l, contact_dip, self.dip_grid,
            self.spacing, self.bbox)
Example #6
def explode_polylines(indf, c_l, dst_crs):
    # indf = gpd.GeoDataFrame.from_file(indata)
    outdf = gpd.GeoDataFrame(columns=indf.columns, crs=dst_crs)
    for idx, row in indf.iterrows():
        if type(row.geometry) == LineString:
            outdf = outdf.append(row, ignore_index=True)
        if type(row.geometry) == MultiLineString:
            multdf = gpd.GeoDataFrame(columns=indf.columns, crs=dst_crs)
            recs = len(row.geometry)
            multdf = multdf.append([row] * recs, ignore_index=True)
            i = 0
            for geom in range(recs):
                multdf.loc[geom, 'geometry'] = row.geometry[geom]
                multdf.loc[geom, c_l['o']] = str(
                    multdf.loc[geom, c_l['o']]) + '_' + str(i)
                print('map2loop warning: Fault_' + multdf.loc[geom, c_l['o']],
                      'is one of a set of duplicates, so renumbering')
                i = i + 1
            outdf = outdf.append(multdf, ignore_index=True)
    return outdf
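explode_polylines relies on GeoDataFrame.append and Shapely 1.x MultiLineString indexing, so it assumes an older geopandas/pandas stack. On recent geopandas the same split can be done with GeoDataFrame.explode; a minimal sketch (the column name is hypothetical, and index_parts needs geopandas 0.10 or newer):

import geopandas as gpd
from shapely.geometry import MultiLineString

gdf = gpd.GeoDataFrame(
    {'FAULT_ID': ['Fault_1']},
    geometry=[MultiLineString([[(0, 0), (1, 1)], [(2, 2), (3, 3)]])],
    crs='EPSG:28350')

# One row per component LineString; the part number ends up in the index
exploded = gdf.explode(index_parts=True)
print(exploded)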
Example #7
    def export_png(self):
        filename = self.loop_projectfile
        if self.loop_projectfile is None:
            # TODO: Make sure these user provided paths end with a slash or are joined properly
            filename = os.path.join(self.project_path, 'GEOLOGY_CLIP')
        print("Exporting graphical map...")
        try:

            self.geology_figure.savefig("{}.png".format(filename))
            print("Geology graphic exported to: ", filename)
        except Exception as e:
            print(e)
            print("WARNING: Could not save geology graphic")
Example #8
    def create_cmap(self):
        # Make colours consistent from map to model
        formations = sorted([
            formation.replace(" ", "_").replace('-', '_') for formation in
            list(set(self.geol_clip[self.c_l['c']].to_numpy()))
        ])
        temp_colours = [""] * len(formations)
        self.colour_dict = dict(zip(formations, temp_colours))
        try:
            # Try to retrieve the clut reference
            colour_ref = pd.read_csv(self.clut_path)
            for formation in formations:
                key = formation
                colour = None
                try:
                    colour = colour_ref[colour_ref['UNITNAME'] ==
                                        key]['colour'].to_numpy()[0]
                except Exception as e:
                    colour = ('#%02X%02X%02X' %
                              (random.randint(0, 255), random.randint(
                                  0, 255), random.randint(0, 255)))

                self.colour_dict[key] = colour
                print(key, colour)

        except Exception as e:
            # Otherwise, just append a random set
            self.clut_path = ""
            random_colours = [
                '#%02X%02X%02X' % (random.randint(
                    0, 255), random.randint(0, 255), random.randint(0, 255))
                for i in range(len(formations))
            ]
            for i, key in enumerate(self.colour_dict.keys()):
                self.colour_dict[key] = random_colours[i]

        self.cmap = colors.ListedColormap(self.colour_dict.values(),
                                          name='geol_key')
Example #9
    def test_interpolation(self, interpolation_spacing, misorientation,
                           interpolation_scheme):

        geology_file = self.geol_clip_file
        structure_file = self.structure_clip_file
        basal_contacts = os.path.join(self.tmp_path, 'basal_contacts.shp')
        self.spacing = interpolation_spacing  # grid spacing in meters
        # misorientation = misorientation
        self.scheme = interpolation_scheme
        orientations = self.structures

        quiet_interp = True
        if self.quiet == "None":
            quiet_interp = False

        group_girdle = m2l_utils.plot_bedding_stereonets(
            orientations, self.geology, self.c_l, quiet_interp)
        super_groups, self.use_gcode3 = Topology.super_groups_and_groups(
            group_girdle, self.tmp_path, misorientation)
        # print(super_groups)
        # print(self.geology['GROUP_'].unique())
        bbox = self.bbox

        orientation_interp, contact_interp, combo_interp = m2l_interpolation.interpolation_grids(
            geology_file, structure_file, basal_contacts, bbox, self.spacing,
            self.proj_crs, self.scheme, super_groups, self.c_l)

        with open(os.path.join(self.tmp_path, 'interpolated_orientations.csv'),
                  'w') as f:
            f.write('X, Y, l, m, n, dip, dip_dir\n')
            for row in orientation_interp:
                ostr = '{}, {}, {}, {}, {}, {}, {}\n'.format(
                    row[0], row[1], row[2], row[3], row[4], row[5], row[6])
                f.write(ostr)
        with open(os.path.join(self.tmp_path, 'interpolated_contacts.csv'),
                  'w') as f:
            f.write('X, Y, l, m, angle\n')
            for row in contact_interp:
                ostr = '{}, {}, {}, {}, {}\n'.format(row[0], row[1], row[2],
                                                     row[3], row[4])
                f.write(ostr)
        with open(os.path.join(self.tmp_path, 'interpolated_combined.csv'),
                  'w') as f:
            f.write('X, Y, l, m, n, dip, dip_dir\n')
            for row in combo_interp:
                ostr = '{}, {}, {}, {}, {}, {}, {}\n'.format(
                    row[0], row[1], row[2], row[3], row[4], row[5], row[6])
                f.write(ostr)

        if (self.spacing < 0):
            self.spacing = -(bbox[2] - bbox[0]) / self.spacing
        self.x = int((bbox[2] - bbox[0]) / self.spacing) + 1
        self.y = int((bbox[3] - bbox[1]) / self.spacing) + 1
        x = self.x
        y = self.y
        print(x, y)
        dip_grid = np.ones((y, x))
        dip_grid = dip_grid * -999
        dip_dir_grid = np.ones((y, x))
        dip_dir_grid = dip_dir_grid * -999
        contact_grid = np.ones((y, x))
        contact_grid = contact_grid * -999
        for row in combo_interp:
            r = int((row[1] - bbox[1]) / self.spacing)
            c = int((row[0] - bbox[0]) / self.spacing)
            dip_grid[r, c] = float(row[5])
            dip_dir_grid[r, c] = float(row[6])

        for row in contact_interp:
            r = int((row[1] - bbox[1]) / self.spacing)
            c = int((row[0] - bbox[0]) / self.spacing)
            contact_grid[r, c] = float(row[4])

        self.dip_grid = dip_grid
        self.dip_dir_grid = dip_dir_grid

        if self.quiet == 'None':
            print('interpolated dips')
            plt.imshow(self.dip_grid,
                       cmap="hsv",
                       origin='lower',
                       vmin=-90,
                       vmax=90)
            plt.show()

            print('interpolated dip directions')
            plt.imshow(self.dip_dir_grid,
                       cmap="hsv",
                       origin='lower',
                       vmin=0,
                       vmax=360)
            plt.show()

            print('interpolated contacts')
            plt.imshow(contact_grid,
                       cmap="hsv",
                       origin='lower',
                       vmin=-360,
                       vmax=360)
            plt.show()
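The gridding step above places each interpolated point into a row/column cell by integer division of its offset from the bounding-box origin. A standalone sketch of that binning pattern, with made-up numbers:

import numpy as np

bbox = (0.0, 0.0, 100.0, 50.0)   # minx, miny, maxx, maxy (hypothetical)
spacing = 10.0
nx = int((bbox[2] - bbox[0]) / spacing) + 1
ny = int((bbox[3] - bbox[1]) / spacing) + 1

grid = np.full((ny, nx), -999.0)  # -999 marks cells with no data, as above
points = [(5.0, 5.0, 30.0), (95.0, 45.0, 60.0)]  # (x, y, value)
for x, y, value in points:
    r = int((y - bbox[1]) / spacing)  # row from northing
    c = int((x - bbox[0]) / spacing)  # column from easting
    grid[r, c] = value
print(grid[0, 0], grid[4, 9])  # 30.0 60.0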
Example #10
    def run_map2model(self, deposits, aus):
        quiet_m2m = False
        if self.quiet == 'all':
            quiet_m2m = True
        if self.mindeps is not None:
            run_log = map2model.run(self.graph_path, self.geology_file_csv,
                                    self.fault_file_csv, self.mindep_file_csv,
                                    self.bbox_3d, self.c_l, quiet_m2m,
                                    deposits)
        else:
            run_log = map2model.run(self.graph_path, self.geology_file_csv,
                                    self.fault_file_csv, "", self.bbox_3d,
                                    self.c_l, quiet_m2m, deposits)

        print(run_log)

        print("Resolving ambiguities using ASUD...", end='\toutput_dir:')
        if aus:
            Topology.use_asud(self.strat_graph_file, self.graph_path)
            self.strat_graph_file = os.path.join(self.graph_path,
                                                 'ASUD_strat.gml')
        print("Done.")

        print("Generating topology graph display and unit groups...")
        self.G = nx.read_gml(self.strat_graph_file, label='id')
        selected_nodes = [n for n, v in self.G.nodes(data=True) if n >= 0]

        if self.quiet == 'None':
            nx.draw_networkx(self.G,
                             pos=nx.kamada_kawai_layout(self.G),
                             arrows=True,
                             nodelist=selected_nodes)

        nlist = list(self.G.nodes.data('LabelGraphics'))
        nlist.sort()
        for node in nlist:
            if node[0] >= 0:
                elem = str(node[1]).replace("{'text':", "").replace(
                    ", 'fontSize': 14}", "")
                # second = elem.split(":").replace("'", "")
                print(node[0], " ", elem)

        # plt.savefig(os.path.join(self.tmp_path,"topology-fig.png"))
        print("Topology figure saved to",
              os.path.join(self.tmp_path, "topology-fig.png"))

        # Save groups of stratigraphic units
        groups, self.glabels, G = Topology.get_series(self.strat_graph_file,
                                                      'id')

        quiet_topology = True
        if self.quiet == 'None':
            quiet_topology = False
        Topology.save_units(
            G,
            self.tmp_path,
            self.glabels,
            Australia=True,
            asud_strat_file=
            "https://gist.githubusercontent.com/yohanderose/3b257dc768fafe5aaf70e64ae55e4c42/raw/8598c7563c1eea5c0cd1080f2c418dc975cc5433/ASUD.csv",
            quiet=quiet_topology)

        print("Done")
Example #11
    def run(self):

        if self.quiet == "all":
            enable_quiet_mode()

        with tqdm(total=100, position=0) as pbar:
            pbar.update(0)

            print("Generating topology analyser input...")
            self.config.export_csv()
            self.config.run_map2model(self.run_flags['deposits'],
                                      self.run_flags['aus'])
            pbar.update(10)

            self.config.load_dtm(self.dtm_file if self.local else self.state)
            pbar.update(10)

            self.config.join_features()
            pbar.update(10)

            self.config.calc_depth_grid(self.run_flags['dtb'])
            pbar.update(10)

            self.config.export_orientations(
                self.run_flags['orientation_decimate'])
            pbar.update(10)
            self.config.export_contacts(self.run_flags['contact_decimate'],
                                        self.run_flags['intrusion_mode'])
            pbar.update(10)
            self.config.test_interpolation(
                self.run_flags['interpolation_spacing'],
                self.run_flags['misorientation'],
                self.run_flags['interpolation_scheme'])
            pbar.update(10)

            # TODO: make all these internal, the config class already has the run_flags dictionary
            self.config.export_faults(self.run_flags['fault_decimate'],
                                      self.run_flags['min_fault_length'],
                                      self.run_flags['fault_dip'])
            self.config.process_plutons(self.run_flags['pluton_dip'],
                                        self.run_flags['pluton_form'],
                                        self.run_flags['dist_buffer'],
                                        self.run_flags['contact_decimate'])
            pbar.update(20)

            # Seismic section is in the Hamersley model area
            if (self.workflow['seismic_section']):
                self.config.extract_section_features(seismic_line_file="",
                                                     seismic_bbox_file="",
                                                     seismic_interp_file="")

            if self.workflow["contact_dips"]:
                self.config.propagate_contact_dips(
                    self.run_flags['contact_dip'],
                    self.run_flags['contact_orientation_decimate'])

            if (self.workflow['formation_thickness']):
                self.config.calc_thickness(
                    self.run_flags['contact_decimate'],
                    self.run_flags['null_scheme'],
                    self.run_flags['thickness_buffer'],
                    self.run_flags['max_thickness_allowed'], self.c_l)

            if self.workflow["fold_axial_traces"]:
                self.config.create_fold_axial_trace_points(
                    self.run_flags['fold_decimate'],
                    self.run_flags['fat_step'], self.run_flags['close_dip'])

            # Preprocess model inputs
            inputs = ()
            if (self.workflow['model_engine'] == 'geomodeller'):
                inputs = ('invented_orientations', 'intrusive_orientations',
                          'fat_orientations', 'fault_tip_contacts',
                          'contact_orientations')
            elif (self.workflow['model_engine'] == 'loopstructural'):
                inputs = ('invented_orientations', 'fat_orientations',
                          'contact_orientations')
            elif (self.workflow['model_engine'] == 'gempy'):
                inputs = ('invented_orientations', 'interpolated_orientations',
                          'fat_orientations', 'contact_orientations')
            elif (self.workflow['model_engine'] == 'noddy'):
                inputs = ()

            self.config.postprocess(inputs, self.workflow,
                                    self.run_flags['use_interpolations'],
                                    self.run_flags['use_fat'])
            pbar.update(10)

            self.config.save_cmap()

            self.config.update_projectfile()
            self.config.export_png()

        disable_quiet_mode()
Example #12
    def __init__(self,
                 project_path,
                 overwrite,
                 geology_file,
                 fault_file,
                 fold_file,
                 structure_file,
                 mindep_file,
                 bbox_3d,
                 polygon,
                 step_out,
                 dtm_crs,
                 proj_crs,
                 local,
                 quiet,
                 loopFilename,
                 c_l={},
                 **kwargs):

        self.project_path = project_path

        if overwrite is False:
            print(
                "WARNING: Overwrite should be a string value {true, in-place} ..."
            )
            self.check_overwrite()
        if overwrite is True:
            print(
                "WARNING: Overwrite should be a string value {true, in-place} ... converting to true."
            )
            overwrite = 'true'

        if (not os.path.exists(project_path)):
            # Create proj root dir if doesn't exist
            os.mkdir(project_path)
        elif overwrite == "in-place":
            # Pass if proj root exists and complete overwrite not wanted
            pass
        else:
            # Remove if exists and accept user's direction
            if overwrite == "true":
                shutil.rmtree(project_path)
                while os.path.exists(project_path):
                    pass
                os.mkdir(project_path)
            else:
                self.check_overwrite()

        self.graph_path = os.path.join(self.project_path, 'graph')
        self.tmp_path = os.path.join(self.project_path, 'tmp')
        self.data_path = os.path.join(self.project_path, 'data')
        self.dtm_path = os.path.join(self.project_path, 'dtm')
        self.output_path = os.path.join(self.project_path, 'output')
        self.vtk_path = os.path.join(self.project_path, 'vtk')

        self.fault_file_csv = os.path.join(self.tmp_path, "faults.csv")
        self.fault_output_file_csv = os.path.join(self.output_path,
                                                  "faults.csv")
        self.structure_file_csv = os.path.join(self.tmp_path, "structure.csv")
        self.geology_file_csv = os.path.join(self.tmp_path, "geology.csv")
        self.mindep_file_csv = os.path.join(self.tmp_path, "mindep.csv")

        self.strat_graph_file = os.path.join(self.graph_path,
                                             "graph_strat_NONE.gml")
        self.dtm_file = os.path.join(self.dtm_path, 'dtm.tif')
        self.dtm_reproj_file = os.path.join(self.dtm_path, 'dtm_rp.tif')

        if (not os.path.isdir(self.tmp_path)):
            os.mkdir(self.tmp_path)
        if (not os.path.isdir(self.data_path)):
            os.mkdir(self.data_path)
        if (not os.path.isdir(self.output_path)):
            os.mkdir(self.output_path)
        if (not os.path.isdir(self.dtm_path)):
            os.mkdir(self.dtm_path)
        if (not os.path.isdir(self.vtk_path)):
            os.mkdir(self.vtk_path)
        if (not os.path.isdir(self.graph_path)):
            os.mkdir(self.graph_path)

        self.quiet = quiet
        if self.quiet == 'all':
            enable_quiet_mode()

        self.bbox_3d = bbox_3d
        self.bbox = tuple([
            bbox_3d["minx"], bbox_3d["miny"], bbox_3d["maxx"], bbox_3d["maxy"]
        ])
        self.polygon = polygon
        self.step_out = step_out

        self.quiet = quiet
        self.c_l = c_l

        self.dtm_crs = dtm_crs
        self.proj_crs = proj_crs

        self.loop_projectfile = loopFilename

        # Check input maps for missing values
        drift_prefix = kwargs.get('drift_prefix', ['None'])
        self.local = local
        # TODO: Check if the fold file is always the same as the fault file or needs to be separated
        # TODO: Allow for input as a polygon, not just a bounding box.
        structure_file, geology_file, fault_file, mindep_file, fold_file, c_l = m2l_map_checker.check_map(
            structure_file, geology_file, fault_file, mindep_file, fold_file,
            self.tmp_path, self.bbox, c_l, proj_crs, self.local, drift_prefix)

        # Process and store workflow params
        self.geology_file = geology_file
        self.structure_file = structure_file
        self.fault_file = fault_file
        self.fold_file = fold_file
        self.mindep_file = mindep_file

        self.clut_path = kwargs['clut_path']

        disable_quiet_mode()
Example #13
    def preprocess(self):
        """[summary]

        :param command: [description], defaults to ""
        :type command: str, optional
        """

        if self.quiet == 'all':
            enable_quiet_mode()

        geology = gpd.read_file(self.geology_file, bbox=self.bbox)
        geology[self.c_l['g']].fillna(geology[self.c_l['g2']], inplace=True)
        geology[self.c_l['g']].fillna(geology[self.c_l['c']], inplace=True)
        faults = gpd.read_file(self.fault_file, bbox=self.bbox)
        folds = gpd.read_file(self.fold_file, bbox=self.bbox)
        structures = gpd.read_file(self.structure_file, bbox=self.bbox)
        mindeps = None
        try:
            mindeps = gpd.read_file(self.mindep_file, bbox=self.bbox)
            mindeps.crs = self.proj_crs
        except Exception as e:
            print("Warning: Valid mineral deposit file missing")

        # Fix crs to project default and overwrite source
        geology.crs = self.proj_crs
        faults.crs = self.proj_crs
        folds.crs = self.proj_crs
        structures.crs = self.proj_crs
        self.mindeps = mindeps

        self.geology = geology
        self.faults = faults
        self.structures = structures

        # Faults
        self.faults_clip = faults.copy()
        self.faults_clip.crs = self.proj_crs
        self.faults_clip_file = os.path.join(self.tmp_path, "faults_clip.shp")
        self.faults_clip.to_file(self.faults_clip_file)

        # Geology
        self.geol_clip = m2l_utils.explode(self.geology)
        self.geol_clip.crs = self.proj_crs
        self.geol_clip_file = os.path.join(self.tmp_path, "geol_clip.shp")
        self.geol_clip.to_file(self.geol_clip_file)

        # pd.set_option('display.max_columns', None)
        # pd.set_option('display.max_rows', None)

        # Check if bedding data uses the strike convention instead of dip direction
        if (self.c_l['otype'] == 'strike'):
            structures['azimuth2'] = structures.apply(
                lambda row: row[self.c_l['dd']] + 90.0, axis=1)
            self.c_l['dd'] = 'azimuth2'
            self.c_l['otype'] = 'dip direction'
            structures.to_file(self.structure_file)

        # Structures
        list1 = [
            'geometry', self.c_l['d'], self.c_l['dd'], self.c_l['sf'],
            self.c_l['bo']
        ]
        list2 = list(set(list1))
        sub_pts = self.structures[list2]
        structure_code = gpd.sjoin(sub_pts,
                                   self.geol_clip,
                                   how="left",
                                   op="within")

        minx, miny, maxx, maxy = self.bbox
        y_point_list = [miny, miny, maxy, maxy, miny]
        x_point_list = [minx, maxx, maxx, minx, minx]

        bbox_geom = shapely.geometry.Polygon(zip(x_point_list, y_point_list))

        polygo = gpd.GeoDataFrame(index=[0],
                                  crs=self.proj_crs,
                                  geometry=[bbox_geom])
        is_bed = structure_code[self.c_l['sf']].str.contains(
            self.c_l['bedding'], regex=False)

        structure_clip = structure_code[is_bed]
        structure_clip.crs = self.proj_crs

        if (self.c_l['otype'] == 'strike'):
            structure_clip['azimuth2'] = structure_clip.apply(
                lambda row: row[self.c_l['dd']] + 90.0, axis=1)
            self.c_l['dd'] = 'azimuth2'
            self.c_l['otype'] = 'dip direction'

        self.structure_clip = structure_clip[~structure_clip[self.c_l['o']].
                                             isnull()]
        self.structure_clip_file = os.path.join(self.tmp_path,
                                                'structure_clip.shp')
        self.structure_clip.to_file(self.structure_clip_file)

        self.create_cmap()

        try:
            fig, ax = plt.subplots()
            plt.tight_layout()
            ax.ticklabel_format(axis='both', useOffset=False, style='plain')
            ax.margins(0.0)
            fig.set_facecolor("#ffffff00")

            self.geology_figure = geology.copy().plot(
                column=self.c_l['c'],
                ax=ax,
                figsize=(10, 10),
                edgecolor='#000000',
                linewidth=0.2,
                cmap=self.cmap).get_figure()

            self.export_png()
            fig, ax = plt.subplots()

            base = geology.plot(column=self.c_l['c'],
                                figsize=(10, 10),
                                ax=ax,
                                edgecolor='#000000',
                                linewidth=0.2,
                                legend=True,
                                cmap=self.cmap)
            leg = base.get_legend()
            leg.set_bbox_to_anchor((1.04, 1))

            structures.plot(ax=base, color='none', edgecolor='black')

            faults.plot(ax=base,
                        cmap='rainbow',
                        column=self.c_l['f'],
                        figsize=(10, 10),
                        linewidth=0.4)
            structures[[
                'geometry', self.c_l['gi'], self.c_l['d'], self.c_l['dd']
            ]].plot(ax=base)

            fig = self.polygon.plot(ax=base, color='none',
                                    edgecolor='black').get_figure()
            fig.savefig(os.path.join(self.tmp_path, "input-data.png"))

            if self.quiet == 'None':
                plt.show()

        except Exception as e:
            print(e)

        disable_quiet_mode()
Example #14
def show_metadata(gdf, name):
    if (len(gdf) > 0):
        print("\n", name, " metadata\n--------------------")
        print("    bbox", gdf.total_bounds)
        print("    CRS", gdf.crs)
        print("    # items", len(gdf))
        types = []
        for i, g in gdf.iterrows():
            if (not g.geometry.type in types):
                types.append(g.geometry.type)

        print("    Data types", types)
    else:
        print("\n", name, " metadata\n--------------------")
        print("    empty file, check contents")
Example #15
def check_map(structure_file, geology_file, fault_file, mindep_file, fold_file,
              tmp_path, bbox, c_l, dst_crs, local_paths, drift_prefix):

    # print(gpd.read_file(geology_file).columns)
    # print(gpd.read_file(fault_file).columns)
    # print(gpd.read_file(fold_file).columns)
    # print(gpd.read_file(mindep_file).columns)

    y_point_list = [bbox[1], bbox[1], bbox[3], bbox[3], bbox[1]]
    x_point_list = [bbox[0], bbox[2], bbox[2], bbox[0], bbox[0]]
    bbox_geom = Polygon(zip(x_point_list, y_point_list))
    polygo = gpd.GeoDataFrame(index=[0], crs=dst_crs, geometry=[bbox_geom])

    m2l_errors = []
    m2l_warnings = []
    for file_name in (structure_file, geology_file, fault_file, fold_file):
        if not file_name.startswith("http") and not os.path.isfile(file_name):
            m2l_errors.append('file ' + file_name + ' not found')

    # Process orientation points

    if (os.path.isfile(structure_file) or structure_file.startswith("http")
            or not local_paths):
        orientations2 = gpd.read_file(structure_file, bbox=bbox)
        if (c_l['sf'] == c_l['ds']):
            new_code = 'NEW_' + c_l['sf']
            new_code = new_code[:10]
            orientations = orientations2.rename(columns={c_l['sf']: new_code},
                                                errors="raise")
            m2l_warnings.append(
                'To avoid conflict with geology field of same name, orientation field named "'
                + str(c_l['sf']) + '" renamed to "' + new_code + '"')
            c_l['sf'] = new_code
        else:
            new_code = ''
            orientations = orientations2.copy()
        if (c_l['bo'] == c_l['ds'] and not new_code == ''):
            c_l['bo'] = new_code

        if (len(orientations) < 2):
            m2l_errors.append(
                'not enough orientations to complete calculations (need at least 2), projection may be inconsistent'
            )

        orientations = orientations.replace(r'^\s+$', np.nan, regex=True)
        orientations = orientations[orientations[c_l['d']] != -999]
        for code in ('sf', 'd', 'dd', 'gi'):
            if not c_l[code] in orientations.columns:
                if (code == 'sf'):
                    orientations[c_l[code]] = 'Bed'
                    m2l_warnings.append('field named "' + str(c_l[code]) +
                                        '" added with default value "Bed"')
                elif (not code == 'gi'):
                    m2l_errors.append('"' + c_l[code] + '" field needed')
                else:
                    m2l_warnings.append('field named "' + str(c_l[code]) +
                                        '" added with default value')
                    orientations[c_l[code]] = np.arange(len(orientations))
            else:
                nans = orientations[c_l[code]].isnull().sum()
                if (nans > 0):
                    m2l_warnings.append(
                        '' + str(nans) + ' NaN/blank found in column "' +
                        str(c_l[code]) +
                        '" of orientations file, replacing with 0')
                    orientations[c_l[code]].fillna("0", inplace=True)

        unique_o = set(orientations[c_l['gi']])

        if (not len(unique_o) == len(orientations)):
            m2l_warnings.append('duplicate orientation point unique IDs')
        show_metadata(orientations, "orientations layer")
    # Process geology polygons

    if (os.path.isfile(geology_file) or geology_file.startswith("http")
            or not local_paths):
        geology = gpd.read_file(geology_file, bbox=bbox)
        if (not geology.empty):
            if not c_l['o'] in geology.columns:
                # print(geology.columns)
                geology = geology.reset_index()
                geology[c_l['o']] = geology.index

            unique_g = set(geology[c_l['o']])

            if (not len(unique_g) == len(geology)):
                m2l_warnings.append('duplicate geology polygon unique IDs')

            nans = geology[c_l['c']].isnull().sum()
            if (nans > 0):
                m2l_errors.append('' + str(nans) +
                                  ' NaN/blank found in column "' +
                                  str(c_l['c']) +
                                  '" of geology file, please fix')

            if (c_l['g'] == 'No_col' or not c_l['g'] in geology.columns):
                m2l_warnings.append(
                    'No secondary strat coding for geology polygons')
                c_l['g'] = 'group'
                geology[c_l['g']] = "Top"

            geology = geology.replace(r'^\s+$', np.nan, regex=True)
            geology = geology.replace(',', ' ', regex=True)
            geology[c_l['g']].fillna(geology[c_l['g2']], inplace=True)
            geology[c_l['g']].fillna(geology[c_l['c']], inplace=True)

            if (c_l['r1'] == 'No_col' or not c_l['r1'] in geology.columns):
                m2l_warnings.append('No extra litho for geology polygons')
                c_l['r1'] = 'r1'
                geology[c_l['r1']] = 'Nope'

            if (c_l['r2'] == 'No_col' or not c_l['r2'] in geology.columns):
                m2l_warnings.append('No more extra litho for geology polygons')
                c_l['r2'] = 'r2'
                geology[c_l['r2']] = 'Nope'

            if (c_l['min'] == 'No_col' or not c_l['min'] in geology.columns):
                m2l_warnings.append('No min age for geology polygons')
                c_l['min'] = 'min'
                geology[c_l['min']] = 0

            if (c_l['max'] == 'No_col' or not c_l['max'] in geology.columns):
                m2l_warnings.append('No max age for geology polygons')
                c_l['max'] = 'max'
                geology[c_l['max']] = 100

            if (c_l['c'] == 'No_col' or not c_l['c'] in geology.columns):
                m2l_errors.append(
                    'Must have primary strat coding field for geology polygons'
                )

            for code in ('c', 'g', 'g2', 'ds', 'u', 'r1'):
                if (c_l[code] in geology.columns):

                    geology[c_l[code]] = geology[c_l[code]].str.replace(
                        ",", " ")
                    if (code == 'c' or code == 'g' or code == 'g2'):
                        geology[c_l[code]] = geology[c_l[code]].str.replace(
                            " ", "_")
                        geology[c_l[code]] = geology[c_l[code]].str.replace(
                            "-", "_")

                    nans = geology[c_l[code]].isnull().sum()
                    if (nans > 0):
                        m2l_warnings.append(
                            '' + str(nans) + ' NaN/blank found in column "' +
                            str(c_l[code]) +
                            '" of geology file, replacing with 0')
                        geology[c_l[code]].fillna("0", inplace=True)
            for drift in drift_prefix:
                geology = geology[~geology[c_l['u']].str.startswith(drift)]

            show_metadata(geology, "geology layer")
        else:
            print('No geology in area, projection may be inconsistent')

    # Process fold polylines
    folds = None
    if (os.path.isfile(fold_file) or not local_paths):
        folds = gpd.read_file(fold_file, bbox=bbox)
        if (len(folds) > 0):
            if not c_l['o'] in folds.columns:
                folds = folds.reset_index()
                folds[c_l['o']] = folds.index
            unique_g = set(folds[c_l['o']])

            if (not len(unique_g) == len(folds)):
                m2l_warnings.append('duplicate fold polyline unique IDs')

            folds = folds[folds[c_l['ff']].str.contains(c_l['fold'],
                                                        case=False)]

            folds = folds.replace(r'^\s+$', np.nan, regex=True)

            for code in ('ff', 't'):
                if (c_l['ff'] == 'No_col' or not c_l['ff'] in folds.columns):
                    m2l_warnings.append('No fold code for fold polylines')
                    c_l['ff'] = 'ff'
                    folds[c_l['ff']] = c_l['fold']

                if (c_l['t'] == 'No_col' or not c_l['t'] in folds.columns):
                    m2l_warnings.append('No fold polarity for fold polylines')
                    c_l['t'] = 't'
                    folds[c_l['t']] = 'None'

                if (c_l[code] in folds.columns):
                    folds[c_l[code]] = folds[c_l[code]].str.replace(",", " ")

                    nans = folds[c_l[code]].isnull().sum()
                    if (nans > 0):
                        m2l_warnings.append(
                            '' + str(nans) + ' NaN/blank found in column "' +
                            str(c_l[code]) +
                            '" of folds file, replacing with 0')
                        folds[c_l[code]].fillna("0", inplace=True)

            folds_clip = m2l_utils.clip_shp(folds, polygo)
            if (len(folds_clip) > 0):
                folds_explode = explode_polylines(folds_clip, c_l, dst_crs)
                if (len(folds_explode) > len(folds_clip)):
                    m2l_warnings.append(
                        'some folds are MultiPolyLines, and have been split')
                folds_explode.crs = dst_crs

            show_metadata(folds_clip, "fold layer")
Example #16
        else:
            print('No folds in area, projection may be inconsistent')

    # Process fault polylines

    if (os.path.isfile(fault_file) or fault_file.startswith("http")
            or not local_paths):
        faults_folds = gpd.read_file(fault_file, bbox=bbox)

        faults = faults_folds[faults_folds[c_l['f']].str.contains(c_l['fault'],
                                                                  case=False)]
        faults = faults.replace(r'^\s+$', np.nan, regex=True)

        if not c_l['o'] in faults.columns:
            m2l_warnings.append('field named "' + str(c_l['o']) +
                                '" added with default value')
            faults[c_l['o']] = np.arange(len(faults))
Example #17
    def run(self,
            aus=True,
            deposits="Fe,Cu,Au,NONE",
            dtb='',
            orientation_decimate=0,
            contact_decimate=5,
            intrusion_mode=0,
            interpolation_spacing=500,
            misorientation=30,
            interpolation_scheme='scipy_rbf',
            fault_decimate=5,
            min_fault_length=5000,
            fault_dip=90,
            pluton_dip=45,
            pluton_form='domes',
            dist_buffer=10,
            contact_dip=-999,
            contact_orientation_decimate=5,
            null_scheme='null',
            thickness_buffer=5000,
            max_thickness_allowed=10000,
            fold_decimate=5,
            fat_step=750,
            close_dip=-999,
            use_interpolations=True,
            use_fat=True):
        """Performs the data processing steps of the map2loop workflow.

        :param aus: Indicates if area is in Australia for using ASUD. Defaults to True.
        :type aus: bool
        :param deposits: Mineral deposit names for focused topology extraction. Defaults to "Fe,Cu,Au,NONE".
        :type deposits: str
        :param dtb: Path to depth to basement grid. Defaults to ''.
        :type dtb: str
        :param orientation_decimate: Save every nth orientation data point. Defaults to 0.
        :type orientation_decimate: int
        :param contact_decimate: Save every nth contact data point. Defaults to 5.
        :type contact_decimate: int
        :param intrusion_mode: 1 to exclude all intrusions from basal contacts, 0 to only exclude sills. Defaults to 0.
        :type intrusion_mode: int
        :param interpolation_spacing: Interpolation grid spacing in meters. Defaults to 500.
        :type interpolation_spacing: int
        :param misorientation: Defaults to 30.
        :type misorientation: int
        :param interpolation_scheme: Which interpolation function to use: scipy_rbf (radial basis) or scipy_idw (inverse distance weighted). Defaults to 'scipy_rbf'.
        :type interpolation_scheme: str
        :param fault_decimate: Save every nth fault data point. Defaults to 5.
        :type fault_decimate: int
        :param min_fault_length: Min fault length to be considered. Defaults to 5000.
        :type min_fault_length: int
        :param fault_dip: Defaults to 90.
        :type fault_dip: int
        :param pluton_dip: Defaults to 45.
        :type pluton_dip: int
        :param pluton_form: Possible forms from domes, saucers or pendant. Defaults to 'domes'.
        :type pluton_form: str
        :param dist_buffer: Buffer for processing plutons. Defaults to 10.
        :type dist_buffer: int
        :param contact_dip: Defaults to -999.
        :type contact_dip: int
        :param contact_orientation_decimate: Save every nth contact orientation point. Defaults to 5.
        :type contact_orientation_decimate: int
        :param null_scheme: How null values are represented in the dtb grid. Defaults to 'null'.
        :type null_scheme: str
        :param thickness_buffer: Defaults to 5000.
        :type thickness_buffer: int
        :param max_thickness_allowed: Defaults to 10000.
        :type max_thickness_allowed: int
        :param fold_decimate: Save every nth fold data point. Defaults to 5.
        :type fold_decimate: int
        :param fat_step: How much to step out normal to the fold axial trace. Defaults to 750.
        :type fat_step: int
        :param close_dip: Dip to assign to all new orientations. Defaults to -999.
        :type close_dip: int
        :param use_interpolations: Defaults to True.
        :type use_interpolations: bool
        :param use_fat: Defaults to True.
        :type use_fat: bool
        """

        if self.quiet == 'all':
            enable_quiet_mode()

        with tqdm(total=100, position=0) as pbar:
            pbar.update(0)

            print("Generating topology analyser input...")
            self.config.export_csv()
            self.config.run_map2model(deposits, aus)
            pbar.update(10)

            self.config.load_dtm(aus)
            pbar.update(10)

            self.config.join_features()
            pbar.update(10)

            self.config.calc_depth_grid(dtb)
            pbar.update(10)

            self.config.export_orientations(orientation_decimate)
            pbar.update(10)
            self.config.export_contacts(contact_decimate, intrusion_mode)
            pbar.update(10)
            self.config.test_interpolation(interpolation_spacing,
                                           misorientation,
                                           interpolation_scheme)
            pbar.update(10)

            self.config.export_faults(fault_decimate, min_fault_length,
                                      fault_dip)
            self.config.process_plutons(pluton_dip, pluton_form, dist_buffer,
                                        contact_decimate)
            pbar.update(20)

            # Seismic section is in the Hamersley model area
            # TODO: Implement this option better and test with Turner
            if (self.workflow['seismic_section']):
                self.config.extract_section_features(seismic_line_file="",
                                                     seismic_bbox_file="",
                                                     seismic_interp_file="")

            if (self.workflow['contact_dips']):
                self.config.propagate_contact_dips(
                    contact_dip, contact_orientation_decimate)

            if (self.workflow['formation_thickness']):
                self.config.calc_thickness(contact_decimate, null_scheme,
                                           thickness_buffer,
                                           max_thickness_allowed, self.c_l)

            if (self.workflow['fold_axial_traces']):
                self.config.create_fold_axial_trace_points(
                    fold_decimate, fat_step, close_dip)

            # Preprocess model inputs
            inputs = ()
            if (self.workflow['model_engine'] == 'geomodeller'):
                inputs = ('invented_orientations', 'intrusive_orientations',
                          'fat_orientations', 'fault_tip_contacts',
                          'contact_orientations')
            elif (self.workflow['model_engine'] == 'loopstructural'):
                inputs = ('invented_orientations', 'fat_orientations',
                          'contact_orientations')
            elif (self.workflow['model_engine'] == 'gempy'):
                inputs = ('invented_orientations', 'interpolated_orientations',
                          'fat_orientations', 'contact_orientations')
            elif (self.workflow['model_engine'] == 'noddy'):
                inputs = ()

            self.config.postprocess(inputs, self.workflow, use_interpolations,
                                    use_fat)
            pbar.update(10)

            self.config.save_cmap()

            if self.loopFilename is not None:
                self.config.update_projectfile()
                self.config.export_png()

        disable_quiet_mode()
Example #18
    def update_config(self,
                      out_dir,
                      overwrite='false',
                      loopFilename=None,
                      bbox_3d={
                          "minx": 0,
                          "maxx": 0,
                          "maxx": 0,
                          "maxy": 0,
                          "base": -10000,
                          "top": 1200,
                      },
                      dtm_crs={'init': 'EPSG:4326'},
                      proj_crs=None,
                      step_out=None,
                      quiet='None',
                      clut_path='',
                      model_engine='loopstructural',
                      run_flags=None,
                      **kwargs):
        """Creates a 'sub-project' Config object and pre-processes input data for some area.

        :param out_dir: Path to write output files to.
        :type out_dir: string
        :param overwrite: Allow overwriting the given out_dir if it exists; one of 'false', 'true' or 'in-place', defaults to 'false'
        :type overwrite: string, optional
        :param bbox_3d: 3D bounding box of coordinates and base/top values defining the area, defaults to { "minx": 0, "miny": 0, "maxx": 0, "maxy": 0, "base": -10000, "top": 1200, }
        :type bbox_3d: dict, optional
        :param dtm_crs: Set the projection of the dtm, defaults to {'init': 'EPSG:4326'}
        :type dtm_crs: dict, optional
        :param proj_crs: Set the projection of the input data, defaults to None
        :type proj_crs: dict, optional
        :param step_out: How far to consider outside the re-projected dtm, defaults to None
        :type step_out: int, optional
        :param quiet: Allow or block print statements and matplotlib figures, 'None' to quiet nothing, 'all' to quiet everything, 'no-figures' to disable plots and allow text output. Defaults to 'None'
        :type quiet: string, optional
        :param clut_path: Path to custom map colours file
        :type clut_path: string, optional
        :param model_engine: Which modelling engine to use and set associated flags for, defaults to loopstructural
        :type model_engine: string, optional
        :param run_flags: Global dictionary that defines custom parameters such as decimates and fault length; see https://github.com/Loop3D/map2loop-2/issues/56
        :type run_flags: dict, optional
        :param **kwargs:
        """

        self.update_workflow(model_engine)

        self.loopFilename = loopFilename
        if self.loopFilename is not None:
            if not os.path.exists(loopFilename):
                sys.exit("That project file path does not exist.")

        if bbox_3d["minx"] == 0 and bbox_3d["maxx"] == 0:
            bbox_3d.update({
                "minx": self.proj_bounds[0],
                "minx": self.proj_bounds[1],
                "minx": self.proj_bounds[2],
                "minx": self.proj_bounds[3],
            })

        self.clut_path = ""

        if proj_crs is None:
            proj_crs = self.proj_crs

        if step_out is None:
            step_out = self.step_out

        self.quiet = quiet

        bbox = tuple([
            bbox_3d["minx"], bbox_3d["miny"], bbox_3d["maxx"], bbox_3d["maxy"]
        ])
        minx, miny, maxx, maxy = bbox
        lat_point_list = [miny, miny, maxy, maxy, miny]
        lon_point_list = [minx, maxx, maxx, minx, minx]
        bbox_geom = Polygon(zip(lon_point_list, lat_point_list))
        polygon = gpd.GeoDataFrame(index=[0],
                                   crs=proj_crs,
                                   geometry=[bbox_geom])

        # Define the url queries if remote flag is set
        if self.geology_file is None:
            self.fetch_sources(bbox)

        # TODO: Make run flags global vars that can be updated here instead of in run
        if clut_path != "":
            self.clut_path = clut_path

        try:
            # Check if (perhaps edited) run_flags already exist
            self.run_flags = self.run_flags
        except Exception:
            # Otherwise set them up
            self.run_flags = {
                'aus': True,
                'deposits': "Fe,Cu,Au,NONE",
                'dtb': '',
                'orientation_decimate': 0,
                'contact_decimate': 5,
                'intrusion_mode': 0,
                'interpolation_spacing': 500,
                'misorientation': 30,
                'interpolation_scheme': 'scipy_rbf',
                'fault_decimate': 5,
                'min_fault_length': 5000,
                'fault_dip': 90,
                'pluton_dip': 45,
                'pluton_form': 'domes',
                'dist_buffer': 10,
                'contact_dip': -999,
                'contact_orientation_decimate': 5,
                'null_scheme': 'null',
                'thickness_buffer': 5000,
                'max_thickness_allowed': 10000,
                'fold_decimate': 5,
                'fat_step': 750,
                'close_dip': -999,
                'use_interpolations': True,
                'use_fat': True
            }

        # And copy in any new settings from the user
        if run_flags is not None:
            try:
                for key in self.run_flags.keys():
                    try:
                        self.run_flags[key] = run_flags[key]
                    except Exception:
                        pass
            except Exception:
                print('run_flags must be a dictionary, setting defaults.')

        kwargs = {'clut_path': self.clut_path, 'run_flags': self.run_flags}
        self.config = Config(out_dir, overwrite, self.geology_file,
                             self.fault_file, self.fold_file,
                             self.structure_file, self.mindep_file, bbox_3d,
                             polygon, step_out, dtm_crs, proj_crs, self.local,
                             self.quiet, self.loopFilename, self.c_l, **kwargs)

        self.config.preprocess()
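A hedged usage sketch for update_config, assuming an already-constructed project object (called proj here purely for illustration) and made-up bounds; only the keys supplied in run_flags override the defaults listed above:

proj.update_config(
    out_dir='./model-test',
    overwrite='true',
    bbox_3d={"minx": 500000, "miny": 7490000, "maxx": 545000,
             "maxy": 7520000, "base": -3200, "top": 1200},
    proj_crs={'init': 'EPSG:28350'},
    quiet='no-figures',
    run_flags={'fault_decimate': 10, 'min_fault_length': 3000})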