Code example #1
    def callback(self, verts):
        global click
        global the_table
        global ind
        facecolors = self.collection.get_facecolors()
        p = path.Path(verts)
        ind = p.contains_points(self.xys)
        for i in range(len(self.xys)):
            if ind[i]:
                facecolors[i] = Datum.colorin
            else:
                facecolors[i] = Datum.colorout
        if sum(ind) > 0:
            if click > 0:
                the_table.remove()
            the_table = plt.table(cellText=[[str(c) for c in show_data[i]]
                                            for i in range(len(self.xys))
                                            if ind[i]],
                                  colWidths=variablelength_select,
                                  colLabels=columns,
                                  loc='bottom',
                                  bbox=[0.1, -0.6, 0.8, .5])
            click = 1
        else:
            if click > 0:
                the_table.remove()
            click = 0

        self.canvas.draw_idle()
        self.canvas.widgetlock.release(self.lasso)
        del self.lasso
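The excerpt above depends on state from the surrounding lasso-selection class (self.xys, show_data, columns, and so on). As a minimal, self-contained sketch of the underlying pattern, with made-up points and polygon vertices, the core selection step looks like this:

import numpy as np
from matplotlib import path

rng = np.random.default_rng(0)
xys = rng.uniform(0.0, 1.0, size=(100, 2))                   # hypothetical scatter points
verts = [(0.2, 0.2), (0.8, 0.25), (0.7, 0.8), (0.3, 0.75)]   # hypothetical lasso polygon

p = path.Path(verts)
inside = p.contains_points(xys)   # boolean mask: True for points inside the polygon

print(inside.sum(), "of", len(xys), "points fall inside the polygon")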
Code example #2
File: surface.py  Project: hsaal/touchsim
    def locate(self, locs):
        """Maps from coordinates on a surface to region tags and ids.

        Args:
            locs (array): 2D array of coordinates in pixel space.

        Returns:
            Tuple containing a list of region tags and a vector of ids.
        """
        locs = np.atleast_2d(self.hand2pixel(locs))
        regions = -np.ones((locs.shape[0], ), dtype=np.int8)

        for b in range(self.num):
            p = path.Path(self.boundary[b])
            ind = p.contains_points(locs)
            regions[ind] = b

        tags = []
        for l in range(locs.shape[0]):
            if regions[l] < 0:
                tags.append('')
            else:
                tags.append(self.tags[regions[l]])

        return tags, regions
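A minimal sketch of the same region-lookup idea, using made-up boundary polygons rather than the touchsim surface: assign each point the index of the polygon that contains it, or -1 if none does.

import numpy as np
from matplotlib import path

boundaries = [
    np.array([(0, 0), (1, 0), (1, 1), (0, 1)]),   # region 0 (hypothetical)
    np.array([(2, 0), (3, 0), (3, 1), (2, 1)]),   # region 1 (hypothetical)
]
pts = np.array([(0.5, 0.5), (2.5, 0.5), (5.0, 5.0)])

regions = -np.ones(len(pts), dtype=int)
for b, verts in enumerate(boundaries):
    inside = path.Path(verts).contains_points(pts)
    regions[inside] = b   # later polygons overwrite earlier ones, as in locate()
# regions is now array([0, 1, -1])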
Code example #3
def bounded_point_sim(width, height, numpoints, boundaries, pixel_scale,
                      display_graphs):
    random_x = []
    random_y = []
    export_txt = ''
    np_bounds = np.loadtxt(boundaries, skiprows=1)
    comparison_path = path.Path(np_bounds)
    count = 0
    while count < numpoints:
        potential_x = np.random.uniform(0, width, 1)
        potential_y = np.random.uniform(0, height, 1)
        point_array = np.array([potential_x, potential_y]).reshape(1, 2)
        if comparison_path.contains_points(point_array):
            export_txt += str(potential_x[0] * pixel_scale) + ' ' + str(
                potential_y[0] * pixel_scale) + '\n'
            random_x.append(potential_x)
            random_y.append(potential_y)
            count += 1
    random_x = np.array(random_x)
    random_y = np.array(random_y)
    ret = save_plot_points(random_x, random_y, display_graphs)
    if ret == QMessageBox.Save:
        r = save_text_file(export_txt)
        compute_neighbor_distance(r[0], r[1], pixel_scale, display_graphs)
    else:
        return
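The loop above tests one candidate point per iteration. A vectorized variant of the same rejection-sampling idea, sketched with assumed inputs rather than the function's file-based boundary, draws candidates in batches and keeps only those inside the polygon:

import numpy as np
from matplotlib import path

def sample_in_polygon(verts, width, height, numpoints, rng=None):
    # Hypothetical helper: uniform points inside `verts`, drawn from a width x height box.
    rng = rng or np.random.default_rng()
    poly = path.Path(verts)
    kept, n = [], 0
    while n < numpoints:
        batch = rng.uniform((0, 0), (width, height), size=(numpoints, 2))
        inside = batch[poly.contains_points(batch)]
        kept.append(inside)
        n += len(inside)
    return np.concatenate(kept)[:numpoints]

pts = sample_in_polygon([(1, 1), (9, 2), (8, 8), (2, 7)], 10, 10, 500)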
Code example #4
File: surface.py  Project: hsaal/touchsim
    def sample_uniform(self, id_or_tag, **args):
        """Samples locations from within specified region.

        Args:
            id_or_tag (int or str): region ID number or tag identifying a unique
                region.

        Kwargs:
            num (int): Number of locations to sample (default: None).
            density (float): Density of locations to be sampled expressed as
                locations per cm^2 (default: SA1 density for specified region(s)).
                This parameter will only be evaluated if num is not given / set to None.
            seed (int): Random number seed

        Returns:
            2D array of coordinates in surface space.
        """
        if self.outline is None:
            raise RuntimeError("Cannot sample from surface without border.")

        seed = args.get('seed', None)
        if seed is not None:
            np.random.seed(seed)

        if type(id_or_tag) is str or id_or_tag is None:
            idx = self.tag2idx(id_or_tag)
        elif type(id_or_tag) is int or type(id_or_tag) is np.int64:
            idx = [id_or_tag]
        else:
            idx = id_or_tag

        num = args.get('num', None)
        if num is None:
            xy_list = []
            for i in idx:
                dens = args.get('density', self.density[('SA1', i)])
                dist = np.sqrt(dens) / 10. / self.pxl_per_mm
                b = bbox(self.boundary[i])
                xy = np.mgrid[b[0, 0]:b[1, 0] + 1. / dist:1. / dist,
                              b[0, 1]:b[1, 1] + 1. / dist:1. / dist]
                xy = xy.reshape(2, xy.shape[1] * xy.shape[2]).T
                xy += np.random.randn(xy.shape[0], xy.shape[1]) / dist / 5.
                p = path.Path(self.boundary[i])
                ind = p.contains_points(xy)
                xy = xy[ind, :]
                xy_list.append(xy)
            xy = np.concatenate(xy_list)

        else:
            xy = np.zeros((num, 2))
            coords = np.concatenate([self._coords[i] for i in idx])
            for i in range(num):
                xy[i] = coords[np.random.randint(coords.shape[0])]

        return self.pixel2hand(xy)
Code example #5
	def get_grid(RefPoints):
	
		p=path.Path(windowVerts)
		inWindow=p.contains_points(RefPoints[:,:2]) #first 2 columns of RefPoints is x and y

		windowed=RefPoints[inWindow,:2]
		gs=squareform(pdist(windowed,'euclidean')) #does the same thing as pdist2
		gsize=np.mean(np.sort(gs)[:,1]) #sort the distances, find the closest, non self-referencing points

		grid_x, grid_y = np.meshgrid(
			np.linspace(RefMin[0],RefMax[0],int((RefMax[0]-RefMin[0])/gsize)),
			np.linspace(RefMin[1],RefMax[1],int((RefMax[1]-RefMin[1])/gsize)),
			indexing='xy')
		points=RefPoints[:,:2]

		grid_RefVal=griddata(points,RefPoints[:,-1], (grid_x, grid_y), method='linear')
Code example #6
def make_landmask(bm, lon_grid, lat_grid):
    '''Creates a land/sea mask for the supplied lon/lat grid.

    Parameters:
    -bm: Basemap object
        basemap instance whose coastline polygons define land and lake outlines
    -lon_grid, lat_grid: array (2D)
        longitude and latitude grids on which the mask is evaluated

    Returns:
    -dem_landmask: array (2D)
        mask with 1 over land and 0 over water, same shape as lon_grid

    may 2015, [email protected]
    '''

    from matplotlib import path
    import numpy as np

    dem_lcn = np.column_stack((lon_grid.ravel(), lat_grid.ravel()))  # (N, 2) array of grid points
    polygons = bm.coastpolygons
    poly_type = bm.coastpolygontypes

    landmask = np.zeros(len(dem_lcn))
    for nPoly in range(len(poly_type)):
        poly = polygons[nPoly]
        poly = list(zip(*poly))  # convert (x_coords, y_coords) into (x, y) vertex pairs
        poly_path = path.Path(poly)
        mask = poly_path.contains_points(dem_lcn)
        if poly_type[nPoly] == 1:
            landmask[mask == 1] = 1
        elif poly_type[nPoly] == 2:
            landmask[mask == 1] = 0
    dem_landmask = np.reshape(landmask, np.shape(lon_grid))

    return dem_landmask
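A minimal sketch of the masking step in isolation, with a single made-up polygon in place of the Basemap coastline data:

import numpy as np
from matplotlib import path

lon = np.linspace(-10.0, 10.0, 50)
lat = np.linspace(40.0, 60.0, 50)
lon_grid, lat_grid = np.meshgrid(lon, lat)

coast = [(-5, 45), (5, 44), (7, 55), (-4, 56)]   # hypothetical coastline polygon
pts = np.column_stack((lon_grid.ravel(), lat_grid.ravel()))

landmask = path.Path(coast).contains_points(pts).reshape(lon_grid.shape)   # True over land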
Code example #7
    rgrDataVarAct = rgrDataVar[:, :, :]
    rgrDataVarActT2M = rgrMonthlyData[:, :, :, :, rgsVars.index('T2M')]

    if (rgsVars[va] == 'ProbSnow') | (rgsVars[va] == 'Snow_PRdays') | (
            rgsVars[va] == 'Rain_PRdays'):
        # if se == 0:
        rgrDataVarAct = rgrDataVarAct * 365.25
        rgrDataVarEIAct = rgrDataVarEIAct * 365.25
        rgrROdataAct = rgrROdataAct * 365.25

    ii = 0
    # iYrange=[7,7,7]
    for sr in [2, 5, 1]:
        ax = plt.subplot(gs1[ii, 0])
        rgdShapeAct = grShapeFiles[rgsSubregions[sr]]
        PATH = path.Path(rgdShapeAct)
        for da in range(len(rgsData)):
            if rgsData[da] == 'Rad-Soundings':
                TEST = np.copy(rgrLonAct)
                TEST[TEST > 180] = TEST[TEST > 180] - 360
                flags = PATH.contains_points(
                    np.hstack((TEST[:, np.newaxis], rgrLatAct[:, np.newaxis])))
                rgiROstations = np.sum(flags)
                rgiYearsAct = np.array(range(1979, 2018, 1))
                rgiSelYY = ((rgiYearsAct >= 1979) & (rgiYearsAct <= 2009))
                rgrTMPData = np.nanmean(
                    np.nanmean(rgrROdataAct[:, :, flags], axis=(1)) -
                    np.nanmean(rgrROdataAct[rgiSelYY, :, :][:, :, flags],
                               axis=(0, 1))[None, :],
                    axis=1)
                sColor = 'k'
Code example #8
	def average(self):
		# if not self.aligned:
			# self.ui.statusLabel.setText("Align prior to averaging. Idle.")
			# return
		
		self.ui.statusLabel.setText("Averaging, applying grid . . .")
		QtGui.qApp.processEvents()
		
		#temporarily shift all data such that it appears in the first cartesian quadrant
		tT=np.amin(self.rO,axis=0)
		self.rO, self.fO, self.rp, self.flp=self.rO-tT, self.fO-tT, self.rp-tT, self.flp-tT
		
		#use max to get a 'window' for assessing grid spacing
		RefMax=np.amax(self.rO,axis=0)
		RefMin=np.amin(self.rO,axis=0)
		windowVerts=np.matrix([[0.25*RefMin[0], 0.25*RefMin[1]],
		[0.25*RefMin[0], 0.25*(RefMax[1])],
		[0.25*(RefMax[1]), 0.25*(RefMax[1])],
		[0.25*(RefMax[0]), 0.25*(RefMin[1])]]);
		
		p=path.Path(windowVerts)
		inWindow=p.contains_points(self.rp[:,:2]) #first 2 columns of RefPoints is x and y
		
		windowed=self.rp[inWindow,:2]
		
		gs=squareform(pdist(windowed,'euclidean')) #does the same thing as pdist2
		
		gsize=np.mean(np.sort(gs)[:,1]) #sort the distances, find the mean distance between non self-referencing points
		
		#grid the reference based on gsize, bumping out the grid by 10% in either direction
		grid_x, grid_y = np.meshgrid(
		np.linspace(1.1*RefMin[0],1.1*RefMax[0],int((1.1*RefMax[0]-1.1*RefMin[0])/gsize)),
		np.linspace(1.1*RefMin[1],1.1*RefMax[1],int((1.1*RefMax[1]-1.1*RefMin[1])/gsize)), 
		indexing='xy')
		
		#apply the grid to the reference data
		grid_Ref=griddata(self.rp[:,:2],self.rp[:,-1],(grid_x,grid_y),method='linear')
		
		#apply the grid to the aligned data
		grid_Align=griddata(self.flp[:,:2],self.flp[:,-1],(grid_x,grid_y),method='linear')
		
		self.ui.statusLabel.setText("Averaging using grid . . .")
		QtGui.qApp.processEvents()
		
		#average z values
		grid_Avg=(grid_Ref+grid_Align)/2
		
		#make sure that there isn't anything averaged outside the floating outline
		p=path.Path(self.rO[:,:2])
		inTest=np.hstack((np.ravel(grid_x.T)[np.newaxis].T,np.ravel(grid_y.T)[np.newaxis].T))
		inOutline=p.contains_points(inTest)
		
		#averaged points
		self.ap = np.hstack((inTest[inOutline,:], \
					np.ravel(grid_Avg.T)[np.newaxis].T[inOutline]))
					
		#move everything back to original location
		self.rO, self.fO, self.rp, self.flp, self.ap = \
		self.rO+tT, self.fO+tT, self.rp+tT, self.flp+tT, self.ap+tT
		
		self.ui.statusLabel.setText("Rendering . . .")
		QtGui.qApp.processEvents()
		
		#show it
		color=(int(0.2784*255),int(0.6745*255),int(0.6941*255))
		_, self.aActor, _, = gen_point_cloud(self.ap,color,self.PointSize)
		self.ren.AddActor(self.aActor)
		
		s,nl,axs=self.get_scale()

		self.aActor.SetScale(s)
		self.aActor.Modified()
		
		#update
		self.ui.vtkWidget.update()
		self.ui.vtkWidget.setFocus()
		self.ui.statusLabel.setText("Averaging complete. Idle.")
		self.averaged=True
Code example #9
File: align_average.py  Project: NikStoyanov/pyCM
    def average(self):

        self.unsaved_changes = True

        if hasattr(self, 'aActor'):
            self.ren.RemoveActor(self.aActor)

        self.ui.statLabel.setText("Averaging, applying grid . . .")
        QtWidgets.QApplication.processEvents()

        #temporarily shift all data such that it appears in the first cartesian quadrant
        tT = np.amin(self.rO, axis=0)
        self.rO, self.fO, self.rp, self.flp = self.rO - tT, self.fO - tT, self.rp - tT, self.flp - tT

        #use max to get a 'window' for assessing grid spacing
        RefMax = np.amax(self.rO, axis=0)
        RefMin = np.amin(self.rO, axis=0)
        windowVerts = np.matrix([[0.25 * RefMin[0], 0.25 * RefMin[1]],
                                 [0.25 * RefMin[0], 0.25 * (RefMax[1])],
                                 [0.25 * (RefMax[1]), 0.25 * (RefMax[1])],
                                 [0.25 * (RefMax[0]), 0.25 * (RefMin[1])]])

        p = path.Path(windowVerts)
        inWindow = p.contains_points(
            self.rp[:, :2])  #first 2 columns of RefPoints is x and y

        windowed = self.rp[inWindow, :2]

        #populate grid size if attribute doesn't exist
        if not hasattr(self, 'gsize'):
            gs = squareform(pdist(windowed, 'euclidean'))
            self.gsize = np.mean(np.sort(gs)[:, 1])
            self.ui.gridInd.setValue(self.gsize)
        else:
            self.gsize = self.ui.gridInd.value()

        #grid the reference based on gsize, bumping out the grid by 10% in either direction
        grid_x, grid_y = np.meshgrid(
            np.linspace(1.1 * RefMin[0], 1.1 * RefMax[0],
                        int((1.1 * RefMax[0] - 1.1 * RefMin[0]) / self.gsize)),
            np.linspace(1.1 * RefMin[1], 1.1 * RefMax[1],
                        int((1.1 * RefMax[1] - 1.1 * RefMin[1]) / self.gsize)),
            indexing='xy')

        #apply the grid to the reference data
        grid_Ref = griddata(self.rp[:, :2],
                            self.rp[:, -1], (grid_x, grid_y),
                            method='linear')

        #apply the grid to the aligned data
        grid_Align = griddata(self.flp[:, :2],
                              self.flp[:, -1], (grid_x, grid_y),
                              method='linear')

        self.ui.statLabel.setText("Averaging using grid . . .")
        QtWidgets.QApplication.processEvents()

        #average z values
        grid_Avg = (grid_Ref + grid_Align) / 2

        #make sure that there isn't anything averaged outside the floating outline
        p = path.Path(self.rO[:, :2])
        inTest = np.hstack((np.ravel(grid_x.T)[np.newaxis].T,
                            np.ravel(grid_y.T)[np.newaxis].T))
        inOutline = p.contains_points(inTest)

        #averaged points
        self.ap = np.hstack((inTest[inOutline,:], \
                    np.ravel(grid_Avg.T)[np.newaxis].T[inOutline]))

        #move everything back to original location
        self.rO, self.fO, self.rp, self.flp, self.ap = \
        self.rO+tT, self.fO+tT, self.rp+tT, self.flp+tT, self.ap+tT

        self.ui.statLabel.setText("Rendering . . .")
        QtWidgets.QApplication.processEvents()

        #show it
        color = (int(0.2784 * 255), int(0.6745 * 255), int(0.6941 * 255))
        _, self.aActor, _, = gen_point_cloud(self.ap, color, self.PointSize)
        self.ren.AddActor(self.aActor)

        s, nl, axs = self.get_scale()

        self.aActor.SetScale(s)
        self.aActor.Modified()

        #update
        self.ui.vtkWidget.update()
        self.ui.vtkWidget.setFocus()
        self.ui.statLabel.setText("Averaging complete.")
        self.averaged = True
        self.ui.averageButton.setStyleSheet(
            "background-color :rgb(77, 209, 97);")
Code example #10
File: runslip.py  Project: geofiber/MudPy
def run_inversion(home,
                  project_name,
                  run_name,
                  fault_name,
                  model_name,
                  GF_list,
                  G_from_file,
                  G_name,
                  epicenter,
                  rupture_speed,
                  num_windows,
                  reg_spatial,
                  reg_temporal,
                  nfaults,
                  beta,
                  decimate,
                  bandpass,
                  solver,
                  bounds,
                  weight=False,
                  Ltype=2,
                  target_moment=None,
                  data_vector=None,
                  onset_file=None):
    '''
    Assemble G and d, determine smoothing and run the inversion
    '''
    from mudpy import inverse as inv
    from mudpy.forward import get_mu_and_area
    from numpy import zeros, dot, array, squeeze, expand_dims, empty, tile, eye, ones, arange, load, size, genfromtxt
    from numpy import where, sort, r_
    from numpy.linalg import lstsq
    from scipy.sparse import csr_matrix as sparse
    from scipy.optimize import nnls
    from datetime import datetime
    import gc
    from matplotlib import path

    t1 = datetime.now()
    #Get data vector
    if data_vector is None:
        d = inv.getdata(home,
                        project_name,
                        GF_list,
                        decimate,
                        bandpass=bandpass)
    else:
        d = load(data_vector)
    #Get GFs
    G = inv.getG(home,
                 project_name,
                 fault_name,
                 model_name,
                 GF_list,
                 G_from_file,
                 G_name,
                 epicenter,
                 rupture_speed,
                 num_windows,
                 decimate,
                 bandpass,
                 onset_file=onset_file)
    print(G.shape)
    gc.collect()
    #Get data weights
    if weight:
        print('Applying data weights')
        w = inv.get_data_weights(home, project_name, GF_list, d, decimate)
        W = empty(G.shape)
        W = tile(w, (G.shape[1], 1)).T
        WG = empty(G.shape)
        WG = W * G
        wd = w * d.squeeze()
        wd = expand_dims(wd, axis=1)
        #Clear up extraneous variables
        W = None
        w = None
        #Define inversion quantities
        x = WG.transpose().dot(wd)
        print('Computing G\'G')
        K = (WG.T).dot(WG)
    else:
        #Define inversion quantities if no weights
        x = G.transpose().dot(d)
        print('Computing G\'G')
        K = (G.T).dot(G)
    #Get regularization matrices (set to 0 matrix if not needed)
    static = False  #Is it just a static inversion?
    if size(reg_spatial) > 1:
        if Ltype == 2:  #Laplacian smoothing
            Ls = inv.getLs(home, project_name, fault_name, nfaults,
                           num_windows, bounds)
        elif Ltype == 0:  #Tikhonov smoothing
            N = nfaults[0] * nfaults[
                1] * num_windows * 2  #Get total no. of model parameters
            Ls = eye(N)
        elif Ltype == 3:  #moment regularization
            N = nfaults[0] * nfaults[
                1] * num_windows * 2  #Get total no. of model parameters
            Ls = ones((1, N))
            #Add rigidity and subfault area
            mu, area = get_mu_and_area(home, project_name, fault_name,
                                       model_name)
            istrike = arange(0, N, 2)
            Ls[0, istrike] = area * mu
            idip = arange(1, N, 2)
            Ls[0, idip] = area * mu
            #Modify inversion quantities
            x = x + Ls.T.dot(target_moment)
        else:
            print('ERROR: Unrecognized regularization type requested')
            return
        Ninversion = len(reg_spatial)
    else:
        Ls = zeros(K.shape)
        reg_spatial = array([0.])
        Ninversion = 1
    if size(reg_temporal) > 1:
        Lt = inv.getLt(home, project_name, fault_name, num_windows)
        Ninversion = len(reg_temporal) * Ninversion
    else:
        Lt = zeros(K.shape)
        reg_temporal = array([0.])
        static = True
    #Make L's sparse
    Ls = sparse(Ls)
    Lt = sparse(Lt)
    #Get regularization transposes for ABIC
    LsLs = Ls.transpose().dot(Ls)
    LtLt = Lt.transpose().dot(Lt)
    #inflate
    Ls = Ls.todense()
    Lt = Lt.todense()
    LsLs = LsLs.todense()
    LtLt = LtLt.todense()
    #off we go
    dt = datetime.now() - t1
    print('Preprocessing wall time was ' + str(dt))
    print('\n--- RUNNING INVERSIONS ---\n')
    ttotal = datetime.now()
    kout = 0
    for kt in range(len(reg_temporal)):
        for ks in range(len(reg_spatial)):
            t1 = datetime.now()
            lambda_spatial = reg_spatial[ks]
            lambda_temporal = reg_temporal[kt]
            print('Running inversion ' + str(kout + 1) + ' of ' +
                  str(Ninversion) + ' at regularization levels: ls =' +
                  repr(lambda_spatial) + ' , lt = ' + repr(lambda_temporal))
            if static:  #Statics-only inversion, no Lt matrix
                Kinv = K + (lambda_spatial**2) * LsLs
                Lt = eye(len(K))
                LtLt = Lt.T.dot(Lt)
            else:  #Mixed inversion
                Kinv = K + (lambda_spatial**2) * LsLs + (lambda_temporal**
                                                         2) * LtLt
            if solver.lower() == 'lstsq':
                sol, res, rank, s = lstsq(Kinv, x)
            elif solver.lower() == 'nnls':
                x = squeeze(x.T)
                try:
                    sol, res = nnls(Kinv, x)
                except:
                    print('+++ WARNING: No solution found, writing zeros.')
                    sol = zeros(G.shape[1])
                x = expand_dims(x, axis=1)
                sol = expand_dims(sol, axis=1)
            else:
                print('ERROR: Unrecognized solver \'' + solver + '\'')

            #Force faults outside a polygon to be zero
            print('WARNING: Using fault polygon to force solutions to zero')
            #load fault
            fault = genfromtxt(home + project_name + '/data/model_info/' +
                               fault_name)
            polygon = genfromtxt(
                '/Users/dmelgarm/Oaxaca2020/etc/zero_fault.txt')
            polygon = path.Path(polygon)
            i = where(polygon.contains_points(fault[:, 1:3]) == False)[0]
            i = sort(r_[i * 2, i * 2 + 1])
            N = nfaults[0] * 2
            i = r_[i, i + N, i + 2 * N, i + 3 * N]
            sol[i] = 0

            #Compute synthetics
            ds = dot(G, sol)

            #Get stats
            L2, Lmodel = inv.get_stats(Kinv, sol, x)
            VR, L2data = inv.get_VR(home, project_name, GF_list, sol, d, ds,
                                    decimate, WG, wd)
            #VR=inv.get_VR(WG,sol,wd)
            #ABIC=inv.get_ABIC(WG,K,sol,wd,lambda_spatial,lambda_temporal,Ls,LsLs,Lt,LtLt)
            ABIC = inv.get_ABIC(G, K, sol, d, lambda_spatial, lambda_temporal,
                                Ls, LsLs, Lt, LtLt)
            #Get moment
            Mo, Mw = inv.get_moment(home, project_name, fault_name, model_name,
                                    sol)
            #If a rotational offset was applied then reverse it for output to file
            if beta != 0:
                sol = inv.rot2ds(sol, beta)
            #Write log
            inv.write_log(home, project_name, run_name, kout, rupture_speed,
                          num_windows, lambda_spatial, lambda_temporal, beta,
                          L2, Lmodel, VR, ABIC, Mo, Mw, model_name, fault_name,
                          G_name, GF_list, solver, L2data)
            #Write output to file
            inv.write_synthetics(home, project_name, run_name, GF_list, G, sol,
                                 ds, kout, decimate)
            inv.write_model(home,
                            project_name,
                            run_name,
                            fault_name,
                            model_name,
                            rupture_speed,
                            num_windows,
                            epicenter,
                            sol,
                            kout,
                            onset_file=onset_file)
            kout += 1
            dt1 = datetime.now() - t1
            dt2 = datetime.now() - ttotal
            print('... inversion wall time was ' + str(dt1) +
                  ', total wall time elapsed is ' + str(dt2))
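The polygon-masking step near the end of the inversion can be shown in isolation. A minimal sketch with assumed arrays (not MudPy's fault-file layout): zero out parameters whose coordinates fall outside a polygon.

import numpy as np
from matplotlib import path

coords = np.array([(0.5, 0.5), (1.5, 0.5), (0.5, 1.5)])   # hypothetical subfault coordinates
sol = np.array([1.0, 2.0, 3.0])                           # hypothetical model parameters

keep = path.Path([(0, 0), (1, 0), (1, 1), (0, 1)]).contains_points(coords)
sol[~keep] = 0.0   # force parameters outside the polygon to zero
# sol is now array([1., 0., 0.])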
Code example #11
File: writer.py  Project: jimporter/bfg9000
import json
import os.path

from ... import path
from ... import safe_str
from ... import shell
from ...iterutils import isiterable
from ...tools.common import Command

_rule_handlers = {}

filepath = path.Path('compile_commands.json')


class CompDB:
    def __init__(self, env):
        self._env = env

        self._commands = []

    def _stringify(self, thing, directory=None):
        thing = safe_str.safe_str(thing)
        if isinstance(thing, safe_str.jbos):
            return safe_str.jbos.from_iterable(
                self._stringify(i, directory) for i in thing.bits
            )
        elif isinstance(thing, path.BasePath):
            result = thing.string(self._env.base_dirs)
            if thing.root == path.Root.builddir:
                dir_str = directory.string(self._env.base_dirs)
                return os.path.relpath(result, dir_str)