def test_project():
    """
    Test basic stuff.
    """
    project = Project.from_las('tests/1.las')
    assert len(project) == 1

    w = Well.from_las('tests/2.las')
    project += w
    assert w in project
    assert len(project) == 2

    project += project

    assert project.uwis[0] == 1

    s = "<table><tr><th>UWI</th><th>Data</th><th>Curves</th></tr><tr><td>1</td>"
    assert s in project._repr_html_()

    # Check __getitem__.
    assert project[1] == w
    assert len(project[:2]) == 2
    l = [0, 1]
    assert len(project[l]) == 2

    assert len(project.get_mnemonics(['DT'])) == 4

    html = project.curve_table_html()
    assert '<table><tr><th>Idx</th><th>UWI</th><th>Data</th><th>Quality</th>' in html
    assert "<th>DPHI_SAN</th>" in html
    s =  """<td style="background-color:#CCEECC; line-height:80%; padding:5px 4px 2px 4px;">DTS"""
    assert s in html
Example #2
def test_project():
    """
    Test basic stuff.
    """
    project = Project.from_las('tests/1.las')
    assert len(project) == 1

    w = Well.from_las('tests/2.las')
    project += w
    assert w in project
    assert len(project) == 2

    project += project

    assert project.uwis[0] == 1

    s = "<table><tr><th>UWI</th><th>Data</th><th>Curves</th></tr><tr><td>1</td>"
    assert s in project._repr_html_()

    # Check __getitem__.
    assert project[1] == w
    assert len(project[:2]) == 2
    l = [0, 1]
    assert len(project[l]) == 2

    assert len(project.get_mnemonics(['DT'])) == 4

    html = project.curve_table_html()
    assert "<table><tr><th>UWI</th><th>Data</th>" in html
    assert "<th>DPHI_SAN</th>" in html
    s =  """<td style="background-color:#CCEECC; line-height:80%; padding:5px 4px 2px 4px;">DTS<div style="font-size:80%; float:right; padding:4px 0px 4px 6px; color:#CCCCCC;"></div><br /><span style="font-size:70%; color:#33AA33">us/ft</span></td>"""
    assert s in html
Example #3
def test_despike():
    """
    Test despiker with even window and z != 2.
    """
    well = Well.from_las(FNAME)
    gr = well.data['GR']
    assert gr.max() - gr.despike(50, z=1).max() - 91.83918 < 0.001
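For context, a rough sketch of the rolling-median idea behind a despiker, in plain NumPy; this is illustrative only and not welly's exact implementation:

import numpy as np

def despike_sketch(values, window=50, z=2):
    """Replace samples further than z standard deviations from the
    local median with that median (illustrative only)."""
    out = np.asarray(values, dtype=float).copy()
    half = window // 2
    for i in range(out.size):
        win = out[max(0, i - half):i + half + 1]
        med = np.median(win)
        if abs(out[i] - med) > z * win.std():
            out[i] = med
    return out

spiky = np.sin(np.linspace(0, 10, 200)) * 10 + 50
spiky[100] = 500                      # inject an obvious spike
assert despike_sketch(spiky, z=1)[100] < 100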
Example #4
def wells_load(path2files, fluids, resample=0):
    filelist = os.listdir(path2files)
    fluid_csv = fluids
    wellsdataframe = pd.DataFrame()
    for f in filelist:
        #Read in the LAS file
        w = Well.from_las(path2files + f)

        #convert well data to a pandas dataframe
        w_df = pd.DataFrame(w.data)

        #add well name to first column
        w_df['Well'] = f

        #add depth column
        dt = w.data['DT']
        w_df["TD"] = dt.basis

        #Adding labels for fluids
        #Reading in the fluid information and setting a fluid column in well dataframe
        fluid_labels = pd.read_csv(fluid_csv)
        fluids = fluid_labels[fluid_labels['Well'] == f]
        fluids = fluids.fillna(value=99999)

        #Setting top and base limits for various fluids
        topgas = int(fluids.iloc[0]['topgas'])
        basegas = int(fluids.iloc[0]['basegas'])
        topoil = int(fluids.iloc[0]['topoil'])
        baseoil = int(fluids.iloc[0]['baseoil'])
        topcond = int(fluids.iloc[0]['topcond'])
        basecond = int(fluids.iloc[0]['basecond'])

        #Assigning fluid fluid_labels
        #Gas=1, Oil=2 Condensate=4
        #Gas and Oil = 3
        #Gas and Condensate = 5
        #Oil and Condensate = 6
        w_df['FLUID'] = 0
        w_df['Flag_gas'] = w_df['TD'].between(topgas,
                                              basegas).astype('int') * 1
        w_df['Flag_oil'] = w_df['TD'].between(topoil,
                                              baseoil).astype('int') * 2
        w_df['Flag_cond'] = w_df['TD'].between(topcond,
                                               basecond).astype('int') * 4
        w_df['FLUID'] = w_df['Flag_gas'] + w_df['Flag_oil'] + w_df['Flag_cond']
        # drop() returns a new frame, so keep the result.
        w_df = w_df.drop(['Flag_gas', 'Flag_oil', 'Flag_cond'], axis=1)

        #Resampling by a factor
        if resample > 0:
            w_df = w_df.iloc[::int(resample), :]

        #Drops columns only with NaNs
        a = w_df.dropna(axis=1, how='all')

        #Fill remaining NaNs by forward-filling, then back-filling
        b = a.fillna(method='ffill')
        c = b.fillna(method='bfill')
        wellsdataframe = pd.concat([wellsdataframe, c], axis=0)
    return wellsdataframe
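A hedged usage sketch for wells_load as defined above; it assumes a folder of LAS files and a fluids CSV with Well, topgas, basegas, topoil, baseoil, topcond and basecond columns, and the paths here are placeholders:

logs = wells_load('data/las/', 'data/fluid_contacts.csv', resample=4)
print(logs.groupby('Well')['FLUID'].value_counts())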
def load_data(filename='Data/Poseidon1Decim.LAS'):
    """Fake data loader

    Returns:
        pandas.DataFrame: Dataframe containing info
    """
    w = Well.from_las(filename)
    return w.df()
Example #6
def test_well_plot():
    """
    Tests mpl image of well.
    """
    well = Well.from_las(FNAME)

    fig = well.plot(tracks=['MD', 'GR', 'DT'], return_fig=True)

    return fig
def test_curve_plot():
    """
    Tests mpl image of curve.
    """
    well = Well.from_las(FNAME)

    fig = well.data['GR'].plot(return_fig=True)

    return fig
Example #8
def test_well_plot():
    """
    Tests mpl image of well.
    """
    well = Well.from_las(FNAME)

    fig = well.plot(tracks=['MD', 'GR', 'DT'], return_fig=True)

    return fig
def test_curve_2d_plot():
    """
    Tests mpl image of curve as VD display.
    """
    well = Well.from_las(FNAME)

    fig = well.data['GR'].plot_2d(return_fig=True)

    return fig
Example #10
    def loadlas(self):
        self.getfilename()
        #        filename = self.le_filename.text()
        self.las = lasio.read(self.las_filename)

        self.cbLogDisplay.clear()
        for crv in self.las.curves:
            if crv.mnemonic != "DEPT":
                name, mnemonic, min1, max1, reversed1, units, plottype, colour = helperFunctions.Find_Curve_Data(
                    crv.mnemonic, curves_info, search='mnemonic')
                self.las.curves.minimum = min1
                self.las.curves.maximum = max1
                self.las.curves.reversed = reversed1
                self.las.curves.units = units
                self.las.curves.plottype = plottype
                self.las.curves.colour = colour

                if name is None:
                    name = crv.mnemonic

                crv.mnemonic = name

                #                print('crv',crv.mnemonic,crv.unit,plottype)
                df = self.las.df()
                if crv.unit.lower() == "ohm.m":
                    crv.unit = "ohm-m"
                if crv.unit.lower() == "us/ft":
                    df[crv.mnemonic] = helperFunctions.ConvertCurveToMetric(
                        df[crv.mnemonic], conversion=3.281)
                    self.las.set_data_from_df(df)
                if crv.unit.lower() == "g/cm3" or crv.unit.lower() == "gm/cc":
                    df[crv.mnemonic] = helperFunctions.ConvertCurveToMetric(
                        df[crv.mnemonic], conversion=1000)
                    self.las.set_data_from_df(df)

                self.cb_x.addItem(name)
                self.cb_y.addItem(name)
                self.cb_points.addItem(name)

        self.calculate_elastic_logs()

        self.curve_displ_new = []
        for cd in curve_displ:
            self.cbLogDisplay.addItem(cd['name'])
            new_list = helperFunctions.CheckCurvesDisplay(
                cd, self.las.curvesdict)
            z = {**{'name': cd['name']}, **{'curves': new_list}}
            self.curve_displ_new.append(dict(z))

#        print("here",curve_displ_new)
#        print(self.las.df().describe())

        self.w = Well.from_lasio(self.las)
        mainWnd.setWindowTitle(mainWnd.windowTitle() + "   File: " +
                               self.las_filename + "   UWI: " +
                               self.w.las.header['Well']['UWI'].value)
def test_well_synthetic_plot():
    """
    Tests mpl image of synthetic.
    """
    w = Well.from_las(FNAME)
    w.make_synthetic()

    fig = w.data['Synthetic'].plot(return_fig=True)

    return fig
def test_deviation():
    """
    Test that we can load a deviation survey and compute position.
    """
    well = Well.from_las(FNAME)
    dev = np.loadtxt(DNAME, delimiter=",", skiprows=1)
    well.location.add_deviation(dev)
    assert well.location.position.shape == (46, 3)
    assert well.location.md2tvd(1000) - 987.03517 < 0.001
    assert well.location.tvd2md(987.03517) - 1000 < 0.001
Example #13
def test_deviation():
    """
    Test that we can load a deviation survey and compute position.
    """
    well = Well.from_las(FNAME)
    dev = np.loadtxt(DNAME, delimiter=',', skiprows=1)
    well.location.add_deviation(dev)
    assert well.location.position.shape == (46, 3)
    assert well.location.md2tvd(1000) - 987.03517 < 0.001
    assert well.location.tvd2md(987.03517) - 1000 < 0.001
Example #14
def test_canstrat():
    """
    Test basic stuff.
    """
    w = Well.from_las('tests/P-129_out.LAS')
    s = Striplog.from_csv('tests/K90_strip_pred.csv')
    w.data['test'] = s
    dat = w.to_canstrat(key='test', log='K   90', as_text=True)

    s7 = "K   907   3960 3966L0                                                           "
    assert s7 in dat
def test_read():
    """
    Test reading for single number and array.
    """
    well = Well.from_las(FNAME)
    gr = well.data['GR']

    assert gr.read_at(1000) - 109.414177 < 0.001

    actual = gr.read_at([500, 1000, 1500])
    desired = np.array([91.29946709, 109.4141766, 64.55931458])
    np.testing.assert_allclose(actual, desired)
Example #16
def test_html_repr():
    well = Well.from_las(FNAME)
    html = well._repr_html_()

    name = """<table><tr><th style="text-align:center;" colspan="2">Kennetcook #2<br><small>Long = 63* 45'24.460  W</small></th></tr>"""
    data = """<tr><td><strong>data</strong></td><td>"""
    prov = """<tr><td><strong>province</strong></td><td>Nova Scotia</td></tr>"""
    assert name in html
    assert data in html
    assert prov in html
    for d in ['HCAL', 'RLA1', 'DT', 'DPHI_LIM', 'RLA3', 'RT_HRLT', 'CALI', 'DTS', 'DPHI_DOL', 'RLA5', 'RXO_HRLT', 'RLA4', 'SP', 'RXOZ', 'NPHI_LIM', 'DPHI_SAN', 'RLA2', 'PEF', 'RHOB', 'NPHI_SAN', 'RM_HRLT', 'DEPT', 'NPHI_DOL', 'GR', 'DRHO']:
        assert d in html
def test_well_remap():
    """
    This is about loading messy data from LAS by renaming and transforming
    fields.
    """

    def transform_ll(text):
        """
        The transforming function.
        """

        def callback(match):
            d = match.group(1).strip()
            m = match.group(2).strip()
            s = match.group(3).strip()
            c = match.group(4).strip()
            if c.lower() in ("w", "s") and d[0] != "-":
                d = "-" + d
            return " ".join([d, m, s])

        regex = r".+?([-0-9]+?).? ?([0-9]+?).? ?([\.0-9]+?).? +?([NESW])"
        pattern = re.compile(regex, re.I)
        text = pattern.sub(callback, text)
        return utils.dms2dd([float(i) for i in text.split()])

    remap = {
        "LATI": "LOC",  # Use LOC for the parameter LATI.
        "LONG": "UWI",  # Use UWI for the parameter LONG.
        "SECT": None,  # Use nothing for the parameter SECT.
        "RANG": None,  # Use nothing for the parameter RANG.
        "TOWN": None,  # Use nothing for the parameter TOWN.
    }

    funcs = {
        "LATI": transform_ll,  # Pass LATI through this function before load.
        "LONG": transform_ll,  # Pass LONG through it too.
        "UWI": lambda x: "No name, oh no!",
    }

    well = Well.from_las(FNAME, remap=remap, funcs=funcs)

    # Check some basics.
    assert (well.location.latitude - 45.20951027) < 0.001
    assert well.uwi == "No name, oh no!"

    # Check CRS
    well.location.crs_from_epsg(4269)
    assert well.location.crs.data["no_defs"]
    assert well.location.crs.to_string() == "+init=epsg:4269 +no_defs"

    well.location.crs_from_string("+init=epsg:4267")
    assert well.location.crs.init == "epsg:4267"
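As a side note, the latitude check above works because degrees, minutes and seconds convert to decimal degrees as D + M/60 + S/3600. A standalone sketch of that arithmetic (my own helper, not welly's utils.dms2dd):

def dms_to_decimal(d, m, s):
    """Convert degrees/minutes/seconds to decimal degrees (sign taken from d)."""
    sign = -1 if d < 0 else 1
    return sign * (abs(d) + m / 60.0 + s / 3600.0)

print(dms_to_decimal(45, 12, 34.2))   # ~45.2095, close to the value tested above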
def make_well_project(laspath='data/las/', stripath='data/tops/'):
    """
    Return a welly Project of wells, keyed internally by base filename,
    with each well carrying a 'tops' striplog loaded from the matching CSV.
    """
    wells = {}
    lasfiles = glob(laspath + '*.LAS')
    stripfiles = glob(stripath + '*.csv')
    for fname, sname in zip(lasfiles, stripfiles):
        name = fname.split('/')[-1].split('.')[0]
        wells[name] = Well.from_las(fname)
        wells[name].data['tops'] = Striplog.from_csv(sname)
    proj = Project(list(wells.values()))
    return proj
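A quick usage sketch, assuming matching LAS and tops CSV files exist under the placeholder paths:

proj = make_well_project(laspath='data/las/', stripath='data/tops/')
for well in proj:
    print(well.uwi, len(well.data['tops']))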
def test_quality():
    """
    Test basic stuff.
    """
    w = Well.from_las('tests/P-129_out.LAS')
    r = w.qc_data(tests, alias=alias)
    assert len(r['GR'].values()) == 6
    assert sum(r['GR'].values()) == 3
    assert len(r['DT'].values()) == 6

    html = w.qc_table_html(tests, alias=alias)
    assert len(html) == 10057
    assert '<table><tr><th>Curve</th><th>Passed</th><th>Score</th>' in html
    assert '<tr><th>GR</th><td>3 / 6</td><td>0.500</td><td style=' in html
Example #20
def test_quality():
    """
    Test basic stuff.
    """
    w = Well.from_las('tests/P-129_out.LAS')
    r = w.qc_data(tests, alias=alias)
    assert len(r['GR'].values()) == 6
    assert sum(r['GR'].values()) == 3
    assert len(r['DT'].values()) == 6

    html = w.qc_table_html(tests, alias=alias)
    assert len(html) == 10057
    assert '<table><tr><th>Curve</th><th>Passed</th><th>Score</th>' in html
    assert '<tr><th>GR</th><td>3 / 6</td><td>0.500</td><td style=' in html
def test_basis():
    """
    Test basis change.
    """
    well = Well.from_las(FNAME)
    gr = well.data['GR']

    x = gr.to_basis(start=100, stop=200, step=1)
    assert x.size == 101
    assert x[0] - 66.6059 < 0.001

    y = gr.to_basis_like(x)
    assert y.size == 101
    assert y[0] - 66.6059 < 0.001
Example #22
    def loadlas(self):
        filename = self.le_filename.text()
        self.w = Well.from_las(filename)
        curves = self.w.df().columns

        for c in curves:

            self.cb_plot1.addItem(c)
            self.cb_plot2.addItem(c)
            self.cb_plot3.addItem(c)
            self.cb_plot4.addItem(c)
            self.cb_x.addItem(c)
            self.cb_y.addItem(c)
            self.cb_points.addItem(c)
Example #23
def test_well_remap():
    """
    This is about loading messy data from LAS by renaming and transforming
    fields.
    """
    def transform_ll(text):
        """
        The transforming function.
        """
        def callback(match):
            d = match.group(1).strip()
            m = match.group(2).strip()
            s = match.group(3).strip()
            c = match.group(4).strip()
            if c.lower() in ('w', 's') and d[0] != '-':
                d = '-' + d
            return ' '.join([d, m, s])

        regex = r".+?([-0-9]+?).? ?([0-9]+?).? ?([\.0-9]+?).? +?([NESW])"
        pattern = re.compile(regex, re.I)
        text = pattern.sub(callback, text)
        return utils.dms2dd([float(i) for i in text.split()])

    remap = {
        'LATI': 'LOC',  # Use LOC for the parameter LATI.
        'LONG': 'UWI',  # Use UWI for the parameter LONG.
        'SECT': None,  # Use nothing for the parameter SECT.
        'RANG': None,  # Use nothing for the parameter RANG.
        'TOWN': None,  # Use nothing for the parameter TOWN.
    }

    funcs = {
        'LATI': transform_ll,  # Pass LATI through this function before load.
        'LONG': transform_ll,  # Pass LONG through it too.
        'UWI': lambda x: "No name, oh no!"
    }

    well = Well.from_las(FNAME, remap=remap, funcs=funcs)

    # Check some basics.
    assert (well.location.latitude - 45.20951027) < 0.001
    assert well.uwi == 'No name, oh no!'

    # Check CRS
    well.location.crs_from_epsg(4269)
    assert well.location.crs.data['no_defs']
    assert well.location.crs.to_string() == '+init=epsg:4269 +no_defs'

    well.location.crs_from_string('+init=epsg:4267')
    assert well.location.crs.init == 'epsg:4267'
Example #24
def make_well_project(laspath='data/las/', stripath='data/tops/'):
    """
    Return a welly Project of wells, keyed internally by base filename,
    with each well carrying a 'tops' striplog loaded from the matching CSV.

    This assumes the LAS files and tops files have the same base names,
    so that they pair up when the two listings are zipped together.
    """
    wells = {}
    lasfiles = glob(laspath + '*.LAS')
    stripfiles = glob(stripath + '*.csv')
    for fname, sname in zip(lasfiles, stripfiles):
        name = Path(fname).stem
        wells[name] = Well.from_las(fname)
        wells[name].data['tops'] = Striplog.from_csv(sname)
    proj = Project(list(wells.values()))
    return proj
Example #25
def test_well_remap():
    """
    This is about loading messy data from LAS by renaming and transforming
    fields.
    """
    def transform_ll(text):
        """
        The transforming function.
        """
        def callback(match):
            d = match.group(1).strip()
            m = match.group(2).strip()
            s = match.group(3).strip()
            c = match.group(4).strip()
            if c.lower() in ('w', 's') and d[0] != '-':
                d = '-' + d
            return ' '.join([d, m, s])

        regex = r".+?([-0-9]+?).? ?([0-9]+?).? ?([\.0-9]+?).? +?([NESW])"
        pattern = re.compile(regex, re.I)
        text = pattern.sub(callback, text)
        return utils.dms2dd([float(i) for i in text.split()])

    remap = {
        'LATI': 'LOC',  # Use LOC for the parameter LATI.
        'LONG': 'UWI',  # Use UWI for the parameter LONG.
        'SECT': None,   # Use nothing for the parameter SECT.
        'RANG': None,   # Use nothing for the parameter RANG.
        'TOWN': None,   # Use nothing for the parameter TOWN.
    }

    funcs = {
        'LATI': transform_ll,  # Pass LATI through this function before load.
        'LONG': transform_ll,  # Pass LONG through it too.
        'UWI': lambda x: "No name, oh no!"
    }

    well = Well.from_las(FNAME, remap=remap, funcs=funcs)

    # Check some basics.
    assert (well.location.latitude - 45.20951027) < 0.001
    assert well.uwi == 'No name, oh no!'

    # Check CRS
    well.location.crs_from_epsg(4269)
    assert well.location.crs.data['no_defs']
    assert well.location.crs.to_string() == '+init=epsg:4269 +no_defs'
def test_block():
    """
    Test log blocking.
    """
    well = Well.from_las(FNAME)
    gr = well.data['GR']

    b = gr.block(cutoffs=[50, 100])
    assert b.size == 12718
    assert b.basis.size == 12718
    assert b.max() == 2

    b = gr.block()
    assert b.mean() - 0.46839 < 0.001

    b = gr.block(cutoffs=[50, 100], values=[12, 24, 36])
    assert b.max() == 36
    assert b.mean() - 25.077528 < 0.001
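Conceptually, block() bins a continuous log into classes at the cutoffs and can map each class to a supplied value. A plain NumPy sketch of that idea (not welly's implementation):

import numpy as np

gr_values = np.array([30., 75., 120., 60., 140.])
classes = np.digitize(gr_values, [50, 100])    # class 0, 1 or 2 per sample
print(classes)                                 # [0 1 2 1 2]
print(np.array([12, 24, 36])[classes])         # [12 24 36 24 36]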
Example #27
    def __init__(self, well_name: str = None):
        """ Class that wraps `welly` to read borehole data - las files
         and deviations, csv, excel - and converts it into a
         `subsurface.UnstructuredData`

        This class is only meant to be extended with all the necessary functionality
         to load borehole data. For extensive manipulations of the data
         it should be done in `welly` itself.

        We need a class because it is going to be quite difficult to make
         one single function that fits all

        A borehole has:

            -  Datum (XYZ location)

            -  Deviation

            - Lithology: For this we are going to need striplog

            - Logs

        Everything would be a LineSet with a bunch of properties

        Parameters
        ----------
        well_name (Optional[str]): Name of the borehole

        Notes
        -----


        TODO: I think welly can initialize a Well from a file. That would be
         something to consider later on

        """
        # Init empty Project

        self.p = Project([])
        self._well_names = set()
        # Init empty well
        self.well = Well(params={'header': {'name': well_name}})
        self.well.location = Location(params={'kb': 100})
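A hedged usage sketch for the wrapper described above; the class name BoreholeReader is hypothetical (only __init__ is shown here), and it relies only on the attributes set in that constructor:

reader = BoreholeReader(well_name='Well-1')   # hypothetical class name
print(len(reader.p))                          # the empty starting Project, so 0
print(reader.well.location.kb)                # 100, from the default Location params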
Example #28
def test_deviation_to_position_conversion():
    """
    Test that we can convert a deviation survey (an N x 3 array with columns
    MD, INC, and AZI) into a position, a.k.a. path (an N x 3 array with columns
    X, Y, Z relative to the KB location). Tests the minimum curvature method only.
    """
    tolerance = 0.1  # absolute distance in metres we'll allow to be off.
    location = {'x': 382769.09, 'y': 4994021.65, 'kb': 94.8}
    well = Well({'location': Location(params=location)})

    survey = np.loadtxt(DNAME2, skiprows=2, delimiter=',')
    dev_surv = survey[:, 2:5]  # MD, Incl, Azim columns in test file
    posx, posy, posz = survey[:, 8], survey[:, 7], survey[:, 5]  # E/W, N/S, Z
    well.location.add_deviation(dev_surv)

    assert well.location.position.shape == (83, 3)
    assert np.allclose(posx, well.location.position[:, 0], atol=tolerance)
    assert np.allclose(posy, well.location.position[:, 1], atol=tolerance)
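For reference, a minimal sketch of the input add_deviation expects: an N x 3 array of MD, inclination and azimuth rows. The numbers are made up, and the Well is constructed the same way as in the test above:

import numpy as np
from welly import Location, Well

w = Well({'location': Location(params={'x': 0.0, 'y': 0.0, 'kb': 100.0})})
dev = np.array([
    [   0.0,  0.0,  0.0],   # MD (m), INC (deg), AZI (deg)
    [ 500.0,  4.0, 45.0],
    [1000.0, 10.0, 47.0],
    [1500.0, 12.0, 50.0],
])
w.location.add_deviation(dev)
print(w.location.position.shape)   # (4, 3): X, Y, Z offsets per station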
Example #29
def test_well():

    well = Well.from_las(FNAME)

    # Check some basics.
    assert well.header.license == 'P-129'
    assert well.location.country == 'CA'
    assert well.location.gl == 90.3
    assert len(well.data) == 25
    assert well.data['GR'][0] - 46.69865036 < 0.001
    assert len(well.survey_basis()) == 12718

    # This is garbled, but it is what it is.
    assert well.uwi == "Long = 63* 45'24.460  W"

    # Check we have the lasio object.
    assert well.las.well['STRT'].value == 1.0668

    # Check we can make one.
    assert well.to_lasio().well['FLD'].value == "Windsor Block"
def las_to_rc(path_to_las):

    w = Well.from_las(path_to_las)
    wdf = w.df()
    wdf = wdf.loc[~np.isnan(wdf['DT']), :]
    wdf['AI'] = w.df()['RHOB'] * 1e6 / w.df()['DT']

    # kb and wd are assumed for the time being
    kb = 35.9
    wd = 44.5
    top_log = wdf.index.values[0]

    w_vel = 1480  # velocity of sea water [m/s]
    repl_vel = 1600  # m/s

    water_twt = 2.0 * wd / w_vel
    repl_time = 2.0 * (top_log - wd) / repl_vel
    log_start_time = water_twt + repl_time

    # ignored for now
    def tvdss(md):
        "assumes a vertical well"
        return md - kb

    # two-way-time to depth relationship
    interval = wdf.index.values[1] - wdf.index.values[0]
    scaled_dt = interval * np.nan_to_num(wdf['DT']) / 1e6

    tcum = 2 * np.cumsum(scaled_dt)
    tdr = tcum + log_start_time

    # RESAMPLING FUNCTION
    dt = 0.004
    mint = 0.0
    maxt = 1.8

    t = np.arange(mint, maxt, dt)
    Z_t = np.interp(x=t, xp=tdr, fp=wdf['AI'])

    RC_t = (Z_t[1:] - Z_t[:-1]) / (Z_t[1:] + Z_t[:-1])
    return np.nan_to_num(RC_t)
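A hedged usage sketch for las_to_rc as defined above; the LAS path is a placeholder, and note that kb, water depth and the time range are hard-coded inside the function:

rc = las_to_rc('data/las/some_well.las')   # placeholder path
print(rc.shape)                            # one reflection coefficient per 4 ms sample
print(rc[:10])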
def test_curve():
    """
    Test basic stuff.
    """
    well = Well.from_las(FNAME)
    gr = well.data['GR']

    # Basics
    assert gr.ndim == 1
    assert gr.size == 12718
    assert gr.basis.size == 12718

    # Check HTML repr.
    html = gr._repr_html_()
    base = """<table><tr><th style="text-align:center;" colspan="2">GR [gAPI]</th></tr><tr><td style="text-align:center;" colspan="2">1.0668 : 1939.2900 : 0.1524</td></tr><tr><td><strong>"""
    co = """<tr><td><strong>service_company</strong></td><td>Schlumberger</td></tr>"""
    null = """<tr><td><strong>null</strong></td><td>-999.25</td></tr>"""
    data = """<tr><th style="border-top: 2px solid #000;">Depth</th><th style="border-top: 2px solid #000;">Value</th></tr><tr><td>1.0668</td><td>46.6987</td></tr><tr><td>1.2192</td><td>46.6987</td></tr><tr><td>1.3716</td><td>46.6987</td></tr><tr><td>⋮</td><td>⋮</td></tr><tr><td>1938.8328</td><td>92.2462</td></tr><tr><td>1938.9852</td><td>92.2462</td></tr><tr><td>1939.1376</td><td>92.2462</td></tr></table>"""
    assert base in html
    assert co in html
    assert null in html
    assert data in html
Example #32
def test_well_write():
    w = Well.from_las(FNAME)
    w.to_las('tests/test.las')
    well = Well.from_las('tests/test.las')
    assert well.data['GR'][0] - 46.69865036 < 0.001
Example #33
# 03. Display statistics of well logs
# 04. Quality control of well logs
# =============================================================================


### IMPORTING LIBRARIES
import numpy as np
import matplotlib.pyplot as plt
#%matplotlib inline
import welly
from welly import Well
from welly import Project
#%config InlineBackend.figure_format='svg' # to create high resolution graphics

### IMPORT WELL DATA
well = Well.from_las(r"E:\00 ProjectsData\100. STATIC MODEL\TALARA\02. WELLS\03. LAS - DEV\LOTE X - PETROBRAS\02. las\EA11097.LAS")
well.data

well.header.name  # extract the common well name from the LAS file

# well.plot()  # display a quick composite plot of the well's curves

### DISPLAY LOGS

# ## DISPLAY GRAPH
# tracks=["MD","RESD","&LN"]
# well.plot(tracks=tracks,lw=3) #line width
# well.data["GR"].plot(lw=0.7)

# ## DISPLAY INFORMATION
# well.data["GR"].start,well.data["GR"].stop # We can display the start and end depth
Example #34
def plot():
    p = Well.from_las('static/las-files/6307_d.las')
    p.plot()
    plt.savefig('static/images.png')
    return render_template('base.html', name='plot', url='static/images.png')
Example #35
def retrieve(wellname, fr, to):
    p = Well.from_las('static/las-files/' + wellname + '.las')
    gr = p.data['GR']
    gr[np.isnan(gr)] = 0
    payload = {'curve': list(gr), 'depth': list(gr.basis)}
    return jsonify(payload)
Example #36
def lfBackus(lb, freqs, test=False, log_plot=True, dt=2e-4, f=35):
    """
    Liner & Fei Backus thickness determination via the 'Backus Number.'

    This function uses the work of Liner & Fei to calculate the Backus average
    of the input curves at varying layer thicknesses. It then plots the original
    curves and the averaged curves. A second plot is used to illustrate the maximum
    bed thickness which will maintain all primaries and scattering reflection
    information for selected frequencies ($B$ <1/3) as well as maximum bed thickness
    which will maintain the direct arrival only ($B$ <2) and is suitable for
    migration velocity analysis, etc.

    B = (L'*f)/Vs min

    Variables:
        B = Backus number
        L' = Backus layer thickness
        f = frequency
        Vs min = The minimum shear velocity after backus averaging

    References:

    [https://library.seg.org/doi/abs/10.1190/1.2723204]

    The Backus number
    Liner,Chris et al.
    The Leading Edge(2007),26(4):420
    http://dx.doi.org/10.1190/1.2723204

    """

    # A lot of what is being done here would be made much simpler if this
    # function required the user to do some pre-work themselves, rather than
    # trying to solve for all the data issues within the function itself

    if test == False:
        lasPath = filedialog.askopenfilename()
        lasFile = Well.from_las(lasPath)

        # print(lasFile.header)
        print(lasFile.data.keys())

        # Check what the names of the curves you are looking for are
        print('What is your compressional sonic called?: ')
        dtc = lasFile.data[str(input())]
        print('What is your shear sonic called?: ')
        dts = lasFile.data[str(input())]
        print('What is your density curve called?: ')
        rhob = lasFile.data[str(input())]

        # Get the z-step
        depth = dtc.basis
        steps = np.diff(depth)

        if len(np.unique(steps)) == 1:
            dz = steps[0]
        else:
            dz = np.mean(steps)
            print('Z step was not constant.')
    elif test == True:
        fname = r"C:\Users\goril\Dropbox\PythonScratch\functions\100033305723W500.las"
        lasFile = Well.from_las(fname)

        dtc = lasFile.data['DTC']
        dts = lasFile.data['DTS']
        rhob = lasFile.data['RHOB']
        depth = dtc.basis
        steps = np.diff(depth)
        dz = steps[0]

    # Handle any negative values that weren't caught on import
    dtc = np.where(dtc < 0, np.nan, dtc)
    dts = np.where(dts < 0, np.nan, dts)
    rhob = np.where(rhob < 0, np.nan, rhob)

    # Linearly interpolate any gaps using pandas.
    # (couldn't get interp1d to work)
    curv_df = pd.DataFrame([dtc, dts, rhob]).interpolate(axis=1)
    dtc, dts, rhob = np.array(curv_df.loc[0]), np.array(
        curv_df.loc[1]), np.array(curv_df.loc[2])

    # round dz (this was for when I thought there was an issue with precision)
    dz = np.round(dz, 4)
    print(f'\nThe z step was found to be: {dz} \n')

    # Sonic slowness (us/ft) to velocity (m/s); 1 m = 3.28084 ft.
    vs = 1e6 / (3.28084 * dts)
    vp = 1e6 / (3.28084 * dtc)

    # Make a mask to clip all data to where vp exists
    mask = np.isnan(vp)
    vp_for_time = vp[~np.isnan(vp)]

    # Convert to time and generate synthetics
    bakus = np.array([b.rockphysics.backus(vp, vs, rhob, i, dz) for i in lb])
    bakus_masked = np.array([
        bakus[i, j, mask == False] for i in range(len(lb))
        for j in range(bakus.shape[1])
    ])

    # bakus_time = 6,3,:

    # Problems generalizing this section because depth_to_time doesn't deal
    # with NaNs. Need to create a mask based on vp.
    time_curves = ([
        b.transform.depth_to_time(bakus_masked[i],
                                  vp_for_time,
                                  dz,
                                  dt,
                                  return_t=True)
        for i in range(bakus_masked.shape[0])
    ])

    twt = time_curves[0].basis
    # time_curves = np.array(time_curves)
    rc = np.array([
        b.reflection.acoustic_reflectivity(time_curves[i].data,
                                           time_curves[j].data)
        for i, j in zip(range(0, len(time_curves), 3),
                        range(2, len(time_curves), 3))
    ])

    wavelet = b.filters.ricker(0.128, dt, f)
    synth = np.apply_along_axis(lambda r: np.convolve(r, wavelet, mode='same'),
                                axis=1,
                                arr=rc)

    vsMin = [np.nanmin(bakus[i][1]) for i in range(len(lb))]

    # Plot everything up
    if log_plot == True:
        plt.figure(figsize=(15, 10))
        for i in np.arange(len(lb)):
            plt.subplot(1, len(lb), i + 1)
            plt.plot(vp, depth, 'k', alpha=0.25)
            plt.plot(vs, depth, 'k', alpha=0.25)
            plt.plot(bakus[i][0], depth, 'b', alpha=0.75, label='Vp')
            plt.plot(bakus[i][1], depth, 'g', alpha=0.75, label='Vs')
            plt.gca().invert_yaxis()
            plt.title('%d m Backus layer' % lb[i])
            plt.grid(alpha=0.5)
            plt.xlim(np.nanmin(vs) - 100, np.nanmax(vp) + 100)
            plt.legend()
        plt.tight_layout()

        plt.figure(figsize=(15, 10))
        for i in np.arange(len(lb)):
            plt.subplot(1, len(lb), i + 1)
            for j in np.arange(0, 0.4, 0.1):
                plt.plot(synth[i] + j,
                         twt[:-1],
                         'k',
                         label=f"{lb[i]}m L' Synthetic")
                plt.fill_betweenx(twt[:-1],
                                  j,
                                  synth[i] + j,
                                  where=synth[i] > 0,
                                  color='r',
                                  alpha=0.4)
                plt.fill_betweenx(twt[:-1],
                                  j,
                                  synth[i] + j,
                                  where=synth[i] < 0,
                                  color='b',
                                  alpha=0.4)
            plt.ylim(np.amin(twt) - 0.1, np.amax(twt) + 0.1)
            # plt.xlim(-0.3, 0.3)
            plt.gca().invert_yaxis()
            plt.legend()

        f, axarr = plt.subplots(nrows=1, ncols=2)
        axarr[1].set_ylim(0, 3)
        axarr[1].set_xlim(0, np.max(lb))
        for i in np.arange(len(freqs)):
            axarr[0].plot(lb, vsMin, 'o', lb, vsMin, 'g--')
            axarr[0].set_title('$L$\'(m) vs Vs $min$')
            axarr[0].set_xlabel('$L$\' (backus length)', fontsize=10)
            axarr[0].set_ylabel('Vs $min$')
            axarr[1].plot(lb, (np.ones(len(lb)) / 3), 'r--')
            axarr[1].plot(lb, (np.ones(len(lb)) * 2), 'b--')
            axarr[1].set_title('Frequency ($Hz$) vs $L$\'')
            axarr[1].set_xlabel('$L$\' (backus length)')
            axarr[1].set_ylabel('$L$\' Backus Number')
            axarr[1].plot(lb, (freqs[i] * lb) / vsMin,
                          label='%s Hz' % freqs[i])
            axarr[1].set_xlim(0, np.max(lb))
            axarr[1].set_ylim(0)
        plt.tight_layout()
        axarr[1].legend(loc='upper left', fontsize='large')

    elif log_plot == False:
        f, axarr = plt.subplots(1, 2)
        axarr[1].set_ylim(0, 3)
        axarr[1].set_xlim(0, np.max(lb))
        for i in np.arange(len(freqs)):
            axarr[0].plot(lb, vsMin, 'o', lb, vsMin, 'g--')
            axarr[0].set_title('$L$\'(m) vs Vs $min$')
            axarr[0].set_xlabel('$L$\' (backus length)', fontsize=10)
            axarr[0].set_ylabel('Vs $min$')
            axarr[1].plot(lb, (np.ones(len(lb)) / 3), 'r--')
            axarr[1].plot(lb, (np.ones(len(lb)) * 2), 'b--')
            axarr[1].set_title('Frequency ($Hz$) vs $L$\'')
            axarr[1].set_xlabel('$L$\' (backus length)')
            axarr[1].set_ylabel('$L$\' Backus Number')
            axarr[1].plot(lb, (freqs[i] * lb) / vsMin,
                          label='%s Hz' % freqs[i])
            axarr[1].set_xlim(0, np.max(lb))
        plt.tight_layout()
        axarr[1].legend(loc='upper left', fontsize='large')

    plt.show()

    return depth, vp  #time_curves, twt, rc, synth
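A hedged call sketch; with test=True the function loads a hard-coded LAS path, so in practice you would run it with test=False and name the sonic and density curves interactively (the argument values below are arbitrary):

lb = np.array([1, 5, 10, 20])   # candidate Backus lengths in metres
freqs = [10, 35, 60]            # frequencies of interest in Hz
depth, vp = lfBackus(lb, freqs, test=False, log_plot=True)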
Example #37
def test_well_write():
    w = Well.from_las(FNAME)
    w.to_las('tests/test.las')
    well = Well.from_las('tests/test.las')
    assert well.data['GR'][0] - 46.69865036 < 0.001
Example #38
import json
import numpy as np
import pandas as pd
from pathlib import Path

from dash import Dash
from welly import Well

import helper

app = Dash(__name__)
# Create server variable with Flask server object for use with gunicorn
server = app.server

# # load well data
# p = Project.from_las(str(Path("well_picks/data/las/PoseidonNorth1Decim.LAS")))
# well_names = [w.name for w in p]

w = Well.from_las(str(
    Path("well_picks/data/las/PoseidonNorth1Decim.LAS")))  #original example

df = w.df()
curve_list = df.columns.tolist()
curve = curve_list[0]

# sample pick data, eventually load from file or other source into dict
surface_picks = {
    "Sea Bed": 520.4,
    "Montara Formation": 4620,
    "Plover Formation (Top Volcanics)": 4703.2,
    "Plover Formation (Top Reservoir)": 4798.4,
    "Nome Formation": 5079
}

dropdown_options = [{
Example #39
import dash
import dash_html_components as html
from dash.dependencies import Input, Output


import plotly.express as px
import plotly.graph_objs as go
from plotly.subplots import make_subplots


app = dash.Dash(__name__)

# load well data and plots from Doug 
# /Notebooks/dashwellviz WellLog example.ipynb
from welly import Well
import pandas as pd
w = Well.from_las('Data/Poseidon1Decim.LAS')
df = w.df()

# Generate Vp and Vs (m/s) from the DTCO and DTSM sonic slownesses (us/ft)
df['Vp'] = (1000000 / df['DTCO']) / 3.281
df['Vs'] = (1000000 / df['DTSM']) / 3.281
df['Vp_max'] = df['Vp'].max() + 200

# make cross plot 
fig = go.Figure(data=go.Scatter(
    x = df['Vp'],
    y = df['Vs'],
    mode='markers',
    opacity=0.7,
    marker=dict(
        size=8,