Example #1
def create_las(curve_df, curves_name, origin, las_units, las_longs, null, filepath):
    las = lasio.LASFile()
    # write the pandas data to the las file
    las.set_data(curve_df)
    # write the curve metadata from our three lists.
    counter = 0
    for x in curves_name:
        las.curves[x].unit = las_units[counter]
        las.curves[x].descr = las_longs[counter]
        counter = counter + 1
    las.well.COMP = origin.company
    las.well.WELL = origin.well_name
    las.well.FLD = origin.field_name
    las.well.SRVC = origin.producer_name
    las.well.DATE = origin.creation_time
    las.well.UWI = origin.well_id
    las.well.API = origin.well_id
    las.well.NULL = null
    las.params['PROD'] = lasio.HeaderItem('PROD', value=origin.product)
    las.params['PROG'] = lasio.HeaderItem('PROG', value=origin.programs)
    las.params['RUN'] = lasio.HeaderItem('RUN', value=origin.run_nr)
    las.params['DESCENT'] = lasio.HeaderItem('DESCENT', value=origin.descent_nr)
    las.params['VERSION'] = lasio.HeaderItem('VERSION', value=origin.version)
    las.params['LINEAGE'] = lasio.HeaderItem('LINEAGE', value="Python-converted from DLIS")
    las.params['ORFILE'] = lasio.HeaderItem('ORFILE', value=filepath)
    return las
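
A minimal usage sketch for create_las() above. The origin argument normally comes from dlisio (see the full converter in Example #19), so a SimpleNamespace stands in for it here, and the sketch assumes a lasio version whose set_data() accepts a DataFrame and uses its index as the first curve, which is what the function relies on. All names and values below are illustrative.

import numpy as np
import pandas as pd
from types import SimpleNamespace

depth = np.arange(1000.0, 1010.0, 0.5)
curve_df = pd.DataFrame({'GR': np.random.random(len(depth))},
                        index=pd.Index(depth, name='DEPT'))
curves_name = ['DEPT', 'GR']
las_units = ['m', 'gAPI']
las_longs = ['Measured depth', 'Gamma ray']
origin = SimpleNamespace(company='ACME', well_name='W-1', field_name='FIELD',
                         producer_name='SERVICE CO', creation_time='2020-01-01',
                         well_id='0000', product='PRODUCT', programs='PROGRAM',
                         run_nr=1, descent_nr=1, version='1.0')

las = create_las(curve_df, curves_name, origin, las_units, las_longs,
                 null=-999.25, filepath='source.dlis')
las.write('converted.las', version=2)
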
Example #2
def WriteLAS(OUTPUTFILE, df):
    las = lasio.LASFile()
    las.other = 'LAS file created from scratch using lasio'

    for column in df.columns:
        las.add_curve(column, df[column].values, descr='fake data')
    las.write(OUTPUTFILE, version=2.0)
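
A quick, hypothetical call of WriteLAS() above: any numeric DataFrame works, with the first column serving as the index curve and every curve described as 'fake data', exactly as the function writes it.

import numpy as np
import pandas as pd

df = pd.DataFrame({
    'DEPT': np.arange(1500.0, 1510.0, 0.5),
    'GR': np.random.random(20),
    'RHOB': np.random.random(20),
})
WriteLAS('fake_logs.las', df)
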
Example #3
    def test_generate_features(self):
        """
        Test feature generation function with las
        """
        print('Testing generate features.')
        las_data = lasio.LASFile()
        depths = np.arange(10, 20, 0.5)
        flen = len(depths)
        c1 = np.random.random(flen)
        c2 = np.random.random(flen)
        lat = 20
        lon = 40
        las_data.add_curve('DEPT', depths, unit='m')
        las_data.add_curve('C1', c1, descr='feature')
        las_data.add_curve('C2', c2, descr='target')
        info_dict = {
            'feature': 'C1',
            'target': 'C2',
            'WGS84Latitude': lat,
            'WGS84Longitude': lon
        }
        feature_keys = ['feature']
        target_keys = ['target']
        keeploc_df = self.widget.generateFeatures(las_data, info_dict,
                                                  feature_keys, target_keys,
                                                  True)
        noloc_df = self.widget.generateFeatures(las_data, info_dict,
                                                feature_keys, target_keys,
                                                False)
        self.assertEqual(keeploc_df.shape[0], flen)
        self.assertEqual(keeploc_df['Latitude'].values.mean(), lat)
        self.assertEqual(keeploc_df['Longitude'].values.mean(), lon)
        self.assertEqual(noloc_df.shape[0], flen)
        self.assertEqual(noloc_df.columns.values.tolist(),
                         feature_keys + target_keys)
Example #4
def las_export(spliced_logs, lognames, file_w_path):
    import lasio
    params = np.load(params_file_path)  # params_file_path is assumed to be defined at module level
    las = lasio.LASFile()
    las.add_curve('DEPT', spliced_logs[:, 0], unit='m')
    param_mnems = np.array([params[i]['mnemonic'] for i in range(len(params))])
    for i, lkey in enumerate(lognames):
        if lkey in param_mnems:
            indx = np.where(param_mnems == lkey)[0][0]
            print('*******************************')
            print(indx)
            las.add_curve(lkey,
                          spliced_logs[:, i + 1],
                          unit=params[indx]['unit'])
        else:
            print(
                'The log {} is not in params; please add its unit to params. \n Until then this log will have unknown units'
                .format(lkey))
            las.add_curve(lkey, spliced_logs[:, i + 1], unit='UNKWN')
    las.other = 'This las is generated by Laggy, the splicing module developed by Ameyem Geosolutions exclusively for Cairn Vedanta...'
    las.well['NULL'] = lasio.HeaderItem('NULL',
                                        value=-999.25,
                                        descr='NULL VALUE')
    las.well['WELL'] = lasio.HeaderItem('WELL', value='W1', descr='WELL')
    print('Writing to ', file_w_path)
    las.write(file_w_path)
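
las_export() above reads unit metadata from params_file_path, which is assumed to be defined at module level in the original project. A hypothetical params file with the expected structure, a sequence of dicts carrying 'mnemonic' and 'unit' keys, could be produced like this:

import numpy as np

params = np.array([
    {'mnemonic': 'GR', 'unit': 'gAPI'},
    {'mnemonic': 'RHOB', 'unit': 'g/cm3'},
], dtype=object)
np.save('params.npy', params)

# Recent NumPy versions refuse to load object arrays unless allow_pickle=True,
# so np.load(params_file_path) in the function may need that argument.
params = np.load('params.npy', allow_pickle=True)
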
Example #5
    def to_las(self):
        """Return a lasio.LASFile object, which can then be written to disk.

        Example::

           las = collection.to_las()
           las.write('example_logs.las', version=2)
        """
        las = lasio.LASFile()

        las.well.WELL = str(self.support.wellbore_interpretation.title)
        las.well.DATE = datetime.today().strftime('%Y-%m-%d %H:%M:%S')

        # todo: Get UWI from somewhere
        # las.well.UWI = uwi

        # Lookup depths from associated WellboreFrame and Trajectory
        md_values = self.support.node_mds
        md_unit = self.support.trajectory.md_uom

        # Measured depths should be first column in LAS file
        # todo: include datum information in description
        las.append_curve('MD', md_values, unit = md_unit)

        for well_log in self.iter_logs():
            name = well_log.title
            unit = well_log.uom
            values = well_log.values()
            if values.ndim > 1:
                raise NotImplementedError('Multidimensional logs not yet supported in pandas')
            assert len(values) > 0
            log.debug(f"Writing log {name} of length {len(values)} and shape {values.shape}")
            las.append_curve(name, values, unit = unit, descr = None)
        return las
Example #6
def test_data_attr():
    las = lasio.LASFile()
    las.append_curve('TEST1', data=[1, 2, 3])
    las.append_curve_item(lasio.CurveItem('TEST2', data=[4, 5, 6]))
    las.append_curve('TEST3', data=[7, 8, 9])
    logger.debug('las.data = {}'.format(las.data))
    # the .all() method assumes these are numpy ndarrays; that should be the case.
    assert (las.data == np.asarray([[1, 4, 7], [2, 5, 8], [3, 6, 9]])).all()
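
For comparison, a small sketch of pulling the same curves back out as a pandas DataFrame; lasio's df() method uses the first curve as the index.

import numpy as np
import lasio

las = lasio.LASFile()
las.append_curve('DEPT', data=np.array([1.0, 2.0, 3.0]))
las.append_curve('GR', data=np.array([10.0, 20.0, 30.0]))
df = las.df()      # indexed by DEPT, one 'GR' column
print(df.shape)    # (3, 1)
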
Example #7
File: well.py Project: phumikrai/1D-MEM
    def export(self, **kwargs):
        """
        savepath = path to save folder
        ...
        export new las file (.las) and comma-separated values file (.csv)
        """

        savepath = kwargs.get('savepath')

        import os, datetime
        import lasio

        # create a new empty las file and export to the new one

        las_file = lasio.LASFile()
        las_file.set_data(self.df)

        # update curve unit and its description

        for curve_1, curve_2 in zip(las_file.curves, self.las.curves):
            if curve_1.mnemonic == curve_2.mnemonic:
                curve_1.unit = curve_2.unit
                curve_1.descr = curve_2.descr

        # update header

        las_file.well = self.las.well

        # note in las file

        las_file.other = 'This file was written by Python on %s' % datetime.date.today().strftime('%m-%d-%Y')

        # setup header for csv file

        headers = []

        for curve in self.las.curves:
            header = '%s[%s]' % (curve.mnemonic, curve.unit)
            headers.append(header)

        index = headers.pop(0)

        # export las and csv files

        lasfolder, csvfolder = 'LASfiles', 'CSVfiles'

        for folder in [lasfolder, csvfolder]:
            if not os.path.isdir(os.path.join(savepath, folder)):
                os.makedirs(os.path.join(savepath, folder))

        las_file.write(os.path.join(savepath, lasfolder,
                                    '%s_py.las' % self.name),
                       version=2.0)  # export las

        self.df.rename_axis(index).to_csv(os.path.join(
            savepath, csvfolder, '%s_py.csv' % self.name),
                                          header=headers)  # export csv
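
Hypothetical usage, assuming well is an instance of the class this export() method belongs to, with its df, las and name attributes already populated:

well.export(savepath='exports')
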
Example #8
def test_insert_curve_2():
    las = lasio.LASFile()
    a = np.array([1, 2, 3, 4])
    b1 = np.array([5, 9, 1, 4])
    b2 = np.array([1, 2, 3, 2])
    las.append_curve("DEPT", a)
    las.append_curve("B", b2, descr="b2")
    las.insert_curve(2, "B", b1, descr="b1")
    assert [c.descr for c in las.curves] == ["", "b2", "b1"]
Example #11
def test_add_curve_duplicate():
    las = lasio.LASFile()
    a = np.array([1, 2, 3, 4])
    b1 = np.array([5, 9, 1, 4])
    b2 = np.array([1, 2, 3, 2])
    las.add_curve("DEPT", a)
    las.add_curve("B", b1, descr="b1")
    las.add_curve("B", b2, descr="b2")
    # assert l.keys == ['DEPT', 'B', 'B']
    assert [c.descr for c in las.curves] == ["", "b1", "b2"]
Example #12
    def _create_well_object(dataframe, dataframe_name=None):

        assert isinstance(dataframe, pandas.core.frame.DataFrame)  # guard against non-DataFrame input

        las = lasio.LASFile()
        las.well.DATE = datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S')
        if dataframe_name:
            las.well.WELL = dataframe_name
        for curve in dataframe.columns:
            las.add_curve(curve, dataframe[curve], unit='')

        return las
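
A hypothetical call of _create_well_object(); the leading underscore marks it as a private helper in its project, but it takes no self and can be exercised directly:

import numpy as np
import pandas as pd

df = pd.DataFrame({'DEPT': np.arange(0.0, 5.0, 0.5),
                   'GR': np.random.random(10)})
las = _create_well_object(df, dataframe_name='Well-A')
las.write('well_a.las', version=2.0)
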
Example #13
def test_delete_curve_mnemonic():
    las = lasio.LASFile()
    a = np.array([1, 2, 3, 4])
    b1 = np.array([5, 9, 1, 4])
    b2 = np.array([1, 2, 3, 2])
    logger.info(str([c.mnemonic for c in las.curves]))
    las.append_curve("DEPT", a)
    logger.info(str([c.mnemonic for c in las.curves]))
    las.append_curve("B", b2, descr="b2")
    logger.info(str([c.mnemonic for c in las.curves]))
    las.insert_curve(2, "B", b1, descr="b1")
    logger.info(str([c.mnemonic for c in las.curves]))
    las.delete_curve(mnemonic="DEPT")
    assert [c.descr for c in las.curves] == ["b2", "b1"]
Example #15
def create_log_files(wells, run_id, serv_num, well_id_name=False):
    for well in wells:
        if well_id_name:
            well_id = well.core()[0].well_data_id
        else:
            well_id = well.name

        Path(f"{current_app.config['SERVICES_PATH']}{serv_num}/{str(run_id)}/input_data/wellLogs/{well_id}") \
            .mkdir(parents=True, exist_ok=True)

        filename = f"{current_app.config['SERVICES_PATH']}{serv_num}/{str(run_id)}" \
                   f"/input_data/wellLogs/{well_id}/{well.name}.las"

        las = copy.deepcopy(lasio.LASFile())
        las.well['WELL'].value = well.name
        las.add_curve("DEPT", data=list(well.depth), descr='')
        for crv in well.curves():
            las.add_curve(crv.name, data=list(crv.data), descr='')

        with open(filename, mode='w', encoding='utf-8') as f:
            las.write(f, version=2.0)
Example #16
    def write_las(self, processed_df: pd.core.frame.DataFrame, source: lasio.las.LASFile, filename: str, location=None) -> None:
        r"""Write LAS file

        Arguments:
            processed_df: Dataframe that needs to be written
            source: LAS file from which headers are copied
            filename: Name of the output file
            location (tuple, optional): Latitude and longitude that is written to the header
        """

        if processed_df.empty:
            #TODO: This needs to be a warning through logging
            print('Input dataframe is empty')
            return 
 
        # Create new LAS file here
        processed_las = lasio.LASFile()

        # Copy the headers that haven't been changed
        for entry in _HEADERS_TO_COPY:
            processed_las.header[entry] = source.header[entry]

        # Insert location information to the header
        if location:
            assert(len(location) == 2)

            latitude = location[0]
            longitude = location[1] 

            processed_las.well['SLAT'] = lasio.HeaderItem('SLAT', unit='WGS84', value=latitude, descr='Surface Latitude')
            processed_las.well['SLON'] = lasio.HeaderItem('SLON', unit='WGS84', value=longitude, descr='Surface Longitude')

        # Insert curves now
        for entry in processed_df.columns:
            if entry == 'DEPT':
                processed_las.add_curve('DEPT', processed_df['DEPT'].values, unit='ft')
            else:
                processed_las.add_curve(entry, processed_df[entry].values)

        processed_las.write(os.path.join(self.output_path, filename), version=2)
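
_HEADERS_TO_COPY is not shown in this excerpt. A plausible module-level definition, naming the header sections carried over from the source file, might look like the following (the real project may copy a different set):

_HEADERS_TO_COPY = ['Version', 'Well', 'Parameter']
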
Example #17
File: test_formprep.py Project: RRiya/TGS
    def test_generateBinSum(self):
        """
        Test sum of bins function on a dataframe
        """
        print('Testing bin sum.')
        las_data = lasio.LASFile()
        flen = len(self.depth_bins)
        c1 = np.random.random(flen)
        c2 = np.random.random(flen)

        las_data.add_curve('DEPT', self.depth_bins, unit='m')
        las_data.add_curve('C1', c1, descr='form1')
        las_data.add_curve('C2', c2, descr='form2')
        info_dict = {'form1': 'C1', 'form2': 'C2'}
        data_df = self.widget.generateBinSum(las_data, info_dict, 'b2')

        self.assertEqual(data_df.columns.values.tolist(),
                         ['DEPT', 'form1', 'form2', 'bins'])
        self.assertEqual(data_df['DEPT'].values.tolist(),
                         self.depth_bins.tolist())
        self.assertEqual(data_df['form1'].values.tolist(), c1.tolist())
        self.assertEqual(data_df['form2'].values.tolist(), c2.tolist())
Example #18
File: create.py Project: rocktype/steinbit
    def output_las(cls, frame: Frame, output: str):
        """
        Output a LAS file
        """
        df = frame.result()
        unit = df[RequiredFields.D_UNIT.value][0]
        depths = df[RequiredFields.DEPTH.value]
        strt = depths.min()
        stop = depths.max()
        step = (stop - strt) / len(df.index)
        las = lasio.LASFile()
        las.well.STRT.unit = unit
        las.well.STRT.value = strt
        las.well.STOP.unit = unit
        las.well.STOP.value = stop
        las.well.STEP.unit = unit
        las.well.STEP.value = step or 1.0
        las.well.WELL.value = df[RequiredFields.WELL.value][0]

        columns = [RequiredFields.DEPTH.value] + frame.minerals()
        for mnemonic, column in zip(mnemonics(columns), columns):
            las.append_curve(mnemonic, df[column], descr=column)
        with open(output, mode="w") as handle:
            las.write(handle)
Example #19
def convert_dlis_to_las(filepath, output_folder_location, null=-999.25):
    filename = os.path.basename(filepath)
    filename = os.path.splitext(filename)[0]
    embedded_files = []
    origins = []
    frame_count = 0

    def df_column_uniquify(df):
        df_columns = df.columns
        new_columns = []
        for item in df_columns:
            counter = 0
            newitem = item
            while newitem in new_columns:
                counter += 1
                newitem = "{}_{}".format(item, counter)
            new_columns.append(newitem)
        df.columns = new_columns
        return df

    with dlisio.load(filepath) as file:
        print(file.describe())
        for d in file:
            embedded_files.append(d)
            frame_count = 0
            object_warning = ''  # filled in below if any channel has to be expanded into multiple columns
            for origin in d.origins:
                origins.append(origin)
            for fram in d.frames:
                curves_name = []
                longs = []
                unit = []
                curves_L = []
                frame_count = frame_count + 1
                for channel in fram.channels:
                    curves_name.append(channel.name)
                    longs.append(channel.long_name)
                    unit.append(channel.units)
                    curves = channel.curves()
                    curves_L.append(curves)
                name_index = 0
                las = lasio.LASFile()
                curve_df = pd.DataFrame()
                las_units = []
                las_longs = []
                for c in curves_L:
                    name = curves_name[name_index]
                    print("Processing " + name)
                    units = unit[name_index]
                    long = longs[name_index]
                    c = np.vstack(c)
                    try:
                        num_col = c.shape[1]
                        col_name = [name] * num_col
                        df = pd.DataFrame(data=c, columns=col_name)
                        curve_df = pd.concat([curve_df, df], axis=1)
                        name_index = name_index + 1
                        object_warning = str(
                            name
                        ) + ' had to be expanded in the final .las file, as it has multiple samples per index'
                    except:
                        num_col = 1
                        df = pd.DataFrame(data=c, columns=[name])
                        name_index = name_index + 1
                        curve_df = pd.concat([curve_df, df], axis=1)
                        continue
                    u = [units] * num_col
                    l = [long] * num_col
                    las_units.append(u)
                    las_longs.append(l)
                    print("Completed " + name)
                las_units = [item for sublist in las_units for item in sublist]
                las_longs = [item for sublist in las_longs for item in sublist]

                # Check that the lists are ready for the curve metadata
                print("If these are different lengths, something is wrong:")
                print(len(las_units))
                print(len(las_longs))
                curve_df = df_column_uniquify(curve_df)
                curves_name = list(curve_df.columns.values)
                print(len(curves_name))

                # we will take the first curve in the frame as the index.
                curve_df = curve_df.set_index(curves_name[0])
                # write the pandas data to the las file
                las.set_data(curve_df)
                # write the curve metadata from our three lists.
                counter = 0
                for x in curves_name:
                    las.curves[x].unit = las_units[counter]
                    las.curves[x].descr = las_longs[counter]
                    counter = counter + 1
                las.well.COMP = origin.company
                las.well.WELL = origin.well_name
                las.well.FLD = origin.field_name
                las.well.SRVC = origin.producer_name
                las.well.DATE = origin.creation_time
                las.well.UWI = origin.well_id
                las.well.API = origin.well_id
                las.well.NULL = null
                las.params['PROD'] = lasio.HeaderItem('PROD',
                                                      value=origin.product)
                las.params['PROG'] = lasio.HeaderItem('PROG',
                                                      value=origin.programs)
                las.params['RUN'] = lasio.HeaderItem('RUN',
                                                     value=origin.run_nr)
                las.params['DESCENT'] = lasio.HeaderItem(
                    'DESCENT', value=origin.descent_nr)
                las.params['VERSION'] = lasio.HeaderItem('VERSION',
                                                         value=origin.version)
                las.params['LINEAGE'] = lasio.HeaderItem(
                    'LINEAGE', value="Python-converted from DLIS")
                las.params['ORFILE'] = lasio.HeaderItem('ORFILE',
                                                        value=filepath)

                # -----------------------------------------------------------------------
                # Write file
                # -----------------------------------------------------------------------
                outfile = filename + "_" + "converted_with_python_" + str(
                    frame_count) + ".las"
                outpath = os.path.join(output_folder_location, outfile)

                if not os.path.exists(output_folder_location):
                    print("Making output directory: [{}]\n".format(
                        output_folder_location))
                    os.makedirs(output_folder_location)

                print("Writing: [{}]\n".format(outpath))
                las.write(outpath, version=2)

            print("number of frames: " + str(frame_count) +
                  ": this is the number of .las files created")
            print("embedded_files: " + str(len(embedded_files)))
            print("This file has " + str(len(origins)) +
                  " metadata headers.  This code has used the first.")
            print(object_warning)
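
Hypothetical usage of the converter above, assuming dlisio, lasio, numpy, pandas and os are imported as the function requires and that the paths exist:

convert_dlis_to_las('logs/example.dlis', 'converted_las', null=-999.25)
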
Example #20
def get_linking(run_id, wells_list):
    abs_path = 'app/modules/second/'
    run_path = abs_path + str(run_id) + '/'
    out_path_log = replaceSlash(run_path + "\\output_data\\Report.txt")
    logging.basicConfig(format=u'%(levelname)-8s : %(message)s', level=logging.WARNING, filename=out_path_log,
                        filemode='w')
    Err_count = 0
    pattern_text_log = '   %s   :   %s'

    # regular expressions for filtering well names
    regex = r"[^а-яА-Яa-zA-Z\s\0\^\$\`\~\!\@\"\#\№\;\%\^\:\?\*\(\)\-\_\=\+\\\|\[\]\{\}\,\.\/\'\d]"
    ignoreLiterals = True
    regWellName = r"[^0-9]"

    # local path to the folder of LAS well-log files to process
    wellLogsPath = run_path + "input_data\\wellLogs"
    # local path to the folder of Excel core-data files to process
    wellCorePath = run_path + "input_data\\wellCore"
    # local path to the stratigraphy folder
    wellTopsPath = run_path + "input_data\\stratigraphy"

    coreNames = []
    wells = []
    log = []
    for index in wells_list:

        wellsLasFiles = {}
        wellsInfo = {}

        files = getFilePaths(os.path.abspath(replaceSlash(wellLogsPath + "\\" + index)))
        for file in files:

            file = replaceSlash(file)

            enc = 'None'
            maxLen = 999999
            maybeEnc = 'ascii'

            for e in encodincs:
                try:
                    cod = codecs.open(file, 'r', encoding=e)
                    l = str(cod.read())
                    l = l.replace(' ', '')
                    check = re.findall(regex, l)
                    curLen = len(check)
                    if curLen != 0:
                        if curLen < maxLen:
                            maxLen = curLen
                            maybeEnc = e
                    else:
                        enc = e
                        break
                except:
                    pass

            if enc == 'None': enc = maybeEnc
            if enc != 'None':
                try:
                    try:
                        f = lasio.read(file, encoding=enc)
                    except:
                        newFile = checkAndFixLas(file, enc)
                        f = lasio.read(newFile, encoding=enc, ignore_header_errors=True)
                    if str(f.well['WELL'].value) == '':
                        log.append(file)
                        log.append('No wellname')
                        continue
                    try:
                        lasInfoObj = LasInfo(file, f[f.keys()[0]][0], f[f.keys()[0]][-1],
                                             f[f.keys()[0]][1] - f[f.keys()[0]][0], enc, f)
                    except Exception as exc:
                        log.append(file)
                        log.append(str(exc))
                        continue
                    wellName = re.sub(regWellName, '', str(f.well['WELL'].value)).replace(' ', '') if (
                            ignoreLiterals == True and not isinstance(str(f.well['WELL'].value), float)) else str(
                        f.well['WELL'].value)
                    if not wellName in wellsLasFiles.keys():
                        wellsLasFiles[wellName] = []
                    wellsLasFiles[wellName].append(lasInfoObj)
                except Exception as e:
                    log.append(file)
                    log.append(str(e))

        for wellName in wellsLasFiles.keys():
            try:
                globalMinDept = (wellsLasFiles.get(wellName)[0]).minDept
                globalMaxDept = (wellsLasFiles.get(wellName)[0]).maxDept
                globalMinStep = (wellsLasFiles.get(wellName)[0]).step
                for lasInfoObject in wellsLasFiles.get(wellName):
                    minn = lasInfoObject.minDept
                    maxx = lasInfoObject.maxDept
                    step = lasInfoObject.step
                    if minn < globalMinDept:
                        globalMinDept = minn
                    if maxx > globalMaxDept:
                        globalMaxDept = maxx
                globalMinStep = 0.1
                newWell = WellClass(wellName)
                dept = np.round(np.arange(globalMinDept, globalMaxDept, globalMinStep), 1)
                if dept[-1] != np.round(globalMaxDept, 1):
                    dept = np.hstack((dept, np.round(globalMaxDept, 1)))
                globalShape = len(dept)
                curves = {}
                curvesInfo = {}
                curves['DEPT'] = dept
            except Exception as exc:
                print(index, wellName)
                print(exc)
                pass

            unexpectedError = False
            downloadedFiles = 0

            for lasInfoObject in wellsLasFiles.get(wellName):
                try:
                    crvNames = [lasInfoObject.las.keys()[i] for i in range(0, len(lasInfoObject.las.keys()))]
                    if 'UNKNOWN' in crvNames:
                        log.append(lasInfoObject.filePath)
                        log.append('Unexpected curve name')
                        continue
                    crvs = ''

                    for crv in range(1, len(lasInfoObject.las.curves.keys()) - 1):
                        crvs = crvs + lasInfoObject.las.curves.keys()[crv] + ', '

                    crvs = crvs + lasInfoObject.las.curves.keys()[len(lasInfoObject.las.curves.keys()) - 1]
                    previousInfo = lasInfoObject.las.curves
                    localDept = np.round(lasInfoObject.las[lasInfoObject.las.keys()[0]], 1)

                    try:
                        curvesInfo['DEPT'] = CurveInfo(previousInfo[0].unit, previousInfo[0].descr, 'DEPT', 'DEPT',
                                                       lasInfoObject.filePath)
                    except:
                        curvesInfo['DEPT'] = CurveInfo('', '', 'DEPT', 'DEPT', lasInfoObject.filePath)

                    for i in range(1, len(lasInfoObject.las.keys())):
                        try:
                            try:
                                curveName = lasInfoObject.las.keys()[i]
                                newCurveName = curveName
                            except:
                                continue
                            try:
                                if curveName in curves.keys():
                                    count = 1
                                    while newCurveName in curves.keys():
                                        if count > 1:
                                            newCurveName = newCurveName[:-1]
                                        newCurveName = newCurveName + str(count)
                                        count = count + 1
                            except Exception as exc:
                                print(index, wellName)
                                print(exc)
                                pass
                            newCurve = newMesh(lasInfoObject.las[curveName], localDept, dept)
                            curves[newCurveName] = newCurve
                            try:
                                curvesInfo[newCurveName] = CurveInfo(previousInfo[curveName].unit,
                                                                     previousInfo[curveName].descr,
                                                                     previousInfo[curveName].original_mnemonic,
                                                                     curveName,
                                                                     lasInfoObject.filePath)
                            except Exception as exc:
                                print(index, wellName)
                                print(exc)
                                pass
                                curvesInfo[newCurveName] = CurveInfo('', '', curveName, curveName,
                                                                     lasInfoObject.filePath)
                        except Exception as exc:
                            pass
                            print(index, wellName)
                            print(exc)

                    downloadedFiles += 1

                except Exception as e:
                    log.append(lasInfoObject.filePath)
                    log.append(str(e))

            if downloadedFiles == 0:
                unexpectedError = True

            if not unexpectedError:
                wellsInfo[wellName] = curvesInfo
                newWell.curves = curves
                newWell.keys = curves.keys()
                newWell.index = index

            # stratigraphy
            try:
                topsPath = replaceSlash(wellTopsPath + "\\" + index)
                with open(replaceSlash(
                        os.path.join(topsPath,
                                     [file for file in os.listdir(topsPath) if file.lower().endswith('json')][0])),
                        "r") as read_file:
                    xlsTops = json.load(read_file)
                    newWell.tops = [xlsTops['Lingula_top'], xlsTops['P2ss2_top'], xlsTops['P2ss2_bot']]
            except:
                logging.error(pattern_text_log, str(wellName), "read error")
                Err_count += 1
                continue

            # Add core data information
            indWellPath = replaceSlash(wellCorePath + "\\" + index)
            filePaths = [replaceSlash(os.path.join(indWellPath, file)) for file in os.listdir(indWellPath) if
                         not file.startswith('.')]
            if len(filePaths) > 1:
                logging.warning(pattern_text_log, str(indWellPath),
                                "more than one file found in the specified directory. ")

            for i in filePaths:
                try:
                    dataFrame, name_well, dept = parse_json(i, pattern_text_log)

                    if dataFrame is None:
                        continue
                    curves_core = {}
                    for i in range(dept, len(dataFrame.columns)):
                        curves_core[dataFrame.columns[i]] = np.array(
                            [value for value in dataFrame[dataFrame.columns[i]]])
                        if dataFrame.columns[i] not in coreNames and not dataFrame.columns[i] == 'DEPT':
                            coreNames.append(dataFrame.columns[i])

                    for key in curves_core.keys():
                        if not key == "DEPT":
                            newWell.curves[key] = np.nan * np.ones(len(newWell.curves["DEPT"]))
                            for i in range(len(curves_core["DEPT"])):
                                dept = curves_core["DEPT"][i]
                                newWell.curves[key][np.round(newWell.curves["DEPT"], 1) == round(dept, 1)] = \
                                curves_core[key][i]
                    break
                except Exception as e:
                    logging.error(pattern_text_log, str(index), str(i) + ". " + str(e))
                    Err_count += 1
            wells.append(newWell)
    # progress bar to display progress in the console
    progress = Bar('   Core shifting', max=len(wells) + 1, fill='*', suffix='%(percent)d%%')
    progress.next()

    Warns = ''
    for well in wells:
        try:
            Res_table = []
            # standardize curve names in the well

            GK_names = ["gkv", "gr", "gk", "gk1", "гк", "gk:1", "гк  ", "_гк", "= gk$"]
            IK_names = ["ik", "ild", "ик", "ik1", "ik:1", "ик  ", "иk", "зонд ик", "rik", "ик$", "ик_n"]
            NGK_names = ["нгк alfa", "ngl", "нгк", "nkdt", "jb", "jm", "ngk", "ngk:1", "ннкб", "nnkb", "nnkb  (ннкб)",
                         "бз ннк", "nktd", "бз", "_нгк", "ннк", "_ннк"]
            MD_names = ["dept", "md", "depth"]

            go_keys = list(well.curves.keys())
            for name_crv in go_keys:
                if str(name_crv).lower() in GK_names:
                    well.curves["GK"] = well.curves[name_crv]
                    continue

                if str(name_crv).lower() in IK_names:
                    well.curves["IK"] = well.curves[name_crv]
                    continue

                if str(name_crv).lower() in NGK_names:
                    well.curves["NGK"] = well.curves[name_crv]
                    continue

                if str(name_crv).lower() in MD_names:
                    well.curves["DEPT"] = well.curves[name_crv]
                    continue

            # check that the required curves are present in the well
            if not "GK" in well.curves.keys():
                logging.error(pattern_text_log, str(well.index), "no GK data")
                Err_count += 1
                progress.next()
                continue
            if not "NGK" in well.curves.keys():
                logging.error(pattern_text_log, str(well.index), "no NGK data")
                Err_count += 1
                progress.next()
                continue
            if not "IK" in well.curves.keys():
                logging.error(pattern_text_log, str(well.index), "no IK data")
                Err_count += 1
                progress.next()
                continue

            if not "Volume_density" in well.curves.keys():
                logging.error(pattern_text_log, str(well.index), "no Volume_density in curves keys")
                Err_count += 1
                progress.next()
                continue
            if not "SOIL_mass" in well.curves.keys():
                logging.error(pattern_text_log, str(well.index), "no SOIL_mass in curves keys")
                Err_count += 1
                progress.next()
                continue

            dept = well.curves["DEPT"]
            kden = well.curves["Volume_density"]
            kbit = well.curves["SOIL_mass"]
            lito = well.curves['Lithotype']

            if len(lito[~np.isnan(lito)]) <= 2:
                logging.error(pattern_text_log, str(well.index), "Insufficient data for calculation: Lithotype")
                Err_count += 1
                progress.next()
                continue
            if len(kbit[~np.isnan(kbit)]) <= 2:
                logging.error(pattern_text_log, str(well.index), "Insufficient data for calculation: SOIL_mass")
                Err_count += 1
                progress.next()
                continue
            if len(kden[~np.isnan(kden)]) <= 2:
                logging.error(pattern_text_log, str(well.index), "Insufficient data for calculation: Volume_density")
                Err_count += 1
                progress.next()
                continue

            for i in range(len(kden)):
                if not np.isnan(kden[i]) and kden[i] > 4:
                    lito[i] = np.nan

            # Volume_density

            crv1 = well.curves["IK"]
            crv2 = well.curves["NGK"]

            kbit1 = np.copy(kbit)
            kden1 = np.copy(kden)
            lito1 = np.copy(lito)

            ind = np.isfinite(kden)
            kden1 = np.interp(dept, dept[ind], kden[ind], left=np.nan, right=np.nan)

            ind = np.isfinite(kbit)
            kbit1 = np.interp(dept, dept[ind], kbit[ind], left=np.nan, right=np.nan)

            I0 = get_step(lito)

            if I0 is None:
                logging.warning(pattern_text_log, str(well.index), "could not establish a link to the <Lithotype> data")
                progress.next()
                continue

            de_Dept = well.tops[1] - dept[I0]
            if de_Dept >= 0:
                if abs(de_Dept) >= 3.0:
                    logging.error(pattern_text_log, str(well.index),
                                  "stratigraphy needs checking: the core sampling interval extends beyond the sand member")
                    Err_count += 1
                    progress.next()
                    continue
            else:
                if abs(de_Dept) >= 7.0:
                    logging.error(pattern_text_log, str(well.index),
                                  "stratigraphy needs checking: the core sampling interval extends beyond the sand member")
                    Err_count += 1
                    progress.next()
                    continue

            wind_cc = 3.1
            up_sp = max(dept[I0] - wind_cc, well.tops[1])
            lft_prt = len(dept[np.logical_and(dept >= dept[I0], dept <= dept[I0] + wind_cc)])
            dpu_d = max(dept[I0] - wind_cc, well.tops[1])
            ii_di = np.nanargmin(np.abs(dept - dpu_d))
            rgt_prt = ii_di - I0
            dpt_span_cc = [rgt_prt, lft_prt]

            step, cc1, cc2, ccz = GetShift(dept, crv1, crv2, kbit, kden, dpt_span_cc, lito)

            cheS_d = dept[I0 + step]
            if abs(well.tops[1] - cheS_d) >= 4.0:
                logging.error(pattern_text_log, str(well.index),
                              "stratigraphy needs checking: the core sampling interval extends beyond the sand member")
                Err_count += 1
                progress.next()
                continue

            Res_table.append([well.name, round(step / 10, 1)])

            if step > 0:
                kbit = np.hstack((np.nan * np.ones(step), kbit[:-step]))
                kbit1 = np.hstack((np.nan * np.ones(step), kbit1[:-step]))
            else:
                kbit = np.hstack((kbit[-step:], np.nan * np.ones(-step)))
                kbit1 = np.hstack((kbit1[-step:], np.nan * np.ones(-step)))
            if step > 0:
                kden = np.hstack((np.nan * np.ones(step), kden[:-step]))
                kden1 = np.hstack((np.nan * np.ones(step), kden1[:-step]))
            else:
                kden = np.hstack((kden[-step:], np.nan * np.ones(-step)))
                kden1 = np.hstack((kden1[-step:], np.nan * np.ones(-step)))

            for name in coreNames:
                if name in well.curves.keys():
                    if step > 0:
                        well.curves[name] = np.hstack((np.nan * np.ones(step), well.curves[name][:-step]))
                    else:
                        well.curves[name] = np.hstack((well.curves[name][-step:], np.nan * np.ones(-step)))

            fig, f = plt.subplots(figsize=(20, 12), nrows=1, ncols=6, sharey=True)
            fig.suptitle("№" + well.name + ":  " + str(step / 10) + 'm', fontsize=30)

            dmin = dept[np.isfinite(kden)][0]
            dmax = dept[np.isfinite(kden)][-1]

            try:
                f[0].set_ylim(well.tops[0] - 5, well.tops[2] + 5)
            except Exception as exc:
                print(well)
                print(exc)
            pass
            f[0].plot(well.curves["GK"][~np.isnan(well.curves["GK"])], well.curves["DEPT"][~np.isnan(well.curves["GK"])],
                      'r', label='GK')
            f[0].set_xlabel("GK", fontsize=16)

            zone = np.logical_and(dept >= dmin - 5, dept <= dmax + 5)

            f[0].set_ylabel("DEPT, m", fontsize=16)
            f[0].invert_yaxis()
            f[0].grid()
            f[0].set_xlim(np.nanmin(well.curves["GK"][zone]), np.nanmax(well.curves["GK"][zone]))
            f[0].axhline(y=well.tops[0], linewidth=2, color='k')
            f[0].axhline(y=well.tops[1], linewidth=2, color='k')
            f[0].axhline(y=well.tops[2], linewidth=2, color='k')

            f[1].plot(well.curves["NGK"][~np.isnan(well.curves["NGK"])], well.curves["DEPT"][~np.isnan(well.curves["NGK"])],
                      'k', label='NGK')
            f[1].set_xlabel("NGK", fontsize=16)
            f[1].invert_yaxis()
            f[1].grid()
            f[1].set_xlim(0, np.nanmax(well.curves["NGK"][zone]))
            f[1].axhline(y=well.tops[0], linewidth=2, color='k')
            f[1].axhline(y=well.tops[1], linewidth=2, color='k')
            f[1].axhline(y=well.tops[2], linewidth=2, color='k')

            f[2].plot(well.curves["IK"][~np.isnan(well.curves["IK"])], well.curves["DEPT"][~np.isnan(well.curves["IK"])],
                      'k', label='IK')
            f[2].set_xlabel("IK", fontsize=16)
            f[2].set_ylabel("DEPT, m", fontsize=16)
            f[2].invert_yaxis()
            f[2].grid()
            f[2].set_xlim(np.nanmin(well.curves["IK"][zone]), np.nanmax(well.curves["IK"][zone]))
            f[2].axhline(y=well.tops[0], linewidth=2, color='k')
            f[2].axhline(y=well.tops[1], linewidth=2, color='k')
            f[2].axhline(y=well.tops[2], linewidth=2, color='k')

            f[3].plot(kbit, well.curves["DEPT"], 'g', marker='o', markersize=8, linestyle='None')
            f[3].plot(kbit1, well.curves["DEPT"], 'b')
            f[3].set_xlabel("MASS.SOIL", fontsize=16)
            f[3].invert_yaxis()
            f[3].grid()
            f[3].set_xlim(0, 15)
            f[3].axhline(y=well.tops[0], linewidth=2, color='k')
            f[3].axhline(y=well.tops[1], linewidth=2, color='k')
            f[3].axhline(y=well.tops[2], linewidth=2, color='k')

            f[4].plot(kden, well.curves["DEPT"], 'g', marker='o', markersize=8, linestyle='None')
            f[4].plot(kden1, well.curves["DEPT"], 'b')
            f[4].set_xlabel("VOL.DENSITY", fontsize=16)
            f[4].invert_yaxis()
            f[4].grid()
            f[4].set_xlim(1.6, 2.7)
            f[4].axhline(y=well.tops[0], linewidth=2, color='k')
            f[4].axhline(y=well.tops[1], linewidth=2, color='k')
            f[4].axhline(y=well.tops[2], linewidth=2, color='k')

            try:
                f[5].plot(well.curves["Lithotype"], well.curves["DEPT"], 'g', marker='o', markersize=8, linestyle='None')
                f[5].set_xlabel("Lithotype", fontsize=16)
                f[5].invert_yaxis()
                f[5].grid()
                f[5].axhline(y=well.tops[0], linewidth=2, color='k')
                f[5].axhline(y=well.tops[1], linewidth=2, color='k')
                f[5].axhline(y=well.tops[2], linewidth=2, color='k')
            except Exception as exc:
                print(well)
                print(exc)
            pass

            outPath = replaceSlash(run_path + 'output_data/' + well.index)
            if not os.path.exists(outPath): os.makedirs(outPath)
            plt.savefig(replaceSlash(outPath + '/' + str(well.name) + ".png"))

            las = copy.deepcopy(lasio.LASFile())
            las.well['WELL'].value = well.name

            las.add_curve("DEPT", data=well.curves["DEPT"], descr='')
            for name in well.curves.keys():
                if name != "DEPT":
                    las.add_curve(name, data=well.curves[name], descr='')

            with open(replaceSlash(outPath + '/' + str(well.name) + '.las'), mode='w', encoding='utf-8') as f:
                las.write(f, version=2.0)

            workbook = xlsxwriter.Workbook(replaceSlash(outPath + '/' + "Results.xlsx"))
            worksheet = workbook.add_worksheet('KFB')
            bold = workbook.add_format({'bold': True})
            worksheet.write('A1', 'Well', bold)
            worksheet.write('B1', 'Shift, m.', bold)

            row = 1
            for wellData in Res_table:
                for position, value in enumerate(wellData):
                    worksheet.write(row, position, value)
                row += 1
            workbook.close()

            progress.next()
        except BaseException as e:
            logging.error(pattern_text_log, str(well.index), str(e))
            Err_count += 1

    if Err_count == 0:
        logging.addLevelName(100, "INFO")
        logging.log(100, pattern_text_log, "", "No errors in data")

    print()
    print()
    print("Interpretation successfully finished")
    print()
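
get_linking() assumes a run folder laid out roughly as follows; the layout is inferred from the paths built in the snippet and the names here are illustrative:

# app/modules/second/<run_id>/
#     input_data/wellLogs/<index>/        LAS files per well group
#     input_data/wellCore/<index>/        core data as JSON
#     input_data/stratigraphy/<index>/    formation tops as JSON
#     output_data/                        Report.txt, plots, exported LAS files
get_linking(run_id=42, wells_list=['field_A'])
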
Example #21
File: test_api.py Project: kwinkunks/lasio
def test_append_curve_and_item():
    las = lasio.LASFile()
    data = [1, 2, 3]
    las.append_curve("TEST1", data=data)
    las.append_curve_item(lasio.CurveItem("TEST2", data=data))
    assert (las["TEST1"] == las["TEST2"]).all()
Example #23
def spliced_las_writer(CORRECTED_LAS_NAME, single_well):

    ##############################################################
    # INSTANTIATE A NEW .LAS FILE OBJECT
    ##############################################################
    las = lasio.LASFile()

    ##############################################################
    # ADD CUSTOM COMMENTS...
    # TODO: The interpolation and printing of custom comments / labels does not quite work yet.
    ##############################################################
    las.sections.update({
        "Comments_1": "",
        "Comments_2": "",
        "Comments_3": "",
        "Comments_4": "",
        "Comments_5": ""
    })
    key_order = ("Comments_1", "Version", "Comments_2", "Well", "Comments_3",
                 "Curves", "Comments_4", "Parameter", "Comments_5", "Other")
    las.sections = dict((k, las.sections[k]) for k in key_order)
    # print(las.sections.keys())

    # Comments_1
    line1 = f"" + "\n"
    line2 = f"# LAS WORX (tm) v17.01.12" + "\n"
    line3 = f"# Original File Owner: ANADARKO" + "\n"
    line4 = f"" + "\n"
    # ~Version ---------------------------------------------------

    # Comments_2
    line5 = f"" + "\n"
    line6 = f"# MNEM UNIT        VALUE/NAME              DESCRIPTION    " + "\n"
    line7 = f"# ---- ---- -------------------------   ------------------" + "\n"
    line8 = f"" + "\n"
    # ~Well ------------------------------------------------------

    # Comments_3
    line9 = f"" + "\n"
    line10 = f"# MNEM UNIT API CODES               DESCRIPTION          " + "\n"
    line11 = f"# ---- ---- ---------   ---------------------------------" + "\n"
    line12 = f"" + "\n"
    # ~Curves ----------------------------------------------------

    # Comments_4
    line13 = f"" + "\n"
    line14 = f"# MNEM UNIT       VALUE/NAME              DESCRIPTION    " + "\n"
    line15 = f"# ---- ---- -----------------------   -------------------" + "\n"
    line16 = f"" + "\n"

    # Comments_5
    # Headers / labels for the curve data should go here.

    ##############################################################
    # SET ALL HEADERS AND DATA
    ##############################################################
    las.sections["Comments_1"] = line1 + line2 + line3 + line4
    # ~Version ---------------------------------------------------
    las.version["VERS"] = lasio.HeaderItem("VERS", value="2.0")
    las.version["WRAP"] = lasio.HeaderItem("WRAP", value="NO")

    las.sections["Comments_2"] = line5 + line6 + line7 + line8
    # ~Well ------------------------------------------------------

    las.well["STRT"] = lasio.HeaderItem(mnemonic="STRT",
                                        value=single_well.top_depth,
                                        descr="START DEPTH")
    las.well["STOP"] = lasio.HeaderItem(mnemonic="STOP",
                                        value=single_well.bottom_depth,
                                        descr="STOP DEPTH")
    las.well["STEP"] = lasio.HeaderItem(mnemonic="STEP",
                                        value=single_well.step,
                                        descr="STEP")

    las.well["CNTY"] = lasio.HeaderItem(mnemonic="CNTY",
                                        value=single_well.county,
                                        descr="COUNTY")
    las.well["SRVC"] = lasio.HeaderItem(mnemonic="SRVC",
                                        value=single_well.logging_contractor,
                                        descr="SERVICE COMPANY")
    las.well["UWI"] = lasio.HeaderItem(mnemonic="UWI",
                                       value=single_well.uwi,
                                       descr="UNIQUE WELL ID")
    las.well["WELL"] = lasio.HeaderItem(mnemonic="WELL",
                                        value=single_well.wellname,
                                        descr="WELL NAME")

    # Adding add'l headers via attribute dot method will not work; must use
    # item-style access...
    las.well["LAT"] = lasio.HeaderItem(mnemonic="LAT",
                                       value=single_well.lat,
                                       descr="LATITUDE")
    las.well["LON"] = lasio.HeaderItem(mnemonic="LON",
                                       value=single_well.lon,
                                       descr="LONGITUDE")

    # Adding header for NULL values
    las.well["NULL"] = lasio.HeaderItem(mnemonic="NULL",
                                        value=ls_config.MISSING,
                                        descr="NULL VALUE")

    las.sections["Comments_3"] = line9 + line10 + line11 + line12
    # ~Curves ----------------------------------------------------
    # Curve description
    single_well.data.columns = single_well.data.columns.map(str.upper)
    col_names = single_well.data.columns.tolist()
    las.curves["DEPT"] = lasio.HeaderItem(mnemonic="DEPT", descr="Depth")
    for col_name in col_names:
        if col_name in ls_config.CURVE_DESC.keys():
            las.curves[col_name] = lasio.HeaderItem(
                mnemonic=col_name, descr=ls_config.CURVE_DESC[col_name])
        else:
            las.curves[col_name] = lasio.HeaderItem(mnemonic=col_name,
                                                    descr="")

    las.sections["Comments_4"] = line13 + line14 + line15 + line16
    # ~Params ----------------------------------------------------

    # las.params['BHT'] = lasio.HeaderItem(
    #     mnemonic='BHT',
    #     value=single_well.bottom_hole_pressure,
    #     descr='BOTTOM HOLE TEMPERATURE')
    # las.params['BS'] = lasio.HeaderItem(
    #     mnemonic='BS',
    #     value=single_well.bit_size,
    #     descr='BIT SIZE')
    las.params["LCNM"] = lasio.HeaderItem(mnemonic="LCNM",
                                          value=single_well.logging_contractor,
                                          descr="LOGGING CONTRACTOR")
    # las.params['RMF'] = lasio.HeaderItem(
    #     mnemonic='RMF',
    #     value= single_well.mud_filtrate_resistivity,
    #     descr='MUD FILTRATE RESISTIVITY')
    las.params["DFD"] = lasio.HeaderItem(mnemonic="DFD",
                                         value=single_well.mud_density,
                                         descr="DRILL FLUID DENSITY")
    las.params["MRT"] = lasio.HeaderItem(mnemonic="MRT",
                                         value=single_well.max_rec_temp,
                                         descr="MAX REC TEMP")
    las.params["RMS"] = lasio.HeaderItem(mnemonic="RMS",
                                         value=single_well.mud_resistivity,
                                         descr="MUD RESISTIVITY")
    las.params["MST"] = lasio.HeaderItem(mnemonic="MST",
                                         value=single_well.mud_temp,
                                         descr="MUD TEMP")
    las.params["MFST"] = lasio.HeaderItem(
        mnemonic="MFST",
        value=single_well.mud_density,
        descr="DRILL FLUID DENSITY")  # mud_density TWICE??

    names = las.curves.keys()

    # ~Other -----------------------------------------------------
    # ~ASCII -----------------------------------------------------
    # single_well.data = single_well.data[names].set_index('DEPT')
    las.set_data(single_well.data, names=names)

    ##############################################################
    # WRITE ALL HEADERS AND DATA
    ##############################################################
    las.write(CORRECTED_LAS_NAME, version=2, fmt="%.3f")
Example #24
    def to_lasio(self, keys=None, basis=None):
        """
        Makes a lasio object from the current well.

        Args:
            basis (ndarray): Optional. The basis to export the curves in. If
                you don't specify one, it will survey all the curves with
                ``survey_basis()``.
            keys (list): List of strings: the keys of the data items to
                include, if not all of them. You can have nested lists, such
                as you might use for ``tracks`` in ``well.plot()``.

        Returns:
            lasio. The lasio object.
        """

        # Create an empty lasio object.
        l = lasio.LASFile()
        l.well.DATE = str(datetime.datetime.today())

        # Deal with header.
        for obj, dic in LAS_FIELDS.items():
            if obj == 'data':
                continue
            for attr, (sect, item) in dic.items():
                value = getattr(getattr(self, obj), attr, None)
                try:
                    getattr(l, sect)[item].value = value
                except:
                    h = lasio.HeaderItem(item, "", value, "")
                    getattr(l, sect)[item] = h

        # Clear curves from header portion.
        l.header['Curves'] = []

        # Add a depth basis.
        if basis is None:
            basis = self.survey_basis(keys=keys)
        try:
            l.add_curve('DEPT', basis)
        except:
            raise Exception("Please provide a depth basis.")

        # Add meta from basis.
        setattr(l.well, 'STRT', basis[0])
        setattr(l.well, 'STOP', basis[-1])
        setattr(l.well, 'STEP', basis[1] - basis[0])

        # Add data entities.
        other = ''

        if keys is None:
            keys = [k for k, v in self.data.items() if isinstance(v, Curve)]
        else:
            keys = utils.flatten_list(keys)

        for k in keys:
            d = self.data[k]
            if getattr(d, 'null', None) is not None:
                d[np.isnan(d)] = d.null
            try:
                new_data = np.copy(d.to_basis_like(basis))
            except:
                # Basis shift failed; is probably not a curve
                pass
            try:
                descr = getattr(d, 'description', '')
                l.add_curve(k.upper(), new_data, unit=d.units, descr=descr)
            except:
                try:
                    # Treat as OTHER
                    other += "{}\n".format(k.upper()) + d.to_csv()
                except:
                    pass

        # Write OTHER, if any.
        if other:
            l.other = other

        return l
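
Hypothetical usage, assuming w is a well object that exposes the to_lasio() method above (for example a welly-style Well with LAS_FIELDS metadata and Curve data attached):

las = w.to_lasio(keys=['GR', 'RHOB'])
las.write('exported.las', version=2.0)
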
Example #25
import lasio
import numpy as np
from datetime import datetime

key = input("Please enter the name of the legend: ")
data = input("Please enter the name of the sheet to be LAS'd: ")
file_name = input("Please enter the name of the save file: ")
# check to make sure they are .csv files
if not key.endswith(".csv"):
    key += ".csv"
if not data.endswith(".csv"):
    data += ".csv"

# load the data from the key csv
keys = np.loadtxt(open(key, "r"), dtype="str", delimiter=",", skiprows=1)

# load the data from the datasheet
dataset = np.genfromtxt(data, delimiter=',', skip_header=True)[:, 0:]

# assign necessary attributes to xrf_las object
xrf_las = lasio.LASFile()
xrf_las.version.WRAP  # the default wrap mode ("NO"); this bare access does not change the file
xrf_las.well.DATE = str(datetime.today())
xrf_las.well.API = str("")
xrf_las.well.COMP = ""
xrf_las.well.WELL = ""

# iterate over all keys, and add curve of corresponding data column
for i in range(len(keys)):
#   on the first pass, we're adding ("Depth (ft)", The Column of Depth Data, "ft", "Value") to our xrf_las object
#   on the second pass, we're adding ("%Na", The Column of Na Data, "wt_pct", "XRF Sodium") to our xrf_las object
    xrf_las.add_curve(keys[i][0], dataset[:,i], unit=keys[i][1], descr=keys[i][2])

# write the xrf_las object into an .las file specified by user
xrf_las.write(str(file_name + ".las"), STEP=1, version=1.2)
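
The legend CSV read into keys above is assumed to have one header row and three columns per curve: mnemonic, unit and description. A hypothetical legend matching the comments in the loop would look like:

# mnemonic,unit,description
# Depth (ft),ft,Value
# %Na,wt_pct,XRF Sodium
# %Mg,wt_pct,XRF Magnesium

Each row i then feeds keys[i][0], keys[i][1] and keys[i][2] into xrf_las.add_curve().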