def test_dupnames(self):
    """Duplicate curve/parameter mnemonics must be disambiguated with numeric
    suffixes (SFL1, NPHI1, NPHI2, MUD1) and the 2-D data array must match."""
    log = las.LASReader('las_files/dupnames.las')
    # (attribute, units, data, value, descr) for every expected curve item.
    expected_curves = (
        ('DEPT', 'M', '', '', '1 DEPTH'),
        ('DT', 'US/M', '60 520 32 00', '60 520 32 00', '2 SONIC TRANSIT TIME'),
        ('RHOB', 'K/M3', '45 350 01 00', '45 350 01 00', '3 BULK DENSITY'),
        ('NPHI', 'V/V', '42 890 00 00', '42 890 00 00', '4 NEUTRON POROSITY'),
        ('SFL', 'OHMM', '07 220 04 00', '07 220 04 00', '5 SHALLOW RESISTIVITY'),
        ('SFL1', 'OHMM', '07 222 01 00', '07 222 01 00', '6 DUP NAME'),
        ('NPHI1', 'V/V', '42 890 00 00', '42 890 00 00', '7 DUP NAME'),
        ('NPHI2', 'V/V', '42 890 00 00', '42 890 00 00', '8 DUP NAME'),
    )
    for attr, units, data, value, descr in expected_curves:
        self.check_item(getattr(log.curves, attr),
                        units=units, data=data, value=value, descr=descr)
    # Duplicated parameter mnemonics get the same suffix treatment.
    expected_params = (
        ('MUD', 'GEL CHEM', 'MUD TYPE'),
        ('MUD1', 'MUCK', 'MORE MUD TYPE'),
    )
    for attr, text, descr in expected_params:
        self.check_item(getattr(log.parameters, attr),
                        units='', data=text, value=text, descr=descr)
    np.testing.assert_array_equal(log.data2d, dupnames_data2d)
def parse_lasfile(lasfile): # logger.info('Retrieving data from LAS file ' + lasfile) filename = os.path.splitext(os.path.basename(lasfile))[0] new_folder_path = os.path.join(os.path.dirname(lasfile), 'outputDir', filename) if not os.path.isdir(new_folder_path): os.makedirs(new_folder_path) csvfile = os.path.join(new_folder_path, ''.join([filename, '.csv'])) jsonfile = os.path.join(new_folder_path, ''.join([filename, '.json'])) try: metadata = {} print(('Retrieving data from LAS file ' + lasfile)) retrieved_data = pd.DataFrame() log = las.LASReader(lasfile) # VERSION INFORMATION version_info = log.version.items field_names = version_info.keys() metadata['Version Information'] = {} for fn in field_names: field = version_info.get(fn) metadata['Version Information'][fn] = {} metadata['Version Information'][fn]['mnemonic'] = field.name metadata['Version Information'][fn]['units'] = field.units metadata['Version Information'][fn]['value'] = field.value metadata['Version Information'][fn]['description'] = field.descr # WELL INORMATION fields = log.well.items field_names = fields.keys() metadata['Well Information'] = {} for fn in field_names: field = fields.get(fn) metadata['Well Information'][fn] = {} metadata['Well Information'][fn]['mnemonic'] = field.name metadata['Well Information'][fn]['units'] = field.units metadata['Well Information'][fn]['value'] = field.value metadata['Well Information'][fn]['description'] = field.descr # LOG PARAMETERS log_parameters = log.parameters.items param_names = log_parameters.keys() metadata['Parameters'] = {} for pn in param_names: log_parameter = log_parameters.get(pn) metadata['Parameters'][pn] = {} metadata['Parameters'][pn]['mnemonic'] = log_parameter.name metadata['Parameters'][pn]['units'] = log_parameter.units metadata['Parameters'][pn]['value'] = log_parameter.data metadata['Parameters'][pn]['description'] = log_parameter.descr # ASCII curves = log.curves.items curve_names = curves.keys() print(type(log.data)) for cn in 
curve_names: curve = curves.get(cn) metadata['ASCII'][cn] = {} metadata['ASCII'][cn]['mnemonic'] = curve.name metadata['ASCII'][cn]['units'] = curve.units metadata['ASCII'][cn]['value'] = curve.value metadata['ASCII'][cn]['description'] = curve.descr retrieved_data[cn] = pd.Series(log.data[cn]) save_metadata(metadata, jsonfile) # retrieved_data = retrieved_data.replace(-999.25, np.nan) retrieved_data.to_csv(csvfile, index=False, float_format='%.5f') replace_null_values_in_csv(csvfile, -999.25) except Exception as e: # logger.error(e) print(e) file_contents = read_file_contents(lasfile) clean_file_contents = remove_comments_blanklines(file_contents) metadata = read_metadata_sections(clean_file_contents) file_overview = save_curve_data(clean_file_contents, metadata, csvfile) metadata.update(file_overview) save_metadata(metadata, jsonfile)
def grafica(): if request.method == 'POST': #LEER DATOS filename = request.form['filename'] try: salinidad = float(request.form['salinidad']) except: salinidad = 100 try: pma = float(request.form['dma']) except: pma = 2.45 try: pf = float(request.form['dfl']) except: pf = 1.7 try: lma = float(request.form['lma']) except: lma = 120 try: lfl = float(request.form['lfl']) except: lfl = 80 try: ftort = float(request.form['ftort']) except: ftort = 1 try: expc = float(request.form['expc']) except: expc = 2 try: exps = float(request.form['exps']) except: exps = 2 try: vgr = float(request.form['vgr']) except: vgr = 1 try: vsp = float(request.form['vsp']) except: vsp = 1 try: vden = float(request.form['vden']) except: vden = 0 try: vnphi = float(request.form['vnphi']) except: vnphi = 0 try: vrp = float(request.form['vrp']) except: vrp = 1 try: vrm = float(request.form['vrm']) except: vrm = 1 try: vrs = float(request.form['vrs']) except: vrs = 1 try: nclust = int(request.form['nclust']) except: nclust = -1 try: tipoVsh = request.form['tipo'] except: tipoVsh = 'lineal' #Buscar curvas def buscarCurva(tipo, dicNombres, dh, unidad=False): for nombre in dicNombres[tipo]["nombre"]: i = 0 for curva in dh.Nombre.values: i += 1 #print (nombre, curva) if (nombre in curva): #print('Se encontró un valor por nombre') return i - 1 for desc in dicNombres[tipo]["desc"]: i = 0 for curva in dh.Descripcion.values: #print(desc, curva) if (desc in curva): #print('Se encontró un valor por descripción') return i - 1 if (unidad): for unidadad in dicNombres[tipo]["unidad"]: i = 0 for curva in dh.Unidades.values: i += 1 #print(desc, curva) if (unidad in curva): #print('Se encontró un valor por descripción') return i - 1 #Cálculos #Temperatura def calcTempIntervalo(bht, ts, pm, cprof): ti = (((bht - ts) / pm) * cprof) + ts return ti #RESISTIVIDAD def calcRi(resistividad, curvaTemp, ts): #resistividad puede ser rmf, rmc, rm #6.77 es en Farenheit si TS está en grados debe cambiarse por 21.5 Rint = 
resistividad * ((ts + 6.77) / (curvaTemp + 6.77)) return Rint def calcRmfEq(Rmfi, tf): temp1 = Rmfi * (10**((0.426) / (np.log((tf) / (50.8))))) temp2 = (1) / (np.log((tf) / (19.9))) temp3 = 0.131 * (10**((temp2) - 2)) Rmfe = (temp1 - temp3) / (1 + (0.5 * Rmfi)) return Rmfe def calcRwEq(Rmfe, SSP): #Está en Farenheit #!!!!!!!!!!!!!!!!!!!!checar antes de entregar!!!!!!!!!!!!!!!!!!!! K = 65 + 0.24 Rwe = 10**((K * np.log(Rmfe) + SSP) / (K)) return Rwe def calcRw(Rwe, bht): temp1 = Rwe + (0.131 * 10**((1 / (np.log(bht / 19.9))) - 2)) temp2 = -0.5 * Rwe + (10**((0.0426) / (np.log(bht / 50.8)))) Rw = temp1 / temp2 return Rw def calcRxo(curvaProf, curvaSom): arr = abs(curvaProf - curvaSom) result = np.where(arr == np.amax(arr)) return curvaSom[result[0][0]] #VOLUMEN DE ARCILLA def calcVArcilla(GR, metodo): IGR = (GR - min(GR)) / (max(GR) - min(GR)) if metodo == 'lineal': return IGR elif (metodo == 'larinovj'): Vsh = 0.083 * ((2**(3.71 * IGR)) - 1) return Vsh elif (metodo == 'clavier'): Vsh = 1.7 * math.sqrt((3.38) * ((IGR + 0.7)**2)) return Vsh elif (metodo == 'larinovv'): Vsh = 0.33 * ((2**(2 * IGR)) - 1) return Vsh #CORRECCIÓN DE POROSIDAD def calcCurvaPorDen(pma, pf, RHOB): #Curva de porosidad densidad pord = (pma - RHOB) / (pma - pf) #pord>1=1 return pord #Curva de porosidad total def calcPorTot(pord, NPHI): port = (pord - NPHI) / 2 return port #Curva de porosidad primaria o de matriz def calcPorP(lma, lfl, DT): porp = (DT - lma) / (lfl - lma) return porp #Curva de porosidad efectiva def calcPorEfec(port, Vsh): return (port * (1 - Vsh)) #Saturaciones def calcSw(a, m, n, Rw, Rt, por): temp1 = Rt * ((por)**m) Sw = ((a * Rw) / (temp1))**(1 / n) return Sw def calcSxo(a, m, n, Rxo, Rmf, por): temp1 = Rxo * ((por)**m) Sxo = ((a * Rmf) / (temp1))**(1 / n) return Sxo #Separación de capas def evaluarScore(score, porc): temp = 100 for i in range(len(score)): temp = abs( (abs(score[i]) - abs(score[i + 1])) * (100 / score[i])) print(temp) if (temp < porc): return i + 1 if (i == 
(len(score) - 1)): return i + 1 #Clasificación litológica def crearCurvas(): pArena = np.array([[-1.5, 2.66], [-1, 2.64], [0.5, 2.605], [2, 2.57], [5, 2.51], [8, 2.46], [10, 2.43], [20, 2.26], [25, 2.18], [31, 2.08], [36, 2.0], [40.5, 1.92]]) pCaliza = np.array([[3, 2.66], [36, 2.1]]) pDolomita = np.array([[4, 2.86], [8.5, 2.82], [13, 2.76], [18, 2.68], [23, 2.59], [27, 2.5], [32, 2.4], [36, 2.3], [38, 2.26], [41, 2.18], [44, 2.1]]) pLutita = np.array([[30, 2.8], [40, 2.6], [41, 2.5]]) arena = [] caliza = [] dolomita = [] lutita = [] za = np.polyfit(pArena[:, 0], pArena[:, 1], 5) pa = np.poly1d(za) zc = np.polyfit(pCaliza[:, 0], pCaliza[:, 1], 1) pc = np.poly1d(zc) zd = np.polyfit(pDolomita[:, 0], pDolomita[:, 1], 5) pd = np.poly1d(zd) zl = np.polyfit(pLutita[:, 0], pLutita[:, 1], 1) pl = np.poly1d(zl) for i in range(45): arena.append([0.93 * i - 1.5, pa(0.93 * i - 1.5)]) caliza.append([1 * i, pc(1 * i)]) dolomita.append([0.94 * i + 2.5, pd(0.94 * i + 2.5)]) for i in range(20): lutita.append([0.7 * i + 30, pc(1 * i) + 0.1]) arena = np.array(arena) caliza = np.array(caliza) dolomita = np.array(dolomita) lutita = np.array(lutita) return (arena, caliza, dolomita, lutita) def e_dist(a, b, metric='euclidean'): a = np.asarray(a) b = np.atleast_2d(b) a_dim = a.ndim b_dim = b.ndim if a_dim == 1: a = a.reshape(1, 1, a.shape[0]) if a_dim >= 2: a = a.reshape(np.prod(a.shape[:-1]), 1, a.shape[-1]) if b_dim > 2: b = b.reshape(np.prod(b.shape[:-1]), b.shape[-1]) diff = a - b dist_arr = np.einsum('ijk,ijk->ij', diff, diff) if metric[:1] == 'e': dist_arr = np.sqrt(dist_arr) dist_arr = np.squeeze(dist_arr) return dist_arr def clasificarLito(arena, caliza, dolomita, lutita, dato1, dato2): minimo = [] tarena = np.copy(arena) tcaliza = np.copy(caliza) tdolomita = np.copy(dolomita) tlutita = np.copy(lutita) tarena[:, 0] = arena[:, 0] / 10 tcaliza[:, 0] = caliza[:, 0] / 10 tdolomita[:, 0] = dolomita[:, 0] / 10 tlutita[0][0] = lutita[0][0] / 10 dato1 = dato1 / 10 minArena = 
(e_dist([dato1, dato2], tarena)) minCaliza = (e_dist([dato1, dato2], tcaliza)) minDolomita = (e_dist([dato1, dato2], tdolomita)) minLutita = (e_dist([dato1, dato2], tlutita)) minimo.append(min(minArena)) minimo.append(min(minCaliza)) minimo.append(min(minDolomita)) minimo.append(min(minLutita)) #return minimo if np.argmin(minimo) == 0: mini = np.argmin(minArena) return ("Arena", mini) elif np.argmin(minimo) == 1: mini = np.argmin(minCaliza) return ("Caliza", mini) elif np.argmin(minimo) == 2: mini = np.argmin(minDolomita) return ("Dolomita", mini) elif np.argmin(minimo) == 3: mini = 0 return ("Lutita", mini) def dibujarRegistros(df): fig, ax = plt.subplots(nrows=1, ncols=len(df.columns.values), figsize=(20, 10), sharey=True) fig.suptitle("Registros geofísicos de pozos", fontsize=22) fig.subplots_adjust(top=0.75, wspace=0.2) i = 0 if 'Clasif' in df.columns: for registro in (df.columns.values[:-1]): color = 'black' ax10 = ax[i].twiny() #ax10.set_xlim(min(df[registro]),max(df[registro])) #ax10.spines['top'].set_position(('outward',0)) ax10.plot(df[registro], df.index.values, color=color) ax10.set_xlabel(registro + ' [' ']', color=color) #ax10.tick_params(axis='x', colors=color) ax10.invert_yaxis() ax10.grid(True) i += 1 ax10 = ax[len(df.columns) - 1].twiny() a = df.Clasif.values data = df.Clasif.values.reshape(len(df.Clasif.values), 1) cmap = plt.get_cmap('Dark2', np.max(data) - np.min(data) + 1) mat = ax10.matshow(np.repeat(data, 300, 1), cmap=cmap, vmin=np.min(data) - .5, vmax=np.max(data) + .5) cax = plt.colorbar(mat, ticks=np.arange(np.min(data), np.max(data) + 1)) plt.gca().xaxis.set_major_locator(plt.NullLocator()) ax10.set_xlabel(registro + ' [' ']', color=color) #ax.figure.set_size_inches(300, 500) #ax10.tick_params(axis='x', colors=color) ax10.invert_yaxis() ax10.grid(True) i += 1 else: for registro in (df.columns.values): color = 'black' ax10 = ax[i].twiny() #ax10.set_xlim(min(df[registro]),max(df[registro])) #ax10.spines['top'].set_position(('outward',0)) 
ax10.plot(df[registro], df.index.values, color=color) ax10.set_xlabel(registro + ' [' ']', color=color) #ax10.tick_params(axis='x', colors=color) ax10.invert_yaxis() ax10.grid(True) i += 1 #LEER ARCHIVO log = las.LASReader('./info/{}'.format(filename)) dat = pd.DataFrame(data=log.data, columns=log.curves.names) dat = dat.replace(-999.00000, 0.0) #Se guarda el header en un diccionario dic = log.curves.items #Se crean arreglos de nombre, unidades y descripción ldescr = [] lunidades = [] lnombres = log.curves.names #Se guardan los datos dentro del arreglo for i in range(len(lnombres)): ldescr.append(dic[log.curves.names[i]].descr.upper()) lunidades.append(dic[log.curves.names[i]].units.upper()) data = { "Nombre": lnombres, "Descripcion": ldescr, "Unidades": lunidades } dh = pd.DataFrame(data) dicNombres = { "profundidad": { 'nombre': ["DEPT", "DEPTH"], 'desc': ["DEPT", "DEPTH"], #FALSE 'unidad': ["FT", "M"] }, "caliper": { 'nombre': [ "CALIPER", "CALI", "CAL", "DAC", "MSC", "CL", "TAC", "MCT", "EMS", "CCT", "XYT", "CCN", "DNSC", "DSNCM" ], 'desc': ["CALIPER", "CALI"], 'unidad': ["IN"] }, "sp": { 'nombre': ["SP"], 'desc': ["SP", "SPONTANEUS", "POTENCIAL"], 'unidad': ["MV", "V"] }, "gr": { 'nombre': [ "GR", "MCG", "MGS", "NGS", "NGT", "IPL", "GRT", "DGR", "DG", "SL", "HDS1", "RGD", "CWRD", "SGR" ], 'desc': ["GR", "GAMMA", "RAY"], 'unidad': ["GAPI", "API"] }, "rhob": { 'nombre': [ "RHOB", "APLS", "ZDL", "CDL", "SPeD", "SDL", "PDS", "MPD", "IPL", "CDT", "LDT", "ORD", "MDL", "DNSC", "ASLD" ], 'desc': ["DENSITY", "RHOB", "RHO"], 'unidad': ["G/C3"] }, "nphi": { 'nombre': [ "NPHI", "NPH", "CN", "DSN", "DSEN", "MDN", "IPL", "CNT", "CCN", "MNP", "DNSC", "CTN" ], 'desc': ["NEUTRON", "NEUT"], #FALSE 'unidad': ["V/V"] }, "rsom": { 'nombre': ["LL3", "SGRD", "SFL", "SLL", "LLS", "RLLS"], 'desc': ["SHALL"], #FALSE 'unidad': ["OHMMxxx"] }, "rmed": { 'nombre': ["R60O", "ILM", "RILM"], 'desc': ["MEDR", "MED"], #FALSE 'unidad': ["OHMMxxxx"] }, "rprof": { 'nombre': ["R85O", "ILD", "RILD", 
"DLL", "LLD", "RLLD"], 'desc': ["DEEPR", "DEEP"], #FALSE 'unidad': ["OHMMxxxx"] }, "dt": { 'nombre': [ "DT", "APX", "XMAC", "DAL", "AC", "BCS", "DAR", "FWS", "XACT", "CSS", "LCS", "MSS", "UGD", "DSI", "CST", "LST", "DNSC", "SONIC", "BAT" ], 'desc': ["DT", "SONIC"], 'unidad': ["US/F"] }, } n1 = [ 'DEPTH', 'CALIPER', 'GR', 'SP', 'RHOB', 'NPHI', 'RCERC', 'RMED', 'RPROF', 'DT' ] n2 = [ 'profundidad', 'caliper', 'gr', 'sp', 'rhob', 'nphi', 'rsom', 'rmed', 'rprof', 'dt' ] noSePuede = [] data = {} #Se rellena el vector "No se pudo encontrar" for i in range(len(n1)): try: data[n1[i]] = dat[log.curves.names[buscarCurva( n2[i], dicNombres, dh)]] #print ('Se encontró la curva '+n2[i]) except: noSePuede.append(n1[i]) #print ('No se han encontrado '+n2[i]) calcularSat = True calcularXPlot = True calcularRw = True calcularVsh = True calcularRxo = True calcularPort = True calcularPore = True calcularSw = True calcularSxo = True #Se determina qué cálculos se pueden realizar con base en las que se pudieron encontrar if ('DEPTH' in noSePuede or 'SP' in noSePuede): calcularRw = False calcularSat = False calcularSw = False if ('RPROF' in noSePuede): calcularSat = False calcularSw = False if ('RHOB' in noSePuede or 'NPHI' in noSePuede): calcularXPlot = False calcularPort = False calcularSw = False calcularSxo = False if ('GR' in noSePuede): calcularVsh = False calcularPore = False if ('RCERC' in noSePuede): calcularRxo = False calcularSxo = False df = pd.DataFrame(data=data) #Datos del archivo .LAS try: rmf = float(log.parameters.RMF.data) except: rmf = 0.95 try: rmc = float(log.parameters.RMC.data) except: rmc = 1.55 try: rm = float(log.parameters.RM.data) except: rm = 1.13 try: bht = float(log.parameters.BHT.data) except: bht = 3000 try: ts = float(log.parameters.MST.data) except: ts = 3000 try: pm = max(df.DEPTH) except: pm = 1000 #Aplicación de correcciones if ('GR' in df.columns): df['GR'] = df['GR'] * vgr if ('SP' in df.columns): df['SP'] = df['SP'] * vsp if ('RHOB' in 
df.columns): df['RHOB'] = df['RHOB'] + vden if ('NPHI' in df.columns): df['NPHI'] = df['NPHI'] + vnphi if ('RCERC' in df.columns): df['RCERC'] = df['RCERC'] * vrs if ('RMED' in df.columns): df['RMED'] = df['RMED'] * vrm if ('RPROF' in df.columns): df['RPROF'] = df['RPROF'] * vrp df1 = df[df.columns[1:]].copy() calcularXPlot = True if (calcularVsh): df['VSH'] = calcVArcilla(df.GR, tipoVsh) if (calcularPort): pord = calcCurvaPorDen(pma, pf, df.RHOB) df['PTOT'] = calcPorTot(pord, df.NPHI) if (calcularPore): df['PEfec'] = calcPorEfec(df.PTOT, df.VSH) if (calcularRw): df['TEMP'] = calcTempIntervalo(bht, ts, pm, df.DEPTH) curvaRmf = calcRi(rmf, df.TEMP, ts) tf = df.TEMP.values[df.SP.idxmin()] rmEq = calcRmfEq(curvaRmf, tf) rwEq = calcRwEq(rmEq, df.SP) curvaRw = calcRw(rwEq, bht) Rw = curvaRw[df.SP.idxmin()] if (calcularRxo): Rxo = calcRxo(df['RPROF'], df['RCERC']) if (calcularSw): df['Sw'] = calcSw(ftort, expc, exps, Rw, df['RPROF'], df['PTOT']) if (calcularSxo): df['Sxo'] = calcSxo(a, m, n, Rxo, rmf, df['PTOT']) #Se crea un dataframe y se realiza un preprocesamiento para su clasificación x = df1.values #returns a numpy array min_max_scaler = preprocessing.MinMaxScaler() x_scaled = min_max_scaler.fit_transform(x) df1nor = pd.DataFrame(x_scaled) df1nor.columns = df1.columns # #Se obtiene un arreglo de datos del dataframe de datos normalizados X = np.array(df1nor) # Se obtiene una predicción del modelo por cada muestra if (nclust == -1): #Se ajusta el número de clusters con la información del dataframe labels = MeanShift().fit_predict(X) else: kmeans = KMeans(n_clusters=nclust).fit(X) centroides = kmeans.cluster_centers_ labels = kmeans.predict(X) #Se agrega la clasificación realizada al dataframe original df['Clasif'] = labels #Se cuenta el número de clusters ncl = len(np.unique(labels)) #Creación de puntos de clasificación ptos = np.zeros(shape=(ncl, 2)) for i in range(ncl): temp = df[df.Clasif == i]['RHOB'].mean() temp2 = df[df.Clasif == i]['NPHI'].mean() ptos[i] = 
[temp, temp2] if (max(abs(ptos[:, 1])) < 1): ptos[:, 1] = ptos[:, 1] * 100 #Se crean las curvas de cada litología [arena, caliza, dolomita, lutita] = crearCurvas() lit = [] por = [] for i in range(len(ptos)): [temp1, temp2] = clasificarLito(arena, caliza, dolomita, lutita, ptos[i, 1], ptos[i, 0]) lit.append(temp1) por.append(temp2) #GRAFICAR plt.clf() #Imagen 1 img = io.BytesIO() #Gráfica i = 0 #Numero de gráficos if ('CALIPER' in df.columns or 'SP' in df.columns): i += 1 if ('GR' in df.columns): i += 1 if ('RCERC' in df.columns or 'RMED' in df.columns or 'RPROF' in df.columns): i += 1 if ('NPHI' in df.columns or 'RHOB' in df.columns): i += 1 if ('Clasif' in df.columns): i += 1 if ('DT' in df.columns): i += 1 if ('PTOT' in df.columns or 'PEfec' in df.columns): i += 1 if ('Sw' in df.columns or 'Sxo' in df.columns): i += 1 fig, ax = plt.subplots(nrows=1, ncols=i, figsize=(20, 10), sharey=True) fig.suptitle("Registros geofísicos de pozos", fontsize=22) fig.subplots_adjust(top=0.75, wspace=0.2) for axes in ax: axes.set_ylim(log.start, log.stop) axes.invert_yaxis() axes.yaxis.grid(True) axes.get_xaxis().set_visible(False) i = -1 #CARRIL 1 #CALI if ('CALIPER' in df.columns or 'SP' in df.columns): i += 1 if ('CALIPER' in df.columns): color = 'black' ax1 = ax[i].twiny() ax1.set_xlim(min(df.CALIPER), max(df.CALIPER)) ax1.spines['top'].set_position(('outward', 0)) ax1.plot(df.CALIPER, df.DEPTH, '--', color=color) ax1.set_xlabel('CALIPER', color=color) ax1.tick_params(axis='x', colors=color) ax1.grid(True) #SP if ('SP' in df.columns): color = 'blue' ax2 = ax[i].twiny() ax2.set_xlim(min(df.SP), max(df.SP)) ax2.spines['top'].set_position(('outward', 40)) ax2.plot(df.SP, df.DEPTH, color=color) ax2.set_xlabel('SP', color=color) ax2.tick_params(axis='x', colors=color) ax2.grid(True) #CARRIL 2 #GR if ('GR' in df.columns): i += 1 color = 'green' ax3 = ax[i].twiny() ax3.set_xlim(min(df.GR), max(df.GR)) ax3.spines['top'].set_position(('outward', 0)) ax3.plot(df.GR, df.DEPTH, 
color=color) ax3.set_xlabel('GR', color=color) ax3.tick_params(axis='x', colors=color) ax3.grid(True) # #CARRIL 3 if ('RCERC' in df.columns or 'RMED' in df.columns or 'RPROF' in df.columns): i += 1 #RPROFUNDA if ('RPROF' in df.columns): color = 'red' ax4 = ax[i].twiny() ax4.set_xlim(0.1, 10000) ax4.set_xscale('log') ax4.grid(True) ax4.spines['top'].set_position(('outward', 0)) ax4.set_xlabel('RPROF', color=color) ax4.plot(df.RPROF, df.DEPTH, color=color) ax4.tick_params(axis='x', colors=color) #RMEDIA if ('RMED' in df.columns): color = 'orange' ax5 = ax[i].twiny() ax5.set_xlim(0.1, 10000) ax5.set_xscale('log') ax5.grid(True) ax5.spines['top'].set_position(('outward', 40)) ax5.set_xlabel('RMED', color=color) ax5.plot(df.RMED, df.DEPTH, color=color) ax5.tick_params(axis='x', colors=color) #RSOMERA if ('RCERC' in df.columns): color = 'yellow' ax6 = ax[i].twiny() ax6.set_xlim(0.1, 10000) ax6.set_xscale('log') ax6.grid(True) ax6.spines['top'].set_position(('outward', 80)) ax6.set_xlabel('RSOM', color=color) ax6.plot(df.RCERC, df.DEPTH, color=color) ax6.tick_params(axis='x', colors=color) #CARRIL 4 if ('NPHI' in df.columns or 'RHOB' in df.columns): i += 1 #NPHI if ('NPHI' in df.columns): color = 'blue' ax7 = ax[i].twiny() ax7.set_xlim(0.45, -0.15) ax7.invert_xaxis() ax7.plot(df.NPHI, df.DEPTH, color=color) ax7.spines['top'].set_position(('outward', 0)) ax7.set_xlabel('NPHI', color=color) ax7.tick_params(axis='x', colors=color) #RHOB if ('RHOB' in df.columns): color2 = 'red' ax8 = ax[i].twiny() ax8.set_xlim(min(df.RHOB), max(df.RHOB)) ax8.plot(df.RHOB, df.DEPTH, label='RHOB', color=color2) ax8.spines['top'].set_position(('outward', 40)) ax8.set_xlabel('RHOB', color=color2) ax8.tick_params(axis='x', colors=color2) #CARRIL 5 #DT if ('DT' in df.columns): i += 1 color = 'purple' ax9 = ax[i].twiny() ax9.set_xlim(min(df.DT), max(df.DT)) ax9.invert_xaxis() ax9.plot(df.DT, df.DEPTH, color=color) ax9.spines['top'].set_position(('outward', 0)) ax9.set_xlabel('DT', color=color) 
ax9.tick_params(axis='x', colors=color) #CARRIL 6 #Porosidad total if ('PTOT' in df.columns or 'PEfec' in df.columns): i += 1 if ('PTOT' in df.columns): color = 'red' ax10 = ax[i].twiny() ax10.set_xlim(1, 0) ax10.spines['top'].set_position(('outward', 0)) ax10.plot(df.PTOT, df.DEPTH, color=color) ax10.fill_betweenx(df.DEPTH, 0, df.PTOT, color='lightcoral') ax10.set_xlabel('P.Tot', color=color) ax10.tick_params(axis='x', colors=color) ax10.grid(True) if ('PTOT' in df.columns): color = 'blue' ax11 = ax[i].twiny() ax11.set_xlim(1, 0) ax11.spines['top'].set_position(('outward', 40)) ax11.plot(df.PEfec, df.DEPTH, color=color) ax11.fill_betweenx(df.DEPTH, 0, df.PEfec, color='lightblue') ax11.set_xlabel('P.Efec', color=color) ax11.tick_params(axis='x', colors=color) ax11.grid(True) #CARRIL 7 if ('Sw' in df.columns or 'Sxo' in df.columns): i += 1 if ('Sw' in df.columns): color = 'blue' ax11 = ax[i].twiny() ax11.set_xlim(min(df.Sw), max(df.Sw)) ax11.spines['top'].set_position(('outward', 0)) ax11.plot(df.Sw, df.DEPTH, color=color) ax11.set_xlabel('Sw', color=color) ax11.tick_params(axis='x', colors=color) ax11.grid(True) if ('Sxo' in df.columns): color = 'lightgreen' ax11 = ax[i].twiny() ax11.set_xlim(min(df.Sxo), max(df.Sxo)) ax11.spines['top'].set_position(('outward', 40)) ax11.plot(df.Sxo, df.DEPTH, color=color) ax11.set_xlabel('Sxo', color=color) ax11.tick_params(axis='x', colors=color) ax11.grid(True) # CARRIL 8 if ('Clasif' in df.columns): i += 1 X = np.arange(0, 1, 0.1) Y = df.DEPTH.values Z = df.Clasif.values Z = Z.reshape(len(Z), 1) Z2 = np.repeat(Z, 10, 1) ax20 = ax[i] cmap = plt.get_cmap('Dark2', np.max(Z) - np.min(Z) + 1) c = ax20.pcolor(X, Y, Z2, cmap=cmap, vmin=np.min(Z) - .5, vmax=np.max(Z) + .5) cbar = fig.colorbar(c, ax=ax20, ticks=np.arange(np.min(Z), np.max(Z) + 1)) cbar.ax.set_yticklabels(lit) #fig.colorbar(c, ax=ax20, ticks=['Arena','Caliza','Lutita']) # cbar = fig.colorbar(cax, ticks=[-1, 0, 1]) # cbar.ax.set_yticklabels(['< -1', '0', '> 1']) 
plt.savefig(img, format='png') img.seek(0) plot_url = base64.b64encode(img.getvalue()).decode() # session['data']=plot_url #Imagen 2 img2 = io.BytesIO() fig, ax = plt.subplots(figsize=(15, 7), ) #plt.scatter (pArena[:,0], pArena[:,1]) ax.plot(arena[:, 0], arena[:, 1], '--', label='Arenisca') ax.plot(caliza[:, 0], caliza[:, 1], '--', label='Caliza') ax.plot(dolomita[:, 0], dolomita[:, 1], '--', label='Dolomita') ax.scatter(ptos[:, 1], ptos[:, 0]) ax.scatter(lutita[9, 0], lutita[9, 1], label='Lutita') # plt.scatter(lutita[0][0], lutita[0][1], label='Lutita') ax.set_ylabel('Densidad [g/cm3]') ax.set_xlabel('Porosidad de neutrón') ax.invert_yaxis() ax.legend() ax.grid() plt.savefig(img2, format='png') img2.seek(0) plot_url2 = base64.b64encode(img2.getvalue()).decode() return render_template('grafica.html', imagen={ 'imagen': plot_url, 'imagen2': plot_url2 })
import las
import json
import requests
import os
import fnmatch

# Query the fuzzylas service for a human-readable description of each curve
# mnemonic found in every LAS file under input_dir; write one
# "<mnemonic>,<description>" line per curve into well_info/<file>.txt.
url = 'http://fuzzylas.appspot.com/lookup?'
input_dir = '../geohack_well_data'
output_dir = 'well_info'

if not os.path.exists(output_dir):
    os.makedirs(output_dir)

for path, dirs, files in os.walk(input_dir):
    for f in fnmatch.filter(files, '*.las'):
        reader = las.LASReader(os.path.join(path, f))
        out_path = os.path.join(output_dir, f.split('.')[0]) + ".txt"
        with open(out_path, 'w') as output:
            for name in reader.data.dtype.names:
                response = requests.get(url + 'mnemonic=%s&guesses=1&format=json' % name)
                payload = response.json()[0]
                if name not in payload:
                    continue
                output.write(name + ',' + payload[name][0]['description'] + '\n')
def __init__(self, fileDir): self.damage_Tag = '' self.lines = [] # 生成的成果list self.log = las.LASReader(fileDir, null_subs=np.nan) self.fig1 = plt.figure('MIT油套管快速评价系统', figsize=(12, 8)) xls = xlrd.open_workbook(".\\casing_data.xls") table = xls.sheet_by_name('Sheet1') # 注意下面几个的类型为excel单元格对象 self.outer_diameter = table.cell(0, 2) self.inner_diameter = table.cell(0, 3) self.thickness = table.cell(0, 4) self.scale_left = float(self.inner_diameter.value) / 2 - 20 self.scale_right = float(self.inner_diameter.value) / 2 + 120 self.scale_left_min = float(self.inner_diameter.value) - 30 self.scale_right_max = float(self.inner_diameter.value) + 30 # 定义RadioButtons axcolor = 'lightgoldenrodyellow' rax = plt.axes([0.75, 0.05, 0.12, 0.07], facecolor=axcolor) radio = RadioButtons( rax, (u'Penetration', u'Projection', u'Transformation'), active=-1, activecolor='purple') plt.subplots_adjust(bottom=0.15, top=0.95, right=0.9, left=0.10, wspace=0.60) radio.on_clicked(self.actionfunc) ##################################################################################### # 坐标轴1 self.ax1 = plt.subplot(141) # 下面赋值加逗号是为了使得type(self.line1)为matplotlib.lines.Line2D对象,而不是list self.line1, = self.ax1.plot(self.log.data['D01'], self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D02'] + 2.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D03'] + 5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D04'] + 7.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D05'] + 10, self.log.data['DEPT'], 'r-', lw=0.3) self.ax1.plot(self.log.data['D06'] + 12.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D07'] + 15, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D08'] + 17.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D09'] + 20, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D10'] + 22.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax1.plot(self.log.data['D11'] + 
25, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D12'] + 27.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D13'] + 30, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D14'] + 32.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D15'] + 35, self.log.data['DEPT'], 'r-', lw=0.3) self.ax1.plot(self.log.data['D16'] + 37.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D17'] + 40, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D18'] + 42.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D19'] + 45, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D20'] + 47.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax1.plot(self.log.data['D21'] + 50, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D22'] + 52.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D23'] + 55, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D24'] + 57.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D25'] + 60, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D26'] + 62.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax1.plot(self.log.data['D27'] + 65, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D28'] + 67.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D29'] + 70, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D30'] + 72.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D31'] + 75, self.log.data['DEPT'], 'r-', lw=0.3) self.ax1.plot(self.log.data['D32'] + 77.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D33'] + 80, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D34'] + 82.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D35'] + 85, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D36'] + 87.5, 
self.log.data['DEPT'], 'r-', lw=0.3) self.ax1.plot(self.log.data['D37'] + 90, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D38'] + 92.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D39'] + 95, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.plot(self.log.data['D40'] + 97.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax1.set_xlim(self.scale_left, self.scale_right) self.ax1.set_ylim(self.log.start, self.log.stop) self.ax1.invert_yaxis() span1 = SpanSelector(self.ax1, self.onselect1, 'vertical', useblit=False, rectprops=dict(alpha=0.5, facecolor='yellow'), span_stays=True) # plt.ylabel(self.log.curves.DEPT.descr + " (%s)" % self.log.curves.DEPT.units) # plt.xlabel(self.log.curves.D01.descr + " (%s)" % self.log.curves.D01.units) # plt.title(self.log.well.WELL.data) plt.ylabel('Measured Depth(m)') plt.title('Original') plt.gca().spines['bottom'].set_position(('data', 0)) plt.gca().spines['top'].set_position(('data', 0)) plt.grid() ##################################################################################### # 坐标轴2 self.ax2 = plt.subplot(142) self.line2, = self.ax2.plot(self.log.data['D01'], self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D02'] + 2.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D03'] + 5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D04'] + 7.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D05'] + 10, self.log.data['DEPT'], 'r-', lw=0.3) self.ax2.plot(self.log.data['D06'] + 12.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D07'] + 15, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D08'] + 17.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D09'] + 20, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D10'] + 22.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax2.plot(self.log.data['D11'] + 25, self.log.data['DEPT'], 'g-', lw=0.3) 
self.ax2.plot(self.log.data['D12'] + 27.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D13'] + 30, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D14'] + 32.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D15'] + 35, self.log.data['DEPT'], 'r-', lw=0.3) self.ax2.plot(self.log.data['D16'] + 37.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D17'] + 40, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D18'] + 42.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D19'] + 45, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D20'] + 47.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax2.plot(self.log.data['D21'] + 50, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D22'] + 52.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D23'] + 55, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D24'] + 57.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D25'] + 60, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D26'] + 62.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax2.plot(self.log.data['D27'] + 65, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D28'] + 67.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D29'] + 70, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D30'] + 72.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D31'] + 75, self.log.data['DEPT'], 'r-', lw=0.3) self.ax2.plot(self.log.data['D32'] + 77.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D33'] + 80, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D34'] + 82.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D35'] + 85, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D36'] + 87.5, self.log.data['DEPT'], 'r-', lw=0.3) 
self.ax2.plot(self.log.data['D37'] + 90, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D38'] + 92.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D39'] + 95, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.plot(self.log.data['D40'] + 97.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax2.set_xlim(self.scale_left, self.scale_right) self.ax2.set_ylim(self.log.start, self.log.stop) self.ax2.invert_yaxis() span2 = SpanSelector(self.ax2, self.onselect2, 'vertical', useblit=False, rectprops=dict(alpha=0.5, facecolor='yellow'), span_stays=True) plt.title('Middle') plt.gca().spines['bottom'].set_position(('data', 0)) plt.gca().spines['top'].set_position(('data', 0)) self.ax2.grid() ##################################################################################### # 坐标轴3 self.ax3 = plt.subplot(143) self.line3, = self.ax3.plot(self.log.data['D01'], self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D02'] + 2.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D03'] + 5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D04'] + 7.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D05'] + 10, self.log.data['DEPT'], 'r-', lw=0.3) self.ax3.plot(self.log.data['D06'] + 12.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D07'] + 15, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D08'] + 17.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D09'] + 20, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D10'] + 22.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax3.plot(self.log.data['D11'] + 25, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D12'] + 27.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D13'] + 30, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D14'] + 32.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D15'] + 35, 
self.log.data['DEPT'], 'r-', lw=0.3) self.ax3.plot(self.log.data['D16'] + 37.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D17'] + 40, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D18'] + 42.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D19'] + 45, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D20'] + 47.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax3.plot(self.log.data['D21'] + 50, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D22'] + 52.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D23'] + 55, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D24'] + 57.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D25'] + 60, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D26'] + 62.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax3.plot(self.log.data['D27'] + 65, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D28'] + 67.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D29'] + 70, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D30'] + 72.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D31'] + 75, self.log.data['DEPT'], 'r-', lw=0.3) self.ax3.plot(self.log.data['D32'] + 77.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D33'] + 80, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D34'] + 82.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D35'] + 85, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D36'] + 87.5, self.log.data['DEPT'], 'r-', lw=0.3) self.ax3.plot(self.log.data['D37'] + 90, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D38'] + 92.5, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D39'] + 95, self.log.data['DEPT'], 'g-', lw=0.3) self.ax3.plot(self.log.data['D40'] + 97.5, self.log.data['DEPT'], 
'g-', lw=0.3) self.ax3.set_xlim(self.scale_left, self.scale_right) self.ax3.set_ylim(self.log.start, self.log.stop) self.ax3.invert_yaxis() # self.span3_cyan = SpanSelector(self.ax3, self.onselect3, 'vertical', useblit=True, # rectprops=dict(alpha=0.5, facecolor='cyan'), span_stays=True) plt.title('Large') plt.gca().spines['bottom'].set_position(('data', 0)) plt.gca().spines['top'].set_position(('data', 0)) self.ax3.grid() ##################################################################################### # 坐标轴4 self.ax4 = plt.subplot(144) self.ax4.plot(self.log.data['IDMX'], self.log.data['DEPT'], 'r--', lw=0.3) self.ax4.plot(self.log.data['IDMN'], self.log.data['DEPT'], 'b-', lw=0.3) self.ax4.plot(self.log.data['IDAV'], self.log.data['DEPT'], 'k--', lw=0.3) self.ax4.set_xlim(self.scale_left_min, self.scale_right_max) self.ax4.set_ylim(self.log.start, self.log.stop) self.ax4.invert_yaxis() plt.title('Min-Max') plt.gca().spines['bottom'].set_position(('data', 0)) plt.gca().spines['top'].set_position(('data', 0)) self.ax4.grid() multi = MultiCursor(plt.gcf().canvas, (self.ax1, self.ax2, self.ax3, self.ax4), color='r', lw=1, horizOn=True, vertOn=False) ######################### plt.show()
def parse_lasfile(lasfile):
    """Parse a LAS file into a CSV data file plus a JSON metadata file.

    Output goes to <dir-of-lasfile>/outputDir/<basename>/, which is created
    if needed.  If the primary path (las.LASReader) fails for any reason,
    falls back to the project's manual text parser, which also handles
    LAS v3 files that use a '~CURVE' section.

    Parameters
    ----------
    lasfile : str
        Path to the input .las file.
    """
    filename = os.path.splitext(os.path.basename(lasfile))[0]
    source_folder = os.path.dirname(lasfile)
    new_folder_path = os.path.join(source_folder, 'outputDir', filename)
    if not os.path.isdir(new_folder_path):
        os.makedirs(new_folder_path)
    csvfile = os.path.join(new_folder_path, filename + '.csv')
    print('Generated CSV path: ' + csvfile)
    jsonfile = os.path.join(new_folder_path, filename + '.json')
    try:
        metadata = {}
        logger.info('Retrieving data from LAS file ' + lasfile)
        retrieved_data = pd.DataFrame()
        log = las.LASReader(lasfile)

        metadata['Version Information'] = _las_section_to_dict(log.version.items)
        metadata['Well Information'] = _las_section_to_dict(log.well.items)
        # Parameter values live on the .data attribute rather than .value.
        metadata['Parameters'] = _las_section_to_dict(log.parameters.items,
                                                      value_attr='data')

        # ASCII (curve) section.  BUG FIX: metadata['ASCII'] was indexed
        # without ever being initialized, so the original code always raised
        # KeyError here and silently fell into the fallback parser.
        curves = log.curves.items
        metadata['ASCII'] = _las_section_to_dict(curves)
        for cn in curves.keys():
            retrieved_data[cn] = pd.Series(log.data[cn])

        retrieved_data.to_csv(csvfile, index=False)
        metadata['LAS file'] = os.path.basename(lasfile)
        if os.path.isfile(csvfile):
            replace_null_values_in_csv(csvfile, -999.25)
        metadata['CSV_files'] = {}  # kept for compatibility; never populated here
        temp = os.path.realpath(csvfile)
        # Store the path portion that follows the 'outputDir' component.
        # BUG FIX: the original used +8, but len('outputDir') is 9, which
        # left a stray leading 'r' in the stored path.
        metadata['Data files'] = temp[temp.find('outputDir') + len('outputDir'):]
        print(list(metadata.keys()))
        metadata = standardize_meta_section_names(metadata)
        print(list(metadata.keys()))
        save_metadata(metadata, jsonfile)
    except Exception as e:
        logger.error(e)
        # Fallback: manual text parsing.  The raw (uncleaned) contents are
        # kept around because version detection needs the original lines.
        try:
            file_contents = read_file_contents(lasfile)
            clean_file_contents = remove_comments_blanklines(file_contents)
            metadata = read_metadata_sections(clean_file_contents)
            parse_curve_data(metadata, clean_file_contents, csvfile)
            check_las_version(file_contents)
        except Exception as e:
            print(e)


def _las_section_to_dict(items, value_attr='value'):
    """Flatten one LAS section into {name: {mnemonic, units, value, description}}.

    Parameters
    ----------
    items : mapping of str -> LAS item
        One section's items (e.g. log.well.items).
    value_attr : str
        Attribute holding the item's value ('value' for version/well/curve
        items, 'data' for parameter items).
    """
    section = {}
    for name in items.keys():
        item = items.get(name)
        section[name] = {
            'mnemonic': item.name,
            'units': item.units,
            'value': getattr(item, value_attr),
            'description': item.descr,
        }
    return section
# -*- coding: utf-8 -*- import las import numpy as np log = las.LASReader('.\\ning209H19-4_resample_jz.LAS', null_subs=np.nan)
def test_case1b(self): log = las.LASReader('las_files/case1b.las') self.check_case1_log(log, null_subs=None)
def test_case1_null_subs_nan(self): log = las.LASReader('las_files/case1.las', null_subs=np.nan) self.check_case1_log(log, null_subs=np.nan)
import io import numpy as np import matplotlib.pyplot as plt import las try: from urllib.request import urlopen except ImportError: from urllib import urlopen url = "http://www.kgs.ku.edu/software/DEWL/HELP/pc_read/Shamar-1.las" f = io.StringIO(urlopen(url).read().decode('iso-8859-1')) log = las.LASReader(f, null_subs=np.nan) plt.figure(figsize=(9, 5)) plt.plot(log.data['DEPT'], log.data['GR']) plt.xlabel(log.curves.DEPT.descr + " (%s)" % log.curves.DEPT.units) plt.ylabel(log.curves.GR.descr + " (%s)" % log.curves.GR.units) plt.title(log.well.WELL.data + ', ' + log.well.DATE.data) plt.grid() plt.show()