Example #1
def test_dt_fallback(tmpdir):
    with segyio.open(tmpdir / 'small.sgy', "r+") as f:
        # Both zero
        f.bin[BinField.Interval] = 0
        f.header[0][TraceField.TRACE_SAMPLE_INTERVAL] = 0
        f.flush()
        fallback_dt = 4
        assert segyio.dt(f, fallback_dt) == approx(fallback_dt)

        # dt in bin header different from first trace
        f.bin[BinField.Interval] = 6000
        f.header[0][TraceField.TRACE_SAMPLE_INTERVAL] = 1000
        f.flush()
        fallback_dt = 4
        assert segyio.dt(f, fallback_dt) == approx(fallback_dt)
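
A note on the units these tests exercise: segyio.dt reports the sample interval in microseconds, and the second argument is the fallback returned when the binary and trace headers are zero or inconsistent with each other. A minimal reading sketch, with 'data.sgy' as a hypothetical file name:

import segyio

# dt is in microseconds; 4000.0 is returned if the headers are zero or disagree.
with segyio.open('data.sgy', ignore_geometry=True) as f:
    dt_us = segyio.dt(f, 4000.0)
    dt_ms = dt_us / 1e3  # milliseconds, as several of the examples below compute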
Example #2
def segy_model_read(filename):

    with segyio.open(filename, "r", ignore_geometry=True) as segyfile:
        segyfile.mmap()

        # Assume input data is for single common shot gather
        sourceX = segyfile.attributes(segyio.TraceField.SourceX)
        dx = segyio.dt(segyfile) / 1e3

        # Apply scaling
        coordScalar = segyfile.attributes(
            segyio.TraceField.SourceGroupScalar)[0]

        if coordScalar < 0.:
            sourceX = sourceX / np.abs(coordScalar)
        elif coordScalar > 0.:
            sourceX = sourceX * np.abs(coordScalar)

        nx = len(sourceX)
        nz = len(segyfile.trace[0])

        # Extract data
        data = np.zeros(shape=(nx, nz), dtype='float32')
        for i in range(nx):
            data[i, :] = segyfile.trace[i]

        return data, sourceX, dx
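
A possible call of segy_model_read above, assuming segyio and numpy (as np) are imported at module level; 'shot.sgy' is a hypothetical single-shot file:

data, sourceX, dx = segy_model_read('shot.sgy')
print(data.shape)  # (number of traces, samples per trace)
print(dx)          # sample interval converted to milliseconds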
Example #3
def test_dt_no_fallback(tmpdir):
    dt_us = 6000
    with segyio.open(tmpdir / 'small.sgy', "r+") as f:
        f.bin[BinField.Interval] = dt_us
        f.header[0][TraceField.TRACE_SAMPLE_INTERVAL] = dt_us
        f.flush()
        assert segyio.dt(f) == approx(dt_us)
Example #4
    def process(self, collect_stats=False, **kwargs):
        """ Create dataframe based on `segy` file headers. """
        # Note that all the `segyio` structure inference is disabled
        # self.segyfile = SafeIO(self.path, opener=segyio.open, mode='r', strict=False, ignore_geometry=True)
        self.segyfile = segyio.open(self.path,
                                    mode='r',
                                    strict=False,
                                    ignore_geometry=True)
        self.segyfile.mmap()

        self.depth = len(self.segyfile.trace[0])
        self.delay = self.segyfile.header[0].get(
            segyio.TraceField.DelayRecordingTime)
        self.sample_rate = segyio.dt(self.segyfile) / 1000

        # Load all the headers
        dataframe = {}
        for column in self.headers:
            dataframe[column] = self.segyfile.attributes(
                getattr(segyio.TraceField, column))[slice(None)]

        dataframe = pd.DataFrame(dataframe)
        dataframe.reset_index(inplace=True)
        dataframe.rename(columns={'index': 'trace_index'}, inplace=True)
        self.dataframe = dataframe.set_index(self.index_headers)

        self.add_attributes()
        if collect_stats:
            self.collect_stats(**kwargs)
Example #5
    def process(self, collect_stats=True, recollect=False, **kwargs):
        """ Create dataframe based on `segy` file headers. """
        # Note that all the `segyio` structure inference is disabled
        self.segyfile = SafeIO(self.path,
                               opener=segyio.open,
                               mode='r',
                               strict=False,
                               ignore_geometry=True)
        self.segyfile.mmap()

        self.depth = len(self.segyfile.trace[0])
        self.delay = self.segyfile.header[0].get(
            segyio.TraceField.DelayRecordingTime)
        self.sample_rate = segyio.dt(self.segyfile) / 1000

        # Load all the headers
        dataframe = {}
        for column in self.headers:
            dataframe[column] = self.segyfile.attributes(
                getattr(segyio.TraceField, column))[slice(None)]

        dataframe = pd.DataFrame(dataframe)
        dataframe.reset_index(inplace=True)
        dataframe.rename(columns={'index': 'trace_index'}, inplace=True)
        self.dataframe = dataframe.set_index(self.index_headers)

        self.add_attributes()

        # Collect stats, if needed and not collected previously
        if os.path.exists(self.path_meta) and not recollect:
            self.load_meta()
            self.has_stats = True
        elif collect_stats:
            self.collect_stats(**kwargs)

        # Create a matrix with ones at fully-zero traces
        if self.index_headers == self.INDEX_POST and not hasattr(
                self, 'zero_traces'):
            try:
                size = self.depth // 10
                slc = np.stack([self[:, :, i * size] for i in range(1, 10)],
                               axis=0)
                self.zero_traces = np.zeros(self.lens, dtype=np.int32)
                self.zero_traces[np.std(slc, axis=0) == 0] = 1
            except (ValueError, AttributeError):  # can't reshape
                pass

        # Store additional segy info
        self.segy_path = self.path
        self.segy_text = [
            self.segyfile.text[i] for i in range(1 + self.segyfile.ext_headers)
        ]

        # Computed from CDP_X/CDP_Y information
        try:
            self.rotation_matrix = self.compute_rotation_matrix()
            self.area = self.compute_area()
        except (ValueError, KeyError):  # single line SEG-Y
            self.rotation_matrix = None
            self.area = -1.
Example #6
    def test_dt_fallback(self):
        with TestContext("dt_fallback") as context:
            context.copy_file(self.filename)
            with segyio.open("small.sgy", "r+") as f:
                # Both zero
                f.bin[BinField.Interval] = 0
                f.header[0][TraceField.TRACE_SAMPLE_INTERVAL] = 0
                f.flush()
                fallback_dt = 4
                np.testing.assert_almost_equal(segyio.dt(f, fallback_dt), fallback_dt)

                # dt in bin header different from first trace
                f.bin[BinField.Interval] = 6000
                f.header[0][TraceField.TRACE_SAMPLE_INTERVAL] = 1000
                f.flush()
                fallback_dt = 4
                np.testing.assert_almost_equal(segyio.dt(f, fallback_dt), fallback_dt)
Example #7
    def test_dt_no_fallback(self):
        with TestContext("dt_no_fallback") as context:
            context.copy_file(self.filename)
            dt_us = 6000
            with segyio.open("small.sgy", "r+") as f:
                f.bin[BinField.Interval] = dt_us
                f.header[0][TraceField.TRACE_SAMPLE_INTERVAL] = dt_us
                f.flush()
                np.testing.assert_almost_equal(segyio.dt(f), dt_us)
Example #8
def segy_read(filename, ndims=2):

    with segyio.open(filename, "r", ignore_geometry=True) as segyfile:
        segyfile.mmap()

        # Assume input data is for single common shot gather
        sourceX = segyfile.attributes(segyio.TraceField.SourceX)[0]
        sourceY = segyfile.attributes(segyio.TraceField.SourceY)[0]
        sourceZ = segyfile.attributes(
            segyio.TraceField.SourceSurfaceElevation)[0]
        groupX = segyfile.attributes(segyio.TraceField.GroupX)[:]
        groupY = segyfile.attributes(segyio.TraceField.GroupY)[:]
        groupZ = segyfile.attributes(
            segyio.TraceField.ReceiverGroupElevation)[:]
        dt = segyio.dt(segyfile) / 1e3

        # Apply scaling
        elevScalar = segyfile.attributes(segyio.TraceField.ElevationScalar)[0]
        coordScalar = segyfile.attributes(
            segyio.TraceField.SourceGroupScalar)[0]

        if coordScalar < 0.:
            sourceX = sourceX / np.abs(coordScalar)
            sourceY = sourceY / np.abs(coordScalar)
            sourceZ = sourceZ / np.abs(elevScalar)
            groupX = groupX / np.abs(coordScalar)
            groupY = groupY / np.abs(coordScalar)
        elif coordScalar > 0.:
            sourceX = sourceX * np.abs(coordScalar)
            sourceY = sourceY * np.abs(coordScalar)
            sourceZ = sourceZ * np.abs(elevScalar)
            groupX = groupX * np.abs(coordScalar)
            groupY = groupY * np.abs(coordScalar)

        if elevScalar < 0.:
            groupZ = groupZ / np.abs(elevScalar)
        elif elevScalar > 0.:
            groupZ = groupZ * np.abs(elevScalar)

        nrec = len(groupX)
        nt = len(segyfile.trace[0])

        # Extract data
        data = np.zeros(shape=(nt, nrec), dtype='float32')
        for i in range(nrec):
            data[:, i] = segyfile.trace[i]
        tmax = (nt - 1) * dt

    if ndims == 2:
        return data, sourceX, sourceZ, groupX, groupZ, tmax, dt, nt
    else:
        return data, sourceX, sourceY, sourceZ, groupX, groupY, groupZ, tmax, dt, nt
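
A sketch of calling segy_read above with ndims=2, under the same assumptions (segyio and numpy imported at module level, a hypothetical shot gather 'shot.sgy'):

data, sx, sz, gx, gz, tmax, dt, nt = segy_read('shot.sgy', ndims=2)
print(data.shape)  # (nt, nrec): samples along axis 0, receivers along axis 1
print(dt, tmax)    # sample interval and end time, both in milliseconds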
Example #9
    def process(self, collect_stats=False, recollect=False, **kwargs):
        """ Create dataframe based on `segy` file headers. """
        # Note that all the `segyio` structure inference is disabled
        self.segyfile = SafeIO(self.path,
                               opener=segyio.open,
                               mode='r',
                               strict=False,
                               ignore_geometry=True)
        self.segyfile.mmap()

        self.depth = len(self.segyfile.trace[0])
        self.delay = self.segyfile.header[0].get(
            segyio.TraceField.DelayRecordingTime)
        self.sample_rate = segyio.dt(self.segyfile) / 1000

        # Load all the headers
        dataframe = {}
        for column in self.headers:
            dataframe[column] = self.segyfile.attributes(
                getattr(segyio.TraceField, column))[slice(None)]

        dataframe = pd.DataFrame(dataframe)
        dataframe.reset_index(inplace=True)
        dataframe.rename(columns={'index': 'trace_index'}, inplace=True)
        self.dataframe = dataframe.set_index(self.index_headers)

        self.add_attributes()

        # Create a matrix with ones at fully-zero traces
        if self.index_headers == self.INDEX_POST:
            size = self.depth // 10
            slc = np.stack([self[:, :, i * size] for i in range(1, 10)],
                           axis=-1)
            self.zero_traces = np.zeros(self.lens, dtype=np.int32)
            self.zero_traces[np.std(slc, axis=-1) == 0] = 1

        path_meta = os.path.splitext(self.path)[0] + '.meta'
        if os.path.exists(path_meta) and not recollect:
            self.load_meta()
        elif collect_stats:
            self.collect_stats(**kwargs)

        # Store additional segy info, that is preserved in HDF5
        self.segy_path = self.path
        self.segy_text = [
            self.segyfile.text[i] for i in range(1 + self.segyfile.ext_headers)
        ]
        self.add_rotation_matrix()
Example #10
    def _read_velocity(self, vcube, cell_corners):
        """
        Read velocity from segy file. Upscale.
        :param vcube:
        :param cell_corners:
        :return:
        """

        with segyio.open(vcube) as f:
            x = np.empty(f.tracecount, dtype=np.float64)
            y = np.empty(f.tracecount, dtype=np.float64)
            dt = segyio.dt(f) / 1e6
            nt = len(f.samples)

            for i, h in enumerate(f.header):
                scalar = h[TraceField.SourceGroupScalar]
                if scalar < 0:
                    scalar = -1.0 / scalar
                x[i] = h[TraceField.CDP_X] * scalar
                y[i] = h[TraceField.CDP_Y] * scalar

            ny = len(f.xlines)
            nx = len(f.ilines)

            # memory issue might happen if volume becomes too large
            traces = f.trace.raw[:]

            if f.sorting == segyio.TraceSortingFormat.INLINE_SORTING:
                x = x.reshape(ny, nx)
                y = y.reshape(ny, nx)
                traces = traces.reshape(ny, nx, nt)
                x = np.transpose(x)
                y = np.transpose(y)
                traces = np.transpose(traces, (1, 0, 2))
            elif f.sorting == segyio.TraceSortingFormat.CROSSLINE_SORTING:
                x = x.reshape(nx, ny)
                y = y.reshape(nx, ny)
                traces = traces.reshape(nx, ny, nt)
            else:
                raise RuntimeError("unsupported trace sorting format")

        x, y, traces, nt, dt = self._upscale_velocity(cell_corners, x, y,
                                                      traces, nt, dt)

        return x, y, traces, nt, dt
Example #11
    ax.set_xlabel("CMP number")
    ax.set_ylabel("Time (ms)")
    for h in horizons:
        ax.plot(h[0], h[1])

    return ax


if __name__ == "__main__":
    segydir = "C:/Users/Luc Pelletier/Desktop/Universite/UPIR/modele_pergelisol/BF01_1sec.sgy"
    segydir = "./BF01_1sec.sgy"

    with segyio.open(segydir, "r", ignore_geometry=True) as segy:
        seis = np.array([segy.trace[trid] for trid in range(segy.tracecount)])
        seis = np.transpose(seis)
        dt = segyio.dt(segy) * 10**-3

    #x = plt.ginput(10)
    #print(x)

    horizons = []
    nhorizons = 2

    for n in range(nhorizons):
        ax = plot_seis(seis, dt, horizons=horizons)
        line, = ax.plot([], [])  # empty line
        linebuilder = LineBuilder(line)
        plt.show()
        horizons.append([linebuilder.xs, linebuilder.ys])
        time.sleep(0.5)