Example #1
 def test_pickle_roundtrip(self):
     packer1 = make_header_packer(PickleableHeader, endian='>')
     s = pickle.dumps(packer1)
     packer2 = pickle.loads(s)
     assert packer1._header_format_class == packer2._header_format_class
     assert packer1._structure.format == packer2._structure.format
     assert packer1._field_name_allocations == packer2._field_name_allocations
Example #2
 def test_pickle_roundtrip(self):
     packer1 = make_header_packer(PickleableHeader, endian='>')
     s = pickle.dumps(packer1)
     packer2 = pickle.loads(s)
     assert packer1._header_format_class == packer2._header_format_class
     assert packer1._structure.format == packer2._structure.format
     assert packer1._field_name_allocations == packer2._field_name_allocations
Example #3
def write_segy(fh,
               dataset,
               encoding=None,
               trace_header_format=TraceHeaderRev1,
               endian='>',
               progress=None):
    """
    Args:
        fh: A file-like object open for binary write, positioned to write the textual reel header.

        dataset: An object implementing the interface of segpy.dataset.Dataset, such as a SegYReader.

        trace_header_format: The class which defines the layout of the trace header. Defaults to TraceHeaderRev1.

        encoding: Optional encoding for text data. Typically 'cp037' for EBCDIC or 'ascii' for ASCII. If omitted, the
            dataset object will be queried for an encoding property.

        endian: Big endian by default. If omitted, the dataset object will be queried for an endian property.

        progress: A unary callable which will be passed a number
            between zero and one indicating the progress made. If
            provided, this callback will be invoked at least once with
            an argument equal to one.

    Raises:
        UnsupportedEncodingError: If the specified encoding is neither ASCII nor EBCDIC
        UnicodeError: If textual data provided cannot be encoded into the required encoding.
    """

    progress_callback = progress if progress is not None else lambda p: None

    if not callable(progress_callback):
        raise TypeError("write_segy(): progress callback must be callable")

    encoding = encoding or (hasattr(dataset, 'encoding')
                            and dataset.encoding) or ASCII

    if not is_supported_encoding(encoding):
        raise UnsupportedEncodingError("Writing SEG Y", encoding)

    write_textual_reel_header(fh, dataset.textual_reel_header, encoding)
    write_binary_reel_header(fh, dataset.binary_reel_header, endian)
    write_extended_textual_headers(fh, dataset.extended_textual_header,
                                   encoding)

    trace_header_packer = make_header_packer(trace_header_format, endian)

    num_traces = dataset.num_traces()

    for trace_index in dataset.trace_indexes():
        write_trace_header(fh, dataset.trace_header(trace_index),
                           trace_header_packer)
        write_trace_samples(fh,
                            dataset.trace_samples(trace_index),
                            dataset.data_sample_format,
                            endian=endian)
        progress_callback(trace_index / num_traces)

    progress_callback(1)
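
The progress argument above is just a unary callable, so a plain function that prints the fraction written is enough. The following is an illustrative sketch, not part of the example: it assumes `dataset` is an existing Dataset-like object (such as a SegYReader) and that write_segy is importable from segpy.writer, as the writer.py file names further below suggest.

from segpy.writer import write_segy  # module path assumed from the writer.py examples below

def report(fraction):
    # Receives a number between zero and one; called at least once with 1.
    print("{:.0%} written".format(fraction))

with open('copy.segy', 'wb') as out_fh:
    write_segy(out_fh, dataset, progress=report)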
Example #4
File: reader.py Project: wassname/segpy
    def __init__(self,
                 fh,
                 textual_reel_header,
                 binary_reel_header,
                 extended_textual_headers,
                 trace_offset_catalog,
                 trace_length_catalog,
                 trace_header_format,
                 encoding,
                 endian='>'):
        """Initialize a SegYReader around a file-like-object.

                Note:
            Usually a SegYReader is most easily constructed using the
            create_reader() function.

        Args:
            fh: A file-like object, which must support seeking and
            support binary reading.

            textual_reel_header: A sequence of forty 80-character Unicode strings
                containing header data.

            binary_reel_header: A Header object containing reel header data.

            extended_textual_headers: A sequence of sequences of Unicode strings.

            trace_offset_catalog: A mapping from zero-based trace index to
                the byte-offset to individual traces within the file.

            trace_length_catalog: A mapping from zero-based trace index to the
                number of samples in that trace.

            trace_header_format: The class defining the layout of the trace header.

            encoding: Either ASCII or EBCDIC.

            endian: '>' for big-endian data (the standard and default), '<' for
                little-endian (non-standard)

        """
        self._fh = fh
        self._endian = endian
        self._encoding = encoding

        self._textual_reel_header = textual_reel_header
        self._binary_reel_header = binary_reel_header
        self._extended_textual_headers = extended_textual_headers

        self._trace_header_packer = make_header_packer(trace_header_format,
                                                       endian)

        self._trace_offset_catalog = trace_offset_catalog
        self._trace_length_catalog = trace_length_catalog

        self._revision = extract_revision(self._binary_reel_header)
        self._bytes_per_sample = bytes_per_sample(self._binary_reel_header,
                                                  self.revision)
        self._max_num_trace_samples = None
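
As the Note in the docstring says, a SegYReader is normally obtained through create_reader() rather than by calling this initializer directly. A minimal sketch, assuming create_reader is importable from segpy.reader (the file shown above is reader.py) and that the file exists:

from segpy.reader import create_reader  # module path assumed from reader.py above

with open('example.segy', 'rb') as fh:  # must support seeking and binary reading
    reader = create_reader(fh)
    print(reader.num_traces())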
Example #5
    def test_make_bijective_header_packer_successfully(self):
        class BijectiveHeader(Header):
            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")

        packer = make_header_packer(BijectiveHeader)
        assert isinstance(packer, BijectiveHeaderPacker)
        assert packer.header_format_class == BijectiveHeader
Example #6
 def test_read_truncated_header_raises_eoferror(self, trace_header_written, endian, random):
     trace_header_packer = make_header_packer(TraceHeaderRev1, endian)
     buffer = trace_header_packer.pack(trace_header_written)
     truncate_pos = random.randrange(0, len(buffer) - 1)
     truncated_buffer = buffer[:truncate_pos]
     with BytesIO(truncated_buffer) as fh:
         with raises(EOFError):
             toolkit.read_trace_header(fh, trace_header_packer, pos=0)
Example #7
 def test_read_header_successfully(self, trace_header_written, endian):
     trace_header_packer = make_header_packer(TraceHeaderRev1, endian)
     buffer = trace_header_packer.pack(trace_header_written)
     with BytesIO(buffer) as fh:
         trace_header = toolkit.read_trace_header(fh,
                                                  trace_header_packer,
                                                  pos=0)
         assert are_equal(trace_header_written, trace_header)
Example #8
File: reader.py Project: wassname/segpy
    def __init__(self,
                 fh,
                 textual_reel_header,
                 binary_reel_header,
                 extended_textual_headers,
                 trace_offset_catalog,
                 trace_length_catalog,
                 trace_header_format,
                 encoding,
                 endian='>'):
        """Initialize a SegYReader around a file-like-object.

                Note:
            Usually a SegYReader is most easily constructed using the
            create_reader() function.

        Args:
            fh: A file-like object, which must support seeking and
            support binary reading.

            textual_reel_header: A sequence of forty 80-character Unicode strings
                containing header data.

            binary_reel_header: A Header object containing reel header data.

            extended_textual_headers: A sequence of sequences of Unicode strings.

            trace_offset_catalog: A mapping from zero-based trace index to
                the byte-offset to individual traces within the file.

            trace_length_catalog: A mapping from zero-based trace index to the
                number of samples in that trace.

            trace_header_format: The class defining the layout of the trace header.

            encoding: Either ASCII or EBCDIC.

            endian: '>' for big-endian data (the standard and default), '<' for
                little-endian (non-standard)

        """
        self._fh = fh
        self._endian = endian
        self._encoding = encoding

        self._textual_reel_header = textual_reel_header
        self._binary_reel_header = binary_reel_header
        self._extended_textual_headers = extended_textual_headers

        self._trace_header_packer = make_header_packer(trace_header_format, endian)

        self._trace_offset_catalog = trace_offset_catalog
        self._trace_length_catalog = trace_length_catalog

        self._revision = extract_revision(self._binary_reel_header)
        self._bytes_per_sample = bytes_per_sample(
            self._binary_reel_header, self.revision)
        self._max_num_trace_samples = None
Example #9
def write_segy(fh,
               seg_y_data,
               encoding=None,
               trace_header_format=TraceHeaderRev1,
               endian='>',
               progress=None):
    """
    Args:
        fh: A file-like object open for binary write, positioned to write the textual reel header.

        seg_y_data:  An object from which the headers and trace_samples data can be retrieved. Requires the following
            properties and methods:
              seg_y_data.textual_reel_header
              seg_y_data.binary_reel_header
              seg_y_data.extended_textual_header
              seg_y_data.trace_indexes
              seg_y_data.trace_header(trace_index)
              seg_y_data.trace_samples(trace_index)

              seg_y_data.encoding
              seg_y_data.endian
              seg_y_data.data_sample_format

              One such legitimate object would be a SegYReader instance.

        trace_header_format: The class which defines the layout of the trace header. Defaults to TraceHeaderRev1.

        encoding: Optional encoding for text data. Typically 'cp037' for EBCDIC or 'ascii' for ASCII. If omitted, the
            seg_y_data object will be queried for an encoding property.

        endian: Big endian by default. If omitted, the seg_y_data object will be queried for an endian property.

        progress: An optional progress bar object.

    Raises:
        UnsupportedEncodingError: If the specified encoding is neither ASCII nor EBCDIC
        UnicodeError: If textual data provided cannot be encoded into the required encoding.
    """

    encoding = encoding or (hasattr(seg_y_data, 'encoding')
                            and seg_y_data.encoding) or ASCII

    if not is_supported_encoding(encoding):
        raise UnsupportedEncodingError("Writing SEG Y", encoding)

    write_textual_reel_header(fh, seg_y_data.textual_reel_header, encoding)
    write_binary_reel_header(fh, seg_y_data.binary_reel_header, endian)
    write_extended_textual_headers(fh, seg_y_data.extended_textual_header,
                                   encoding)

    trace_header_packer = make_header_packer(trace_header_format, endian)

    for trace_index in seg_y_data.trace_indexes():
        write_trace_header(fh, seg_y_data.trace_header(trace_index),
                           trace_header_packer)
        write_trace_samples(fh,
                            seg_y_data.trace_samples(trace_index),
                            seg_y_data.data_sample_format,
                            endian=endian)
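
Since a SegYReader satisfies the seg_y_data interface listed in the docstring, the most direct use of write_segy is copying one SEG Y file to another. A sketch, under the assumption that create_reader and write_segy live in segpy.reader and segpy.writer respectively:

from segpy.reader import create_reader  # module paths assumed from the file names shown
from segpy.writer import write_segy

with open('in.segy', 'rb') as in_fh, open('out.segy', 'wb') as out_fh:
    reader = create_reader(in_fh)   # provides the headers, traces and encoding
    write_segy(out_fh, reader)      # encoding falls back to the reader's encoding property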
Example #10
    def test_make_bijective_header_packer_successfully(self):

        class BijectiveHeader(Header):
            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")

        packer = make_header_packer(BijectiveHeader)
        assert isinstance(packer, BijectiveHeaderPacker)
        assert packer.header_format_class == BijectiveHeader
Example #11
 def test_read_truncated_header_raises_eoferror(self, trace_header_written,
                                                endian, random):
     trace_header_packer = make_header_packer(TraceHeaderRev1, endian)
     buffer = trace_header_packer.pack(trace_header_written)
     truncate_pos = random.randrange(0, len(buffer) - 1)
     truncated_buffer = buffer[:truncate_pos]
     with BytesIO(truncated_buffer) as fh:
         with raises(EOFError):
             toolkit.read_trace_header(fh, trace_header_packer, pos=0)
Example #12
    def test_pack_incorrect_type(self):
        class BijectiveHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")

        packer = make_header_packer(BijectiveHeader, endian='>')
        with raises(TypeError):
            packer.pack(None)
Example #13
    def test_pack_incorrect_type(self):
        class BijectiveHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")

        packer = make_header_packer(BijectiveHeader, endian='>')
        with raises(TypeError):
            packer.pack(None)
Example #14
    def test_unpack_bijective_header(self):

        class BijectiveHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")

        packer = make_header_packer(BijectiveHeader, endian='>')
        buffer = bytes((0x12, 0x34, 0x56, 0x78, 0x01, 0x35, 0x79, 0x32))
        header = packer.unpack(buffer)
        assert are_equal(header, BijectiveHeader(field_a=0x12345678, field_b=0x01357932))
Example #15
File: writer.py Project: rth/segpy
def write_segy(fh,
               seg_y_data,
               encoding=None,
               trace_header_format=TraceHeaderRev1,
               endian='>',
               progress=None):
    """
    Args:
        fh: A file-like object open for binary write, positioned to write the textual reel header.

        seg_y_data:  An object from which the headers and trace_samples data can be retrieved. Requires the following
            properties and methods:
              seg_y_data.textual_reel_header
              seg_y_data.binary_reel_header
              seg_y_data.extended_textual_header
              seg_y_data.trace_indexes
              seg_y_data.trace_header(trace_index)
              seg_y_data.trace_samples(trace_index)

              seg_y_data.encoding
              seg_y_data.endian
              seg_y_data.data_sample_format

              One such legitimate object would be a SegYReader instance.

        trace_header_format: The class which defines the layout of the trace header. Defaults to TraceHeaderRev1.

        encoding: Optional encoding for text data. Typically 'cp037' for EBCDIC or 'ascii' for ASCII. If omitted, the
            seg_y_data object will be queried for an encoding property.

        endian: Big endian by default. If omitted, the seg_y_data object will be queried for an endian property.

        progress: An optional progress bar object.

    Raises:
        UnsupportedEncodingError: If the specified encoding is neither ASCII nor EBCDIC
        UnicodeError: If textual data provided cannot be encoded into the required encoding.
    """

    encoding = encoding or (hasattr(seg_y_data, 'encoding') and seg_y_data.encoding) or ASCII

    if not is_supported_encoding(encoding):
        raise UnsupportedEncodingError("Writing SEG Y", encoding)

    write_textual_reel_header(fh, seg_y_data.textual_reel_header, encoding)
    write_binary_reel_header(fh, seg_y_data.binary_reel_header, endian)
    write_extended_textual_headers(fh, seg_y_data.extended_textual_header, encoding)

    trace_header_packer = make_header_packer(trace_header_format, endian)

    for trace_index in seg_y_data.trace_indexes():
        write_trace_header(fh, seg_y_data.trace_header(trace_index), trace_header_packer)
        write_trace_samples(fh, seg_y_data.trace_samples(trace_index), seg_y_data.data_sample_format, endian=endian)
Example #16
    def test_unpack_bijective_header(self):
        class BijectiveHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")

        packer = make_header_packer(BijectiveHeader, endian='>')
        buffer = bytes((0x12, 0x34, 0x56, 0x78, 0x01, 0x35, 0x79, 0x32))
        header = packer.unpack(buffer)
        assert are_equal(
            header, BijectiveHeader(field_a=0x12345678, field_b=0x01357932))
Example #17
File: writer.py Project: abingham/segpy
def write_segy(fh,
               dataset,
               encoding=None,
               trace_header_format=TraceHeaderRev1,
               endian='>',
               progress=None):
    """
    Args:
        fh: A file-like object open for binary write, positioned to write the textual reel header.

        dataset: An object implementing the interface of segpy.dataset.Dataset, such as a SegYReader.

        trace_header_format: The class which defines the layout of the trace header. Defaults to TraceHeaderRev1.

        encoding: Optional encoding for text data. Typically 'cp037' for EBCDIC or 'ascii' for ASCII. If omitted, the
            dataset object will be queried for an encoding property.

        endian: Big endian by default. If omitted, the dataset object will be queried for an endian property.

        progress: A unary callable which will be passed a number
            between zero and one indicating the progress made. If
            provided, this callback will be invoked at least once with
            an argument equal to one.

    Raises:
        UnsupportedEncodingError: If the specified encoding is neither ASCII nor EBCDIC
        UnicodeError: If textual data provided cannot be encoded into the required encoding.
    """

    progress_callback = progress if progress is not None else lambda p: None

    if not callable(progress_callback):
        raise TypeError("write_segy(): progress callback must be callable")

    encoding = encoding or (hasattr(dataset, 'encoding') and dataset.encoding) or ASCII

    if not is_supported_encoding(encoding):
        raise UnsupportedEncodingError("Writing SEG Y", encoding)

    write_textual_reel_header(fh, dataset.textual_reel_header, encoding)
    write_binary_reel_header(fh, dataset.binary_reel_header, endian)
    write_extended_textual_headers(fh, dataset.extended_textual_header, encoding)

    trace_header_packer = make_header_packer(trace_header_format, endian)

    num_traces = dataset.num_traces()

    for trace_index in dataset.trace_indexes():
        write_trace_header(fh, dataset.trace_header(trace_index), trace_header_packer)
        write_trace_samples(fh, dataset.trace_samples(trace_index), dataset.data_sample_format, endian=endian)
        progress_callback(trace_index / num_traces)

    progress_callback(1)
Example #18
    def test_pack_inconsistent_surjective_raises_value_error(self):

        class SurjectiveHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")
            field_c = field(Int32, 1, 0, "Field C.")

        packer = make_header_packer(SurjectiveHeader, endian='>')
        bh = SurjectiveHeader(field_a=0x12345678, field_b=0x01357932, field_c=0x52345678)
        with raises(ValueError):
            packer.pack(bh)
Example #19
    def test_pack_surjective(self):

        class SurjectiveHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")
            field_c = field(Int32, 1, 0, "Field C.")

        packer = make_header_packer(SurjectiveHeader, endian='>')
        bh = SurjectiveHeader(field_a=0x12345678, field_b=0x01357932, field_c=0x12345678)
        buffer = packer.pack(bh)
        assert buffer == bytes((0x12, 0x34, 0x56, 0x78, 0x01, 0x35, 0x79, 0x32))
Example #20
    def test_repr(self):
        class MyHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")
            field_c = field(Int32, 1, 0, "Field C.")

        packer = make_header_packer(MyHeader, endian='>')
        r = repr(packer)
        assert 'SurjectiveHeaderPacker' in r
        assert 'MyHeader' in r
        assert check_balanced(r)
Example #21
    def test_repr(self):

        class MyHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")
            field_c = field(Int32, 1, 0, "Field C.")

        packer = make_header_packer(MyHeader, endian='>')
        r = repr(packer)
        assert 'SurjectiveHeaderPacker' in r
        assert 'MyHeader' in r
        assert check_balanced(r)
Example #22
def read_binary_reel_header(fh, endian='>'):
    """Read the SEG Y binary reel header.

    Args:
        fh: A file-like object open in binary mode. Binary header is assumed to
            be at an offset of 3200 bytes from the beginning of the file.

        endian: '>' for big-endian data (the standard and default), '<' for
            little-endian (non-standard)
    """
    header_packer = make_header_packer(BinaryReelHeader, endian)
    buffer = fh.read(BinaryReelHeader.LENGTH_IN_BYTES)
    reel_header = header_packer.unpack(buffer)
    return reel_header
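
A sketch of calling it directly, assuming read_binary_reel_header is importable from segpy.toolkit (the same toolkit module used by the read_trace_header tests above) and that the binary header sits at the standard 3200-byte offset noted in the docstring:

from segpy.toolkit import read_binary_reel_header  # module path assumed

with open('example.segy', 'rb') as fh:
    fh.seek(3200)  # skip the 3200-byte textual reel header
    reel_header = read_binary_reel_header(fh, endian='>')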
Example #23
    def test_pack_inconsistent_surjective_raises_value_error(self):
        class SurjectiveHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")
            field_c = field(Int32, 1, 0, "Field C.")

        packer = make_header_packer(SurjectiveHeader, endian='>')
        bh = SurjectiveHeader(field_a=0x12345678,
                              field_b=0x01357932,
                              field_c=0x52345678)
        with raises(ValueError):
            packer.pack(bh)
Example #24
    def test_pack_surjective(self):
        class SurjectiveHeader(Header):
            START_OFFSET_IN_BYTES = 1

            field_a = field(Int32, 1, 0, "Field A.")
            field_b = field(NNInt32, 5, 0, "Field B.")
            field_c = field(Int32, 1, 0, "Field C.")

        packer = make_header_packer(SurjectiveHeader, endian='>')
        bh = SurjectiveHeader(field_a=0x12345678,
                              field_b=0x01357932,
                              field_c=0x12345678)
        buffer = packer.pack(bh)
        assert buffer == bytes(
            (0x12, 0x34, 0x56, 0x78, 0x01, 0x35, 0x79, 0x32))
Example #25
def write_binary_reel_header(fh, binary_reel_header, endian='>'):
    """Write the binary_reel_header to the given file-like object.

    Args:
        fh: A file-like object open in binary mode for writing.

        binary_reel_header: A header object.

    Post-condition:
        The file pointer for fh will be positioned at the first byte following
        the binary reel header.
    """
    header_packer = make_header_packer(type(binary_reel_header), endian)
    buffer = header_packer.pack(binary_reel_header)
    fh.write(buffer)
Example #26
def extract_trace_headers(reader, fields, trace_indexes=None):
    """Extract trace header fields from the specified trace headers as separate arrays.

    Args:
        reader: A SegYReader

        fields: An iterable series where each item is either the name of a field as a string,
            or an object with a 'name' attribute (such as a NamedField) whose value is the name
            of a field as a string.

        trace_indexes: An optional iterable series of trace_indexes. If not provided or None,
            the headers for all trace indexes will be returned.

    Returns:
        A namedtuple with attributes which are one-dimensional Numpy arrays.
    """
    if trace_indexes is None:
        trace_indexes = reader.trace_indexes()

    field_names = [_extract_field_name(field) for field in fields]

    class SubFormat(metaclass=SubFormatMeta,
                    parent_format=reader.trace_header_format_class,
                    parent_field_names=field_names):
        pass

    sub_header_packer = make_header_packer(SubFormat, reader.endian)
    trace_header_arrays_cls = namedtuple('trace_header_arrays_cls',
                                         field_names)

    trace_headers = [
        reader.trace_header(trace_index, sub_header_packer)
        for trace_index in trace_indexes
    ]

    trace_header_arrays = trace_header_arrays_cls(*(np.fromiter(
        (getattr(trace_header, field_name) for trace_header in trace_headers),
        dtype=make_dtype(getattr(SubFormat, field_name).value_type.SEG_Y_TYPE),
        count=len(trace_headers)) for field_name in field_names))

    return trace_header_arrays
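
A usage sketch, assuming `reader` is an existing SegYReader whose trace header format defines inline_number and crossline_number fields (those field names appear in the catalog_traces example further below):

# Each attribute of the returned namedtuple is a one-dimensional NumPy array,
# one element per requested trace.
headers = extract_trace_headers(reader, ['inline_number', 'crossline_number'])
print(headers.inline_number.shape, headers.crossline_number.dtype)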
Example #27
File: extract.py Project: abingham/segpy
def extract_trace_headers(reader, fields, trace_indexes=None):
    """Extract trace header fields from the specified trace headers as separate arrays.

    Args:
        reader: A SegYReader

        fields: An iterable series where each item is either the name of a field as a string,
            or an object with a 'name' attribute (such as a NamedField) whose value is the name
            of a field as a string.

        trace_indexes: An optional iterable series of trace_indexes. If not provided or None,
            the headers for all trace indexes will be returned.

    Returns:
        A namedtuple with attributes which are one-dimensional Numpy arrays.
    """
    if trace_indexes is None:
        trace_indexes = reader.trace_indexes()

    field_names = [_extract_field_name(field) for field in fields]

    class SubFormat(metaclass=SubFormatMeta,
                    parent_format=reader.trace_header_format_class,
                    parent_field_names=field_names):
        pass

    sub_header_packer = make_header_packer(SubFormat, reader.endian)
    trace_header_arrays_cls = namedtuple('trace_header_arrays_cls', field_names)

    trace_headers = [reader.trace_header(trace_index, sub_header_packer)
                     for trace_index in trace_indexes]

    trace_header_arrays = trace_header_arrays_cls(
        *(np.fromiter((getattr(trace_header, field_name) for trace_header in trace_headers),
                      dtype=make_dtype(getattr(SubFormat, field_name).value_type.SEG_Y_TYPE),
                      count=len(trace_headers))
          for field_name in field_names)
    )

    return trace_header_arrays
Example #28
File: writer.py Project: BoKu123/segpy
def write_segy(fh,
               dataset,
               encoding=None,
               trace_header_format=TraceHeaderRev1,
               endian='>',
               progress=None):
    """
    Args:
        fh: A file-like object open for binary write, positioned to write the textual reel header.

        dataset: An object implementing the interface of segpy.dataset.Dataset, such as a SegYReader.

        trace_header_format: The class which defines the layout of the trace header. Defaults to TraceHeaderRev1.

        encoding: Optional encoding for text data. Typically 'cp037' for EBCDIC or 'ascii' for ASCII. If omitted, the
            dataset object will be queried for an encoding property.

        endian: Big endian by default. If omitted, the dataset object will be queried for an endian property.

        progress: An optional progress bar object.

    Raises:
        UnsupportedEncodingError: If the specified encoding is neither ASCII nor EBCDIC
        UnicodeError: If textual data provided cannot be encoded into the required encoding.
    """

    encoding = encoding or (hasattr(dataset, 'encoding') and dataset.encoding) or ASCII

    if not is_supported_encoding(encoding):
        raise UnsupportedEncodingError("Writing SEG Y", encoding)

    write_textual_reel_header(fh, dataset.textual_reel_header, encoding)
    write_binary_reel_header(fh, dataset.binary_reel_header, endian)
    write_extended_textual_headers(fh, dataset.extended_textual_header, encoding)

    trace_header_packer = make_header_packer(trace_header_format, endian)

    for trace_index in dataset.trace_indexes():
        write_trace_header(fh, dataset.trace_header(trace_index), trace_header_packer)
        write_trace_samples(fh, dataset.trace_samples(trace_index), dataset.data_sample_format, endian=endian)
Example #29
File: extract.py Project: abingham/segpy
def extract_trace_header_field_3d(reader_3d, fields, inline_numbers=None, xline_numbers=None, null=None):
    """Extract a single trace header field from all trace headers as an array.

    Args:
        reader_3d: A SegYReader3D

        fields: An iterable series where each item is either the name of a field as a string,
            or an object with a 'name' attribute (such as a NamedField) whose value is the name
            of a field as a string.

        inline_numbers: The inline numbers for which traces are to be extracted.
            This argument can be specified in three ways:

            None (the default) - All traces within each crossline will be extracted.

            sequence - When a sequence, such as a range or a list is provided only those traces at
                inline numbers corresponding to the items in the sequence will be extracted. The
                traces will always be extracted in increasing numeric order and duplicate entries
                will be ignored.  For example inline_numbers=range(100, 200, 2) will extract alternate
                traces from inline number 100 to inline number 198 inclusive.

            slice - When a slice object is provided the slice will be applied to the sequence of all
                inline numbers. For example inline_numbers=slice(100, -100) will omit the first
                one hundred and the last one hundred traces, irrespective of their numbers.

        xline_numbers: The crossline numbers at which traces are to be extracted.
            This argument can be specified in three ways:

            None (the default) - All traces within each inline will be extracted.

            sequence - When a sequence, such as a range or a list is provided only those traces at
                crossline numbers corresponding to the items in the sequence will be extracted. The
                traces will always be extracted in increasing numeric order and duplicate entries
                will be ignored.  For example xline_numbers=range(100, 200, 2) will extract alternate
                traces from crossline number 100 to crossline number 198 inclusive.

            slice - When a slice object is provided the slice will be applied to the sequence of all
                crossline numbers. For example xline_numbers=slice(100, -100) will omit the first
                one hundred and the last one hundred traces, irrespective of their numbers.

        null: An optional null value for missing traces. The null value must be convertible
            to all field value types.

    Returns:
        A namedtuple object with attributes which are two-dimensional Numpy arrays.
        If a null value was specified the arrays will be ndarrays, otherwise they
        will be masked arrays.  The attributes of the named tuple are in the same
        order as the fields specified in the `fields` argument.

    Raises:
        AttributeError: If the named fields do not exist in the trace header definition.
    """
    field_names = [_extract_field_name(field) for field in fields]

    inline_numbers = ensure_superset(reader_3d.inline_numbers(), inline_numbers)
    xline_numbers = ensure_superset(reader_3d.xline_numbers(), xline_numbers)
    shape = (len(inline_numbers), len(xline_numbers))

    class SubFormat(metaclass=SubFormatMeta,
                    parent_format=reader_3d.trace_header_format_class,
                    parent_field_names=field_names):
        pass

    sub_header_packer = make_header_packer(SubFormat, reader_3d.endian)
    TraceHeaderArrays = namedtuple('TraceHeaderArrays', field_names)

    arrays = (_make_array(shape,
                          make_dtype(getattr(SubFormat, field_name).value_type.SEG_Y_TYPE),
                          null)
              for field_name in field_names)

    trace_header_arrays = TraceHeaderArrays(*arrays)

    for inline_index, inline_number in enumerate(inline_numbers):
        for xline_index, xline_number in enumerate(xline_numbers):
            inline_xline_number = (inline_number, xline_number)
            if reader_3d.has_trace_index(inline_xline_number):
                trace_index = reader_3d.trace_index((inline_number, xline_number))
                trace_header = reader_3d.trace_header(trace_index, sub_header_packer)

                for field_name, a in zip(field_names, trace_header_arrays):
                    field_value = getattr(trace_header, field_name)
                    a[inline_index, xline_index] = field_value

    return trace_header_arrays
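
A sketch of extracting a sub-volume, assuming `reader_3d` is an existing SegYReader3D; the slice form selects the first fifty inline numbers, and the null value makes the results plain ndarrays rather than masked arrays:

arrays = extract_trace_header_field_3d(reader_3d,
                                        ['inline_number', 'crossline_number'],
                                        inline_numbers=slice(0, 50),
                                        xline_numbers=None,  # all crosslines
                                        null=0)
print(arrays.inline_number.shape)  # (num_inlines, num_xlines)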
Example #30
def catalog_traces(fh, bps, trace_header_format=TraceHeaderRev1, endian='>', progress=None):
    """Build catalogs to facilitate random access to trace_samples data.

    Note:
        This function can take significant time to run, proportional
        to the number of traces in the SEG Y file.

    Four catalogs will be built:

     1. A catalog mapping trace index (0-based) to the position of that
        trace header in the file.

     2. A catalog mapping trace index (0-based) to the number of
        samples in that trace.

     3. A catalog mapping CDP number to the trace index.

     4. A catalog mapping an (inline, crossline) number 2-tuple to
        trace index.

    Args:
        fh: A file-like-object open in binary mode, positioned at the
            start of the first trace_samples header.

        bps: The number of bytes per sample, such as obtained by a call
            to bytes_per_sample()

        trace_header_format: The class defining the trace header format.
            Defaults to TraceHeaderRev1.

        endian: '>' for big-endian data (the standard and default), '<'
            for little-endian (non-standard)

        progress: A unary callable which will be passed a number
            between zero and one indicating the progress made. If
            provided, this callback will be invoked at least once with
            an argument equal to 1

    Returns:
        A 4-tuple of the form (trace-offset-catalog,
                               trace-length-catalog,
                               cdp-catalog,
                               line-catalog) where
        each catalog is an instance of ``collections.Mapping`` or None
        if no catalog could be built.
    """
    progress_callback = progress if progress is not None else lambda p: None

    if not callable(progress_callback):
        raise TypeError("catalog_traces(): progress callback must be callable")

    class CatalogSubFormat(metaclass=SubFormatMeta,
                           parent_format=trace_header_format,
                           parent_field_names=(
                               'file_sequence_num',
                               'ensemble_num',
                               'num_samples',
                               'inline_number',
                               'crossline_number',
                           )):
        pass

    trace_header_packer = make_header_packer(CatalogSubFormat, endian)

    length = file_length(fh)

    pos_begin = fh.tell()

    trace_offset_catalog_builder = CatalogBuilder()
    trace_length_catalog_builder = CatalogBuilder()
    line_catalog_builder = CatalogBuilder()
    alt_line_catalog_builder = CatalogBuilder()
    cdp_catalog_builder = CatalogBuilder()

    for trace_number in count():
        progress_callback(_READ_PROPORTION * pos_begin / length)
        fh.seek(pos_begin)
        data = fh.read(TRACE_HEADER_NUM_BYTES)
        if len(data) < TRACE_HEADER_NUM_BYTES:
            break
        trace_header = trace_header_packer.unpack(data)

        num_samples = trace_header.num_samples
        trace_length_catalog_builder.add(trace_number, num_samples)
        samples_bytes = num_samples * bps
        trace_offset_catalog_builder.add(trace_number, pos_begin)
        # Should we check the data actually exists?
        line_catalog_builder.add((trace_header.inline_number,
                                  trace_header.crossline_number),
                                 trace_number)
        alt_line_catalog_builder.add((trace_header.file_sequence_num,
                                     trace_header.ensemble_num),
                                     trace_number)
        cdp_catalog_builder.add(trace_header.ensemble_num, trace_number)
        pos_end = pos_begin + TRACE_HEADER_NUM_BYTES + samples_bytes
        pos_begin = pos_end

    progress_callback(_READ_PROPORTION)

    trace_offset_catalog = trace_offset_catalog_builder.create()
    progress_callback(_READ_PROPORTION + (_READ_PROPORTION / 4))

    trace_length_catalog = trace_length_catalog_builder.create()
    progress_callback(_READ_PROPORTION + (_READ_PROPORTION / 2))

    cdp_catalog = cdp_catalog_builder.create()
    progress_callback(_READ_PROPORTION + (_READ_PROPORTION * 3 / 4))

    line_catalog = line_catalog_builder.create()

    if line_catalog is None:
        # Some 3D files put Inline and Crossline numbers in (TraceSequenceFile, cdp) pair
        line_catalog = alt_line_catalog_builder.create()

    progress_callback(1)

    return (trace_offset_catalog,
            trace_length_catalog,
            cdp_catalog,
            line_catalog)
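
A sketch of calling catalog_traces by hand, assuming it is importable from segpy.toolkit, that the file has no extended textual headers, and that bps was obtained from bytes_per_sample() as the docstring describes (4 is typical for IBM or IEEE float samples):

from segpy.toolkit import catalog_traces  # module path assumed

with open('example.segy', 'rb') as fh:
    fh.seek(3200 + 400)  # textual reel header (3200 bytes) + binary reel header (400 bytes)
    offsets, lengths, cdp_catalog, line_catalog = catalog_traces(fh, bps=4)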
Example #31
 def test_pickle_versioning_mismatch_raises_type_error(self):
     packer1 = make_header_packer(PickleableHeader, endian='>')
     s = pickle.dumps(packer1)
     s = s.replace(segpy.__version__.encode('ascii'), b'xxxxx')
     with raises(TypeError):
         pickle.loads(s)
Example #32
def extract_trace_header_field_3d(reader_3d,
                                  fields,
                                  inline_numbers=None,
                                  xline_numbers=None,
                                  null=None):
    """Extract a single trace header field from all trace headers as an array.

    Args:
        reader_3d: A SegYReader3D

        fields: An iterable series where each item is either the name of a field as a string,
            or an object with a 'name' attribute (such as a NamedField) whose value is the name
            of a field as a string.

        inline_numbers: The inline numbers for which traces are to be extracted.
            This argument can be specified in three ways:

            None (the default) - All traces within each crossline will be extracted.

            sequence - When a sequence, such as a range or a list is provided only those traces at
                inline numbers corresponding to the items in the sequence will be extracted. The
                traces will always be extracted in increasing numeric order and duplicate entries
                will be ignored.  For example inline_numbers=range(100, 200, 2) will extract alternate
                traces from inline number 100 to inline number 198 inclusive.

            slice - When a slice object is provided the slice will be applied to the sequence of all
                inline numbers. For example inline_numbers=slice(100, -100) will omit the first
                one hundred and the last one hundred traces, irrespective of their numbers.

        xline_numbers: The crossline numbers at which traces are to be extracted.
            This argument can be specified in three ways:

            None (the default) - All traces within each inline will be extracted.

            sequence - When a sequence, such as a range or a list is provided only those traces at
                crossline numbers corresponding to the items in the sequence will be extracted. The
                traces will always be extracted in increasing numeric order and duplicate entries
                will be ignored.  For example xline_numbers=range(100, 200, 2) will extract alternate
                traces from crossline number 100 to crossline number 198 inclusive.

            slice - When a slice object is provided the slice will be applied to the sequence of all
                crossline numbers. For example xline_numbers=slice(100, -100) will omit the first
                one hundred and the last one hundred traces, irrespective of their numbers.

        null: An optional null value for missing traces. The null value must be convertible
            to all field value types.

    Returns:
        A namedtuple object with attributes which are two-dimensional Numpy arrays.
        If a null value was specified the arrays will be ndarrays, otherwise they
        will be masked arrays.  The attributes of the named tuple are in the same
        order as the fields specified in the `fields` argument.

    Raises:
        AttributeError: If the named fields do not exist in the trace header definition.
    """
    field_names = [_extract_field_name(field) for field in fields]

    inline_numbers = ensure_superset(reader_3d.inline_numbers(),
                                     inline_numbers)
    xline_numbers = ensure_superset(reader_3d.xline_numbers(), xline_numbers)
    shape = (len(inline_numbers), len(xline_numbers))

    class SubFormat(metaclass=SubFormatMeta,
                    parent_format=reader_3d.trace_header_format_class,
                    parent_field_names=field_names):
        pass

    sub_header_packer = make_header_packer(SubFormat, reader_3d.endian)
    TraceHeaderArrays = namedtuple('TraceHeaderArrays', field_names)

    arrays = (_make_array(
        shape,
        make_dtype(getattr(SubFormat, field_name).value_type.SEG_Y_TYPE), null)
              for field_name in field_names)

    trace_header_arrays = TraceHeaderArrays(*arrays)

    for inline_index, inline_number in enumerate(inline_numbers):
        for xline_index, xline_number in enumerate(xline_numbers):
            inline_xline_number = (inline_number, xline_number)
            if reader_3d.has_trace_index(inline_xline_number):
                trace_index = reader_3d.trace_index(
                    (inline_number, xline_number))
                trace_header = reader_3d.trace_header(trace_index,
                                                      sub_header_packer)

                for field_name, a in zip(field_names, trace_header_arrays):
                    field_value = getattr(trace_header, field_name)
                    a[inline_index, xline_index] = field_value

    return trace_header_arrays
Example #33
 def test_read_header_successfully(self, trace_header_written, endian):
     trace_header_packer = make_header_packer(TraceHeaderRev1, endian)
     buffer = trace_header_packer.pack(trace_header_written)
     with BytesIO(buffer) as fh:
         trace_header = toolkit.read_trace_header(fh, trace_header_packer, pos=0)
         assert are_equal(trace_header_written, trace_header)
Example #34
 def test_pickle_versioning_mismatch_raises_type_error(self):
     packer1 = make_header_packer(PickleableHeader, endian='>')
     s = pickle.dumps(packer1)
     s = s.replace(segpy.__version__.encode('ascii'), b'xxxxx')
     with raises(TypeError):
         pickle.loads(s)