def test_create_point(cnet_dataframe, tmpdir):
    # Write the cnet
    io_controlnetwork.to_isis(cnet_dataframe, tmpdir.join('test.net'), mode='wb', targetname='Moon')

    with open(tmpdir.join('test.net'), 'rb') as f:
        f.seek(cnet_dataframe.point_start_byte)
        for i, length in enumerate(cnet_dataframe.measure_size):
            point_protocol = cnf.ControlPointFileEntryV0002()
            raw_point = f.read(length)
            point_protocol.ParseFromString(raw_point)
            assert str(i) == point_protocol.id
            assert 2 == point_protocol.type
            assert i % 2 == point_protocol.referenceIndex
            if i == cnet_dataframe.npts - 1:
                assert point_protocol.aprioriCovar == [1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
            for j, m in enumerate(point_protocol.measures):
                assert m.serialnumber in cnet_dataframe.serials.values()
                assert 2 == m.type
                assert len(m.log) == j  # Only the second measure has a message
def read(self):
    """
    Given an ISIS store, read the underlying ISIS3 compatible control network and
    return an IsisControlNetwork dataframe.
    """
    pvl_header = pvl.load(self._path)

    header_start_byte = find_in_dict(pvl_header, 'HeaderStartByte')
    header_bytes = find_in_dict(pvl_header, 'HeaderBytes')
    point_start_byte = find_in_dict(pvl_header, 'PointsStartByte')
    version = find_in_dict(pvl_header, 'Version')

    if version == 2:
        point_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002.fields_by_name if i != 'measures']
        measure_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002_MEASURE.fields_by_name]
        cols = point_attrs + measure_attrs

        self._handle.seek(header_start_byte)
        pbuf_header = cnf.ControlNetFileHeaderV0002()
        pbuf_header.ParseFromString(self._handle.read(header_bytes))

        self._handle.seek(point_start_byte)
        cp = cnf.ControlPointFileEntryV0002()
        pts = []
        for s in pbuf_header.pointMessageSizes:
            cp.ParseFromString(self._handle.read(s))
            pt = [getattr(cp, i) for i in point_attrs if i != 'measures']
            for measure in cp.measures:
                meas = pt + [getattr(measure, j) for j in measure_attrs]
                pts.append(meas)

        df = IsisControlNetwork(pts, columns=cols)
        df.header = pvl_header
        return df
def test_create_point(self):
    with open('test.net', 'rb') as f:
        f.seek(self.point_start_byte)
        for i, length in enumerate([135] * self.npts):
            point_protocol = cnf.ControlPointFileEntryV0002()
            raw_point = f.read(length)
            point_protocol.ParseFromString(raw_point)
            self.assertEqual(str(i), point_protocol.id)
            self.assertEqual(2, point_protocol.type)
            for m in point_protocol.measures:
                self.assertTrue(m.serialnumber in self.serials.values())
                self.assertEqual(2, m.type)
def to_protobuf(self, version=2):
    """
    Return a protobuf compliant measure log object representation of this class.

    Returns
    -------
    log_message : obj
                  MeasureLogData object suitable to append to a MeasureLog
                  repeated field.
    """
    # There is no clean way to import the nested MeasureLogData message directly;
    # it has to be instantiated off a parent Measure instance.
    if version == 2:
        log_message = cnf.ControlPointFileEntryV0002().Measure().MeasureLogData()
    elif version == 5:
        log_message = cnp5.ControlPointFileEntryV0005().Measure().MeasureLogData()
    else:
        raise ValueError(f'Unsupported control network version: {version}')
    log_message.doubleDataValue = self.value
    log_message.doubleDataType = self.messagetype
    return log_message
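# A minimal, hedged sketch of how to_protobuf() is typically consumed: the
# returned MeasureLogData message is appended to a measure's repeated `log`
# field (the same field the tests read back as m.log). The helper below is
# illustrative only; `measure_log` stands in for an instance of the class above.
def attach_measure_log(measure_spec, measure_log, version=2):
    # Repeated protobuf fields must be extended rather than assigned.
    measure_spec.log.extend([measure_log.to_protobuf(version=version)])
    return measure_spec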
def create_points(self, df, serials, pointid_prefix, pointid_suffix):
    """
    Step through a control network (C) and return protocol buffer point objects

    Parameters
    ----------
    df : DataFrame
         DataFrame with the appropriate attributes: point_id, point_type, serial,
         measure_type, x, y required. The entries in the list must support
         grouping by the point_id attribute.

    Returns
    -------
    point_messages : list
                     of serialized point buffers
    point_sizes : list
                  of integer point sizes
    """
    def _set_pid(pointid):
        return '{}{}{}'.format(xstr(pointid_prefix),
                               pointid,
                               xstr(pointid_suffix))

    # TODO: Rewrite using apply syntax for performance
    point_sizes = []
    point_messages = []
    for i, g in df.groupby('point_id'):
        # Get the point specification from the protobuf
        point_spec = cnf.ControlPointFileEntryV0002()

        # Set the ID and then loop over all of the attributes that the
        # point has, checking for corresponding columns in the group and
        # setting them with the correct type
        point_spec.id = _set_pid(i)
        for attr, attrtype in self.point_attrs:
            if attr in g.columns:
                # As per the protobuf docs, repeated fields are extended, not assigned.
                if attr == 'aprioriCovar':
                    arr = g.iloc[0]['aprioriCovar']
                    point_spec.aprioriCovar.extend(arr.ravel().tolist())
                else:
                    setattr(point_spec, attr, attrtype(g.iloc[0][attr]))
        point_spec.type = 2  # Hardcoded to free

        # The reference index should always be the image with the lowest index
        point_spec.referenceIndex = 0

        # A single extend call is cheaper than many add calls to pack points
        measure_iterable = []
        for node_id, m in g.iterrows():
            measure_spec = point_spec.Measure()
            # For all of the attributes, set those that are dict-accessible attrs of the row.
            for attr, attrtype in self.measure_attrs:
                if attr in g.columns:
                    setattr(measure_spec, attr, attrtype(m[attr]))
            measure_spec.serialnumber = serials[m.image_index]
            measure_spec.sample = m.x
            measure_spec.line = m.y
            measure_spec.type = 2
            measure_iterable.append(measure_spec)
            self.nmeasures += 1
        self.npoints += 1

        point_spec.measures.extend(measure_iterable)

        point_message = point_spec.SerializeToString()
        point_sizes.append(point_spec.ByteSize())
        point_messages.append(point_message)
    return point_messages, point_sizes
def read(self):
    """
    Given an ISIS store, read the underlying ISIS3 compatible control network and
    return an IsisControlNetwork dataframe.
    """
    pvl_header = pvl.load(self._path)

    header_start_byte = find_in_dict(pvl_header, 'HeaderStartByte')
    header_bytes = find_in_dict(pvl_header, 'HeaderBytes')
    point_start_byte = find_in_dict(pvl_header, 'PointsStartByte')
    version = find_in_dict(pvl_header, 'Version')

    if version == 2:
        self.point_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002.fields_by_name if i != 'measures']
        self.measure_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002_MEASURE.fields_by_name]

        self._handle.seek(header_start_byte)
        pbuf_header = cnf.ControlNetFileHeaderV0002()
        pbuf_header.ParseFromString(self._handle.read(header_bytes))

        self._handle.seek(point_start_byte)
        cp = cnf.ControlPointFileEntryV0002()
        pts = []
        for s in pbuf_header.pointMessageSizes:
            cp.ParseFromString(self._handle.read(s))
            pt = [getattr(cp, i) for i in self.point_attrs if i != 'measures']
            for measure in cp.measures:
                meas = pt + [getattr(measure, j) for j in self.measure_attrs]
                pts.append(meas)

    elif version == 5:
        self.point_attrs = [i for i in cnp5._CONTROLPOINTFILEENTRYV0005.fields_by_name if i != 'measures']
        self.measure_attrs = [i for i in cnp5._CONTROLPOINTFILEENTRYV0005_MEASURE.fields_by_name]

        self._handle.seek(header_start_byte)
        pbuf_header = cnh5.ControlNetFileHeaderV0005()
        pbuf_header.ParseFromString(self._handle.read(header_bytes))

        self._handle.seek(point_start_byte)
        cp = cnp5.ControlPointFileEntryV0005()
        pts = []
        byte_count = 0
        while byte_count < find_in_dict(pvl_header, 'PointsBytes'):
            message_size = struct.unpack('I', self._handle.read(4))[0]
            cp.ParseFromString(self._handle.read(message_size))
            pt = [getattr(cp, i) for i in self.point_attrs if i != 'measures']
            for measure in cp.measures:
                meas = pt + [getattr(measure, j) for j in self.measure_attrs]
                pts.append(meas)
            byte_count += 4 + message_size

    # Some point and measure fields have the same name, so mangle them as point_ and measure_
    point_cols = [self.point_field_map[attr] if attr in self.point_field_map else attr
                  for attr in self.point_attrs]
    measure_cols = [self.measure_field_map[attr] if attr in self.measure_field_map else attr
                    for attr in self.measure_attrs]
    cols = point_cols + measure_cols

    df = IsisControlNetwork(pts, columns=cols)

    # Convert the (0.5, 0.5) origin pixels back to (0, 0) origin pixels
    df['line'] -= 0.5
    df['sample'] -= 0.5

    df.header = pvl_header
    return df
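# The point_field_map and measure_field_map used above are plain dicts on the
# store class that rename protobuf fields whose names collide between the point
# and measure messages. A hedged illustration only, built from names that appear
# in create_points() below; the real mapping may rename additional colliding fields:
point_field_map = {'type': 'pointType', 'log': 'pointLog'}
measure_field_map = {'log': 'measureLog'}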
def read(self):
    """
    Given an ISIS store, read the underlying ISIS3 compatible control network and
    return an IsisControlNetwork dataframe.
    """
    pvl_header = pvl.load(self._path)

    header_start_byte = find_in_dict(pvl_header, 'HeaderStartByte')
    header_bytes = find_in_dict(pvl_header, 'HeaderBytes')
    point_start_byte = find_in_dict(pvl_header, 'PointsStartByte')
    version = find_in_dict(pvl_header, 'Version')

    if version == 2:
        self.point_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002.fields_by_name if i != 'measures']
        self.measure_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002_MEASURE.fields_by_name]

        self._handle.seek(header_start_byte)
        pbuf_header = cnf.ControlNetFileHeaderV0002()
        pbuf_header.ParseFromString(self._handle.read(header_bytes))

        self._handle.seek(point_start_byte)
        cp = cnf.ControlPointFileEntryV0002()
        pts = []
        for s in pbuf_header.pointMessageSizes:
            cp.ParseFromString(self._handle.read(s))
            pt = [getattr(cp, i) for i in self.point_attrs if i != 'measures']
            for measure in cp.measures:
                meas = pt + [getattr(measure, j) for j in self.measure_attrs]
                pts.append(meas)

    elif version == 5:
        self.point_attrs = [i for i in cnp5._CONTROLPOINTFILEENTRYV0005.fields_by_name if i != 'measures']
        self.measure_attrs = [i for i in cnp5._CONTROLPOINTFILEENTRYV0005_MEASURE.fields_by_name]

        self._handle.seek(header_start_byte)
        pbuf_header = cnh5.ControlNetFileHeaderV0005()
        pbuf_header.ParseFromString(self._handle.read(header_bytes))

        self._handle.seek(point_start_byte)
        cp = cnp5.ControlPointFileEntryV0005()
        pts = []
        byte_count = 0
        while byte_count < find_in_dict(pvl_header, 'PointsBytes'):
            message_size = struct.unpack('I', self._handle.read(4))[0]
            cp.ParseFromString(self._handle.read(message_size))
            pt = [getattr(cp, i) for i in self.point_attrs if i != 'measures']
            for measure in cp.measures:
                meas = pt + [getattr(measure, j) for j in self.measure_attrs]
                pts.append(meas)
            byte_count += 4 + message_size

    # Rename the point-level jigsawRejected field so it does not clash with the
    # measure field of the same name.
    self.point_attrs = [i if i != 'jigsawRejected' else 'pointJigsawRejected' for i in self.point_attrs]

    cols = self.point_attrs + self.measure_attrs

    df = IsisControlNetwork(pts, columns=cols)
    df.header = pvl_header
    return df
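# A hedged usage sketch for read(): it assumes a store wrapper that exposes the
# _path and _handle attributes the method uses and that is opened in binary read
# mode. The IsisStore name and constructor arguments are assumptions for illustration.
store = IsisStore('network.net', mode='rb')
df = store.read()
print(find_in_dict(df.header, 'Version'))   # the original PVL header rides along on the dataframe
print(len(df))                              # one row per (point, measure) pair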
def create_points(self, df, pointid_prefix, pointid_suffix):
    """
    Step through a control network (C) and return protocol buffer point objects

    Parameters
    ----------
    df : DataFrame
         DataFrame with the appropriate attributes: point_id, point_type, serial,
         measure_type, x, y required. The entries in the list must support
         grouping by the point_id attribute.

    Returns
    -------
    point_messages : list
                     of serialized point buffers
    point_sizes : list
                  of integer point sizes
    """
    def _set_pid(pointid):
        return '{}{}{}'.format(xstr(pointid_prefix),
                               pointid,
                               xstr(pointid_suffix))

    # TODO: Rewrite using apply syntax for performance
    point_sizes = []
    point_messages = []
    for i, g in df.groupby('id'):
        # Get the point specification from the protobuf
        point_spec = cnf.ControlPointFileEntryV0002()

        # Set the ID and then loop over all of the attributes that the
        # point has, checking for corresponding columns in the group and
        # setting them with the correct type
        point_spec.id = _set_pid(i)
        point_spec.type = g.iloc[0].pointType
        try:
            point_spec.referenceIndex = g.iloc[0].referenceIndex
        except Exception:
            warnings.warn(f'Unable to identify referenceIndex for point {point_spec.id}. Defaulting to index 0.')
            point_spec.referenceIndex = 0

        for attr, attrtype in self.point_attrs:
            # Un-mangle common attribute names between points and measures
            df_attr = self.point_field_map.get(attr, attr)
            if df_attr in g.columns:
                if df_attr == 'pointLog':
                    # Currently pointLog is not supported.
                    warnings.warn('The pointLog field is currently unsupported. Any pointLog data will not be saved.')
                    continue
                # As per the protobuf docs, repeated fields are extended, not assigned.
                if df_attr == 'aprioriCovar' or df_attr == 'adjustedCovar':
                    arr = g.iloc[0][df_attr]
                    if isinstance(arr, np.ndarray):
                        arr = arr.ravel().tolist()
                    if arr:
                        getattr(point_spec, attr).extend(arr)
                # If the field is repeated, it must be extended instead of assigned
                elif cnf._CONTROLPOINTFILEENTRYV0002.fields_by_name[attr].label == 3:
                    getattr(point_spec, attr).extend(g.iloc[0][df_attr])
                else:
                    setattr(point_spec, attr, attrtype(g.iloc[0][df_attr]))

        # A single extend call is cheaper than many add calls to pack points
        measure_iterable = []
        for node_id, m in g.iterrows():
            measure_spec = point_spec.Measure()
            # For all of the attributes, set those that are dict-accessible attrs of the row.
            for attr, attrtype in self.measure_attrs:
                # Un-mangle common attribute names between points and measures
                df_attr = self.measure_field_map.get(attr, attr)
                if df_attr in g.columns:
                    if df_attr == 'measureLog':
                        for log in m[df_attr]:
                            getattr(measure_spec, attr).extend([log.to_protobuf()])
                    # If the field is repeated, it must be extended instead of assigned
                    elif cnf._CONTROLPOINTFILEENTRYV0002_MEASURE.fields_by_name[attr].label == 3:
                        getattr(measure_spec, attr).extend(m[df_attr])
                    else:
                        setattr(measure_spec, attr, attrtype(m[df_attr]))

            # ISIS pixels are centered on (0.5, 0.5); NDArrays are (0, 0) based.
            measure_spec.sample = m['sample'] + 0.5
            measure_spec.line = m['line'] + 0.5
            if 'apriorisample' in g.columns:
                measure_spec.apriorisample = m['apriorisample'] + 0.5
                measure_spec.aprioriline = m['aprioriline'] + 0.5
            measure_iterable.append(measure_spec)
            self.nmeasures += 1
        self.npoints += 1

        point_spec.measures.extend(measure_iterable)

        point_message = point_spec.SerializeToString()
        point_sizes.append(point_spec.ByteSize())
        point_messages.append(point_message)
    return point_messages, point_sizes
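# A hedged sketch of how the create_points() output is typically consumed: the
# serialized buffers are written back to back into the points section of the
# file, and the recorded sizes let a reader (see read() above) step through the
# stream one message at a time. This writer is illustrative, not the library's own.
def write_points(handle, point_messages, point_sizes):
    for message, size in zip(point_messages, point_sizes):
        assert len(message) == size   # ByteSize() matches the serialized length
        handle.write(message)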