def to_instance(cls, ob, default=None, strict=False):
    """Encode a GeoJSON dict into an GeoJSON object.

    Assumes the caller knows that the dict should satisfy a GeoJSON type.
    """
    # Nothing to coerce: hand back a fresh instance of the default type.
    if ob is None and default is not None:
        return default()
    # Already a GeoJSON object -- pass it through untouched.
    if isinstance(ob, GeoJSON):
        return ob
    mapping = to_mapping(ob)
    # Re-key with plain str keys so they can be used as keyword arguments.
    kwargs = {str(key): mapping[key] for key in mapping}
    try:
        type_ = kwargs.pop("type")
        # Look up the concrete GeoJSON class (Point, LineString, ...) by name.
        geojson_factory = getattr(geojson.factory, type_)
        if not issubclass(geojson_factory, GeoJSON):
            raise TypeError("""\
Not a valid GeoJSON type: %r (geojson_factory: %r, cls: %r)
""" % (type_, geojson_factory, cls))
        return geojson_factory(**kwargs)
    except (AttributeError, KeyError) as invalid:
        if strict:
            msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
            msg %= (ob, invalid)
            raise ValueError(msg)
        # Best effort mode: return the original object unchanged.
        return ob
def to_instance(cls, ob, default=None, strict=False):
    """Encode a GeoJSON dict into an GeoJSON object.

    Assumes the caller knows that the dict should satisfy a GeoJSON type.

    :param cls: the type on whose behalf coercion is attempted (used only
        in the error message).
    :param ob: the object to coerce; a mapping, a GeoJSON instance or None.
    :param default: factory called when `ob` is None.
    :param strict: if True, raise ValueError when coercion fails instead
        of returning `ob` unchanged.
    :return: a GeoJSON instance, or `ob` itself when coercion fails and
        `strict` is False.
    :raises ValueError: when `strict` is True and `ob` cannot be coerced.
    """
    if ob is None and default is not None:
        instance = default()
    elif isinstance(ob, GeoJSON):
        instance = ob
    else:
        mapping = to_mapping(ob)
        d = dict((str(k), mapping[k]) for k in mapping)
        try:
            type_ = d.pop("type")
            geojson_factory = getattr(geojson.factory, type_)
            if not issubclass(geojson_factory, GeoJSON):
                raise TypeError("""\
Not a valid GeoJSON type: %r (geojson_factory: %r, cls: %r)
""" % (type_, geojson_factory, cls))
            instance = geojson_factory(**d)
        # FIX: was Python-2-only `except (...), invalid` syntax, which is a
        # syntax error on Python 3; `as` works on both 2.6+ and 3.
        except (AttributeError, KeyError) as invalid:
            if not strict:
                instance = ob
            else:
                msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
                msg %= (ob, invalid)
                raise ValueError(msg)
    # FIX: the original fell off the end and implicitly returned None.
    return instance
def positions(self) -> ScanResult:
    ''' Extracts positions from SWATH_BATHYMETRY records. Scan result
    includes geojson line definition comprised of these unique positions.

    :return: :class:`hyo2.mate.lib.scan.ScanResult`
    '''
    if pygsf.SWATH_BATHYMETRY not in self.datagrams:
        msg = (
            "Swath bathymetry datagram (record id {}) not found in file. "
            "Unable to extract position data".format(
                pygsf.SWATH_BATHYMETRY))
        return ScanResult(state=ScanState.WARNING, messages=msg)

    unique_points = []
    previous = None
    for datagram in self.datagrams[pygsf.SWATH_BATHYMETRY]:
        point = Point([datagram.longitude, datagram.latitude])
        # Consecutive duplicate fixes add nothing to the track line.
        if (previous is not None and
                point.coordinates[0] == previous.coordinates[0] and
                point.coordinates[1] == previous.coordinates[1]):
            continue
        unique_points.append(point)
        previous = point

    track = LineString(unique_points)
    collection = FeatureCollection([Feature(geometry=track)])
    return ScanResult(
        state=ScanState.PASS,
        messages=[],
        data={'map': to_mapping(collection)})
def positions(self) -> ScanResult:
    ''' Extracts positions from position datagram. Scan result includes
    geojson line definition comprised of these unique positions.

    :return: :class:`hyo2.mate.lib.scan.ScanResult`
    '''
    if 'P' not in self.datagrams:
        # FIX: message previously read "Posistion" (typo).
        return ScanResult(
            state=ScanState.WARNING,
            messages="Position datagram (P) not found in file",
            data={})

    p_datagrams = self.datagrams['P']

    position_points = []
    last_point = None
    for p_datagram in p_datagrams:
        pt = Point([p_datagram.Longitude, p_datagram.Latitude])
        if (last_point is not None and
                pt.coordinates[0] == last_point.coordinates[0] and
                pt.coordinates[1] == last_point.coordinates[1]):
            # skip any points that have the same location
            continue
        position_points.append(pt)
        last_point = pt

    line = LineString(position_points)
    feature = Feature(geometry=line)
    feature_collection = FeatureCollection([feature])
    data = {'map': to_mapping(feature_collection)}

    return ScanResult(state=ScanState.PASS, messages=[], data=data)
def to_instance(cls, ob, default=None, strict=False):
    """Encode a GeoJSON dict into an GeoJSON object.
    Assumes the caller knows that the dict should satisfy a GeoJSON type.

    :param cls: Dict containing the elements to be encoded into a GeoJSON
        object.
    :type cls: dict
    :param ob: GeoJSON object into which to encode the dict provided in
        `cls`.
    :type ob: GeoJSON
    :param default: A default instance to append the content of the dict
        to if none is provided.
    :type default: GeoJSON
    :param strict: Raise error if unable to coerce particular keys or
        attributes to a valid GeoJSON structure.
    :type strict: bool
    :return: A GeoJSON object with the dict's elements as its constituents.
    :rtype: GeoJSON
    :raises TypeError: If the input dict contains items that are not valid
        GeoJSON types.
    :raises UnicodeEncodeError: If the input dict contains items of a type
        that contain non-ASCII characters.
    :raises AttributeError: If the input dict contains items that are not
        valid GeoJSON types.
    """
    if ob is None and default is not None:
        instance = default()
    elif isinstance(ob, GeoJSON):
        instance = ob
    else:
        mapping = to_mapping(ob)
        d = {}
        for k in mapping:
            d[k] = mapping[k]
        try:
            type_ = d.pop("type")
            try:
                type_ = str(type_)
            except UnicodeEncodeError:
                # If the type contains non-ascii characters, we can assume
                # it's not a valid GeoJSON type.
                # FIX: `.format` was previously applied to the raised
                # AttributeError instance (outside the constructor call),
                # so the placeholder was never filled in; format the
                # message string instead.
                raise AttributeError(
                    "{0} is not a GeoJSON type".format(type_))
            geojson_factory = getattr(geojson.factory, type_)
            if not issubclass(geojson_factory, GeoJSON):
                raise TypeError("""\
Not a valid GeoJSON type: %r (geojson_factory: %r, cls: %r)
""" % (type_, geojson_factory, cls))
            instance = geojson_factory(**d)
        except (AttributeError, KeyError) as invalid:
            if strict:
                msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
                msg %= (ob, invalid)
                raise ValueError(msg)
            instance = ob
    return instance
def to_instance(cls, ob, default=None, strict=False):
    """Encode a GeoJSON dict into an GeoJSON object.
    Assumes the caller knows that the dict should satisfy a GeoJSON type.

    :param cls: Dict containing the elements to be encoded into a GeoJSON
        object.
    :type cls: dict
    :param ob: GeoJSON object into which to encode the dict provided in
        `cls`.
    :type ob: GeoJSON
    :param default: A default instance to append the content of the dict
        to if none is provided.
    :type default: GeoJSON
    :param strict: Raise error if unable to coerce particular keys or
        attributes to a valid GeoJSON structure.
    :type strict: bool
    :return: A GeoJSON object with the dict's elements as its constituents.
    :rtype: GeoJSON
    :raises TypeError: If the input dict contains items that are not valid
        GeoJSON types.
    :raises UnicodeEncodeError: If the input dict contains items of a type
        that contain non-ASCII characters.
    :raises AttributeError: If the input dict contains items that are not
        valid GeoJSON types.
    """
    if ob is None and default is not None:
        instance = default()
    elif isinstance(ob, GeoJSON):
        instance = ob
    else:
        mapping = to_mapping(ob)
        d = {}
        for k in mapping:
            d[k] = mapping[k]
        try:
            type_ = d.pop("type")
            try:
                type_ = str(type_)
            except UnicodeEncodeError:
                # If the type contains non-ascii characters, we can assume
                # it's not a valid GeoJSON type.
                # FIX: `.format` was previously applied to the raised
                # AttributeError instance (outside the constructor call),
                # so the placeholder was never filled in; format the
                # message string instead.
                raise AttributeError(
                    "{0} is not a GeoJSON type".format(type_))
            geojson_factory = getattr(geojson.factory, type_)
            if not issubclass(geojson_factory, GeoJSON):
                raise TypeError("""\
Not a valid GeoJSON type: %r (geojson_factory: %r, cls: %r)
""" % (type_, geojson_factory, cls))
            instance = geojson_factory(**d)
        except (AttributeError, KeyError) as invalid:
            if strict:
                msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
                msg %= (ob, invalid)
                raise ValueError(msg)
            instance = ob
    return instance
def default(self, obj):
    """JSONEncoder hook: convert *obj* to a serializable mapping.

    A mapping is used as-is; anything else goes through ``to_mapping``.
    When the mapping carries a ``type`` key, the matching factory from
    ``geojson.factory`` rebuilds the object and its ``__geo_interface__``
    is returned instead.
    """
    result = dict(obj if isinstance(obj, Mapping) else to_mapping(obj))
    geom_type = result.pop("type", None)
    if geom_type:
        # Fall back to the plain GeoJSON class for unknown type names.
        factory = getattr(geojson.factory, geom_type,
                          geojson.factory.GeoJSON)
        result = factory(**result).__geo_interface__
    return result
def encode(self, message, **kwargs):
    """
    Encode message for sending via channel - message in format
    FeatureCollection.

    :param message: mapping to encode; when it carries a "metadata" key
        its "data" entry is first converted via ``to_mapping`` and the
        whole message serialized with ``json``; otherwise the message is
        serialized with ``geojson.dumps``.
    :return: the result of ``self.encode_channel`` on the serialized text.
    """
    if "metadata" in message:
        message["data"] = to_mapping(message["data"])
        # FIX: removed leftover Python-2 debug statement `print message`,
        # which is a syntax error on Python 3 and spammed stdout.
        return self.encode_channel(json.dumps(message))
    else:
        return self.encode_channel(geojson.dumps(message))
def pack(self, messages, which="all", which_index=0):
    """ pack messages for store """
    # Map selector names to the list index they address; anything else
    # ("all" included) packs the full message list.
    index_for = {"first": 0, "last": -1, "index": which_index}
    if which in index_for:
        return geojson.dumps(messages[index_for[which]])
    package = {'messages': [to_mapping(m) for m in messages]}
    return json.dumps(package)
def to_instance(cls, ob, default=None, strict=False):
    """Encode a GeoJSON dict into an GeoJSON object.

    Assumes the caller knows that the dict should satisfy a GeoJSON type.

    :param cls: the type on whose behalf coercion is attempted (only used
        in the TypeError message).
    :param ob: the object to coerce; a mapping, a GeoJSON instance or None.
    :param default: factory called when `ob` is None.
    :param strict: if True, raise ValueError on failed coercion instead of
        returning `ob` unchanged.
    :return: a GeoJSON instance, or `ob` itself on failure when not strict.
    """
    if ob is None and default is not None:
        # Nothing to coerce: fall back to a fresh default instance.
        instance = default()
    elif isinstance(ob, GeoJSON):
        # Already a GeoJSON object -- pass it through untouched.
        instance = ob
    else:
        mapping = to_mapping(ob)
        # Re-key the mapping so keys are usable as keyword arguments.
        # NOTE(review): `unicode` is a Python 2 builtin; this function
        # only runs on Python 2 -- confirm before porting.
        d = {}
        for k in mapping:
            try:
                str_key = str(k)
            except (UnicodeEncodeError):
                # Non-ASCII key: keep it as unicode rather than failing.
                str_key = unicode(k)
            d[str_key] = mapping[k]
        try:
            type_ = d.pop("type")
            try:
                type_ = str(type_)
            except (UnicodeEncodeError):
                # If the type contains non-ascii characters, we can assume
                # it's not a valid GeoJSON type
                raise AttributeError(
                    unicode("{0} is not a GeoJSON type").format(
                        unicode(type_)))
            # Look up the concrete class (Point, LineString, ...) by name.
            geojson_factory = getattr(geojson.factory, type_)
            if not issubclass(geojson_factory, GeoJSON):
                raise TypeError("""\
Not a valid GeoJSON type: %r (geojson_factory: %r, cls: %r)
""" % (type_, geojson_factory, cls))
            instance = geojson_factory(**d)
        except (AttributeError, KeyError) as invalid:
            if not strict:
                # Best effort: hand back the original object unchanged.
                instance = ob
            else:
                msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
                msg %= (ob, invalid)
                raise ValueError(msg)
    return instance
def _to_points_geojson(self, items):
    ''' Converts a list of dicts, where each dict contains a Latitude
    and Longitude into a geojson object
    '''
    features = []
    for record in items:
        geometry = Point([record["Longitude"], record["Latitude"]])
        # Drop lat/lng from the properties copy so the coordinates only
        # appear in the geometry definition, not the properties too.
        properties = record.copy()
        properties.pop("Longitude")
        properties.pop("Latitude")
        features.append(Feature(geometry=geometry, properties=properties))
    return to_mapping(FeatureCollection(features))
def test_GeoJSON(self):
    # A bare GeoJSON object exposes no geo interface but still maps to
    # a dict carrying its type name.
    base = geojson.GeoJSON()
    self.assertEqual(base.__geo_interface__, None)
    self.assertEqual(to_mapping(base), {"type": "GeoJSON"})
def test_mapping(self):
    # A Point round-trips into a plain dict of type + coordinates.
    expected = {"coordinates": [1.0, 2.0], "type": "Point"}
    self.assertEqual(expected, to_mapping(geojson.Point([1.0, 2.0])))
def dumps(obj, cls=GeoJSONEncoder, allow_nan=False, **kwargs):
    """Serialize *obj* to a GeoJSON string.

    The object is first flattened with ``to_mapping`` and then handed to
    ``json.dumps`` with ``allow_nan`` off by default (NaN/Infinity are not
    valid JSON).
    """
    mapping = to_mapping(obj)
    return json.dumps(mapping, cls=cls, allow_nan=allow_nan, **kwargs)
def test_mapping(self):
    # Integer coordinates survive the conversion to a plain mapping.
    expected = {"coordinates": [1, 2], "type": "Point"}
    self.assertEqual(expected, to_mapping(geojson.Point([1, 2])))
def dumps(obj, cls=GeoJSONEncoder, **kwargs):
    """Serialize *obj* to a GeoJSON string via ``simplejson``.

    The object is first flattened with ``to_mapping`` before encoding.
    """
    mapping = to_mapping(obj)
    return simplejson.dumps(mapping, cls=cls, **kwargs)
def to_dict(instance, deep=None, exclude=None, include=None,
            exclude_relations=None, include_relations=None,
            include_methods=None):
    """Returns a dictionary representing the fields of the specified
    `instance` of a SQLAlchemy model.

    The returned dictionary is suitable as an argument to
    :func:`flask.jsonify`; :class:`datetime.date` and :class:`uuid.UUID`
    objects are converted to string representations, so no special JSON
    encoder behavior is required.

    `deep` is a dictionary containing a mapping from a relation name (for
    a relation of `instance`) to either a list or a dictionary. This is a
    recursive structure which represents the `deep` argument when calling
    :func:`!_to_dict` on related instances. When an empty list is
    encountered, :func:`!_to_dict` returns a list of the string
    representations of the related instances.

    If either `include` or `exclude` is not ``None``, exactly one of them
    must be specified. If both are not ``None``, then this function will
    raise a :exc:`ValueError`.

    `exclude` must be a list of strings specifying the columns which will
    *not* be present in the returned dictionary representation of the
    object (in other words, it is a blacklist). Similarly, `include`
    specifies the only columns which will be present in the returned
    dictionary (in other words, it is a whitelist).

    .. note::

       If `include` is an iterable of length zero (like the empty tuple or
       the empty list), then the returned dictionary will be empty. If
       `include` is ``None``, then the returned dictionary will include
       all columns not excluded by `exclude`.

    `include_relations` is a dictionary mapping strings representing
    relation fields on the specified `instance` to a list of strings
    representing the names of fields on the related model which should be
    included in the returned dictionary; `exclude_relations` is similar.

    `include_methods` is a list mapping strings to method names which will
    be called and their return values added to the returned dictionary.
    """
    if (exclude is not None or exclude_relations is not None) and \
            (include is not None or include_relations is not None):
        raise ValueError('Cannot specify both include and exclude.')
    # create a list of names of columns, including hybrid properties
    instance_type = type(instance)
    columns = []
    try:
        inspected_instance = sqlalchemy_inspect(instance_type)
        column_attrs = inspected_instance.column_attrs.keys()
        descriptors = inspected_instance.all_orm_descriptors.items()
        # Hybrid properties are exposed as ORM descriptors, not columns,
        # so they are collected separately and appended.
        hybrid_columns = [k for k, d in descriptors
                          if d.extension_type == hybrid.HYBRID_PROPERTY]
        columns = column_attrs + hybrid_columns
    except NoInspectionAvailable:
        # Not a mapped SQLAlchemy object: return it unchanged and let the
        # caller (or the JSON encoder) deal with it.
        return instance
    # filter the columns based on exclude and include values
    if exclude is not None:
        columns = (c for c in columns if c not in exclude)
    elif include is not None:
        columns = (c for c in columns if c in include)
    # create a dictionary mapping column name to value; dunder names and
    # blacklisted internal columns are never exposed
    result = dict((col, getattr(instance, col)) for col in columns
                  if not (col.startswith('__') or col in COLUMN_BLACKLIST))
    # add any included methods (dotted names belong to relations and are
    # handled further below)
    if include_methods is not None:
        result.update(dict((method, getattr(instance, method)())
                           for method in include_methods
                           if not '.' in method))
    # Check for objects in the dictionary that may not be serializable by
    # default. Convert datetime objects to ISO 8601 format, convert UUID
    # objects to hexadecimal strings, etc.
    for key, value in result.items():
        if isinstance(value, (datetime.date, datetime.time)):
            result[key] = value.isoformat()
        elif isinstance(value, uuid.UUID):
            result[key] = str(value)
        elif isinstance(value, WKBElement):
            # Spatial column: convert WKB to a shapely shape, then to a
            # plain GeoJSON-style mapping.
            shape = to_shape(value)
            result[key] = to_mapping(shape)
        elif key not in column_attrs and is_mapped_class(type(value)):
            # Hybrid property that returned a mapped object: serialize it
            # recursively.
            result[key] = to_dict(value)
    # recursively call _to_dict on each of the `deep` relations
    deep = deep or {}
    for relation, rdeep in deep.items():
        # Get the related value so we can see if it is None, a list, a
        # query (as specified by a dynamic relationship loader), or an
        # actual instance of a model.
        relatedvalue = getattr(instance, relation)
        if relatedvalue is None:
            result[relation] = None
            continue
        # Determine the included and excluded fields for the related model.
        newexclude = None
        newinclude = None
        if exclude_relations is not None and relation in exclude_relations:
            newexclude = exclude_relations[relation]
        elif (include_relations is not None and
              relation in include_relations):
            newinclude = include_relations[relation]
        # Determine the included methods for the related model: strip the
        # leading "relation." prefix from each dotted method name.
        newmethods = None
        if include_methods is not None:
            newmethods = [method.split('.', 1)[1] for method in
                          include_methods
                          if method.split('.', 1)[0] == relation]
        if is_like_list(instance, relation):
            result[relation] = [to_dict(inst, rdeep, exclude=newexclude,
                                        include=newinclude,
                                        include_methods=newmethods)
                                for inst in relatedvalue]
            continue
        # If the related value is dynamically loaded, resolve the query to
        # get the single instance.
        if isinstance(relatedvalue, Query):
            relatedvalue = relatedvalue.one()
        result[relation] = to_dict(relatedvalue, rdeep, exclude=newexclude,
                                   include=newinclude,
                                   include_methods=newmethods)
    return result