class VectorPolyTable(Base):
    __tablename__ = 'poly_data_test'

    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.Text)
    description = sa.Column(sa.Text)
    geometry = sa.Column(geotypes.Geometry(geometry_type='POLYGON', srid=4326))
def update_headers(geometry_type, columns, table_name, schema):
    if not columns:
        raise NoSuchColumnError

    headers = {
        column['column_name']: sa.Column(
            column['column_name'],
            TYPES_MAPPING.get(column['data_type'])
        )
        for column in columns
        if column['data_type'] in TYPES_MAPPING
    }
    headers.update({
        '__tablename__': table_name,
        '__table_args__': {
            'schema': schema,
            'extend_existing': True,
        },
        ID_COL_NAME: sa.Column(
            ID_COL_NAME,
            sa.INTEGER,
            primary_key=True
        ),
        GEOMETRY_COL_NAME: sa.Column(
            geotypes.Geometry(geometry_type=geometry_type[0], srid=4326),
        ),
    })
    return headers
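A hedged usage sketch: the column metadata, table name, and DynamicDistricts class below are illustrative only, and it is assumed that TYPES_MAPPING is keyed by information_schema-style type names and that the returned dict is meant to be fed to type() to build a declarative class against Base.

# Hypothetical column metadata, e.g. as read from information_schema.columns.
columns = [
    {'column_name': 'name', 'data_type': 'text'},
    {'column_name': 'population', 'data_type': 'integer'},
]

headers = update_headers(
    geometry_type=['POLYGON'],   # update_headers reads geometry_type[0]
    columns=columns,
    table_name='districts',
    schema='public',
)

# Build the mapped class dynamically from the generated attribute dict.
DynamicDistricts = type('DynamicDistricts', (Base,), headers)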
class Contribution(db.Model):
    """An observation of an animal near a road."""

    __tablename__ = 'contribution'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    date_time = db.Column(db.DateTime, nullable=False)
    group_id = db.Column(db.Text, db.ForeignKey('group.id'), nullable=False)
    specie_id = db.Column(db.Integer, db.ForeignKey('taxon.id'))
    count_accuracy_id = db.Column(
        db.Enum('=', '≅', '≥', name='count_accuracy_type'), nullable=False
    )
    count = db.Column(db.Integer, nullable=False)
    is_alive = db.Column(db.Boolean, nullable=False)
    comments = db.Column(db.Text)
    first_name = db.Column(db.Text, nullable=False)
    surname = db.Column(db.Text, nullable=False)
    email = db.Column(db.Text, nullable=False)
    geometry = db.Column(
        geo_types.Geometry(geometry_type='POINT', srid=4326), nullable=False
    )

    group = db.relationship('WildLifeGroup', backref='observed_in', foreign_keys=[group_id])
    specie = db.relationship('Taxon', backref='observed_in', foreign_keys=[specie_id])
class Geom(DB.Model):
    id = DB.Column(DB.Integer, primary_key=True)
    type = DB.Column(DB.String(255), nullable=False)
    geom = DB.Column(geotypes.Geometry(srid=4326), nullable=False)

    def __init__(self, type, geom):
        self.type = type
        self.geom = geom
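A hedged insert sketch, assuming DB is a Flask-SQLAlchemy instance (so DB.session exists); the coordinates and the 'poi' value are made up for illustration.

from geoalchemy2.elements import WKTElement

# Store a single point, handing GeoAlchemy2 a WKT geometry with an explicit SRID.
point = Geom(type='poi', geom=WKTElement('POINT(5.0 45.0)', srid=4326))
DB.session.add(point)
DB.session.commit()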
class VectorTable(Base):
    __tablename__ = 'points_data_test'

    id = sa.Column(sa.Integer, primary_key=True)
    rackid = sa.Column(sa.BigInteger)
    address = sa.Column(sa.Text)
    ward = sa.Column(sa.BigInteger)
    community_area = sa.Column(sa.BigInteger)
    community_name = sa.Column(sa.Text)
    geometry = sa.Column(geotypes.Geometry(geometry_type='POINT', srid=4326))
def upgrade():
    op.create_table(
        'map',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('secret', sa.Unicode(), nullable=True),
        sa.Column('name', sa.Unicode(), nullable=True),
        sa.Column('description', sa.Unicode(), nullable=True),
        sa.Column('place', sa.Unicode(), nullable=True),
        sa.Column('_datetime', sa.DateTime(timezone=True), nullable=True),
        sa.Column('_bbox', gsa.Geometry(geometry_type='POLYGON', from_text='ST_GeomFromEWKT', name='geometry'), nullable=True),
        sa.Column('attributes', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('published', sa.Boolean(), nullable=True),
        sa.Column('lifespan', sa.Interval(), nullable=True),
        sa.Column('theme', sa.Unicode(), nullable=True),
        sa.PrimaryKeyConstraint('uuid'),
    )
    op.create_table(
        'feature',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('map_uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('_geo', gsa.Geometry(from_text='ST_GeomFromEWKT', name='geometry'), nullable=True),
        sa.Column('style', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['map_uuid'], ['map.uuid']),
        sa.PrimaryKeyConstraint('id'),
    )
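The migration above only defines upgrade(); a minimal sketch of the reverse operation, assuming the two tables can simply be dropped in reverse dependency order, might look like this.

def downgrade():
    # Drop 'feature' first because it holds the foreign key to 'map'.
    op.drop_table('feature')
    op.drop_table('map')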
def associate_column_names_and_sqlalchemy_types(dataframe):
    """
    Create a dict mapping column names to SQLAlchemy types, for use with
    pandas ``to_sql``.

    :returns: dict
    """
    dtypedict = {}
    for column_name in dataframe.columns:
        if "geom" in str(column_name):
            dtypedict[column_name] = gatypes.Geometry(geometry_type="POINT", srid=4326)
        else:
            dtypedict[column_name] = types.TEXT()
    return dtypedict
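A minimal usage sketch, assuming the DataFrame's 'geom' column already holds GeoAlchemy2 WKTElement values (or EWKT strings); the connection URL and target table name are placeholders.

import sqlalchemy as sa

engine = sa.create_engine("postgresql://user:password@localhost/gisdb")  # placeholder URL

# Build the column-name -> SQLAlchemy type mapping, geometry column included.
dtypedict = associate_column_names_and_sqlalchemy_types(dataframe)

# Hand the mapping to pandas so the 'geom' column is created as a PostGIS geometry.
dataframe.to_sql("points", engine, if_exists="replace", index=False, dtype=dtypedict)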
class Lake(Base):
    __tablename__ = 'lakes'

    id = sa.Column(
        postgresql_types.INTEGER(),
        primary_key=True,
        unique=True,
        nullable=False,
        autoincrement=True,
    )
    name = sa.Column(
        postgresql_types.VARCHAR(length=100),
        primary_key=False,
        unique=False,
        nullable=False,
    )
    geom = sa.Column(
        geotypes.Geometry(geometry_type="POLYGON", srid=4326, dimension=2, spatial_index=True),
        primary_key=False,
        unique=False,
        nullable=False,
    )
class Address(Base):
    __tablename__ = 'addresses'

    id = sa.Column(
        postgresql_types.INTEGER(),
        primary_key=True,
        unique=True,
        nullable=False,
        autoincrement=True,
    )
    detail = sa.Column(
        postgresql_types.VARCHAR(length=255),
        primary_key=False,
        unique=False,
        nullable=False,
    )
    geom = sa.Column(
        geotypes.Geometry(geometry_type="POINT", srid=4326, dimension=2, spatial_index=True),
        primary_key=False,
        unique=False,
        nullable=False,
    )
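A hedged query sketch showing how the two geometry columns above can be combined in a PostGIS spatial join; the session object and the lake name 'Majeur' are placeholders, not part of the original models.

from geoalchemy2 import functions as geo_func

# Addresses whose POINT geometry falls inside the named lake's POLYGON.
addresses_in_lake = (
    session.query(Address)
    .join(Lake, geo_func.ST_Contains(Lake.geom, Address.geom))
    .filter(Lake.name == 'Majeur')
    .all()
)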
def mapped_classes(metadata):
    """Return the classes mapped to the openFRED database via SQLAlchemy.

    The classes are dynamically created and stored in a dictionary keyed by
    class names. The dictionary also contains the special entry `__Base__`,
    which is the SQLAlchemy `declarative_base` instance used as the base
    class from which all mapped classes inherit.
    """
    Base = declarative_base(metadata=metadata)
    classes = {"__Base__": Base}

    def map(name, registry, namespace):
        namespace["__tablename__"] = "openfred_" + name.lower()
        namespace["__table_args__"] = namespace.get("__table_args__", ()) + (
            {"keep_existing": True},
        )
        if namespace["__tablename__"][-1] != "s":
            namespace["__tablename__"] += "s"
        registry[name] = type(name, (registry["__Base__"],), namespace)

    map(
        "Timespan",
        classes,
        {
            "id": C(BI, primary_key=True),
            "start": C(DT),
            "stop": C(DT),
            "resolution": C(Interval),
            "segments": C(ARRAY(DT, dimensions=2)),
            "__table_args__": (UC("start", "stop", "resolution"),),
        },
    )

    map(
        "Location",
        classes,
        {
            "id": C(BI, primary_key=True),
            "point": C(
                geotypes.Geometry(geometry_type="POINT", srid=4326),
                unique=True,
            ),
        },
    )

    # TODO: Handle units.
    class Variable(Base):
        __table_args__ = ({"keep_existing": True},)
        __tablename__ = "openfred_variables"

        id = C(BI, primary_key=True)
        name = C(Str(255), nullable=False, unique=True)
        # TODO: Figure out whether and where this is in the '.nc' files.
        type = C(Str(37))
        netcdf_attributes = C(JSON)
        description = C(Text)
        standard_name = C(Str(255))

        __mapper_args__ = {
            "polymorphic_identity": "variable",
            "polymorphic_on": type,
        }

    classes["Variable"] = Variable

    class Flags(Variable):
        __table_args__ = ({"keep_existing": True},)
        __tablename__ = "openfred_flags"

        id = C(BI, FK(Variable.id), primary_key=True)
        flag_ks = C(ARRAY(Int), nullable=False)
        flag_vs = C(ARRAY(Str(37)), nullable=False)

        __mapper_args__ = {"polymorphic_identity": "flags"}

        def flag(self, key):
            flags = dict(zip(self.flag_ks, self.flag_vs))
            return flags[key]

    classes["Flags"] = Flags

    class Series(Base):
        __tablename__ = "openfred_series"
        __table_args__ = (
            UC("height", "location_id", "timespan_id", "variable_id"),
            {"keep_existing": True},
        )

        id = C(BI, primary_key=True)
        values = C(ARRAY(Float), nullable=False)
        height = C(Float)
        timespan_id = C(BI, FK(classes["Timespan"].id), nullable=False)
        location_id = C(BI, FK(classes["Location"].id), nullable=False)
        variable_id = C(BI, FK(classes["Variable"].id), nullable=False)
        timespan = relationship(classes["Timespan"], backref="series")
        location = relationship(classes["Location"], backref="series")
        variable = relationship(classes["Variable"], backref="series")

    classes["Series"] = Series

    return classes
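A minimal usage sketch, assuming a PostGIS-enabled database behind a placeholder connection URL; the sample Timespan values are illustrative only.

import datetime
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker

engine = sa.create_engine("postgresql://user:password@localhost/openfred")  # placeholder URL
metadata = sa.MetaData()

classes = mapped_classes(metadata)

# Create the openfred_* tables (Location.point requires the PostGIS extension).
classes["__Base__"].metadata.create_all(engine)

session = sessionmaker(bind=engine)()
session.add(classes["Timespan"](
    start=datetime.datetime(2020, 1, 1),
    stop=datetime.datetime(2020, 1, 2),
    resolution=datetime.timedelta(hours=1),
))
session.commit()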