Example no. 1
0
class IntegrationLog(DataFrameMixin, db.Model):
    """ORM model for the ``integration_log`` table.

    One row per integration run: which model was integrated, when, and
    how many rows were inserted/updated/deleted, plus audit columns.
    """

    __tablename__ = "integration_log"
    # __table_args__ = {"schema": schema}

    # Surrogate primary key.
    id = db.Column(db.Integer, nullable=False, primary_key=True)

    # When the integration ran, and the name of the model it targeted.
    integrated_at = db.Column(db.DateTime(timezone=True), nullable=False)
    model_name = db.Column(db.String, nullable=False)

    # Row counts produced by the run; each defaults to zero.
    inserts = db.Column(db.Integer, default=0, nullable=False)
    updates = db.Column(db.Integer, default=0, nullable=False)
    deletes = db.Column(db.Integer, default=0, nullable=False)

    # Audit trail: database user and timestamps (client-side defaults,
    # rendered into the INSERT by SQLAlchemy rather than table DDL).
    updated_by = db.Column(db.String, default=func.current_user(), nullable=False)
    created_at = db.Column(db.DateTime(timezone=True), default=func.now(), nullable=False)
    updated_at = db.Column(db.DateTime(timezone=True), default=func.now(), nullable=False)
Example no. 2
0
# Registry of metadata-type definitions.
METADATA_TYPE = Table(
    'metadata_type',
    _core.METADATA,
    Column('id', SmallInteger, autoincrement=True, primary_key=True),

    # Unique, user-visible identifier of the metadata type.
    Column('name', String, nullable=False, unique=True),

    # The full definition document.
    Column('definition', postgres.JSONB, nullable=False),

    # Audit columns: creation time and the creating database user.
    Column('added', DateTime(timezone=True), nullable=False,
           server_default=func.now()),
    Column('added_by', sql.PGNAME, nullable=False,
           server_default=func.current_user()),

    # NOTE: the `updated` column is deliberately omitted to stay
    # backwards-compatible with pre-1.8.3 datacubes (no internal ODC
    # functionality uses it yet anyway).

    # Names may contain only alphanumerics and underscores.
    CheckConstraint(r"name ~* '^\w+$'", name='alphanumeric_name'),
)

PRODUCT = Table(
    'dataset_type',
    _core.METADATA,
    Column('id', SmallInteger, primary_key=True, autoincrement=True),

    # A name/label for this type (eg. 'ls7_nbar'). Specified by users.
Example no. 3
0
# Maps a dataset type to how it will be stored (the storage_type and each
# measurement/band).
STORAGE_TYPE = Table(
    'storage_type',
    _core.METADATA,
    Column('id', SmallInteger, autoincrement=True, primary_key=True),

    # User-supplied label for this storage type (eg. 'ls7_nbar'); must be unique.
    Column('name', String, nullable=False, unique=True),

    # A dataset matches when its metadata is a superset of this document.
    Column('dataset_metadata', postgres.JSONB, nullable=False),

    Column('definition', postgres.JSONB, nullable=False),

    # Audit columns: creation time and the creating database user.
    Column('added', DateTime(timezone=True), nullable=False,
           server_default=func.now()),
    Column('added_by', String, nullable=False,
           server_default=func.current_user()),

    # Names may contain only alphanumerics and underscores.
    CheckConstraint(r"name ~* '^\w+$'", name='alphanumeric_name'),
)

STORAGE_UNIT = Table(
    'storage_unit', _core.METADATA,
    Column('id', Integer, primary_key=True, autoincrement=True),
    Column('storage_type_ref', None, ForeignKey(STORAGE_TYPE.c.id), index=True, nullable=False),

    # These should match the linked datasets.
    #  -> They are duplicated here so that we can define indexes on this table based on them.
    Column('collection_ref', None, ForeignKey(_dataset.COLLECTION.c.id), index=True, nullable=False),
    Column('metadata_type_ref', None, ForeignKey(_dataset.METADATA_TYPE.c.id), index=True, nullable=False),
Example no. 4
0
from . import _core

_LOG = logging.getLogger(__name__)

# Registry of metadata-type definitions.
METADATA_TYPE = Table(
    'metadata_type', _core.METADATA,
    Column('id', SmallInteger, autoincrement=True, primary_key=True),

    # Unique human-readable identifier.
    Column('name', String, nullable=False, unique=True),

    # The full definition document.
    Column('definition', postgres.JSONB, nullable=False),

    # Audit trail: creation timestamp and the database role that created it.
    Column('added', DateTime(timezone=True), nullable=False, server_default=func.now()),
    Column('added_by', _core.PGNAME, nullable=False, server_default=func.current_user()),

    # Only alphanumerics and underscores are valid in names.
    CheckConstraint(r"name ~* '^\w+$'", name='alphanumeric_name'),
)

DATASET_TYPE = Table(
    'dataset_type', _core.METADATA,
    Column('id', SmallInteger, primary_key=True, autoincrement=True),

    # A name/label for this type (eg. 'ls7_nbar'). Specified by users.
    Column('name', String, unique=True, nullable=False),

    # All datasets of this type should contain these fields.
    # (newly-ingested datasets may be matched against these fields to determine the dataset type)
    Column('metadata', postgres.JSONB, nullable=False),
Example no. 5
0
from . import _core

_LOG = logging.getLogger(__name__)

# Registry of metadata-type definitions.
METADATA_TYPE = Table(
    'metadata_type', _core.METADATA,
    Column('id', SmallInteger, autoincrement=True, primary_key=True),

    # Unique human-readable identifier.
    Column('name', String, nullable=False, unique=True),

    # The full definition document.
    Column('definition', postgres.JSONB, nullable=False),

    # Audit trail: creation timestamp and the database user that created it.
    Column('added', DateTime(timezone=True), nullable=False, server_default=func.now()),
    Column('added_by', String, nullable=False, server_default=func.current_user()),

    # Only alphanumerics and underscores are valid in names.
    CheckConstraint(r"name ~* '^\w+$'", name='alphanumeric_name'),
)

COLLECTION = Table(
    'collection', _core.METADATA,
    Column('id', SmallInteger, primary_key=True, autoincrement=True),

    Column('name', String, unique=True, nullable=False),

    # All datasets in the collection have this metadata type.
    Column('metadata_type_ref', None, ForeignKey(METADATA_TYPE.c.id), nullable=False),

    # Match any datasets whose metadata is a superset of this document.
Example no. 6
0
from . import _core, _sql

_LOG = logging.getLogger(__name__)

# Registry of metadata-type definitions.
METADATA_TYPE = Table(
    'metadata_type', _core.METADATA,
    Column('id', SmallInteger, autoincrement=True, primary_key=True),

    # Unique human-readable identifier.
    Column('name', String, nullable=False, unique=True),

    # The full definition document.
    Column('definition', postgres.JSONB, nullable=False),

    # Audit trail: creation timestamp and the database role that created it.
    Column('added', DateTime(timezone=True), nullable=False, server_default=func.now()),
    Column('added_by', _sql.PGNAME, nullable=False, server_default=func.current_user()),

    # Only alphanumerics and underscores are valid in names.
    CheckConstraint(r"name ~* '^\w+$'", name='alphanumeric_name'),
)

DATASET_TYPE = Table(
    'dataset_type', _core.METADATA,
    Column('id', SmallInteger, primary_key=True, autoincrement=True),

    # A name/label for this type (eg. 'ls7_nbar'). Specified by users.
    Column('name', String, unique=True, nullable=False),

    # All datasets of this type should contain these fields.
    # (newly-ingested datasets may be matched against these fields to determine the dataset type)
    Column('metadata', postgres.JSONB, nullable=False),