Example #1
0
class UTC_TIMESTAMP(sa.types.TypeDecorator):
    """TIMESTAMP(timezone=True) decorator that normalizes bind values to UTC.

    Accepted bind values: ``datetime`` (tagged as UTC), POSIX ``int``
    timestamp, parseable ``str``, or ``None``.
    """

    impl = TIMESTAMP(timezone=True)

    def process_bind_param(self, value, dialect):
        """Coerce *value* to a UTC-aware datetime (or None) before binding.

        :raises ValueError: if *value* is not datetime/int/str/None.
        """
        if isinstance(value, datetime):
            # BUG FIX: datetime.replace() returns a NEW datetime; the
            # original code discarded the result, so naive datetimes were
            # bound without a tzinfo. Assign the result back.
            value = value.replace(tzinfo=pytz.utc)
        elif isinstance(value, int):
            # Interpreted as a POSIX timestamp in UTC.
            value = datetime.fromtimestamp(value, pytz.utc)
        elif isinstance(value, str):
            value = parse_datetime(value).replace(tzinfo=pytz.utc)
        elif value is None:
            return None
        else:
            raise ValueError('invalid UTC_TIMESTAMP value.')

        return value

    @staticmethod
    def now():
        """Return the current timezone-aware UTC datetime."""
        return datetime.now(pytz.utc)
Example #2
0
class SignatureProduct(DeclarativeBase):
    """Association table linking a crash signature to a product version."""

    __tablename__ = 'signature_products'

    # column definitions
    # Timestamp (timezone-aware) of the first report for this pairing.
    first_report = Column(u'first_report', TIMESTAMP(timezone=True))
    # Composite primary key part 1; indexed for lookups by product version.
    product_version_id = Column(u'product_version_id',
                                INTEGER(),
                                primary_key=True,
                                nullable=False,
                                autoincrement=False,
                                index=True)
    # Composite primary key part 2; FK into signatures.
    signature_id = Column(u'signature_id',
                          INTEGER(),
                          ForeignKey('signatures.signature_id'),
                          primary_key=True,
                          nullable=False)

    # relationship definitions
    signatures = relationship(
        'Signature',
        primaryjoin='SignatureProduct.signature_id==Signature.signature_id')
Example #3
0
 def create_table(self, Variable_tableName):
     """Create the price-history table *Variable_tableName* if it is absent.

     Columns: id (bigint, defaulted from company_id_seq), country, date
     (microsecond TIMESTAMP), price/open/high/low/change (Numeric),
     volume (BigInteger). All data columns are NOT NULL.

     NOTE(review): ``self.MetaData`` and ``has_table(engine, name)`` look
     like attributes/older SQLAlchemy call styles — confirm they exist on
     this class and this SQLAlchemy version.
     """
     # If table don't exist, Create.
     if not self.engine.dialect.has_table(self.engine, Variable_tableName):
         metadata = self.MetaData(self.engine)
         Table(
             Variable_tableName, metadata,
             # id defaults server-side to nextval on a shared sequence.
             Column('id',
                    BigInteger,
                    primary_key=True,
                    server_default=text(
                        "nextval('company_id_seq'::regclass)")),
             Column('country', String(50), nullable=False),
             Column('date', TIMESTAMP(precision=6), nullable=False),
             Column('price', Numeric(10, 5), nullable=False),
             Column('open', Numeric(10, 5), nullable=False),
             Column('high', Numeric(10, 5), nullable=False),
             Column('low', Numeric(10, 5), nullable=False),
             Column('volume', BigInteger, nullable=False),
             Column('change', Numeric(5, 4), nullable=False))
         # Implement the creation
         metadata.create_all()
Example #4
0
class FaceImage(Base):
    """ORM row for a stored face image belonging to a customer."""

    __tablename__ = "face_image"

    id = Column(UUID(as_uuid=True),
                primary_key=True,
                default=uuid.uuid4,
                unique=True,
                nullable=False)
    # Naive timestamp: column is TIMESTAMP(False) (no timezone), and the
    # constructor fills it with datetime.utcnow(), which is consistent.
    create_date = Column(TIMESTAMP(False), nullable=False)
    customer_id = Column(UUID, ForeignKey('customer.id'))
    image_path = Column(TEXT)

    def __init__(self, id, image_path, customer_id):
        """
        Constructor for the FaceImage table row.
        :param id: Row identifier (stringified here; column type is UUID)
        :param image_path: Path to the image on the server
        :param customer_id: Identifier of the customer
        """
        self.id = str(id)
        self.image_path = image_path
        # Creation time recorded at construction, naive UTC.
        self.create_date = datetime.utcnow()
        self.customer_id = str(customer_id)
Example #5
0
class NoDataNotifications(Entity):
    """Tracks 'no data' notification state for a pipeline/notification pair."""

    __tablename__ = 'no_data_notifications'

    id: int = Column(Integer, primary_key=True)
    pipeline_id: str = Column(String,
                              ForeignKey('pipelines.name'),
                              primary_key=True)
    notification_id: int = Column(Integer,
                                  ForeignKey('pipeline_notifications.id'),
                                  primary_key=True)

    notification_period: int = Column(Integer, nullable=False)  # In minutes
    notification_sent: bool = Column(Boolean, default=False)

    # Set on insert and refreshed on every update (server clock via func.now()).
    last_updated = Column(TIMESTAMP(timezone=True),
                          default=func.now(),
                          onupdate=func.now())

    def __init__(self, pipeline_, notification_period: str):
        """Create state for *pipeline_*, converting the period to minutes.

        :param pipeline_: pipeline object; only its ``name`` is stored
        :param notification_period: human-readable period, converted via
            get_period_in_minutes
        """
        self.pipeline_id = pipeline_.name
        self.notification_period = get_period_in_minutes(notification_period)
        self.notification_sent = False
Example #6
0
class BalancingSummary(Base, BaseModel):
    """Per-interval balancing summary keyed by network, region and time."""

    __tablename__ = "balancing_summary"

    # Functional indexes on date_trunc of the trading interval (in UTC)
    # at year/month/day/hour granularity, for time-bucketed queries.
    __table_args__ = (
        Index(
            "idx_balancing_summary_trading_interval_year",
            text("date_trunc('year', trading_interval AT TIME ZONE 'UTC')"),
        ),
        Index(
            "idx_balancing_summary_trading_interval_month",
            text("date_trunc('month', trading_interval AT TIME ZONE 'UTC')"),
        ),
        Index(
            "idx_balancing_summary_trading_interval_day",
            text("date_trunc('day', trading_interval AT TIME ZONE 'UTC')"),
        ),
        Index(
            "idx_balancing_summary_trading_interval_hour",
            text("date_trunc('hour', trading_interval AT TIME ZONE 'UTC')"),
        ),
    )

    # Composite primary key: network_id + trading_interval + network_region.
    network_id = Column(
        Text,
        ForeignKey("network.code", name="fk_balancing_summary_network_code"),
        primary_key=True,
    )
    network = relationship("Network")

    trading_interval = Column(
        TIMESTAMP(timezone=True), index=True, primary_key=True
    )
    network_region = Column(Text, primary_key=True)
    forecast_load = Column(Numeric, nullable=True)
    generation_scheduled = Column(Numeric, nullable=True)
    generation_non_scheduled = Column(Numeric, nullable=True)
    generation_total = Column(Numeric, nullable=True)
    price = Column(Numeric, nullable=True)
Example #7
0
class BalancingSummary(Base, BaseModel):
    """Per-interval balancing summary keyed by network, region and time."""

    __tablename__ = "balancing_summary"

    # Composite primary key: network_id + trading_interval + network_region.
    network_id = Column(
        Text,
        ForeignKey("network.code", name="fk_balancing_summary_network_code"),
        primary_key=True,
    )
    network = relationship("Network")

    trading_interval = Column(TIMESTAMP(timezone=True),
                              index=True,
                              primary_key=True)
    network_region = Column(Text, primary_key=True)
    forecast_load = Column(Numeric, nullable=True)
    generation_scheduled = Column(Numeric, nullable=True)
    generation_non_scheduled = Column(Numeric, nullable=True)
    generation_total = Column(Numeric, nullable=True)
    net_interchange = Column(Numeric, nullable=True)
    demand = Column(Numeric, nullable=True)
    demand_total = Column(Numeric, nullable=True)
    price = Column(Numeric, nullable=True)
    price_dispatch = Column(Numeric, nullable=True)
    net_interchange_trading = Column(Numeric, nullable=True)
    is_forecast = Column(Boolean, default=False)

    # Composite indexes for the common "latest intervals for a network /
    # region" query shape (descending on trading_interval).
    __table_args__ = (
        Index(
            "idx_balancing_summary_network_id_trading_interval",
            network_id,
            trading_interval.desc(),
        ),
        Index(
            "idx_balancing_summary_network_region_trading_interval",
            network_region,
            trading_interval.desc(),
        ),
    )
Example #8
0
def convert_type(colname, ora_type):
    """
    Converts oracle type to Postgres type
    """
    # "NullType is used as a default type for those cases
    # where a type cannot be determined" — it must be checked before any
    # str(ora_type) comparisons, which would otherwise error.
    if isinstance(ora_type, sqlalchemy.types.NullType):
        logging.info('\t{}: NULL DETECTED'.format(colname))
        return sqlalchemy.types.String()

    pg_type = ora_type

    # Ordered instance checks; first match wins (mirrors the elif chain).
    instance_map = (
        (sqlalchemy.types.Numeric, sqlalchemy.types.Numeric),
        (sqlalchemy.types.DateTime, TIMESTAMP),
        (sqlalchemy.types.Text, sqlalchemy.types.Text),
        (sqlalchemy.types.NVARCHAR, sqlalchemy.types.VARCHAR),
        (sqlalchemy.types.BLOB, BYTEA),
    )
    for ora_cls, make_pg_type in instance_map:
        if isinstance(ora_type, ora_cls):
            pg_type = make_pg_type()
            break
    else:
        # Fall back to name-based matching for types without a class check.
        type_name = str(ora_type)
        if type_name == 'RAW':
            pg_type = BYTEA()
        # this isn't currently catching the binary_float
        elif type_name == 'BINARY_FLOAT':
            pg_type = REAL()
        elif type_name == 'INTERVAL DAY TO SECOND':
            pg_type = sqlalchemy.types.Interval(second_precision=True)

    if pg_type != ora_type:
        msg = "\t{}: {} converted to {}".format(colname, ora_type, pg_type)
        logging.info(msg)

    return pg_type
Example #9
0
class Person(db.Model, AutoSerialize, Serializer):
    """A named person with an optional mention timestamp and validity flag."""

    __tablename__ = 'persons'

    id = Column(Integer, primary_key=True,
                server_default=text("nextval('persons_id_seq'::regclass)"))
    name = Column(String(64), nullable=False)
    mention_date = Column(TIMESTAMP(True, 6))
    valid = Column(Boolean, server_default=text("true"))

    def serialize(self):
        """Serialize this row to a dict, dropping internal-only fields.

        :return: dict without 'id', 'mention_date' and 'valid'
        """
        data = Serializer.serialize(self)
        for hidden_field in ('id', 'mention_date', 'valid'):
            del data[hidden_field]
        return data

    def __init__(self, name, mention_date):
        """Create a person with the given name and mention timestamp."""
        self.name = name
        self.mention_date = mention_date
Example #10
0
class AuthenticationToken(Entity):
    """A destination-scoped authentication token with expiry tracking."""

    __tablename__ = 'authentication_tokens'

    id = Column(Integer, primary_key=True)
    destination_id = Column(Integer, ForeignKey('destinations.id'))
    authentication_token = Column(String, nullable=False)
    created_at = Column(TIMESTAMP(timezone=True), default=func.now())

    # Tokens are considered valid for one day.
    EXPIRATION_PERIOD_IN_SECONDS = 24 * 60 * 60

    def __init__(self, destination_id: int, token: str):
        """Store *token* for *destination_id*, stamping the creation time."""
        self.destination_id = destination_id
        self.authentication_token = token
        # NOTE(review): datetime.now() is naive while the column is
        # timezone-aware — subtracting a DB-loaded aware created_at from a
        # naive now() in is_expired() would raise. Confirm how rows are
        # loaded before changing.
        self.created_at = datetime.now()

    def update(self, token: str):
        """Replace the token and reset the creation timestamp."""
        self.authentication_token = token
        self.created_at = datetime.now()

    def is_expired(self) -> bool:
        # leave 100 sec gap just to be sure we're not using expired token
        return (datetime.now() - self.created_at
                ).total_seconds() > self.EXPIRATION_PERIOD_IN_SECONDS - 100
Example #11
0
class BomObservation(Base):
    """A single BOM weather observation for a station at a point in time."""

    __tablename__ = "bom_observation"

    # Composite primary key: observation_time + station_id.
    observation_time = Column(
        TIMESTAMP(timezone=True), index=True, primary_key=True, nullable=False
    )

    station_id = Column(
        Text,
        ForeignKey("bom_station.code", name="fk_bom_observation_station_code"),
        primary_key=True,
    )
    station = relationship("BomStation")
    # Measured values; units are not stated here — see BOM feed docs.
    temp_apparent = Column(Numeric)
    temp_air = Column(Numeric)
    temp_min = Column(Numeric)
    temp_max = Column(Numeric)
    press_qnh = Column(Numeric)
    wind_dir = Column(Text, nullable=True)
    wind_spd = Column(Numeric)
    wind_gust = Column(Numeric)
    humidity = Column(Numeric, nullable=True)
    cloud = Column(Text, nullable=True)
    cloud_type = Column(Text, nullable=True)
Example #12
0
class User(Base):
    """Application user with PGP key material and conference membership."""

    __tablename__ = 'user'

    uuid = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    email = Column(VARCHAR(255), nullable=False)
    name = Column(VARCHAR(255), nullable=False)
    # Fixed 60 chars — consistent with a bcrypt digest. TODO confirm.
    hash = Column(VARCHAR(60), nullable=False)
    last_seen = Column(TIMESTAMP(timezone=True),
                       nullable=False,
                       default=datetime.datetime.utcnow)
    conferences_count = Column(INTEGER, nullable=False, default=0)
    fingerprint = Column(VARCHAR(255), nullable=False)
    public_key = Column(TEXT, nullable=False)
    private_key = Column(TEXT, nullable=False)
    revocation_certificate = Column(TEXT, nullable=False)

    # Many-to-many to Conference through Conference_Reference.
    # NOTE(review): secondaryjoin is identical to primaryjoin (both on
    # user_uuid) — one of them would normally join on conference_uuid;
    # verify this relationship actually returns the intended rows.
    conferences = relationship(
        "Conference",
        secondary=
        "join(Conference, Conference_Reference, Conference.uuid == Conference_Reference.conference_uuid)",
        primaryjoin="Conference_Reference.user_uuid == User.uuid",
        secondaryjoin="Conference_Reference.user_uuid == User.uuid",
        # back_populates="user"
    )
Example #13
0
 def posted(cls):
     # SQL-side expression: pull the 'posted' key out of the JSON blob as
     # text (->> operator) and cast it to a timezone-aware timestamp.
     return cast(cls.data.op('->>')('posted'), TIMESTAMP(timezone=True))
Example #14
0
class ScrapedJob(Base):
    """A scraped job posting, keyed by URL, with the raw item kept in JSONB."""

    __tablename__ = 'scraped-jobs'
    # Server-generated UUID; note the URL (not id) is the primary key.
    id = Column(UUID, server_default=func.gen_random_uuid())
    url = Column(Text, nullable=False, primary_key=True)
    created_at = Column(TIMESTAMP(timezone=True),
                        nullable=False,
                        default=func.now())
    last_modified = Column(TIMESTAMP(timezone=True),
                           nullable=False,
                           default=func.now())
    # Raw Scrapy item, stored verbatim.
    data = Column(JSONB, nullable=False)

    def __repr__(self):
        return '<Job created_at: {self.created_at}' \
               ' last_modified: {self.last_modified}' \
               ' url: {self.url!r}' \
               '>'.format(self=self)

    @classmethod
    def from_dict(cls, input_dict):
        """
        Used to instantiate a `ScrapedJob` instance from the data yielded by a Scrapy spider.
        Args:
            input_dict (dict): the Scrapy item

        Returns:
            obj:`ScrapedJob`: instance of the `ScrapedJob` class

        """
        instance = cls()
        if 'url' in input_dict:
            instance.url = input_dict['url']
        instance.data = input_dict
        return instance

    @hybrid_property
    def posted(self):
        """
        Extracts the timestamp of the posting from the data blob. If it is missing, or the data blob hasn't been set
        we get None instead.

        Returns:
            datetime.datetime or None: posting date

        """
        if self.data is None or 'posted' not in self.data:
            return None
        return dateutil.parser.parse(self.data['posted'])

    @posted.expression
    def posted(cls):
        # SQL-side counterpart of the Python property above: JSONB ->> then
        # cast to a timezone-aware timestamp.
        return cast(cls.data.op('->>')('posted'), TIMESTAMP(timezone=True))

    @hybrid_property
    def date_of_posting(self):
        """
        Returns the date of the posting, as a YYYY-MM-DD string.

        Returns:
            str: date of posting

        """
        posting = self.posted
        if posting is not None:
            return posting.date()
        return None

    @date_of_posting.expression
    def date_of_posting(cls):
        # SQL-side: truncate to day and cast to DATE.
        return cast(func.date_trunc('day', cls.posted), DATE)

    @hybrid_property
    def spider(self):
        """
        Extracts the spider name from the data blob. If the spider field isn't present, or there is not data blob,
        we get a None instead.

        Returns:
            string or None: the name of the spider

        """
        if self.data is None or 'spider' not in self.data:
            return None
        return self.data['spider']

    @spider.expression
    def spider(cls):
        return cls.data.op('->>')('spider')

    def as_dict(self):
        """
        Flattens the fields of the instance into a single dict. This is useful for returning the data in the API,
        since all fields are included.

        Returns:
            dict: all the fields in the `ScrapedJob` instance, flattened

        """
        contents = dict()

        if self.data:
            contents.update(self.data)

        # Timestamp columns override same-named keys from the data blob,
        # rendered as ISO-8601 strings when set.
        for key in ('created_at', 'last_modified'):
            contents[key] = value = getattr(self, key)
            if value is not None:
                contents[key] = getattr(self, key).isoformat()

        contents['id'] = self.id

        return contents
Example #15
0
class Pipeline(Entity, sdc_client.IPipeline):
    """A configured data pipeline: source, destination, schedule and the
    large ``config`` JSON blob that most properties below read from."""

    __tablename__ = 'pipelines'

    # Pipeline lifecycle states as reported by StreamSets.
    STATUS_RUNNING = 'RUNNING'
    STATUS_STOPPED = 'STOPPED'
    STATUS_EDITED = 'EDITED'
    STATUS_RETRY = 'RETRY'
    STATUS_STOPPING = 'STOPPING'
    STATUS_STARTING = 'STARTING'
    STATUS_RUN_ERROR = 'RUN_ERROR'
    STATUS_START_ERROR = 'START_ERROR'
    STATUS_STOP_ERROR = 'STOP_ERROR'
    STATUS_RUNNING_ERROR = 'RUNNING_ERROR'
    # Keys used inside the config blob.
    OVERRIDE_SOURCE = 'override_source'
    FLUSH_BUCKET_SIZE = 'flush_bucket_size'

    # Metric target types.
    COUNTER = 'counter'
    GAUGE = 'gauge'
    RUNNING_COUNTER = 'running_counter'

    id = Column(Integer, primary_key=True)
    name = Column(String)
    type = Column(String)
    source_id = Column(Integer, ForeignKey('sources.id'))
    destination_id = Column(Integer,
                            ForeignKey('destinations.id'),
                            nullable=True)
    config = Column(JSON)
    schema = Column(JSON)
    override_source = Column(JSON)
    created_at = Column(TIMESTAMP(timezone=True), default=func.now())
    last_edited = Column(TIMESTAMP(timezone=True),
                         default=func.now(),
                         onupdate=func.now())
    status = Column(String, default=STATUS_EDITED)
    streamsets_id = Column(Integer, ForeignKey('streamsets.id'))

    offset = relationship("PipelineOffset", cascade="delete", uselist=False)
    watermark = relationship("PipelineWatermark",
                             cascade="delete",
                             uselist=False)
    source_ = relationship('Source', back_populates='pipelines')
    destination = relationship('HttpDestination', cascade="merge")
    streamsets = relationship('StreamSets')
    retries = relationship('PipelineRetries', cascade="delete", uselist=False)
    notifications = relationship('PiplineNotifications',
                                 cascade="delete",
                                 uselist=False)

    def __init__(self, pipeline_id: str, source_: Source,
                 destination: HttpDestination):
        """Create a pipeline named *pipeline_id* between *source_* and
        *destination*, with empty config and no StreamSets assignment."""
        self.name = pipeline_id
        # _previous_* hold pre-set_config snapshots; they are plain instance
        # attributes (not columns) and are absent on DB-loaded objects —
        # config_changed() guards for that with hasattr.
        self._previous_config = {}
        self._previous_override_source = {}
        self.config = {}
        self.source_ = source_
        self.source_id = source_.id
        self.destination = destination
        self.destination_id = destination.id
        self.override_source = {}
        self.streamsets_id = None
        self.streamsets = None
        self.type = REGULAR_PIPELINE
        self.notifications: Optional[PiplineNotifications] = None

    def config_changed(self) -> bool:
        """True if set_config() changed config or override_source.

        Objects loaded from the DB never went through __init__, so the
        snapshot attributes may not exist — treat that as 'not changed'.
        """
        if not hasattr(self, '_previous_config'):
            return False
        return self.config != self._previous_config or self.override_source != self._previous_override_source

    def set_config(self, config: dict):
        """Replace config, snapshotting the old values for config_changed().

        The OVERRIDE_SOURCE key is popped out of *config* and stored
        separately (note: *config* is mutated by the pop).
        """
        self._previous_config = deepcopy(self.config)
        self._previous_override_source = deepcopy(self.override_source)
        self.override_source = config.pop(self.OVERRIDE_SOURCE, {})
        self.config = deepcopy(config)

    @property
    def source(self) -> Source:
        return self.source_

    def has_periodic_watermark_config(self) -> bool:
        return bool(self.config.get('periodic_watermark'))

    def has_offset(self) -> bool:
        return bool(self.offset)

    def has_watermark(self) -> bool:
        return bool(self.watermark)

    @property
    def periodic_watermark_config(self) -> dict:
        return self.config.get('periodic_watermark', {})

    @property
    def watermark_delay(self) -> int:
        return self.config.get('periodic_watermark', {}).get('delay', 0)

    @property
    def watermark_logs_enabled(self) -> bool:
        return self.config.get('watermark_logs_enabled', False)

    @property
    def flush_bucket_size(self) -> FlushBucketSize:
        return FlushBucketSize(self.config.get(self.FLUSH_BUCKET_SIZE, '1d'))

    @flush_bucket_size.setter
    def flush_bucket_size(self, value: str):
        # FlushBucketSize(value) validates the string before storing it.
        self.config[self.FLUSH_BUCKET_SIZE] = FlushBucketSize(value).value

    @property
    def static_dimensions(self) -> dict:
        return self.config.get('properties', {})

    @property
    def static_dimension_names(self) -> list:
        return [
            tools.replace_illegal_chars(s_dim)
            for s_dim in self.static_dimensions.keys()
        ]

    @property
    def dimensions(self) -> list | dict:
        # Either a flat list, or a dict with 'required'/'optional' keys.
        return self.config.get('dimensions', [])

    @property
    def dimension_paths(self) -> list:
        return [
            self._get_property_path(value) for value in self.all_dimensions
        ]

    @property
    def required_dimensions(self) -> list:
        if type(self.dimensions) is list:
            return []
        return self.dimensions.get('required', [])

    @property
    def required_dimension_paths(self) -> list:
        return [
            self._get_property_path(value)
            for value in self.required_dimensions
        ]

    @property
    def optional_dimensions(self) -> list:
        if type(self.dimensions) is list:
            return []
        return self.dimensions.get('optional', [])

    @property
    def all_dimensions(self) -> list:
        # NOTE(review): if dimensions is an empty dict, the first branch is
        # taken and `{} + list` would raise TypeError — confirm configs
        # never store an empty dict here.
        if not self.dimensions or type(self.dimensions) is list:
            return self.dimensions + self.static_dimension_names
        return self.required_dimensions + self.optional_dimensions + self.static_dimension_names

    @property
    def dimension_names(self) -> list:
        return [
            tools.replace_illegal_chars(d.replace('/', '_'))
            for d in self.all_dimensions
        ]

    @property
    def dimension_paths_with_names(self) -> dict:
        return dict(zip(self.dimension_paths, self.dimension_names))

    @property
    def dimension_configurations(self) -> Optional[dict]:
        """Build dimension transformation configs; requires list dimensions.

        :raises PipelineException: if dimensions is not a list
        """
        if not isinstance(self.dimensions, list):
            raise PipelineException((
                'Pipeline dimensions should be a list in order to build dimension_configurations, '
                f'but {type(self.dimensions).__name__} provided'))
        return _build_transformation_configurations(
            self.dimensions, self.config.get('dimension_configurations'))

    @property
    def measurement_configurations(self) -> Optional[dict]:
        return _build_transformation_configurations(
            list(self.values),
            self.config.get('measurement_configurations', {}),
        )

    @property
    def tag_configurations(self) -> Optional[dict]:
        return self.config.get('tag_configurations', {})

    @property
    def timestamp_path(self) -> str:
        return self._get_property_path(self.config['timestamp']['name'])

    @property
    def timezone(self) -> str:
        return self.config.get('timezone', 'UTC')

    @property
    def timestamp_type(self) -> TimestampType:
        return TimestampType(self.config['timestamp']['type'])

    @property
    def timestamp_name(self) -> Optional[str]:
        return self.config.get('timestamp', {}).get('name')

    @property
    def timestamp_format(self) -> str:
        return self.config['timestamp'].get('format')

    @property
    def values(self) -> dict:
        # Mapping of measurement key -> target type.
        return self.config.get('values', {})

    @property
    def value_paths(self) -> list:
        return [self._get_property_path(value) for value in self.values.keys()]

    @property
    def target_types(self) -> list:
        # Influx sources use one global target_type; others use per-value.
        if self.source.type == source.TYPE_INFLUX:
            return [self.config.get('target_type', 'gauge')] * len(
                self.value_paths)
        return list(self.values.values())

    @property
    def measurement_paths_with_names(self) -> dict:
        return dict(
            zip(
                self.config.get('measurement_names', {}).keys(),
                self.measurement_names))

    @property
    def measurement_names(self) -> list:
        return [
            tools.replace_illegal_chars(
                self.config.get('measurement_names', {}).get(key, key))
            for key in self.values.keys()
        ]

    @property
    def measurement_names_with_target_types(self) -> dict:
        result = {}
        measurement_names = self.config.get('measurement_names', {})
        for measurement, target_type in self.values.items():
            measurement_name = measurement_names.get(measurement, measurement)
            measurement_name = tools.replace_illegal_chars(measurement_name)
            result[measurement_name] = target_type
        return result

    @property
    def measurement_names_paths(self):
        return [
            self._get_property_path(value) for value in self.measurement_names
        ]

    @property
    def value_paths_with_names(self) -> dict:
        # value_paths should work the same as value_names that were here
        # value_paths are needed for directory and kafka and mb something else
        return dict(zip(self.value_paths, self.measurement_names))

    @property
    def target_types_paths(self):
        return [
            self._get_property_path(t_type) for t_type in self.target_types
        ]

    @property
    def count_records(self) -> bool:
        return self.config.get('count_records', False)

    @property
    def count_records_measurement_name(self) -> str:
        return tools.replace_illegal_chars(
            self.config.get('count_records_measurement_name', 'count'))

    @property
    def static_what(self) -> bool:
        return self.config.get('static_what', True)

    @property
    def transformations_config(self) -> str:
        return self.config.get('transform', {}).get('config')

    @property
    def filter_condition(self) -> str:
        return self.config.get('filter', {}).get('condition')

    @property
    def tags(self) -> dict:
        return self.config.get('tags', {})

    @property
    def values_array_path(self) -> str:
        return self.config.get('values_array_path', '')

    @property
    def values_array_filter_metrics(self) -> list:
        return self.config.get('values_array_filter_metrics', [])

    @property
    def query_file(self) -> Optional[str]:
        return self.config.get('query_file')

    @property
    def query(self) -> Optional[str]:
        return self.config.get('query')

    @query.setter
    def query(self, query: str):
        self.config['query'] = query

    @property
    def interval(self) -> Optional[int]:
        # returns interval in seconds
        interval = self.config.get('interval')
        # Named intervals ('1d', ...) are converted; raw values are assumed
        # to already be seconds.
        if interval in Interval.VALUES:
            return Interval(interval).total_seconds()
        return int(interval) if interval is not None else None

    @property
    def days_to_backfill(self) -> str:
        return str(self.config.get('days_to_backfill', '0'))

    @property
    def delay(self) -> str:
        return self.config.get('delay', '0')

    @property
    def watermark_in_local_timezone(self) -> bool:
        return self.config.get('watermark_in_local_timezone', False)

    @property
    def batch_size(self) -> str:
        return self.config.get('batch_size', '1000')

    def uses_schema(self) -> bool:
        return bool(self.config.get('uses_schema'))

    @property
    def histories_batch_size(self) -> str:
        return self.config.get('histories_batch_size', '100')

    @property
    def header_attributes(self) -> list:
        return self.config.get('header_attributes', [])

    @property
    def log_everything(self) -> bool:
        return bool(self.config.get('log_everything'))

    @property
    def transform_script_config(self) -> str:
        return self.config.get('transform_script', {}).get('config', '')

    @property
    def watermark_sleep_time(self) -> int:
        return self.config.get('watermark_sleep_time', 10)

    @property
    def lookups(self) -> dict:
        return self.config.get('lookups', {})

    @property
    def is_strict(self) -> bool:
        return bool(self.config.get('strict', True))

    @property
    def dvp_config(self) -> dict:
        return self.config.get('dvpConfig', {})

    @property
    def dynamic_step(self) -> bool:
        return bool(self.config.get('dynamic_step', False))

    def get_streamsets_config(self) -> dict:
        return pipeline.manager.create_streamsets_pipeline_config(self)

    def get_id(self) -> str:
        # The human-readable name doubles as the external pipeline id.
        return self.name

    def get_offset(self) -> Optional[str]:
        return self.offset.offset if self.offset else None

    def get_streamsets(self) -> Optional[sdc_client.IStreamSets]:
        return self.streamsets

    def set_streamsets(self, streamsets_: StreamSets):
        self.streamsets_id = streamsets_.id
        self.streamsets = streamsets_

    def delete_streamsets(self):
        self.streamsets_id = None
        self.streamsets = None

    def get_schema(self) -> dict:
        return self.schema or {}

    def has_schema(self) -> bool:
        return bool(self.schema)

    def get_schema_id(self) -> Optional[str]:
        return self.get_schema().get('id')

    def export(self) -> dict:
        """Flat, re-importable representation of this pipeline's config."""
        return {
            **self.config,
            self.OVERRIDE_SOURCE: self.override_source,
            'pipeline_id': self.name,
            'source': self.source.name,
        }

    def to_dict(self) -> dict:
        return {
            'id': self.name,
            'config': self.config,
            'schema': self.get_schema(),
            'override_source': self.override_source,
            'source': self.source.config,
            'destination': self.destination.config,
        }

    def _get_property_path(self, property_value: str) -> str:
        """Resolve a logical property name to its extraction path.

        Checks the source's csv_mapping first, then the pipeline's
        dimension_value_paths, and falls back to the value itself.
        """
        for idx, item in self.source.config.get('csv_mapping', {}).items():
            if item == property_value:
                return str(idx)
        if property_value in self.config.get('dimension_value_paths', {}):
            return str(
                self.config.get('dimension_value_paths', {})[property_value])
        return property_value

    def meta_tags(self) -> dict:
        """Agent-generated tags attached to every metric from this pipeline."""
        return {
            'source': ['anodot-agent'],
            'source_host_id': [self.destination.host_id],
            'source_host_name': [tools.replace_illegal_chars(HOSTNAME)],
            'pipeline_id': [self.name],
            'pipeline_type': [self.source.type]
        }

    def get_tags(self) -> dict:
        # User-configured tags win over meta tags on key collision.
        return {**self.meta_tags(), **self.tags}

    def error_notification_enabled(self) -> bool:
        return not self.config.get('disable_error_notifications', False)
Example #16
0
File: model.py Project: zwant/ngcd
class PullRequest(ProjectionBase):
    """Projection of a pull request's latest known state."""

    __tablename__ = 'pull_requests'
    id = Column(Integer, primary_key=True)
    external_id = Column(String, nullable=False, index=True)
    is_closed = Column(Boolean, nullable=False, index=True)
    repo_external_id = Column(String, nullable=False)
    head_sha = Column(String, nullable=True, index=True)
    base_sha = Column(String, nullable=True, index=True)
    branch = Column(String, nullable=True, index=True)
    base_repo_external_id = Column(String, nullable=True, index=True)
    html_url = Column(String, nullable=True)
    api_url = Column(String, nullable=True)
    opened_by = Column(AdaptableJSONB, nullable=True)
    closed_by = Column(AdaptableJSONB, nullable=True)
    opened_at = Column(TIMESTAMP(timezone=True), nullable=True)
    closed_at = Column(TIMESTAMP(timezone=True), nullable=True)
    last_update = Column(TIMESTAMP(timezone=True), nullable=False)

    def __init__(self,
                 external_id,
                 id=None,
                 is_closed=False,
                 repo_external_id=None,
                 head_sha=None,
                 base_sha=None,
                 branch=None,
                 base_repo_external_id=None,
                 html_url=None,
                 api_url=None,
                 opened_by=None,
                 closed_by=None,
                 opened_at=None,
                 closed_at=None,
                 last_update=None):
        """Create a projection row; only *external_id* is required."""
        self.id = id
        self.external_id = external_id
        self.is_closed = is_closed
        self.repo_external_id = repo_external_id
        self.head_sha = head_sha
        self.base_sha = base_sha
        self.branch = branch
        self.base_repo_external_id = base_repo_external_id
        self.html_url = html_url
        self.api_url = api_url
        self.opened_by = opened_by
        self.closed_by = closed_by
        self.opened_at = opened_at
        self.closed_at = closed_at
        self.last_update = last_update

    def __repr__(self):
        return '<PullRequest {}> is_closed: {}, branch: {}, repo_external_id: {}, head_sha: {}, base_sha: {}, last_update: {}'.format(
        self.external_id,
        self.is_closed,
        self.branch,
        self.repo_external_id,
        self.head_sha,
        self.base_sha,
        self.last_update)

    def as_dict(self):
        """Render all fields as a dict for API output.

        NOTE(review): last_update.isoformat() raises AttributeError when the
        instance was built with the default last_update=None; opened_at /
        closed_at are NOT isoformat'd, unlike last_update — confirm both are
        intentional.
        """
        return {
            'id': self.id,
            'external_id': self.external_id,
            'is_closed': self.is_closed,
            'repo_external_id': self.repo_external_id,
            'head_sha': self.head_sha,
            'base_sha': self.base_sha,
            'branch': self.branch,
            'base_repo_external_id': self.base_repo_external_id,
            'html_url': self.html_url,
            'api_url': self.api_url,
            'last_update': self.last_update.isoformat(),
            'opened_by': self.opened_by,
            'closed_by': self.closed_by,
            'opened_at': self.opened_at,
            'closed_at': self.closed_at
        }
Example #17
0
def load_schema(meta):
    """This is the definition of the database schema.

    Builds one SQLAlchemy ``Table`` per ``Entity`` — people, works,
    recordings, stations, programs, plays, and their link/analysis tables —
    all attached to *meta*.  Actually creating the tables in the database is
    left to the caller.

    :param meta: metadata structure to add schema to
    :return: dict of Table specifications, indexed by name
    """
    return {
        Entity.PERSON: Table('person', meta,
            Column('id',                Integer,     primary_key=True),
            Column('name',              Text,        nullable=False),  # basic normalization
            # REVISIT: should raw_name be a JSONB???
            Column('raw_name',          Text,        nullable=True),

            # parsed (and normalized???)
            Column('prefix',            Text),
            Column('first_name',        Text),
            Column('middle_name',       Text),
            Column('last_name',         Text),
            Column('suffix',            Text),
            Column('full_name',         Text),       # assembled from normalized name components
            Column('tags',              ARRAY(Text)),

            # denormalized flags
            Column('is_composer',       Boolean),
            Column('is_conductor',      Boolean),
            Column('is_performer',      Boolean),

            # canonicality
            Column('is_canonical',      Boolean),
            Column('cnl_person_id',     Integer,     ForeignKey('person.id')),  # points to self, if canonical
            Column('arkiv_uri',         Text),

            # constraints/indexes
            UniqueConstraint('name'),
            UniqueConstraint('full_name')
        ),
        Entity.PERFORMER: Table('performer', meta,
            Column('id',                Integer,     primary_key=True),
            Column('person_id',         Integer,     ForeignKey('person.id'), nullable=False),
            Column('role',              Text,        nullable=True),  # instrument, voice, role, etc.
            Column('raw_role',          Text,        nullable=True),
            Column('cnl_performer_id',  Integer,     ForeignKey('performer.id')),

            # constraints/indexes
            UniqueConstraint('person_id', 'role')
        ),
        Entity.ENSEMBLE: Table('ensemble', meta,
            Column('id',                Integer,     primary_key=True),
            Column('name',              Text,        nullable=False),
            Column('raw_name',          Text,        nullable=True),

            # parsed and normalized
            Column('ens_type',          Text),
            Column('ens_name',          Text),
            Column('ens_location',      Text),       # informational
            Column('tags',              ARRAY(Text)),

            # canonicality
            Column('is_canonical',      Boolean),
            Column('cnl_ensemble_id',   Integer,     ForeignKey('ensemble.id')),  # points to self, if canonical
            Column('arkiv_uri',         Text),

            # constraints/indexes
            UniqueConstraint('name')
        ),
        Entity.WORK: Table('work', meta,
            Column('id',                Integer,     primary_key=True),
            Column('composer_id',       Integer,     ForeignKey('person.id'), nullable=False),
            Column('name',              Text,        nullable=False),
            Column('raw_name',          Text,        nullable=True),

            # parsed and normalized
            Column('work_type',         Text),
            Column('work_name',         Text),
            Column('work_key',          Text),
            Column('catalog_no',        Text),       # i.e. op., K., BWV, etc.
            Column('tags',              ARRAY(Text)),

            # canonicality
            Column('is_canonical',      Boolean),
            Column('cnl_work_id',       Integer,     ForeignKey('work.id')),  # points to self, if canonical
            Column('arkiv_uri',         Text),

            # constraints/indexes
            UniqueConstraint('composer_id', 'name')
        ),
        Entity.RECORDING: Table('recording', meta,
            Column('id',                Integer,     primary_key=True),
            Column('name',              Text,        nullable=True),  # if null, need label/catalog_no
            Column('label',             Text),
            Column('catalog_no',        Text),
            Column('release_date',      Date),
            Column('arkiv_uri',         Text),

            # constraints/indexes
            # TODO: put a partial index on ('name', 'label')!!!
            #       CREATE UNIQUE INDEX recording_altkey
            #           ON recording (name, label)
            #        WHERE catalog_no IS NULL;
            UniqueConstraint('label', 'catalog_no')
        ),
        Entity.PERFORMANCE: Table('performance', meta,
            Column('id',                Integer,     primary_key=True),
            Column('work_id',           Integer,     ForeignKey('work.id'), nullable=False),
            Column('performer_ids',     ARRAY(Integer)),  # ForeignKey('performer.id')
            Column('ensemble_ids',      ARRAY(Integer)),  # ForeignKey('ensemble.id')
            Column('conductor_id',      Integer,     ForeignKey('person.id')),
            Column('recording_id',      Integer,     ForeignKey('recording.id')),
            Column('notes',             ARRAY(Text)),

            # constraints/indexes
            UniqueConstraint('work_id', 'performer_ids', 'ensemble_ids', 'conductor_id')
        ),
        Entity.STATION: Table('station', meta,
            Column('id',                Integer,     primary_key=True),
            Column('name',              Text,        nullable=False),
            Column('timezone',          Text,        nullable=False),  # tzdata (Olson/IANA) format

            # the following are informational only
            Column('location',          Text),       # e.g. "<city>, <state>"
            Column('frequency',         Text),
            Column('website',           Text),

            # misc/external information
            Column('tags',              ARRAY(Text)),
            Column('notes',             ARRAY(Text)),
            Column('ext_id',            Text),
            Column('ext_mstr_id',       Text),

            # canonicality/analytics metadata
            Column('synd_level',        Integer,     server_default=text('10')),  # 0-100 (default: 10)

            # constraints/indexes
            UniqueConstraint('name')
        ),
        Entity.PROGRAM: Table('program', meta,
            Column('id',                Integer,     primary_key=True),
            Column('name',              Text,        nullable=False),
            Column('host_name',         Text),
            Column('is_syndicated',     Boolean),
            Column('station_id',        Integer,     ForeignKey('station.id'), nullable=True),

            # misc/external information
            Column('tags',              ARRAY(Text)),
            Column('notes',             ARRAY(Text)),
            Column('ext_id',            Text),
            Column('ext_mstr_id',       Text),

            # canonicality/analytics metadata
            Column('synd_level',        Integer),    # inherit from station
            Column('mstr_program_id',   Integer,     ForeignKey('program.id')),  # not null if syndicated
            Column('website',           Text),

            # constraints/indexes
            UniqueConstraint('name', 'host_name')
        ),
        Entity.PROGRAM_PLAY: Table('program_play', meta,
            Column('id',                Integer,     primary_key=True),
            Column('station_id',        Integer,     ForeignKey('station.id'), nullable=False),
            Column('prog_play_info',    JSONB,       nullable=False),  # normalized information
            Column('prog_play_date',    Date,        nullable=False),  # listed local date
            Column('prog_play_start',   Time,        nullable=False),  # listed local time
            Column('prog_play_end',     Time),       # if listed
            Column('prog_play_dur',     Interval),   # if listed

            # foreign key lookups (OPEN ISSUE: should we create additional metadata
            # at this level for associations to composers, performers, etc.???)
            Column('program_id',        Integer,     ForeignKey('program.id')),
            Column('mstr_prog_play_id', Integer,     ForeignKey('program_play.id')),  # not null if syndicated

            # misc/external information
            Column('tags',              ARRAY(Text)),
            Column('notes',             ARRAY(Text)),
            Column('ext_id',            Text),
            Column('ext_mstr_id',       Text),

            # technical
            Column('start_time',        TIMESTAMP(timezone=True)),
            Column('end_time',          TIMESTAMP(timezone=True)),
            Column('duration',          Interval),

            # constraints/indexes
            UniqueConstraint('station_id', 'prog_play_date', 'prog_play_start', 'program_id')
        ),
        Entity.PLAY: Table('play', meta,
            Column('id',                Integer,     primary_key=True),
            Column('station_id',        Integer,     ForeignKey('station.id'), nullable=False),
            Column('prog_play_id',      Integer,     ForeignKey('program_play.id'), nullable=True),
            Column('play_info',         JSONB,       nullable=False),  # normalized information
            Column('play_date',         Date,        nullable=False),  # listed local date
            Column('play_start',        Time,        nullable=False),  # listed local time
            Column('play_end',          Time),       # if listed
            Column('play_dur',          Interval),   # if listed

            # foreign key lookups
            Column('program_id',        Integer,     ForeignKey('program.id')),  # denorm from program_play (do we need this???)
            Column('composer_id',       Integer,     ForeignKey('person.id')),
            Column('work_id',           Integer,     ForeignKey('work.id')),
            # NOTE: typically, there will be either artist(s) OR ensemble(s)
            # (though there can also be both) conductors and soloists are
            # associated with ensembles
            # the following two are denorms of the intersect table (let's see if it
            # is worth maintaining these for convenience)
            Column('performer_ids',     ARRAY(Integer)),  # ForeignKey('performer.id')
            Column('ensemble_ids',      ARRAY(Integer)),  # ForeignKey('ensemble.id')
            # NOTE: this also is a denorm, since conductor will be a role in the performer table
            Column('conductor_id',      Integer,     ForeignKey('person.id')),
            # REVISIT: commenting this out for now, not sure if soloists really needs to be
            # distinct from performers
            #Column('soloist_ids',     ARRAY(Integer)),    # ForeignKey('performer.id')
            Column('recording_id',      Integer,     ForeignKey('recording.id')),
            Column('mstr_play_id',      Integer,     ForeignKey('play.id')),  # not null if syndicated

            # misc/external information
            Column('tags',              ARRAY(Text)),
            Column('notes',             ARRAY(Text)),
            Column('ext_id',            Text),
            Column('ext_mstr_id',       Text),

            # technical
            Column('start_time',        TIMESTAMP(timezone=True)),
            Column('end_time',          TIMESTAMP(timezone=True)),
            Column('duration',          Interval),

            # constraints/indexes
            UniqueConstraint('station_id', 'play_date', 'play_start', 'work_id')
        ),
        Entity.PLAY_PERFORMER: Table('play_performer', meta,
            Column('id',                Integer,     primary_key=True),
            Column('play_id',           Integer,     ForeignKey('play.id'), nullable=False),
            Column('performer_id',      Integer,     ForeignKey('performer.id'), nullable=False),
            Column('mstr_play_perf_id', Integer,     ForeignKey('play_performer.id')),  # not null if syndicated (denorm)
            Column('notes',             ARRAY(Text)),

            # constraints/indexes
            UniqueConstraint('play_id', 'performer_id')
        ),
        Entity.PLAY_ENSEMBLE: Table('play_ensemble', meta,
            Column('id',                Integer,     primary_key=True),
            Column('play_id',           Integer,     ForeignKey('play.id'), nullable=False),
            Column('ensemble_id',       Integer,     ForeignKey('ensemble.id'), nullable=False),
            Column('mstr_play_ens_id',  Integer,     ForeignKey('play_ensemble.id')),  # not null if syndicated (denorm)
            Column('notes',             ARRAY(Text)),

            # constraints/indexes
            UniqueConstraint('play_id', 'ensemble_id')
        ),
        Entity.PLAY_SEQ: Table('play_seq', meta,
            Column('id',                Integer,     primary_key=True),
            Column('seq_hash',          BigInteger,  nullable=False),  # hash over a sequence of plays (presumably — confirm producer)
            Column('hash_level',        Integer,     nullable=False),
            Column('hash_type',         Integer,     nullable=False),
            Column('play_id',           Integer,     ForeignKey('play.id'), nullable=False),
            # denorms
            Column('station_id',        Integer,     ForeignKey('station.id')),
            Column('program_name',      Text),
            Column('program_id',        Integer,     ForeignKey('program.id')),
            Column('prog_play_id',      Integer,     ForeignKey('program_play.id')),

            # constraints/indexes
            UniqueConstraint('hash_level', 'hash_type', 'play_id'),
            Index('play_seq_seq_hash', 'seq_hash')
        ),
        Entity.PLAY_SEQ_MATCH: Table('play_seq_match', meta,
            Column('id',                Integer,     primary_key=True),
            Column('seq_len',           Integer,     nullable=False),
            Column('seq_time',          Integer),    # elapsed time
            Column('pub_start_play_id', Integer,     ForeignKey('play.id'), nullable=False),
            Column('pub_end_play_id',   Integer,     ForeignKey('play.id'), nullable=False),
            Column('sub_start_play_id', Integer,     ForeignKey('play.id'), nullable=False),
            Column('sub_end_play_id',   Integer,     ForeignKey('play.id'), nullable=False),
            # denorms from {pub,sub}_start_play_id
            Column('pub_station_id',    Integer,     ForeignKey('station.id')),
            Column('sub_station_id',    Integer,     ForeignKey('station.id')),
            Column('pub_program_name',  Text)
        ),
        Entity.ENTITY_STRING: Table('entity_string', meta,
            Column('id',                Integer,     primary_key=True),
            Column('entity_str',        Text,        nullable=False),
            # source field (category): program, composer, conductor, ensemble, performer, work, etc.
            Column('source_fld',        Text,        nullable=False),
            Column('parsed_data',       JSONB),
            Column('station_id',        Integer,     ForeignKey('station.id')),  # denorm
            Column('prog_play_id',      Integer,     ForeignKey('program_play.id')),
            Column('play_id',           Integer,     ForeignKey('play.id')),

            # constraints/indexes
            UniqueConstraint('entity_str', 'source_fld', 'station_id')
        ),
        Entity.ENTITY_REF: Table('entity_ref', meta,
            Column('id',                Integer,     primary_key=True),
            Column('entity_ref',        Text,        nullable=False),
            Column('entity_type',       Text,        nullable=False),
            Column('ref_source',        Text,        nullable=True),
            Column('addl_ref',          Text,        nullable=True),
            Column('source_data',       JSONB),
            Column('is_raw',            Boolean),
            # REVISIT: master entities are stored in the same table, for now!!!
            Column('is_entity',         Boolean),
            Column('mstr_entity_name',  Text),
            Column('mstr_entity_id',    Integer,     ForeignKey('entity_ref.id')),
            Column('entity_strength',   Integer),    # experimental, meaning TBD
            Column('ref_strength',      Integer),    # experimental, meaning TBD

            # constraints/indexes
            UniqueConstraint('entity_ref', 'entity_type', 'ref_source')
        ),
        Entity.TO_DO_LIST: Table('to_do_list', meta,
            Column('id',                Integer,     primary_key=True),
            Column('action',            Text,        nullable=False),
            Column('depends_on',        Text),
            Column('status',            Text,        nullable=False),
            Column('station_id',        Integer,     ForeignKey('station.id')),
            Column('prog_play_id',      Integer,     ForeignKey('program_play.id')),
            Column('play_id',           Integer,     ForeignKey('play.id')),
            Column('created_at',        TIMESTAMP(timezone=True), nullable=False),
            Column('updated_at',        TIMESTAMP(timezone=True), nullable=False)
        )
    }
class ProgramHotelBid(Base):
    """A hotel's bid within a travel-program RFP round.

    Appears to mirror an externally sourced bid record (see
    ``lanyon_bid_id`` — presumably the Lanyon/Cvent RFP system; confirm).
    Holds the offered rates, amenity terms, tax/fee terms, up to five
    seasonal rate schedules, and up to eight blackout periods.  ``py_``
    prefixed columns appear to hold the prior-year value of the matching
    field — confirm against the loader.
    """
    __tablename__ = 'program_hotel_bid'

    # identity / status
    id = Column(Integer, primary_key=True)
    lanyon_bid_id = Column(BigInteger, nullable=False)  # external bid identifier
    program_hotel_id = Column(ForeignKey('program_hotel.id'), nullable=False)
    bid_order = Column(Integer, nullable=False)  # ordering of bids for the hotel (round/rank — confirm)
    bid_status = Column(String(32), nullable=False)
    bid_info = Column(String(32))
    consultant_recommendation = Column(Text)
    client_recommendation = Column(Text)
    is_shared = Column(Boolean)
    is_saved = Column(Boolean)
    property_overview = Column(Text)

    # headline rate and year-over-year comparison
    rate_type = Column(String(16))
    rate_currency = Column(String(8))
    rate = Column(Numeric(20, 2))
    rate_yoy_variance = Column(Float(53))
    rate_tcs = Column(Numeric(20, 2))
    dynamic_pricing = Column(CHAR(1))  # flag: dynamic (percent-off-BAR) pricing offered
    dynamic_percent_discount = Column(Integer)

    # room inventory
    property_number_rooms = Column(Integer)
    room_nights = Column(Integer)
    room_type1_number = Column(Integer)
    room_inventory_percent = Column(Float(53))
    cancellation_policy = Column(String(16))

    # amenities: offered flag (single char), fee if not included, and
    # prior-year flag (py_*) for each amenity
    amenities_breakfast = Column(String(1))
    amenities_breakfast_fee = Column(Numeric(10, 2))
    py_amenities_breakfast = Column(String(1))
    amenities_hsia = Column(String(1))  # high-speed internet access
    amenities_hsia_fee = Column(Numeric(10, 2))
    py_amenities_hsia = Column(String(1))
    amenities_wifi = Column(String(1))
    amenities_wifi_fee = Column(Numeric(10, 2))
    py_amenities_wifi = Column(String(1))
    amenities_parking = Column(String(1))
    amenities_parking_fee = Column(Numeric(10, 2))
    py_amenities_parking = Column(String(1))
    amenities_airporttx = Column(String(1))  # airport transfer
    amenities_airporttx_fee = Column(Numeric(10, 2))
    py_amenities_airporttx = Column(String(1))
    amenities_officetx = Column(String(1))  # office transfer
    amenities_officetx_fee = Column(Numeric(10, 2))
    py_amenities_officetx = Column(String(1))
    amenities_fitness = Column(String(1))
    amenities_fitness_fee = Column(Numeric(10, 2))
    py_amenities_fitness = Column(String(1))
    amenities_localphone = Column(String(1))
    amenities_localphone_fee = Column(Numeric(10, 2))
    py_amenities_localphone = Column(String(1))
    amenities_tollphone = Column(String(1))
    amenities_tollphone_fee = Column(Numeric(10, 2))
    py_amenities_tollphone = Column(String(1))
    bid_comments = Column(Text)

    # taxes/fees: amount, unit-of-measure flag, included-in-rate flag, and
    # prior-year included flag for each fee type
    fee_vat = Column(Numeric(10, 2))
    fee_vat_uom = Column(String(1))
    fee_vat_included = Column(String(1))
    py_fee_vat_included = Column(String(1))
    fee_vatfb = Column(Numeric(10, 2))  # VAT on food & beverage (presumably — confirm)
    fee_vatfb_uom = Column(String(1))
    fee_vatfb_included = Column(String(1))
    py_fee_vatfb_included = Column(String(1))
    fee_service = Column(Numeric(10, 2))
    fee_service_uom = Column(String(1))
    fee_service_included = Column(String(1))
    py_fee_service_included = Column(String(1))
    fee_lodgetax = Column(Numeric(10, 2))
    fee_lodgetax_uom = Column(String(1))
    fee_lodgetax_included = Column(String(1))
    py_fee_lodgetax_included = Column(String(1))
    fee_statetax = Column(Numeric(10, 2))
    fee_statetax_uom = Column(String(1))
    fee_statetax_included = Column(String(1))
    py_fee_statetax_included = Column(String(1))
    fee_citytax = Column(Numeric(10, 2))
    fee_citytax_uom = Column(String(1))
    fee_citytax_included = Column(String(1))
    py_fee_citytax_included = Column(String(1))
    fee_occ = Column(Numeric(10, 2))  # occupancy tax
    fee_occ_uom = Column(String(1))
    fee_occ_included = Column(String(1))
    py_fee_occ_included = Column(String(1))
    fee_other = Column(Numeric(10, 2))
    fee_other_uom = Column(String(1))
    fee_other_description = Column(Text)
    fee_other_included = Column(String(1))
    py_fee_other_included = Column(String(1))

    # seasonal rates: up to 5 seasons x 3 room types x single/double, for
    # lra/nlra/govt rate classes (lra presumably = last-room-availability,
    # nlra = non-LRA, govt = government rate — confirm)
    count_seasons = Column(Integer)
    season_1_start = Column(Date)
    season_1_end = Column(Date)
    lra_s1_rt1_sgl = Column(Numeric(20, 2))
    lra_s1_rt1_dbl = Column(Numeric(20, 2))
    lra_s1_rt2_sgl = Column(Numeric(20, 2))
    lra_s1_rt2_dbl = Column(Numeric(20, 2))
    lra_s1_rt3_sgl = Column(Numeric(20, 2))
    lra_s1_rt3_dbl = Column(Numeric(20, 2))
    nlra_s1_rt1_sgl = Column(Numeric(20, 2))
    nlra_s1_rt1_dbl = Column(Numeric(20, 2))
    nlra_s1_rt2_sgl = Column(Numeric(20, 2))
    nlra_s1_rt2_dbl = Column(Numeric(20, 2))
    nlra_s1_rt3_sgl = Column(Numeric(20, 2))
    nlra_s1_rt3_dbl = Column(Numeric(20, 2))
    govt_s1_rt1_sgl = Column(Numeric(20, 2))
    govt_s1_rt1_dbl = Column(Numeric(20, 2))
    govt_s1_rt2_sgl = Column(Numeric(20, 2))
    govt_s1_rt2_dbl = Column(Numeric(20, 2))
    govt_s1_rt3_sgl = Column(Numeric(20, 2))
    govt_s1_rt3_dbl = Column(Numeric(20, 2))
    season_2_start = Column(Date)
    season_2_end = Column(Date)
    lra_s2_rt1_sgl = Column(Numeric(20, 2))
    lra_s2_rt1_dbl = Column(Numeric(20, 2))
    lra_s2_rt2_sgl = Column(Numeric(20, 2))
    lra_s2_rt2_dbl = Column(Numeric(20, 2))
    lra_s2_rt3_sgl = Column(Numeric(20, 2))
    lra_s2_rt3_dbl = Column(Numeric(20, 2))
    nlra_s2_rt1_sgl = Column(Numeric(20, 2))
    nlra_s2_rt1_dbl = Column(Numeric(20, 2))
    nlra_s2_rt2_sgl = Column(Numeric(20, 2))
    nlra_s2_rt2_dbl = Column(Numeric(20, 2))
    nlra_s2_rt3_sgl = Column(Numeric(20, 2))
    nlra_s2_rt3_dbl = Column(Numeric(20, 2))
    govt_s2_rt1_sgl = Column(Numeric(20, 2))
    govt_s2_rt1_dbl = Column(Numeric(20, 2))
    govt_s2_rt2_sgl = Column(Numeric(20, 2))
    govt_s2_rt2_dbl = Column(Numeric(20, 2))
    govt_s2_rt3_sgl = Column(Numeric(20, 2))
    govt_s2_rt3_dbl = Column(Numeric(20, 2))
    season_3_start = Column(Date)
    season_3_end = Column(Date)
    lra_s3_rt1_sgl = Column(Numeric(20, 2))
    lra_s3_rt1_dbl = Column(Numeric(20, 2))
    lra_s3_rt2_sgl = Column(Numeric(20, 2))
    lra_s3_rt2_dbl = Column(Numeric(20, 2))
    lra_s3_rt3_sgl = Column(Numeric(20, 2))
    lra_s3_rt3_dbl = Column(Numeric(20, 2))
    nlra_s3_rt1_sgl = Column(Numeric(20, 2))
    nlra_s3_rt1_dbl = Column(Numeric(20, 2))
    nlra_s3_rt2_sgl = Column(Numeric(20, 2))
    nlra_s3_rt2_dbl = Column(Numeric(20, 2))
    nlra_s3_rt3_sgl = Column(Numeric(20, 2))
    nlra_s3_rt3_dbl = Column(Numeric(20, 2))
    govt_s3_rt1_sgl = Column(Numeric(20, 2))
    govt_s3_rt1_dbl = Column(Numeric(20, 2))
    govt_s3_rt2_sgl = Column(Numeric(20, 2))
    govt_s3_rt2_dbl = Column(Numeric(20, 2))
    govt_s3_rt3_sgl = Column(Numeric(20, 2))
    govt_s3_rt3_dbl = Column(Numeric(20, 2))
    season_4_start = Column(Date)
    season_4_end = Column(Date)
    lra_s4_rt1_sgl = Column(Numeric(20, 2))
    lra_s4_rt1_dbl = Column(Numeric(20, 2))
    lra_s4_rt2_sgl = Column(Numeric(20, 2))
    lra_s4_rt2_dbl = Column(Numeric(20, 2))
    lra_s4_rt3_sgl = Column(Numeric(20, 2))
    lra_s4_rt3_dbl = Column(Numeric(20, 2))
    nlra_s4_rt1_sgl = Column(Numeric(20, 2))
    nlra_s4_rt1_dbl = Column(Numeric(20, 2))
    nlra_s4_rt2_sgl = Column(Numeric(20, 2))
    nlra_s4_rt2_dbl = Column(Numeric(20, 2))
    nlra_s4_rt3_sgl = Column(Numeric(20, 2))
    nlra_s4_rt3_dbl = Column(Numeric(20, 2))
    govt_s4_rt1_sgl = Column(Numeric(20, 2))
    govt_s4_rt1_dbl = Column(Numeric(20, 2))
    govt_s4_rt2_sgl = Column(Numeric(20, 2))
    govt_s4_rt2_dbl = Column(Numeric(20, 2))
    govt_s4_rt3_sgl = Column(Numeric(20, 2))
    govt_s4_rt3_dbl = Column(Numeric(20, 2))
    season_5_start = Column(Date)
    season_5_end = Column(Date)
    lra_s5_rt1_sgl = Column(Numeric(20, 2))
    lra_s5_rt1_dbl = Column(Numeric(20, 2))
    lra_s5_rt2_sgl = Column(Numeric(20, 2))
    lra_s5_rt2_dbl = Column(Numeric(20, 2))
    lra_s5_rt3_sgl = Column(Numeric(20, 2))
    lra_s5_rt3_dbl = Column(Numeric(20, 2))
    nlra_s5_rt1_sgl = Column(Numeric(20, 2))
    nlra_s5_rt1_dbl = Column(Numeric(20, 2))
    nlra_s5_rt2_sgl = Column(Numeric(20, 2))
    nlra_s5_rt2_dbl = Column(Numeric(20, 2))
    nlra_s5_rt3_sgl = Column(Numeric(20, 2))
    nlra_s5_rt3_dbl = Column(Numeric(20, 2))
    govt_s5_rt1_sgl = Column(Numeric(20, 2))
    govt_s5_rt1_dbl = Column(Numeric(20, 2))
    govt_s5_rt2_sgl = Column(Numeric(20, 2))
    govt_s5_rt2_dbl = Column(Numeric(20, 2))
    govt_s5_rt3_sgl = Column(Numeric(20, 2))
    govt_s5_rt3_dbl = Column(Numeric(20, 2))

    # blackout periods (up to 8): dates when the bid rates do not apply
    count_blackouts = Column(Integer)
    blackout_1_start_date = Column(Date)
    blackout_1_end_date = Column(Date)
    blackout_1_name = Column(String(64))
    blackout_2_start_date = Column(Date)
    blackout_2_end_date = Column(Date)
    blackout_2_name = Column(String(64))
    blackout_3_start_date = Column(Date)
    blackout_3_end_date = Column(Date)
    blackout_3_name = Column(String(64))
    blackout_4_start_date = Column(Date)
    blackout_4_end_date = Column(Date)
    blackout_4_name = Column(String(64))
    blackout_5_start_date = Column(Date)
    blackout_5_end_date = Column(Date)
    blackout_5_name = Column(String(64))
    blackout_6_start_date = Column(Date)
    blackout_6_end_date = Column(Date)
    blackout_6_name = Column(String(64))
    blackout_7_start_date = Column(Date)
    blackout_7_end_date = Column(Date)
    blackout_7_name = Column(String(64))
    blackout_8_start_date = Column(Date)
    blackout_8_end_date = Column(Date)
    blackout_8_name = Column(String(64))

    # negotiation summary / scoring
    bar_rate_discount = Column(Numeric(20, 2))
    offered_lra = Column(Numeric(20, 2))
    py_lra = Column(Numeric(20, 2))
    yoy_lra_percent_change = Column(Float(53))
    offered_nlra = Column(Numeric(20, 2))
    py_nlra = Column(Numeric(20, 2))
    yoy_nlra_percent_change = Column(Float(53))
    requested_adr_round_1 = Column(Numeric(20, 2))
    overall_score = Column(Float(53))

    # audit timestamps, set by the database (server-side now())
    created = Column(TIMESTAMP(precision=6),
                     nullable=False,
                     server_default=text("now()"))
    modified = Column(TIMESTAMP(precision=6),
                      nullable=False,
                      server_default=text("now()"))

    program_hotel = relationship('ProgramHotel')
Example #19
0
class ConsistencyCheck(AbstractConcreteBase, DQBase):
    """
    Representation of abstract consistency check table.

    Concrete subclasses share these columns and each get their own table
    (``_table_prefix`` is presumably combined with a per-check suffix to
    form the table name — confirm in the table factory).
    """

    __abstract__ = True
    _table_prefix = "consistency_check"

    id = Column(BIGINT, primary_key=True)
    type = Column(TEXT, nullable=False)        # check type (check["type"])
    name = Column(TEXT, nullable=False)        # check name (check["name"])
    description = Column(TEXT)
    left_table = Column(TEXT, nullable=False)  # left-hand table of the comparison
    right_table = Column(TEXT, nullable=False)  # right-hand table of the comparison

    status = Column(TEXT)  # "valid" / "invalid", set by init_row
    time_filter = Column(
        TEXT,
        nullable=False,
        default=TIME_FILTER_DEFAULT,
        server_default=TIME_FILTER_DEFAULT,
    )
    task_ts = Column(TIMESTAMP(timezone=True), nullable=False, index=True)  # timestamp of the task run
    created_at = Column(
        DateTime(timezone=True),
        server_default=text("NOW()"),  # row insertion time, set server-side
        nullable=False,
        index=True,
    )

    @declared_attr
    def __table_args__(cls):
        """
        Build the per-table unique constraint.

        Concrete classes derived from this abstract one need a uniqueness
        check over the columns listed below, but every constraint requires a
        unique name — therefore we use @declared_attr and embed the concrete
        table's name in the constraint name.
        :return: tuple of table args (the unique constraint)
        """
        return (
            UniqueConstraint(
                "type",
                "name",
                "left_table",
                "right_table",
                "task_ts",
                "time_filter",
                name=f"{cls.__tablename__}_unique",
            ),
        )

    def init_row(
        self,
        check: Dict,
        results: AggregatedResult,
        left_table_name: str,
        right_table_name: str,
        time_filter=None,
        context: Dict = None,
        **_,
    ):
        """
        Set result to consistency check object.

        :param check: check definition; "type", "name" and "description" are read
        :param results: aggregated outcome; results.failed == 0 means "valid"
        :param left_table_name: name of the left-hand table compared
        :param right_table_name: name of the right-hand table compared
        :param time_filter: stored as-is when a string, otherwise JSON-encoded
            (so None is persisted as the string "null")
        :param context: task context; "task_ts" is read
        """
        self.type = check["type"]
        # NOTE(review): context is effectively required despite the None
        # default — context["task_ts"] raises TypeError when it is omitted;
        # confirm that all callers pass it.
        self.task_ts = context["task_ts"]
        self.name = check["name"]
        self.description = check["description"]
        self.left_table = left_table_name
        self.right_table = right_table_name
        if isinstance(time_filter, str):
            self.time_filter = time_filter
        else:
            # non-string filters (dicts/lists/None) are persisted as JSON text
            self.time_filter = json.dumps(time_filter)
        self.status = "valid" if results.failed == 0 else "invalid"

    def __repr__(self):
        """Short identifying string: check type, name and task timestamp."""
        return f"Rule ({self.type} - {self.name} - {self.task_ts})"
Example #20
0
class CreatedUpdatedTimestampMixin:
    """Mixin adding audit timestamp columns to a model.

    ``date_created`` is set once on INSERT; ``date_updated`` is refreshed on
    every UPDATE via the ``onupdate`` hook.
    """
    # NOTE(review): datetime.utcnow() returns a *naive* datetime even though
    # the column is TIMESTAMP(timezone=True); a tz-aware default such as
    # datetime.now(timezone.utc) would be unambiguous — confirm how the
    # driver/DB interprets the naive value before changing.
    date_created = db.Column(TIMESTAMP(timezone=True), default=datetime.utcnow)
    date_updated = db.Column(TIMESTAMP(timezone=True),
                             onupdate=datetime.utcnow)
Example #21
0
class Job(BaseModel):
    """A job: a (possibly recurring) batch of non-interactive pipeline runs.

    A job belongs to a project/pipeline pair and carries the full pipeline
    definition plus the per-run parameters, so runs are reproducible even if
    the pipeline changes later.
    """

    __tablename__ = "jobs"
    __table_args__ = (Index("ix_jobs_project_uuid_pipeline_uuid",
                            "project_uuid", "pipeline_uuid"), )

    name = db.Column(
        db.String(255),
        unique=False,
        nullable=False,
        # For migrating users.
        server_default=text("'job'"),
    )

    pipeline_name = db.Column(
        db.String(255),
        unique=False,
        nullable=False,
        # For migrating users.
        server_default=text("''"),
    )

    uuid = db.Column(db.String(36), primary_key=True)
    project_uuid = db.Column(
        db.String(36),
        db.ForeignKey("projects.uuid", ondelete="CASCADE"),
        index=True,
        nullable=False,
    )
    pipeline_uuid = db.Column(db.String(36), index=True, nullable=False)

    # Jobs that are to be scheduled once (right now) or once in the
    # future will have no schedule (null). Recurring jobs store a
    # schedule expression here.
    schedule = db.Column(db.String(100), nullable=True)

    # A list of dictionaries. The length of the list is the number of
    # non interactive runs that will be run, one for each parameters
    # dictionary. A parameter dictionary maps step uuids to a dictionary,
    # containing the parameters of that step for that particular run.
    # [{ <step_uuid>: {"a": 1}, ...}, ...]
    parameters = db.Column(
        JSONB,
        nullable=False,
        # This way migrated entries that did not have this column will
        # still be valid. Note that the entries will be stored as a list
        # of dicts.
        server_default="[]",
    )

    # Note that this column also contains the parameters that were
    # stored within the pipeline definition file. These are not the job
    # parameters, but the original ones.
    pipeline_definition = db.Column(
        JSONB,
        nullable=False,
        # This way migrated entries that did not have this column will
        # still be valid.
        server_default="{}",
    )

    pipeline_run_spec = db.Column(
        JSONB,
        nullable=False,
        # This way migrated entries that did not have this column will
        # still be valid.
        server_default="{}",
    )

    # So that we can efficiently look for jobs to run.
    next_scheduled_time = db.Column(TIMESTAMP(timezone=True), index=True)

    # So that we can show the user the last time it was scheduled/run.
    last_scheduled_time = db.Column(TIMESTAMP(timezone=True), index=True)

    # So that we can "stamp" every non interactive run with the
    # execution number it belongs to, e.g. the first time a job runs it
    # will be batch 1, then 2, etc.
    total_scheduled_executions = db.Column(
        db.Integer,
        unique=False,
        server_default=text("0"),
    )

    pipeline_runs = db.relationship(
        "NonInteractivePipelineRun",
        lazy="select",
        # let the db take care of cascading deletions
        # https://docs.sqlalchemy.org/en/13/orm/relationship_api.html#sqlalchemy.orm.relationship.params.passive_deletes
        # A value of True indicates that unloaded child items should not
        # be loaded during a delete operation on the parent. Normally,
        # when a parent item is deleted, all child items are loaded so
        # that they can either be marked as deleted, or have their
        # foreign key to the parent set to NULL. Marking this flag as
        # True usually implies an ON DELETE <CASCADE|SET NULL> rule is
        # in place which will handle updating/deleting child rows on the
        # database side.
        passive_deletes=True,
        # https://docs.sqlalchemy.org/en/14/orm/cascades.html#using-foreign-key-on-delete-cascade-with-orm-relationships
        # In order to use ON DELETE foreign key cascades in conjunction
        # with relationship(), it’s important to note first and foremost
        # that the relationship.cascade setting must still be configured
        # to match the desired “delete” or “set null” behavior
        # Essentially, the specified behaviour in the FK column
        # and the one specified in the relationship must match.
        cascade="all, delete",
        # When querying a job and its runs the runs will be sorted by
        # job schedule number and the index of the pipeline in that job.
        order_by=(
            "[desc(NonInteractivePipelineRun.job_run_index), "
            "desc(NonInteractivePipelineRun.job_run_pipeline_run_index)]"),
    )

    # The status of a job can be DRAFT, PENDING, STARTED, SUCCESS,
    # ABORTED, FAILURE. Jobs start as DRAFT, this indicates that the job
    # has been created but that has not been started by the user. Once a
    # job is started by the user, what happens depends on the type of
    # job. One time jobs become PENDING, and become STARTED once they
    # are run by the scheduler and their pipeline runs are added to the
    # queue. Once they are completed, their status will be SUCCESS, if
    # they are aborted, their status will be set to ABORTED. Recurring
    # jobs, characterized by having a schedule, become STARTED, and can
    # only move to the ABORTED state in case they get cancelled, which
    # implies that the job will not be scheduled anymore. One time jobs
    # which fail to run (the related pipeline runs scheduling fails) are
    # set to FAILURE, this is not related to a failure at the pipeline
    # run level.
    status = db.Column(
        db.String(15),
        unique=False,
        nullable=False,
        # Pre-existing Jobs of migrating users will be set to SUCCESS.
        server_default=text("'SUCCESS'"),
    )

    # Per-step parameter grid used by the UI to (re)generate run
    # parameter combinations.
    strategy_json = db.Column(
        JSONB,
        nullable=False,
        server_default="{}",
    )

    # Deferred: only loaded on explicit access, since env variables may
    # be large and are rarely needed when listing jobs.
    env_variables = deferred(
        db.Column(
            JSONB,
            nullable=False,
            server_default="{}",
        ))

    created_time = db.Column(
        db.DateTime,
        unique=False,
        nullable=False,
        index=True,
        # For migrating users.
        server_default=text("timezone('utc', now())"),
    )

    def __repr__(self):
        return f"<Job: {self.uuid}>"
Example #22
0
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.dialects.postgresql import INTERVAL, TIMESTAMP, TSTZRANGE, UUID

db = SQLAlchemy()

# Expose commonly used PostgreSQL column types directly on the db object so
# models can reference them uniformly as db.<TYPE>.
db.INTERVAL = INTERVAL()
db.TIMESTAMP = TIMESTAMP()
db.TSTZRANGE = TSTZRANGE()
db.UUID = UUID()


class Profile(db.Model):
    """User profile record, stored in the ``account`` schema."""

    __tablename__ = 'profile'
    __table_args__ = {'schema': 'account'}

    # as_uuid=False keeps the primary key as a string instead of uuid.UUID.
    id = db.Column(UUID(as_uuid=False), primary_key=True)
    user_name = db.Column(db.Text)
    employee_id = db.Column(UUID())
Example #23
0
class QualityCheck(AbstractConcreteBase, DQBase):
    """
    Representation of abstract quality check table.

    Concrete subclasses share this column layout (table names are built from
    ``_table_prefix``) and store one row per (attribute, rule, task_ts,
    time_filter) combination, enforced by the unique constraint below.
    """

    __abstract__ = True
    _table_prefix = "quality_check"

    id = Column(BIGINT, primary_key=True)
    attribute = Column(TEXT, nullable=False)
    rule_name = Column(TEXT, nullable=False)
    rule_type = Column(TEXT, nullable=False)
    rule_description = Column(TEXT)
    total_records = Column(INTEGER)

    # Failure metrics for a single run, plus a rolling 30-day median.
    failed = Column(INTEGER)
    median_30_day_failed = Column(DOUBLE_PRECISION)
    failed_percentage = Column(DOUBLE_PRECISION)

    # Success metrics, symmetric to the failure metrics above.
    passed = Column(INTEGER)
    median_30_day_passed = Column(DOUBLE_PRECISION)
    passed_percentage = Column(DOUBLE_PRECISION)

    # "valid" when no record failed the rule, "invalid" otherwise.
    status = Column(TEXT)
    time_filter = Column(
        TEXT,
        default=TIME_FILTER_DEFAULT,
        server_default=TIME_FILTER_DEFAULT,
        nullable=False,
    )
    # Logical timestamp of the task run that produced this row.
    task_ts = Column(TIMESTAMP(timezone=True), nullable=False, index=True)
    created_at = Column(
        DateTime(timezone=True),
        server_default=text("NOW()"),
        nullable=False,
        index=True,
    )

    @declared_attr
    def __table_args__(cls):
        """
        Concrete classes derived from this abstract one should have unique check among the columns
        that below. But the constraint needs to have unique name, therefore we are using
        @declared_attr here to construct name of the constraint using its table name.
        :return:
        """
        return (
            UniqueConstraint(
                "attribute",
                "rule_name",
                "rule_type",
                "task_ts",
                "time_filter",
                name=f"{cls.__tablename__}_unique",
            ),
        )

    def init_row(
        self,
        rule: Rule,
        results: AggregatedResult,
        conn: Connector,
        context: Dict = None,
    ):
        """
        Count metrics we want to measure and set them to quality check object.

        NOTE(review): ``context`` defaults to None but ``context["task_ts"]``
        is read unconditionally — callers are expected to always pass it;
        confirm, or a TypeError will surface here.
        """

        # todo - add to doc
        self.task_ts = context["task_ts"]
        self.attribute = rule.attribute
        self.rule_name = rule.name
        self.rule_type = rule.type
        self.rule_description = rule.description

        self.total_records = results.total_records
        self.failed = results.failed
        self.passed = results.passed

        self.set_medians(conn)

        if rule.time_filter:
            self.time_filter = str(rule.time_filter)
        else:
            self.time_filter = TIME_FILTER_DEFAULT
        self.failed_percentage = self._perc(self.failed, self.total_records)
        self.passed_percentage = self._perc(self.passed, self.total_records)
        self.status = "invalid" if self.failed > 0 else "valid"

    def _perc(self, a, b):
        # Return a as a percentage of b; 0 when b is zero.
        res = 0
        try:
            res = (a / b) * 100
        except ZeroDivisionError:
            pass
        return res

    def set_medians(self, conn: Connector, days=30):
        """
        Calculate median of passed/failed quality checks from last 30 days.

        NOTE(review): the query filters only on task_ts, not on the
        attribute/rule of this row — the medians therefore mix all checks
        stored in this table; confirm that is intended.
        """
        now = datetime.today().date()
        past = now - timedelta(days=days)
        cls = self.__class__

        session = conn.make_session()
        checks = (
            session.query(cls.failed, cls.passed)
            .filter(and_(cls.task_ts <= str(now), cls.task_ts >= str(past)))
            .all()
        )
        session.expunge_all()
        session.commit()
        session.close()

        failed = [ch.failed for ch in checks]
        self.median_30_day_failed = median(failed) if failed else None

        passed = [ch.passed for ch in checks]
        self.median_30_day_passed = median(passed) if passed else None

    def __repr__(self):
        return f"Rule ({self.attribute} - {self.rule_name} - {self.rule_type} - {self.task_ts})"
Example #24
0
def downgrade():
    """
    Because the writing of this migration coincides with the removal of
    raw_sql/procs/update_gccrashes.sql and raw_sql/procs/backfill_gccrashes.sql
    we can't rely on loading those files from disc.
    So for the downgrade we'll just simply re-execute the necessary SQL.
    """
    # Recreate the matview table that the upgrade dropped.
    op.create_table(
        'gccrashes',
        Column(u'report_date', TIMESTAMP(timezone=True), nullable=False),
        Column(u'product_version_id', INTEGER(), nullable=False),
        Column(u'build', NUMERIC(), nullable=True),
        Column(u'gc_count_madu', REAL(), nullable=False),
    )
    # Restore the administrative backfill procedure (deletes a day's rows
    # and re-runs the update for that day).
    op.execute("""
        CREATE OR REPLACE FUNCTION backfill_gccrashes(
            updateday date, check_period interval DEFAULT '01:00:00'::interval) RETURNS boolean
            LANGUAGE plpgsql
            AS $$
        BEGIN
        -- function for administrative backfilling of gccrashes
        -- designed to be called by backfill_matviews
        DELETE FROM gccrashes WHERE report_date = updateday;
        PERFORM update_gccrashes(updateday, false, check_period);

        RETURN TRUE;
        END;$$;
    """)
    # Restore the daily update procedure that populates gccrashes.
    op.execute("""
        CREATE OR REPLACE FUNCTION update_gccrashes(
            updateday date,
            checkdata boolean DEFAULT true,
            check_period interval DEFAULT '01:00:00'::interval
        ) RETURNS boolean
            LANGUAGE plpgsql
            SET client_min_messages TO 'ERROR'
        AS $$
        BEGIN
        -- this procedure goes through raw crashes and creates a matview with count of
        -- is_gc annotated crashes per build ID
        -- designed to be run only once for each day

        -- check that it hasn't already been run

        IF checkdata THEN
            PERFORM 1 FROM gccrashes
            WHERE report_date = updateday LIMIT 1;
            IF FOUND THEN
                RAISE NOTICE 'gccrashes has already been run for the day %.',updateday;
                RETURN FALSE;
            END IF;
        END IF;

        -- check if reports_clean is complete
        IF NOT reports_clean_done(updateday, check_period) THEN
            IF checkdata THEN
                RAISE NOTICE 'Reports_clean has not been updated to the end of %',updateday;
                RETURN FALSE;
            ELSE
                RAISE INFO 'reports_clean not updated';
                RETURN FALSE;
            END IF;
        END IF;

        INSERT INTO gccrashes (
            report_date,
            product_version_id,
            build,
            gc_count_madu
        )
        WITH raw_crash_filtered AS (
            SELECT
                  uuid
                , json_object_field_text(r.raw_crash, 'IsGarbageCollecting')
                as is_garbage_collecting
            FROM
                raw_crashes r
            WHERE
                date_processed BETWEEN updateday::timestamptz
                    AND updateday::timestamptz + '1 day'::interval
        )
        SELECT updateday
            , product_version_id
            , build
            , crash_madu(sum(
                CASE WHEN r.is_garbage_collecting = '1' THEN 1 ELSE 0 END), sum(adu_count), 1
            ) as gc_count_madu
        FROM reports_clean
            JOIN product_versions USING (product_version_id)
            JOIN build_adu USING (product_version_id)
            LEFT JOIN raw_crash_filtered r ON r.uuid::text = reports_clean.uuid
        WHERE utc_day_is(date_processed, updateday)
                AND tstz_between(date_processed, build_date(build), sunset_date)
                AND product_versions.build_type = 'nightly'
                AND tstz_between(adu_date, build_date(build), sunset_date)
                AND adu_count > 0
                AND build_date(build) = build_adu.build_date
                AND date_processed - build_date(build) < '7 days'::interval
                AND length(build::text) >= 10
        GROUP BY build, product_version_id
        ORDER BY build;

        RETURN TRUE;
        END;
        $$;
    """)

    # Re-register the procedure with the matview machinery.
    load_stored_proc(op, ['backfill_matviews.sql'])
class ProgramHotel(Base):
    """Link between a program and a hotel property, with bid/negotiation state.

    Holds per-program hotel status, pricing figures, amenity/fee inclusion
    flags, comment counters and bid metadata for one hotel in one program.
    """

    __tablename__ = 'program_hotel'

    id = Column(BigInteger, primary_key=True)
    program_id = Column(ForeignKey('program.id'), nullable=False)
    hotel_property_id = Column(ForeignKey('hotel_property.id'), nullable=False)
    hotel_master_id = Column(BigInteger)
    hotel_status = Column(String(16),
                          nullable=False,
                          server_default=text("'None'::character varying"))
    hotel_status_datetime = Column(TIMESTAMP(precision=6))
    dynamic_pricing_recommended = Column(Boolean,
                                         nullable=False,
                                         server_default=text("false"))
    # Spend / rate figures.
    spend_total_usd = Column(Numeric(16, 2))
    room_nights_total = Column(Integer)
    is_preferred = Column(Boolean)
    abr = Column(Numeric(16, 2))
    anr = Column(Numeric(16, 2))
    bar = Column(Numeric(16, 2))
    bcd = Column(Numeric(16, 2))
    olset_rating = Column(Integer)
    market_share = Column(Float(53))
    py_bid = Column(String(32))
    rate_availability = Column(Float(53))
    los_greater_than_5 = Column(Float(53))
    # NOTE(review): column name looks like a typo for "closest"; it is part
    # of the DB schema, so it cannot be renamed here.
    distance_closet_miles = Column(Float(53))
    anr_lanyon = Column(Numeric(16, 2))
    anr_lanyon_currency_code = Column(String(4))
    anr_dynamic = Column(Numeric(10, 0))
    consultant_recommendation = Column(
        String(32), server_default=text("'None'::character varying"))
    client_response = Column(String(16),
                             nullable=False,
                             server_default=text("'None'::character varying"))
    # Comment/activity counters.
    activity_count = Column(Integer, nullable=False, server_default=text("0"))
    count_consultant_comments = Column(Integer, server_default=text("0"))
    last_consultant_comment = Column(TIMESTAMP(precision=6))
    count_client_comments = Column(Integer, server_default=text("0"))
    last_client_comment = Column(TIMESTAMP(precision=6))
    count_free_comments = Column(Integer, server_default=text("0"))
    count_batch_comments = Column(Integer, server_default=text("0"))
    count_smartselect_comments = Column(Integer, server_default=text("0"))
    py_is_dynamic_pricing = Column(Boolean, server_default=text("false"))
    py_room_nights = Column(Integer)
    # Amenity inclusion flags (single-character codes).
    amenities_breakfast = Column(String(1))
    amenities_hsia = Column(String(1))
    amenities_wifi = Column(String(1))
    amenities_parking = Column(String(1))
    amenities_airporttx = Column(String(1))
    amenities_officetx = Column(String(1))
    amenities_fitness = Column(String(1))
    amenities_local_phone = Column(String(1))
    amenities_toll_phone = Column(String(1))
    # Fee/tax inclusion flags (single-character codes).
    fee_vatincluded = Column(String(1))
    fee_serviceincluded = Column(String(1))
    fee_lodgetaxincluded = Column(String(1))
    fee_statetaxincluded = Column(String(1))
    fee_citytaxincluded = Column(String(1))
    fee_occincluded = Column(String(1))
    fee_otherincluded = Column(String(1))
    pa_share_status = Column(String(16))
    count_pa_consultant_comments = Column(Integer,
                                          nullable=False,
                                          server_default=text("0"))
    count_pa_client_comments = Column(Integer,
                                      nullable=False,
                                      server_default=text("0"))
    count_pa_activity = Column(Integer,
                               nullable=False,
                               server_default=text("0"))
    count_pa_client_activity = Column(Integer,
                                      nullable=False,
                                      server_default=text("0"))
    # Bid metadata.
    bid_status = Column(String(32))
    bid_consultant_recommendation = Column(String(255))
    bid_client_recommendation = Column(String(255))
    bid_info = Column(String(64))
    bid_read_only = Column(Boolean,
                           nullable=False,
                           server_default=text("false"))
    is_reviewed_client = Column(Boolean, server_default=text("false"))
    is_reviewed_consultant = Column(Boolean, server_default=text("false"))
    created = Column(TIMESTAMP(precision=6),
                     nullable=False,
                     server_default=text("now()"))
    modified = Column(TIMESTAMP(precision=6),
                      nullable=False,
                      server_default=text("now()"))
    modified_label = Column(String(255),
                            server_default=text("''::character varying"))

    hotel_property = relationship('HotelProperty')
    program = relationship('Program')
Example #26
0
Parts of the application use these tables to create sqlalchemy core queries,
which are then handed off to the connectionpool (txchatexamples.util.ConnectionPool)
to be rendered.

"""

from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey, Sequence
from sqlalchemy.dialects.postgresql import TIMESTAMP

md = MetaData()

chat_users_id_seq = Sequence('chat_users_id_seq')

# Registered chat users; token is a 32-character authentication handle.
users = Table(
    'chat_users', md,
    Column('user_id', Integer(), chat_users_id_seq, primary_key=True),
    Column('token', String(32), unique=True),
    Column('name', String(200), unique=True))

chat_logs_seq = Sequence('chat_logs_id_seq')

# Chat message log; one row per message.
logs = Table(
    'chat_logs', md,
    Column('log_id',
           Integer(),
           chat_logs_seq,
           server_default=chat_logs_seq.next_value(),
           primary_key=True),
    # ForeignKey given only a table name resolves to the column in that
    # table whose name matches this column, i.e. chat_users.user_id.
    Column('user_id', Integer(), ForeignKey('chat_users')),
    Column('when', TIMESTAMP(timezone=False)), Column('text', String()))
class ProgramSetting(Base):
    """Per-program configuration: night/inventory thresholds and the amenity
    and fee inclusion flags used when evaluating hotel bids."""

    __tablename__ = 'program_settings'

    id = Column(
        BigInteger,
        primary_key=True,
        server_default=text("nextval('program_settings_id_seq'::regclass)"))
    program_id = Column(ForeignKey('program.id'), nullable=False)
    # Integer used as a boolean flag by serialize() below.
    preferred_yoy_increase = Column(Integer)
    number_nights_no_yoy = Column(Integer)
    number_nights_1 = Column(Integer)
    number_nights_percentage_1 = Column(Integer)
    number_nights_2 = Column(Integer)
    number_nights_percentage_2 = Column(Integer)
    number_nights_3 = Column(Integer)
    number_nights_percentage_3 = Column(Integer)
    number_nights_threshold = Column(Integer)
    inventory_number_1 = Column(Integer)
    inventory_percentage_1 = Column(Integer)
    inventory_number_2 = Column(Integer)
    inventory_percentage_2 = Column(Integer)
    inventory_number_3 = Column(Integer)
    inventory_percentage_3 = Column(Integer)
    amenities_breakfast = Column(Boolean)
    amenities_parking = Column(Boolean)
    amenities_fitness = Column(Boolean)
    amenities_wifi = Column(Boolean)
    amenities_hsia = Column(Boolean)
    amenities_airport = Column(Boolean)
    amenities_office_tx = Column(Boolean)
    amenities_phone = Column(Boolean)
    amenities_toll = Column(Boolean)
    fee_vat = Column(Boolean)
    fee_service = Column(Boolean)
    fee_lodge = Column(Boolean)
    fee_tax_city = Column(Boolean)
    fee_tax_state = Column(Boolean)
    fee_occ = Column(Boolean)
    fee_vat_fb = Column(Boolean)
    created = Column(TIMESTAMP(precision=6), server_default=text("now()"))
    modified = Column(TIMESTAMP(precision=6), server_default=text("now()"))

    program = relationship('Program')

    def serialize(self):
        """Return a camelCase dict representation for API responses.

        NOTE(review): the percentage fields are formatted with '{}%' even
        when the value is None, producing the string 'None%' — confirm
        whether clients expect that. number_nights_3 and
        number_nights_percentage_3 are not serialized.
        """
        return {
            'id':
            self.id,
            'clientProgramId':
            self.program_id,
            'preferredYoyIncrease':
            ('Allow Increase YOY'
             if self.preferred_yoy_increase else 'No Increase YOY'),
            'numberNightsNoYoy':
            self.number_nights_no_yoy,
            'numberNights1':
            self.number_nights_1,
            'numberNightsPercentage1':
            '{}%'.format(self.number_nights_percentage_1),
            'numberNights2':
            self.number_nights_2,
            'numberNightsPercentage2':
            '{}%'.format(self.number_nights_percentage_2),
            'numberNightsThreshold':
            self.number_nights_threshold,
            'inventoryNumber1':
            self.inventory_number_1,
            'inventoryPercentage1':
            '{}%'.format(self.inventory_percentage_1),
            'inventoryNumber2':
            self.inventory_number_2,
            'inventoryPercentage2':
            '{}%'.format(self.inventory_percentage_2),
            'inventoryNumber3':
            self.inventory_number_3,
            'inventoryPercentage3':
            '{}%'.format(self.inventory_percentage_3),
            'breakfast':
            self.amenities_breakfast,
            'parking':
            self.amenities_parking,
            'fitness':
            self.amenities_fitness,
            'wifi':
            self.amenities_wifi,
            'hsia':
            self.amenities_hsia,
            'airport':
            self.amenities_airport,
            'office':
            self.amenities_office_tx,
            'localPhone':
            self.amenities_phone,
            'tollCalls':
            self.amenities_toll,
            'feeVat':
            self.fee_vat,
            'feeService':
            self.fee_service,
            'feeLodge':
            self.fee_lodge,
            'feeCityTax':
            self.fee_tax_city,
            'feeStateTax':
            self.fee_tax_state,
            'feeOcc':
            self.fee_occ,
            'feeVatFb':
            self.fee_vat_fb
        }

    def deserialize(self, data):
        """Update this row in place from a request payload dict.

        NOTE(review): several input keys are PascalCase
        ('PreferredYoyIncrease', 'FeeVat', ...) while serialize() emits
        camelCase ('preferredYoyIncrease', 'feeVat', ...) — confirm the
        asymmetry is intentional for the calling API.
        """
        self.preferred_yoy_increase = data.get('PreferredYoyIncrease')
        self.number_nights_no_yoy = data.get('NumberNightsNoYoy')
        self.number_nights_1 = data.get('NumberNights1')
        self.number_nights_percentage_1 = data.get('NumberNightsPercentage1')
        self.number_nights_2 = data.get('NumberNights2')
        self.number_nights_percentage_2 = data.get('NumberNightsPercentage2')
        # self.number_nights_3 = data['NumberNights3']
        # self.number_nights_percentage_3 = data.get('NumberNightsPercentage3')
        self.number_nights_threshold = data.get('NumberNightsThreshold')
        self.inventory_number_1 = data.get('InventoryNumber1')
        self.inventory_percentage_1 = data.get('InventoryPercentage1')
        self.inventory_number_2 = data.get('InventoryNumber2')
        self.inventory_percentage_2 = data.get('InventoryPercentage2')
        self.inventory_number_3 = data.get('InventoryNumber3')
        self.inventory_percentage_3 = data.get('InventoryPercentage3')
        self.amenities_breakfast = data.get('breakfast')
        self.amenities_parking = data.get('parking')
        self.amenities_fitness = data.get('fitness')
        self.amenities_wifi = data.get('wifi')
        self.amenities_hsia = data.get('hsia')
        self.amenities_airport = data.get('airport')
        self.amenities_office_tx = data.get('office')
        self.amenities_phone = data.get('localPhone')
        self.amenities_toll = data.get('tollCalls')
        self.fee_vat = data.get('FeeVat')
        self.fee_service = data.get('FeeService')
        self.fee_lodge = data.get('FeeLodge')
        self.fee_tax_city = data.get('FeeCityTax')
        self.fee_tax_state = data.get('FeeStateTax')
        self.fee_occ = data.get('FeeOcc')
        self.fee_vat_fb = data.get('FeeVatFb')
        self.modified = datetime.now()
Example #28
0
def default_fkey(*args, **kwargs):
    """Build a ForeignKeyConstraint that cascades on both UPDATE and DELETE."""
    constraint = ForeignKeyConstraint(
        *args,
        onupdate='CASCADE',
        ondelete='CASCADE',
        **kwargs,
    )
    return constraint


# Site advertisements; each ad links to a media file and an external target.
ads = Table(
    'ads',
    metadata,
    Column('id', Integer(), primary_key=True, nullable=False),
    Column('owner', Text(), nullable=False),
    Column('link_target', Text(), nullable=False),
    Column('file', Integer(), nullable=False),
    Column('start', TIMESTAMP(), nullable=True),
    Column('end', TIMESTAMP(), nullable=True),
    default_fkey(['file'], ['media.mediaid'], name='ads_file_fkey'),
)

# Index on the expiry timestamp so expired ads can be filtered cheaply.
Index('ind_ads_end', ads.c.end)

# API tokens; a user may hold several tokens, hence the composite key.
api_tokens = Table(
    'api_tokens',
    metadata,
    Column('userid', Integer(), primary_key=True, nullable=False),
    Column('token', String(length=64), primary_key=True, nullable=False),
    Column('description', String()),
    default_fkey(['userid'], ['login.userid'], name='api_tokens_userid_fkey'),
)
Example #29
0
    Column('amount_max', Integer(), nullable=False),
    Column('settings', String(length=20), nullable=False, server_default=''),
    default_fkey(['userid'], ['login.userid'],
                 name='commishprice_userid_fkey'),
)

# One commission price row per (class, user, title) combination.
Index('ind_classid_userid_title',
      commishprice.c.classid,
      commishprice.c.userid,
      commishprice.c.title,
      unique=True)

# Single-row bookkeeping table recording the last cron execution time.
cron_runs = Table(
    'cron_runs',
    metadata,
    Column('last_run', TIMESTAMP(), nullable=False),
)

# Email domains blocked from registration, with an audit trail of who
# added each entry and why.
emailblacklist = Table(
    'emailblacklist',
    metadata,
    Column('id', Integer(), primary_key=True, nullable=False),
    Column('added_by', Integer(), nullable=False),
    Column('domain_name', String(length=252), nullable=False, unique=True),
    Column('reason', Text(), nullable=False),
    default_fkey(['added_by'], ['login.userid'],
                 name='emailblacklist_userid_fkey'),
)

emailverify = Table(
    'emailverify',
Example #30
0
File: model.py Project: zwant/ngcd
class Repository(ProjectionBase):
    """Projection of a source-code repository and its latest push state.

    Tracks identity (external_id, names, type), links, the users involved in
    creation/deletion/last push, the current and previous HEAD SHAs, and the
    list of known commits.
    """

    __tablename__ = 'repositories'
    id = Column(Integer, primary_key=True)
    external_id = Column(String, nullable=False, index=True)
    is_deleted = Column(Boolean, nullable=False, index=True)
    short_name = Column(String, nullable=False)
    full_name = Column(String, nullable=False, unique=True, index=True)
    type = Column(String, nullable=False, index=True)
    description = Column(String, nullable=True)
    html_url = Column(String, nullable=True)
    api_url = Column(String, nullable=True)
    created_by = Column(AdaptableJSONB, nullable=True)
    deleted_by = Column(AdaptableJSONB, nullable=True)
    last_pusher = Column(AdaptableJSONB, nullable=True)
    head_sha = Column(String, nullable=True)
    previous_head_sha = Column(String, nullable=True)
    last_update = Column(TIMESTAMP(timezone=True), nullable=False)
    commits = Column(AdaptableJSONB, nullable=True, index=True)

    def __init__(self,
                 external_id,
                 id=None,
                 is_deleted=False,
                 short_name=None,
                 full_name=None,
                 type=None,
                 description=None,
                 html_url=None,
                 api_url=None,
                 created_by=None,
                 deleted_by=None,
                 last_pusher=None,
                 head_sha=None,
                 previous_head_sha=None,
                 last_update=None,
                 commits=None):
        self.id = id
        self.external_id = external_id
        self.is_deleted = is_deleted
        self.short_name = short_name
        self.full_name = full_name
        self.type = type
        self.description = description
        self.html_url = html_url
        self.api_url = api_url
        self.created_by = created_by
        self.deleted_by = deleted_by
        self.last_pusher = last_pusher
        self.head_sha = head_sha
        self.previous_head_sha = previous_head_sha
        self.last_update = last_update
        # Default to a fresh empty list per instance (never share a mutable
        # default); `is None` instead of `== None` per PEP 8.
        self.commits = [] if commits is None else commits

    def __repr__(self):
        return ('<Repository {}> is_deleted: {}, full_name: {}, '
                'short_name: {}, last_update: {}, last_pusher: {}, '
                'head_sha: {}, previous_head_sha: {}').format(
                    self.external_id,
                    self.is_deleted,
                    self.full_name,
                    self.short_name,
                    self.last_update,
                    self.last_pusher,
                    self.head_sha,
                    self.previous_head_sha)

    def as_dict(self):
        """Return a JSON-serializable dict view of this repository."""
        return {
            'id': self.id,
            'external_id': self.external_id,
            'short_name': self.short_name,
            'full_name': self.full_name,
            'type': self.type,
            'is_deleted': self.is_deleted,
            'html_url': self.html_url,
            'api_url': self.api_url,
            # Guard: last_update is None on instances not yet populated
            # (the column is NOT NULL, but __init__ defaults it to None);
            # previously this raised AttributeError.
            'last_update': (self.last_update.isoformat()
                            if self.last_update is not None else None),
            'last_pusher': self.last_pusher,
            'head_sha': self.head_sha,
            'previous_head_sha': self.previous_head_sha,
            'commits': self.commits
        }