Example #1
0
    def test_fk_copy(self):
        """Copying a ForeignKey / ForeignKeyConstraint preserves every
        constructor keyword (onupdate, ondelete, use_alter, name,
        deferrable, initially, link_to_name)."""
        col_foo = Column('foo', Integer)
        col_bar = Column('bar', Integer)
        meta = MetaData()
        tbl = Table('t', meta, col_foo, col_bar)

        options = {
            'onupdate': "X",
            'ondelete': "Y",
            'use_alter': True,
            'name': 'f1',
            'deferrable': "Z",
            'initially': "Q",
            'link_to_name': True,
        }

        fk = ForeignKey(col_foo, **options)
        fkc = ForeignKeyConstraint((col_foo,), (col_bar,), **options)

        # The constraint must be attached to a table before copying.
        tbl.append_constraint(fkc)
        fk_copied = fk.copy()
        fkc_copied = fkc.copy()

        for attr, expected in options.items():
            eq_(getattr(fk_copied, attr), expected)
            eq_(getattr(fkc_copied, attr), expected)
Example #2
0
class V_Hop(Base, _Hop):
    # Traceroute hop rows stored in "v_hop"; the column definitions come
    # from the _Hop mixin (not visible in this file chunk).
    __tablename__ = "v_hop"
    __table_args__ = (
        # Composite index supporting lookups by (ipaddr, ttl).
        Index('ix_v_hop_ipaddr_ttl', 'ipaddr', 'ttl'),
        # Deleting the parent v_trace row removes its hops as well.
        ForeignKeyConstraint(['trace'], ['v_trace.id'], ondelete='CASCADE')
    )
Example #3
0
class N_Trace(Base, _Trace):
    # Trace rows stored in "n_trace"; columns come from the _Trace mixin
    # (not visible in this file chunk).
    __tablename__ = "n_trace"
    __table_args__ = (
        # Deleting the parent n_scan row removes its traces as well.
        ForeignKeyConstraint(['scan'], ['n_scan.id'], ondelete='CASCADE'),
    )
Example #4
0
class PstDetail(pbbBase, CommonModel):
    """Detail of a PBB service request (table ``pst_detail``).

    A row is identified by the service-number ("nopel") components
    (kanwil/kantor/year/bundle/sequence) plus the applicant's full NOP
    (property identifier) components.  All query helpers return SQLAlchemy
    query objects built on ``pbbDBSession`` (defined elsewhere).

    Fixes vs. the original: the ``cls.kd_kanwil == r['kd_kanwil']`` filter
    was duplicated in every query method, and the 13 key conditions were
    copy-pasted three times; they now live in ``_nopel_conditions``.
    """
    __tablename__ = 'pst_detail'
    __table_args__ = (
        ForeignKeyConstraint([
            'kd_kanwil', 'kd_kantor', 'thn_pelayanan', 'bundel_pelayanan',
            'no_urut_pelayanan'
        ], [
            'pst_permohonan.kd_kanwil', 'pst_permohonan.kd_kantor',
            'pst_permohonan.thn_pelayanan', 'pst_permohonan.bundel_pelayanan',
            'pst_permohonan.no_urut_pelayanan'
        ]),
        ARGS,
    )
    # Service-number (nopel) components.
    kd_kanwil = Column(String(2), primary_key=True)
    kd_kantor = Column(String(2), primary_key=True)
    thn_pelayanan = Column(String(4), primary_key=True)
    bundel_pelayanan = Column(String(4), primary_key=True)
    no_urut_pelayanan = Column(String(3), primary_key=True)
    # Applicant property (NOP) components.
    kd_propinsi_pemohon = Column(String(2), primary_key=True)
    kd_dati2_pemohon = Column(String(2), primary_key=True)
    kd_kecamatan_pemohon = Column(String(3), primary_key=True)
    kd_kelurahan_pemohon = Column(String(3), primary_key=True)
    kd_blok_pemohon = Column(String(3), primary_key=True)
    no_urut_pemohon = Column(String(4), primary_key=True)
    kd_jns_op_pemohon = Column(String(1), primary_key=True)
    # Non-key attributes.
    kd_jns_pelayanan = Column(String(2))
    thn_pajak_permohonan = Column(String(4))
    nama_penerima = Column(String(30))
    catatan_penyerahan = Column(String(75))
    status_selesai = Column(Integer)
    tgl_selesai = Column(Date)
    kd_seksi_berkas = Column(String(2))
    tgl_penyerahan = Column(Date)
    nip_penyerah = Column(String(18))

    @classmethod
    def _nopel_conditions(cls, r):
        # One equality condition per primary-key component; shared by all
        # query helpers below.  *r* is a mapping holding the raw values.
        return (
            cls.kd_kanwil == r['kd_kanwil'],
            cls.kd_kantor == r['kd_kantor'],
            cls.thn_pelayanan == r['thn_pelayanan'],
            cls.bundel_pelayanan == r['bundel_pelayanan'],
            cls.no_urut_pelayanan == r['no_urut_pelayanan'],
            cls.kd_propinsi_pemohon == r['kd_propinsi_pemohon'],
            cls.kd_dati2_pemohon == r['kd_dati2_pemohon'],
            cls.kd_kecamatan_pemohon == r['kd_kecamatan_pemohon'],
            cls.kd_kelurahan_pemohon == r['kd_kelurahan_pemohon'],
            cls.kd_blok_pemohon == r['kd_blok_pemohon'],
            cls.no_urut_pemohon == r['no_urut_pemohon'],
            cls.kd_jns_op_pemohon == r['kd_jns_op_pemohon'],
        )

    @classmethod
    def get_by_nopel(cls, r):
        """Return the single detail row matching *r*, or None."""
        return pbbDBSession.query(cls).\
            filter(*cls._nopel_conditions(r)).\
            first()

    @classmethod
    def get_position(cls, r):
        """Query joining the detail row to the Seksi currently holding the
        file (via kd_seksi_berkas).  Returns an un-executed query, as the
        original did."""
        return pbbDBSession.query(
            cls.kd_kanwil, cls.kd_kantor, cls.thn_pelayanan,
            cls.bundel_pelayanan, cls.no_urut_pelayanan,
            cls.kd_propinsi_pemohon, cls.kd_dati2_pemohon,
            cls.kd_kecamatan_pemohon, cls.kd_kelurahan_pemohon,
            cls.kd_blok_pemohon, cls.no_urut_pemohon,
            cls.kd_jns_op_pemohon, cls.kd_jns_pelayanan,
            cls.thn_pajak_permohonan, cls.nama_penerima,
            cls.catatan_penyerahan, cls.status_selesai, cls.tgl_selesai,
            cls.kd_seksi_berkas, cls.tgl_penyerahan, cls.nip_penyerah,
            Seksi.kd_seksi, Seksi.nm_seksi).\
            filter(cls.kd_seksi_berkas == Seksi.kd_seksi,
                   *cls._nopel_conditions(r))

    @classmethod
    def get_tracking(cls, r):
        """Query the file's send/receive movements.

        Joins PstBerkasKirim to PstBerkasTerima on the full key plus the
        sending agenda number; sender/receiver section names are labelled
        'pengirim' and 'penerima'.  Returns an un-executed query.
        """
        SeksiAlias = aliased(Seksi, name='seksi_alias')
        join_conditions = (
            # detail row -> its outgoing-movement rows
            cls.kd_kanwil == PstBerkasKirim.kd_kanwil,
            cls.kd_kantor == PstBerkasKirim.kd_kantor,
            cls.thn_pelayanan == PstBerkasKirim.thn_pelayanan,
            cls.bundel_pelayanan == PstBerkasKirim.bundel_pelayanan,
            cls.no_urut_pelayanan == PstBerkasKirim.no_urut_pelayanan,
            cls.kd_propinsi_pemohon == PstBerkasKirim.kd_propinsi_pemohon,
            cls.kd_dati2_pemohon == PstBerkasKirim.kd_dati2_pemohon,
            cls.kd_kecamatan_pemohon == PstBerkasKirim.kd_kecamatan_pemohon,
            cls.kd_kelurahan_pemohon == PstBerkasKirim.kd_kelurahan_pemohon,
            cls.kd_blok_pemohon == PstBerkasKirim.kd_blok_pemohon,
            cls.no_urut_pemohon == PstBerkasKirim.no_urut_pemohon,
            cls.kd_jns_op_pemohon == PstBerkasKirim.kd_jns_op_pemohon,
            # sending section name
            PstBerkasKirim.kd_seksi == Seksi.kd_seksi,
            # outgoing movement -> matching receipt row
            PstBerkasKirim.kd_kanwil == PstBerkasTerima.kd_kanwil,
            PstBerkasKirim.kd_kantor == PstBerkasTerima.kd_kantor,
            PstBerkasKirim.thn_pelayanan == PstBerkasTerima.thn_pelayanan,
            PstBerkasKirim.bundel_pelayanan == PstBerkasTerima.bundel_pelayanan,
            PstBerkasKirim.no_urut_pelayanan == PstBerkasTerima.no_urut_pelayanan,
            PstBerkasKirim.kd_propinsi_pemohon == PstBerkasTerima.kd_propinsi_pemohon,
            PstBerkasKirim.kd_dati2_pemohon == PstBerkasTerima.kd_dati2_pemohon,
            PstBerkasKirim.kd_kecamatan_pemohon == PstBerkasTerima.kd_kecamatan_pemohon,
            PstBerkasKirim.kd_kelurahan_pemohon == PstBerkasTerima.kd_kelurahan_pemohon,
            PstBerkasKirim.kd_blok_pemohon == PstBerkasTerima.kd_blok_pemohon,
            PstBerkasKirim.no_urut_pemohon == PstBerkasTerima.no_urut_pemohon,
            PstBerkasKirim.kd_jns_op_pemohon == PstBerkasTerima.kd_jns_op_pemohon,
            PstBerkasKirim.kd_seksi == PstBerkasTerima.kd_seksi,
            PstBerkasKirim.thn_agenda_kirim == PstBerkasTerima.thn_agenda_kirim,
            PstBerkasKirim.no_agenda_kirim == PstBerkasTerima.no_agenda_kirim,
            # receiving section name
            PstBerkasTerima.kd_seksi_terima == SeksiAlias.kd_seksi,
        )
        return pbbDBSession.query(
            cls.kd_kanwil, cls.kd_kantor, cls.thn_pelayanan,
            cls.bundel_pelayanan, cls.no_urut_pelayanan,
            cls.kd_propinsi_pemohon, cls.kd_dati2_pemohon,
            cls.kd_kecamatan_pemohon, cls.kd_kelurahan_pemohon,
            cls.kd_blok_pemohon, cls.no_urut_pemohon,
            cls.kd_jns_op_pemohon,
            PstBerkasKirim.kd_seksi,
            PstBerkasKirim.thn_agenda_kirim,
            PstBerkasKirim.no_agenda_kirim,
            PstBerkasKirim.tgl_kirim,
            PstBerkasTerima.kd_seksi_terima,
            PstBerkasTerima.tgl_terima,
            Seksi.nm_seksi.label('pengirim'),
            SeksiAlias.nm_seksi.label('penerima')).\
            filter(*(cls._nopel_conditions(r) + join_conditions))
Example #5
0
class Involved(Base):
    """A person involved in a recorded accident.

    Each row references its parent accident via the composite
    (accident_id, provider_code) foreign key into Marker; deleting the
    marker cascades to its involved rows.
    """
    __tablename__ = "involved"
    id = Column(Integer, primary_key=True)
    provider_code = Column(Integer)
    accident_id = Column(Integer)
    involved_type = Column(Integer)
    license_acquiring_date = Column(Integer)
    age_group = Column(Integer)
    sex = Column(Integer)
    car_type = Column(Integer)
    safety_measures = Column(Integer)
    home_city = Column(Integer)
    injury_severity = Column(Integer)
    injured_type = Column(Integer)
    injured_position = Column(Integer)
    population_type = Column(Integer)
    home_district = Column(Integer)
    home_nafa = Column(Integer)
    home_area = Column(Integer)
    home_municipal_status = Column(Integer)
    home_residence_type = Column(Integer)
    hospital_time = Column(Integer)
    medical_type = Column(Integer)
    release_dest = Column(Integer)
    safety_measures_use = Column(Integer)
    late_deceased = Column(Integer)
    # Composite FK built from the Column objects above; CASCADE removes
    # involved rows when the parent Marker row is deleted.
    __table_args__ = (ForeignKeyConstraint([accident_id, provider_code],
                                           [Marker.id, Marker.provider_code],
                                           ondelete="CASCADE"), {})

    def serialize(self):
        """Return a plain dict of every column value (for JSON responses)."""
        return {
            "id": self.id,
            "provider_code": self.provider_code,
            "accident_id": self.accident_id,
            "involved_type": self.involved_type,
            "license_acquiring_date": self.license_acquiring_date,
            "age_group": self.age_group,
            "sex": self.sex,
            "car_type": self.car_type,
            "safety_measures": self.safety_measures,
            "home_city": self.home_city,
            "injury_severity": self.injury_severity,
            "injured_type": self.injured_type,
            "injured_position": self.injured_position,
            "population_type": self.population_type,
            "home_district": self.home_district,
            "home_nafa": self.home_nafa,
            "home_area": self.home_area,
            "home_municipal_status": self.home_municipal_status,
            "home_residence_type": self.home_residence_type,
            "hospital_time": self.hospital_time,
            "medical_type": self.medical_type,
            "release_dest": self.release_dest,
            "safety_measures_use": self.safety_measures_use,
            "late_deceased": self.late_deceased
        }

    # Flask-Login integration
    # NOTE(review): these methods look copied from a user/account model —
    # Involved is not an authenticatable principal, and Flask-Login expects
    # get_id() to return a unicode string, not an int.  Confirm they are
    # actually needed here.
    def is_authenticated(self):
        return True

    def is_active(self):
        return True

    def is_anonymous(self):
        return False

    def get_id(self):
        return self.id
Example #6
0
class PembayaranSppt(pbbBase, CommonModel):
    """Payment rows for SPPT tax bills (table ``pembayaran_sppt``).

    Columns are reflected from the database (``autoload``) in the PBB
    schema; the composite FK links each payment to its ``sppt`` bill on the
    full NOP + tax-year key.

    Fixes vs. the original: ``get_by_kecamatan`` no longer filters on
    ``kd_kelurahan`` (copy-pasted from ``get_by_kelurahan``; the KECAMATAN
    key format carries no kelurahan component), and a stale commented-out
    ``relationship`` block was removed.
    """
    __tablename__ = 'pembayaran_sppt'
    __table_args__ = (ForeignKeyConstraint([
        'kd_propinsi', 'kd_dati2', 'kd_kecamatan', 'kd_kelurahan', 'kd_blok',
        'no_urut', 'kd_jns_op', 'thn_pajak_sppt'
    ], [
        'sppt.kd_propinsi', 'sppt.kd_dati2', 'sppt.kd_kecamatan',
        'sppt.kd_kelurahan', 'sppt.kd_blok', 'sppt.no_urut', 'sppt.kd_jns_op',
        'sppt.thn_pajak_sppt'
    ]), {
        'extend_existing': True,
        'autoload': True,
        'schema': pbbBase.pbb_schema
    })

    @classmethod
    def query_data(cls):
        """Base query over all payment rows."""
        return pbbDBSession.query(cls)

    @classmethod
    def get_by_nop(cls, p_nop):
        """Payments for one property (raw NOP string), all tax years."""
        pkey = FixLength(NOP)
        pkey.set_raw(p_nop)
        query = cls.query_data()
        return query.filter_by(
            kd_propinsi=pkey['kd_propinsi'],
            kd_dati2=pkey['kd_dati2'],
            kd_kecamatan=pkey['kd_kecamatan'],
            kd_kelurahan=pkey['kd_kelurahan'],
            kd_blok=pkey['kd_blok'],
            no_urut=pkey['no_urut'],
            kd_jns_op=pkey['kd_jns_op'],
        )

    @classmethod
    def get_by_nop_thn(cls, p_nop, p_tahun):
        """Payments for one property in one tax year."""
        query = cls.get_by_nop(p_nop)
        return query.filter_by(thn_pajak_sppt=p_tahun)

    @classmethod
    def get_by_kelurahan(cls, p_kode, p_tahun):
        """Payments in a kelurahan (village) for one tax year."""
        pkey = FixLength(DESA)
        pkey.set_raw(p_kode)
        query = cls.query_data()
        return query.filter_by(kd_propinsi=pkey['kd_propinsi'],
                               kd_dati2=pkey['kd_dati2'],
                               kd_kecamatan=pkey['kd_kecamatan'],
                               kd_kelurahan=pkey['kd_kelurahan'],
                               thn_pajak_sppt=p_tahun)

    @classmethod
    def get_by_kecamatan(cls, p_kode, p_tahun):
        """Payments in a kecamatan (district) for one tax year.

        The original additionally filtered by ``kd_kelurahan`` — a
        copy-paste from ``get_by_kelurahan``; that filter is dropped here.
        """
        pkey = FixLength(KECAMATAN)
        pkey.set_raw(p_kode)
        query = cls.query_data()
        return query.filter_by(kd_propinsi=pkey['kd_propinsi'],
                               kd_dati2=pkey['kd_dati2'],
                               kd_kecamatan=pkey['kd_kecamatan'],
                               thn_pajak_sppt=p_tahun)

    @classmethod
    def get_by_tanggal(cls, p_kode, p_tahun):
        """Payments made on a given date (digits extracted from *p_kode*).

        NOTE(review): unlike the other lookups, ``DateVar`` is used without
        a ``FixLength(...)`` wrapper, ``pkey.get_value`` is passed uncalled,
        and ``p_tahun`` is ignored.  Behavior kept as-is; confirm intent
        before relying on this method.
        """
        pkey = DateVar
        p_kode = re.sub("[^0-9]", "", p_kode)
        pkey.set_raw(p_kode)
        query = cls.query_data()
        return query.filter_by(tgl_pembayaran_sppt=pkey.get_value)

    @classmethod
    def get_rekap_by_kecamatan(cls, p_kode, p_tahun):
        """Per-kelurahan totals (penalty 'denda' and paid amount 'jumlah')
        within one kecamatan for one tax year."""
        pkey = FixLength(KECAMATAN)
        pkey.set_raw(p_kode)
        query = pbbDBSession.query(
            cls.kd_propinsi, cls.kd_dati2, cls.kd_kecamatan, cls.kd_kelurahan,
            func.sum(cls.denda_sppt).label('denda'),
            func.sum(cls.pbb_yg_dibayar_sppt).label('jumlah')).\
            group_by(cls.kd_propinsi, cls.kd_dati2, cls.kd_kecamatan,
                     cls.kd_kelurahan)
        return query.filter_by(kd_propinsi=pkey['kd_propinsi'],
                               kd_dati2=pkey['kd_dati2'],
                               kd_kecamatan=pkey['kd_kecamatan'],
                               thn_pajak_sppt=p_tahun)

    @classmethod
    def get_rekap_by_thn(cls, p_tahun):
        """Per-kecamatan totals (penalty and paid amount) for one tax year."""
        query = pbbDBSession.query(
            cls.kd_propinsi, cls.kd_dati2, cls.kd_kecamatan,
            func.sum(cls.denda_sppt).label('denda'),
            func.sum(cls.pbb_yg_dibayar_sppt).label('jumlah')).\
            group_by(cls.kd_propinsi, cls.kd_dati2, cls.kd_kecamatan)
        return query.filter_by(thn_pajak_sppt=p_tahun)
Example #7
0
from sqlalchemy import Column, Integer, String, ForeignKey, create_engine, Table, ForeignKeyConstraint
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref

Base = declarative_base()

# Association table between users and addresses.
# Fixed: the original declared BOTH an inline ForeignKey on each column AND
# an explicit ForeignKeyConstraint for the same column, which emits two
# (duplicate) FK constraints per column in DDL.  The explicit constraints
# are kept because they carry ondelete='CASCADE'.
relation_user_address = Table(
    'relation_user_address', Base.metadata,
    Column('user_id', Integer, nullable=False),
    Column('address_id', Integer, nullable=False),
    ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
    ForeignKeyConstraint(['address_id'], ['addresses.id'], ondelete='CASCADE'))


class User(Base):
    """Application user account mapped to the ``users`` table."""
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    fullname = Column(String)
    password = Column(String)

    def __repr__(self):
        # Fixed: the original template read "password'%s'" (missing '='),
        # producing e.g. <User(... password'secret')>.
        return "<User(name='%s', fullname='%s', password='%s')>" % \
               (self.name, self.fullname, self.password)


class Address(Base):
    __tablename__ = 'addresses'
    id = Column(Integer, primary_key=True)
    email_address = Column(String, nullable=False)
    users = relationship('User',
Example #8
0
def _history_mapper(local_mapper):
    """Build and attach a history (versioning) mapper for *local_mapper*'s class.

    Creates a ``<table>_history`` table mirroring the mapped table, maps a
    dynamically created ``<Class>History`` class to it, stores that mapper on
    the original class as ``__history_mapper__``, and (for the root of a
    hierarchy) adds an integer ``version`` column to the original table.
    """
    cls = local_mapper.class_

    # set the "active_history" flag
    # on column-mapped attributes so that the old version
    # of the info is always loaded (currently sets it on all attributes)
    for prop in local_mapper.iterate_properties:
        getattr(local_mapper.class_, prop.key).impl.active_history = True

    super_mapper = local_mapper.inherits
    # __history_mapper__ is set on the superclass by a previous call for it.
    super_history_mapper = getattr(cls, '__history_mapper__', None)

    polymorphic_on = None
    super_fks = []  # (local column key, referenced history column) pairs

    def _col_copy(col):
        # History columns must not keep uniqueness or defaults: many history
        # rows may share a value, and values are copied verbatim from the
        # live row rather than generated.
        col = col.copy()
        col.unique = False
        col.default = col.server_default = None
        return col

    if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
        cols = []
        for column in local_mapper.local_table.c:
            # Skip bookkeeping columns added by the versioning machinery
            # itself (predicate defined elsewhere in this module).
            if _is_versioning_col(column):
                continue

            col = _col_copy(column)

            # Columns referencing the parent table instead point at the
            # parent's *history* table primary key.
            if super_mapper and col_references_table(column,
                                                     super_mapper.local_table):
                super_fks.append(
                    (col.key,
                     list(super_history_mapper.local_table.primary_key)[0]))

            cols.append(col)

            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

        if super_mapper:
            # Joined-table inheritance: also tie the version numbers together.
            super_fks.append(
                ('version', super_history_mapper.local_table.c.version))

        version_meta = {"version_meta": True}  # add column.info to identify
        # columns specific to versioning

        # "version" stores the integer version id.  This column is
        # required.
        cols.append(
            Column('version',
                   Integer,
                   primary_key=True,
                   autoincrement=False,
                   info=version_meta))

        # "changed" column stores the UTC timestamp of when the
        # history row was created.
        # This column is optional and can be omitted.
        cols.append(
            Column('changed',
                   DateTime,
                   default=datetime.datetime.utcnow,
                   info=version_meta))

        if super_fks:
            # zip(*pairs) splits the (key, referenced-column) pairs into the
            # two parallel sequences ForeignKeyConstraint expects.
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        table = Table(local_mapper.local_table.name + '_history',
                      local_mapper.local_table.metadata,
                      *cols,
                      schema=local_mapper.local_table.schema)
    else:
        # single table inheritance.  take any additional columns that may have
        # been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_history_mapper.local_table.c:
                col = _col_copy(column)
                super_history_mapper.local_table.append_column(col)
        table = None

    if super_history_mapper:
        bases = (super_history_mapper.class_, )
    else:
        bases = local_mapper.base_mapper.class_.__bases__
    # Plain type.__new__ so no declarative metaclass machinery runs here.
    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})

    m = mapper(versioned_cls,
               table,
               inherits=super_history_mapper,
               polymorphic_on=polymorphic_on,
               polymorphic_identity=local_mapper.polymorphic_identity)
    cls.__history_mapper__ = m

    if not super_history_mapper:
        # Root of the hierarchy: the live table itself tracks its current
        # version number, starting at 1.
        local_mapper.local_table.append_column(
            Column('version', Integer, default=1, nullable=False))
        local_mapper.add_property("version",
                                  local_mapper.local_table.c.version)
Example #9
0
        Column('name', Text, nullable=False),
        Column('homepage', Text, nullable=False),
        Column('uri', Text, nullable=False)
        )


    # Scanned files: uniquely identified by SHA-1 digest, optionally linked
    # to a project and typed via file_types.
    files = Table('files', meta,
        Column('file_id', Integer, primary_key=True),
        Column('file_type_id', Integer, nullable=False),
        Column('sha1', String(40), nullable=False),
        Column('copyright_text', Text),
        Column('project_id', Integer),
        Column('comment', Text, nullable=False),
        Column('notice', Text, nullable=False),
        # A given file content (by digest) may appear only once.
        UniqueConstraint('sha1', name='uc_file_sha1'),
        ForeignKeyConstraint(['project_id'], ['projects.project_id']),
        ForeignKeyConstraint(['file_type_id'], ['file_types.file_type_id'])
        )


    # File <-> license association, carrying the extracted license text;
    # each (file, license) pair may appear only once.
    files_licenses = Table('files_licenses', meta,
        Column('file_license_id', Integer, primary_key=True),
        Column('file_id', Integer, nullable=False),
        Column('license_id', Integer, nullable=False),
        Column('extracted_text', Text, nullable=False),
        UniqueConstraint('file_id', 'license_id', name='uc_file_license'),
        ForeignKeyConstraint(['file_id'], ['files.file_id']),
        ForeignKeyConstraint(['license_id'], ['licenses.license_id'])
        )

    creator_types = Table('creator_types', meta,
Example #10
0
class TransferOperation(Base):
    """A blockchain transfer operation row (``dpds_op_transfers``).

    dPay Blockchain Example
    ======================
    {
      "amount": "833.000 BEX",
      "from": "admin",
      "to": "jared",
      "memo": ""
    }

    """

    __tablename__ = 'dpds_op_transfers'
    __table_args__ = (
        # An operation is addressed by its position in the chain.
        PrimaryKeyConstraint('block_num', 'transaction_num', 'operation_num'),
        # Account FKs are DEFERRED so rows can be inserted before the
        # referenced account row exists within the same transaction.
        ForeignKeyConstraint(['from'], ['dpds_meta_accounts.name'],
                             deferrable=True,
                             initially='DEFERRED',
                             use_alter=True),
        ForeignKeyConstraint(['to'], ['dpds_meta_accounts.name'],
                             deferrable=True,
                             initially='DEFERRED',
                             use_alter=True),
    )

    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    # 'from' is a Python keyword, so the attribute is _from mapped onto the
    # 'from' column.
    _from = Column('from', String(16))  # name:from
    to = Column(String(16))  # dpay_type:account_name_type
    amount = Column(Numeric(20, 6), nullable=False)  # dpay_type:asset
    amount_symbol = Column(String(5))  # dpay_type:asset
    memo = Column(UnicodeText)  # name:memo
    operation_type = Column(operation_types_enum,
                            nullable=False,
                            index=True,
                            default='transfer')

    # Extractors applied to the raw operation dict (helpers defined
    # elsewhere) to split "833.000 BEX" into value and symbol.
    _fields = dict(
        amount=lambda x: amount_field(x.get('amount'), num_func=float
                                      ),  # dpay_type:asset
        amount_symbol=lambda x: amount_symbol_field(x.get('amount')
                                                    ),  # dpay_type:asset
    )

    # Fields holding account names (used by machinery outside this chunk).
    _account_fields = frozenset([
        'from',
        'to',
    ])

    def dump(self):
        """Return the instance __dict__ minus SQLAlchemy's state entry."""
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        """Dump the row; optionally JSON-decode a 'json_metadata' string.

        NOTE(review): no json_metadata column is declared on this model —
        presumably shared with sibling operation classes; confirm.
        """
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = dpds.dpds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        """Serialize the row to a JSON string."""
        data_dict = self.to_dict()
        return dpds.dpds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
Example #11
0
    UniqueConstraint("class_namespace", "class_name", name="uq_classname"),
)

# Per-class attribute definitions: one row per (class, attribute key),
# removed/renamed in lockstep with its parent class via CASCADE.
InventoryClassAttributes = Table(
    "invclassattributes",
    InventoryMetadata,
    # NOTE(review): class_id is declared without an explicit type; it relies
    # on the type coming from the FK to invclasses.class_id below — confirm
    # this resolves to the intended type on the target backend.
    Column("class_id", primary_key=True, autoincrement=False),
    Column("attr_key", String(255), primary_key=True),
    Column("attr_name", String(255), nullable=False),
    Column("attr_type",
           Enum('string', 'int', 'bool', 'float', 'json'),
           nullable=False),
    Column("attr_default", String(255)),
    Column("attr_mandatory", Boolean, default=True),
    ForeignKeyConstraint(["class_id"], ["invclasses.class_id"],
                         name="fk_objectclass1",
                         ondelete="CASCADE",
                         onupdate="CASCADE"),
)

InventoryObjects = Table(
    "invobjects",
    InventoryMetadata,
    Column("object_id", Integer, primary_key=True, autoincrement=True),
    Column("object_name", String(255), nullable=False),
    Column("object_subname", String(255)),
    Column("class_id", Integer, nullable=False),
    UniqueConstraint("object_name",
                     "object_subname",
                     "class_id",
                     name="uq_objectname"),
    Index("ix_objectclass", "class_id"),
Example #12
0
class TVEpisode(MainDBBase):
    """ORM model for a single TV episode, keyed by (showid, indexer, season,
    episode). Tracks the media file on disk, metadata/thumbnail flags,
    subtitle state and scene numbering, and provides helpers for loading
    episode details, renaming files and generating metadata."""
    __tablename__ = 'tv_episodes'
    __table_args__ = (
        # An episode belongs to a show identified by (indexer id, indexer).
        ForeignKeyConstraint(['showid', 'indexer'], ['tv_shows.indexer_id', 'tv_shows.indexer']),
        Index('idx_showid_indexer', 'showid', 'indexer'),
        Index('idx_showid_indexerid', 'showid', 'indexer_id'),
        Index('idx_sta_epi_air', 'status', 'episode', 'airdate'),
        Index('idx_sea_epi_sta_air', 'season', 'episode', 'status', 'airdate'),
        Index('idx_indexer_id_airdate', 'indexer_id', 'airdate'),
    )

    # Composite primary key: show + indexer + season + episode.
    showid = Column(Integer, index=True, primary_key=True)
    indexer_id = Column(Integer, default=0)  # the episode's own id on the indexer
    indexer = Column(Integer, index=True, primary_key=True)
    season = Column(Integer, index=True, primary_key=True)
    episode = Column(Integer, index=True, primary_key=True)
    # Scene numbering can differ from the indexer's official numbering.
    scene_season = Column(Integer, default=0)
    scene_episode = Column(Integer, default=0)
    name = Column(Text, default='')
    description = Column(Text, default='')
    subtitles = Column(Text, default='')  # comma-separated subtitle language codes
    subtitles_searchcount = Column(Integer, default=0)
    subtitles_lastsearch = Column(Integer, default=0)
    airdate = Column(Date, default=datetime.datetime.min)
    hasnfo = Column(Boolean, default=False)  # an .nfo metadata file exists on disk
    hastbn = Column(Boolean, default=False)  # a .tbn thumbnail file exists on disk
    status = Column(Integer, default=UNKNOWN)
    location = Column(Text, default='')  # path of the episode's media file
    file_size = Column(BigInteger, default=0)
    release_name = Column(Text, default='')
    is_proper = Column(Boolean, default=False)
    absolute_number = Column(Integer, default=0)
    scene_absolute_number = Column(Integer, default=0)
    version = Column(Integer, default=-1)
    release_group = Column(Text, default='')

    show = relationship('TVShow', uselist=False, backref='tv_episodes', lazy='joined')

    def __init__(self, **kwargs):
        """Create an episode row and immediately sync the nfo/tbn flags
        against the files present on disk."""
        super(TVEpisode, self).__init__(**kwargs)
        self.checkForMetaFiles()

    @validates('location')
    def validate_location(self, key, location):
        """SQLAlchemy validator for ``location``: whenever a path that exists
        on disk is assigned, cache its size in ``file_size``. The assigned
        value itself is stored unchanged."""
        if not os.path.exists(location):
            return location
        self.file_size = file_size(location)
        return location

    @property
    def related_episodes(self):
        """Episodes grouped with this one (multi-part files); [] by default."""
        try:
            return self._related_episodes
        except AttributeError:
            return []

    @related_episodes.setter
    def related_episodes(self, value):
        """Attach the list of episodes that share this episode's file."""
        self._related_episodes = value

    def refresh_subtitles(self):
        """Re-scan for subtitle files and persist the refreshed language list
        when the scan reports changes worth saving."""
        found_langs, needs_save = Subtitles().refresh_subtitles(self.showid, self.season, self.episode)
        if needs_save:
            self.subtitles = ','.join(found_langs)

    def download_subtitles(self):
        """Search for and download missing subtitles for this episode.

        Skipped (returns None) when no location is set or the media file is
        missing on disk. On a search, updates the ``subtitles`` list, search
        counter and timestamp, and fires a notification for newly downloaded
        languages.

        :return: list of newly downloaded subtitle codes, or None when skipped.
        """
        if self.location == '':
            return

        if not os.path.isfile(self.location):
            sickrage.app.log.debug("%s: Episode file doesn't exist, can't download subtitles for S%02dE%02d" %
                                   (self.show.indexer_id, self.season or 0, self.episode or 0))
            return

        sickrage.app.log.debug("%s: Downloading subtitles for S%02dE%02d" % (self.show.indexer_id, self.season or 0, self.episode or 0))

        subtitles, newSubtitles = Subtitles().download_subtitles(self.showid, self.season, self.episode)

        self.subtitles = ','.join(subtitles)
        # NOTE(review): this parses as "+= (1 if ... else 1)" — both branches
        # add 1, so the conditional is redundant; possibly "else 0" or
        # "= ... + 1 if ... else 1" was intended. Confirm before simplifying.
        self.subtitles_searchcount += 1 if self.subtitles_searchcount else 1
        self.subtitles_lastsearch = datetime.datetime.now().toordinal()

        if newSubtitles:
            subtitle_list = ", ".join([Subtitles().name_from_code(newSub) for newSub in newSubtitles])
            sickrage.app.log.debug("%s: Downloaded %s subtitles for S%02dE%02d" %
                                   (self.show.indexer_id, subtitle_list, self.season or 0, self.episode or 0))

            Notifiers.mass_notify_subtitle_download(self.pretty_name(), subtitle_list)
        else:
            sickrage.app.log.debug("%s: No subtitles downloaded for S%02dE%02d" %
                                   (self.show.indexer_id, self.season or 0, self.episode or 0))

        return newSubtitles

    def checkForMetaFiles(self):
        """Re-evaluate the hasnfo/hastbn flags against every metadata provider.

        Every configured provider is consulted for both the episode metadata
        file and the episode thumbnail; a single positive result is enough to
        set the corresponding flag.

        :return: True when either flag changed as a result of the scan.
        """
        previous = (self.hasnfo, self.hastbn)

        has_nfo = False
        has_tbn = False

        if os.path.isfile(self.location):
            for provider in sickrage.app.metadata_providers.values():
                if provider.episode_metadata:
                    has_nfo = provider._has_episode_metadata(self) or has_nfo
                if provider.episode_thumbnails:
                    has_tbn = provider._has_episode_thumb(self) or has_tbn

        self.hasnfo = has_nfo
        self.hastbn = has_tbn

        # Report whether the scan changed anything.
        return previous != (self.hasnfo, self.hastbn)

    def populate_episode(self, season, episode, tvapi=None):
        """Populate this episode's fields, trying the local NFO file first and
        then the indexer; stops at the first source that succeeds.

        :param season: season number to load.
        :param episode: episode number to load.
        :param tvapi: optional pre-built indexer API handle to reuse.
        :return: True once any source populated the episode.
        :raises EpisodeNotFoundException: when every source failed.
        """
        # attempt populating episode
        success = {
            'nfo': False,
            'indexer': False
        }

        # Ordered mapping: local NFO data takes precedence over a remote
        # indexer lookup.
        for method, func in OrderedDict([
            ('nfo', lambda: self.load_from_nfo(self.location)),
            ('indexer', lambda: self.load_from_indexer(season, episode, tvapi=tvapi)),
        ]).items():

            try:
                success[method] = func()
            except NoNFOException:
                sickrage.app.log.warning("%s: There was an issue loading the NFO for episode S%02dE%02d" % (
                    self.show.indexer_id, season or 0, episode or 0))
            except EpisodeDeletedException:
                # Loader removed the episode from the DB; fall through and
                # try the remaining source(s).
                pass

            # confirm if we successfully populated the episode
            if any(success.values()):
                return True

        # we failed to populate the episode
        raise EpisodeNotFoundException("Couldn't find episode S%02dE%02d" % (season or 0, episode or 0))

    def load_from_indexer(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None):
        """Load this episode's details from the show's indexer (TVDB etc.).

        Fetches the episode record (from *cachedSeason* when given, otherwise
        via *tvapi* or a freshly built API handle), then updates name,
        description, airdate, absolute/scene numbering and finally the episode
        status based on the airdate and the file on disk.

        :param season: season to load; defaults to self.season.
        :param episode: episode to load; defaults to self.episode.
        :param cache: pass-through cache flag for a freshly built API handle.
        :param tvapi: optional pre-built indexer API handle to reuse.
        :param cachedSeason: optional pre-fetched season mapping.
        :return: True on success, False on any failure (errors are logged;
            some failure paths also delete the episode from the DB).
        """
        indexer_name = IndexerApi(self.indexer).name

        # Fall back to the episode's own numbering when not supplied.
        season = (self.season, season)[season is not None]
        episode = (self.episode, episode)[episode is not None]

        sickrage.app.log.debug("{}: Loading episode details from {} for episode S{:02d}E{:02d}".format(
            self.show.indexer_id, indexer_name, season or 0, episode or 0)
        )

        indexer_lang = self.show.lang or sickrage.app.config.indexer_default_language

        try:
            if cachedSeason is None:
                t = tvapi
                if not t:
                    # Build a fresh API handle from the show's settings.
                    lINDEXER_API_PARMS = IndexerApi(self.indexer).api_params.copy()
                    lINDEXER_API_PARMS['cache'] = cache

                    lINDEXER_API_PARMS['language'] = indexer_lang

                    if self.show.dvdorder != 0:
                        lINDEXER_API_PARMS['dvdorder'] = True

                    t = IndexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
                myEp = t[self.show.indexer_id][season][episode]
            else:
                myEp = cachedSeason[episode]
        except (indexer_error, IOError) as e:
            sickrage.app.log.debug("{} threw up an error: {}".format(indexer_name, e))

            # if the episode is already valid just log it, if not throw it up
            if self.name:
                sickrage.app.log.debug("{} timed out but we have enough info from other sources, allowing the error".format(indexer_name))
                return False
            else:
                sickrage.app.log.error("{} timed out, unable to create the episode".format(indexer_name))
                return False
        except (indexer_episodenotfound, indexer_seasonnotfound):
            sickrage.app.log.debug("Unable to find the episode on {}, has it been removed?".format(indexer_name))

            # if I'm no longer on the Indexers but I once was then delete myself from the DB
            if self.indexer_id != -1:
                self.delete_episode()
            return False

        self.indexer_id = try_int(safe_getattr(myEp, 'id'), self.indexer_id)
        if not self.indexer_id:
            # No usable id: discard pending changes and remove the episode.
            sickrage.app.log.warning("Failed to retrieve ID from " + IndexerApi(self.indexer).name)
            object_session(self).rollback()
            object_session(self).safe_commit()
            self.delete_episode()
            return False

        self.name = safe_getattr(myEp, 'episodename', self.name)
        if not myEp.get('episodename'):
            sickrage.app.log.info("This episode {} - S{:02d}E{:02d} has no name on {}. "
                                  "Setting to an empty string".format(self.show.name, season or 0, episode or 0, indexer_name))

        if not myEp.get('absolutenumber'):
            sickrage.app.log.debug("This episode {} - S{:02d}E{:02d} has no absolute number on {}".format(
                self.show.name, season or 0, episode or 0, indexer_name))
        else:
            sickrage.app.log.debug("{}: The absolute_number for S{:02d}E{:02d} is: {}".format(
                self.show.indexer_id, season or 0, episode or 0, myEp["absolutenumber"]))
            self.absolute_number = try_int(safe_getattr(myEp, 'absolutenumber'), self.absolute_number)

        self.season = season
        self.episode = episode

        # Deferred import to avoid a circular dependency at module load time.
        from sickrage.core.scene_numbering import get_scene_absolute_numbering, get_scene_numbering

        self.scene_absolute_number = get_scene_absolute_numbering(
            self.show.indexer_id,
            self.show.indexer,
            self.absolute_number,
            session=object_session(self)
        )

        self.scene_season, self.scene_episode = get_scene_numbering(
            self.show.indexer_id,
            self.show.indexer,
            self.season, self.episode,
            session=object_session(self)
        )

        self.description = safe_getattr(myEp, 'overview', self.description)

        firstaired = safe_getattr(myEp, 'firstaired') or datetime.date.min

        try:
            # Expect ISO-style "YYYY-MM-DD"; anything else is malformed.
            rawAirdate = [int(x) for x in str(firstaired).split("-")]
            self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
        except (ValueError, IndexError, TypeError):
            sickrage.app.log.warning("Malformed air date of {} retrieved from {} for ({} - S{:02d}E{:02d})".format(
                firstaired, indexer_name, self.show.name, season or 0, episode or 0))

            # if I'm incomplete on the indexer but I once was complete then just delete myself from the DB for now
            object_session(self).rollback()
            object_session(self).safe_commit()
            self.delete_episode()
            return False

        # don't update show status if show dir is missing, unless it's missing on purpose
        if not os.path.isdir(self.show.location) and not sickrage.app.config.create_missing_show_dirs and not sickrage.app.config.add_shows_wo_dir:
            sickrage.app.log.info("The show dir %s is missing, not bothering to change the episode statuses since "
                                  "it'd probably be invalid" % self.show.location)
            return False

        if self.location:
            sickrage.app.log.debug("%s: Setting status for S%02dE%02d based on status %s and location %s" %
                                   (self.show.indexer_id, season or 0, episode or 0, statusStrings[self.status],
                                    self.location))

        if not os.path.isfile(self.location):
            if self.airdate >= datetime.date.today() or not self.airdate > datetime.date.min:
                sickrage.app.log.debug(
                    "Episode airs in the future or has no airdate, marking it %s" % statusStrings[
                        UNAIRED])
                self.status = UNAIRED
            elif self.status in [UNAIRED, UNKNOWN]:
                # Only do UNAIRED/UNKNOWN, it could already be snatched/ignored/skipped, or downloaded/archived to
                # disconnected media
                sickrage.app.log.debug(
                    "Episode has already aired, marking it %s" % statusStrings[self.show.default_ep_status])
                self.status = self.show.default_ep_status if self.season > 0 else SKIPPED  # auto-skip specials
            else:
                sickrage.app.log.debug(
                    "Not touching status [ %s ] It could be skipped/ignored/snatched/archived" % statusStrings[
                        self.status])

        # if we have a media file then it's downloaded
        elif is_media_file(self.location):
            # leave propers alone, you have to either post-process them or manually change them back
            if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + Quality.ARCHIVED:
                sickrage.app.log.debug(
                    "5 Status changes from " + str(self.status) + " to " + str(
                        Quality.status_from_name(self.location)))
                self.status = Quality.status_from_name(self.location, anime=self.show.is_anime)

        # shouldn't get here probably
        else:
            sickrage.app.log.debug("6 Status changes from " + str(self.status) + " to " + str(UNKNOWN))
            self.status = UNKNOWN

        object_session(self).safe_commit()

        return True

    def load_from_nfo(self, location):
        """Load this episode's details from the NFO file next to *location*.

        Sets the episode's location (and derived status), then parses the
        matching ``.nfo`` file's ``<episodedetails>`` blocks for name,
        numbering, plot and airdate. Also refreshes the hastbn flag.

        :param location: path of the episode media file.
        :return: True when an NFO was successfully parsed (hasnfo).
        :raises NoNFOException: when the NFO exists but is malformed or is
            missing the episode title/airdate.
        """
        if not os.path.isdir(self.show.location):
            sickrage.app.log.info(
                "{}: The show dir is missing, not bothering to try loading the episode NFO".format(
                    self.show.indexer_id))
            return False

        sickrage.app.log.debug(
            "{}: Loading episode details from the NFO file associated with {}".format(self.show.indexer_id, location))

        if os.path.isfile(location):
            self.location = location
            if self.status == UNKNOWN:
                if is_media_file(self.location):
                    sickrage.app.log.debug("7 Status changes from " + str(self.status) + " to " + str(
                        Quality.status_from_name(self.location, anime=self.show.is_anime)))
                    self.status = Quality.status_from_name(self.location, anime=self.show.is_anime)

            nfoFile = replace_extension(self.location, "nfo")
            sickrage.app.log.debug(str(self.show.indexer_id) + ": Using NFO name " + nfoFile)

            self.hasnfo = False
            if os.path.isfile(nfoFile):
                try:
                    showXML = ElementTree(file=nfoFile)
                except (SyntaxError, ValueError) as e:
                    # Unparseable XML: move the file aside so it isn't retried
                    # forever, then signal the failure to the caller.
                    sickrage.app.log.warning("Error loading the NFO, backing up the NFO and skipping for now: {}".format(e))

                    try:
                        os.rename(nfoFile, nfoFile + ".old")
                    except Exception as e:
                        sickrage.app.log.warning("Failed to rename your episode's NFO file - you need to delete it or fix it: {}".format(e))

                    raise NoNFOException("Error in NFO format")

                for epDetails in showXML.iter('episodedetails'):
                    # Skip blocks that describe a different season/episode.
                    if (epDetails.findtext('season') is None or int(epDetails.findtext('season')) != self.season) or (epDetails.findtext(
                            'episode') is None or int(epDetails.findtext('episode')) != self.episode):
                        sickrage.app.log.debug("%s: NFO has an <episodedetails> block for a different episode - wanted S%02dE%02d but got "
                                               "S%02dE%02d" % (self.show.indexer_id,
                                                               self.season or 0,
                                                               self.episode or 0,
                                                               int(epDetails.findtext('season')) or 0,
                                                               int(epDetails.findtext('episode')) or 0))
                        continue

                    if epDetails.findtext('title') is None or epDetails.findtext('aired') is None:
                        raise NoNFOException("Error in NFO format (missing episode title or airdate)")

                    self.name = epDetails.findtext('title')
                    self.episode = try_int(epDetails.findtext('episode'))
                    self.season = try_int(epDetails.findtext('season'))

                    # Deferred import to avoid a circular dependency.
                    from sickrage.core.scene_numbering import get_scene_absolute_numbering, get_scene_numbering

                    self.scene_absolute_number = get_scene_absolute_numbering(
                        self.show.indexer_id,
                        self.show.indexer,
                        self.absolute_number,
                        session=object_session(self)
                    )

                    self.scene_season, self.scene_episode = get_scene_numbering(
                        self.show.indexer_id,
                        self.show.indexer,
                        self.season, self.episode,
                        session=object_session(self)
                    )

                    self.description = epDetails.findtext('plot') or self.description

                    self.airdate = datetime.date.min
                    if epDetails.findtext('aired'):
                        rawAirdate = [int(x) for x in epDetails.findtext('aired').split("-")]
                        self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])

                    self.hasnfo = True

            self.hastbn = False
            if os.path.isfile(replace_extension(nfoFile, "tbn")):
                self.hastbn = True

        object_session(self).safe_commit()

        return self.hasnfo

    def create_meta_files(self, force=False):
        """Generate the episode's NFO and thumbnail files, then re-sync the
        hasnfo/hastbn flags. Skipped when the show directory is missing."""
        if not os.path.isdir(self.show.location):
            sickrage.app.log.info(str(self.show.indexer_id) + ": The show dir is missing, not bothering to try to create metadata")
            return

        self.create_nfo(force)
        self.create_thumbnail(force)
        self.checkForMetaFiles()

    def create_nfo(self, force=False):
        """Ask every metadata provider to (re)write this episode's NFO.

        :param force: regenerate even when metadata already exists.
        :return: True when at least one provider produced metadata.
        """
        result = False

        for cur_provider in sickrage.app.metadata_providers.values():
            try:
                result = cur_provider.create_episode_metadata(self, force) or result
            except Exception:
                # BUG FIX: traceback.print_exc() writes to stderr and returns
                # None, so the log line previously carried no detail;
                # format_exc() returns the traceback text for the log.
                sickrage.app.log.debug(traceback.format_exc())

        return result

    def update_video_metadata(self):
        """Embed basic metadata (year, title, comment, genres) into the
        episode file's MP4 container tags.

        :return: True on success (unreadable stream info is treated as
            non-fatal), False when tagging failed for any other reason.
        """
        try:
            video = MP4(self.location)
            video['\xa9day'] = str(self.airdate.year)
            video['\xa9nam'] = self.name
            video['\xa9cmt'] = self.description
            video['\xa9gen'] = ','.join(self.show.genre.split('|'))
            video.save()
        except MP4StreamInfoError:
            pass
        except Exception:
            # BUG FIX: traceback.print_exc() returns None; log the formatted
            # traceback text instead so the failure is actually recorded.
            sickrage.app.log.debug(traceback.format_exc())
            return False

        return True

    def create_thumbnail(self, force=False):
        """Ask every metadata provider to write an episode thumbnail.

        :return: True when at least one provider produced a thumbnail.
        """
        created = False
        for provider in sickrage.app.metadata_providers.values():
            created = provider.create_episode_thumb(self, force) or created
        return created

    def delete_episode(self, full=False):
        """Remove this episode from the database (and optionally its file),
        and from the user's Trakt watchlist when Trakt sync is enabled.

        :param full: when True, also delete the media file from disk.
        :raises EpisodeDeletedException: always, so callers can unwind.
        """
        sickrage.app.log.debug("Deleting %s S%02dE%02d from the DB" % (self.show.name, self.season or 0, self.episode or 0))

        # delete myself from the DB
        sickrage.app.log.debug("Deleting myself from the database")

        object_session(self).query(self.__class__).filter_by(showid=self.show.indexer_id, season=self.season, episode=self.episode).delete()
        object_session(self).safe_commit()

        data = sickrage.app.notifier_providers['trakt'].trakt_episode_data_generate([(self.season, self.episode)])
        if sickrage.app.config.use_trakt and sickrage.app.config.trakt_sync_watchlist and data:
            sickrage.app.log.debug("Deleting myself from Trakt")
            sickrage.app.notifier_providers['trakt'].update_watchlist(self.show, data_episode=data, update="remove")

        if full and os.path.isfile(self.location):
            sickrage.app.log.info('Attempt to delete episode file %s' % self.location)
            try:
                os.remove(self.location)
            except OSError as e:
                sickrage.app.log.warning('Unable to delete %s: %s / %s' % (self.location, repr(e), str(e)))

        # Deliberate control-flow exception: signals deletion to callers
        # (see populate_episode, which catches it).
        raise EpisodeDeletedException()

    def fullPath(self):
        """Absolute path of the episode file inside the show directory, or
        None when no location has been set."""
        if not self.location:
            return None
        return os.path.join(self.show.location, self.location)

    def createStrings(self, pattern=None):
        """Render naming pattern(s) for this episode.

        Without *pattern*, every built-in season/episode pattern variant is
        rendered and the list of strings is returned; with a pattern, the
        single rendered string is returned.
        """
        default_patterns = (
            '%S.N.S%SE%0E',
            '%S.N.S%0SE%E',
            '%S.N.S%SE%E',
            '%S.N.S%0SE%0E',
            '%SN S%SE%0E',
            '%SN S%0SE%E',
            '%SN S%SE%E',
            '%SN S%0SE%0E',
        )

        if pattern:
            return self._format_pattern(pattern)
        return [self._format_pattern(p) for p in default_patterns]

    def pretty_name(self):
        """Render a "pretty" human-readable episode name for logging and
        notifications; anime (non-scene) and air-by-date shows each get a
        dedicated layout."""
        if self.show.anime and not self.show.scene:
            layout = '%SN - %AB - %EN'
        elif self.show.air_by_date:
            layout = '%SN - %AD - %EN'
        else:
            layout = '%SN - %Sx%0E - %EN'
        return self._format_pattern(layout)

    def proper_path(self):
        """Path where this episode SHOULD live per the renaming rules,
        relative to the show directory."""
        # Non-anime shows always use naming type 3.
        if self.show.is_anime:
            anime_type = sickrage.app.config.naming_anime
        else:
            anime_type = 3

        filename = self.formatted_filename(anime_type=anime_type)

        # Flattened layout: the file sits directly in the show dir.
        if self.show.flatten_folders and not sickrage.app.config.naming_force_folders:
            return filename

        # Otherwise prepend the formatted season/show folder.
        return os.path.join(self.formatted_dir(), filename)

    def rename(self):
        """
        Renames an episode file and all related files (metadata, subtitles)
        to the location and filename as specified in the naming settings,
        then updates this episode (and any related episodes) to the new
        location and re-checks metadata flags.
        """

        if not os.path.isfile(self.location):
            sickrage.app.log.warning(
                "Can't perform rename on " + self.location + " when it doesn't exist, skipping")
            return

        proper_path = self.proper_path()
        absolute_proper_path = os.path.join(self.show.location, proper_path)
        absolute_current_path_no_ext, file_ext = os.path.splitext(self.location)
        absolute_current_path_no_ext_length = len(absolute_current_path_no_ext)

        related_subs = []

        # Current path relative to the show dir (for the already-named check).
        current_path = absolute_current_path_no_ext

        if absolute_current_path_no_ext.startswith(self.show.location):
            current_path = absolute_current_path_no_ext[len(self.show.location):]

        sickrage.app.log.debug("Renaming/moving episode from the base path " + self.location + " to " + absolute_proper_path)

        # if it's already named correctly then don't do anything
        if proper_path == current_path:
            sickrage.app.log.debug(str(self.indexer_id) + ": File " + self.location + " is already named correctly, skipping")
            return

        # Deferred import to avoid a circular dependency.
        from sickrage.core.processors.post_processor import PostProcessor

        related_files = PostProcessor(self.location).list_associated_files(self.location, subfolders=True, rename=True)

        # This is wrong. Cause of pp not moving subs.
        if self.show.subtitles and sickrage.app.config.subtitles_dir:
            subs_path = os.path.join(sickrage.app.config.subtitles_dir, os.path.basename(self.location))
            related_subs = PostProcessor(self.location).list_associated_files(subs_path, subtitles_only=True, subfolders=True, rename=True)

        sickrage.app.log.debug("Files associated to " + self.location + ": " + str(related_files))

        # move the ep file
        result = self.rename_ep_file(self.location, absolute_proper_path, absolute_current_path_no_ext_length)

        # move related files
        for cur_related_file in related_files:
            # We need to fix something here because related files can be in subfolders and the original code doesn't
            # handle this (at all)
            cur_related_dir = os.path.dirname(os.path.abspath(cur_related_file))
            subfolder = cur_related_dir.replace(os.path.dirname(os.path.abspath(self.location)), '')
            # We now have a subfolder. We need to add that to the absolute_proper_path.
            # First get the absolute proper-path dir
            proper_related_dir = os.path.dirname(os.path.abspath(absolute_proper_path + file_ext))
            proper_related_path = absolute_proper_path.replace(proper_related_dir, proper_related_dir + subfolder)

            cur_result = self.rename_ep_file(cur_related_file, proper_related_path,
                                             absolute_current_path_no_ext_length + len(subfolder))
            if not cur_result:
                sickrage.app.log.warning(str(self.indexer_id) + ": Unable to rename file " + cur_related_file)

        for cur_related_sub in related_subs:
            absolute_proper_subs_path = os.path.join(sickrage.app.config.subtitles_dir, self.formatted_filename())
            cur_result = self.rename_ep_file(cur_related_sub, absolute_proper_subs_path,
                                             absolute_current_path_no_ext_length)
            if not cur_result:
                sickrage.app.log.warning(str(self.indexer_id) + ": Unable to rename file " + cur_related_sub)

        # save the ep
        if result:
            self.location = absolute_proper_path + file_ext
            for relEp in self.related_episodes:
                relEp.location = absolute_proper_path + file_ext

        # in case something changed with the metadata just do a quick check
        for curEp in [self] + self.related_episodes:
            curEp.checkForMetaFiles()

    def airdateModifyStamp(self):
        """
        Make the modify date and time of a file reflect the show air date and time.
        Note: Also called from postProcessor

        No-op unless the airdate_episodes option is enabled and the episode
        has an airdate, a file location, and a show with airs/network info
        (needed to resolve the air time and timezone). All failures are
        logged and swallowed.
        """

        if not all([sickrage.app.config.airdate_episodes, self.airdate, self.location, self.show, self.show.airs,
                    self.show.network]): return

        try:
            # date.min is the "no airdate" sentinel used elsewhere in this class.
            if not self.airdate > datetime.date.min:
                return

            airdatetime = sickrage.app.tz_updater.parse_date_time(self.airdate, self.show.airs, self.show.network)

            if sickrage.app.config.file_timestamp_timezone == 'local':
                airdatetime = airdatetime.astimezone(sickrage.app.tz)

            filemtime = datetime.datetime.fromtimestamp(os.path.getmtime(self.location)).replace(tzinfo=sickrage.app.tz)

            # Only touch the file when its mtime differs from the air time.
            if filemtime != airdatetime:
                import time

                airdatetime = airdatetime.timetuple()
                sickrage.app.log.debug(
                    str(self.show.indexer_id) + ": About to modify date of '" + self.location +
                    "' to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
                try:
                    if modify_file_timestamp(self.location, time.mktime(airdatetime)):
                        sickrage.app.log.info(
                            str(self.show.indexer_id) + ": Changed modify date of " + os.path.basename(self.location)
                            + " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
                    else:
                        sickrage.app.log.warning(
                            str(self.show.indexer_id) + ": Unable to modify date of " + os.path.basename(
                                self.location)
                            + " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
                except Exception:
                    sickrage.app.log.warning(
                        str(self.show.indexer_id) + ": Failed to modify date of '" + os.path.basename(self.location)
                        + "' to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
        except Exception:
            sickrage.app.log.warning(
                "{}: Failed to modify date of '{}'".format(self.show.indexer_id, os.path.basename(self.location)))

    def _ep_name(self):
        """
        Name of the episode to use during renaming, merging the names of
        related (multi-part) episodes.
        Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name"
            "Ep Name" and "Other Ep Name" becomes "MultiPartEpisode"
        """

        part_suffix_regex = r"(.*) \(\d{1,2}\)"

        ordered_related = sorted(self.related_episodes, key=lambda k: k.episode)
        all_names = [self.name] + [ep.name for ep in ordered_related]

        # Collect the common base name, if every name is "<base> (n)" with
        # the same <base>; otherwise shared_base stays None.
        shared_base = None
        bases = set()
        for candidate in all_names:
            match = re.match(part_suffix_regex, candidate)
            if not match:
                bases = None
                break
            bases.add(match.group(1))
            if len(bases) > 1:
                bases = None
                break
        if bases:
            shared_base = bases.pop()

        if bases is not None:
            # All parts share one base name; fall back to the episode's own
            # name when the base is empty.
            return shared_base or self.name

        if len(self.related_episodes):
            return "MultiPartEpisode"
        return self.name

    def _replace_map(self):
        """
        Generates a replacement map for this episode which maps all possible custom naming patterns to the correct
        value for this episode.

        Returns: A dict with patterns as the keys and their replacement values as the values.
        """

        ep_name = self._ep_name()

        # scene-style "dotted" variant of a name
        def dot(name):
            return sanitize_scene_name(name)

        # underscore variant: spaces and dashes become '_'
        def us(name):
            return re.sub('[ -]', '_', name)

        # strip the file extension and non-release-group tags from a release name
        def release_name(name):
            if name:
                name = remove_non_release_groups(remove_extension(name))
            return name

        # parse a (cleaned) release name and return its release group,
        # or '' when the name is empty or cannot be parsed
        def release_group(show_id, name):
            from sickrage.core.nameparser import NameParser, InvalidNameException, InvalidShowException

            if name:
                name = remove_non_release_groups(remove_extension(name))

                try:
                    parse_result = NameParser(name, show_id=show_id, naming_pattern=True).parse(name)
                    if parse_result.release_group:
                        return parse_result.release_group
                except (InvalidNameException, InvalidShowException) as e:
                    sickrage.app.log.debug("Unable to get parse release_group: {}".format(e))

            return ''

        # only the quality part of the episode's composite status is needed here
        __, epQual = Quality.split_composite_status(self.status)

        if sickrage.app.config.naming_strip_year:
            # drop a trailing "(YYYY)" from the show name when configured to
            show_name = re.sub(r"\(\d+\)$", "", self.show.name).rstrip()
        else:
            show_name = self.show.name

        # try to get the release group
        rel_grp = {"SiCKRAGE": 'SiCKRAGE'}
        if hasattr(self, 'location'):  # from the location name
            rel_grp['location'] = release_group(self.show.indexer_id, self.location)
            if not rel_grp['location']:
                del rel_grp['location']
        if hasattr(self, '_release_group'):  # from the release group field in db
            rel_grp['database'] = self.release_group
            if not rel_grp['database']:
                del rel_grp['database']
        if hasattr(self, 'release_name'):  # from the release name field in db
            rel_grp['release_name'] = release_group(self.show.indexer_id, self.release_name)
            if not rel_grp['release_name']:
                del rel_grp['release_name']

        # use release_group, release_name, location in that order
        if 'database' in rel_grp:
            relgrp = 'database'
        elif 'release_name' in rel_grp:
            relgrp = 'release_name'
        elif 'location' in rel_grp:
            relgrp = 'location'
        else:
            relgrp = 'SiCKRAGE'

        # try to get the release encoder to comply with scene naming standards
        encoder = Quality.scene_quality_from_name(self.release_name.replace(rel_grp[relgrp], ""), epQual)
        if encoder:
            sickrage.app.log.debug("Found codec for '" + show_name + ": " + ep_name + "'.")

        # token -> value map; '.'/'_' token variants use dotted/underscored
        # forms of the same value, '%0X' tokens are zero-padded
        return {
            '%SN': show_name,
            '%S.N': dot(show_name),
            '%S_N': us(show_name),
            '%EN': ep_name,
            '%E.N': dot(ep_name),
            '%E_N': us(ep_name),
            '%QN': Quality.qualityStrings[epQual],
            '%Q.N': dot(Quality.qualityStrings[epQual]),
            '%Q_N': us(Quality.qualityStrings[epQual]),
            '%SQN': Quality.sceneQualityStrings[epQual] + encoder,
            '%SQ.N': dot(Quality.sceneQualityStrings[epQual] + encoder),
            '%SQ_N': us(Quality.sceneQualityStrings[epQual] + encoder),
            '%SY': str(self.show.startyear),
            '%S': str(self.season),
            '%0S': '%02d' % self.season,
            '%E': str(self.episode),
            '%0E': '%02d' % self.episode,
            '%XS': str(self.scene_season),
            '%0XS': '%02d' % self.scene_season,
            '%XE': str(self.scene_episode),
            '%0XE': '%02d' % self.scene_episode,
            '%AB': '%(#)03d' % {'#': self.absolute_number},
            '%XAB': '%(#)03d' % {'#': self.scene_absolute_number},
            '%RN': release_name(self.release_name),
            '%RG': rel_grp[relgrp],
            '%CRG': rel_grp[relgrp].upper(),
            '%AD': str(self.airdate).replace('-', ' '),
            '%A.D': str(self.airdate).replace('-', '.'),
            '%A_D': us(str(self.airdate)),
            '%A-D': str(self.airdate),
            '%Y': str(self.airdate.year),
            '%M': str(self.airdate.month),
            '%D': str(self.airdate.day),
            '%0M': '%02d' % self.airdate.month,
            '%0D': '%02d' % self.airdate.day,
            '%RT': "PROPER" if self.is_proper else "",
        }

    def _format_string(self, pattern, replace_map):
        """
        Fill a naming pattern by substituting every template token with its value.

        :param pattern: naming pattern containing %-style tokens (e.g. '%SN', '%0E')
        :param replace_map: dict mapping each token to its replacement string
        :returns: the pattern with every token (and its lowercase variant) replaced
        """

        filled = pattern

        # Substitute tokens in reverse-sorted order so longer tokens sharing a
        # prefix (e.g. '%SN') are handled before shorter ones (e.g. '%S') can
        # clobber them; each token is also replaced in its lowercase form with
        # a lowercased value.
        for token in sorted(replace_map, reverse=True):
            value = replace_map[token]
            filled = filled.replace(token, sanitize_file_name(value))
            filled = filled.replace(token.lower(), sanitize_file_name(value.lower()))

        return filled

    def _format_pattern(self, pattern=None, multi=None, anime_type=None):
        """
        Manipulates an episode naming pattern and then fills the template in

        :param pattern: naming pattern to fill; defaults to the configured naming_pattern
        :param multi: multi-episode numbering style (NAMING_* constant); defaults to config
        :param anime_type: anime absolute-numbering style; 3 disables absolute numbers
        :returns: the fully formatted name string
        """

        if pattern is None:
            pattern = sickrage.app.config.naming_pattern

        if multi is None:
            multi = sickrage.app.config.naming_multi_ep

        if sickrage.app.config.naming_custom_anime:
            if anime_type is None:
                anime_type = sickrage.app.config.naming_anime
        else:
            anime_type = 3

        replace_map = self._replace_map()

        result_name = pattern

        # if there's no release group in the db, let the user know we replaced it
        if replace_map['%RG'] and replace_map['%RG'] != 'SiCKRAGE':
            if not hasattr(self, '_release_group'):
                sickrage.app.log.debug(
                    "Episode has no release group, replacing it with '" + replace_map['%RG'] + "'")
                self.release_group = replace_map['%RG']  # if release_group is not in the db, put it there
            elif not self.release_group:
                sickrage.app.log.debug(
                    "Episode has no release group, replacing it with '" + replace_map['%RG'] + "'")
                self.release_group = replace_map['%RG']  # if release_group is not in the db, put it there

        # if there's no release name then replace it with a reasonable facsimile
        if not replace_map['%RN']:

            if self.show.air_by_date or self.show.sports:
                result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-' + replace_map['%RG'])
                result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-' + replace_map['%RG'].lower())

            elif anime_type != 3:
                result_name = result_name.replace('%RN', '%S.N.%AB.%E.N-' + replace_map['%RG'])
                result_name = result_name.replace('%rn', '%s.n.%ab.%e.n-' + replace_map['%RG'].lower())

            else:
                result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-' + replace_map['%RG'])
                result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-' + replace_map['%RG'].lower())

                # LOGGER.debug(u"Episode has no release name, replacing it with a generic one: " + result_name)

        # drop the %RT (PROPER) token and its surrounding separators if not a proper
        if not replace_map['%RT']:
            result_name = re.sub('([ _.-]*)%RT([ _.-]*)', r'\2', result_name)

        # split off ep name part only
        name_groups = re.split(r'[\\/]', result_name)

        # figure out the double-ep numbering style for each group, if applicable
        for cur_name_group in name_groups:

            season_format = sep = ep_sep = ep_format = None

            # matches a season token followed by an episode token, capturing the
            # separators around and between them (e.g. " - S%0SE%0E ")
            season_ep_regex = r'''
                                (?P<pre_sep>[ _.-]*)
                                ((?:s(?:eason|eries)?\s*)?%0?S(?![._]?N|Y))
                                (.*?)
                                (%0?E(?![._]?N))
                                (?P<post_sep>[ _.-]*)
                              '''
            # matches a lone episode token (no season), e.g. "E%0E" or "%E"
            ep_only_regex = r'(E?%0?E(?![._]?N))'

            # try the normal way
            season_ep_match = re.search(season_ep_regex, cur_name_group, re.I | re.X)
            ep_only_match = re.search(ep_only_regex, cur_name_group, re.I | re.X)

            # if we have a season and episode then collect the necessary data
            if season_ep_match:
                season_format = season_ep_match.group(2)
                ep_sep = season_ep_match.group(3)
                ep_format = season_ep_match.group(4)
                sep = season_ep_match.group('pre_sep')
                if not sep:
                    sep = season_ep_match.group('post_sep')
                if not sep:
                    sep = ' '

                # force 2-3-4 format if they chose to extend
                if multi in (NAMING_EXTEND, NAMING_LIMITED_EXTEND,
                             NAMING_LIMITED_EXTEND_E_PREFIXED):
                    ep_sep = '-'

                regex_used = season_ep_regex

            # if there's no season then there's not much choice so we'll just force them to use 03-04-05 style
            elif ep_only_match:
                season_format = ''
                ep_sep = '-'
                ep_format = ep_only_match.group(1)
                sep = ''
                regex_used = ep_only_regex

            else:
                continue

            # we need at least this much info to continue
            if not ep_sep or not ep_format:
                continue

            # start with the ep string, eg. E03
            ep_string = self._format_string(ep_format.upper(), replace_map)
            for other_ep in self.related_episodes:

                # for limited extend we only append the last ep
                if multi in (NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED) and other_ep != \
                        self.related_episodes[-1]:
                    continue

                elif multi == NAMING_DUPLICATE:
                    # add " - S01"
                    ep_string += sep + season_format

                elif multi == NAMING_SEPARATED_REPEAT:
                    ep_string += sep

                # add "E04"
                ep_string += ep_sep

                if multi == NAMING_LIMITED_EXTEND_E_PREFIXED:
                    ep_string += 'E'

                # format the related episode's number with its own replace map
                ep_string += other_ep._format_string(ep_format.upper(), other_ep._replace_map())

            if anime_type != 3:
                # anime numbering: fall back to the episode number when no
                # absolute number is known
                if self.absolute_number == 0:
                    curAbsolute_number = self.episode
                else:
                    curAbsolute_number = self.absolute_number

                if self.season != 0:  # dont set absolute numbers if we are on specials !
                    if anime_type == 1:  # this crazy person wants both ! (note: +=)
                        ep_string += sep + "%(#)03d" % {"#": curAbsolute_number}
                    elif anime_type == 2:  # total anime freak only need the absolute number ! (note: =)
                        ep_string = "%(#)03d" % {"#": curAbsolute_number}

                    for relEp in self.related_episodes:
                        if relEp.absolute_number != 0:
                            ep_string += '-' + "%(#)03d" % {"#": relEp.absolute_number}
                        else:
                            ep_string += '-' + "%(#)03d" % {"#": relEp.episode}

            # splice the built episode string back into the name group,
            # preserving the captured separators
            regex_replacement = None
            if anime_type == 2:
                regex_replacement = r'\g<pre_sep>' + ep_string + r'\g<post_sep>'
            elif season_ep_match:
                regex_replacement = r'\g<pre_sep>\g<2>\g<3>' + ep_string + r'\g<post_sep>'
            elif ep_only_match:
                regex_replacement = ep_string

            if regex_replacement:
                # fill out the template for this piece and then insert this piece into the actual pattern
                cur_name_group_result = re.sub('(?i)(?x)' + regex_used, regex_replacement, cur_name_group)
                # cur_name_group_result = cur_name_group.replace(ep_format, ep_string)
                # LOGGER.debug(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group)
                result_name = result_name.replace(cur_name_group, cur_name_group_result)

        # finally substitute all remaining tokens with their values
        result_name = self._format_string(result_name, replace_map)

        sickrage.app.log.debug("Formatting pattern: " + pattern + " -> " + result_name)

        return result_name

    def formatted_filename(self, pattern=None, multi=None, anime_type=None):
        """
        Build just the filename of the episode, formatted per the naming settings.

        :param pattern: naming pattern to use; when None one is chosen from config
        :param multi: multi-episode naming style, passed through to _format_pattern
        :param anime_type: anime numbering style, passed through to _format_pattern
        :returns: sanitized file name without any directory components
        """

        if pattern is None:
            cfg = sickrage.app.config
            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
            if self.show.air_by_date and cfg.naming_custom_abd and not self.related_episodes:
                pattern = cfg.naming_abd_pattern
            elif self.show.sports and cfg.naming_custom_sports and not self.related_episodes:
                pattern = cfg.naming_sports_pattern
            elif self.show.anime and cfg.naming_custom_anime:
                pattern = cfg.naming_anime_pattern
            else:
                pattern = cfg.naming_pattern

        # the pattern may contain directories; keep only the final segment
        filename_part = re.split(r'[\\/]', pattern)[-1]

        return sanitize_file_name(self._format_pattern(filename_part, multi, anime_type))

    def formatted_dir(self, pattern=None, multi=None):
        """
        Build just the folder name of the episode.

        :param pattern: naming pattern to use; when None one is chosen from config
        :param multi: multi-episode naming style, passed through to _format_pattern
        :returns: formatted directory portion, or '' when the pattern has no dirs
        """

        if pattern is None:
            cfg = sickrage.app.config
            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
            if self.show.air_by_date and cfg.naming_custom_abd and not self.related_episodes:
                pattern = cfg.naming_abd_pattern
            elif self.show.sports and cfg.naming_custom_sports and not self.related_episodes:
                pattern = cfg.naming_sports_pattern
            elif self.show.anime and cfg.naming_custom_anime:
                pattern = cfg.naming_anime_pattern
            else:
                pattern = cfg.naming_pattern

        # everything except the final segment is the directory part
        name_groups = re.split(r'[\\/]', pattern)
        if len(name_groups) == 1:
            return ''

        return self._format_pattern(os.sep.join(name_groups[:-1]), multi)

    def rename_ep_file(self, cur_path, new_path, old_path_length=0):
        """
        Creates all folders needed to move a file to its new location, renames it, then cleans up any folders
        left that are now empty.

        :param cur_path: The absolute path to the file you want to move/rename
        :param new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
        :param old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
        :returns: True on success, False when the move/rename failed
        """

        # Split the current path into name + extension.  When the caller gives
        # a usable old (extensionless) path length we slice there, which keeps
        # multi-dot names intact; otherwise fall back to splitext.
        if old_path_length and old_path_length <= len(cur_path):
            cur_file_name = cur_path[:old_path_length]
            cur_file_ext = cur_path[old_path_length:]
        else:
            cur_file_name, cur_file_ext = os.path.splitext(cur_path)

        # For subtitle files, preserve a valid language code that may precede
        # the extension (e.g. "episode.en.srt" keeps ".en.srt").
        if cur_file_ext[1:] in Subtitles().subtitle_extensions:
            sublang = os.path.splitext(cur_file_name)[1][1:]
            if sublang in Subtitles().subtitle_code_filter():
                cur_file_ext = '.' + sublang + cur_file_ext

        # carry the original extension over to the destination path
        new_path += cur_file_ext

        make_dirs(os.path.dirname(new_path))

        # move the file
        try:
            sickrage.app.log.info("Renaming file from %s to %s" % (cur_path, new_path))
            move_file(cur_path, new_path)
        except (OSError, IOError) as e:
            sickrage.app.log.warning("Failed renaming %s to %s : %r" % (cur_path, new_path, e))
            return False

        # prune any directories the move left empty
        delete_empty_folders(os.path.dirname(cur_path))

        return True

    def __str__(self):
        """
        Return a multi-line, human-readable dump of this episode's state,
        headed by e.g. "'Show Name' - S01E02 - 'Episode Name'".
        """

        to_return = ""
        # BUGFIX: the header previously used "%02r" for season/episode, but the
        # zero-pad flag is ignored for %r, so "S%02rE%02r" rendered as e.g.
        # "S 1E 2"; %02d produces the intended zero-padded "S01E02".
        to_return += "%r - S%02dE%02d - %r\n" % (self.show.name, self.season, self.episode, self.name)
        to_return += "location: %r\n" % self.location
        to_return += "description: %r\n" % self.description
        to_return += "subtitles: %r\n" % ",".join(self.subtitles)
        to_return += "subtitles_searchcount: %r\n" % self.subtitles_searchcount
        to_return += "subtitles_lastsearch: %r\n" % self.subtitles_lastsearch
        to_return += "airdate: %r\n" % self.airdate
        to_return += "hasnfo: %r\n" % self.hasnfo
        to_return += "hastbn: %r\n" % self.hastbn
        to_return += "status: %r\n" % self.status

        return to_return
from sqlalchemy import Table, Column, Integer, Text, ForeignKeyConstraint
from app.db.base_class import Base

# Association table linking tracks to departments (many-to-many).
# All four columns form the composite primary key; both composite foreign
# keys include university_id, so a track can only be associated with
# departments of the same university.
track_department = Table(
    "track_department_association",
    Base.metadata,
    Column("university_id", Integer, primary_key=True),
    Column("track_id", Text, primary_key=True),
    Column("faculty_id", Text, primary_key=True),
    Column("department_id", Text, primary_key=True),
    # composite FK to track (university_id, id)
    ForeignKeyConstraint(
        ["university_id", "track_id"],
        ["track.university_id", "track.id"],
    ),
    # composite FK to department (university_id, faculty_id, id)
    ForeignKeyConstraint(
        ["university_id", "faculty_id", "department_id"],
        ["department.university_id", "department.faculty_id", "department.id"],
    ),
)
Example #14
0
def _history_mapper(local_mapper):
    """
    Build and attach a history (versioning) mapper for the class mapped by
    ``local_mapper``.

    Creates a ``<table>_history`` table mirroring the mapped table plus
    ``version``/``version_date`` columns, maps a dynamically created
    ``<Class>History`` class to it, stores that mapper on the original class
    as ``__history_mapper__``, and adds ``version``/``version_date`` columns
    to the base versioned class itself.
    """
    cls = local_mapper.class_

    # set the "active_history" flag
    # on on column-mapped attributes so that the old version
    # of the info is always loaded (currently sets it on all attributes)
    for prop in local_mapper.iterate_properties:
        getattr(local_mapper.class_, prop.key).impl.active_history = True

    super_mapper = local_mapper.inherits
    super_history_mapper = getattr(cls, '__history_mapper__', None)

    polymorphic_on = None
    super_fks = []
    if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
        cols = []
        for column in local_mapper.local_table.c:

            # the history table gets its own version columns (added below)
            if column.name in ('version', 'version_date'):
                continue

            col = column.copy()
            col.unique = False

            # joined-table inheritance: point FKs at the superclass history table
            if super_mapper and col_references_table(column,
                                                     super_mapper.local_table):
                super_fks.append(
                    (col.key,
                     list(super_history_mapper.local_table.primary_key)[0]))

            cols.append(col)

            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

        if super_mapper:
            super_fks.append(
                ('version',
                 super_history_mapper.base_mapper.local_table.c.version))
            super_fks.append(
                ('version_date',
                 super_history_mapper.base_mapper.local_table.c.version_date))
        cols.append(Column('version', Integer, primary_key=True))
        cols.append(
            Column('version_date',
                   DateTime,
                   # BUGFIX: pass the callable, not datetime.now() -- calling
                   # it here would freeze a single timestamp at mapper-setup
                   # time instead of stamping each row at insert time.
                   default=datetime.datetime.now,
                   nullable=False))

        if super_fks:
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        table = Table(local_mapper.local_table.name + '_history',
                      local_mapper.local_table.metadata, *cols)
    else:
        # single table inheritance.  take any additional columns that may have
        # been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_history_mapper.local_table.c:
                col = column.copy()
                col.unique = False
                super_history_mapper.local_table.append_column(col)
        table = None

    if super_history_mapper:
        bases = (super_history_mapper.class_, )
    else:
        bases = local_mapper.base_mapper.class_.__bases__
    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})

    m = mapper(versioned_cls,
               table,
               inherits=super_history_mapper,
               polymorphic_on=polymorphic_on,
               polymorphic_identity=local_mapper.polymorphic_identity)
    cls.__history_mapper__ = m

    if not super_history_mapper:
        cls.version = Column('version', Integer, default=1, nullable=False)
        # BUGFIX: same as above -- use the callable so each row gets a fresh
        # timestamp on insert.
        cls.version_date = Column('version_date',
                                  DateTime,
                                  default=datetime.datetime.now,
                                  nullable=False)
Example #15
0
class FeatureValues(DistributedExperimentModel):
    '''An individual value of a :class:`Feature <tmlib.models.feature.Feature>`
    that was extracted for a given
    :class:`Mapobject <tmlib.models.mapobject.Mapobject>`.
    '''

    __tablename__ = 'feature_values'

    __table_args__ = (PrimaryKeyConstraint('partition_key', 'mapobject_id',
                                           'tpoint'),
                      ForeignKeyConstraint(
                          ['mapobject_id', 'partition_key'],
                          ['mapobjects.id', 'mapobjects.partition_key'],
                          ondelete='CASCADE'))

    __distribute_by__ = 'partition_key'

    __distribution_method__ = 'hash'

    __colocate_with__ = 'mapobjects'

    # key that determines on which shard the row is stored
    partition_key = Column(Integer, index=True, nullable=False)

    #: Dict[str, str]: mapping of feature ID to value encoded as text
    # NOTE: HSTORE is more performant than JSONB upon SELECT and upon INSERT.
    # However, it only supports TEXT, such that values would need to be casted
    # when loaded into Python. One could define a custom type for this purpose.
    values = Column(HSTORE)

    #: int: zero-based time point index
    tpoint = Column(Integer, index=True)

    #: int: ID of the parent mapobject
    mapobject_id = Column(BigInteger, index=True)

    def __init__(self, partition_key, mapobject_id, values, tpoint=None):
        '''
        Parameters
        ----------
        partition_key: int
            key that determines on which shard the object will be stored
        mapobject_id: int
            ID of the mapobject to which values should be assigned
        values: Dict[str, float]
            mapping of feature ID to value
        tpoint: int, optional
            zero-based time point index
        '''
        self.partition_key = partition_key
        self.mapobject_id = mapobject_id
        self.tpoint = tpoint
        self.values = values

    @classmethod
    def _add(cls, connection, instance):
        '''Insert a single row; on conflict with an existing
        (mapobject_id, tpoint) row, merge the new values into the stored
        HSTORE.
        '''
        if not isinstance(instance, FeatureValues):
            raise TypeError(
                'Object must have type tmlib.models.feature.FeatureValues')
        # BUGFIX: the INSERT column list previously spelled the first column
        # "parition_key", which made the statement fail against the actual
        # "partition_key" column.
        connection.execute(
            '''
            INSERT INTO feature_values AS v (
                partition_key, values, mapobject_id, tpoint
            )
            VALUES (
                %(partition_key)s, %(values)s, %(mapobject_id)s, %(tpoint)s
            )
            ON CONFLICT
            ON CONSTRAINT feature_values_mapobject_id_tpoint_key
            DO UPDATE
            SET values = v.values || %(values)s
            WHERE v.mapobject_id = %(mapobject_id)s
            AND v.tpoint = %(tpoint)s
        ''', {
                'partition_key': instance.partition_key,
                'values': instance.values,
                'mapobject_id': instance.mapobject_id,
                'tpoint': instance.tpoint
            })

    @classmethod
    def _bulk_ingest(cls, connection, instances):
        '''Bulk-load rows via COPY, serializing each values dict to the
        textual HSTORE format ("k=>v" pairs joined by commas).
        '''
        f = StringIO()
        w = csv.writer(f, delimiter=';')
        for obj in instances:
            # .items() instead of the Python-2-only .iteritems(); behavior is
            # identical on both interpreter versions
            w.writerow(
                (obj.partition_key, obj.mapobject_id, obj.tpoint, ','.join([
                    '=>'.join([k, str(v)]) for k, v in obj.values.items()
                ])))
        columns = ('partition_key', 'mapobject_id', 'tpoint', 'values')
        f.seek(0)
        connection.copy_from(f,
                             cls.__table__.name,
                             sep=';',
                             columns=columns,
                             null='')
        f.close()

    def __repr__(self):
        return ('<FeatureValues(id=%r, tpoint=%r, mapobject_id=%r)>' %
                (self.id, self.tpoint, self.mapobject_id))
def upgrade():
    """Add TaskMap and map_index on TaskInstance."""
    # We need to first remove constraints on task_reschedule since they depend on task_instance.
    with op.batch_alter_table("task_reschedule") as batch_op:
        batch_op.drop_constraint("task_reschedule_ti_fkey", "foreignkey")
        batch_op.drop_index("idx_task_reschedule_dag_task_run")

    # Change task_instance's primary key.
    with op.batch_alter_table("task_instance") as batch_op:
        # I think we always use this name for TaskInstance after 7b2661a43ba3?
        batch_op.drop_constraint("task_instance_pkey", type_="primary")
        # server_default=-1 lets existing (unmapped) rows satisfy NOT NULL
        batch_op.add_column(
            Column("map_index",
                   Integer,
                   nullable=False,
                   server_default=text("-1")))
        batch_op.create_primary_key(
            "task_instance_pkey", ["dag_id", "task_id", "run_id", "map_index"])

    # Re-create task_reschedule's constraints.
    with op.batch_alter_table("task_reschedule") as batch_op:
        batch_op.add_column(
            Column("map_index",
                   Integer,
                   nullable=False,
                   server_default=text("-1")))
        # FK now includes map_index to match task_instance's new composite PK
        batch_op.create_foreign_key(
            "task_reschedule_ti_fkey",
            "task_instance",
            ["dag_id", "task_id", "run_id", "map_index"],
            ["dag_id", "task_id", "run_id", "map_index"],
            ondelete="CASCADE",
        )
        batch_op.create_index(
            "idx_task_reschedule_dag_task_run",
            ["dag_id", "task_id", "run_id", "map_index"],
            unique=False,
        )

    # Create task_map.
    op.create_table(
        "task_map",
        Column("dag_id", StringID(), primary_key=True),
        Column("task_id", StringID(), primary_key=True),
        Column("run_id", StringID(), primary_key=True),
        Column("map_index", Integer, primary_key=True),
        Column("length", Integer, nullable=False),
        Column("keys", ExtendedJSON, nullable=True),
        CheckConstraint("length >= 0", name="task_map_length_not_negative"),
        # rows are removed automatically when the owning task instance goes away
        ForeignKeyConstraint(
            ["dag_id", "task_id", "run_id", "map_index"],
            [
                "task_instance.dag_id",
                "task_instance.task_id",
                "task_instance.run_id",
                "task_instance.map_index",
            ],
            name="task_map_task_instance_fkey",
            ondelete="CASCADE",
        ),
    )
from sqlalchemy import Table, Column, MetaData, Integer, String, ForeignKeyConstraint, DateTime

metadata = MetaData()
# NOTE(review): clear() on a freshly created MetaData is a no-op -- presumably
# defensive against re-import/reload; confirm whether it can be removed.
metadata.clear()

# Lookup table of post categories; titles are unique.
PostCategories = Table(
    'post_categories',
    metadata,
    Column('id', Integer, primary_key=True),
    Column('title', String(length=100), nullable=False, unique=True),
)

# Blog post; each post references one category and is deleted along with it
# (ondelete='CASCADE').
Post = Table(
    'post', metadata, Column('id', Integer, primary_key=True),
    Column('category_id', Integer, nullable=False),
    Column('title', String(length=100), nullable=False),
    Column('text', String, nullable=False),
    Column('main_img', String, nullable=False),
    Column('created_at', DateTime, nullable=False),
    Column('last_updated', DateTime, nullable=False),
    ForeignKeyConstraint(['category_id'], [PostCategories.c.id],
                         name='post_category_id_fkey',
                         ondelete='CASCADE'))
Example #18
0
class Transaction(Base):
    '''Transaction model.

    Records ARP request traffic between a sender IP and a target IP,
    with a running request count.

    Additional methods have been defined to facilitate extraction
    of data for table columns.
    '''

    __tablename__ = 'transaction'
    id = Column(Integer, primary_key=True)
    sender_ip_id = Column(Integer, nullable=False)
    target_ip_id = Column(Integer, nullable=False)
    count = Column(Integer, default=1)
    sender = relationship('IP',
                          back_populates='sender_transactions',
                          primaryjoin='and_(Transaction.sender_ip_id==IP.id)')
    target = relationship('IP',
                          back_populates='target_transactions',
                          primaryjoin='and_(Transaction.target_ip_id==IP.id)')
    # NOTE(review): this ForeignKeyConstraint is constructed as a bare
    # expression in the class body and never attached via __table_args__,
    # so it presumably never reaches the table definition in declarative
    # mode -- confirm, and if a DB-level FK is intended move it into
    # __table_args__ (likely as two separate single-column FKs to IP.id).
    ForeignKeyConstraint(
        [sender_ip_id, target_ip_id],
        [IP.id, IP.id],
    )

    def build_target(self, *args, **kwargs):
        '''Return the target IP's value.
        '''

        return self.target.value

    def build_sender(self, *args, **kwargs):
        '''Return the sender IP's value.
        '''

        return self.sender.value

    def stale_target(self, display_false=True, *args, **kwargs):
        '''Return True if the target is stale, i.e. arp resolution
        has been attempted and no MAC address has been set.
        '''

        if self.target.arp_resolve_attempted and \
                not self.target.mac_address:
            return True
        else:
            # display_false selects between an explicit False and an
            # empty string (used for blank table cells)
            if display_false:
                return False
            else:
                return ''

    def build_count(self, *args, **kwargs):
        '''Return the count of ARP requests as a string value.
        '''

        return str(self.count)

    def build_arp_count(self, *args, **kwargs):
        '''Alias for build_count, exposed under the arp_count handle.
        '''

        return self.build_count(*args, **kwargs)

    def build_stale(self,
                    color_profile=None,
                    display_false=True,
                    *args,
                    **kwargs):
        '''Build the value for the stale column. The character
        returned will be derived from the color_profile value.
        '''

        if not color_profile or not color_profile.stale_emoji:
            stale_char = True
        else:
            stale_char = color_profile.stale_emoji

        if self.stale_target():
            return stale_char
        elif not self.target.arp_resolve_attempted:
            return '[UNCONFIRMED]'
        else:
            if display_false:
                return False
            else:
                return ''

    def build_snac(self,
                   color_profile=None,
                   display_false=True,
                   *args,
                   **kwargs):
        '''Return True when any transaction from this sender has a stale
        target (resolution attempted, no MAC found).
        '''

        has_snac = False
        for t in self.sender.sender_transactions:
            targ = t.target
            if targ.arp_resolve_attempted and not targ.mac_address:
                has_snac = True
                break

        return has_snac

    def build_target_mac(self, *args, **kwargs):
        '''Return the MAC address for the target:

        - [STALE TARGET] - returned when the target is stale
        - [UNRESOLVED] - indicates that no MAC is available 
        and ARP resolution has not been attempted.
        - MAC ADDRESS - when a MAC value is available for the IP
        '''

        if self.stale_target():
            return '[STALE TARGET]'
        elif self.target.mac_address:
            return self.target.mac_address
        elif not self.target.arp_resolve_attempted:
            return '[UNRESOLVED]'

    def build_sender_mac(self, new_sender=False, *args, **kwargs):
        '''Return the MAC address for the sender of the
        transaction. Guaranteed to exist since it is associated
        with the sender itself.
        '''

        # only shown for the first row of a sender group (new_sender)
        if new_sender:
            return self.sender.mac_address
        else:
            return ''

    def build_sender_ptr(self, *args, new_sender=False, **kwargs):
        '''Return the PTR value for the sender if available.
        '''

        sptr = self.sender.ptr[0].value if self.sender.ptr \
                and new_sender else ''

        return sptr

    def build_target_ptr(self, *args, **kwargs):
        '''Return the PTR value for the target if available.
        '''

        tptr = self.target.ptr[0].value if self.target.ptr else ''

        return tptr

    def build_target_forward(self, *args, **kwargs):
        '''Build the forward IP address for the PTR value of
        a given target address. This is useful when determining
        if a given target may have a MITM opportunity when the
        target address is stale.
        '''

        tptr = self.target.ptr[0] if self.target.ptr else None

        return tptr.forward_ip if tptr and tptr.forward_ip else ''

    def build_mitm_op(self, display_false=True, *args, **kwargs):
        '''Check the target of a transaction to determine
        if a potential MITM opportunity exists when a new
        forward address is available for a previous PTR
        address.
        '''

        if self.target.ptr:

            # stale target whose PTR now resolves to a different IP
            if self.stale_target() and self.target.ptr[0].forward_ip and \
                    self.target.ptr[0].forward_ip != self.target.value:
                return f'T-IP:{self.target.value} != ' \
                       f'PTR-FWD:{self.target.ptr[0].forward_ip}'

        if display_false:
            return False
        else:
            return ''

    def build_from_handle(self, handle, *args, **kwargs):
        '''Build a column value from attribute name.
        '''

        return self.__getattribute__(handle)(*args, **kwargs)

    # short alias used by table-rendering callers
    bfh = build_from_handle
Example #19
0
 def test_deferrable_table_fk(self):
     # Exercise deferrable options on a table-level FK constraint: the
     # factory builds the constraint with whatever kwargs the shared
     # _test_deferrable helper passes in.
     factory = lambda **kw: ForeignKeyConstraint(['b'], ['tbl.a'], **kw)
     self._test_deferrable(factory)
Example #20
0
class RenderedTaskInstanceFields(Base):
    """Save Rendered Template Fields.

    One row per (dag_id, task_id, run_id, map_index) task instance; the
    row stores the rendered template fields (and, for K8s executor pods,
    the rendered pod YAML) so they can be queried after the fact.
    """

    __tablename__ = "rendered_task_instance_fields"

    dag_id = Column(StringID(), primary_key=True)
    task_id = Column(StringID(), primary_key=True)
    run_id = Column(StringID(), primary_key=True)
    # -1 is the sentinel used for unmapped tasks (see __repr__).
    map_index = Column(Integer, primary_key=True, server_default='-1')
    rendered_fields = Column(sqlalchemy_jsonfield.JSONField(json=json),
                             nullable=False)
    k8s_pod_yaml = Column(sqlalchemy_jsonfield.JSONField(json=json),
                          nullable=True)

    # Composite FK to task_instance: deleting a TI removes its rendered
    # fields via ON DELETE CASCADE.
    __table_args__ = (ForeignKeyConstraint(
        [dag_id, task_id, run_id, map_index],
        [
            "task_instance.dag_id",
            "task_instance.task_id",
            "task_instance.run_id",
            "task_instance.map_index",
        ],
        name='rtif_ti_fkey',
        ondelete="CASCADE",
    ), )
    task_instance = relationship(
        "TaskInstance",
        lazy='joined',
        back_populates="rendered_task_instance_fields",
    )

    # We don't need a DB level FK here, as we already have that to TI (which has one to DR) but by defining
    # the relationship we can more easily find the execution date for these rows
    dag_run = relationship(
        "DagRun",
        primaryjoin="""and_(
            RenderedTaskInstanceFields.dag_id == foreign(DagRun.dag_id),
            RenderedTaskInstanceFields.run_id == foreign(DagRun.run_id),
        )""",
        viewonly=True,
    )

    execution_date = association_proxy("dag_run", "execution_date")

    def __init__(self, ti: TaskInstance, render_templates=True):
        """Snapshot the rendered fields of *ti*.

        :param ti: Task Instance whose template fields are captured
        :param render_templates: when True, render templates on *ti* first
        """
        self.dag_id = ti.dag_id
        self.task_id = ti.task_id
        self.run_id = ti.run_id
        self.map_index = ti.map_index
        self.ti = ti
        if render_templates:
            ti.render_templates()
        self.task = ti.task
        # Only capture pod YAML when actually running as a K8s executor pod.
        if os.environ.get("AIRFLOW_IS_K8S_EXECUTOR_POD", None):
            self.k8s_pod_yaml = ti.render_k8s_pod_yaml()
        self.rendered_fields = {
            field: serialize_template_field(getattr(self.task, field))
            for field in self.task.template_fields
        }

        self._redact()

    def __repr__(self):
        """Identify the row by DAG/task/run (and map_index when mapped)."""
        prefix = f"<{self.__class__.__name__}: {self.dag_id}.{self.task_id} {self.run_id}"
        if self.map_index != -1:
            prefix += f" map_index={self.map_index}"
        return prefix + '>'

    def _redact(self):
        """Mask secrets in the stored pod YAML and rendered fields."""
        from airflow.utils.log.secrets_masker import redact

        if self.k8s_pod_yaml:
            self.k8s_pod_yaml = redact(self.k8s_pod_yaml)

        for field, rendered in self.rendered_fields.items():
            self.rendered_fields[field] = redact(rendered, field)

    @classmethod
    @provide_session
    def get_templated_fields(cls,
                             ti: TaskInstance,
                             session: Session = NEW_SESSION) -> Optional[dict]:
        """
        Get templated field for a TaskInstance from the RenderedTaskInstanceFields
        table.

        :param ti: Task Instance
        :param session: SqlAlchemy Session
        :return: Rendered Templated TI field
        """
        result = (session.query(cls.rendered_fields).filter(
            cls.dag_id == ti.dag_id,
            cls.task_id == ti.task_id,
            cls.run_id == ti.run_id,
            cls.map_index == ti.map_index,
        ).one_or_none())

        if result:
            rendered_fields = result.rendered_fields
            return rendered_fields
        else:
            return None

    @classmethod
    @provide_session
    def get_k8s_pod_yaml(cls,
                         ti: TaskInstance,
                         session: Session = NEW_SESSION) -> Optional[dict]:
        """
        Get rendered Kubernetes Pod Yaml for a TaskInstance from the RenderedTaskInstanceFields
        table.

        :param ti: Task Instance
        :param session: SqlAlchemy Session
        :return: Kubernetes Pod Yaml
        """
        result = (session.query(cls.k8s_pod_yaml).filter(
            cls.dag_id == ti.dag_id,
            cls.task_id == ti.task_id,
            cls.run_id == ti.run_id,
            cls.map_index == ti.map_index,
        ).one_or_none())
        return result.k8s_pod_yaml if result else None

    @provide_session
    def write(self, session: Session = None):
        """Write instance to database

        :param session: SqlAlchemy Session
        """
        session.merge(self)

    @classmethod
    @provide_session
    def delete_old_records(
        cls,
        task_id: str,
        dag_id: str,
        num_to_keep=conf.getint("core",
                                "max_num_rendered_ti_fields_per_task",
                                fallback=0),
        session: Session = None,
    ):
        """
        Keep only Last X (num_to_keep) number of records for a task by deleting others.

        In the case of data for a mapped task either all of the rows or none of the rows will be deleted, so
        we don't end up with partial data for a set of mapped Task Instances left in the database.

        :param task_id: Task ID
        :param dag_id: Dag ID
        :param num_to_keep: Number of Records to keep
        :param session: SqlAlchemy Session
        """
        from airflow.models.dagrun import DagRun

        # num_to_keep <= 0 means "keep everything" (pruning disabled).
        if num_to_keep <= 0:
            return

        tis_to_keep_query = (session.query(
            cls.dag_id, cls.task_id, cls.run_id).filter(
                cls.dag_id == dag_id,
                cls.task_id == task_id).join(cls.dag_run).distinct().order_by(
                    DagRun.execution_date.desc()).limit(num_to_keep))

        if session.bind.dialect.name in ["postgresql", "sqlite"]:
            # Fetch Top X records given dag_id & task_id ordered by Execution Date
            subq1 = tis_to_keep_query.subquery()
            excluded = session.query(subq1.c.dag_id, subq1.c.task_id,
                                     subq1.c.run_id)
            session.query(cls).filter(
                cls.dag_id == dag_id,
                cls.task_id == task_id,
                tuple_(cls.dag_id, cls.task_id, cls.run_id).notin_(excluded),
            ).delete(synchronize_session=False)
        elif session.bind.dialect.name in ["mysql"]:
            # MySQL cannot use a LIMIT subquery inside NOT IN; see helper.
            cls._remove_old_rendered_ti_fields_mysql(dag_id, session, task_id,
                                                     tis_to_keep_query)
        else:
            # Fetch Top X records given dag_id & task_id ordered by Execution Date
            tis_to_keep = tis_to_keep_query.all()

            filter_tis = [
                not_(
                    and_(
                        cls.dag_id == ti.dag_id,
                        cls.task_id == ti.task_id,
                        cls.run_id == ti.run_id,
                    )) for ti in tis_to_keep
            ]

            session.query(cls).filter(
                and_(*filter_tis)).delete(synchronize_session=False)

        session.flush()

    @classmethod
    @retry_db_transaction
    def _remove_old_rendered_ti_fields_mysql(cls, dag_id, session, task_id,
                                             tis_to_keep_query):
        """MySQL-specific pruning: delete rows not in the keep set."""
        # Fetch Top X records given dag_id & task_id ordered by Execution Date
        subq1 = tis_to_keep_query.subquery('subq1')
        # Second Subquery
        # Workaround for MySQL Limitation (https://stackoverflow.com/a/19344141/5691525)
        # Limitation: This version of MySQL does not yet support
        # LIMIT & IN/ALL/ANY/SOME subquery
        subq2 = session.query(subq1.c.dag_id, subq1.c.task_id,
                              subq1.c.run_id).subquery('subq2')
        # This query might deadlock occasionally and it should be retried if fails (see decorator)
        session.query(cls).filter(
            cls.dag_id == dag_id,
            cls.task_id == task_id,
            tuple_(cls.dag_id, cls.task_id, cls.run_id).notin_(subq2),
        ).delete(synchronize_session=False)
Example #21
0
class Activation(AlchemyProxy):
    """Represent a function activation"""
    __tablename__ = "function_activation"
    __table_args__ = (
        PrimaryKeyConstraint("trial_id", "id"),
        ForeignKeyConstraint(["trial_id"], ["trial.id"], ondelete="CASCADE"),
        ForeignKeyConstraint(["trial_id", "caller_id"],
                             ["function_activation.trial_id",
                              "function_activation.id"], ondelete="CASCADE"),
    )
    trial_id = Column(Integer, index=True)
    id = Column(Integer, index=True)                                             # pylint: disable=invalid-name
    name = Column(Text)
    line = Column(Integer)
    return_value = Column(Text)
    start = Column(TIMESTAMP)
    finish = Column(TIMESTAMP)
    caller_id = Column(Integer, index=True)

    _children = backref("children", order_by="Activation.start")
    caller = one(
        "Activation", remote_side=[trial_id, id],
        backref=_children, viewonly=True
    )

    object_values = many_viewonly_ref("activation", "ObjectValue")
    file_accesses = many_viewonly_ref("activation", "FileAccess")

    variables = many_ref("activation", "Variable")
    variables_usages = many_viewonly_ref("activation", "VariableUsage")
    source_variables = many_viewonly_ref(
        "source_activation", "VariableDependency",
        primaryjoin=((id == VariableDependency.m.source_activation_id) &
                     (trial_id == VariableDependency.m.trial_id)))
    target_variables = many_viewonly_ref(
        "target_activation", "VariableDependency",
        primaryjoin=((id == VariableDependency.m.target_activation_id) &
                     (trial_id == VariableDependency.m.trial_id)))

    trial = backref_one("trial")  # Trial.activations
    children = backref_many("children")  # Activation.caller

    @query_many_property
    def globals(self):
        """Return activation globals as a SQLAlchemy query"""
        return self.object_values.filter(ObjectValue.m.type == "GLOBAL")

    @query_many_property
    def arguments(self):
        """Return activation arguments as a SQLAlchemy query"""
        return self.object_values.filter(ObjectValue.m.type == "ARGUMENT")

    @query_many_property
    def param_variables(self):
        """Return param variables as a SQLAlchemy query"""
        return self.variables.filter(Variable.m.type == "param")

    @query_many_property
    def no_param_variables(self):
        """Return non-param variables as a SQLAlchemy query"""
        return self.variables.filter(Variable.m.type != "param")

    prolog_description = PrologDescription("activation", (
        PrologTrial("trial_id", link="trial.id"),
        PrologAttribute("id"),
        PrologRepr("name"),
        PrologAttribute("line"),
        PrologTimestamp("start"),
        PrologTimestamp("finish"),
        PrologNullable("caller_activation_id", attr_name="caller_id",
                       link="activation.id"),
    ), description=(
        "informs that in a given trial (*trial_id*),\n"
        "a function *name* was activated\n"
        "by another function (*caller_activation_id*)\n"
        "and executed during a time period from *start*\n"
        "to *finish*."
    ))

    def __init__(self, *args, **kwargs):
        """Load an activation by trial reference and wrap it.

        Accepts a relational object (its ``id`` is used as the trial
        reference), a positional reference, or a ``trial_ref`` keyword.
        """
        if args and isinstance(args[0], relational.base):
            obj = args[0]
            trial_ref = obj.id
        elif args:
            trial_ref = kwargs.get("trial_ref", args[0])
        else:
            trial_ref = kwargs.get("trial_ref", None)
        session = relational.session
        obj = Activation.load_activation(trial_ref, session=session)
        if obj is not None:
            super(Activation, self).__init__(obj)
        else:
            # NOTE(review): when no row matches, the proxy is left
            # uninitialized (super().__init__ never runs) -- confirm
            # callers account for this before using the instance.
            return None

    # ToDo: Improve hash

    def __key(self):
        # Identity tuple used by both __hash__ and __eq__.
        return (self.trial_id, self.name, self.line)

    def __hash__(self):
        return hash(self.__key())

    def __eq__(self, other):
        return self.__key() == other.__key()                                     # pylint: disable=protected-access

    @property
    def duration(self):
        """Calculate activation duration in microseconds"""
        return int((self.finish - self.start).total_seconds() * 1000000)

    def show(self, _print=lambda x, offset=0: print(x)):
        """Show object

        Keyword arguments:
        _print -- custom print function (default=print)
        """
        global_vars = list(self.globals)
        if global_vars:
            _print("{name}: {values}".format(
                name="Globals", values=", ".join(cvmap(str, global_vars))))

        arg_vars = list(self.arguments)
        if arg_vars:
            _print("{name}: {values}".format(
                name="Arguments", values=", ".join(cvmap(str, arg_vars))))

        if self.return_value:
            _print("Return value: {ret}".format(ret=self.return_value))

        _show_slicing("Variables:", self.variables, _print)
        _show_slicing("Usages:", self.variables_usages, _print)
        _show_slicing("Dependencies:", self.source_variables, _print)

    def __repr__(self):
        return "Activation({0.trial_id}, {0.id}, {0.name})".format(self)

    @classmethod  # query
    def load_activation(cls, trial_ref, session=None):
        """Load function_activation by function_activation reference

        Find reference on trials id and tags name
        """
        session = session or relational.session
        result = session.query(cls.m).filter(cls.m.trial_id == trial_ref)
        return result.first()

    @classmethod  # query
    def pull_content(cls, tid, session=None):
        """Return all activation rows belonging to trial *tid*.

        Declared as a classmethod: the original took ``cls`` as its first
        parameter but lacked the decorator, so it only worked when called
        on an instance. Instance calls still behave identically.
        """
        session = session or relational.session
        ttrial = cls.__table__
        result = session.query(ttrial).filter(ttrial.c.trial_id == tid).all()
        return result

    @classmethod
    def push_content(cls, id, reslist, session=None):
        """Insert the activation rows in *reslist* under trial *id*.

        Commits once per row, preserving the original behavior.
        """
        session = session or relational.session
        ttrial = cls.__table__
        for res in reslist:
            session.execute(
                ttrial.insert(),
                {"trial_id": id, "id": res.id, "name": res.name, "line": res.line, "return_value": res.return_value, "caller_id": res.caller_id}
            )
            session.commit()
Example #22
0
class Fit(Base):
    """Fit object definition.

    Regression results (coefficients, derived quantities, and their
    uncertainties) for one Observation, keyed by the same composite
    (idx, experiment_id) pair.
    """

    # Metadata
    __tablename__ = 'Fits'

    # Columns
    # Linear fit coefficients and goodness-of-fit statistics; each value
    # is paired with its uncertainty (sig_*).
    a = Column(Float, nullable=False)
    sig_a = Column(Float, nullable=False)
    b = Column(Float, nullable=False)
    sig_b = Column(Float, nullable=False)
    r2 = Column(Float, nullable=False)
    q = Column(Float, nullable=False)
    chi2 = Column(Float, nullable=False)
    nu_chi = Column(Integer, nullable=False)
    mddp = Column(Float, nullable=False)
    sig_mddp = Column(Float, nullable=False)
    x1s = Column(Float, nullable=False)
    sig_x1s = Column(Float, nullable=False)
    x1e = Column(Float, nullable=False)
    sig_x1e = Column(Float, nullable=False)
    x1 = Column(Float, nullable=False)
    sig_x1 = Column(Float, nullable=False)
    m1s = Column(Float, nullable=False)
    sig_m1s = Column(Float, nullable=False)
    m1e = Column(Float, nullable=False)
    sig_m1e = Column(Float, nullable=False)
    m1 = Column(Float, nullable=False)
    sig_m1 = Column(Float, nullable=False)
    rhos = Column(Float, nullable=False)
    sig_rhos = Column(Float, nullable=False)
    rhoe = Column(Float, nullable=False)
    sig_rhoe = Column(Float, nullable=False)
    rho = Column(Float, nullable=False)
    sig_rho = Column(Float, nullable=False)
    Bm1 = Column(Float, nullable=False)
    sig_Bm1 = Column(Float, nullable=False)
    T = Column(Float, nullable=False)
    sig_T = Column(Float, nullable=False)
    D12 = Column(Float, nullable=False)
    sig_D12 = Column(Float, nullable=False)
    hfg = Column(Float, nullable=False)
    sig_hfg = Column(Float, nullable=False)
    hu = Column(Float, nullable=False)
    sig_hu = Column(Float, nullable=False)
    hs = Column(Float, nullable=False)
    sig_hs = Column(Float, nullable=False)
    cpv = Column(Float, nullable=False)
    sig_cpv = Column(Float, nullable=False)
    he = Column(Float, nullable=False)
    sig_he = Column(Float, nullable=False)
    cpl = Column(Float, nullable=False)
    sig_cpl = Column(Float, nullable=False)
    hT = Column(Float, nullable=False)
    sig_hT = Column(Float, nullable=False)
    qcu = Column(Float, nullable=False)
    sig_qcu = Column(Float, nullable=False)
    Ebe = Column(Float, nullable=False)
    sig_Ebe = Column(Float, nullable=False)
    Ebs = Column(Float, nullable=False)
    sig_Ebs = Column(Float, nullable=False)
    qrs = Column(Float, nullable=False)
    sig_qrs = Column(Float, nullable=False)
    kv = Column(Float, nullable=False)
    sig_kv = Column(Float, nullable=False)
    alpha = Column(Float, nullable=False)
    sig_alpha = Column(Float, nullable=False)
    Bh = Column(Float, nullable=False)
    sig_Bh = Column(Float, nullable=False)
    M = Column(Float, nullable=False)
    sig_M = Column(Float, nullable=False)
    gamma1 = Column(Float, nullable=False)
    sig_gamma1 = Column(Float, nullable=False)
    gamma2 = Column(Float, nullable=False)
    sig_gamma2 = Column(Float, nullable=False)
    beta = Column(Float, nullable=False)
    sig_beta = Column(Float, nullable=False)
    Delta_m = Column(Float, nullable=False)
    sig_Delta_m = Column(Float, nullable=False)
    Delta_T = Column(Float, nullable=False)
    sig_Delta_T = Column(Float, nullable=False)
    mu = Column(Float, nullable=False)
    sig_mu = Column(Float, nullable=False)
    nu = Column(Float, nullable=False)
    sig_nu = Column(Float, nullable=False)
    ShR = Column(Float, nullable=False)
    sig_ShR = Column(Float, nullable=False)
    NuR = Column(Float, nullable=False)
    sig_NuR = Column(Float, nullable=False)
    Le = Column(Float, nullable=False)
    sig_Le = Column(Float, nullable=False)
    GrR_binary = Column(Float, nullable=False)
    sig_GrR_binary = Column(Float, nullable=False)
    GrR_primary = Column(Float, nullable=False)
    sig_GrR_primary = Column(Float, nullable=False)
    Ts = Column(Float, nullable=False)
    sig_Ts = Column(Float, nullable=False)

    # Composite foreign keys
    idx = Column(Integer, primary_key=True)
    experiment_id = Column(Integer, primary_key=True)

    __table_args__ = (
        ForeignKeyConstraint(
            [idx, experiment_id], [Observation.idx, Observation.experiment_id]),
        )

    def __repr__(self):  # noqa: D105
        # NOTE(review): this prints self.nu (a Float column); given it
        # follows chi2, the integer dof column nu_chi may have been
        # intended instead -- confirm with the original author.
        return (
            f'<Fit(a={self.a}, '
            f'sig_a={self.sig_a}, '
            f'b={self.b}, '
            f'sig_b={self.sig_b}, '
            f'r2={self.r2}, '
            f'q={self.q}, '
            f'chi2={self.chi2}, '
            f'nu={self.nu}, '
            f'experiment_id={self.experiment_id}, '
            f'idx={self.idx})>')
    'datasets',
    m,
    Column('uuid', String, primary_key=True),
    Column('original_filename', String),
    Column('upload_date', DateTime),
)

# Key/value metadata attached to a dataset, one row per key.
metadata = Table(
    'metadata',
    m,
    Column('id', Integer, primary_key=True),
    Column('dataset_uuid', String),
    Column('key', String),
    Column('value', String),
    ForeignKeyConstraint(
        ['dataset_uuid'],
        # Schema-qualified target: the datasets table lives in the
        # schema configured in Django settings.
        [settings.DATABASES['default']['SCHEMA'] + '.datasets.uuid']),
)

# Allowed values for the transaction_type enum column below.
transaction_types = ('create', 'add', 'modify', 'add_and_modify', 'remove')
dataset_transactions = Table(
    'dataset_transactions',
    m,
    Column('id', Integer, primary_key=True),
    Column('dataset_uuid', String),
    Column('transaction_type',
           Enum(*transaction_types, name='transaction_type'),
           default=transaction_types[0]),
    Column('rows_affected', Integer),
    Column('affected_row_ids', ARRAY(Integer)),
    ForeignKeyConstraint(
Example #24
0
class FeedPublishOperation(Base):
    """Row model for a Steem ``feed_publish`` operation.

    Steem Blockchain Example
    ======================
    {
      "exchange_rate": {
        "quote": "1000.000 STEEM",
        "base": "1.000 SBD"
      },
      "publisher": "abit"
    }

    """

    __tablename__ = 'sbds_op_feed_publishes'
    # Composite PK locates the operation inside its block/transaction;
    # the deferred FK ties publisher to the accounts metadata table.
    __table_args__ = (
        PrimaryKeyConstraint('block_num', 'transaction_num', 'operation_num'),
        ForeignKeyConstraint(['publisher'], ['sbds_meta_accounts.name'],
                             deferrable=True,
                             initially='DEFERRED',
                             use_alter=True),
    )

    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    publisher = Column(String(16))  # steem_type:account_name_type
    exchange_rate = Column(JSONB)  # steem_type:price
    operation_type = Column(operation_types_enum,
                            nullable=False,
                            index=True,
                            default='feed_publish')

    # Per-field extraction functions applied to the raw operation dict.
    _fields = dict(
        exchange_rate=lambda x: json_string_field(x.get('exchange_rate')
                                                  ),  # steem_type:price
    )

    # Columns whose values reference account names.
    _account_fields = frozenset([
        'publisher',
    ])

    def dump(self):
        """Return the row's column data as a dict (SQLAlchemy state removed)."""
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        """Return a plain dict of the row.

        NOTE(review): this model defines no json_metadata column, so the
        decode branch below looks like shared-template code that never
        fires here -- confirm before relying on it.
        """
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = sbds.sbds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        """Serialize the row to a JSON string."""
        data_dict = self.to_dict()
        return sbds.sbds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
Example #25
0
                  convert_unicode=False,
                  assert_unicode=None,
                  unicode_error=None,
                  _warn_on_bytestring=False),
           primary_key=True,
           nullable=False),
    Column('role',
           String(length=255,
                  convert_unicode=False,
                  assert_unicode=None,
                  unicode_error=None,
                  _warn_on_bytestring=False),
           primary_key=True,
           nullable=False),
    ForeignKeyConstraint(['user_id', 'project_id'], [
        'user_project_association.user_id',
        'user_project_association.project_id'
    ]),
)

user_role_association = Table(
    'user_role_association',
    meta,
    Column('created_at', DateTime(timezone=False)),
    Column('updated_at', DateTime(timezone=False)),
    Column('deleted_at', DateTime(timezone=False)),
    Column('deleted', Boolean(create_constraint=True, name=None)),
    Column('user_id',
           String(length=255,
                  convert_unicode=False,
                  assert_unicode=None,
                  unicode_error=None,
def upgrade(migrate_engine):
    """Create the agents, queues and queue_members tables.

    queue_members links agents to queues by uuid and enforces uniqueness
    of each (agent_uuid, queue_uuid) pair.

    :param migrate_engine: SQLAlchemy engine the migration runs against
    """
    meta = MetaData()
    meta.bind = migrate_engine

    agents = Table(
        'agents',
        meta,
        Column('id', Integer, primary_key=True, autoincrement=True),
        Column('created_at', DateTime),
        Column('project_id', String(length=255)),
        Column('updated_at', DateTime),
        Column('user_id', String(length=255)),
        Column('uuid', String(length=255), unique=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    queue_members = Table(
        'queue_members',
        meta,
        Column('id', Integer, primary_key=True, autoincrement=True),
        Column('created_at', DateTime),
        Column('agent_uuid', String(255)),
        Column('queue_uuid', String(255)),
        Column('updated_at', DateTime),
        ForeignKeyConstraint(
            ['agent_uuid'],
            ['agents.uuid'],
        ),
        ForeignKeyConstraint(
            ['queue_uuid'],
            ['queues.uuid'],
        ),
        UniqueConstraint('agent_uuid',
                         'queue_uuid',
                         name='uniq_queue_members0agent_uuid0queue_uuid'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    queues = Table(
        'queues',
        meta,
        Column('id', Integer, primary_key=True, autoincrement=True),
        Column('created_at', DateTime),
        Column('description', Text),
        Column('disabled', Boolean),
        Column('name', String(length=80)),
        Column('project_id', String(length=255)),
        Column('updated_at', DateTime),
        Column('user_id', String(length=255)),
        Column('uuid', String(length=255), unique=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    # Creation order matters: queue_members' FKs require agents and
    # queues to exist first.
    tables = [agents, queues, queue_members]

    for table in tables:
        try:
            table.create()
        except Exception:
            # Log the failing table definition before re-raising so the
            # migration aborts loudly.
            LOG.info(repr(table))
            LOG.exception('Exception while creating table.')
            raise
Example #27
0
           ForeignKey('archives.name'),
           primary_key=True),
    Column('timestamp_value',
           String,
           ForeignKey('timestamps.value'),
           primary_key=True),
)

# Association table mapping a source package (name + version) to the
# files it contains; the composite FK ties rows to an existing srcpkg.
SrcpkgFiles = Table(
    'srcpkg_files',
    Base.metadata,
    Column('srcpkg_name', String, primary_key=True),
    Column('srcpkg_version', String, primary_key=True),
    Column('file_sha256', String, ForeignKey('files.sha256'),
           primary_key=True),
    ForeignKeyConstraint(('srcpkg_name', 'srcpkg_version'),
                         ('srcpkg.name', 'srcpkg.version')),
)


class BinpkgFiles(Base):
    """Association of a binary package (name + version + arch) to its files."""

    __tablename__ = 'binpkg_files'
    # Composite FK: (binpkg_name, binpkg_version) must reference an
    # existing row in the binpkg table.
    __table_args__ = (ForeignKeyConstraint(
        ('binpkg_name', 'binpkg_version'),
        ('binpkg.name', 'binpkg.version')), )
    binpkg_name = Column(String, primary_key=True)
    binpkg_version = Column(String, primary_key=True)
    file_sha256 = Column(String, ForeignKey('files.sha256'), primary_key=True)
    architecture = Column(String,
                          ForeignKey('architectures.name'),
                          primary_key=True)
    file = relationship("DBfile")
Example #28
0
class V_Port(Base, _Port):
    """Port row in a view; rows cascade-delete with their parent scan."""

    __tablename__ = 'v_port'
    __table_args__ = (
        ForeignKeyConstraint(['scan'], ['v_scan.id'], ondelete='CASCADE'),
        # At most one row per (scan, port, protocol) triple.
        Index('ix_v_port_scan_port', 'scan', 'port', 'protocol', unique=True),
    )
Example #29
0
File: models.py Project: Cylae/nyaa
class Torrent(db.Model):
    """Uploaded torrent: core metadata, bitmask status flags, and
    relationships to uploader, categories, info/filelist/statistics
    and tracker rows.

    Status booleans (anonymous, hidden, deleted, trusted, remake,
    complete) are stored packed in the single integer ``flags`` column
    and exposed through property getter/setter pairs below.
    """
    __tablename__ = DB_TABLE_PREFIX + 'torrents'

    id = db.Column(db.Integer, primary_key=True)
    # 20-byte binary infohash; unique identity of the torrent payload.
    info_hash = db.Column(BinaryType(length=20),
                          unique=True,
                          nullable=False,
                          index=True)
    # Human-readable title; case-insensitive collation for searching.
    display_name = db.Column(db.String(length=255,
                                       collation=COL_UTF8_GENERAL_CI),
                             nullable=False,
                             index=True)
    torrent_name = db.Column(db.String(length=255), nullable=False)
    # Free-form contact/info field; rendered as a link when it matches
    # an IRC or HTTP(S) pattern (see information_as_link below).
    information = db.Column(db.String(length=255), nullable=False)
    description = db.Column(DescriptionTextType(collation=COL_UTF8MB4_BIN),
                            nullable=False)

    # Total payload size in bytes.
    filesize = db.Column(db.BIGINT, default=0, nullable=False, index=True)
    encoding = db.Column(db.String(length=32), nullable=False)
    # Packed TorrentFlags bitmask; see the flag properties below.
    flags = db.Column(db.Integer, default=0, nullable=False, index=True)
    # Nullable: uploader may be absent (e.g. deleted or anonymous account).
    uploader_id = db.Column(db.Integer,
                            db.ForeignKey('users.id'),
                            nullable=True)
    # Whether the .torrent file itself is stored and downloadable.
    has_torrent = db.Column(db.Boolean, nullable=False, default=False)

    # NOTE: naive datetimes, defaulted to UTC via datetime.utcnow.
    created_time = db.Column(db.DateTime(timezone=False),
                             default=datetime.utcnow,
                             nullable=False)
    updated_time = db.Column(db.DateTime(timezone=False),
                             default=datetime.utcnow,
                             onupdate=datetime.utcnow,
                             nullable=False)

    main_category_id = db.Column(db.Integer,
                                 db.ForeignKey(DB_TABLE_PREFIX +
                                               'main_categories.id'),
                                 nullable=False)
    # No single-column FK here: validity is enforced by the composite FK
    # in __table_args__ (sub-category ids are scoped to a main category).
    sub_category_id = db.Column(db.Integer, nullable=False)
    # Optional pointer to a replacement torrent (self-referential FK).
    redirect = db.Column(db.Integer,
                         db.ForeignKey(DB_TABLE_PREFIX + 'torrents.id'),
                         nullable=True)

    __table_args__ = (Index('uploader_flag_idx', 'uploader_id', 'flags'),
                      # Composite FK: (main_category_id, sub_category_id)
                      # must reference an existing sub-category pair.
                      ForeignKeyConstraint([
                          'main_category_id', 'sub_category_id'
                      ], [
                          DB_TABLE_PREFIX + 'sub_categories.main_category_id',
                          DB_TABLE_PREFIX + 'sub_categories.id'
                      ]), {})

    user = db.relationship('User', uselist=False, back_populates='torrents')
    main_category = db.relationship('MainCategory',
                                    uselist=False,
                                    back_populates='torrents',
                                    lazy="joined")
    # Explicit primaryjoin because the composite FK cannot be inferred:
    # a sub-category is matched on both its id and its main-category id.
    sub_category = db.relationship(
        'SubCategory',
        uselist=False,
        backref='torrents',
        lazy="joined",
        primaryjoin=(
            "and_(SubCategory.id == foreign(Torrent.sub_category_id), "
            "SubCategory.main_category_id == Torrent.main_category_id)"))
    # One-to-one satellites; delete-orphan so they die with the torrent.
    info = db.relationship('TorrentInfo',
                           uselist=False,
                           cascade="all, delete-orphan",
                           back_populates='torrent')
    filelist = db.relationship('TorrentFilelist',
                               uselist=False,
                               cascade="all, delete-orphan",
                               back_populates='torrent')
    stats = db.relationship('Statistic',
                            uselist=False,
                            cascade="all, delete-orphan",
                            back_populates='torrent',
                            lazy='joined')
    trackers = db.relationship('TorrentTrackers',
                               uselist=True,
                               cascade="all, delete-orphan",
                               lazy='joined')

    def __repr__(self):
        """Debug representation: class name, id, title and size."""
        return '<{0} #{1.id} \'{1.display_name}\' {1.filesize}b>'.format(
            type(self).__name__, self)

    @property
    def created_utc_timestamp(self):
        ''' Returns a UTC POSIX timestamp, as seconds '''
        return (self.created_time - UTC_EPOCH).total_seconds()

    @property
    def information_as_link(self):
        ''' Formats the .information into an IRC or HTTP(S) <a> if possible,
            otherwise escapes it. '''
        irc_match = re.match(r'^#([a-zA-Z0-9-_]+)@([a-zA-Z0-9-_.:]+)$',
                             self.information)
        if irc_match:
            # Return a formatted IRC uri
            return '<a href="irc://{1}/{0}">#{0}@{1}</a>'.format(
                *irc_match.groups())

        url_match = re.match(r'^(https?:\/\/.+?)$', self.information)
        if url_match:
            url = url_match.group(1)

            invalid_url_characters = '<>"'
            # Check if url contains invalid characters
            if not any(c in url for c in invalid_url_characters):
                return '<a href="{0}">{1}</a>'.format(
                    url, escape_markup(unquote_url(url)))
        # Escaped
        return escape_markup(self.information)

    @property
    def info_hash_as_b32(self):
        """Infohash as an upper-case base32 string (magnet-style)."""
        return base64.b32encode(self.info_hash).decode('utf-8')

    @property
    def info_hash_as_hex(self):
        """Infohash as a lower-case hex string."""
        return self.info_hash.hex()

    @property
    def magnet_uri(self):
        """Magnet link for this torrent (built by create_magnet)."""
        return create_magnet(self)

    @property
    def anonymous(self):
        """Truthy when the ANONYMOUS bit is set in ``flags``."""
        return self.flags & TorrentFlags.ANONYMOUS

    @anonymous.setter
    def anonymous(self, value):
        # Clear the bit, then OR it back in only when value is truthy
        # (``value and FLAG`` evaluates falsy when value is falsy).
        self.flags = (self.flags & ~TorrentFlags.ANONYMOUS) | (
            value and TorrentFlags.ANONYMOUS)

    @property
    def hidden(self):
        """Truthy when the HIDDEN bit is set in ``flags``."""
        return self.flags & TorrentFlags.HIDDEN

    @hidden.setter
    def hidden(self, value):
        # Same clear-then-conditionally-set pattern as the other flags.
        self.flags = (self.flags
                      & ~TorrentFlags.HIDDEN) | (value and TorrentFlags.HIDDEN)

    @property
    def deleted(self):
        """Truthy when the DELETED bit is set in ``flags``."""
        return self.flags & TorrentFlags.DELETED

    @deleted.setter
    def deleted(self, value):
        self.flags = (self.flags & ~TorrentFlags.DELETED) | (
            value and TorrentFlags.DELETED)

    @property
    def trusted(self):
        """Truthy when the TRUSTED bit is set in ``flags``."""
        return self.flags & TorrentFlags.TRUSTED

    @trusted.setter
    def trusted(self, value):
        self.flags = (self.flags & ~TorrentFlags.TRUSTED) | (
            value and TorrentFlags.TRUSTED)

    @property
    def remake(self):
        """Truthy when the REMAKE bit is set in ``flags``."""
        return self.flags & TorrentFlags.REMAKE

    @remake.setter
    def remake(self, value):
        self.flags = (self.flags
                      & ~TorrentFlags.REMAKE) | (value and TorrentFlags.REMAKE)

    @property
    def complete(self):
        """Truthy when the COMPLETE bit is set in ``flags``."""
        return self.flags & TorrentFlags.COMPLETE

    @complete.setter
    def complete(self, value):
        self.flags = (self.flags & ~TorrentFlags.COMPLETE) | (
            value and TorrentFlags.COMPLETE)

    @classmethod
    def by_id(cls, id):
        """Fetch a torrent by primary key (None when not found)."""
        return cls.query.get(id)

    @classmethod
    def by_info_hash(cls, info_hash):
        """Fetch the torrent with the given binary infohash, or None."""
        return cls.query.filter_by(info_hash=info_hash).first()
# --- Example #30 ---
    def test_change_fk(self):
        """Verify autogenerate diffs when fk2 changes from a single-column
        FK on ref_a to a composite FK on ref_b, while the include_object
        filter suppresses every constraint named 'fk1'."""
        old_meta = MetaData()
        new_meta = MetaData()

        old_ref_a = Table('ref_a', old_meta,
                          Column('a', Integer, primary_key=True),
                          mysql_engine='InnoDB')
        Table('ref_b', old_meta,
              Column('a', Integer, primary_key=True),
              Column('b', Integer, primary_key=True),
              mysql_engine='InnoDB')
        old_t = Table('t', old_meta,
                      Column('x', Integer),
                      Column('y', Integer),
                      Column('z', Integer),
                      mysql_engine='InnoDB')
        # Old schema: both FKs are single-column references to ref_a.
        old_t.append_constraint(
            ForeignKeyConstraint([old_t.c.x], [old_ref_a.c.a], name="fk1"))
        old_t.append_constraint(
            ForeignKeyConstraint([old_t.c.y], [old_ref_a.c.a], name="fk2"))

        Table('ref_a', new_meta,
              Column('a', Integer, primary_key=True),
              mysql_engine='InnoDB')
        new_ref_b = Table('ref_b', new_meta,
                          Column('a', Integer, primary_key=True),
                          Column('b', Integer, primary_key=True),
                          mysql_engine='InnoDB')
        new_t = Table('t', new_meta,
                      Column('x', Integer),
                      Column('y', Integer),
                      Column('z', Integer),
                      mysql_engine='InnoDB')
        # New schema: both FKs become composite references to ref_b.
        new_t.append_constraint(
            ForeignKeyConstraint([new_t.c.x, new_t.c.z],
                                 [new_ref_b.c.a, new_ref_b.c.b],
                                 name="fk1"))
        new_t.append_constraint(
            ForeignKeyConstraint([new_t.c.y, new_t.c.z],
                                 [new_ref_b.c.a, new_ref_b.c.b],
                                 name="fk2"))

        def include_object(object_, name, type_, reflected, compare_to):
            # Exclude foreign key constraints named 'fk1' from comparison.
            is_filtered_fk = (isinstance(object_, ForeignKeyConstraint)
                              and type_ == 'foreign_key_constraint'
                              and name == 'fk1')
            return not is_filtered_fk

        diffs = self._fixture(old_meta, new_meta,
                              object_filters=include_object)

        # Only fk2's change survives the filter: one drop, one add.
        self._assert_fk_diff(diffs[0], "remove_fk",
                             't', ['y'],
                             'ref_a', ['a'],
                             name='fk2')
        self._assert_fk_diff(diffs[1], "add_fk",
                             't', ['y', 'z'],
                             'ref_b', ['a', 'b'],
                             name='fk2')
        eq_(len(diffs), 2)
# Schema for 'pool_ns_records': per-pool name-server entries, each with a
# priority and hostname. Rows are deleted in cascade with their parent
# pool (composite of the standard id/audit columns plus pool linkage).
pool_ns_records_table = Table('pool_ns_records',
                              meta,
                              Column('id',
                                     UUID(),
                                     default=utils.generate_uuid,
                                     primary_key=True),
                              Column('created_at', DateTime()),
                              Column('updated_at', DateTime()),
                              Column('version',
                                     Integer(),
                                     default=1,
                                     nullable=False),
                              Column('pool_id', UUID(), nullable=False),
                              Column('priority', Integer(), nullable=False),
                              Column('hostname', String(255), nullable=False),
                              ForeignKeyConstraint(['pool_id'], ['pools.id'],
                                                   ondelete='CASCADE'),
                              mysql_engine='INNODB',
                              mysql_charset='utf8')


def upgrade(migrate_engine):
    meta.bind = migrate_engine

    # Load the pool_attributes_table table schema
    pool_attributes_table = Table('pool_attributes', meta, autoload=True)

    # Create the pool_ns_records DB table
    pool_ns_records_table.create()

    # Find the existing name server entries
    pool_ns_records = select(columns=[