from sqlalchemy import (Column, DateTime, String, Integer, BigInteger,
                        SmallInteger, Float, Boolean, ForeignKey, func,
                        asc, desc, LargeBinary)  # fixed: `func` was imported twice
from auditing.db import session, Base, engine
from sqlalchemy.dialects import postgresql, sqlite

# Portable big-integer primary-key type: native BIGINT on PostgreSQL,
# plain INTEGER on SQLite (SQLite only autoincrements INTEGER PKs).
BigIntegerType = BigInteger()
BigIntegerType = BigIntegerType.with_variant(postgresql.BIGINT(), 'postgresql')
BigIntegerType = BigIntegerType.with_variant(sqlite.INTEGER(), 'sqlite')


class AlertType(Base):
    """Catalogue of alert types, keyed by a unique short code."""

    __tablename__ = 'alert_type'

    id = Column(BigIntegerType, primary_key=True, autoincrement=True)
    code = Column(String(20), nullable=False, unique=True)
    name = Column(String(120), nullable=False)
    message = Column(String(4096), nullable=True)
    risk = Column(String(20), nullable=False)
    description = Column(String(3000), nullable=False)

    @classmethod
    def count(cls):
        """Return the total number of alert types stored."""
        return session.query(func.count(cls.id)).scalar()

    def save(self):
        """Stage this instance in the module session and flush (no commit)."""
        session.add(self)
        session.flush()

    @classmethod
    def find_one_by_code(cls, code):
        """Return the alert type with the given unique *code*, or None."""
        return session.query(cls).filter(cls.code == code).first()
class DTA(Base):
    # Declarative model for a DTA record identified by its `numero`.
    # NOTE(review): `VARCHAR` and `relationship` are not imported in this
    # chunk — confirm they are brought into scope elsewhere in the module.
    __tablename__ = 'dta_dtas'
    # BigInteger PK, downgraded to INTEGER on SQLite so autoincrement works.
    id = Column(BigInteger().with_variant(Integer, 'sqlite'), primary_key=True)
    # Document number; indexed for lookup, max 10 characters.
    numero = Column(VARCHAR(10), index=True, nullable=False)
    # One-to-many: expects a matching `Anexo.dta` back_populates on the other side.
    anexos = relationship('Anexo', back_populates='dta')
def create_krc_pulp_pi_table(self, table_name):
    """Create the table holding raw pulp PI tags for the Kerinci mill.

    One ``Float`` column is created per PI tag.  The column name is the
    tag prefixed with ``tag_``, lower-cased, with every run of
    non-alphanumeric characters collapsed to ``_`` (for example
    ``RPL.111XC0003:afc`` -> ``tag_rpl_111xc0003_afc``).

    :param table_name: name of the table to create in ``self.engine``
    """
    # PI tags listed in column order.  The order below fixes the table's
    # column order, so do not reorder entries.  Group comments mirror the
    # original hand-written layout.
    tags = [
        # Production
        "RPL.111XC0003:afc", "RPL.111XC0002:afc",
        "RPL.433FQ1260.F:Y_DAY", "RPL.422YI5139-YD:value",
        "RPL.111XC0005:afc", "RPL.111XC0006:afc", "RPL.111XC0007:afc",
        "RPL.111XC0008:afc", "RPL.111XC0221:afc",
        # ClO2 consumption & total active (0252/0254/0256 are monthly)
        "RPL.111XC0017:afc", "RPL.111XC0023:afc", "RPL.111XC0018:afc",
        "RPL.111XC0024:afc", "RPL.111XC0019:afc", "RPL.111XC0025:afc",
        "RPL.111XC0251:afc", "RPL.111XC0252:afc", "RPL.111XC0254:afc",
        "RPL.111XC0256:afc",
        # Defoamer
        "RPL.111XC0170:afc", "RPL.111XC0171:afc", "RPL.111XC0172:afc",
        "RPL.111XC0173:afc", "RPL.111XC0202:afc",
        # H2O2 (0207/0226/0228/0230 are monthly consumption)
        "RPL.111XC0020:afc", "RPL.111XC0021:afc", "RPL.111XC0022:afc",
        "RPL.111XC0194:afc", "RPL.111XC0207:afc", "RPL.111XC0226:afc",
        "RPL.111XC0228:afc", "RPL.111XC0230:afc",
        # NaOH (own)
        "RPL.111XC0032:afc", "RPL.111XC0033:afc", "RPL.111XC0034:afc",
        # NaOH (purchase)
        "RPL.111XC0035:afc", "RPL.111XC0036:afc", "RPL.111XC0037:afc",
        # NaOH (total; 0243/0245/0247 are monthly consumption)
        "RPL.111XC0038:afc", "RPL.111XC0039:afc", "RPL.111XC0040:afc",
        "RPL.111XC0197:afc", "RPL.111XC0243:afc", "RPL.111XC0245:afc",
        "RPL.111XC0247:afc",
        # Oxygen
        "RPL.111XC0026:afc", "RPL.111XC0027:afc", "RPL.111XC0028:afc",
        # Sulfuric acid (H2SO4)
        "RPL.111XC0029:afc", "RPL.111XC0030:afc", "RPL.111XC0031:afc",
        # White liquor consumption (m3/ADt; 0234/0236/0238/0240 monthly)
        "RPL.111XC0009:afc", "RPL.111XC0010:afc", "RPL.111XC0011:afc",
        "RPL.111XC0012:afc", "RPL.111XC0234:afc", "RPL.111XC0236:afc",
        "RPL.111XC0238:afc", "RPL.111XC0240:afc",
        # White liquor consumption (tAA/ADt)
        "RPL.111XC0013:afc", "RPL.111XC0014:afc", "RPL.111XC0015:afc",
        "RPL.111XC0016:afc",
        # Utility power consumption (0261/0263/0265 monthly)
        "RPL.111XC0050:afc", "RPL.111XC0051:afc", "RPL.111XC0052:afc",
        "RPL.111XC0053:afc", "RPL.111XC0054:afc", "RPL.111XC0055:afc",
        "RPL.111XC0056:afc", "RPL.111XC0057:afc", "RPL.111XC0261:afc",
        "RPL.111XC0263:afc", "RPL.111XC0265:afc",
        # Utility LP steam consumption
        "RPL.111XC0060:afc", "RPL.111XC0061:afc", "RPL.111XC0062:afc",
        "RPL.111XC0063:afc", "RPL.111XC0064:afc", "RPL.111XC0065:afc",
        "RPL.111XC0066:afc", "RPL.111XC0067:afc",
        # Utility MP steam consumption
        "RPL.111XC0068:afc", "RPL.111XC0069:afc", "RPL.111XC0070:afc",
        "RPL.111XC0071:afc", "RPL.111XC0072:afc", "RPL.111XC0073:afc",
        "RPL.111XC0074:afc", "RPL.111XC0075:afc",
        # Utility total steam consumption (0317/0319/0321/0323 monthly)
        "RPL.111XC0076:afc", "RPL.111XC0077:afc", "RPL.111XC0078:afc",
        "RPL.111XC0079:afc", "RPL.111XC0080:afc", "RPL.111XC0081:afc",
        "RPL.111XC0082:afc", "RPL.111XC0083:afc", "RPL.111XC0317:afc",
        "RPL.111XC0319:afc", "RPL.111XC0321:afc", "RPL.111XC0323:afc",
        # Utility water consumption (0271/0273/0275 monthly)
        "RPL.111XC0084:afc", "RPL.111XC0085:afc", "RPL.111XC0086:afc",
        "RPL.111XC0087:afc", "RPL.111XC0088:afc", "RPL.111XC0089:afc",
        "RPL.111XC0090:afc", "RPL.111XC0091:afc", "RPL.111XC0271:afc",
        "RPL.111XC0273:afc", "RPL.111XC0275:afc",
        # Woodchip consumption: total, chip-meter average, then consumption
        "RPL.111XC0220:afc", "RPL.111XC0048:afc", "RPL.111XC0211:afc",
        "RPL.111XC0213:afc", "RPL.111XC0215:afc", "RPL.111XC0217:afc",
        "RPL.111XC0219:afc",
        # Yield - cooking
        "RPL.111XC0130:afc", "RPL.111XC0131:afc", "RPL.111XC0132:afc",
        "RPL.111XC0133:afc",
    ]

    def _tag_column(tag):
        # Normalise the raw PI tag into a safe SQL column identifier.
        name = re.sub('[^0-9a-zA-Z]+', '_', "tag_{}".format(tag.lower()))
        return Column(name, Float())

    self.metadata = MetaData()
    self.pi_data = Table(
        table_name, self.metadata,
        Column('krc_pulp_pi_id', BigInteger(), primary_key=True),
        Column('datetime', DateTime(), unique=True, nullable=False),
        *[_tag_column(tag) for tag in tags])
    self.metadata.create_all(self.engine)
def create_rz_pulp_pi_table(self, table_name):
    """Create the table holding raw pulp PI tags for the Rizhao mill.

    One ``Float`` column is created per PI tag.  The column name is the
    tag prefixed with ``tag_``, lower-cased, with every run of
    non-alphanumeric characters collapsed to ``_`` (for example
    ``OE.PD1.PULP.PRODUCTION`` -> ``tag_oe_pd1_pulp_production``).

    :param table_name: name of the table to create in ``self.engine``
    """
    # PI tags listed in column order.  The order below fixes the table's
    # column order, so do not reorder entries.  Group comments mirror the
    # original hand-written layout.
    tags = [
        # Production
        "OE.PD1.PULP.PRODUCTION", "OE.PD2.PULP.PRODUCTION",
        "OE.PD3.PULP.PRODUCTION", "OE.PL11.PULP.PRODUCTION",
        "OE.PL12.PULP.PRODUCTION",
        # ClO2
        "OE.PL11.CHE.CLO2", "OE.PL12.CHE.CLO2",
        # Defoamer
        "OE.PL11.Defoamer", "OE.PL12.Defoamer",
        # H2O2 (tag spelled "H202" upstream; kept verbatim)
        "OE.PL11.CHE.H202", "OE.PL12.CHE.H202",
        # NaOH own
        "OE.PL12.CHE.NaOH.OWN",
        # NaOH purchase
        "OE.PL11.CHE.NaOH.PUR", "OE.PL12.CHE.NaOH.PUR",
        # NaOH total
        "OE.PL11.CHE.NaOH.TOT", "OE.PL12.CHE.NaOH.TOT",
        # O2
        "OE.PL11.CHE.O2", "OE.PL12.CHE.O2",
        # H2SO4
        "OE.PL11.CHE.H2SO4", "OE.PL12.CHE.H2SO4",
        # White liquor consumption (m3)
        "OE.PL11.CHE.WL", "OE.PL12.CHE.WL",
        # Utility power consumption
        "OE.PD1.UT.POWER.TOT", "OE.PD2.UT.POWER.TOT",
        "OE.PD3.UT.POWER.TOT", "OE.PL11.UT.PR.POWER",
        "OE.PL12.UT.PR.POWER",
        # LP steam
        "OE.PD1.UT.LP.STEAM", "OE.PD2.UT.LP.STEAM",
        "OE.PD3.UT.LP.STEAM", "OE.PL11.UT.LP.STEAM",
        "OE.PL12.UT.LP.STEAM",
        # MP steam
        "OE.PL11.UT.MP.STEAM", "OE.PL12.UT.MP.STEAM",
        # Total steam
        "OE.PD1.UT.TOT.STEAM", "OE.PD2.UT.TOT.STEAM",
        "OE.PD3.UT.TOT.STEAM", "OE.PL11.UT.TOT.STEAM",
        "OE.PL12.UT.TOT.STEAM",
        # Utility water consumption
        "OE.PD1.UT.PR.WATER", "OE.PD2.UT.PR.WATER",
        "OE.PD3.UT.PR.WATER", "OE.PL11.UT.PR.WATER",
        "OE.PL12.UT.PR.WATER",
        # Woodchip consumption
        "OE.PL11.CHIPS.GMT", "OE.PL12.CHIPS.GMT",
        # Yield bleaching
        "OE.PL11.CHIPS.BLC.YIELD", "OE.PL12.CHIPS.BLC.YIELD",
        # Yield cooking (digester yield)
        "OE.PL11.CHIPS.DIG.YIELD", "OE.PL12.CHIPS.DIG.YIELD",
        # Yield screening
        "OE.PL11.CHIPS.SCR.YIELD", "OE.PL12.CHIPS.SCR.YIELD",
    ]

    def _tag_column(tag):
        # Normalise the raw PI tag into a safe SQL column identifier.
        name = re.sub('[^0-9a-zA-Z]+', '_', "tag_{}".format(tag.lower()))
        return Column(name, Float())

    self.metadata = MetaData()
    self.pi_data = Table(
        table_name, self.metadata,
        Column('rz_pulp_pi_id', BigInteger(), primary_key=True),
        Column('datetime', DateTime(), unique=True, nullable=False),
        *[_tag_column(tag) for tag in tags])
    self.metadata.create_all(self.engine)
from sqlalchemy.ext.declarative import declarative_base, declared_attr from sqlalchemy.dialects import sqlite from sqlalchemy import Column, Integer, BigInteger, String from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from sqlalchemy.types import DateTime from sqlalchemy.sql import expression from sqlalchemy.ext.compiler import compiles from sqlalchemy import event from sqlalchemy.engine import Engine from sqlalchemy.orm.exc import NoResultFound # SO much boilerplate BigIntegerType = BigInteger() BigIntegerType = BigIntegerType.with_variant(sqlite.INTEGER(), "sqlite") # create a custom utcnow function class utcnow(expression.FunctionElement): type = DateTime() @compiles(utcnow, "sqlite") def sqlite_utcnow(element, compiler, **kw): return "CURRENT_TIMESTAMP" class AppMixin(object): """
class SQLABaseObject(object):
    # Mixin base for SQLAlchemy models serialised via a colander-style
    # ``__schema__`` attached to each concrete subclass.
    # NOTE(review): ``key`` (used inside the Column ``info`` dicts) and
    # ``NoSchemaException`` must be defined elsewhere in this module —
    # they are not visible in this chunk.

    # When True, this object participates in tracking the last time its
    # collection was accessed (see is_timestamp_trackeable).
    _track_timestamp = True

    id = Column(Integer(), primary_key=True, info={key: {'repr': True}})
    parent_id = Column(String(), nullable=False, index=True, info={key: {
        'exclude': True
    }})
    # NOTE(review): declared BigInteger() but defaulted to a datetime and
    # used as a datetime by last_modified_timestamp — confirm the intended
    # column type before relying on it.
    last_modified = Column(BigInteger(),
                           nullable=False,
                           default=lambda: datetime.datetime.utcnow())
    # Soft-delete flag; indexed so filtered queries stay cheap.
    deleted = Column(Boolean(), default=False, index=True, info={key: {
        'exclude': True
    }})

    @property
    def is_timestamp_trackeable(self):
        """True if this object will be used to track the last time the
        collection it belongs to has been accessed"""
        return self._track_timestamp

    @property
    def last_modified_timestamp(self):
        # Interpret last_modified as UTC and return a POSIX timestamp.
        return self.last_modified.replace(
            tzinfo=datetime.timezone.utc).timestamp()

    @classmethod
    def deserialize(cls, cstruct, context=None):
        """Build an instance from the wire structure via the class schema.

        Raises NoSchemaException when the subclass declares no __schema__.
        """
        try:
            appstruct = cls.__schema__.deserialize(cstruct)
            return cls.__schema__.objectify(appstruct, context)
        except AttributeError:
            raise NoSchemaException(cls)

    @classmethod
    def serialize(cls, appstruct):
        """Serialise *appstruct* via the class schema.

        Raises NoSchemaException when the subclass declares no __schema__.
        """
        try:
            return cls.__schema__.serialize(appstruct)
        except AttributeError:
            raise NoSchemaException(cls)

    def __repr__(self):
        return '{class_name}{attributes}'.format(
            class_name=self.__class__.__name__,
            attributes=self.__repr_attributes())

    def __repr_attributes(self):
        # Only schema children flagged with repr=True are shown.
        return dict([(att.name, getattr(self, att.name))
                     for att in self.__schema__.children
                     if hasattr(att, 'repr') and att.repr])

    @staticmethod
    def global_preparer(value):
        # Default pass-through preparer; subclasses may override.
        return value

    @staticmethod
    def global_validator(node, value):
        # Default always-valid validator; subclasses may override.
        return True
from superset.models.core import Database from superset.utils.core import get_example_database logger = logging.getLogger(__name__) CHUNKSIZE = 512 VARCHAR = re.compile(r"VARCHAR\((\d+)\)", re.IGNORECASE) JSON_KEYS = {"params", "template_params"} type_map = { "BOOLEAN": Boolean(), "VARCHAR": String(255), "STRING": String(255), "TEXT": Text(), "BIGINT": BigInteger(), "FLOAT": Float(), "FLOAT64": Float(), "DOUBLE PRECISION": Float(), "DATE": Date(), "DATETIME": DateTime(), "TIMESTAMP WITHOUT TIME ZONE": DateTime(timezone=False), "TIMESTAMP WITH TIME ZONE": DateTime(timezone=True), } def get_sqla_type(native_type: str) -> VisitableType: if native_type.upper() in type_map: return type_map[native_type.upper()] match = VARCHAR.match(native_type)
# Licenser including the issue of its valid conclusion and its pre- and # post-contractual effects is governed by the laws of Austria. Any disputes # concerning this License-Agreement including the issue of its valid conclusion # and its pre- and post-contractual effects are exclusively decided by the # competent court, in whose district STRG.AT GmbH has its registered seat, at # the discretion of STRG.AT GmbH also the competent court, in whose district the # Licensee has his registered seat, an establishment or assets. import re from sqlalchemy import Column, ForeignKey, BigInteger, Integer, UniqueConstraint from sqlalchemy.orm import backref, relationship from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.ext.orderinglist import ordering_list IdType = BigInteger() IdType = IdType.with_variant(Integer, 'sqlite') # taken from stackoverflow: # http://stackoverflow.com/a/1176023/44562 _first_cap_re = re.compile('(.)([A-Z][a-z]+)') _all_cap_re = re.compile('([a-z0-9])([A-Z])') def cls2tbl(cls): """ Converts a class (or a class name) to a table name. The class name is expected to be in *CamelCase*. The return value will be *seperated_by_underscores* and prefixed with an underscore. Omitting the underscore will yield the name of the class's :ref:`view <db_view>`. """ if isinstance(cls, type):
class MemberModel(Base):
    """ORM record tracking a cluster member, one row per unique server URI.

    ``update_time`` is an epoch-milliseconds heartbeat; ``version`` counts
    how many times the member has re-registered.
    """

    __tablename__ = "member_model"

    id = Column(BigInteger().with_variant(Integer, "sqlite"), primary_key=True)
    version = Column(BigInteger(), nullable=False)
    server_uri = Column(String(767), nullable=False, unique=True)
    update_time = Column(BigInteger(), nullable=False)
    uuid = Column(String(128), nullable=False, unique=True)

    @staticmethod
    def create_table(db_conn=None):
        """Ensure the backing table exists, optionally re-pointing the DB."""
        if db_conn is not None:
            global SQL_ALCHEMY_CONN
            SQL_ALCHEMY_CONN = db_conn
        prepare_db()
        table_exists = engine.dialect.has_table(engine, MemberModel.__tablename__)
        if not table_exists:
            Base.metadata.create_all(engine)

    @staticmethod
    @provide_session
    def cleanup(session=None):
        """Delete every member row."""
        session.query(MemberModel).delete()
        session.commit()

    @staticmethod
    @provide_session
    def update_member(server_uri, server_uuid, session=None):
        """Register *server_uri* or refresh its heartbeat.

        Raises if the URI is already registered under a different uuid.
        """
        existing = session.query(MemberModel) \
            .filter(MemberModel.server_uri == server_uri).first()
        if existing is None:
            fresh = MemberModel()
            fresh.version = 1
            fresh.server_uri = server_uri
            fresh.update_time = time.time_ns() / 1000000
            fresh.uuid = server_uuid
            session.add(fresh)
        else:
            if existing.uuid != server_uuid:
                raise Exception(
                    "The server uri '%s' is already exists in the storage!"
                    % server_uri)
            existing.version += 1
            existing.update_time = time.time_ns() / 1000000
        session.commit()

    @staticmethod
    @provide_session
    def get_living_members(ttl, session=None):
        """Members whose heartbeat is within *ttl* milliseconds of now."""
        rows = session.query(MemberModel) \
            .filter(MemberModel.update_time >= time.time_ns() / 1000000 - ttl) \
            .all()
        return [Member(row.version, row.server_uri, int(row.update_time))
                for row in rows]

    @staticmethod
    @provide_session
    def get_dead_members(ttl, session=None):
        """Members whose heartbeat is older than *ttl* milliseconds."""
        rows = session.query(MemberModel) \
            .filter(MemberModel.update_time < time.time_ns() / 1000000 - ttl) \
            .all()
        return [Member(row.version, row.server_uri, int(row.update_time))
                for row in rows]

    @staticmethod
    @provide_session
    def delete_member(server_uri=None, server_uuid=None, session=None):
        """Delete one member, selected by exactly one of uri or uuid."""
        filters = []
        if server_uri:
            filters.append(MemberModel.server_uri == server_uri)
        if server_uuid:
            filters.append(MemberModel.uuid == server_uuid)
        if len(filters) != 1:
            raise Exception(
                "Please provide exactly one param, server_uri or server_uuid")
        target = session.query(MemberModel).filter(*filters).first()
        if target is not None:
            session.delete(target)
        session.commit()

    @staticmethod
    @provide_session
    def clear_dead_members(ttl, session=None):
        """Bulk-delete every member with an expired heartbeat."""
        session.query(MemberModel) \
            .filter(MemberModel.update_time < time.time_ns() / 1000000 - ttl) \
            .delete()
        session.commit()
class BlockchainWallet(ModelBase):
    # A signing wallet: stores the Fernet-encrypted private key, the
    # derived checksum address, and wei top-up bookkeeping.
    __tablename__ = 'blockchain_wallet'

    # Checksum address derived from the private key (set by the setters).
    address = Column(String(), index=True, unique=True)
    # Fernet-encrypted private key; never store the plaintext key.
    _encrypted_private_key = Column(String())
    wei_target_balance = Column(BigInteger())
    wei_topup_threshold = Column(BigInteger())
    last_topup_task_uuid = Column(String())

    tasks = relationship('BlockchainTask',
                         backref='signing_wallet',
                         lazy=True,
                         foreign_keys='BlockchainTask.signing_wallet_id')

    transactions = relationship('BlockchainTransaction',
                                backref='signing_wallet',
                                lazy=True,
                                foreign_keys='BlockchainTransaction.signing_wallet_id')

    @hybrid_property
    def encrypted_private_key(self):
        return self._encrypted_private_key

    @encrypted_private_key.setter
    def encrypted_private_key(self, value):
        # Store the ciphertext, then decrypt once to derive the address.
        self._encrypted_private_key = value
        private_key = self.decrypt_private_key(value)
        self.address = self.address_from_private_key(private_key)

    @hybrid_property
    def private_key(self):
        # Plaintext key is never persisted; decrypt on demand.
        return self.decrypt_private_key(self._encrypted_private_key)

    @private_key.setter
    def private_key(self, value):
        # Encrypting via the other setter already derives the address;
        # it is set again here directly from the plaintext key.
        self.encrypted_private_key = self.encrypt_private_key(value)
        self.address = self.address_from_private_key(value)

    @staticmethod
    def decrypt_private_key(encrypted_private_key):
        """Decrypt a Fernet token back to the utf-8 private-key string."""
        encrypted_private_key_bytes = BlockchainWallet._bytify_if_required(encrypted_private_key)
        return BlockchainWallet._cipher_suite() \
            .decrypt(encrypted_private_key_bytes).decode('utf-8')

    @staticmethod
    def encrypt_private_key(private_key):
        """Encrypt the private key and return the Fernet token as str."""
        private_key_bytes = BlockchainWallet._bytify_if_required(private_key)
        return BlockchainWallet._cipher_suite()\
            .encrypt(private_key_bytes).decode('utf-8')

    @staticmethod
    def _bytify_if_required(string):
        # Fernet operates on bytes; encode str inputs as utf-8.
        return string if isinstance(string, bytes) else string.encode('utf-8')

    @staticmethod
    def _cipher_suite():
        # Fernet key derived from the app secret via keccak; b64 because
        # Fernet requires a urlsafe-base64 32-byte key.
        fernet_encryption_key = base64.b64encode(keccak(text=config.SECRET_KEY))
        return Fernet(fernet_encryption_key)

    @staticmethod
    def address_from_private_key(private_key):
        """Derive the checksum address from a hex string or raw bytes key."""
        if isinstance(private_key, str):
            private_key = bytes.fromhex(private_key.replace('0x', ''))

        return keys.PrivateKey(private_key).public_key.to_checksum_address()

    def __init__(self, private_key=None, wei_target_balance=None, wei_topup_threshold=None):
        # When no key is supplied, generate one from 4096 random bytes
        # hashed through keccak.
        if private_key:
            self.private_key = private_key
        else:
            self.private_key = Web3.toHex(keccak(os.urandom(4096)))

        self.wei_target_balance = wei_target_balance
        self.wei_topup_threshold = wei_topup_threshold
class EventModel(Base):
    """ORM model persisting notification events.

    ``version`` is an auto-increment primary key and therefore provides a
    global ordering of events; ``create_time`` is epoch milliseconds.
    """

    __tablename__ = "event_model"

    version = Column(BigInteger().with_variant(Integer, "sqlite"), primary_key=True)
    key = Column(String(1024), nullable=False)
    value = Column(Text())
    event_type = Column(String(1024), server_default="UNDEFINED")
    context = Column(Text())
    namespace = Column(String(1024))
    sender = Column(String(1024))
    create_time = Column(BigInteger(), nullable=False)
    uuid = Column(String(40), nullable=False, unique=True)

    @staticmethod
    @provide_session
    def add_event(event: BaseEvent, uuid, session=None):
        """Persist *event*, stamping create_time with the current server time.

        Returns the stored row converted back to a BaseEvent.
        """
        event_model = EventModel()
        event_model.key = event.key
        event_model.value = event.value
        event_model.event_type = event.event_type
        event_model.context = event.context
        event_model.namespace = event.namespace
        event_model.create_time = int(time.time() * 1000)
        event_model.uuid = uuid
        event_model.sender = event.sender
        session.add(event_model)
        session.commit()
        return event_model_to_event(event_model)

    @staticmethod
    @provide_session
    def list_events(key: Union[str, Tuple[str]],
                    version: int = None,
                    event_type: str = None,
                    start_time: int = None,
                    namespace: str = None,
                    sender: str = None,
                    session=None):
        """Query events by *key* (required) plus optional filters.

        Empty-string filters are treated as unset; a filter equal to
        ANY_CONDITION matches everything.  Raises when *key* is empty.
        """
        # Normalise empty strings to None (unset).
        key = None if key == "" else key
        event_type = None if event_type == "" else event_type
        namespace = None if namespace == "" else namespace
        sender = None if sender == "" else sender
        if isinstance(key, str):
            key = (key, )
        elif isinstance(key, Iterable):
            key = tuple(key)
        if key is None:
            raise Exception('key cannot be empty.')
        conditions = []
        if event_type is not None and event_type != ANY_CONDITION:
            conditions.append(EventModel.event_type == event_type)
        if start_time is not None and start_time > 0:
            conditions.append(EventModel.create_time >= start_time)
        if namespace is not None and ANY_CONDITION != namespace:
            conditions.append(EventModel.namespace == namespace)
        if sender is not None and ANY_CONDITION != sender:
            conditions.append(EventModel.sender == sender)
        # BUG FIX: version defaults to None and `None > 0` raises TypeError
        # on Python 3 — guard against None before comparing.
        if version is not None and version > 0:
            conditions.append(EventModel.version > version)
        if ANY_CONDITION not in key:
            conditions.append(EventModel.key.in_(key))
        event_model_list = session.query(EventModel).filter(*conditions).all()
        return [
            event_model_to_event(event_model)
            for event_model in event_model_list
        ]

    @staticmethod
    @provide_session
    def list_all_events(start_time: int, session=None):
        """Return every event created at or after *start_time* (millis)."""
        conditions = [EventModel.create_time >= start_time]
        event_model_list = session.query(EventModel).filter(*conditions).all()
        return [
            event_model_to_event(event_model)
            for event_model in event_model_list
        ]

    @staticmethod
    @provide_session
    def list_all_events_from_version(start_version: int,
                                     end_version: int = None,
                                     session=None):
        """Return events with version in (start_version, end_version]."""
        conditions = [EventModel.version > start_version]
        if end_version is not None and end_version > 0:
            conditions.append(EventModel.version <= end_version)
        event_model_list = session.query(EventModel).filter(*conditions).all()
        return [
            event_model_to_event(event_model)
            for event_model in event_model_list
        ]

    @staticmethod
    @provide_session
    def sync_event(event: BaseEvent, uuid, session=None):
        """Persist *event* keeping its original create_time (replication path)."""
        event_model = EventModel()
        event_model.key = event.key
        event_model.value = event.value
        event_model.event_type = event.event_type
        event_model.context = event.context
        event_model.namespace = event.namespace
        event_model.create_time = event.create_time
        event_model.uuid = uuid
        event_model.sender = event.sender
        session.add(event_model)
        session.commit()
        return event_model_to_event(event_model)

    @staticmethod
    @provide_session
    def get_latest_version(session=None):
        """Return the highest stored version.

        NOTE(review): raises AttributeError when the table is empty
        (``first()`` returns None); callers appear to guarantee at least
        one event exists — confirm before changing this behaviour.
        """
        return session.query(EventModel).order_by(EventModel.version.desc()) \
            .limit(1).first().version

    @staticmethod
    def create_table(db_conn=None):
        """Ensure the backing table exists, optionally re-pointing the DB."""
        if db_conn is not None:
            global SQL_ALCHEMY_CONN
            SQL_ALCHEMY_CONN = db_conn
        prepare_db()
        if not engine.dialect.has_table(engine, EventModel.__tablename__):
            Base.metadata.create_all(engine)

    @staticmethod
    @provide_session
    def cleanup(session=None):
        """Delete every stored event."""
        session.query(EventModel).delete()
        session.commit()
class Payment(DBBASE, PersistentACLMixin):
    """
    Payment entry
    """
    __tablename__ = 'payment'
    __table_args__ = default_table_args
    id = Column(Integer, primary_key=True)
    created_at = Column(
        DateTime(),
        info={'colanderalchemy': {
            'exclude': True,
            'title': u"Créé(e) le",
        }},
        default=datetime.datetime.now,
    )
    updated_at = Column(
        DateTime(),
        info={
            'colanderalchemy': {
                'exclude': True,
                'title': u"Mis(e) à jour le",
            }
        },
        default=datetime.datetime.now,
        onupdate=datetime.datetime.now)
    mode = Column(String(50), info={
        'colanderalchemy': {
            'title': u"Mode de paiement",
            'validator': forms.get_deferred_select_validator(
                PaymentMode, id_key='label'),
            'missing': colander.required,
        }
    })
    # Integer amount scaled to 5 decimal places (see AmountType(5)).
    amount = Column(
        BigInteger(),
        info={
            'colanderalchemy': {
                "title": u"Montant",
                'missing': colander.required,
                "typ": AmountType(5)
            }
        },
    )
    remittance_amount = Column(
        String(255),
        info={
            'colanderalchemy': {
                'title': u"Identifiant de remise en banque",
                'missing': colander.required,
            }
        },
    )
    date = Column(
        DateTime(),
        info={
            'colanderalchemy': {
                'title': u"Date de remise",
                'missing': colander.required,
            }
        },
        default=datetime.datetime.now,
    )
    # Whether this payment was already exported (to accounting, presumably
    # — confirm with the export views).
    exported = Column(Boolean(), default=False)
    task_id = Column(Integer,
                     ForeignKey('task.id', ondelete="cascade"),
                     info={
                         'colanderalchemy': {
                             'title': u"Identifiant du document",
                             'missing': colander.required,
                         }
                     })
    bank_id = Column(ForeignKey('bank_account.id'),
                     info={
                         'colanderalchemy': {
                             'title': u"Compte en banque",
                             'missing': colander.required,
                             'validator': forms.get_deferred_select_validator(
                                 BankAccount),
                         }
                     })
    tva_id = Column(ForeignKey('tva.id'),
                    info={
                        'colanderalchemy': {
                            'title': u"Tva associée à ce paiement",
                            'validator': forms.get_deferred_select_validator(
                                Tva),
                            'missing': colander.drop,
                        }
                    },
                    nullable=True)
    user_id = Column(
        ForeignKey('accounts.id'),
        info={
            'colanderalchemy': {
                'title': u"Utilisateur",
                'missing': colander.required,
            }
        },
    )
    user = relationship(
        "User",
        info={'colanderalchemy': {'exclude': True}},
    )
    bank = relationship(
        "BankAccount",
        back_populates='payments',
        info={'colanderalchemy': {'exclude': True}})
    tva = relationship("Tva", info={'colanderalchemy': {'exclude': True}})
    task = relationship(
        "Task",
        primaryjoin="Task.id==Payment.task_id",
    )
    # Formatting precision
    precision = 5

    # Usefull aliases
    @property
    def invoice(self):
        return self.task

    @property
    def parent(self):
        return self.task

    # Simple function
    def get_amount(self):
        return self.amount

    def __unicode__(self):
        # BUG FIX: the format string was missing its closing '>' and
        # embedded accidental line-continuation whitespace.
        return u"<Payment id:{s.id} task_id:{s.task_id} amount:{s.amount} " \
               u"mode:{s.mode} date:{s.date}>".format(s=self)
# NOTE(review): the ``Table(`` opening and the variable it is assigned to
# (``bw_cache``, per ``upgrade`` below) sit outside this chunk; the lines
# below are the column arguments of the ``bw_usage_cache`` table.
    'bw_usage_cache', meta,
    # Soft-delete / audit bookkeeping columns.
    Column('created_at', DateTime(timezone=False)),
    Column('updated_at', DateTime(timezone=False)),
    Column('deleted_at', DateTime(timezone=False)),
    Column('deleted', Boolean(create_constraint=True, name=None)),
    Column('id', Integer(), primary_key=True, nullable=False),
    Column('instance_id', Integer(), nullable=False),
    Column(
        'network_label',
        String(length=255, convert_unicode=False,
               assert_unicode=None, unicode_error=None,
               _warn_on_bytestring=False)),
    Column('start_period', DateTime(timezone=False), nullable=False),
    Column('last_refreshed', DateTime(timezone=False)),
    # Byte counters; BigInteger so large transfer totals cannot overflow.
    Column('bw_in', BigInteger()),
    Column('bw_out', BigInteger()))


def upgrade(migrate_engine):
    """Create the ``bw_usage_cache`` table against *migrate_engine*.

    Upgrade operations go here.  Don't create your own engine;
    bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    try:
        bw_cache.create()
    except Exception:
        # Log the table definition and the failure, drop any partially
        # created table, then re-raise so the migration is marked failed.
        logging.info(repr(bw_cache))
        logging.exception('Exception while creating table')
        meta.drop_all(tables=[bw_cache])
        raise
import uuid
import xml.etree.ElementTree

from sqlalchemy import BigInteger
from sqlalchemy.dialects import sqlite

# without setting this explicitly, we get a warning that this option
# will default to disabled in future versions (due to incurring a lot
# of overhed). We aren't using the relevant functionality, so let's
# just opt-in to the change now:
app.config.update(SQLALCHEMY_TRACK_MODIFICATIONS=False)
db = SQLAlchemy(app)

# Variant key type: sqlite uses plain INTEGER primary keys (required for
# autoincrement there), while other backends keep BigInteger.
BigIntegerType = BigInteger().with_variant(
    sqlite.INTEGER(), 'sqlite')


def init_db(uri=None):
    """Start up the DB connection.

    `uri` is the uri to use for the database.  If it is None, the uri
    from the config file will be used.
    """
    effective_uri = cfg.get('database', 'uri') if uri is None else uri
    app.config.update(SQLALCHEMY_DATABASE_URI=effective_uri)

# A joining table for project's access to networks, which have a many to many
# relationship:
from tokenserver.assignment import INodeAssignment
from zope.interface import implements

metadata = MetaData()

# One row per (service, email) node assignment; rows are superseded rather
# than updated in place (``replaced_at`` marks the old row).
users = Table(
    "users",
    metadata,
    Column("uid", Integer(), primary_key=True, autoincrement=True,
           nullable=False),
    Column("service", String(32), nullable=False),
    Column("email", String(255), nullable=False),
    Column("generation", BigInteger(), nullable=False),
    Column("client_state", String(32), nullable=False),
    # Epoch-based integer timestamps; presumably milliseconds — confirm
    # against the writer of these columns.
    Column("created_at", BigInteger(), nullable=False),
    Column("replaced_at", BigInteger(), nullable=True),
    Index('lookup_idx', 'email', 'service', 'created_at'),
    Index('clientstate_idx', 'email', 'service', 'client_state',
          unique=True),
)

# NOTE(review): this SQL literal continues past the end of this chunk.
_GET_USER_RECORDS = sqltext("""\
select uid, generation, client_state
from users
where email = :email
def load_dialect_impl(self, dialect):
    """Pick the concrete column type for the active dialect.

    PostgreSQL gets a true 64-bit ``BigInteger``; every other backend
    falls back to a plain ``Integer``.
    """
    return BigInteger() if dialect.name == 'postgresql' else Integer()
class Users(Base):
    """ORM model for the ``users`` table."""

    __tablename__ = "users"

    # Surrogate primary key.
    id = Column(BigInteger(), primary_key=True)
    # Free-form external identifier and display name; presumably supplied
    # by the upstream auth/chat platform — confirm with the callers.
    user_id = Column(Text())
    username = Column(Text())
from sqlalchemy import BigInteger, Integer, Column, DateTime, func
from sqlalchemy.dialects import postgresql, sqlite
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.ext.declarative import declarative_base

# Declarative base shared by every model in this module.
Base = declarative_base()

# Portable 64-bit key type: native BIGINT on postgresql, plain INTEGER on
# sqlite (sqlite only autoincrements INTEGER primary keys).
BigIntegerType = BigInteger()
BigIntegerType = BigIntegerType.with_variant(postgresql.BIGINT(), 'postgresql')
BigIntegerType = BigIntegerType.with_variant(sqlite.INTEGER(), 'sqlite')

# Thread-local session registry; no engine is bound here, so the
# sessionmaker must be configured elsewhere before first use.
_sessionmaker = sessionmaker()
session = scoped_session(_sessionmaker)


class CommonColumnsMixin():
    """ Some common attributes for tables. """

    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Server-side timestamps; ``updated_at`` refreshes on every UPDATE.
    created_at = Column(DateTime, default=func.now())
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now())
class Artifact(BASE, ArtifactBase):
    """ORM model for glare artifacts, plus dict serialization helpers."""

    __tablename__ = 'glare_artifacts'
    __table_args__ = (Index('ix_glare_artifact_name_and_version', 'name',
                            'version_prefix', 'version_suffix'),
                      Index('ix_glare_artifact_type', 'type_name'),
                      Index('ix_glare_artifact_status', 'status'),
                      Index('ix_glare_artifact_owner', 'owner'),
                      Index('ix_glare_artifact_visibility', 'visibility'),
                      Index('ix_glare_artifact_display_name',
                            'display_type_name'),
                      {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})
    # Timestamp columns are managed internally, never set by API clients.
    __protected_attributes__ = set(["created_at", "updated_at"])

    id = Column(String(36), primary_key=True,
                default=lambda: uuidutils.generate_uuid())
    name = Column(String(255), nullable=False)
    type_name = Column(String(255), nullable=False)
    # Semantic version is split across three columns and exposed as a
    # single composite attribute (``version``) below.
    version_prefix = Column(BigInteger().with_variant(Integer, "sqlite"),
                            nullable=False)
    version_suffix = Column(String(255))
    version_meta = Column(String(255))
    version = composite(semver_db.DBVersion, version_prefix,
                        version_suffix, version_meta,
                        comparator_factory=semver_db.VersionComparator)
    description = Column(Text())
    visibility = Column(String(32), nullable=False)
    status = Column(String(32), nullable=False)
    owner = Column(String(255))
    created_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False)
    updated_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False, onupdate=lambda: timeutils.utcnow())
    activated_at = Column(DateTime)
    display_type_name = Column(String(255), nullable=True)

    def to_dict(self):
        """Serialize the artifact to a plain dict, folding the composite
        version, tags, properties and blobs into ordinary entries."""
        d = super(Artifact, self).to_dict()
        # Collapse the three version columns into one readable string.
        d.pop('version_prefix')
        d.pop('version_suffix')
        d.pop('version_meta')
        d['version'] = str(self.version)
        # parse tags
        tags = []
        for tag in self.tags:
            tags.append(tag.value)
        d['tags'] = tags
        # parse properties
        for prop in self.properties:
            prop_value = _parse_property_value(prop)
            if prop.position is not None:
                # list-valued property: ``position`` orders the elements
                if prop.name not in d:
                    # create new list
                    d[prop.name] = []
                # insert value in position
                d[prop.name].insert(prop.position, prop_value)
            elif prop.key_name is not None:
                # dict-valued property keyed by ``key_name``
                if prop.name not in d:
                    # create new dict
                    d[prop.name] = {}
                # insert value in the dict
                d[prop.name][prop.key_name] = prop_value
            else:
                # make scalar
                d[prop.name] = prop_value
        # parse blobs (same shape rules as properties, minus positions)
        for blob in self.blobs:
            blob_value = _parse_blob_value(blob)
            if blob.key_name is not None:
                if blob.name not in d:
                    # create new dict
                    d[blob.name] = {}
                # insert value in the dict
                d[blob.name][blob.key_name] = blob_value
            else:
                # make scalar
                d[blob.name] = blob_value
        return d
from sqlalchemy import BigInteger
from sqlalchemy.dialects import postgresql, mysql, sqlite

# Dialect-aware 64-bit key type: native BIGINT where available, INTEGER
# on sqlite (required for autoincrementing primary keys there).
KeyInteger = (BigInteger()
              .with_variant(postgresql.BIGINT(), 'postgresql')
              .with_variant(mysql.BIGINT(), 'mysql')
              .with_variant(sqlite.INTEGER(), 'sqlite'))


class SABase(object):
    """
    Base class for all the DB mapper objects.
    """

    def _commit(self, session, batch, merge=False):
        # ``merge`` re-attaches a detached instance; otherwise stage as new.
        register = session.merge if merge else session.add
        register(self)
        if batch:
            # Batch mode: push SQL now, let the caller commit later.
            return session.flush()
        session.commit()

    def commit_to_db(self, session, batch=False):
        """
        Commit the DB object/row to the database.

        @type session: sqlalchemy.orm.scoping.ScopedSession object
        @param session: SQLAlch session to commit row to.
        """
        self._commit(session, batch)
class PaymentLine(DBBASE):
    """
    payments lines
    """
    __tablename__ = 'estimation_payment'
    __table_args__ = default_table_args
    id = Column(
        Integer,
        primary_key=True,
        nullable=False,
    )
    task_id = Column(
        Integer,
        ForeignKey('estimation.id', ondelete="cascade"),
        info={'colanderalchemy': {
            'title': u"Identifiant du document",
        }},
    )
    # Display order of the line inside the estimation document.
    order = Column(Integer,
                   info={'colanderalchemy': {'title': u"Ordre"}},
                   default=1)
    description = Column(
        Text,
        info={'colanderalchemy': {'title': u"Description"}},
    )
    # Integer amount scaled to 5 decimal places (see __json__).
    amount = Column(
        BigInteger(),
        info={'colanderalchemy': {'title': u"Montant"}},
    )
    date = Column(Date(),
                  info={'colanderalchemy': {"title": u"Date"}},
                  default=datetime.date.today)
    task = relationship(
        "Estimation",
        info={'colanderalchemy': {'exclude': True}},
    )

    def duplicate(self):
        """
        duplicate a paymentline
        """
        # The copy is re-dated to today and detached from any estimation.
        return PaymentLine(
            order=self.order,
            amount=self.amount,
            description=self.description,
            date=datetime.date.today(),
        )

    def __repr__(self):
        # BUG FIX: the format string was missing its closing '>' and
        # embedded accidental line-continuation whitespace.
        return u"<PaymentLine id:{s.id} task_id:{s.task_id} " \
               u"amount:{s.amount} date:{s.date}>".format(s=self)

    def __json__(self, request):
        """Return a JSON-serializable dict view of this payment line."""
        return dict(
            id=self.id,
            order=self.order,
            index=self.order,
            description=self.description,
            cost=integer_to_amount(self.amount, 5),
            amount=integer_to_amount(self.amount, 5),
            date=self.date.isoformat(),
            task_id=self.task_id,
        )