def inkoopRegels(minkgeg, mregel):
    """Enter one order line (regel ``mregel``) of a materials purchase order.

    ``minkgeg`` holds the order number at index 0, the supplier number at
    index 1 and supplier name/address fields at indexes 2-8 (assumed from the
    label text below -- confirm against the caller).  Shows a Qt input dialog,
    then writes the order header (first line only) and the order line to the
    ``bisystem`` PostgreSQL database.

    Returns ``check``: 0 when input is missing/aborted, 1 on success,
    2 when the entered article number does not exist.
    """
    if not minkgeg:
        check = 0
        return (check)
    minkordnr = minkgeg[0]
    mlevnr = minkgeg[1]

    class Widget(QDialog):
        """Modal input dialog for one purchase-order line."""

        def __init__(self, parent=None):
            super(Widget, self).__init__(parent)
            self.setWindowTitle('Bestelregels inkooporder materialen invoeren')
            self.setWindowIcon(QIcon('./images/logos/logo.jpg'))
            self.setFont(QFont('Arial', 10))
            # Order number: pre-filled and read-only.
            self.Inkoopordernummer = QLabel()
            inkorderEdit = QLineEdit(str(minkordnr))
            inkorderEdit.setDisabled(True)
            inkorderEdit.setFixedWidth(130)
            inkorderEdit.setFont(QFont("Arial", 10))
            inkorderEdit.textChanged.connect(self.inkorderChanged)
            # Article number: '2' or '#' followed by exactly 8 digits.
            self.BestelregelArtikel = QLabel()
            artEdit = QLineEdit()
            artEdit.setFixedWidth(130)
            artEdit.setFont(QFont("Arial", 10))
            artEdit.textChanged.connect(self.artChanged)
            reg_ex = QRegExp("^[2#]{1}[0-9]{8}$")
            input_validator = QRegExpValidator(reg_ex, artEdit)
            artEdit.setValidator(input_validator)
            # Order quantity: digits and '.' only.
            self.BestelHoeveelheid = QLabel()
            hoevEdit = QLineEdit()
            hoevEdit.setFixedWidth(130)
            hoevEdit.setFont(QFont("Arial", 10))
            hoevEdit.textChanged.connect(self.hoevChanged)
            reg_ex = QRegExp("^[0-9.]{0,12}$")
            input_validator = QRegExpValidator(reg_ex, hoevEdit)
            hoevEdit.setValidator(input_validator)
            # Unit purchase price.
            self.Inkoopeenheidsprijs = QLabel()
            prijsEdit = QLineEdit()
            prijsEdit.setFixedWidth(130)
            prijsEdit.setFont(QFont("Arial", 10))
            prijsEdit.textChanged.connect(self.prijsChanged)
            reg_ex = QRegExp("^[0-9.]{0,12}$")
            input_validator = QRegExpValidator(reg_ex, prijsEdit)
            prijsEdit.setValidator(input_validator)
            # Delivery window start/end, yyyy-mm-dd.
            self.Levering_start = QLabel()
            startEdit = QLineEdit('')
            startEdit.setCursorPosition(0)
            startEdit.setFixedWidth(130)
            startEdit.setFont(QFont("Arial", 10))
            startEdit.textChanged.connect(self.startChanged)
            reg_ex = QRegExp(
                '^[2]{1}[0]{1}[0-9]{2}[-]{1}[0-1]{1}[0-9]{1}[-]{1}[0-3]{1}[0-9]{1}$'
            )
            input_validator = QRegExpValidator(reg_ex, startEdit)
            startEdit.setValidator(input_validator)
            self.Levering_end = QLabel()
            endEdit = QLineEdit('')
            endEdit.setCursorPosition(0)
            endEdit.setFixedWidth(130)
            endEdit.setFont(QFont("Arial", 10))
            endEdit.textChanged.connect(self.endChanged)
            reg_ex = QRegExp(
                '^[2]{1}[0]{1}[0-9]{2}[-]{1}[0-1]{1}[0-9]{1}[-]{1}[0-3]{1}[0-9]{1}$'
            )
            input_validator = QRegExpValidator(reg_ex, endEdit)
            endEdit.setValidator(input_validator)
            # Layout.
            grid = QGridLayout()
            grid.setSpacing(20)
            lbl = QLabel()
            pixmap = QPixmap('./images/logos/verbinding.jpg')
            lbl.setPixmap(pixmap)
            grid.addWidget(lbl, 1, 0)
            self.setFont(QFont('Arial', 10))
            grid.addWidget(QLabel('Bestelling voor\nLeverancier: '+str(minkgeg[1])+\
                ',\n'+minkgeg[2]+' '+minkgeg[3]+',\n'+minkgeg[4]+' '+str(minkgeg[5])+\
                minkgeg[6]+',\n'+minkgeg[7]+' '+minkgeg[8]+'.\nOrderregel '+str(mregel)),
                1, 1, 1, 2)
            lbl1 = QLabel('Ordernummer')
            lbl1.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            grid.addWidget(lbl1, 5, 0)
            grid.addWidget(inkorderEdit, 5, 1)
            lbl2 = QLabel('Artikelnummer')
            lbl2.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            grid.addWidget(lbl2, 6, 0)
            grid.addWidget(artEdit, 6, 1)
            lbl3 = QLabel('Bestelhoeveelheid')
            lbl3.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            grid.addWidget(lbl3, 7, 0)
            grid.addWidget(hoevEdit, 7, 1)
            lbl4 = QLabel('Inkoopeenheidsprijs')
            lbl4.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            grid.addWidget(lbl4, 8, 0)
            grid.addWidget(prijsEdit, 8, 1)
            lbl5 = QLabel('Levering start jjjj-mm-dd')
            lbl5.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            grid.addWidget(lbl5, 9, 0)
            grid.addWidget(startEdit, 9, 1)
            lbl6 = QLabel('Levering eind jjjj-mm-dd')
            lbl6.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            grid.addWidget(lbl6, 10, 0)
            grid.addWidget(endEdit, 10, 1)
            logo = QLabel()
            pixmap = QPixmap('./images/logos/logo.jpg')
            logo.setPixmap(pixmap)
            grid.addWidget(logo, 5, 2, 1, 1, Qt.AlignRight)
            self.setLayout(grid)
            self.setGeometry(100, 100, 150, 150)
            applyBtn = QPushButton('Invoeren')
            applyBtn.clicked.connect(self.accept)
            grid.addWidget(applyBtn, 11, 2, 1, 1, Qt.AlignRight)
            applyBtn.setFont(QFont("Arial", 10))
            applyBtn.setFixedWidth(100)
            applyBtn.setStyleSheet(
                "color: black; background-color: gainsboro")
            sluitBtn = QPushButton('Sluiten')
            sluitBtn.clicked.connect(self.close)
            grid.addWidget(sluitBtn, 11, 1, 1, 2)
            sluitBtn.setFont(QFont("Arial", 10))
            sluitBtn.setFixedWidth(100)
            sluitBtn.setStyleSheet(
                "color: black; background-color: gainsboro")
            grid.addWidget(
                QLabel('\u00A9 2017 all rights reserved [email protected]'),
                12, 0, 1, 3, Qt.AlignCenter)

        # The textChanged handlers mirror the line edits into labels that the
        # return* accessors read back after the dialog has closed.
        def inkorderChanged(self, text):
            self.Inkoopordernummer.setText(text)

        def artChanged(self, text):
            self.BestelregelArtikel.setText(text)

        def hoevChanged(self, text):
            self.BestelHoeveelheid.setText(text)

        def prijsChanged(self, text):
            self.Inkoopeenheidsprijs.setText(text)

        def startChanged(self, text):
            self.Levering_start.setText(text)

        def endChanged(self, text):
            self.Levering_end.setText(text)

        def returninkorder(self):
            return self.Inkoopordernummer.text()

        def returnart(self):
            return self.BestelregelArtikel.text()

        def returnhoev(self):
            return self.BestelHoeveelheid.text()

        def returnprijs(self):
            return self.Inkoopeenheidsprijs.text()

        def returnstart(self):
            return self.Levering_start.text()

        def returnend(self):
            return self.Levering_end.text()

        @staticmethod
        def getData(parent=None):
            """Run the dialog modally and return
            [artikel, hoeveelheid, prijs, start, eind] as strings."""
            dialog = Widget(parent)
            dialog.exec_()
            return [dialog.returnart(), dialog.returnhoev(),
                    dialog.returnprijs(), dialog.returnstart(),
                    dialog.returnend()]

    window = Widget()
    data = window.getData()
    check = 1
    # First order line: also create the purchase-order header row.
    if mregel == 1 and data[0]:
        metadata = MetaData()
        orders_inkoop = Table(
            'orders_inkoop', metadata,
            Column('orderinkoopID', Integer(), primary_key=True),
            Column('leverancierID', None,
                   ForeignKey('leveranciers.c.leverancierID')),
            Column('besteldatum', String))
        engine = create_engine(
            'postgresql+psycopg2://postgres@localhost/bisystem')
        conn = engine.connect()
        # BUG FIX: removed a dead hand-sliced date string that swapped month
        # and day (yyyy-dd-mm); only the correct strftime value was ever used.
        mbestdatum = (
            datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))[0:10]
        ins = insert(orders_inkoop).values(orderinkoopID=minkordnr,
                                           leverancierID=mlevnr,
                                           besteldatum=mbestdatum)
        conn.execute(ins)
        conn.close()  # BUG FIX: was 'conn.close' -- attribute access, never called
    # Validate dialog results; abort with check = 0 on any missing field.
    if data[0]:
        check = 1
        martikelnr = int(data[0])
    else:
        check = 0
        return (check)
    if data[1]:
        mhoev = float(data[1])
    else:
        check = 0
        return (check)
    if data[2]:
        mprijs = float(data[2])
    else:
        check = 0
        return (check)
    if data[3]:
        mleverstart = data[3]
    else:
        check = 0
        return (check)
    if data[4]:
        mlevereind = data[4]
    else:
        # No end date entered: use the start date (single-day window).
        mlevereind = mleverstart
    # Verify the article exists at all.
    metadata = MetaData()
    artikelen = Table('artikelen', metadata,
                      Column('artikelID', Integer(), primary_key=True))
    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    conn = engine.connect()
    s = select([artikelen]).where(artikelen.c.artikelID == martikelnr)
    rpart = conn.execute(s).first()
    if not rpart:
        check = 2
        foutArtikel(mregel)
        return (check)
    metadata = MetaData()
    orders_inkoop_artikelen = Table(
        'orders_inkoop_artikelen', metadata,
        Column('ordartlevID', Integer(), primary_key=True),
        Column('artikelID', None, ForeignKey('artikelen.artikelID')),
        Column('orderinkoopID', None,
               ForeignKey('orders_inkoop.order_inkoopID')),
        Column('bestelaantal', Float), Column('inkoopprijs', Float),
        Column('levering_start', String), Column('levering_eind', String),
        Column('regel', Integer))
    artikelen = Table('artikelen', metadata,
                      Column('artikelID', Integer, primary_key=True),
                      Column('bestelsaldo', Float),
                      Column('bestelstatus', Boolean))
    materiaallijsten = Table(
        'materiaallijsten', metadata,
        Column('matlijstID', Integer, primary_key=True),
        Column('artikelID', None, ForeignKey('artikelen.artikelID')),
        Column('orderinkoopID', Integer), Column('hoeveelheid', Float),
        Column('reserverings_datum', String),
        Column('levertijd_begin', String), Column('levertijd_end', String))
    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    conn = engine.connect()
    # Next free order-line key: current maximum + 1.
    mordlevnr = (conn.execute(select([func.max(
        orders_inkoop_artikelen.c.ordartlevID,
        type_=Integer).label('mordlevnr')])).scalar())
    mordlevnr += 1
    mlevnr = minkgeg[1]
    insrgl = insert(orders_inkoop_artikelen).values(
        ordartlevID=mordlevnr, orderinkoopID=minkordnr, artikelID=martikelnr,
        bestelaantal=mhoev, inkoopprijs=mprijs, levering_start=mleverstart,
        levering_eind=mlevereind, regel=mregel)
    conn.execute(insrgl)
    # Raise the article's on-order balance and flag it as on order.
    selart = select([artikelen]).where(artikelen.c.artikelID == martikelnr)
    if conn.execute(selart):
        updart = update(artikelen).where(and_(
            orders_inkoop_artikelen.c.artikelID == artikelen.c.artikelID,
            artikelen.c.artikelID == martikelnr)).values(
                bestelsaldo=artikelen.c.bestelsaldo + mhoev,
                bestelstatus=True)
        conn.execute(updart)
    else:
        foutInvoer()
        inkoopRegels(minkgeg, mregel)
    # Attach open material-list reservations (orderinkoopID == 0) for this
    # article to the new order.
    selmat = select([materiaallijsten]).where(and_(
        materiaallijsten.c.artikelID == martikelnr,
        materiaallijsten.c.orderinkoopID == 0))
    rpmat = conn.execute(selmat)
    mresrgl = 1
    for row in rpmat:
        mhoev = mhoev - row[3]
        if mhoev <= 0:
            updmat = update(materiaallijsten).where(
                materiaallijsten.c.artikelID == martikelnr).values(
                    orderinkoopID=minkordnr, levertijd_begin=mleverstart,
                    levertijd_end=mlevereind)
            conn.execute(updmat)
        else:
            # Ordered quantity does not fully cover this reservation.
            onvBesteld(row[3] - mhoev, mresrgl)
            mresrgl += 1
    invoerOk(mregel)
    conn.close()  # BUG FIX: was 'conn.close' -- attribute access, never called
    return (check)
class Task(Base):
    """Analysis task queue."""

    __tablename__ = "tasks"

    id = Column(Integer(), primary_key=True)
    target = Column(Text(), nullable=False)
    category = Column(String(255), nullable=False)
    timeout = Column(Integer(), server_default="0", nullable=False)
    priority = Column(Integer(), server_default="1", nullable=False)
    custom = Column(String(255), nullable=True)
    machine = Column(String(255), nullable=True)
    package = Column(String(255), nullable=True)
    tags = relationship("Tag",
                        secondary=tasks_tags,
                        backref="tasks",
                        lazy="subquery")
    options = Column(String(255), nullable=True)
    platform = Column(String(255), nullable=True)
    memory = Column(Boolean, nullable=False, default=False)
    enforce_timeout = Column(Boolean, nullable=False, default=False)
    # BUG FIX: was default=datetime.now() -- that called now() once at class
    # definition time, so every row got the process start time.  Passing the
    # callable makes SQLAlchemy evaluate it per insert, consistent with
    # added_on below.
    clock = Column(DateTime(timezone=False),
                   default=datetime.now,
                   nullable=False)
    added_on = Column(DateTime(timezone=False),
                      default=datetime.now,
                      nullable=False)
    started_on = Column(DateTime(timezone=False), nullable=True)
    completed_on = Column(DateTime(timezone=False), nullable=True)
    status = Column(Enum(TASK_PENDING, TASK_RUNNING, TASK_COMPLETED,
                         TASK_REPORTED, TASK_RECOVERED, TASK_FAILED_ANALYSIS,
                         TASK_FAILED_PROCESSING, TASK_FAILED_REPORTING,
                         name="status_type"),
                    server_default=TASK_PENDING,
                    nullable=False)

    # Statistics data to identify broken Cuckoos servers or VMs
    # Also for doing profiling to improve speed
    dropped_files = Column(Integer(), nullable=True)
    running_processes = Column(Integer(), nullable=True)
    api_calls = Column(Integer(), nullable=True)
    domains = Column(Integer(), nullable=True)
    signatures_total = Column(Integer(), nullable=True)
    signatures_alert = Column(Integer(), nullable=True)
    files_written = Column(Integer(), nullable=True)
    registry_keys_modified = Column(Integer(), nullable=True)
    crash_issues = Column(Integer(), nullable=True)
    anti_issues = Column(Integer(), nullable=True)
    analysis_started_on = Column(DateTime(timezone=False), nullable=True)
    analysis_finished_on = Column(DateTime(timezone=False), nullable=True)
    processing_started_on = Column(DateTime(timezone=False), nullable=True)
    processing_finished_on = Column(DateTime(timezone=False), nullable=True)
    signatures_started_on = Column(DateTime(timezone=False), nullable=True)
    signatures_finished_on = Column(DateTime(timezone=False), nullable=True)
    reporting_started_on = Column(DateTime(timezone=False), nullable=True)
    reporting_finished_on = Column(DateTime(timezone=False), nullable=True)
    timedout = Column(Boolean, nullable=False, default=False)
    sample_id = Column(Integer, ForeignKey("samples.id"), nullable=True)
    sample = relationship("Sample", backref="tasks")
    machine_id = Column(Integer, nullable=True)
    guest = relationship("Guest",
                         uselist=False,
                         backref="tasks",
                         cascade="save-update, delete")
    errors = relationship("Error",
                          backref="tasks",
                          cascade="save-update, delete")

    def to_dict(self):
        """Converts object to dict.
        @return: dict
        """
        d = {}
        for column in self.__table__.columns:
            value = getattr(self, column.name)
            # Datetimes are not JSON-serializable; render them as strings.
            if isinstance(value, datetime):
                d[column.name] = value.strftime("%Y-%m-%d %H:%M:%S")
            else:
                d[column.name] = value

        # Tags are a relation so no column to iterate.
        d["tags"] = [tag.name for tag in self.tags]
        return d

    def to_json(self):
        """Converts object to JSON.
        @return: JSON data
        """
        return json.dumps(self.to_dict())

    def __init__(self, target=None):
        self.target = target

    def __repr__(self):
        return "<Task('{0}','{1}')>".format(self.id, self.target)
class Assignment(Base):
    """ORM model for a course assignment: body text, event-hook code and
    bookkeeping metadata (owner, course, version)."""

    url = Column(String(255), default="")
    name = Column(String(255), default="Untitled")
    body = Column(Text(), default="")
    # Python source for hooks evaluated against student submissions.
    on_run = Column(
        Text(),
        default="def on_run(code, output, properties):\n return True")
    on_step = Column(
        Text(),
        default="def on_step(code, output, properties):\n return True")
    on_start = Column(Text(), default="")
    answer = Column(Text(), default="")
    due = Column(DateTime(), default=None)
    # One of 'normal', 'parsons' or 'text' -- see edit().
    type = Column(String(10), default="normal")
    visibility = Column(String(10), default="visible")
    disabled = Column(String(10), default="enabled")
    mode = Column(String(10), default="blocks")
    owner_id = Column(Integer(), ForeignKey('user.id'))
    course_id = Column(Integer(), ForeignKey('course.id'))
    # Incremented on every field change applied by edit().
    version = Column(Integer(), default=0)

    @staticmethod
    def edit(assignment_id,
             presentation=None,
             name=None,
             on_run=None,
             on_step=None,
             on_start=None,
             parsons=None,
             text_first=None):
        """Update the given fields of an assignment; ``None`` means leave
        unchanged.  Each applied change bumps ``version``.

        ``type`` is always reset to 'normal' and then overridden by
        ``parsons`` / ``text_first`` (``text_first`` wins when both are
        True).  Commits the session and returns the assignment.
        """
        assignment = Assignment.by_id(assignment_id)
        if name is not None:
            assignment.name = name
            assignment.version += 1
        if presentation is not None:
            assignment.body = presentation
            assignment.version += 1
        if on_run is not None:
            assignment.on_run = on_run
            assignment.version += 1
        if on_step is not None:
            assignment.on_step = on_step
            assignment.version += 1
        if on_start is not None:
            assignment.on_start = on_start
            assignment.version += 1
        assignment.type = 'normal'
        if parsons is True:
            assignment.type = 'parsons'
            assignment.version += 1
        if text_first is True:
            assignment.type = 'text'
            assignment.version += 1
        db.session.commit()
        return assignment

    def to_dict(self):
        """Serialize the subset of fields needed by the front end."""
        return {
            'name': self.name,
            'id': self.id,
            'body': self.body,
            'title': self.title()
        }

    def __str__(self):
        return '<Assignment {} for {}>'.format(self.id, self.course_id)

    def title(self):
        """Display name; falls back to 'Untitled (<id>)' for unnamed ones."""
        return self.name if self.name != "Untitled" else "Untitled ({})".format(
            self.id)

    @staticmethod
    def new(owner_id, course_id):
        """Create, persist and return a fresh assignment."""
        assignment = Assignment(owner_id=owner_id, course_id=course_id)
        db.session.add(assignment)
        db.session.commit()
        return assignment

    @staticmethod
    def remove(assignment_id):
        """Delete the assignment with the given id and commit."""
        Assignment.query.filter_by(id=assignment_id).delete()
        db.session.commit()

    @staticmethod
    def by_course(course_id, exclude_builtins=True):
        """All assignments of a course; 'maze'-mode ones skipped by default."""
        if exclude_builtins:
            return (Assignment.query.filter_by(course_id=course_id).filter(
                Assignment.mode != 'maze').all())
        else:
            return Assignment.query.filter_by(course_id=course_id).all()

    @staticmethod
    def by_id(assignment_id):
        return Assignment.query.get(assignment_id)

    @staticmethod
    def by_builtin(type, id, owner_id, course_id):
        """Find the built-in assignment with this mode/name in the course,
        creating it on first use.  NOTE: parameters shadow the builtins
        ``type`` and ``id`` (kept for caller compatibility)."""
        assignment = Assignment.query.filter_by(course_id=course_id,
                                                mode=type,
                                                name=id).first()
        if not assignment:
            assignment = Assignment.new(owner_id, course_id)
            assignment.mode = type
            assignment.name = id
            db.session.commit()
        return assignment

    @staticmethod
    def by_id_or_new(assignment_id, owner_id, course_id):
        """Load by id; create a new assignment when the id is None or stale."""
        if assignment_id is None:
            assignment = None
        else:
            assignment = Assignment.query.get(assignment_id)
        if not assignment:
            assignment = Assignment.new(owner_id, course_id)
        return assignment

    def context_is_valid(self, context_id):
        # True when the external context id matches this assignment's course.
        course = Course.query.get(self.course_id)
        if course:
            return course.external_id == context_id
        return False

    def get_submission(self, user_id):
        return Submission.load(user_id, self.id)
class Drink(db.Model):
    """Persistent drink with a JSON-encoded recipe.

    ``recipe`` stores a lazy JSON blob of the form
    ``[{'color': string, 'name': string, 'parts': number}]``.
    """

    # Autoincrementing, unique primary key
    id = Column(Integer().with_variant(Integer, "sqlite"), primary_key=True)
    # String Title
    title = Column(String(80), unique=True)
    # JSON-encoded ingredients blob (see class docstring for the schema).
    recipe = Column(String(180), nullable=False)

    def short(self):
        """Short form representation of the Drink model: each recipe entry
        reduced to its 'color' and 'parts' keys ('name' omitted)."""
        # BUG FIX: removed a leftover debug print() of the parsed recipe.
        short_recipe = [{
            'color': r['color'],
            'parts': r['parts']
        } for r in json.loads(self.recipe)]
        return {'id': self.id, 'title': self.title, 'recipe': short_recipe}

    def long(self):
        """Long form representation: the full parsed recipe."""
        return {
            'id': self.id,
            'title': self.title,
            'recipe': json.loads(self.recipe)
        }

    def insert(self):
        """Insert this model into the database.

        The model must have a unique title and a unique (or null) id.

        EXAMPLE
            drink = Drink(title=req_title, recipe=req_recipe)
            drink.insert()
        """
        db.session.add(self)
        db.session.commit()

    def delete(self):
        """Delete this model from the database; it must already exist.

        EXAMPLE
            drink = Drink(title=req_title, recipe=req_recipe)
            drink.delete()
        """
        db.session.delete(self)
        db.session.commit()

    def update(self):
        """Commit pending attribute changes; the row must already exist.

        EXAMPLE
            drink = Drink.query.filter(Drink.id == id).one_or_none()
            drink.title = 'Black Coffee'
            drink.update()
        """
        db.session.commit()

    def __repr__(self):
        return json.dumps(self.short())
def credentials(self) -> AuthCredentials:
    """Collect the credential flags that apply to this user."""
    # Staff status and a backing authenticated user each contribute one
    # credential; anonymous non-staff users get an empty list.
    credentials = []

    if self.is_staff:
        credentials.append(Credential.STAFF)

    if self._authenticated_user:
        credentials.append(Credential.AUTHENTICATED)

    return AuthCredentials(credentials)


# Imperative-mapping SQLAlchemy table for user rows.
# NOTE(review): this Table(...) call is cut off at the end of this chunk --
# its closing parenthesis (and possibly further columns) lies outside this
# view of the file.
user_table = Table(
    "users",
    mapper_registry.metadata,
    Column("id", Integer(), primary_key=True),
    Column("full_name", String(300), nullable=False),
    Column("password", String(128), nullable=False),
    Column("username", String(100), nullable=True),
    Column("email", String(254), unique=True, nullable=False),
    Column("name", String(300), nullable=False),
    Column("gender", String(10), nullable=False),
    Column("date_birth", Date(), nullable=True),
    Column("open_to_recruiting", Boolean(), default=False, nullable=False),
    Column("open_to_newsletter", Boolean(), default=False, nullable=False),
    Column("country", String(50), nullable=False),
    Column("date_joined", DateTime(timezone=True), nullable=False),
    Column("last_login", DateTime(timezone=True), nullable=True),
    Column("is_active", Boolean(), default=True, nullable=False),
    Column("is_staff", Boolean(), default=False, nullable=False),
    Column("is_superuser", Boolean(), default=False, nullable=False),
class TypeB(Base, FilterMixin):
    """Mapped type stored in the ``type_b`` table."""

    __tablename__ = 'type_b'

    # Surrogate integer primary key.
    id = Column(Integer(), primary_key=True)

    # Filter implementation exposed for FilterMixin (presumably consumed by
    # the mixin -- confirm in its definition).
    filter_class = FilterB
class BaseType(Base, TableNameMixin):
    """Polymorphic root: rows are dispatched on the 'type' column."""

    # Discriminator column driving single/joined-table polymorphism.
    discriminator = Column('type', String(50))
    __mapper_args__ = {'polymorphic_on': discriminator}

    # Surrogate primary key plus the stored payload.
    id = Column(Integer, primary_key=True)
    value = Column(Integer())
class A(a_1):
    # Subclass of a_1 mapped to its own table 'b'; its primary key doubles as
    # a foreign key to the parent's id (joined-table inheritance pattern,
    # presumably -- confirm a_1 is a mapped class).
    __tablename__ = 'b'
    id = Column(Integer(), ForeignKey(a_1.id), primary_key=True)
class BaseXCom(Base, LoggingMixin):
    """Base class for XCom objects."""

    __tablename__ = "xcom"

    # Composite primary key: owning DagRun's surrogate id + task id + key.
    dag_run_id = Column(Integer(), nullable=False, primary_key=True)
    task_id = Column(String(ID_LEN, **COLLATION_ARGS),
                     nullable=False,
                     primary_key=True)
    key = Column(String(512, **COLLATION_ARGS),
                 nullable=False,
                 primary_key=True)

    # Denormalized for easier lookup.
    dag_id = Column(String(ID_LEN, **COLLATION_ARGS), nullable=False)
    run_id = Column(String(ID_LEN, **COLLATION_ARGS), nullable=False)

    # Serialized payload bytes (JSON or pickle; see serialize_value).
    value = Column(LargeBinary)
    timestamp = Column(UtcDateTime, default=timezone.utcnow, nullable=False)

    dag_run = relationship(
        "DagRun",
        primaryjoin="""and_(
            BaseXCom.dag_id == foreign(DagRun.dag_id),
            BaseXCom.run_id == foreign(DagRun.run_id),
        )""",
        uselist=False,
        lazy="joined",
        passive_deletes="all",
    )
    execution_date = association_proxy("dag_run", "execution_date")

    __table_args__ = (
        # Ideally we should create a unique index over (key, dag_id, task_id, run_id),
        # but it goes over MySQL's index length limit. So we instead create indexes
        # separately, and enforce uniqueness with DagRun.id instead.
        Index("idx_xcom_key", key),
        Index("idx_xcom_ti_id", dag_id, task_id, run_id),
    )

    @reconstructor
    def init_on_load(self):
        """
        Called by the ORM after the instance has been loaded from the DB or otherwise reconstituted
        i.e automatically deserialize Xcom value when loading from DB.
        """
        self.value = self.orm_deserialize_value()

    def __repr__(self):
        return f'<XCom "{self.key}" ({self.task_id} @ {self.run_id})>'

    @overload
    @classmethod
    def set(
        cls,
        key: str,
        value: Any,
        *,
        dag_id: str,
        task_id: str,
        run_id: str,
        session: Session = NEW_SESSION,
    ) -> None:
        """Store an XCom value.

        A deprecated form of this function accepts ``execution_date`` instead
        of ``run_id``. The two arguments are mutually exclusive.

        :param key: Key to store the XCom.
        :param value: XCom value to store.
        :param dag_id: DAG ID.
        :param task_id: Task ID.
        :param run_id: DAG run ID for the task.
        :param session: Database session. If not given, a new session will
            be created for this function.
        """

    @overload
    @classmethod
    def set(
        cls,
        key: str,
        value: Any,
        task_id: str,
        dag_id: str,
        execution_date: datetime.datetime,
        session: Session = NEW_SESSION,
    ) -> None:
        """:sphinx-autoapi-skip:"""

    @classmethod
    @provide_session
    def set(
        cls,
        key: str,
        value: Any,
        task_id: str,
        dag_id: str,
        execution_date: Optional[datetime.datetime] = None,
        session: Session = NEW_SESSION,
        *,
        run_id: Optional[str] = None,
    ) -> None:
        """:sphinx-autoapi-skip:"""
        from airflow.models.dagrun import DagRun

        if not exactly_one(execution_date is not None, run_id is not None):
            raise ValueError(
                "Exactly one of run_id or execution_date must be passed")

        if run_id is None:
            # Deprecated path: resolve the run from execution_date.
            message = "Passing 'execution_date' to 'XCom.set()' is deprecated. Use 'run_id' instead."
            warnings.warn(message, DeprecationWarning, stacklevel=3)
            try:
                dag_run_id, run_id = (session.query(
                    DagRun.id, DagRun.run_id).filter(
                        DagRun.dag_id == dag_id,
                        DagRun.execution_date == execution_date).one())
            except NoResultFound:
                raise ValueError(
                    f"DAG run not found on DAG {dag_id!r} at {execution_date}"
                ) from None
        elif run_id == IN_MEMORY_RUN_ID:
            # Sentinel run id with no DB-backed DagRun row.
            dag_run_id = -1
        else:
            dag_run_id = session.query(DagRun.id).filter_by(
                dag_id=dag_id, run_id=run_id).scalar()
            if dag_run_id is None:
                raise ValueError(
                    f"DAG run not found on DAG {dag_id!r} with ID {run_id!r}")

        value = cls.serialize_value(
            value=value,
            key=key,
            task_id=task_id,
            dag_id=dag_id,
            run_id=run_id,
        )

        # Remove duplicate XComs and insert a new one.
        session.query(cls).filter(
            cls.key == key,
            cls.run_id == run_id,
            cls.task_id == task_id,
            cls.dag_id == dag_id,
        ).delete()
        new = cast(
            Any, cls
        )(  # Work around Mypy complaining model not defining '__init__'.
            dag_run_id=dag_run_id,
            key=key,
            value=value,
            run_id=run_id,
            task_id=task_id,
            dag_id=dag_id,
        )
        session.add(new)
        session.flush()

    @overload
    @classmethod
    def get_one(
        cls,
        *,
        key: Optional[str] = None,
        ti_key: "TaskInstanceKey",
        session: Session = NEW_SESSION,
    ) -> Optional[Any]:
        """Retrieve an XCom value, optionally meeting certain criteria.

        This method returns "full" XCom values (i.e. uses ``deserialize_value``
        from the XCom backend). Use :meth:`get_many` if you want the
        "shortened" value via ``orm_deserialize_value``.

        If there are no results, *None* is returned.

        A deprecated form of this function accepts ``execution_date`` instead
        of ``run_id``. The two arguments are mutually exclusive.

        :param ti_key: The TaskInstanceKey to look up the XCom for
        :param key: A key for the XCom. If provided, only XCom with matching
            keys will be returned. Pass *None* (default) to remove the filter.
        :param include_prior_dates: If *False* (default), only XCom from the
            specified DAG run is returned. If *True*, the latest matching XCom is
            returned regardless of the run it belongs to.
        :param session: Database session. If not given, a new session will
            be created for this function.
        """

    @overload
    @classmethod
    def get_one(
        cls,
        *,
        key: Optional[str] = None,
        task_id: str,
        dag_id: str,
        run_id: str,
        session: Session = NEW_SESSION,
    ) -> Optional[Any]:
        ...

    @overload
    @classmethod
    def get_one(
        cls,
        execution_date: datetime.datetime,
        key: Optional[str] = None,
        task_id: Optional[str] = None,
        dag_id: Optional[str] = None,
        include_prior_dates: bool = False,
        session: Session = NEW_SESSION,
    ) -> Optional[Any]:
        """:sphinx-autoapi-skip:"""

    @classmethod
    @provide_session
    def get_one(
        cls,
        execution_date: Optional[datetime.datetime] = None,
        key: Optional[str] = None,
        task_id: Optional[str] = None,
        dag_id: Optional[str] = None,
        include_prior_dates: bool = False,
        session: Session = NEW_SESSION,
        *,
        run_id: Optional[str] = None,
        ti_key: Optional["TaskInstanceKey"] = None,
    ) -> Optional[Any]:
        """:sphinx-autoapi-skip:"""
        if not exactly_one(execution_date is not None, ti_key is not None,
                           run_id is not None):
            raise ValueError(
                "Exactly one of ti_key, run_id, or execution_date must be passed"
            )

        if ti_key is not None:
            # Direct lookup by task-instance key.
            query = session.query(cls).filter_by(
                dag_id=ti_key.dag_id,
                run_id=ti_key.run_id,
                task_id=ti_key.task_id,
            )
            if key:
                query = query.filter_by(key=key)
            query = query.limit(1)
        elif run_id:
            query = cls.get_many(
                run_id=run_id,
                key=key,
                task_ids=task_id,
                dag_ids=dag_id,
                include_prior_dates=include_prior_dates,
                limit=1,
                session=session,
            )
        elif execution_date is not None:
            message = "Passing 'execution_date' to 'XCom.get_one()' is deprecated. Use 'run_id' instead."
            warnings.warn(message, PendingDeprecationWarning, stacklevel=3)

            # get_many would re-warn about execution_date; silence that here.
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", DeprecationWarning)
                query = cls.get_many(
                    execution_date=execution_date,
                    key=key,
                    task_ids=task_id,
                    dag_ids=dag_id,
                    include_prior_dates=include_prior_dates,
                    limit=1,
                    session=session,
                )
        else:
            raise RuntimeError("Should not happen?")

        result = query.with_entities(cls.value).first()
        if result:
            return cls.deserialize_value(result)
        return None

    @overload
    @classmethod
    def get_many(
        cls,
        *,
        run_id: str,
        key: Optional[str] = None,
        task_ids: Union[str, Iterable[str], None] = None,
        dag_ids: Union[str, Iterable[str], None] = None,
        include_prior_dates: bool = False,
        limit: Optional[int] = None,
        session: Session = NEW_SESSION,
    ) -> Query:
        """Composes a query to get one or more XCom entries.

        This function returns an SQLAlchemy query of full XCom objects. If you
        just want one stored value, use :meth:`get_one` instead.

        A deprecated form of this function accepts ``execution_date`` instead
        of ``run_id``. The two arguments are mutually exclusive.

        :param run_id: DAG run ID for the task.
        :param key: A key for the XComs. If provided, only XComs with matching
            keys will be returned. Pass *None* (default) to remove the filter.
        :param task_ids: Only XComs from task with matching IDs will be
            pulled. Pass *None* (default) to remove the filter.
        :param dag_ids: Only pulls XComs from this DAG. If *None* (default), the
            DAG of the calling task is used.
        :param include_prior_dates: If *False* (default), only XComs from the
            specified DAG run are returned. If *True*, all matching XComs are
            returned regardless of the run it belongs to.
        :param session: Database session. If not given, a new session will
            be created for this function.
        """

    @overload
    @classmethod
    def get_many(
        cls,
        execution_date: datetime.datetime,
        key: Optional[str] = None,
        task_ids: Union[str, Iterable[str], None] = None,
        dag_ids: Union[str, Iterable[str], None] = None,
        include_prior_dates: bool = False,
        limit: Optional[int] = None,
        session: Session = NEW_SESSION,
    ) -> Query:
        """:sphinx-autoapi-skip:"""

    @classmethod
    @provide_session
    def get_many(
        cls,
        execution_date: Optional[datetime.datetime] = None,
        key: Optional[str] = None,
        task_ids: Optional[Union[str, Iterable[str]]] = None,
        dag_ids: Optional[Union[str, Iterable[str]]] = None,
        include_prior_dates: bool = False,
        limit: Optional[int] = None,
        session: Session = NEW_SESSION,
        *,
        run_id: Optional[str] = None,
    ) -> Query:
        """:sphinx-autoapi-skip:"""
        from airflow.models.dagrun import DagRun

        if not exactly_one(execution_date is not None, run_id is not None):
            raise ValueError(
                "Exactly one of run_id or execution_date must be passed")
        if execution_date is not None:
            message = "Passing 'execution_date' to 'XCom.get_many()' is deprecated. Use 'run_id' instead."
            warnings.warn(message, PendingDeprecationWarning, stacklevel=3)

        query = session.query(cls).join(cls.dag_run)

        if key:
            query = query.filter(cls.key == key)

        # task_ids/dag_ids each accept either a single id or a collection.
        if is_container(task_ids):
            query = query.filter(cls.task_id.in_(task_ids))
        elif task_ids is not None:
            query = query.filter(cls.task_id == task_ids)

        if is_container(dag_ids):
            query = query.filter(cls.dag_id.in_(dag_ids))
        elif dag_ids is not None:
            query = query.filter(cls.dag_id == dag_ids)

        if include_prior_dates:
            if execution_date is not None:
                query = query.filter(DagRun.execution_date <= execution_date)
            else:
                # This returns an empty query result for IN_MEMORY_RUN_ID,
                # but that is impossible to implement. Sorry?
                dr = session.query(DagRun.execution_date).filter(
                    DagRun.run_id == run_id).subquery()
                query = query.filter(cls.execution_date <= dr.c.execution_date)
        elif execution_date is not None:
            query = query.filter(DagRun.execution_date == execution_date)
        else:
            query = query.filter(cls.run_id == run_id)

        query = query.order_by(DagRun.execution_date.desc(),
                               cls.timestamp.desc())
        if limit:
            return query.limit(limit)
        return query

    @classmethod
    @provide_session
    def delete(cls, xcoms: Union["XCom", Iterable["XCom"]],
               session: Session) -> None:
        """Delete one or multiple XCom entries."""
        if isinstance(xcoms, XCom):
            xcoms = [xcoms]
        for xcom in xcoms:
            if not isinstance(xcom, XCom):
                raise TypeError(
                    f'Expected XCom; received {xcom.__class__.__name__}')
            session.delete(xcom)
        session.commit()

    @overload
    @classmethod
    def clear(cls,
              *,
              dag_id: str,
              task_id: str,
              run_id: str,
              session: Optional[Session] = None) -> None:
        """Clear all XCom data from the database for the given task instance.

        A deprecated form of this function accepts ``execution_date`` instead
        of ``run_id``. The two arguments are mutually exclusive.

        :param dag_id: ID of DAG to clear the XCom for.
        :param task_id: ID of task to clear the XCom for.
        :param run_id: ID of DAG run to clear the XCom for.
        :param session: Database session. If not given, a new session will
            be created for this function.
        """

    @overload
    @classmethod
    def clear(
        cls,
        execution_date: pendulum.DateTime,
        dag_id: str,
        task_id: str,
        session: Session = NEW_SESSION,
    ) -> None:
        """:sphinx-autoapi-skip:"""

    @classmethod
    @provide_session
    def clear(
        cls,
        execution_date: Optional[pendulum.DateTime] = None,
        dag_id: Optional[str] = None,
        task_id: Optional[str] = None,
        session: Session = NEW_SESSION,
        *,
        run_id: Optional[str] = None,
    ) -> None:
        """:sphinx-autoapi-skip:"""
        from airflow.models import DagRun

        # Given the historic order of this function (execution_date was first argument) to add a new optional
        # param we need to add default values for everything :(
        if dag_id is None:
            raise TypeError("clear() missing required argument: dag_id")
        if task_id is None:
            raise TypeError("clear() missing required argument: task_id")

        if not exactly_one(execution_date is not None, run_id is not None):
            raise ValueError(
                "Exactly one of run_id or execution_date must be passed")

        if execution_date is not None:
            message = "Passing 'execution_date' to 'XCom.clear()' is deprecated. Use 'run_id' instead."
            warnings.warn(message, DeprecationWarning, stacklevel=3)
            run_id = (session.query(DagRun.run_id).filter(
                DagRun.dag_id == dag_id,
                DagRun.execution_date == execution_date).scalar())

        return session.query(cls).filter_by(dag_id=dag_id,
                                            task_id=task_id,
                                            run_id=run_id).delete()

    @staticmethod
    def serialize_value(
        value: Any,
        *,
        key=None,
        task_id=None,
        dag_id=None,
        run_id=None,
        mapping_index: int = -1,
    ):
        """Serialize XCom value to str or pickled object"""
        if conf.getboolean('core', 'enable_xcom_pickling'):
            return pickle.dumps(value)
        try:
            return json.dumps(value).encode('UTF-8')
        except (ValueError, TypeError):
            log.error("Could not serialize the XCom value into JSON."
                      " If you are using pickle instead of JSON for XCom,"
                      " then you need to enable pickle support for XCom"
                      " in your airflow config.")
            raise

    @staticmethod
    def deserialize_value(result: "XCom") -> Any:
        """Deserialize XCom value from str or pickle object"""
        if result.value is None:
            return None
        # Try the configured format first, falling back to the other so
        # that values written under the opposite setting still load.
        if conf.getboolean('core', 'enable_xcom_pickling'):
            try:
                return pickle.loads(result.value)
            except pickle.UnpicklingError:
                return json.loads(result.value.decode('UTF-8'))
        else:
            try:
                return json.loads(result.value.decode('UTF-8'))
            except (json.JSONDecodeError, UnicodeDecodeError):
                return pickle.loads(result.value)

    def orm_deserialize_value(self) -> Any:
        """
        Deserialize method which is used to reconstruct ORM XCom object.

        This method should be overridden in custom XCom backends to avoid
        unnecessary request or other resource consuming operations when
        creating XCom orm model. This is used when viewing XCom listing
        in the webserver, for example.
        """
        return BaseXCom.deserialize_value(self)
def test_compare_integer_text_diff(self): self._compare_default_roundtrip(Integer(), text("5"), "7")
class Connection(Base, LoggingMixin):
    """
    Placeholder to store information about different database instances
    connection information. The idea here is that scripts use references to
    database instances (conn_id) instead of hard coding hostname, logins and
    passwords when using operators or hooks.

    .. seealso::
        For more information on how to use this class, see: :doc:`/howto/connection`

    :param conn_id: The connection ID.
    :param conn_type: The connection type.
    :param description: The connection description.
    :param host: The host.
    :param login: The login.
    :param password: The password.
    :param schema: The schema.
    :param port: The port number.
    :param extra: Extra metadata. Non-standard data such as private/SSH keys can be
        saved here. JSON encoded object.
    :param uri: URI address describing connection parameters.
    """

    # Reserved query key that carries the raw 'extra' blob in a URI.
    EXTRA_KEY = '__extra__'

    __tablename__ = "connection"

    id = Column(Integer(), primary_key=True)
    conn_id = Column(String(ID_LEN), unique=True, nullable=False)
    conn_type = Column(String(500), nullable=False)
    description = Column(Text(5000))
    host = Column(String(500))
    schema = Column(String(500))
    login = Column(String(500))
    # Stored under column name 'password'; exposed via the `password` synonym
    # below so it is transparently encrypted/decrypted.
    _password = Column('password', String(5000))
    port = Column(Integer())
    is_encrypted = Column(Boolean, unique=False, default=False)
    is_extra_encrypted = Column(Boolean, unique=False, default=False)
    _extra = Column('extra', Text())

    def __init__(
        self,
        conn_id: Optional[str] = None,
        conn_type: Optional[str] = None,
        description: Optional[str] = None,
        host: Optional[str] = None,
        login: Optional[str] = None,
        password: Optional[str] = None,
        schema: Optional[str] = None,
        port: Optional[int] = None,
        extra: Optional[Union[str, dict]] = None,
        uri: Optional[str] = None,
    ):
        super().__init__()
        self.conn_id = conn_id
        self.description = description
        # Accept dict extras by serializing them to JSON up front.
        if extra and not isinstance(extra, str):
            extra = json.dumps(extra)
        # A URI is an all-or-nothing alternative to the individual fields.
        if uri and (conn_type or host or login or password or schema or port or extra):
            raise AirflowException(
                "You must create an object using the URI or individual values "
                "(conn_type, host, login, password, schema, port or extra)."
                "You can't mix these two ways to create this object."
            )
        if uri:
            self._parse_from_uri(uri)
        else:
            self.conn_type = conn_type
            self.host = host
            self.login = login
            self.password = password
            self.schema = schema
            self.port = port
            self.extra = extra
        if self.password:
            mask_secret(self.password)

    @reconstructor
    def on_db_load(self):
        # Re-register the password with the secret masker when the row is
        # loaded from the database (reconstructor bypasses __init__).
        if self.password:
            mask_secret(self.password)

    def parse_from_uri(self, **uri):
        """This method is deprecated. Please use uri parameter in constructor."""
        warnings.warn(
            "This method is deprecated. Please use uri parameter in constructor.",
            DeprecationWarning
        )
        self._parse_from_uri(**uri)

    def _parse_from_uri(self, uri: str):
        """Populate all connection fields from a single URI string."""
        uri_parts = urlparse(uri)
        conn_type = uri_parts.scheme
        # Normalize scheme spelling to the canonical conn_type form.
        if conn_type == 'postgresql':
            conn_type = 'postgres'
        elif '-' in conn_type:
            conn_type = conn_type.replace('-', '_')
        self.conn_type = conn_type
        self.host = _parse_netloc_to_hostname(uri_parts)
        quoted_schema = uri_parts.path[1:]
        self.schema = unquote(quoted_schema) if quoted_schema else quoted_schema
        self.login = unquote(uri_parts.username) if uri_parts.username else uri_parts.username
        self.password = unquote(uri_parts.password) if uri_parts.password else uri_parts.password
        self.port = uri_parts.port
        if uri_parts.query:
            query = dict(parse_qsl(uri_parts.query, keep_blank_values=True))
            # EXTRA_KEY carries a pre-serialized extra blob; otherwise the
            # whole query dict becomes the extra.
            if self.EXTRA_KEY in query:
                self.extra = query[self.EXTRA_KEY]
            else:
                self.extra = json.dumps(query)

    def get_uri(self) -> str:
        """Return connection in URI format"""
        uri = f"{str(self.conn_type).lower().replace('_', '-')}://"

        authority_block = ''
        if self.login is not None:
            authority_block += quote(self.login, safe='')
        if self.password is not None:
            authority_block += ':' + quote(self.password, safe='')
        # NOTE(review): string '>' comparison works here only because any
        # non-empty string sorts after '' — truthiness would be clearer.
        if authority_block > '':
            authority_block += '@'
            uri += authority_block

        host_block = ''
        if self.host:
            host_block += quote(self.host, safe='')
        if self.port:
            if host_block > '':
                host_block += f':{self.port}'
            else:
                # No host: keep a placeholder authority so the port survives
                # a round-trip through urlparse.
                host_block += f'@:{self.port}'
        if self.schema:
            host_block += f"/{quote(self.schema, safe='')}"
        uri += host_block

        if self.extra:
            try:
                query: Optional[str] = urlencode(self.extra_dejson)
            except TypeError:
                query = None
            # Only flatten extras into plain query params when that encoding
            # round-trips losslessly; otherwise tuck the raw blob under
            # EXTRA_KEY.
            if query and self.extra_dejson == dict(parse_qsl(query, keep_blank_values=True)):
                uri += '?' + query
            else:
                uri += '?' + urlencode({self.EXTRA_KEY: self.extra})
        return uri

    def get_password(self) -> Optional[str]:
        """Return encrypted password."""
        if self._password and self.is_encrypted:
            fernet = get_fernet()
            if not fernet.is_encrypted:
                raise AirflowException(
                    f"Can't decrypt encrypted password for login={self.login} "
                    f"FERNET_KEY configuration is missing"
                )
            return fernet.decrypt(bytes(self._password, 'utf-8')).decode()
        else:
            return self._password

    def set_password(self, value: Optional[str]):
        """Encrypt password and set in object attribute."""
        if value:
            fernet = get_fernet()
            self._password = fernet.encrypt(bytes(value, 'utf-8')).decode()
            self.is_encrypted = fernet.is_encrypted

    @declared_attr
    def password(cls):
        """Password. The value is decrypted/encrypted when reading/setting the value."""
        return synonym('_password', descriptor=property(cls.get_password, cls.set_password))

    def get_extra(self) -> Dict:
        """Return encrypted extra-data."""
        if self._extra and self.is_extra_encrypted:
            fernet = get_fernet()
            if not fernet.is_encrypted:
                raise AirflowException(
                    f"Can't decrypt `extra` params for login={self.login}, "
                    f"FERNET_KEY configuration is missing"
                )
            return fernet.decrypt(bytes(self._extra, 'utf-8')).decode()
        else:
            return self._extra

    def set_extra(self, value: str):
        """Encrypt extra-data and save in object attribute to object."""
        if value:
            fernet = get_fernet()
            self._extra = fernet.encrypt(bytes(value, 'utf-8')).decode()
            self.is_extra_encrypted = fernet.is_encrypted
        else:
            self._extra = value
            self.is_extra_encrypted = False

    @declared_attr
    def extra(cls):
        """Extra data. The value is decrypted/encrypted when reading/setting the value."""
        return synonym('_extra', descriptor=property(cls.get_extra, cls.set_extra))

    def rotate_fernet_key(self):
        """Encrypts data with a new key. See: :ref:`security/fernet`"""
        fernet = get_fernet()
        if self._password and self.is_encrypted:
            self._password = fernet.rotate(self._password.encode('utf-8')).decode()
        if self._extra and self.is_extra_encrypted:
            self._extra = fernet.rotate(self._extra.encode('utf-8')).decode()

    def get_hook(self, *, hook_params=None):
        """Return hook based on conn_type"""
        hook = ProvidersManager().hooks.get(self.conn_type, None)

        if hook is None:
            raise AirflowException(f'Unknown hook type "{self.conn_type}"')
        try:
            hook_class = import_string(hook.hook_class_name)
        except ImportError:
            # NOTE(review): warnings.warn() takes (message, category,
            # stacklevel) — it does not do %-formatting, so the second
            # positional here would be treated as a category and this call
            # would raise TypeError. The message should be pre-formatted
            # (or this should be a logger call). Flagged, not changed.
            warnings.warn(
                "Could not import %s when discovering %s %s",
                hook.hook_class_name,
                hook.hook_name,
                hook.package_name,
            )
            raise
        if hook_params is None:
            hook_params = {}
        return hook_class(**{hook.connection_id_attribute_name: self.conn_id}, **hook_params)

    def __repr__(self):
        return self.conn_id

    def log_info(self):
        """
        This method is deprecated. You can read each field individually or use the
        default representation (`__repr__`).
        """
        warnings.warn(
            "This method is deprecated. You can read each field individually or "
            "use the default representation (__repr__).",
            DeprecationWarning,
            stacklevel=2,
        )
        return (
            f"id: {self.conn_id}. Host: {self.host}, Port: {self.port}, Schema: {self.schema}, "
            f"Login: {self.login}, Password: {'XXXXXXXX' if self.password else None}, "
            f"extra: {'XXXXXXXX' if self.extra_dejson else None}"
        )

    def debug_info(self):
        """
        This method is deprecated. You can read each field individually or use the
        default representation (`__repr__`).
        """
        warnings.warn(
            "This method is deprecated. You can read each field individually or "
            "use the default representation (__repr__).",
            DeprecationWarning,
            stacklevel=2,
        )
        return (
            f"id: {self.conn_id}. Host: {self.host}, Port: {self.port}, Schema: {self.schema}, "
            f"Login: {self.login}, Password: {'XXXXXXXX' if self.password else None}, "
            f"extra: {self.extra_dejson}"
        )

    def test_connection(self):
        """Calls out get_hook method and executes test_connection method on that."""
        status, message = False, ''
        try:
            hook = self.get_hook()
            if getattr(hook, 'test_connection', False):
                status, message = hook.test_connection()
            else:
                message = (
                    f"Hook {hook.__class__.__name__} doesn't implement or inherit test_connection method"
                )
        except Exception as e:
            message = str(e)

        return status, message

    @property
    def extra_dejson(self) -> Dict:
        """Returns the extra property by deserializing json."""
        obj = {}
        if self.extra:
            try:
                obj = json.loads(self.extra)
            except JSONDecodeError:
                self.log.exception("Failed parsing the json for conn_id %s", self.conn_id)

            # Mask sensitive keys from this list
            mask_secret(obj)

        return obj

    @classmethod
    def get_connection_from_secrets(cls, conn_id: str) -> 'Connection':
        """
        Get connection by conn_id.

        :param conn_id: connection id
        :return: connection
        """
        # Ask each configured secrets backend in order; first hit wins.
        for secrets_backend in ensure_secrets_loaded():
            try:
                conn = secrets_backend.get_connection(conn_id=conn_id)
                if conn:
                    return conn
            except Exception:
                log.exception(
                    'Unable to retrieve connection from secrets backend (%s). '
                    'Checking subsequent secrets backend.',
                    type(secrets_backend).__name__,
                )
        raise AirflowNotFoundException(f"The conn_id `{conn_id}` isn't defined")
def test_compare_integer_str(self): self._compare_default_roundtrip(Integer(), "5")
def geefAuth(rpacc, m_email):
    # Permission editor: shows a 16x8 grid of checkboxes backed by the 128
    # permission flag characters (columns p1..p16) of one account row.
    # rpacc: (accountID, voornaam, tussenvoegsel, achternaam) row;
    # m_email: current user's email, threaded through for navigation.
    maccountnr = int(rpacc[0])
    mvoorn = rpacc[1]
    mtussen = rpacc[2]
    machtern = rpacc[3]
    metadata = MetaData()
    accounts = Table('accounts', metadata,
                     Column('p1', String), Column('p2', String),
                     Column('p3', String), Column('p4', String),
                     Column('p5', String), Column('p6', String),
                     Column('p7', String), Column('p8', String),
                     Column('p9', String), Column('p10', String),
                     Column('p11', String), Column('p12', String),
                     Column('p13', String), Column('p14', String),
                     Column('p15', String), Column('p16', String),
                     Column('accountID', Integer(), primary_key=True))
    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    con = engine.connect()
    sel = select([accounts]).where(accounts.c.accountID == maccountnr)
    rpa = con.execute(sel).first()

    class Widget(QDialog):
        def __init__(self, parent=None):
            super(Widget, self).__init__(parent)
            self.setWindowTitle("Aanpassen Bevoegdheden")
            self.setWindowIcon(QIcon('./images/logos/logo.jpg'))
            self.setFont(QFont('Arial', 10))
            grid = QGridLayout()
            grid.setSpacing(15)
            self.mvoorn = mvoorn
            self.mtussen = mtussen
            self.machtern = machtern
            # Concatenate the 16 eight-character permission fields into one
            # 128-character flag string ('0'/'1' per permission).
            self.astr = rpa[0]+rpa[1]+rpa[2]+rpa[3]+rpa[4]+rpa[5]+rpa[6]+rpa[7]\
                +rpa[8]+rpa[9]+rpa[10]+rpa[11]+rpa[12]+rpa[13]+rpa[14]+rpa[15]
            pyqt = QLabel()
            movie = QMovie('./images/logos/pyqt.gif')
            pyqt.setMovie(movie)
            movie.setScaledSize(QSize(240,80))
            movie.start()
            grid.addWidget(pyqt, 0 ,0, 1, 6)
            logo = QLabel()
            pixmap = QPixmap('./images/logos/logo.jpg')
            logo.setPixmap(pixmap)
            grid.addWidget(logo , 0, 16, 1, 4, Qt.AlignRight)
            accEdit = QLineEdit(str(maccountnr))
            accEdit.setFixedWidth(100)
            accEdit.setFont(QFont("Arial",10))
            accEdit.setStyleSheet('color: black')
            accEdit.setDisabled(True)
            grid.addWidget(QLabel(mvoorn+' '+mtussen+' '+machtern), 0, 8, 1, 9, Qt.AlignTop)
            grid.addWidget(QLabel('Accountnummer'), 0, 8, 1, 10, Qt.AlignBottom)
            grid.addWidget(accEdit, 0, 7, 1, 10, Qt.AlignCenter | Qt.AlignBottom)
            # Column headers for the two checkbox panels (left and right).
            grid.addWidget(QLabel('Menu'), 2, 1, 1, 2, Qt.AlignRight)
            grid.addWidget(QLabel('S'), 2, 3)
            grid.addWidget(QLabel('B'), 2, 4)
            grid.addWidget(QLabel('I'), 2, 5)
            grid.addWidget(QLabel('W'), 2, 6)
            grid.addWidget(QLabel('P'), 2, 7)
            grid.addWidget(QLabel('O'), 2, 8)
            grid.addWidget(QLabel('R'), 2, 9)
            grid.addWidget(QLabel('Menu'), 2, 11, 1, 2, Qt.AlignRight)
            grid.addWidget(QLabel('S'), 2, 13)
            grid.addWidget(QLabel('B'), 2, 14)
            grid.addWidget(QLabel('I'), 2, 15)
            grid.addWidget(QLabel('W'), 2, 16)
            grid.addWidget(QLabel('P'), 2, 17)
            grid.addWidget(QLabel('O'), 2, 18)
            grid.addWidget(QLabel('R'), 2, 19)
            # Row labels: one per permission category (p1..p16).
            lbl0 = QLabel('Accounts')
            lbl0.setFixedWidth(115)
            grid.addWidget(lbl0, 3 , 0)
            lbl1 = QLabel('Leveranciers')
            lbl1.setFixedWidth(115)
            grid.addWidget(lbl1, 4 , 0)
            lbl2 = QLabel('Werknemers')
            lbl2.setFixedWidth(115)
            grid.addWidget(lbl2, 5 , 0)
            lbl3 = QLabel('Inkoop')
            lbl3.setFixedWidth(115)
            grid.addWidget(lbl3, 6 , 0)
            lbl4 = QLabel('Verkoop')
            lbl4.setFixedWidth(115)
            grid.addWidget(lbl4, 7 , 0)
            lbl5 = QLabel('Magazijn')
            lbl5.setFixedWidth(115)
            grid.addWidget(lbl5, 8 , 0)
            lbl6 = QLabel('Werken Intern')
            lbl6.setFixedWidth(115)
            grid.addWidget(lbl6, 9 , 0)
            lbl7 = QLabel('Werken Extern')
            lbl7.setFixedWidth(115)
            grid.addWidget(lbl7, 10 , 0)
            lbl8 = QLabel('Calculatie Interne Werken')
            lbl8.setFixedWidth(200)
            grid.addWidget(lbl8, 3 , 11)
            lbl9 = QLabel('Calculatie Externe Werken')
            lbl9.setFixedWidth(200)
            grid.addWidget(lbl9, 4 , 11)
            lbl10 = QLabel('Loonadminstratie')
            lbl10.setFixedWidth(200)
            grid.addWidget(lbl10, 5 , 11)
            lbl11 = QLabel('Boekhouding')
            lbl11.setFixedWidth(200)
            grid.addWidget(lbl11, 6 , 11)
            lbl12 = QLabel('Voorraadmanagement')
            lbl12.setFixedWidth(200)
            grid.addWidget(lbl12, 7 , 11)
            lbl13 = QLabel('Managementinformatie')
            lbl13.setFixedWidth(200)
            grid.addWidget(lbl13, 8 , 11)
            lbl14 = QLabel('Onderhoud')
            lbl14.setFixedWidth(200)
            grid.addWidget(lbl14, 9 , 11)
            lbl15 = QLabel('Herprinten formulieren')
            lbl15.setFixedWidth(200)
            grid.addWidget(lbl15, 10 , 11)
            # Indices of flags the user toggled (each click appends once).
            self.xlist=[]
            # NOTE(review): the flag string has 128 characters but only bits
            # 0..120 get a checkbox here — bits 121-127 (tail of p16) are
            # never shown. Confirm whether range(0,128) was intended.
            for x in range(0,121):
                cBox = QCheckBox()
                val = self.astr[x]
                if val == '1':
                    cBox.setChecked(True)
                else:
                    cBox.setChecked(False)
                # First 64 flags go in the left panel, the rest on the right.
                if x < 64:
                    grid.addWidget(cBox, int(x/8+3), x%8+2)
                else:
                    grid.addWidget(cBox, int(x/8-5), x%8+12)
                # Bind x as a default arg so each lambda keeps its own index.
                cBox.clicked.connect(lambda checked , mindex = x : getindex(mindex))

            def getindex(mindex):
                #compile list with changes
                self.xlist.append(mindex)

            def writeValues(self):
                #remove unnessary paired changes (enable, disable) or (disable, enable) and sort
                self.xlist = [value for value, count in Counter(self.xlist).items() if count%2 == 1]
                self.xlist.sort()
                # Flip each net-toggled bit in the flag string.
                for x in self.xlist:
                    if self.astr[x] == '0':
                        self.astr=self.astr[0:x]+'1'+self.astr[x+1:]
                    else:
                        self.astr=self.astr[0:x]+'0'+self.astr[x+1:]
                # Write the flag string back as 16 eight-character columns.
                updper=update(accounts).where(accounts.c.accountID==maccountnr).values\
                    (p1=self.astr[0:8], p2=self.astr[8:16], p3=self.astr[16:24],
                     p4=self.astr[24:32], p5=self.astr[32:40], p6=self.astr[40:48],
                     p7=self.astr[48:56], p8=self.astr[56:64], p9=self.astr[64:72],
                     p10=self.astr[72:80], p11=self.astr[80:88], p12=self.astr[88:96],
                     p13=self.astr[96:104], p14=self.astr[104:112], p15=self.astr[112:120],
                     p16=self.astr[120:128])
                con.execute(updper)
                updateOK(self)
                self.close()

            applyBtn = QPushButton('Opslaan')
            applyBtn.clicked.connect(lambda: writeValues(self))
            grid.addWidget(applyBtn, 12, 16, 1, 4, Qt.AlignRight)
            applyBtn.setFont(QFont("Arial",10))
            applyBtn.setFixedWidth(100)
            applyBtn.setStyleSheet("color: black; background-color: gainsboro")
            cancelBtn = QPushButton('Sluiten')
            cancelBtn.clicked.connect(lambda: winSluit(self, m_email))
            grid.addWidget(cancelBtn, 12, 12, 1, 4)
            cancelBtn.setFont(QFont("Arial",10))
            cancelBtn.setFixedWidth(100)
            cancelBtn.setStyleSheet("color: black; background-color: gainsboro")
            infoBtn = QPushButton('Informatie')
            infoBtn.clicked.connect(lambda: info())
            grid.addWidget(infoBtn, 12, 10, 1, 4, Qt.AlignCenter)
            infoBtn.setFont(QFont("Arial",10))
            infoBtn.setFixedWidth(120)
            infoBtn.setStyleSheet("color: black; background-color: gainsboro")
            grid.addWidget(QLabel('\u00A9 2017 all rights reserved [email protected]'),
                           13, 0, 1, 18, Qt.AlignCenter)
            self.setLayout(grid)
            self.setGeometry(500, 200, 150, 100)

    win = Widget()
    win.exec_()
    # After the dialog closes, return to the account-search dialog.
    zoekAccount(m_email)
def zoekAccount(m_email):
    # Account-search dialog: asks for an account number, looks it up in the
    # 'accounts' table, and opens the permission editor (geefAuth) on a hit.
    class Widget(QDialog):
        def __init__(self, parent=None):
            super(Widget, self).__init__(parent)
            self.setWindowTitle("Authorisatie programma.")
            self.setWindowIcon(QIcon('./images/logos/logo.jpg'))
            self.setFont(QFont('Arial', 10))
            self.Accountnummer = QLabel()
            accEdit = QLineEdit('1')
            accEdit.setFixedWidth(100)
            accEdit.setFont(QFont("Arial",10))
            accEdit.textChanged.connect(self.accChanged)
            # Account numbers: 9 digits starting with 1.
            reg_ex = QRegExp('^[1]{1}[0-9]{8}$')
            input_validator = QRegExpValidator(reg_ex, accEdit)
            accEdit.setValidator(input_validator)
            grid = QGridLayout()
            grid.setSpacing(20)
            pyqt = QLabel()
            movie = QMovie('./images/logos/pyqt.gif')
            pyqt.setMovie(movie)
            movie.setScaledSize(QSize(240,80))
            movie.start()
            grid.addWidget(pyqt, 0 ,0, 1, 2)
            logo = QLabel()
            pixmap = QPixmap('./images/logos/logo.jpg')
            logo.setPixmap(pixmap)
            grid.addWidget(logo , 0, 2, 1, 1, Qt.AlignRight)
            grid.addWidget(QLabel('Accountnummer'), 1, 1)
            grid.addWidget(accEdit, 1, 2)
            cancelBtn = QPushButton('Sluiten')
            cancelBtn.clicked.connect(lambda: windowSluit(self, m_email))
            applyBtn = QPushButton('Zoeken')
            applyBtn.clicked.connect(self.accept)
            grid.addWidget(applyBtn, 2, 2)
            applyBtn.setFont(QFont("Arial",10))
            applyBtn.setFixedWidth(100)
            applyBtn.setStyleSheet("color: black; background-color: gainsboro")
            grid.addWidget(cancelBtn, 2, 1)
            cancelBtn.setFont(QFont("Arial",10))
            cancelBtn.setFixedWidth(100)
            cancelBtn.setStyleSheet("color: black; background-color: gainsboro")
            grid.addWidget(QLabel('\u00A9 2017 all rights reserved [email protected]'),
                           3, 0, 1, 3, Qt.AlignCenter)
            self.setLayout(grid)
            self.setGeometry(500, 400, 150, 150)

        def accChanged(self, text):
            # Mirror the line-edit content into the hidden label that
            # returnAccountnummer() reads back.
            self.Accountnummer.setText(text)

        def returnAccountnummer(self):
            return self.Accountnummer.text()

        @staticmethod
        def getData(parent=None):
            dialog = Widget(parent)
            dialog.exec_()
            return [dialog.returnAccountnummer()]

    window = Widget()
    data = window.getData()
    if data[0]:
        maccountnr = int(data[0])
    else:
        # No input entered: use 0, which cannot match a real account.
        maccountnr = 0
    metadata = MetaData()
    accounts = Table('accounts', metadata,
                     Column('accountID', Integer(), primary_key=True),
                     Column('voornaam', String), Column('tussenvoegsel', String),
                     Column('achternaam', String))
    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    conn = engine.connect()
    s = select([accounts]).where(accounts.c.accountID == maccountnr)
    rpacc = conn.execute(s).first()
    if rpacc:
        geefAuth(rpacc, m_email)
    else:
        # Unknown account: show the error dialog and ask again.
        foutAccountnr()
        zoekAccount(m_email)
class UserInfo(Base):  # user account information
    __tablename__ = 'dj_account'

    accountId = Column(String(64), primary_key=True)
    #nick=Column(String(255),default="", nullable=True)
    userType = Column(SmallInteger(), default=0, nullable=False)  # 1: regular user, 2: agent user
    phone = Column(String(255), default="", nullable=True)
    coin = Column(BigInteger(), default=0)
    guessCoin = Column(BigInteger(), default=0)
    pingboCoin = Column(BigInteger(), default=0)
    shabaCoin = Column(BigInteger(), default=0)
    notDrawingCoin = Column(BigInteger(), default=0)
    passwordMd5 = Column(String(255), default="", nullable=False)
    # secret=Column(String(255), default="", nullable=False)
    regTime = Column(Integer(), default=0, nullable=False)
    email = Column(String(255), default="", nullable=True)
    loginTime = Column(Integer(), default=0, nullable=True)
    loginIp = Column(String(255), default="", nullable=True)
    loginAddress = Column(String(255), default="", nullable=True)  # login location
    loginDeviceUdid = Column(String(255), default="", nullable=True)
    loginDeviceModel = Column(String(255), default="", nullable=True)  # login device model
    loginDeviceName = Column(String(255), default="", nullable=True)  # login device name
    logoutTime = Column(Integer(), default=0, nullable=True)
    lastBetTime = Column(Integer(), default=0, nullable=True)
    platform = Column(Integer(), default=0, nullable=True)
    # invalid=Column(Integer(), default=0,nullable=False)
    # lastReceviveMsgTime=Column(Integer(), default=0, nullable=False)
    firstPayCoin = Column(Integer(), default=0, nullable=True)
    firstPayTime = Column(Integer(), default=0, nullable=True)
    headAddress = Column(String(255), default="", nullable=True)
    realName = Column(String(255), default="", nullable=True)
    sex = Column(String(32), default="", nullable=True)
    born = Column(String(255), default="", nullable=True)
    address = Column(String(512), default="", nullable=True)
    bankcard = Column(String(1024), default="", nullable=True)
    tradePasswordMd5 = Column(String(255), default="", nullable=True)
    status = Column(SmallInteger(), default=0, nullable=True)  # 0: normal, 1: frozen
    lockStartTime = Column(Integer(), default=0, nullable=True)  # account-lock start time
    lockEndTime = Column(Integer(), default=0, nullable=True)  # account-lock end time
    lockReason = Column(String(512), default="", nullable=True)  # account-lock reason
    level = Column(Integer(), default=0, nullable=False)  # user level
    levelValidWater = Column(Integer(), default=0, nullable=True)  # valid turnover at this level
    agentId = Column(String(64), default="", nullable=False, index=True)  # agent id
    lastPBCRefreshTime = Column(Integer(), default=0, nullable=True)  # last pingbo wallet refresh time

    __table_args__ = {'mysql_charset': 'utf8mb4'}
# License for the specific language governing permissions and limitations # under the License. from sqlalchemy import Column, Table, MetaData from sqlalchemy import Integer, BigInteger, DateTime, Boolean, String from nova import log as logging meta = MetaData() bw_cache = Table( 'bw_usage_cache', meta, Column('created_at', DateTime(timezone=False)), Column('updated_at', DateTime(timezone=False)), Column('deleted_at', DateTime(timezone=False)), Column('deleted', Boolean(create_constraint=True, name=None)), Column('id', Integer(), primary_key=True, nullable=False), Column('instance_id', Integer(), nullable=False), Column( 'network_label', String(length=255, convert_unicode=False, assert_unicode=None, unicode_error=None, _warn_on_bytestring=False)), Column('start_period', DateTime(timezone=False), nullable=False), Column('last_refreshed', DateTime(timezone=False)), Column('bw_in', BigInteger()), Column('bw_out', BigInteger())) def upgrade(migrate_engine): # Upgrade operations go here. Don't create your own engine;
class AgentCommission(Base):
    """Monthly agent commission report row (one bill per agent per month)."""

    __tablename__ = 'dj_agent_commission'

    billId = Column(String(64), primary_key=True)  # bill number
    billTime = Column(Integer(), default=0, nullable=False)  # bill creation time
    agentId = Column(String(255), default="", nullable=False)  # agent id
    # BUG FIX: dateYear/dateMonth are Integer columns but previously had
    # default="" (a string), which fails at insert time; use 0 like every
    # other integer column in this model.
    dateYear = Column(Integer(), default=0, nullable=False)  # year
    dateMonth = Column(Integer(), default=0, nullable=False)  # month
    newAccount = Column(Integer(), default=0, nullable=True)  # new users
    activeAccount = Column(Integer(), default=0, nullable=False)  # active users
    probetWinLoss = Column(Integer(), default=0, nullable=False)  # esports total win/loss
    pingboWinLoss = Column(Integer(), default=0, nullable=False)  # pingbo total win/loss
    winLoss = Column(Integer(), default=0, nullable=False)  # overall total win/loss
    probetRate = Column(Float(), default=0, nullable=False)  # esports platform fee rate
    pingboRate = Column(Float(), default=0, nullable=False)  # pingbo platform fee rate
    probetCost = Column(Integer(), default=0, nullable=False)  # esports platform fee
    pingboCost = Column(Integer(), default=0, nullable=False)  # pingbo platform fee
    platformCost = Column(Integer(), default=0, nullable=False)  # total platform fee
    depositDrawingCost = Column(Integer(), default=0, nullable=False)  # deposit/withdrawal fees
    backWater = Column(Integer(), default=0, nullable=False)  # rebate
    bonus = Column(Integer(), default=0, nullable=False)  # promotion bonus
    water = Column(Integer(), default=0, nullable=True)  # turnover
    netProfit = Column(Integer(), default=0, nullable=False)  # net profit
    preBalance = Column(Integer(), default=0, nullable=False)  # previous month's balance
    balance = Column(Integer(), default=0, nullable=False)  # this month's balance
    commissionRate = Column(Float(), default=0, nullable=False)  # commission rate
    commission = Column(Integer(), default=0, nullable=False)  # commission amount
    status = Column(Integer(), default=0, nullable=False)  # commission status: 0 paid, 1 unpaid
    handleTime = Column(Integer(), default=0, nullable=True)  # payout time
    reviewer = Column(String(32), default="", nullable=True)  # reviewer (for payout approval)

    __table_args__ = {'mysql_charset': 'utf8mb4'}
print(df.head(5))

#df = pd.read_csv("mytest.csv") # , keep_default_na=False)
df = pd.read_csv("all_viruses.csv") # , keep_default_na=False)
#df.drop(df.columns[[0]], axis=1, inplace=True)
#df = df[df["gene_product_name"].str.startswith("CHECK_") == False]
print(df.head(5))

# SQL column types for the DataFrame's subsequent to_sql() load.
types = {
    "id": Integer(),
    'genbank_genome_accession': String(255),
    'gene_symbol': String(255),
    'gene_product_name': String(255),
    'genbank_protein_accession': String(255),
    'strain_name': String(255),
    'isolate': String(255),
    'isolation_source': String(255),
    'virus_specimen': String(255),
    'host': String(255),
    'collection_date': String(8),
    'country': String(255),
    'sequence_type': String(4),
    'fasta': Text()
}
class Generic(Base, IdColumn):
    """Polymorphic root row; subclasses discriminate on the 'type' column."""

    __tablename__ = 'base'

    # Persisted under column name 'type'; used by the mapper to pick the
    # concrete subclass on load.
    discriminator = Column('type', String(50))
    __mapper_args__ = {'polymorphic_on': discriminator}

    value = Column(Integer())
class ResultTest(Base):
    """ORM model for one automated URA call-test result, plus query helpers."""

    __tablename__ = "result_test"

    id = Column(String, primary_key=True)
    to_number = Column(String(20))
    label = Column(String(50))
    success = Column(Boolean())
    call_status = Column(String(30))
    transcription = Column(String(30))
    transcripted_text = Column(Text())
    transcripted_quality = Column(Float())
    created_at = Column(DateTime())
    updated_at = Column(DateTime())
    start_at = Column(DateTime())
    end_at = Column(DateTime())
    call_duration = Column(Integer())
    recording_duration = Column(Integer())
    recording_sid = Column(Text())
    recording_url = Column(Text())
    error_code = Column(Integer())
    alarmed_at = Column(DateTime())

    def json(self):
        """Full dict representation; datetimes are stringified."""
        return {
            "id": self.id,
            "to-number": self.to_number,
            "label": self.label,
            "success": self.success,
            "call-status": self.call_status,
            "transcription": self.transcription,
            "transcripted-text": self.transcripted_text,
            "transcripted-quality": self.transcripted_quality,
            "created-at": str(self.created_at),
            "updated-at": str(self.updated_at),
            "start-at": str(self.start_at),
            "end-at": str(self.end_at),
            "call-duration": self.call_duration,
            "recording-duration": self.recording_duration,
            "recording-sid": self.recording_sid,
            "recording-url": self.recording_url,
            "error-code": self.error_code,
            "alarmed_at": str(self.alarmed_at),
        }

    def minimal_json(self):
        """Reduced dict representation for listings."""
        return {
            "id": self.id,
            "to-number": self.to_number,
            "label": self.label,
            "success": self.success,
            "call-status": self.call_status,
            "transcription": self.transcription,
            "start-at": str(self.start_at),
            "recording-duration": self.recording_duration,
            "alarmed_at": str(self.alarmed_at),
        }

    @classmethod
    def find_by_id(cls, id):
        """Return the row with this primary key, or None."""
        return session.query(cls).filter_by(id=id).one_or_none()

    @classmethod
    def find_by_to_number(cls, number, limit=3):
        """Return the most recent `limit` results for a number, newest first."""
        return (session.query(cls).filter_by(to_number=number).order_by(
            ResultTest.start_at.desc()).limit(limit).all())

    @classmethod
    def find_all(cls):
        """Return every stored result."""
        return session.query(cls).all()

    @classmethod
    def get_last_unlarmed_faileds(cls):
        """Map each known URA number to its failed, not-yet-alarmed results."""
        unlarmeds = {}
        for ura in Ura.get_uras_numbers():
            results_obj = (session.query(cls).filter_by(
                to_number=ura, success=False,
                alarmed_at=None).order_by(ResultTest.start_at.desc()).all())
            unlarmeds.update({ura: results_obj})
        return unlarmeds

    @classmethod
    def get_failures_per_day(cls, from_date, to_date):
        """
        Return [count, date] pairs of failures per day in the range.

        SELECT count(success), date(start_at) from result_test where success=0
        group by date(start_at) ORDER BY (start_at) DESC LIMIT 1
        """
        # NOTE(review): dates are interpolated into raw SQL via f-string —
        # only pass trusted values here; prefer bound parameters.
        res = []
        with engine.connect() as con:
            failures = con.execute(
                f"SELECT COUNT(success), DATE(start_at) FROM result_test WHERE success=FALSE AND DATE(start_at) >= DATE('{from_date}') AND DATE(start_at) <= DATE('{to_date}') GROUP BY success, DATE(start_at) ORDER BY DATE(start_at) DESC"
            )
            for row in failures:
                res.append(list(row))
        return res

    @classmethod
    def get_successes_per_day(cls, from_date, to_date):
        """
        Return [count, date] pairs of successes per day in the range.

        SELECT COUNT(success), DATE(start_at) FROM result_test
        WHERE success=true AND DATE(start_at) >= DATE('2020-03-18')
        AND DATE(start_at) <= DATE('2020-03-20')
        GROUP BY DATE(start_at) ORDER BY (start_at) DESC
        """
        # NOTE(review): same raw-SQL interpolation caveat as above.
        res = []
        with engine.connect() as con:
            successes = con.execute(
                f"SELECT COUNT(success), DATE(start_at) FROM result_test WHERE success=TRUE AND DATE(start_at) >= DATE('{from_date}') AND DATE(start_at) <= DATE('{to_date}') GROUP BY success, DATE(start_at) ORDER BY DATE(start_at) DESC"
            )
            for row in successes:
                res.append(list(row))
        return res

    @classmethod
    def find_results(cls, uras=None, n_last_results=3):
        """
        Map each URA number to the minimal JSON of its last results.

        BUG FIX: the default was the mutable `list()`; `None` is
        backward-compatible (both are falsy, triggering the same fallback)
        and avoids the shared-mutable-default pitfall.
        """
        results = {}
        uras = uras if uras else Ura.get_uras_numbers()
        for ura in uras:
            results_obj = ResultTest.find_by_to_number(ura,
                                                       limit=n_last_results)
            test_results = [r.minimal_json() for r in results_obj]
            results.update({ura: test_results})
        return results

    def save_to_db(self):
        """Persist this row and close the session."""
        session.add(self)
        session.commit()
        session.close()

    def delete_from_db(self):
        """Delete this row and close the session."""
        session.delete(self)
        session.commit()
        session.close()
class kTotalStrokes(Base):
    # One row per character's kTotalStrokes record.
    __tablename__ = 'kTotalStrokes'

    id = Column(Integer, primary_key=True)
    # Single character, FK to the Unhn table's char column.
    char_id = Column(String(1), ForeignKey('Unhn.char'))
    # Stroke counts; presumably hans = simplified (zh-Hans) and
    # hant = traditional (zh-Hant) values — TODO confirm against the loader.
    hans = Column(Integer())
    hant = Column(Integer())
# 2020-02-05 # Colton Grainger # CC-0 Public Domain """ Initialize archive, platform, document, and image tables. """ from sqlalchemy import Table, Column, MetaData from sqlalchemy import UniqueConstraint, ForeignKey from sqlalchemy import Integer, String, Date from sqlalchemy.types import Enum metadata = MetaData() archive = Table( 'archive', metadata, Column('archive_id', Integer(), primary_key=True), Column('name', String(50)), Column('host_country', String(3)), UniqueConstraint('name', 'host_country', name='uix_archive_name_and_host_country')) platform = Table( 'platform', metadata, Column('platform_id', Integer(), primary_key=True), Column('name', String(50)), Column('host_country', String(3)), UniqueConstraint('name', 'host_country', name='uix_platform_name_and_host_country')) document = Table( 'document', metadata,
class TimelineEvent(DiscussionBoundBase):
    """Abstract event that will be shown in the timeline."""
    __tablename__ = 'timeline_event'

    id = Column(Integer, primary_key=True,
                info={'rdf': QuadMapPatternS(None, ASSEMBL.db_id)})

    discussion_id = Column(Integer, ForeignKey(
        'discussion.id', ondelete='CASCADE', onupdate='CASCADE'
    ), nullable=False, index=True)

    # Single-table polymorphic discriminator for event subclasses.
    type = Column(String(60), nullable=False)
    __mapper_args__ = {
        'polymorphic_identity': 'timeline_event',
        'polymorphic_on': type,
        'with_polymorphic': '*'
    }

    identifier = Column(String(60), doc="An identifier for front-end semantics")

    # Title and description are localized LangString references.
    title_id = Column(
        Integer(), ForeignKey(LangString.id), nullable=False,
        info={'rdf': QuadMapPatternS(None, DCTERMS.title)})
    description_id = Column(
        Integer(), ForeignKey(LangString.id),
        info={'rdf': QuadMapPatternS(None, DCTERMS.description)})
    title = relationship(
        LangString, lazy="joined", single_parent=True,
        primaryjoin=title_id == LangString.id,
        backref=backref("title_of_timeline_event", lazy="dynamic"),
        cascade="all, delete-orphan")
    description = relationship(
        LangString, lazy="joined", single_parent=True,
        primaryjoin=description_id == LangString.id,
        backref=backref("description_of_timeline_event", lazy="dynamic"),
        cascade="all, delete-orphan")
    image_url = Column(URLString())
    start = Column(
        DateTime,
        # Formally, TIME.hasBeginning o TIME.inXSDDateTime
        info={'rdf': QuadMapPatternS(None, TIME.hasBeginning)})
    end = Column(
        DateTime,
        info={'rdf': QuadMapPatternS(None, TIME.hasEnd)})

    # Since dates are optional, the previous event pointer allows
    # dateless events to form a linked list.
    # Ideally we could use a uniqueness constraint but
    # that disallows multiple NULLs.
    # Also, the linked list defines lanes.
    previous_event_id = Column(Integer, ForeignKey(
        'timeline_event.id', ondelete="SET NULL"), nullable=True)
    previous_event = relationship(
        "TimelineEvent", remote_side=[id], post_update=True, uselist=False,
        backref=backref("next_event", uselist=False,
                        remote_side=[previous_event_id]))

    def __init__(self, **kwargs):
        # Pull linked-list kwargs out before the base __init__, then apply
        # them through the setters so insertion bookkeeping runs.
        previous_event_id = None
        previous_event = None
        if 'previous_event' in kwargs:
            previous_event = kwargs['previous_event']
            del kwargs['previous_event']
        if 'previous_event_id' in kwargs:
            previous_event_id = kwargs['previous_event_id']
            del kwargs['previous_event_id']
        super(TimelineEvent, self).__init__(**kwargs)
        if previous_event is not None:
            self.set_previous_event(previous_event)
        elif previous_event_id is not None:
            self.set_previous_event_id(previous_event_id)

    discussion = relationship(
        Discussion, backref=backref(
            'timeline_events', order_by=start,
            cascade="all, delete-orphan"),
        info={'rdf': QuadMapPatternS(None, ASSEMBL.in_conversation)}
    )

    def set_previous_event(self, previous_event):
        # This allows setting the previous event as an insert.
        # this method may not be reliable with unflushed objects.
        self.set_previous_event_id(
            previous_event.id if previous_event is not None else None)
        self.previous_event = previous_event
        # NOTE(review): despite the None-guard above, the next line
        # dereferences previous_event unconditionally and would crash on
        # None — confirm callers never pass None here.
        previous_event.next_event = self

    def set_previous_event_id(self, previous_event_id):
        if previous_event_id != self.previous_event_id:
            # TODO: Detect and avoid cycles
            # If another event already points at this predecessor, splice
            # ourselves in between (insert semantics).
            if previous_event_id is not None:
                existing = self.__class__.get_by(
                    previous_event_id=previous_event_id)
                if existing:
                    existing.previous_event = self
            # Invalidate any stale cached relationship value.
            if inspect(self).persistent:
                self.db.expire(self, ['previous_event'])
            elif 'previous_event' in self.__dict__:
                del self.__dict__['previous_event']
            self.previous_event_id = previous_event_id

    def get_discussion_id(self):
        return self.discussion_id

    @classmethod
    def get_discussion_conditions(cls, discussion_id, alias_maker=None):
        return (cls.discussion_id == discussion_id,)

    crud_permissions = CrudPermissions(P_ADMIN_DISC, P_READ)
class Base(Model):
    """Abstract base model: integer PK, created/modified timestamps,
    and a versioned JSON decode/edit protocol."""
    __abstract__ = True

    @declared_attr
    def __tablename__(cls):
        # Table name defaults to the lowercased class name.
        return cls.__name__.lower()

    def __repr__(self):
        return str(self)

    def pretty_date_modified(self):
        """Format date_modified for humans, stripping leading zeros."""
        return self.date_modified.strftime(" %I:%M%p on %a %d, %b %Y").replace(
            " 0", " ")

    def pretty_date_created(self):
        """Format date_created for humans, stripping leading zeros."""
        return self.date_created.strftime(" %I:%M%p on %a %d, %b %Y").replace(
            " 0", " ")

    id = Column(Integer(), primary_key=True, autoincrement=True)
    date_created = Column(DateTime, default=func.current_timestamp())
    date_modified = Column(DateTime, default=func.current_timestamp(),
                           onupdate=func.current_timestamp())

    # Columns dropped / renamed while decoding a serialized row,
    # keyed by schema version.
    SCHEMA_V1_IGNORE_COLUMNS = ('id', 'date_modified')
    SCHEMA_V2_IGNORE_COLUMNS = SCHEMA_V1_IGNORE_COLUMNS
    SCHEMA_V1_RENAME_COLUMNS = {}
    SCHEMA_V2_RENAME_COLUMNS = {}

    @classmethod
    def decode_json(cls, data, **kwargs):
        """Create or update a row from a serialized dict.

        `data` must carry a `_schema_version` key (1 or 2); entries in
        `kwargs` override fields of `data`. Returns the persisted instance.
        Raises Exception for an unrecognized schema version.
        """
        existing = cls.get_existing(data)
        data = dict(data)
        schema_version = data.pop('_schema_version')
        if schema_version == 1:
            ignored, renamed = cls.SCHEMA_V1_IGNORE_COLUMNS, cls.SCHEMA_V1_RENAME_COLUMNS
        elif schema_version == 2:
            ignored, renamed = cls.SCHEMA_V2_IGNORE_COLUMNS, cls.SCHEMA_V2_RENAME_COLUMNS
        else:
            # BUG FIX: the original re-read '_schema_version' from `data`
            # after popping it, so the message always said "Unknown".
            raise Exception("Unknown schema version: {}".format(schema_version))
        data['date_created'] = string_to_datetime(data['date_created'])
        # BUG FIX: iterate items(); iterating the dict itself yields only
        # keys, so `for old, new in renamed` unpacked each key string.
        for old, new in renamed.items():
            data[new] = data.pop(old)
        for key, value in kwargs.items():
            data[key] = value
        for ignore in ignored:
            if ignore in data:
                del data[ignore]
        if existing:
            existing.edit(data, update_version=False)
        else:
            existing = cls(**data)
            db.session.add(existing)
        db.session.commit()
        return existing

    @classmethod
    def get_existing(cls, data):
        # Match on URL when available; returns None otherwise.
        if 'url' in data and data['url']:
            return cls.by_url(data['url'])

    @classmethod
    def by_id(cls, pk_id):
        """Look up by primary key; None-safe."""
        if pk_id is None:
            return None
        return cls.query.get(pk_id)

    def edit(self, updates, update_version=True):
        """Apply `updates`; bump `version` and commit only if anything changed.

        Returns True when at least one attribute actually changed.
        """
        modified = False
        for key, value in updates.items():
            if getattr(self, key) != value:
                modified = True
                setattr(self, key, value)
        if modified:
            if update_version:
                self.version += 1
            db.session.commit()
        return modified

    def encode_human(self):
        """ Create a human-friendly version of this data """
        # FIXME(review): this calls itself unconditionally
        # (json.dumps(self.encode_human())) and will always hit the
        # recursion limit; it presumably meant to dump a different
        # encoding method — confirm intended target before fixing.
        return {'{id}.md'.format(id=self.id): json.dumps(self.encode_human())}
class Task(Base):
    """Analysis task queue."""
    __tablename__ = "tasks"

    id = Column(Integer(), primary_key=True)
    target = Column(Text(), nullable=False)
    category = Column(String(255), nullable=False)
    timeout = Column(Integer(), server_default="0", nullable=False)
    priority = Column(Integer(), server_default="1", nullable=False)
    custom = Column(String(255), nullable=True)
    owner = Column(String(64), nullable=True)
    machine = Column(String(255), nullable=True)
    package = Column(String(255), nullable=True)
    tags = relationship("Tag", secondary=tasks_tags, cascade="all, delete",
                        single_parent=True,
                        backref=backref("task", cascade="all"),
                        lazy="subquery")
    options = Column(String(255), nullable=True)
    platform = Column(String(255), nullable=True)
    memory = Column(Boolean, nullable=False, default=False)
    enforce_timeout = Column(Boolean, nullable=False, default=False)
    clock = Column(DateTime(timezone=False),
                   default=datetime.now, nullable=False)
    added_on = Column(DateTime(timezone=False),
                      default=datetime.now, nullable=False)
    started_on = Column(DateTime(timezone=False), nullable=True)
    completed_on = Column(DateTime(timezone=False), nullable=True)
    status = Column(Enum(TASK_PENDING, TASK_RUNNING, TASK_COMPLETED,
                         TASK_REPORTED, TASK_RECOVERED, TASK_FAILED_ANALYSIS,
                         TASK_FAILED_PROCESSING, TASK_FAILED_REPORTING,
                         name="status_type"),
                    server_default=TASK_PENDING, nullable=False)
    sample_id = Column(Integer, ForeignKey("samples.id"), nullable=True)
    sample = relationship("Sample", backref="tasks")
    guest = relationship("Guest", uselist=False, backref="tasks",
                         cascade="save-update, delete")
    errors = relationship("Error", backref="tasks",
                          cascade="save-update, delete")

    def to_dict(self):
        """Converts object to dict.
        @return: dict
        """
        entry = {}
        for column in self.__table__.columns:
            raw = getattr(self, column.name)
            # Timestamps are rendered as strings; other values pass through.
            entry[column.name] = (raw.strftime("%Y-%m-%d %H:%M:%S")
                                  if isinstance(raw, datetime) else raw)
        # Tags are a relation so no column to iterate.
        entry["tags"] = [tag.name for tag in self.tags]
        return entry

    def to_json(self):
        """Converts object to JSON.
        @return: JSON data
        """
        return json.dumps(self.to_dict())

    def __init__(self, target=None):
        self.target = target

    def __repr__(self):
        return "<Task('%s','%s')>" % (self.id, self.target)
class Filename(db.ModelBase):
    # Registry of every filename ever uploaded; the unique constraint
    # enforces that a filename can never be reused.
    __tablename__ = "file_registry"

    id = Column(Integer, primary_key=True, nullable=False)
    filename = Column(Text, unique=True, nullable=False)


# Association table linking releases (by name+version) to trove classifiers.
release_classifiers = Table(
    "release_classifiers",
    db.metadata,
    Column("name", Text()),
    Column("version", Text()),
    Column("trove_id", Integer(), ForeignKey("trove_classifiers.id")),
    # Composite FK keeps classifier rows following a release on rename.
    ForeignKeyConstraint(
        ["name", "version"],
        ["releases.name", "releases.version"],
        onupdate="CASCADE",
    ),
    Index("rel_class_name_idx", "name"),
    Index("rel_class_name_version_idx", "name", "version"),
    Index("rel_class_trove_id_idx", "trove_id"),
    Index("rel_class_version_id_idx", "version"),
)


class JournalEntry(db.ModelBase):
    # NOTE(review): this definition appears truncated at the chunk boundary;
    # the rest of the class body is not visible here.
    __tablename__ = "journals"
class Submission(Base):
    """A student's work on an assignment.

    `code` holds either raw program text or, for 'explain'-mode assignments,
    a JSON blob of the shape produced by `default_explanation`.
    """
    code = Column(Text(), default="")
    status = Column(Integer(), default=0)
    correct = Column(Boolean(), default=False)
    assignment_id = Column(Integer(), ForeignKey('assignment.id'))
    user_id = Column(Integer(), ForeignKey('user.id'))
    assignment_version = Column(Integer(), default=0)
    version = Column(Integer(), default=0)

    def __str__(self):
        return '<Submission {} for {}>'.format(self.id, self.user_id)

    @staticmethod
    def load(user_id, assignment_id):
        """Fetch this user's submission for the assignment, creating one
        (seeded from the assignment's mode) if none exists yet."""
        submission = Submission.query.filter_by(assignment_id=assignment_id,
                                                user_id=user_id).first()
        if not submission:
            submission = Submission(assignment_id=assignment_id,
                                    user_id=user_id)
            assignment = Assignment.by_id(assignment_id)
            if assignment.mode == 'explain':
                submission.code = json.dumps(
                    Submission.default_explanation(''))
            else:
                submission.code = assignment.on_start
            db.session.add(submission)
            db.session.commit()
        return submission

    @staticmethod
    def default_explanation(code):
        """Return the empty explain-mode structure wrapping `code`."""
        return {
            'code': code,
            'elements': {
                'CORGIS_USE': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'CORGIS_USE'
                },
                'FOR_LOOP': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'FOR_LOOP'
                },
                'DICTIONARY_ACCESS': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'DICTIONARY_ACCESS'
                },
                'IMPORT_CORGIS': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'IMPORT_CORGIS'
                },
                'LIST_APPEND': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'LIST_APPEND'
                },
                'IMPORT_MATPLOTLIB': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'IMPORT_MATPLOTLIB'
                },
                'ASSIGNMENT': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'ASSIGNMENT'
                },
                'MATPLOTLIB_PLOT': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'MATPLOTLIB_PLOT'
                },
                'LIST_ASSIGNMENT': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'LIST_ASSIGNMENT'
                },
                'IF_STATEMENT': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'IF_STATEMENT'
                },
                'DICT_ASSIGNMENT': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'DICT_ASSIGNMENT'
                },
                'PRINT_USE': {
                    'line': 0, 'present': False,
                    'answer': '', 'name': 'PRINT_USE'
                }
            }
        }

    @staticmethod
    def save_explanation_answer(user_id, assignment_id, name, answer):
        """Record the student's answer for one explain-mode element."""
        submission = Submission.query.filter_by(
            user_id=user_id, assignment_id=assignment_id).first()
        submission_destructured = json.loads(submission.code)
        elements = submission_destructured['elements']
        if name in elements:
            elements[name]['answer'] = answer
            submission.code = json.dumps(submission_destructured)
            submission.version += 1
            db.session.commit()
            submission.log_code()
        return submission_destructured

    def save_explanation_code(self, code, elements):
        """Update explain-mode code and mark which elements are present.

        `elements` maps element names to their line locations; elements
        not mentioned are marked absent.
        """
        try:
            submission_destructured = json.loads(self.code)
        except ValueError:
            submission_destructured = {}
        if 'code' in submission_destructured:
            submission_destructured['code'] = code
            existing_elements = submission_destructured['elements']
            for element in existing_elements:
                existing_elements[element]['present'] = False
            for element, value in elements.items():
                existing_elements[element]['line'] = value
                existing_elements[element]['present'] = True
        else:
            # No valid prior structure: start from the default skeleton.
            submission_destructured = Submission.default_explanation(code)
        self.code = json.dumps(submission_destructured)
        self.version += 1
        db.session.commit()
        self.log_code()
        return submission_destructured

    # Order in which elements are offered as questions; first match wins.
    ELEMENT_PRIORITY_LIST = [
        'CORGIS_USE', 'FOR_LOOP', 'DICTIONARY_ACCESS', 'IMPORT_CORGIS',
        'LIST_APPEND', 'IMPORT_MATPLOTLIB', 'ASSIGNMENT', 'MATPLOTLIB_PLOT',
        'IF_STATEMENT', 'DICT_ASSIGNMENT', 'PRINT_USE'
    ]

    @staticmethod
    def abbreviate_element_type(element_type):
        """'FOR_LOOP' -> 'FL': initials of the underscore-separated words."""
        return ''.join([l[0] for l in element_type.split("_")])

    def load_explanation(self, max_questions):
        """Pick up to `max_questions` present elements, one per line,
        in priority order. Returns (code, elements)."""
        submission_destructured = json.loads(self.code)
        code = submission_destructured['code']
        available_elements = []
        used_lines = set()
        e = submission_destructured['elements']
        for element in Submission.ELEMENT_PRIORITY_LIST:
            # Not present?
            if not e[element]['present']:
                continue
            # Already used that line?
            # NOTE(review): 'line' is indexed here ([0]) but
            # default_explanation initializes it to the int 0 — this only
            # works for entries written by save_explanation_code with a
            # sequence value; confirm the producer's format.
            if e[element]['line'][0] in used_lines:
                continue
            # Cool, then add it
            available_elements.append(e[element])
            used_lines.add(e[element]['line'][0])
            # Stop if we have enough already
            if len(available_elements) >= max_questions:
                break
        return code, available_elements

    @staticmethod
    def save_code(user_id, assignment_id, code, assignment_version):
        """Save code; report whether the client's assignment version is
        still current. Returns (submission, is_version_correct)."""
        submission = Submission.query.filter_by(
            user_id=user_id, assignment_id=assignment_id).first()
        is_version_correct = True
        if not submission:
            submission = Submission(assignment_id=assignment_id,
                                    user_id=user_id,
                                    code=code,
                                    assignment_version=assignment_version)
            db.session.add(submission)
        else:
            submission.code = code
            submission.version += 1
            current_assignment_version = Assignment.by_id(
                submission.assignment_id).version
            is_version_correct = (
                assignment_version == current_assignment_version)
        db.session.commit()
        submission.log_code()
        return submission, is_version_correct

    @staticmethod
    def save_correct(user_id, assignment_id):
        """Mark the user's submission correct, creating one if needed."""
        submission = Submission.query.filter_by(
            user_id=user_id, assignment_id=assignment_id).first()
        if not submission:
            # BUG FIX: the original used `self.id` inside this
            # @staticmethod (NameError at runtime); the intended value is
            # the assignment_id argument.
            submission = Submission(assignment_id=assignment_id,
                                    user_id=user_id,
                                    correct=True)
            db.session.add(submission)
        else:
            submission.correct = True
        db.session.commit()
        return submission

    def log_code(self, extension='.py'):
        '''
        Store the code on disk, mapped to the Assignment ID and the
        Student ID
        '''
        # Multiple-file logging
        directory = os.path.join(app.config['BLOCKLY_LOG_DIR'],
                                 str(self.assignment_id),
                                 str(self.user_id))
        ensure_dirs(directory)
        name = time.strftime("%Y%m%d-%H%M%S")
        file_name = os.path.join(directory, name + extension)
        # NOTE(review): opening in binary mode but writing self.code,
        # which is text — fails on Python 3 unless code is bytes; confirm
        # the target interpreter before changing the mode.
        with open(file_name, 'wb') as blockly_logfile:
            blockly_logfile.write(self.code)
        # Single file logging
        student_interactions_logger = logging.getLogger('StudentInteractions')
        student_interactions_logger.info(
            StructuredEvent(self.user_id, self.assignment_id,
                            'code', 'set', self.code))
class RolesUsers(Base):
    """Join model linking users to their roles (many-to-many)."""
    __tablename__ = "roles_users"

    id = Column(Integer(), primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey("user.myuserid"))
    role_id = Column("role_id", Integer(), ForeignKey("role.myroleid"))
class User(Base, UserMixin):
    """Application user, with LTI (learning-tools) identity helpers."""
    # General user properties
    id = Column(Integer(), primary_key=True)
    first_name = Column(String(255))
    last_name = Column(String(255))
    email = Column(String(255))
    gender = Column(String(255), default='Unspecified')
    picture = Column(String(255), default='')  # A url
    proof = Column(String(255), default=None)
    password = Column(String(255))
    active = Column(Boolean())
    confirmed_at = Column(DateTime())

    # Foreign key relationships
    settings = relationship("Settings", backref='user', lazy='dynamic')
    roles = relationship("Role", backref='user', lazy='dynamic')
    authentications = relationship("Authentication", backref='user',
                                   lazy='dynamic')
    assignments = relationship("Assignment", backref='user', lazy='dynamic')

    def __str__(self):
        return '<User {} ({})>'.format(self.id, self.email)

    def name(self):
        """Full display name."""
        return ' '.join((self.first_name, self.last_name))

    def is_admin(self):
        role_names = {role.name.lower() for role in self.roles}
        return 'admin' in role_names

    def is_instructor(self):
        role_names = {role.name.lower() for role in self.roles}
        return 'instructor' in role_names

    @staticmethod
    def is_lti_instructor(given_roles):
        """True when any recognized instructor/TA/developer LTI role
        appears in `given_roles`."""
        ROLES = [
            "urn:lti:role:ims/lis/TeachingAssistant", "Instructor",
            "ContentDeveloper", "urn:lti:role:ims/lis/Instructor",
            "urn:lti:role:ims/lis/ContentDeveloper"
        ]
        return any(role in given_roles for role in ROLES)

    @staticmethod
    def new_lti_user(service, lti_user_id, lti_email, lti_first_name,
                     lti_last_name):
        """Create a user plus its Authentication record for an LTI login."""
        new_user = User(first_name=lti_first_name,
                        last_name=lti_last_name,
                        email=lti_email,
                        password="",
                        active=False,
                        confirmed_at=None)
        db.session.add(new_user)
        # Flush (not commit) so new_user.id is assigned before we link it.
        db.session.flush()
        new_authentication = Authentication(type=service,
                                            value=lti_user_id,
                                            user_id=new_user.id)
        db.session.add(new_authentication)
        db.session.commit()
        return new_user

    def register_authentication(self, service, lti_user_id):
        """Attach an additional LTI identity to this user."""
        new_authentication = Authentication(type=service,
                                            value=lti_user_id,
                                            user_id=self.id)
        db.session.add(new_authentication)
        db.session.commit()
        return self

    @staticmethod
    def from_lti(service, lti_user_id, lti_email, lti_first_name,
                 lti_last_name):
        """
        For a given service (e.g., "canvas"), and a user_id in the LTI
        system
        """
        lti = Authentication.query.filter_by(type=service,
                                             value=lti_user_id).first()
        if lti is not None:
            return lti.user
        # No LTI identity yet: link by email if the user exists,
        # otherwise create a new account.
        user = User.query.filter_by(email=lti_email).first()
        if user:
            user.register_authentication(service, lti_user_id)
            return user
        return User.new_lti_user(service, lti_user_id, lti_email,
                                 lti_first_name, lti_last_name)
def zoekLeverancier(m_email):
    """Prompt for a supplier number via a dialog and look it up.

    Shows a small Qt dialog asking for a 9-digit supplier number
    (starting with 3, validated with an 11-check), then queries the
    `leveranciers` table. Returns the matching row, or 0 when the input
    is invalid or no row exists.
    """
    metadata = MetaData()
    leveranciers = Table('leveranciers', metadata,
                         Column('leverancierID', Integer(), primary_key=True),
                         Column('bedrijfsnaam', String),
                         Column('rechtsvorm', String),
                         Column('postcode', String),
                         Column('huisnummer', String),
                         Column('toevoeging', String))
    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    conn = engine.connect()

    class Widget(QDialog):
        def __init__(self, parent=None):
            super(Widget, self).__init__(parent)
            self.setWindowTitle("Leverancier zoeken materialen.")
            self.setWindowIcon(QIcon('./images/logos/logo.jpg'))
            self.setFont(QFont('Arial', 10))
            # Label doubles as storage for the entered supplier number.
            self.Leveranciernummer = QLabel()
            levEdit = QLineEdit()
            levEdit.setFixedWidth(100)
            levEdit.setFont(QFont("Arial", 10))
            levEdit.textChanged.connect(self.levChanged)
            # Supplier numbers: exactly 9 digits, starting with 3.
            reg_ex = QRegExp('^[3]{1}[0-9]{8}$')
            input_validator = QRegExpValidator(reg_ex, levEdit)
            levEdit.setValidator(input_validator)
            grid = QGridLayout()
            grid.setSpacing(20)
            lbl = QLabel()
            pixmap = QPixmap('./images/logos/verbinding.jpg')
            lbl.setPixmap(pixmap)
            grid.addWidget(lbl, 0, 0, 1, 2)
            logo = QLabel()
            pixmap = QPixmap('./images/logos/logo.jpg')
            logo.setPixmap(pixmap)
            grid.addWidget(logo, 0, 2, 1, 1, Qt.AlignRight)
            self.setFont(QFont('Arial', 10))
            grid.addWidget(QLabel('Leverancier'), 2, 1)
            grid.addWidget(levEdit, 2, 2)
            cancelBtn = QPushButton('Sluiten')
            cancelBtn.clicked.connect(lambda: windowSluit(self, m_email))
            applyBtn = QPushButton('Zoeken')
            applyBtn.clicked.connect(self.accept)
            grid.addWidget(applyBtn, 3, 2, 1, 1, Qt.AlignRight)
            applyBtn.setFont(QFont("Arial", 10))
            applyBtn.setFixedWidth(100)
            applyBtn.setStyleSheet(
                "color: black; background-color: gainsboro")
            grid.addWidget(cancelBtn, 3, 1, 1, 1, Qt.AlignRight)
            cancelBtn.setFont(QFont("Arial", 10))
            cancelBtn.setFixedWidth(100)
            cancelBtn.setStyleSheet(
                "color: black; background-color: gainsboro")
            grid.addWidget(
                QLabel('\u00A9 2017 all rights reserved [email protected]'),
                4, 0, 1, 3, Qt.AlignCenter)
            self.setLayout(grid)
            self.setGeometry(300, 300, 150, 150)

        def levChanged(self, text):
            self.Leveranciernummer.setText(text)

        def returnLeveranciernummer(self):
            return self.Leveranciernummer.text()

        @staticmethod
        def getData(parent=None):
            dialog = Widget(parent)
            dialog.exec_()
            return [dialog.returnLeveranciernummer()]

    # BUG FIX: the original instantiated an extra, unused Widget before
    # calling the static getData (which builds its own dialog) — the first
    # instance was pure waste.
    data = Widget.getData()
    if data[0] and len(data[0]) == 9 and _11check(data[0]):
        mlevnr = int(data[0])
    else:
        # BUG FIX: the original leaked the DB connection on every path.
        conn.close()
        return 0
    sel = select([leveranciers]).where(
        leveranciers.c.leverancierID == mlevnr)
    rplev = conn.execute(sel).first()
    conn.close()
    if rplev:
        return rplev
    return 0