Example #1
def test_session_actions(self):
        """verify that session commit/rollback doesn't change txn state"""

        with closing(Transaction().open()) as txn:
            txn.session.execute(
                sa.insert(TestModel).values({
                    TestModel.id: 1,
                    TestModel.value: '1',
                })
            )
            txn.session.commit()

            self.assertTrue(
                txn.is_active,
                'txn becomes inactive after commit')

        with closing(Transaction().open()) as txn:
            txn.session.execute(
                sa.insert(TestModel).values({
                    TestModel.id: 2,
                    TestModel.value: '2',
                })
            )
            txn.session.rollback()

            self.assertTrue(
                txn.is_active,
                'txn becomes inactive after rollback')

        with closing(Transaction().open()) as txn:
            rows = txn.session.execute(sa.select([TestModel.id])).fetchall()

            self.assertFalse(
                rows,
                'data are stored in db after txn closed without txn commit')
Example #2
    def test_insert_returning(self):
        dialect = postgresql.dialect()
        table1 = table(
            "mytable", column("myid", Integer), column("name", String(128)), column("description", String(128))
        )

        i = insert(table1, values=dict(name="foo")).returning(table1.c.myid, table1.c.name)
        self.assert_compile(
            i,
            "INSERT INTO mytable (name) VALUES " "(%(name)s) RETURNING mytable.myid, " "mytable.name",
            dialect=dialect,
        )
        i = insert(table1, values=dict(name="foo")).returning(table1)
        self.assert_compile(
            i,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) RETURNING mytable.myid, "
            "mytable.name, mytable.description",
            dialect=dialect,
        )
        i = insert(table1, values=dict(name="foo")).returning(func.length(table1.c.name))
        self.assert_compile(
            i,
            "INSERT INTO mytable (name) VALUES " "(%(name)s) RETURNING length(mytable.name) " "AS length_1",
            dialect=dialect,
        )
Example #3
 def test_insert_returning(self):
     table1 = table(
         "mytable",
         column("myid", Integer),
         column("name", String(128)),
         column("description", String(128)),
     )
     i = insert(table1, values=dict(name="foo")).returning(
         table1.c.myid, table1.c.name
     )
     self.assert_compile(
         i,
         "INSERT INTO mytable (name) OUTPUT "
         "inserted.myid, inserted.name VALUES "
         "(:name)",
     )
     i = insert(table1, values=dict(name="foo")).returning(table1)
     self.assert_compile(
         i,
         "INSERT INTO mytable (name) OUTPUT "
         "inserted.myid, inserted.name, "
         "inserted.description VALUES (:name)",
     )
     i = insert(table1, values=dict(name="foo")).returning(
         func.length(table1.c.name)
     )
     self.assert_compile(
         i,
         "INSERT INTO mytable (name) OUTPUT "
         "LEN(inserted.name) AS length_1 VALUES "
         "(:name)",
     )
Example #4
    def set(self, key, value):
        '''Set a value in the cache.

        @param key Keyword of item in cache.
        @param value Value to be inserted in cache.
        '''
        if len(self) > self._max_entries:
            self._cull()
        timeout, cache = self.timeout, self._cache
        # Get expiration time
        expires = datetime.fromtimestamp(
            time.time() + timeout
        ).replace(microsecond=0)
        # Update database if key already present
        if key in self:
            update(
                cache,
                cache.c.key == key,
                dict(value=value, expires=expires),
            ).execute()
        # Insert new key if key not present
        else:
            insert(
                cache, dict(key=key, value=value, expires=expires)).execute()
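The set() above pairs naturally with a lookup that honors the expires column it writes. A hedged companion sketch, assuming the same bound self._cache table and the example's legacy implicit-execution style (select(...).execute(), delete(table, whereclause).execute()):
    def get(self, key, default=None):
        '''Fetch a value from the cache, or default if missing/expired.

        @param key Keyword of item in cache.
        @param default Value returned when key is absent or expired.
        '''
        cache = self._cache
        row = select(
            [cache.c.value, cache.c.expires], cache.c.key == key
        ).execute().fetchone()
        if row is None:
            return default
        # Expired entries are deleted lazily on lookup
        if row.expires < datetime.now():
            delete(cache, cache.c.key == key).execute()
            return default
        return row.value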
Example #5
    def test_insert_returning(self):
        dialect = postgresql.dialect()
        table1 = table('mytable',
                       column('myid', Integer),
                       column('name', String(128)),
                       column('description', String(128)),
                       )

        i = insert(
            table1,
            values=dict(
                name='foo')).returning(
            table1.c.myid,
            table1.c.name)
        self.assert_compile(i,
                            'INSERT INTO mytable (name) VALUES '
                            '(%(name)s) RETURNING mytable.myid, '
                            'mytable.name', dialect=dialect)
        i = insert(table1, values=dict(name='foo')).returning(table1)
        self.assert_compile(i,
                            'INSERT INTO mytable (name) VALUES '
                            '(%(name)s) RETURNING mytable.myid, '
                            'mytable.name, mytable.description',
                            dialect=dialect)
        i = insert(table1, values=dict(name='foo'
                                       )).returning(func.length(table1.c.name))
        self.assert_compile(i,
                            'INSERT INTO mytable (name) VALUES '
                            '(%(name)s) RETURNING length(mytable.name) '
                            'AS length_1', dialect=dialect)
Example #6
 def test_insert_returning(self):
     table1 = table(
         "mytable",
         column("myid", Integer),
         column("name", String(128)),
         column("description", String(128)),
     )
     i = insert(table1, values=dict(name="foo")).returning(
         table1.c.myid, table1.c.name
     )
     self.assert_compile(
         i,
         "INSERT INTO mytable (name) VALUES (:name) "
         "RETURNING mytable.myid, mytable.name",
     )
     i = insert(table1, values=dict(name="foo")).returning(table1)
     self.assert_compile(
         i,
         "INSERT INTO mytable (name) VALUES (:name) "
         "RETURNING mytable.myid, mytable.name, "
         "mytable.description",
     )
     i = insert(table1, values=dict(name="foo")).returning(
         func.length(table1.c.name)
     )
     self.assert_compile(
         i,
         "INSERT INTO mytable (name) VALUES (:name) "
         "RETURNING char_length(mytable.name) AS "
         "length_1",
     )
Example #7
def write_version_info(version_table, version_value):
    """
    Inserts the version value in to the version table.

    Parameters
    ----------
    version_table : sa.Table
        The version table of the asset database
    version_value : int
        The version to write in to the database

    """
    sa.insert(version_table, values={'version': version_value}).execute()
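A hedged usage sketch for write_version_info(); the table name below is illustrative, and because the function uses the legacy implicit-execution API (insert(...).execute()), the MetaData must be bound to an engine:
engine = sa.create_engine('sqlite://')
metadata = sa.MetaData(bind=engine)
version_table = sa.Table('version_info', metadata,
                         sa.Column('version', sa.Integer))
metadata.create_all()
write_version_info(version_table, 7)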
Example #8
def test_aggregation(tpostgres, to_insert_patients, to_insert_scores, expected):
    """Tests that we are performing join and aggregating."""
    tpostgres.connection.execute(
        sqla.insert(TUTORIAL_DATA_INGEST__PATIENTS, values=to_insert_patients))
    tpostgres.connection.execute(
        sqla.insert(TUTORIAL_ANALYTICS__CURRENT_SCORES, values=to_insert_scores))

    # Execute
    tpostgres.run_transform_query(QUERY_FILENAME)

    # Verify
    results = tpostgres.connection.execute(TARGET_QUERY)
    rows = [(row.doctor_id, row.assessment_type, row.avg_risk_score) for row in results]
    assert rows == expected
Example #9
def upgrade():
    conn = op.get_bind()

    language = sa.table('language', *map(sa.column, ['pk', 'id', 'name', 'updated']))
    lid = sa.bindparam('id_')
    lbefore = sa.bindparam('before')
    update_lang = sa.update(language, bind=conn)\
        .where(sa.and_(
            language.c.id == lid,
            language.c.name == lbefore))\
        .values(updated=sa.func.now(), name=sa.bindparam('after'))

    walslanguage = sa.table('walslanguage', *map(sa.column, ['pk', 'ascii_name']))
    aname = sa.bindparam('ascii_name')
    update_wals = sa.update(walslanguage, bind=conn)\
        .where(sa.exists().where(sa.and_(
            language.c.pk == walslanguage.c.pk,
            language.c.id == lid))\
        .where(walslanguage.c.ascii_name != aname))\
        .values(ascii_name=aname)

    icols = ['created', 'updated', 'active', 'version', 'type', 'description', 'lang', 'name']
    identifier = sa.table('identifier', *map(sa.column, ['pk'] + icols))
    itype, idesc, ilang = (sa.bindparam(*a) for a in [('type', 'name'), ('description', 'other'), ('lang', 'en')])
    iname = sa.bindparam('name')
    iwhere = sa.and_(
        identifier.c.type == itype,
        identifier.c.description == idesc,
        identifier.c.lang == ilang,
        identifier.c.name == iname)
    insert_ident = sa.insert(identifier, bind=conn).from_select(icols,
        sa.select([sa.func.now(), sa.func.now(), True, 1, itype, idesc, ilang, iname])
        .where(~sa.exists().where(iwhere)))

    licols = ['created', 'updated', 'active', 'version', 'language_pk', 'identifier_pk']
    languageidentifier = sa.table('languageidentifier', *map(sa.column, licols))
    l_pk = sa.select([language.c.pk]).where(language.c.id == lid)
    i_pk = sa.select([identifier.c.pk]).where(sa.and_(iwhere))
    insert_lang_ident = sa.insert(languageidentifier, bind=conn).from_select(licols,
        sa.select([sa.func.now(), sa.func.now(), True, 1, l_pk.as_scalar(), i_pk.as_scalar()])
        .where(~sa.exists().where(sa.and_(
            languageidentifier.c.language_pk == l_pk,
            languageidentifier.c.identifier_pk == i_pk))))

    for id_, (before, after, keep) in sorted(ID_BEFORE_AFTER_KEEP.items()):
        update_lang.execute(id_=id_, before=before, after=after)
        update_wals.execute(id_=id_, ascii_name=ascii_name(after))
        if keep:
            insert_ident.execute(name=before)
            insert_lang_ident.execute(id_=id_, name=before)
Example #10
    def test_rollback_on_close(self):
        """verify that savepoint is rolled back on close"""

        def _get_value(_session):
            _value = _session.execute(
                sa.select([TestModel.value])
            ).scalar()

            return _value

        with Transaction() as txn:
            txn.session.execute(
                sa.insert(TestModel).values({
                    TestModel.id: 1,
                    TestModel.value: '1',
                })
            )

            with closing(txn.savepoint().open()) as sp:
                sp.session.execute(
                    sa.update(TestModel).where(TestModel.id == 1).values({
                        TestModel.value: '2',
                    })
                )

                value = _get_value(sp.session)
                self.assertEqual(value, '2', 'update failed')

            value = _get_value(txn.session)
            self.assertEqual(value, '1', 'savepoint was not rolled back')
Example #11
 def _insert_or_update(self, timestamp, values, lastseen=None):
     stmt = insert(self.tables.passive)\
         .values(dict(values, addr=utils.force_int2ip(values['addr'])))
     try:
         self.db.execute(stmt)
     except IntegrityError:
         whereclause = and_(
             self.tables.passive.addr == values['addr'],
             self.tables.passive.sensor == values['sensor'],
             self.tables.passive.recontype == values['recontype'],
             self.tables.passive.source == values['source'],
             self.tables.passive.value == values['value'],
             self.tables.passive.targetval == values['targetval'],
             self.tables.passive.info == values['info'],
             self.tables.passive.port == values['port']
         )
         upsert = {
             'firstseen': func.least(
                 self.tables.passive.firstseen,
                 timestamp,
             ),
             'lastseen': func.greatest(
                 self.tables.passive.lastseen,
                 lastseen or timestamp,
             ),
             'count': self.tables.passive.count + values['count'],
         }
         updt = update(
             self.tables.passive
         ).where(whereclause).values(upsert)
         self.db.execute(updt)
Example #12
 def create(self, uuid):
     objects = self.PROPERTY
     stmt = insert(objects).values(
         uuid=uuid,
     )
     result = self.execute(stmt)
     return result.inserted_primary_key[0]
Example #13
def insertNewRow(tableName, TheDict):
	engine = create_engine('sqlite:///../database/database/rh.db')

	if tableName == "Devices":
		table = Devices
	elif tableName == "Errors":
		table = Errors
	elif tableName == "Maintenance":
		table = Maintenance
	elif tableName == "AirHandlerOne":
		table = AirHandlerOne
	elif tableName == "Setpoints":
		table = Setpoints
	else:
		raise ValueError("unknown table name: %s" % tableName)
	# Bind the engine to the metadata of the Base class so that the
	# declaratives can be accessed through a DBSession instance
	Base.metadata.bind = engine
	DBSession = sessionmaker(bind=engine)
	# A DBSession() instance establishes all conversations with the database
	# and represents a "staging zone" for all the objects loaded into the
	# database session object. Any change made against the objects in the
	# session won't be persisted into the database until you call
	# session.commit(). If you're not happy about the changes, you can
	# revert all of them back to the last commit by calling
	# session.rollback()
	session = DBSession()
	stmt = insert(table)
	stmt = stmt.values(TheDict)
	session.execute(stmt)
	session.commit()
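The comment block above describes the session as a staging zone; a minimal sketch of that behavior, reusing the example's Devices table (the column in the row dict is illustrative):
session = DBSession()
session.execute(insert(Devices).values({'name': 'pump'}))  # staged only
session.rollback()   # discards the staged insert; nothing reached the database
session.execute(insert(Devices).values({'name': 'pump'}))
session.commit()     # now the row is actually persisted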
Example #14
    def primary_index_get_or_insert(conn):
        q = sq.select(
            [t_primary.c.node],
            t_primary.c.id==dimension_value,
            bind=conn,
            for_update=True,
            )
        res = q.execute().fetchone()
        if res is not None:
            # it's already in there, we're done!
            node_id = res[t_primary.c.node]
            return node_id

        # node not assigned yet, insert while inside this transaction
        # so the above for_update will hold it locked for us. ugh
        # locks.
        node_id = _pick_node(
            hive_metadata=hive_metadata,
            dimension_name=dimension_name,
            partition_id=partition_id,
            )
        q = sq.insert(
            t_primary,
            {
                t_primary.c.id: dimension_value,
                t_primary.c.node: node_id,
                t_primary.c.secondary_index_count: 0,
                t_primary.c.last_updated: datetime.datetime.now(),
                t_primary.c.read_only: False,
                },
            )        # important to do this within the transaction
        conn.execute(q)
        return node_id
Example #15
 def insert(self, table_name, data):
     self.connect()
     table = meta.tables[table_name]
     query = sqlalchemy.insert(table, data)
     print(query)
     self.execute(query)
     self.disconnect()
Example #16
    def update_metadata(self, session, _update_pending=True):
        ## TODO: This does not take into account branching block chains
        log.info('Matching txin to previous txout')
        start = datetime.datetime.now()
        inserted = session.execute(
            insert(
                TxIn_TxOut
            ).returning(
                TxIn_TxOut.txin_id, TxIn_TxOut.txout_id
            ).from_select(
                [TxIn_TxOut.txin_id, TxIn_TxOut.txout_id],
                session.query(
                    TxInUnmatched.txin_id.label('txin_id'), TxOut.id.label('txout_id')
                ).join(
                    Transaction, Transaction.tx_hash == TxInUnmatched.previous_output_transaction_hash
                ).join(
                    TxOut, (Transaction.id == TxOut.transaction_id) & (TxOut.transaction_index == TxInUnmatched.previous_output_index)
                )
            )
        ).fetchall()
        log.info('...%i rows, %s', len(inserted), datetime.datetime.now() - start)
        txin_ids = [i[0] for i in inserted]
        txout_ids = [i[1] for i in inserted]
        log.info('Removing outdated txin_unmatched records')
        start = datetime.datetime.now()
        res = session.query(TxInUnmatched).filter(TxInUnmatched.txin_id.in_(txin_ids)).delete(synchronize_session=False)
        log.info('...%i rows, %s', res, datetime.datetime.now() - start)
        log.info('Removing outdated txout_unspent records')
        start = datetime.datetime.now()
        res = session.query(TxOutUnspent).filter(TxOutUnspent.txout_id.in_(txout_ids)).delete(synchronize_session=False)
        log.info('...%i rows, %s', res, datetime.datetime.now() - start)
        session.expire_all()

        return self.update_chain_metadata(session)
Example #17
async def update_balance(balance: List, account: Dict, conn: SAConnection):
    existed = {}
    query = sqlalchemy.select([table]).where(
        table.c.account_id == account.get('id')).order_by(table.c.date.asc())

    async for item in conn.execute(query):
        key = datetime.combine(item.date, datetime.min.time())
        existed[key] = {
            'id': item.id,
            'income': item.income,
            'expense': item.expense,
            'remain': item.remain,
            'date': item.date
        }

    for item in balance:
        if item['date'] in existed:
            query = sqlalchemy.update(table).where(
                table.c.id == existed[item['date']]['id']).values(**item)
        else:
            query = sqlalchemy.insert(table, values={
                'account_id': account['id'], **item
            })

        await conn.execute(query)
Example #18
def update_market_history():
    session = DBSession()
    with transaction.manager:
        last_tick = session.query(func.max(MarketHistoryElement.ticks)).scalar()
        current_date = datetime.datetime.now(tzlocal())
        current_tick = int((current_date - MarketHistoryElement.START_ERA).total_seconds() / MarketHistoryElement.TICK_SECONDS_LENGTH)
        assert last_tick <= current_tick
        if last_tick == current_tick:
            logger.debug("Skipping update to market history: tick %d already saved.", current_tick)
            return
        origin_select = session.\
            query(Item.data_id,
                  literal_column(str(current_tick)),
                  Item.buy_count,
                  Item.buy_price,
                  Item.sell_count,
                  Item.sell_price).\
            filter(Item.buy_count > 0, Item.sell_count > 0)
        i = insert(MarketHistoryElement).from_select([
            MarketHistoryElement.item_id,
            MarketHistoryElement.ticks,
            MarketHistoryElement.buy_count,
            MarketHistoryElement.buy_price,
            MarketHistoryElement.sell_count,
            MarketHistoryElement.sell_price
        ], origin_select)
        logger.debug("Executing market history insert...")
        i.execute()
        logger.debug("Saved market data for tick %d.", current_tick)
Example #19
async def add_message(*, chat_id, message='', action_type=None, request, db):
    values = {
        'user': request['user'].id,
        'chat': chat_id,
        'message': message,
    }

    if action_type:
        values['action_type'] = action_type

    query = sa.insert(Message).values(**values).returning(
        Message.id,
        Message.message,
        Message.datetime,
        Message.action_type,
    )

    message = await (await db.execute(query)).fetchone()
    message = dict(message)
    message['login'] = request['user'].login
    msg = dumps(message)
    if chat_id in request.app['websockets']:  # pragma: no cover
        for ws in request.app['websockets'][chat_id]:
            # pragma: no cover
            ws.send_str(msg)
Example #20
 def run(self):
     session = self.session()
     engine = session._WopMarsSession__session.bind
     conn = engine.connect()
     #
     snp2phenotype_path = self.input_file(InsertSNP2Phenotype.__input_file_snp2phenotype)
     snp_model = self.input_table(InsertSNP2Phenotype.__input_table_snp)
     phenotype_model = self.input_table(InsertSNP2Phenotype.__input_table_phenotype)
     snp2phenotype_model = self.output_table(InsertSNP2Phenotype.__output_table_snp2phenotype)
     snp2phenotype_df = pandas.read_table(snp2phenotype_path, header=None)
     #
     # read input file
     input_file_obj_list = []
     for line in csv.reader(open(snp2phenotype_path, 'r', encoding='utf-8'), delimiter="\t"):
         snp_rsid = int(line[0])
         phenotype_name = line[1]
         input_file_obj_list.append({'snp_rsid' : snp_rsid, 'phenotype_name' : phenotype_name})
     #
     # create insert
     snp_select = select([snp_model.id]).where(snp_model.rsid==bindparam('snp_rsid'))
     phenotype_select = select([phenotype_model.id]).where(phenotype_model.name==bindparam('phenotype_name'))
     output_table_insert = insert(table=snp2phenotype_model.__table__, values={'snp_id': snp_select, 'phenotype_id': phenotype_select})
     #
     if len(input_file_obj_list) > 0:
         if str(engine.__dict__['url']).split("://")[0]=='sqlite':
             engine.execute(output_table_insert.prefix_with("OR IGNORE", dialect='sqlite'), input_file_obj_list)
         elif str(engine.__dict__['url']).split("://")[0]=='mysql':
             from warnings import filterwarnings  # three lines to suppress mysql warnings
             import MySQLdb as Database
             filterwarnings('ignore', category=Database.Warning)
             engine.execute(output_table_insert.prefix_with("IGNORE", dialect='mysql'), input_file_obj_list)
         elif str(engine.__dict__['url']).split("://")[0]=='postgresql':
             from sqlalchemy.dialects.postgresql import insert as pg_insert
             output_table_insert_pg = pg_insert(table=snp2phenotype_model.__table__, values={'snp_id': snp_select, 'phenotype_id': phenotype_select}).on_conflict_do_nothing(index_elements=['snp_id', 'phenotype_id'])
             engine.execute(output_table_insert_pg, input_file_obj_list)
Example #21
    def setUpClass(cls):
        from sqlalchemy import engine_from_config

        engine = engine_from_config({'url': 'sqlite://'}, prefix='')

        qry = open('monasca_api/tests/sqlite_alarm.sql', 'r').read()
        sconn = engine.raw_connection()
        c = sconn.cursor()
        c.executescript(qry)
        sconn.commit()
        c.close()
        cls.engine = engine

        def _fake_engine_from_config(*args, **kw):
            return cls.engine
        cls.fixture = fixtures.MonkeyPatch(
            'sqlalchemy.create_engine', _fake_engine_from_config)
        cls.fixture.setUp()

        metadata = MetaData()
        cls.nm = models.create_nm_model(metadata)
        cls._delete_nm_query = delete(cls.nm)
        cls._insert_nm_query = (insert(cls.nm)
                                .values(
                                    id=bindparam('id'),
                                    tenant_id=bindparam('tenant_id'),
                                    name=bindparam('name'),
                                    type=bindparam('type'),
                                    address=bindparam('address'),
                                    created_at=bindparam('created_at'),
                                    updated_at=bindparam('updated_at')))
Example #22
async def test_chat_list(cli, create_user, user_token, session_db):
    db = await session_db()
    await create_user()

    chats = [
        {
            'name': 'Chat1',
            'user': None,
        },
        {
            'name': 'Chat2',
            'user': None,
        },
    ]

    for chat in chats:
        query = sa.insert(Chat).values(name=chat['name'])
        new_chat = await (await db.execute(query)).fetchone()
        chat['id'] = new_chat['id']

    resp = await cli.get('/chat/list', headers={
        'X-Auth-Token': user_token,
    })
    assert resp.status == 200
    assert await resp.json() == chats
Example #23
    def test_insert_with_values_func(self):
        table1 = self.tables.mytable

        self.assert_compile(
            insert(table1, values=dict(myid=func.lala())),
            "INSERT INTO mytable (myid) VALUES (lala())",
        )
Example #24
def insertData(data, connection, busdata):
	# Build an insert statement for the data table: stmt
	stmt = insert(busdata)
	# Execute stmt with the values_list: data
	results = connection.execute(stmt, data)
	# Return rowcount
	return results.rowcount
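A hedged usage sketch: connection.execute(stmt, data) runs an executemany when data is a list of parameter dicts, so insertData() inserts one row per dict (the column names below are illustrative):
data = [
    {'stop_id': 1, 'stop_name': 'Main St'},
    {'stop_id': 2, 'stop_name': 'Elm St'},
]
print(insertData(data, connection, busdata))  # rowcount, typically 2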
Example #25
def extract_and_insert_words(session, japanese_module):
    '''
    Retrieve new messages and extract nouns from them.
    Insert the noun-message pairs into the tmp table.
    Register new words and message_words into the tables from the tmp table.
    '''
    # setup generators(filter and extractor)
    message_reader = _get_new_messges_query(session)
    if not message_reader:
        return
    message_reader = _apply_message_cutter(message_reader)
    message_words_reader = _message_words_gen(message_reader, japanese_module)
    message_words_values_gen = _message_words_values_gen(message_words_reader)
    
    # insert the noun-message pairs into tmp table.
    mid = None
    stmt = insert(TmpMessageWord.__table__)
    values = []
    loop_count = 0
    for values in message_words_values_gen:
        session.execute(stmt, values)
        loop_count += len(values)
        mid = values[-1]["mid"]
    session.flush()
    logging.info("%s temporary message words were inserted.", loop_count)

    # register new words from tmp_message_words table.
    _register_new_words(session)

    # register new message words from tmp_message_words table.
    _resiter_new_message_words(session)

    if mid:
        session.merge(Parameter(LAST_MID_OF_INSERT_WORDS, mid))
Example #26
File: db.py Project: Greyvend/pyro
def insert_rows(engine, relation, rows):
    _rows = list(rows)
    if not _rows:
        return
    metadata = MetaData(engine, reflect=True)
    insert_query = insert(metadata.tables[relation['name']])
    _execute(engine, insert_query, _rows)
Example #27
def insertNewRow(tableName, TheDict, path):
	#Purpose: Function takes the table name (i.e. the class found in sql_declarative) and a dictionary, and inserts all of the values into the database

	#the engine calls the dictionary called rh.db
	#sqlite:/// with three forward slashes allow you to choose a directory using an relative path
	#sqlite://// with four forward slashes allows you to choose a directory using an absolute path
	engine = create_engine(path)
	#switch cases are used so that the function that calls insertNewRow doesn't have to import the classes
	#found in sql_declarative. 
	######
	#NOTE: if a new device or data table is added it needs to be added here as well and import on line three
	######
	
	if tableName == "devices":
		table = devices
	elif tableName == "errors":
		table = errors
	elif tableName == "maintenance":
		table = maintenance
	elif tableName == "controllerone":
		table = controllerone
	elif tableName == "controllertwo":
		table = controllertwo
	elif tableName == "setpoints":
		table = setpoints
	else:
		raise ValueError("unknown table name: %s" % tableName)
	# Bind the engine to the metadata of the Base class so that the
	# declaratives can be accessed through a DBSession instance
	Base.metadata.bind = engine
	DBSession = sessionmaker(bind=engine)
	session = DBSession()
	stmt = insert(table)
	stmt = stmt.values(TheDict)
	session.execute(stmt)
	session.commit()
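A usage sketch for the slash convention described in the comments above (the row dict is illustrative):
# three slashes: relative path
insertNewRow('devices', {'name': 'pump'}, 'sqlite:///../database/database/rh.db')
# four slashes: absolute path
insertNewRow('devices', {'name': 'pump'}, 'sqlite:////var/lib/rh/rh.db')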
Example #28
def set_group_content(session, group, packages, append=False, delete=False):
    """
    Makes given group contain given packages (by name).
    In append mode (append=True) doesn't remove any packages from the group.
    With append=False, makes the group contain only specified packages.
    With delete=True, only deletes given packages from the group.

    :param session: koschei session
    :param group: PackageGroup object
    :param packages: list of package names to be in the given group
    :param append: whether to clear the group first or append to existing content
    :param delete: whether to delete instead of adding
    :raises PackagesDontExist: when packages weren't found
    """
    assert not append or not delete
    contents = set(packages)
    new_content = set(
        session.db.query(BasePackage)
        .filter(BasePackage.name.in_(contents))
        .all()
    )
    if len(new_content) != len(contents):
        raise PackagesDontExist(contents - {base.name for base in new_content})
    current_content = set(
        session.db.query(BasePackage)
        .join(PackageGroupRelation)
        .filter(PackageGroupRelation.group_id == group.id)
        .all()
    )
    if delete:
        to_add = set()
    else:
        to_add = new_content - current_content
    if to_add:
        rels = [dict(group_id=group.id, base_id=base.id) for base in to_add]
        session.db.execute(insert(PackageGroupRelation, rels))
        for base in to_add:
            session.log_user_action(
                "Group {} modified: package {} added".format(group.name, base.name),
                base_id=base.id,
            )
    if append:
        to_delete = set()
    elif delete:
        to_delete = new_content
    else:
        to_delete = current_content - new_content
    if to_delete:
        (
            session.db.query(PackageGroupRelation)
            .filter(PackageGroupRelation.group_id == group.id)
            .filter(PackageGroupRelation.base_id.in_(base.id for base in to_delete))
            .delete()
        )
        for base in to_delete:
            session.log_user_action(
                "Group {} modified: package {} removed".format(group.name, base.name),
                base_id=base.id,
            )
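A hedged usage sketch of the three modes described in the docstring; session and group come from the surrounding application:
set_group_content(session, group, ['foo', 'bar'])         # group is now exactly {foo, bar}
set_group_content(session, group, ['baz'], append=True)   # adds baz, keeps existing content
set_group_content(session, group, ['foo'], delete=True)   # removes foo only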
Example #29
def main():
    # First create the SQL db that we will dump to
    engine, table = init_db()
    connection = engine.connect()

    # Load up all this stuff - crappy code but it works (clean up if time but this whole script is a shoddy hack)
    clear_mappers()
    session = loadTables()
    session2 = loadOutput()

    # create a connection to the mongo DB
    client = MongoClient()
    db = client.dealtrader
    collection = db.raw_tweets

    while True:
        # get number of deals in the table
        cnttot = session.query(func.max(Deals.deal_id))
        num_deals = cnttot[0][0]
        #print num_deals

        cntdone = session2.query(func.max(Output.deal_id))
        min_deal = cntdone[0][0] or 0
        #print min_deal

        res = session.query(Deals).all()

        for i in range(min_deal, num_deals):
            tweetid = int(res[i].tweet_id)
            q =  session.query(Matches)
            mchres = q.filter(Matches.tweet_id == tweetid).all()
            tweet = collection.find_one( { 'id': tweetid } )
            try:
                deal_id = res[i].deal_id
                origtext = tweet['text']
                tweetts = str(tweet['created_at'])
                itemdescr = res[i].description
                itemprice = res[i].price
                itemurl = res[i].url
                lowest_price = min(list(map(lambda x : x.merchant_price, mchres)))
                best_listings = list(filter(lambda x : x.merchant_price==lowest_price, mchres))
                best_listing = best_listings[0]
                bestprice = str(best_listing.merchant_price)
                bestlink = str(best_listing.url)

                ins = insert(table).values(
                                deal_id = deal_id,
                                tweet_id = tweetid,
                                orig_text = origtext,
                                tweet_ts = tweetts,
                                description = itemdescr,
                                price = itemprice,
                                url = itemurl,
                                best_price = bestprice,
                                best_url = bestlink
                                )
                result = connection.execute(ins)
            except:
                pass
Example #30
    def test_generic_insert_bind_params_all_columns(self):
        table1 = self.tables.mytable

        self.assert_compile(
            insert(table1),
            "INSERT INTO mytable (myid, name, description) "
            "VALUES (:myid, :name, :description)",
        )
Example #31
    def test_generic_insert_bind_params_all_columns(self):
        table1 = self.tables.mytable

        self.assert_compile(
            insert(table1), 'INSERT INTO mytable (myid, name, description) '
            'VALUES (:myid, :name, :description)')
Example #32
def issue_apikey(
    payload: dict,
    raiseonfail: bool = False,
    override_authdb_path: str = None,
    override_permissions_json: str = None,
    config: SimpleNamespace = None,
) -> dict:
    """Issues a new API key.

    Parameters
    ----------

    payload : dict
        The payload dict must have the following keys:

        - issuer: str, the entity that will be designated as the API key issuer
        - audience: str, the service this API key is being issued for
        - subject: str, the specific API endpoint API key is being issued for
        - apiversion: int or str, the API version that the API key is valid for
        - expires_days: int, the number of days after which the API key will
          expire
        - not_valid_before: float or int, the amount of seconds after utcnow()
          when the API key becomes valid
        - user_id: int, the user ID of the user requesting the API key
        - user_role: str, the user role of the user requesting the API key
        - ip_address: str, the IP address to tie the API key to
        - user_agent: str, the browser user agent requesting the API key
        - session_token: str, the session token of the user requesting the API
          key

    raiseonfail : bool
        If True, will raise an Exception if something goes wrong.

    override_authdb_path : str or None
        If given as a str, is the alternative path to the auth DB.

    override_permissions_json : str or None
        If given as a str, is the alternative path to the permissions JSON to
        use. This is used to check if the user_id is allowed to actually request
        an API key.

    config : SimpleNamespace object or None
        An object containing systemwide config variables as attributes. This is
        useful when the wrapping function needs to pass in some settings
        directly from environment variables.

    Returns
    -------

    dict
        The dict returned is of the form::

            {'success': True or False,
             'apikey': apikey dict,
             'expires': expiry datetime in ISO format,
             'messages': list of str messages if any}

    Notes
    -----

    API keys are tied to an IP address and client header combination.

    This function will return a dict with all the API key information. This
    entire dict should be serialized to JSON, encrypted and time-stamp signed by
    the frontend as the final "API key", and finally sent back to the client.

    """

    engine, meta, permjson, dbpath = get_procdb_permjson(
        override_authdb_path=override_authdb_path,
        override_permissions_json=override_permissions_json,
        raiseonfail=raiseonfail,
    )

    for key in ("reqid", "pii_salt"):
        if key not in payload:
            LOGGER.error(
                "Missing %s in payload dict. Can't process this request." % key
            )
            return {
                "success": False,
                "apikey": None,
                "expires": None,
                "failure_reason": (
                    "invalid request: missing '%s' in request" % key
                ),
                "messages": ["Invalid API key request."],
            }

    for key in {
        "user_id",
        "user_role",
        "expires_days",
        "not_valid_before",
        "issuer",
        "audience",
        "subject",
        "ip_address",
        "user_agent",
        "session_token",
        "apiversion",
    }:

        if key not in payload:
            LOGGER.error(
                "[%s] Invalid API key request, missing %s."
                % (payload["reqid"], key)
            )
            return {
                "success": False,
                "apikey": None,
                "expires": None,
                "failure_reason": (
                    "invalid request: missing '%s' in request" % key
                ),
                "messages": ["Some required keys are missing from payload."],
            }

    # check if the provided user_id and role can actually create an API key
    user_id = payload["user_id"]
    user_role = payload["user_role"]

    apikey_creation_allowed = check_user_access(
        {
            "user_id": user_id,
            "user_role": user_role,
            "action": "create",
            "target_name": "apikey",
            "target_owner": user_id,
            "target_visibility": "private",
            "target_sharedwith": None,
            "reqid": payload["reqid"],
            "pii_salt": payload["pii_salt"],
        },
        raiseonfail=raiseonfail,
        override_permissions_json=override_permissions_json,
        override_authdb_path=override_authdb_path,
    )

    if not apikey_creation_allowed["success"]:

        LOGGER.error(
            "[%s] Invalid API key issuance request. "
            "from user_id: %s, role: '%s'. "
            "The user is not allowed to create an API key."
            % (
                payload["reqid"],
                pii_hash(user_id, payload["pii_salt"]),
                pii_hash(user_role, payload["pii_salt"]),
            )
        )
        return {
            "success": False,
            "failure_reason": "user not allowed to issue API key",
            "messages": [
                "API key issuance failed. "
                "You are not allowed to issue an API key."
            ],
        }

    # check the session
    session_info = auth_session_exists(
        {
            "session_token": payload["session_token"],
            "pii_salt": payload["pii_salt"],
            "reqid": payload["reqid"],
        },
        raiseonfail=raiseonfail,
        override_authdb_path=override_authdb_path,
    )

    if not session_info["success"]:

        LOGGER.error(
            "[%s] Invalid API key request. "
            "user_id: %s, session_token: %s, role: '%s', "
            "ip_address: %s, user_agent: %s requested an API key for "
            "audience: '%s', subject: '%s', apiversion: %s. "
            "Session token of requestor was not found in the DB."
            % (
                payload["reqid"],
                pii_hash(payload["user_id"], payload["pii_salt"]),
                pii_hash(payload["session_token"], payload["pii_salt"]),
                payload["user_role"],
                pii_hash(payload["ip_address"], payload["pii_salt"]),
                pii_hash(payload["user_agent"], payload["pii_salt"]),
                payload["audience"],
                payload["subject"],
                payload["apiversion"],
            )
        )

        return {
            "success": False,
            "apikey": None,
            "expires": None,
            "failure_reason": (
                "invalid session for user requesting API key issuance"
            ),
            "messages": (
                ["Invalid session token for API key issuance request."]
            ),
        }

    session = session_info["session_info"]

    # check if the session info matches what we have in the payload
    session_ok = (
        (session["user_id"] == payload["user_id"])
        and (session["ip_address"] == payload["ip_address"])
        and (session["user_agent"] == payload["user_agent"])
        and (session["user_role"] == payload["user_role"])
    )

    if not session_ok:

        LOGGER.error(
            "[%s] Invalid API key request. "
            "user_id: %s, session_token: %s, role: '%s', "
            "ip_address: %s, user_agent: %s requested an API key for "
            "audience: '%s', subject: '%s', apiversion: '%s'. "
            "Session token info of requestor does not match payload info."
            % (
                payload["reqid"],
                pii_hash(payload["user_id"], payload["pii_salt"]),
                pii_hash(payload["session_token"], payload["pii_salt"]),
                payload["user_role"],
                pii_hash(payload["ip_address"], payload["pii_salt"]),
                pii_hash(payload["user_agent"], payload["pii_salt"]),
                payload["audience"],
                payload["subject"],
                payload["apiversion"],
            )
        )

        return {
            "success": False,
            "apikey": None,
            "expires": None,
            "failure_reason": (
                "invalid session for user requesting API key issuance"
            ),
            "messages": (
                [
                    "DB session user_id, ip_address, user_agent, "
                    "user_role does not match provided session info."
                ]
            ),
        }

    #
    # finally, generate the API key
    #
    random_token = secrets.token_urlsafe(32)

    # we'll return this API key dict to the frontend so it can JSON dump it,
    # encode to bytes, then encrypt, then sign it, and finally send back to the
    # client
    issued = datetime.utcnow()
    expires = issued + timedelta(days=payload["expires_days"])

    notvalidbefore = issued + timedelta(seconds=payload["not_valid_before"])

    apikey_dict = {
        "iss": payload["issuer"],
        "ver": payload["apiversion"],
        "uid": payload["user_id"],
        "rol": payload["user_role"],
        "usa": payload["user_agent"],
        "aud": payload["audience"],
        "sub": payload["subject"],
        "ipa": payload["ip_address"],
        "tkn": random_token,
        "iat": issued.isoformat(),
        "nbf": notvalidbefore.isoformat(),
        "exp": expires.isoformat(),
    }
    apikey_json = json.dumps(apikey_dict)

    # we'll also store this dict in the apikeys table
    apikeys = meta.tables["apikeys"]

    # NOTE: we store only the random token. this will later be checked for
    # equality against the value stored in the API key dict['tkn'] when we send
    # in this API key for verification later
    ins = insert(apikeys).values(
        {
            "apikey": random_token,
            "issued": issued,
            "expires": expires,
            "not_valid_before": notvalidbefore,
            "user_id": payload["user_id"],
            "user_role": payload["user_role"],
            "session_token": payload["session_token"],
        }
    )

    with engine.begin() as conn:
        conn.execute(ins)

    #
    # return the API key to the frontend
    #

    LOGGER.info(
        "[%s] API key request successful. "
        "user_id: %s, session_token: %s, role: '%s', "
        "ip_address: %s, user_agent: %s requested an API key for "
        "audience: '%s', subject: '%s', apiversion: '%s'. "
        "API key not valid before: %s, expires on: %s."
        % (
            payload["reqid"],
            pii_hash(payload["user_id"], payload["pii_salt"]),
            pii_hash(payload["session_token"], payload["pii_salt"]),
            payload["user_role"],
            pii_hash(payload["ip_address"], payload["pii_salt"]),
            pii_hash(payload["user_agent"], payload["pii_salt"]),
            payload["audience"],
            payload["subject"],
            payload["apiversion"],
            notvalidbefore.isoformat(),
            expires.isoformat(),
        )
    )

    messages = (
        "API key generated successfully, expires: %s." % expires.isoformat()
    )

    return {
        "success": True,
        "apikey": apikey_json,
        "expires": expires.isoformat(),
        "messages": ([messages]),
    }
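A hedged usage sketch of issue_apikey(); every value below is illustrative, but the payload carries all of the keys the two validation loops require:
result = issue_apikey({
    'reqid': 'req-0001',
    'pii_salt': '<per-deployment salt>',
    'issuer': 'authnzerver',
    'audience': 'api.example.org',
    'subject': '/api/v1/datasets',
    'apiversion': 1,
    'expires_days': 30,
    'not_valid_before': 5.0,
    'user_id': 42,
    'user_role': 'authenticated',
    'ip_address': '203.0.113.7',
    'user_agent': 'Mozilla/5.0 (example)',
    'session_token': '<session token>',
})
if result['success']:
    apikey_json = result['apikey']  # JSON str for the frontend to encrypt and sign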
Example #33
    def test_insert_with_values_func(self):
        table1 = self.tables.mytable

        self.assert_compile(insert(table1, values=dict(myid=func.lala())),
                            'INSERT INTO mytable (myid) VALUES (lala())')
Example #34
def upgrade_1(session, metadata):
    """
    Version 1 upgrade.

    This upgrade renames a number of keys to a single naming convention.
    """
    metadata_table = metadata.tables['metadata']
    # Copy "Version" to "name" ("version" used by upgrade system)
    try:
        session.execute(insert(metadata_table).values(
            key='name',
            value=select(
                [metadata_table.c.value],
                metadata_table.c.key == 'Version'
            ).as_scalar()
        ))
        session.execute(delete(metadata_table).where(metadata_table.c.key == 'Version'))
    except:
        log.exception('Exception when upgrading Version')
    # Copy "Copyright" to "copyright"
    try:
        session.execute(insert(metadata_table).values(
            key='copyright',
            value=select(
                [metadata_table.c.value],
                metadata_table.c.key == 'Copyright'
            ).as_scalar()
        ))
        session.execute(delete(metadata_table).where(metadata_table.c.key == 'Copyright'))
    except:
        log.exception('Exception when upgrading Copyright')
    # Copy "Permissions" to "permissions"
    try:
        session.execute(insert(metadata_table).values(
            key='permissions',
            value=select(
                [metadata_table.c.value],
                metadata_table.c.key == 'Permissions'
            ).as_scalar()
        ))
        session.execute(delete(metadata_table).where(metadata_table.c.key == 'Permissions'))
    except:
        log.exception('Exception when upgrading Permissions')
    # Copy "Bookname language" to "book_name_language"
    try:
        value_count = session.execute(
            select(
                [func.count(metadata_table.c.value)],
                metadata_table.c.key == 'Bookname language'
            )
        ).scalar()
        if value_count > 0:
            session.execute(insert(metadata_table).values(
                key='book_name_language',
                value=select(
                    [metadata_table.c.value],
                    metadata_table.c.key == 'Bookname language'
                ).as_scalar()
            ))
            session.execute(delete(metadata_table).where(metadata_table.c.key == 'Bookname language'))
    except:
        log.exception('Exception when upgrading Bookname language')
    # Copy "download source" to "download_source"
    try:
        value_count = session.execute(
            select(
                [func.count(metadata_table.c.value)],
                metadata_table.c.key == 'download source'
            )
        ).scalar()
        log.debug('download source: %s', value_count)
        if value_count > 0:
            session.execute(insert(metadata_table).values(
                key='download_source',
                value=select(
                    [metadata_table.c.value],
                    metadata_table.c.key == 'download source'
                ).as_scalar()
            ))
            session.execute(delete(metadata_table).where(metadata_table.c.key == 'download source'))
    except:
        log.exception('Exception when upgrading download source')
    # Copy "download name" to "download_name"
    try:
        value_count = session.execute(
            select(
                [func.count(metadata_table.c.value)],
                metadata_table.c.key == 'download name'
            )
        ).scalar()
        log.debug('download name: %s', value_count)
        if value_count > 0:
            session.execute(insert(metadata_table).values(
                key='download_name',
                value=select(
                    [metadata_table.c.value],
                    metadata_table.c.key == 'download name'
                ).as_scalar()
            ))
            session.execute(delete(metadata_table).where(metadata_table.c.key == 'download name'))
    except:
        log.exception('Exception when upgrading download name')
    # Copy "proxy server" to "proxy_server"
    try:
        value_count = session.execute(
            select(
                [func.count(metadata_table.c.value)],
                metadata_table.c.key == 'proxy server'
            )
        ).scalar()
        log.debug('proxy server: %s', value_count)
        if value_count > 0:
            session.execute(insert(metadata_table).values(
                key='proxy_server',
                value=select(
                    [metadata_table.c.value],
                    metadata_table.c.key == 'proxy server'
                ).as_scalar()
            ))
            session.execute(delete(metadata_table).where(metadata_table.c.key == 'proxy server'))
    except:
        log.exception('Exception when upgrading proxy server')
    # Copy "proxy username" to "proxy_username"
    try:
        value_count = session.execute(
            select(
                [func.count(metadata_table.c.value)],
                metadata_table.c.key == 'proxy username'
            )
        ).scalar()
        log.debug('proxy username: %s', value_count)
        if value_count > 0:
            session.execute(insert(metadata_table).values(
                key='proxy_username',
                value=select(
                    [metadata_table.c.value],
                    metadata_table.c.key == 'proxy username'
                ).as_scalar()
            ))
            session.execute(delete(metadata_table).where(metadata_table.c.key == 'proxy username'))
    except:
        log.exception('Exception when upgrading proxy username')
    # Copy "proxy password" to "proxy_password"
    try:
        value_count = session.execute(
            select(
                [func.count(metadata_table.c.value)],
                metadata_table.c.key == 'proxy password'
            )
        ).scalar()
        log.debug('proxy password: %s', value_count)
        if value_count > 0:
            session.execute(insert(metadata_table).values(
                key='proxy_password',
                value=select(
                    [metadata_table.c.value],
                    metadata_table.c.key == 'proxy password'
                ).as_scalar()
            ))
            session.execute(delete(metadata_table).where(metadata_table.c.key == 'proxy password'))
    except:
        log.exception('Exception when upgrading proxy password')
    try:
        session.execute(delete(metadata_table).where(metadata_table.c.key == 'dbversion'))
    except:
        log.exception('Exception when deleting dbversion')
    session.commit()
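Each copy-then-delete block above repeats one pattern; a hedged refactor sketch of that pattern, reusing the example's legacy select()/insert()/delete() calls (rename_key is a hypothetical helper, not part of the original):
def rename_key(session, metadata_table, old_key, new_key):
    value_count = session.execute(
        select(
            [func.count(metadata_table.c.value)],
            metadata_table.c.key == old_key
        )
    ).scalar()
    if value_count > 0:
        session.execute(insert(metadata_table).values(
            key=new_key,
            value=select(
                [metadata_table.c.value],
                metadata_table.c.key == old_key
            ).as_scalar()
        ))
        session.execute(delete(metadata_table).where(metadata_table.c.key == old_key))

rename_key(session, metadata_table, 'proxy server', 'proxy_server')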
Example #35
connection = engine.connect()
metadata = db.MetaData()

iss_data_table = db.Table(
    'iss_data_table', metadata, db.Column('iss_timestamp', db.String(50)),
    db.Column('iss_lat', db.String(20), nullable=False),
    db.Column('iss_lon', db.String(20), nullable=False),
    db.Column('num_description', db.String(255), nullable=True),
    db.Column('weather_description', db.String(255), nullable=True),
    db.Column('weather_temp', db.String(10), nullable=True),
    db.Column('country_alpha_code', db.String(10), nullable=True),
    db.Column('country_name', db.String(50), nullable=True),
    db.Column('country_borders', db.String(255), nullable=True),
    db.Column('country_flag_url', db.String(255), nullable=True),
    db.Column('country_capital', db.String(50), nullable=True))

metadata.create_all(engine)
query = db.insert(iss_data_table).values(
    iss_timestamp=iss_timestamp,
    iss_lat=iss_lat,
    iss_lon=iss_lon,
    num_description=num_description,
    weather_description=weather_description,
    weather_temp=weather_temp,
    country_alpha_code=country_alpha_code,
    country_name=country_name,
    country_borders=country_borders,
    country_flag_url=country_flag_url,
    country_capital=country_capital)
ResultProxy = connection.execute(query)
Example #36
    def setUpClass(cls):
        from sqlalchemy import engine_from_config

        engine = engine_from_config({'url': 'sqlite://'}, prefix='')

        qry = open('monasca_api/tests/sqlite_alarm.sql', 'r').read()
        sconn = engine.raw_connection()
        c = sconn.cursor()
        c.executescript(qry)
        sconn.commit()
        c.close()
        cls.engine = engine

        def _fake_engine_from_config(*args, **kw):
            return cls.engine

        cls.fixture = fixtures.MonkeyPatch('sqlalchemy.create_engine',
                                           _fake_engine_from_config)
        cls.fixture.setUp()

        metadata = MetaData()

        cls.aa = models.create_aa_model(metadata)
        cls._delete_aa_query = delete(cls.aa)
        cls._insert_aa_query = (insert(cls.aa).values(
            alarm_definition_id=bindparam('alarm_definition_id'),
            alarm_state=bindparam('alarm_state'),
            action_id=bindparam('action_id')))

        cls.ad = models.create_ad_model(metadata)
        cls._delete_ad_query = delete(cls.ad)
        cls._insert_ad_query = (insert(cls.ad).values(
            id=bindparam('id'),
            tenant_id=bindparam('tenant_id'),
            name=bindparam('name'),
            severity=bindparam('severity'),
            expression=bindparam('expression'),
            match_by=bindparam('match_by'),
            actions_enabled=bindparam('actions_enabled'),
            created_at=bindparam('created_at'),
            updated_at=bindparam('updated_at'),
            deleted_at=bindparam('deleted_at')))
        cls.sad = models.create_sad_model(metadata)
        cls._delete_sad_query = delete(cls.sad)
        cls._insert_sad_query = (insert(cls.sad).values(
            id=bindparam('id'),
            alarm_definition_id=bindparam('alarm_definition_id'),
            function=bindparam('function'),
            metric_name=bindparam('metric_name'),
            operator=bindparam('operator'),
            threshold=bindparam('threshold'),
            period=bindparam('period'),
            periods=bindparam('periods'),
            created_at=bindparam('created_at'),
            updated_at=bindparam('updated_at')))

        cls.sadd = models.create_sadd_model(metadata)
        cls._delete_sadd_query = delete(cls.sadd)
        cls._insert_sadd_query = (insert(cls.sadd).values(
            sub_alarm_definition_id=bindparam('sub_alarm_definition_id'),
            dimension_name=bindparam('dimension_name'),
            value=bindparam('value')))

        cls.nm = models.create_nm_model(metadata)
        cls._delete_nm_query = delete(cls.nm)
        cls._insert_nm_query = (insert(cls.nm).values(
            id=bindparam('id'),
            tenant_id=bindparam('tenant_id'),
            name=bindparam('name'),
            type=bindparam('type'),
            address=bindparam('address'),
            created_at=bindparam('created_at'),
            updated_at=bindparam('updated_at')))
Example #37
 def add_match(cls, match):
     with engine.connect() as connection:
         if not any(match):
             return None
         insert_query = db.insert(Games)
         connection.execute(insert_query, match)
Example #38
 def add_heroes(cls, hero):
     with engine.connect() as connection:
         if not any(hero):
             return None
         insert_query = db.insert(Heroes)
         connection.execute(insert_query, hero)
Example #39
def invParams(m_email):
    metadata = MetaData()
    params = Table('params', metadata,
                   Column('paramID', Integer(), primary_key=True),
                   Column('item', String), Column('tarief', Float),
                   Column('verrekening', String), Column('ondergrens', Float),
                   Column('bovengrens', Float), Column('datum', String),
                   Column('tarieffactor', Float),
                   Column('loonID', None, ForeignKey('lonen.loonID')))

    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    con = engine.connect()
    mparnr = (con.execute(select([func.max(params.c.paramID, type_=Integer)\
        .label('mparnr')])).scalar())
    mparnr += 1

    class Widget(QDialog):
        def __init__(self, parent=None):
            super(Widget, self).__init__(parent)
            grid = QGridLayout()
            grid.setSpacing(20)
            self.setWindowTitle("Invoeren Parameters")
            self.setWindowIcon(QIcon('./images/logos/logo.jpg'))

            self.setFont(QFont('Arial', 10))

            self.Item = QLabel()
            q1Edit = QLineEdit()
            q1Edit.setCursorPosition(0)
            q1Edit.setFixedWidth(150)
            q1Edit.setFont(QFont("Arial", 10))
            q1Edit.textChanged.connect(self.q1Changed)
            reg_ex = QRegExp("^.{0,20}$")
            input_validator = QRegExpValidator(reg_ex, q1Edit)
            q1Edit.setValidator(input_validator)

            self.Tarief = QLabel()
            q2Edit = QLineEdit()
            q2Edit.setFixedWidth(100)
            q2Edit.setFont(QFont("Arial", 10))
            q2Edit.textChanged.connect(self.q2Changed)
            reg_ex = QRegExp("^[-0-9.]{0,12}$")
            input_validator = QRegExpValidator(reg_ex, q2Edit)
            q2Edit.setValidator(input_validator)

            self.Verrekening = QLabel()
            q3Edit = QLineEdit()
            q3Edit.setFixedWidth(200)
            q3Edit.setFont(QFont("Arial", 10))
            q3Edit.textChanged.connect(self.q3Changed)
            reg_ex = QRegExp("^.{0,20}$")
            input_validator = QRegExpValidator(reg_ex, q3Edit)
            q3Edit.setValidator(input_validator)

            self.Ondergrens = QLabel()
            q4Edit = QLineEdit()
            q4Edit.setFixedWidth(100)
            q4Edit.setFont(QFont("Arial", 10))
            q4Edit.textChanged.connect(self.q4Changed)
            reg_ex = QRegExp("^[0-9.]{0,12}$")
            input_validator = QRegExpValidator(reg_ex, q4Edit)
            q4Edit.setValidator(input_validator)

            self.Bovengrens = QLabel()
            q5Edit = QLineEdit()
            q5Edit.setFixedWidth(100)
            q5Edit.setFont(QFont("Arial", 10))
            q5Edit.textChanged.connect(self.q5Changed)
            reg_ex = QRegExp("^[0-9.]{0,12}$")
            input_validator = QRegExpValidator(reg_ex, q5Edit)
            q5Edit.setValidator(input_validator)

            self.Tarieffactor = QLabel()
            q6Edit = QLineEdit()
            q6Edit.setFixedWidth(100)
            q6Edit.setFont(QFont("Arial", 10))
            q6Edit.textChanged.connect(self.q6Changed)
            reg_ex = QRegExp("^[0-9.]{0,12}$")
            input_validator = QRegExpValidator(reg_ex, q6Edit)
            q6Edit.setValidator(input_validator)

            grid = QGridLayout()
            grid.setSpacing(20)

            lbl1 = QLabel('Parameternummer')
            grid.addWidget(lbl1, 1, 0)

            lbl2 = QLabel(str(mparnr))
            grid.addWidget(lbl2, 1, 1)

            lbl3 = QLabel('Item')
            grid.addWidget(lbl3, 2, 0)
            grid.addWidget(q1Edit, 2, 1, 1, 2)

            lbl4 = QLabel('Tarief')
            grid.addWidget(lbl4, 3, 0)
            grid.addWidget(q2Edit, 3, 1)

            lbl5 = QLabel('Verrekening')
            grid.addWidget(lbl5, 4, 0)
            grid.addWidget(q3Edit, 4, 1, 1, 2)

            lbl6 = QLabel('Ondergrens')
            grid.addWidget(lbl6, 5, 0)
            grid.addWidget(q4Edit, 5, 1)

            lbl7 = QLabel('Bovengrens')
            grid.addWidget(lbl7, 6, 0)
            grid.addWidget(q5Edit, 6, 1)

            lbl8 = QLabel('Tarieffactor')
            grid.addWidget(lbl8, 7, 0)
            grid.addWidget(q6Edit, 7, 1)

            lbl = QLabel()
            pixmap = QPixmap('./images/logos/verbinding.jpg')
            lbl.setPixmap(pixmap)
            grid.addWidget(lbl, 0, 0, 1, 2)

            logo = QLabel()
            pixmap = QPixmap('./images/logos/logo.jpg')
            logo.setPixmap(pixmap)
            grid.addWidget(logo, 0, 2, 1, 1, Qt.AlignRight)

            grid.addWidget(
                QLabel('\u00A9 2017 all rights reserved [email protected]'),
                9, 0, 1, 3, Qt.AlignCenter)

            self.setLayout(grid)
            self.setGeometry(400, 250, 450, 150)

            applyBtn = QPushButton('Invoer')
            applyBtn.clicked.connect(self.accept)

            grid.addWidget(applyBtn, 8, 2)
            applyBtn.setFont(QFont("Arial", 10))
            applyBtn.setFixedWidth(100)
            applyBtn.setStyleSheet(
                "color: black;  background-color: gainsboro")

            cancelBtn = QPushButton('Sluiten')
            cancelBtn.clicked.connect(lambda: windowSluit(self, m_email))

            grid.addWidget(cancelBtn, 8, 1)
            cancelBtn.setFont(QFont("Arial", 10))
            cancelBtn.setFixedWidth(100)
            cancelBtn.setStyleSheet(
                "color: black;  background-color: gainsboro")

        def q1Changed(self, text):
            self.Item.setText(text)

        def q2Changed(self, text):
            self.Tarief.setText(text)

        def q3Changed(self, text):
            self.Verrekening.setText(text)

        def q4Changed(self, text):
            self.Ondergrens.setText(text)

        def q5Changed(self, text):
            self.Bovengrens.setText(text)

        def q6Changed(self, text):
            self.Tarieffactor.setText(text)

        def returnq1(self):
            return self.Item.text()

        def returnq2(self):
            return self.Tarief.text()

        def returnq3(self):
            return self.Verrekening.text()

        def returnq4(self):
            return self.Ondergrens.text()

        def returnq5(self):
            return self.Bovengrens.text()

        def returnq6(self):
            return self.Tarieffactor.text()

        @staticmethod
        def getData(parent=None):
            dialog = Widget(parent)
            dialog.exec_()
            return [dialog.returnq1(), dialog.returnq2(), dialog.returnq3(),
                    dialog.returnq4(), dialog.returnq5(), dialog.returnq6()]

    window = Widget()
    data = window.getData()

    if data[0]:
        mf0 = data[0]
    else:
        invVerplicht()
        return
    if data[1]:
        mf1 = float(data[1])
    else:
        invVerplicht()
        return
    if data[2]:
        mf2 = data[2]
    else:
        invVerplicht()
        return
    if data[3]:
        mf3 = float(data[3])
    else:
        mf3 = 0
    if data[4]:
        mf4 = float(data[4])
    else:
        mf4 = 0
    if data[5]:
        mf5 = float(data[5])
    else:
        mf5 = 0

    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    con = engine.connect()
    dt = str(datetime.datetime.now())
    dt = dt[0:10]
    inspar = insert(params).values(
        paramID=mparnr, item=mf0, tarief=mf1, verrekening=mf2,
        ondergrens=mf3, bovengrens=mf4, tarieffactor=mf5, datum=dt)
    con.execute(inspar)
    con.close()
    invoerOK()
Example #40
from sqlalchemy import insert, select, update
from sqlalchemy.exc import IntegrityError

from src.part1.chapter3.s29_exceptions import connection, cookies, line_items, orders, users

# Defining the stock #
######################

insertion = insert(users).values(username='******',
                                 email_address='*****@*****.**',
                                 phone='111-111-1111',
                                 password='******')
connection.execute(insertion)

insertion = cookies.insert()
inventory_list = [{
    'cookie_name': 'chocolate chip',
    'cookie_recipe_url': 'http://some.aweso.me/cookie/recipe.html',
    'cookie_sku': 'CC01',
    'quantity': '12',
    'unit_cost': '0.50'
}, {
    'cookie_name': 'dark chocolate chip',
    'cookie_recipe_url': 'http://some.aweso.me/cookie/recipe_dark.html',
    'cookie_sku': 'CC02',
    'quantity': '1',
    'unit_cost': '0.75'
}]
connection.execute(insertion, inventory_list)

# Adding orders for user #
Example #41
def dbf2sqlite(tables,
               years,
               refyear,
               pudl_settings,
               bad_cols=(),
               clobber=False,
               datastore=None):
    """Clone the FERC Form 1 Databsae to SQLite.

    Args:
        tables (iterable): What tables should be cloned?
        years (iterable): Which years of data should be cloned?
        refyear (int): Which database year to use as a template.
        pudl_settings (dict): Dictionary containing paths and database URLs
            used by PUDL.
        bad_cols (iterable of tuples): A list of (table, column) pairs
            indicating columns that should be skipped during the cloning
            process. Both table and column are strings in this case, the
            names of their respective entities within the database metadata.
        clobber (bool): Whether to wipe an existing copy of the database
            before cloning.
        datastore (Datastore): instance of a datastore to access the resources.

    Returns:
        None

    """
    # Read in the structure of the DB, if it exists
    logger.info("Dropping the old FERC Form 1 SQLite DB if it exists.")
    sqlite_engine = sa.create_engine(pudl_settings["ferc1_db"])
    try:
        # So that we can wipe it out
        pudl.helpers.drop_tables(sqlite_engine, clobber=clobber)
    except sa.exc.OperationalError:
        pass

    # And start anew
    sqlite_engine = sa.create_engine(pudl_settings["ferc1_db"])
    sqlite_meta = sa.MetaData(bind=sqlite_engine)

    # Get the mapping of filenames to table names and fields
    logger.info(f"Creating a new database schema based on {refyear}.")
    datastore = Ferc1Datastore(datastore)
    dbc_map = get_dbc_map(datastore, refyear)
    define_sqlite_db(sqlite_meta,
                     dbc_map,
                     datastore,
                     tables=tables,
                     refyear=refyear,
                     bad_cols=bad_cols)

    for table in tables:
        logger.info(f"Pandas: reading {table} into a DataFrame.")
        new_df = get_raw_df(datastore, table, dbc_map, years=years)
        # Because this table has no year in it, there would be multiple
        # definitions of respondents if we didn't drop duplicates.
        if table == 'f1_respondent_id':
            new_df = new_df.drop_duplicates(subset='respondent_id',
                                            keep='last')
        n_recs = len(new_df)
        logger.debug(f"    {table}: N = {n_recs}")
        # Only try and load the table if there are some actual records:
        if n_recs <= 0:
            continue

        # Write the records out to the SQLite database, and make sure that
        # the inferred data types are being enforced during loading.
        # if_exists='append' is being used because we defined the tables
        # above, but left them empty. Because the DB is reset at the beginning
        # of the function, this shouldn't ever result in duplicate records.
        coltypes = {col.name: col.type for col in sqlite_meta.tables[table].c}
        logger.info(f"SQLite: loading {n_recs} rows into {table}.")
        new_df.to_sql(table,
                      sqlite_engine,
                      if_exists='append',
                      chunksize=100000,
                      dtype=coltypes,
                      index=False)

    # add the missing respondents into the respondent_id table.
    reported_ids = (pd.read_sql_table("f1_respondent_id",
                                      sqlite_engine).respondent_id.unique())
    observed_ids = observed_respondents(sqlite_engine)
    missing = missing_respondents(
        reported=reported_ids,
        observed=observed_ids,
        identified=PUDL_RIDS,
    )
    logger.info(
        f"Inserting {len(missing)} missing IDs into f1_respondent_id table.")
    sa.insert(sqlite_meta.tables['f1_respondent_id'], values=missing).execute()
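The final line relies on SQLAlchemy 1.x implicit execution (a statement bound to the metadata's engine). Under 2.0-style usage the same insert would run through an explicit connection; a sketch, assuming missing is a list of row dictionaries:

# 2.0-style equivalent of the implicit-execution insert above (sketch):
with sqlite_engine.begin() as conn:
    conn.execute(sa.insert(sqlite_meta.tables['f1_respondent_id']), missing)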
Example #42
res = sqlalchemy.update(user_table).where(
    user_table.c.nome == 'Lucas Ricciardi de Salles').values(
        nome='Professor').execute()


def selecionar_usuarios_por_idade():

    resultados = 0
    idade = int(input('Enter an age: '))

    # Logic here
    resultados = len([
        r for r in sqlalchemy.select([user_table]).where(
            user_table.c.idade == idade).execute()
    ])

    print('We have {} users aged {}'.format(resultados, idade))


selecionar_usuarios_por_idade()

res = sqlalchemy.delete(user_table).where(user_table.c.idade == 26).execute()

print('{} records deleted'.format(res.rowcount))

# Statement templates (each .where() needs a real criterion before it can
# execute, and insert() takes .values() rather than .where()):
# sqlalchemy.insert(user_table).values(...).execute()
# sqlalchemy.update(user_table).where(...).values(...).execute()
# sqlalchemy.select([user_table]).where(...).execute()
# sqlalchemy.delete(user_table).where(...).execute()
Example #43
def update_entries(session, date, logger=None):
    """Create receiver coverage stats for Melissas ognrange."""

    if logger is None:
        logger = current_app.logger

    logger.info("Compute receiver coverages.")

    (start, end) = date_to_timestamps(date)

    # Filter aircraft beacons
    sq = (
        session.query(AircraftBeacon.location_mgrs_short, AircraftBeacon.receiver_name, AircraftBeacon.signal_quality, AircraftBeacon.altitude, AircraftBeacon.address)
        .filter(and_(between(AircraftBeacon.timestamp, start, end), AircraftBeacon.location_mgrs_short != null(), AircraftBeacon.receiver_name != null(), AircraftBeacon.address != null()))
        .subquery()
    )

    # ... and group them by reduced MGRS, receiver and date
    sq2 = (
        session.query(
            sq.c.location_mgrs_short,
            sq.c.receiver_name,
            func.cast(date, Date).label("date"),
            func.max(sq.c.signal_quality).label("max_signal_quality"),
            func.min(sq.c.altitude).label("min_altitude"),
            func.max(sq.c.altitude).label("max_altitude"),
            func.count(sq.c.altitude).label("aircraft_beacon_count"),
            func.count(func.distinct(sq.c.address)).label("device_count"),
        )
        .group_by(sq.c.location_mgrs_short, sq.c.receiver_name)
        .subquery()
    )

    # Replace receiver_name with receiver_id
    sq3 = (
        session.query(
            sq2.c.location_mgrs_short,
            Receiver.id.label("receiver_id"),
            sq2.c.date,
            sq2.c.max_signal_quality,
            sq2.c.min_altitude,
            sq2.c.max_altitude,
            sq2.c.aircraft_beacon_count,
            sq2.c.device_count,
        )
        .filter(sq2.c.receiver_name == Receiver.name)
        .subquery()
    )

    # If a receiver coverage entry exists --> update it
    upd = (
        update(ReceiverCoverage)
        .where(and_(ReceiverCoverage.location_mgrs_short == sq3.c.location_mgrs_short, ReceiverCoverage.receiver_id == sq3.c.receiver_id, ReceiverCoverage.date == date))
        .values(
            {
                "max_signal_quality": sq3.c.max_signal_quality,
                "min_altitude": sq3.c.min_altitude,
                "max_altitude": sq3.c.max_altitude,
                "aircraft_beacon_count": sq3.c.aircraft_beacon_count,
                "device_count": sq3.c.device_count,
            }
        )
    )

    result = session.execute(upd)
    update_counter = result.rowcount
    session.commit()
    logger.debug("Updated receiver coverage entries: {}".format(update_counter))

    # If a receiver coverage entry doesn't exist --> insert it
    new_coverage_entries = session.query(sq3).filter(
        ~exists().where(and_(ReceiverCoverage.location_mgrs_short == sq3.c.location_mgrs_short, ReceiverCoverage.receiver_id == sq3.c.receiver_id, ReceiverCoverage.date == date))
    )

    ins = insert(ReceiverCoverage).from_select(
        (
            ReceiverCoverage.location_mgrs_short,
            ReceiverCoverage.receiver_id,
            ReceiverCoverage.date,
            ReceiverCoverage.max_signal_quality,
            ReceiverCoverage.min_altitude,
            ReceiverCoverage.max_altitude,
            ReceiverCoverage.aircraft_beacon_count,
            ReceiverCoverage.device_count,
        ),
        new_coverage_entries,
    )

    result = session.execute(ins)
    insert_counter = result.rowcount
    session.commit()

    finish_message = "ReceiverCoverage: {} inserted, {} updated".format(insert_counter, update_counter)
    logger.debug(finish_message)
    return finish_message
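Since this update-then-insert dance targets PostgreSQL, the same upsert can be expressed in a single statement with the dialect-specific INSERT ... ON CONFLICT. A sketch, assuming a unique constraint over (location_mgrs_short, receiver_id, date) and rows as a list of dictionaries (both assumptions):

from sqlalchemy.dialects.postgresql import insert as pg_insert

ins = pg_insert(ReceiverCoverage).values(rows)
upsert = ins.on_conflict_do_update(
    index_elements=['location_mgrs_short', 'receiver_id', 'date'],
    set_={
        'max_signal_quality': ins.excluded.max_signal_quality,
        'min_altitude': ins.excluded.min_altitude,
        'max_altitude': ins.excluded.max_altitude,
        'aircraft_beacon_count': ins.excluded.aircraft_beacon_count,
        'device_count': ins.excluded.device_count,
    },
)
session.execute(upsert)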
Example #44
    async def async_insert(cls, values: List[Dict[str, object]]):
        query = insert(cls)

        return await databases_instance.execute_many(query=query,
                                                     values=values)
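A hypothetical call site for async_insert, assuming an async caller and a mapped model named Metric (both assumptions):

# rows = [{'id': 1, 'value': 'a'}, {'id': 2, 'value': 'b'}]
# await Metric.async_insert(rows)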
def process_image(db_ses, board_name, board, thread_id,
    Boards, Threads, Posts, Files,
    dl_dir, reqs_ses, current_file):
    """Put file-level data into DB"""
    logging.info(u'current_file={0!r}'.format(current_file))
    # Generate paths, names, etc.
    file_md5_hex = common.please_utf(current_file.file_md5_hex)
    archive_filename_file = '{h}.{e}'.format(h=file_md5_hex, e=current_file.file_extension)
    archive_filepath_file = common.generate_image_filepath_8ch(base=dl_dir, mtype=u'file', filename=archive_filename_file)
    archive_filename_thumbnail = '{h}.{e}'.format(h=file_md5_hex, e=current_file.file_extension)
    archive_filepath_thumbnail = common.generate_image_filepath_8ch(base=dl_dir, mtype=u'thumb', filename=archive_filename_thumbnail)
    # Look for existing copies of the file
    file_check_q = db_ses.query(Files)\
        .filter(Files.m_file_md5_hex == file_md5_hex)
    file_check_result = file_check_q.first()
    if file_check_result:
        file_saved = file_check_result.file_saved  # Does archive have file?
        thumbnail_saved = file_check_result.thumbnail_saved  # Does archive have thumbnail?
        forbidden = file_check_result.forbidden  # Has archive forbidden file hash?
        # Already saved?
        if file_saved and thumbnail_saved:
            # Do not save if file and thumbnail have been saved.
            logging.debug(u'File already saved: {0!r}'.format(current_file))
            return
        # Forbidden?
        elif forbidden:
            # If forbidden, do not permit saving file.
            logging.debug(u'File is forbidden: {0!r}, MD5={1!r}'.format(current_file, file_md5_hex))
            return
    else:
        # No existing DB entry for this file
        file_saved = False
        thumbnail_saved = False
    # Do downloads if appropriate
    # Save file
    if not file_saved:
        # Download is appropriate.
        logging.debug(u'Downloading file: {0!r}'.format(current_file.file_url))
        common.download_file(
            reqs_ses=reqs_ses,
            url=current_file.file_url,
            filepath=archive_filepath_file,
        )
    # Save thumb
    if not thumbnail_saved:
        logging.debug(u'Downloading thumbnail: {0!r}'.format(current_file.thumbnail_url))
        common.download_file(
            reqs_ses=reqs_ses,
            url=current_file.thumbnail_url,
            filepath=archive_filepath_thumbnail,
        )
    # Once we know we have the file on disk, create an entry on the image table
    # Insert file row
    ins = sqlalchemy.insert(Files)\
        .values(
        # py8chan columns
        m_file_md5_hex = common.please_utf(current_file.file_md5_hex),
        m_filename_original = common.please_utf(current_file.filename_original),
        m_filename = common.please_utf(current_file.filename),
        m_file_url = common.please_utf(current_file.file_url),
        m_file_extension = common.please_utf(current_file.file_extension),
        m_file_size = current_file.file_size,
        m_file_width = current_file.file_width,
        m_file_height = current_file.file_height,
        m_thumbnail_width = current_file.thumbnail_width,
        m_thumbnail_height = current_file.thumbnail_height,
        m_thumbnail_fname = common.please_utf(current_file.thumbnail_fname),
        m_thumbnail_url = common.please_utf(current_file.thumbnail_url),
        # archive-side data (File present on disk? File forbidden?)
        archive_filename_file = archive_filename_file,  # Actual disk filename
        archive_filename_thumbnail = archive_filename_thumbnail,  # Actual disk filename
        file_saved = True,  # Do we actually have the file, this should be set False if it is lost somehow.
        thumbnail_saved = True,  # Do we actually have the thumbnail, this should be set False if it is lost somehow.
    )
    # The original built this statement without executing it; run it on the
    # session so the file row is actually recorded.
    db_ses.execute(ins)
    logging.debug(u'Staged file entry')
    return
Example #46
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'users', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('first_name', sa.String(), nullable=False),
        sa.Column('last_name', sa.String(), nullable=False),
        sa.Column('family_name', sa.String(), nullable=True),
        sa.Column('nickname', sa.String(), nullable=False),
        sa.Column('password', sa.String(), nullable=False),
        sa.Column('created_at',
                  sa.DateTime(),
                  server_default=sa.text('now()'),
                  nullable=True),
        sa.Column('created_by', sa.Integer(), nullable=True),
        sa.Column('updated_at',
                  sa.DateTime(),
                  server_default=sa.text('now()'),
                  nullable=True),
        sa.Column('updated_by', sa.Integer(), nullable=True),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted_by', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['created_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['deleted_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['updated_by'],
            ['users.id'],
        ), sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'orders', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('created_at',
                  sa.DateTime(),
                  server_default=sa.text('now()'),
                  nullable=True),
        sa.Column('created_by', sa.Integer(), nullable=True),
        sa.Column('updated_at',
                  sa.DateTime(),
                  server_default=sa.text('now()'),
                  nullable=True),
        sa.Column('updated_by', sa.Integer(), nullable=True),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted_by', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['created_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['deleted_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['updated_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ), sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'products', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.String(), nullable=True),
        sa.Column('price', sa.Float(), nullable=False),
        sa.Column('created_at',
                  sa.DateTime(),
                  server_default=sa.text('now()'),
                  nullable=True),
        sa.Column('created_by', sa.Integer(), nullable=True),
        sa.Column('updated_at',
                  sa.DateTime(),
                  server_default=sa.text('now()'),
                  nullable=True),
        sa.Column('updated_by', sa.Integer(), nullable=True),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted_by', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['created_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['deleted_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['updated_by'],
            ['users.id'],
        ), sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'orders_products', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('order_id', sa.Integer(), nullable=False),
        sa.Column('product_id', sa.Integer(), nullable=False),
        sa.Column('created_at',
                  sa.DateTime(),
                  server_default=sa.text('now()'),
                  nullable=True),
        sa.Column('created_by', sa.Integer(), nullable=True),
        sa.Column('updated_at',
                  sa.DateTime(),
                  server_default=sa.text('now()'),
                  nullable=True),
        sa.Column('updated_by', sa.Integer(), nullable=True),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted_by', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['created_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['deleted_by'],
            ['users.id'],
        ), sa.ForeignKeyConstraint(
            ['order_id'],
            ['orders.id'],
        ), sa.ForeignKeyConstraint(
            ['product_id'],
            ['products.id'],
        ), sa.ForeignKeyConstraint(
            ['updated_by'],
            ['users.id'],
        ), sa.PrimaryKeyConstraint('id'))

    default_users = [
        # first_name, last_name, nickname, password
        ('Система', 'System', 'system', 'system'),
        ('admin', 'admin', 'admin', 'admin')
    ]

    for first_name, last_name, nickname, password in default_users:
        op.execute(
            insert(User).values(first_name=first_name,
                                last_name=last_name,
                                nickname=nickname,
                                password=User.get_password_hash(password),
                                created_by=1,
                                updated_by=1))
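Alembic also provides op.bulk_insert() for seed data, which takes a Table object and a list of row dictionaries. A sketch of the same seeding done in one call (users_table here is an assumed sa.Table mirroring the 'users' schema created above):

from alembic import op

op.bulk_insert(users_table, [
    {'first_name': 'Система', 'last_name': 'System', 'nickname': 'system',
     'password': User.get_password_hash('system'), 'created_by': 1, 'updated_by': 1},
    {'first_name': 'admin', 'last_name': 'admin', 'nickname': 'admin',
     'password': User.get_password_hash('admin'), 'created_by': 1, 'updated_by': 1},
])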
Example #47
from pprint import pprint

import sqlalchemy as sqa

username = "******"
password = ""
db_name = "dvdrental"
# connection string to postgres
DATABASE_URI = f"postgres+psycopg2://{username}@localhost:5432/{db_name}"
engine = sqa.create_engine(DATABASE_URI, echo=True)
connection = engine.connect()
metadata = sqa.MetaData()

table_name = 'film'
t = sqa.Table(table_name, metadata, autoload=True, autoload_with=engine)
# Print full table metadata
pprint(repr(t))

query = sqa.insert(t).values(film_id=10000,
                             title="coco",
                             description="cartoon",
                             release_year=2017,
                             language_id=1,
                             rental_duration=3,
                             rental_rate=4.5,
                             length=130,
                             replacement_cost=19.99,
                             rating='PG-13',
                             last_update='2018-05-26 14:50:58.951',
                             special_features="Trailers, Commentaries")
# ::tsvector

result = connection.execute(query)
Example #48
"""


import sqlalchemy
from sqlalchemy import create_engine, select, insert
from sqlalchemy.orm import sessionmaker
from model_state import Base, State
from sys import argv

if __name__ == "__main__":
    username = argv[1]
    password = argv[2]
    database_name = argv[3]

    """ create_engine function not conects with the database """
    engine = create_engine(
        'mysql+mysqldb://{}:{}@localhost:3306/{}'.format(
            username,
            password,
            database_name
        ),
        pool_pre_ping=True
    )
    """ to connect with the db but functions without engine.connect() """
    """ engine.connect() """

    s = insert(State).values(name="Louisiana")

    rs = engine.execute(s)
    print(rs.inserted_primary_key[0])
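engine.execute() is the 1.x implicit-execution API and is removed in SQLAlchemy 2.0; the same insert under 2.0-style usage would run through an explicit connection (a sketch):

# 2.0-style execution of the same statement (sketch):
with engine.begin() as conn:
    result = conn.execute(s)
    print(result.inserted_primary_key[0])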
def process_thread(db_ses, board_name, board, thread_id,
    Boards, Threads, Posts, Files, dl_dir, reqs_ses, ):
    """Fetch and insert one thread.
    db_ses: Sqlalchemy DB session
    board: py8chan Board instance
    WARNING: May overwrite existing post/thread data in table!
    TODO: Look into resolutions for this potential issue.
    """
    logging.debug(u'Fetching thread: {0!r}'.format(thread_id))
    # Load thread from site
    thread = board.get_thread(thread_id)
    logging.info(u'thread={0!r}'.format(thread))
    # Check if thread is already in the DB
    thread_check_q = db_ses.query(Threads)\
        .filter(Threads.t_thread_id == thread_id,)
    thread_check_result = thread_check_q.first()
    logging.info(u'thread_check_result={0!r}'.format(thread_check_result))
    # Push thread-level info to DB
    if (thread_check_result):
        # UPDATE thread entry
        upd = sqlalchemy.update(Threads)\
            .where(Threads.primary_key == thread_check_result.primary_key)\
            .values(
                t_thread_id = thread.id,
                t_board = 0,  # TODO Foreign key (thread-to-board association)
                t_last_reply_id = thread.last_reply_id,
                t_closed = thread.closed,
                t_sticky = thread.sticky,
                t_topic = thread.topic.post_id,  # TODO Figure out what we want to do here. (Should we even put a topic entry in here? (If so, make foreign key?))
                t_url = thread.url,
            )
        # The original built this statement without executing it; run it on the session.
        db_ses.execute(upd)
        logging.debug(u'Updated thread entry')
    else:
        # INSERT thread entry
        ins = sqlalchemy.insert(Threads)\
            .values(
                t_thread_id = thread.id,
                t_board = 0,  # TODO Foreign key (thread-to-board association)
                t_last_reply_id = thread.last_reply_id,
                t_closed = thread.closed,
                t_sticky = thread.sticky,
                t_topic = thread.topic.post_id,  # TODO Figure out what we want to do here. (Should we even put a topic entry in here? (If so, make foreign key?))
                t_url = thread.url,
            )
        # Likewise, execute the insert so the thread row is actually written.
        db_ses.execute(ins)
        logging.debug(u'Inserted thread entry')
    # Process all posts in the thread
    for post in thread.all_posts:
        process_post(
            db_ses,
            board_name,
            board,
            thread_id,
            Boards,
            Threads,
            Posts,
            Files,
            dl_dir,
            reqs_ses,
            post,
        )
    logging.debug(u'Finished this thread\'s posts')
    logging.info(u'Fetched thread')
    return
Example #50
import sqlalchemy as db
from eightqueens import QueensSolver

if __name__ == "__main__":
    engine = db.create_engine('postgresql+psycopg2://brime:panda@db/queensdb')
    connection = engine.connect()
    metadata = db.MetaData()
    solutions_table = db.Table(
        'solutions', metadata,
        db.Column('solution_id',
                  db.Integer(),
                  db.Sequence('id'),
                  primary_key=True),
        db.Column('solution_string', db.String(25), nullable=False))

    metadata.create_all(engine)

    payload = []

    solver = QueensSolver(8)
    print("Solutions for Eight Queens:")
    print(len(solver.solutions))
    for solution in solver.solutions:
        print(str(solution))
        query = db.insert(solutions_table).values(
            solution_string=str(solution))
        ResultProxy = connection.execute(query)
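The execute() result also exposes the sequence-assigned key; a short follow-up (sketch, same variable names as above):

        print(ResultProxy.inserted_primary_key[0])  # solution_id assigned by the Sequence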
def insert_user(cls, user_dict, parent_ref_code: str = None):
    with engine.connect() as connection:
        insert_query = db.insert(TelegramUser)
        if parent_ref_code:
            user_dict["parent_code"] = parent_ref_code
        connection.execute(insert_query, user_dict)
Example #52
ins = insert(cookies).values(  # opening reconstructed from the identical statement below
    cookie_name="chocolate chip",
    cookie_recipe_url="http://some.aweso.me/cookie/recipe.html",
    cookie_sku="CC01",
    quantity="12",
    unit_cost="0.50")

print(str(ins))

result = connection.execute(ins)

print(result.inserted_primary_key)

from sqlalchemy import insert

ins = insert(cookies).values(
    cookie_name="chocolate chip",
    cookie_recipe_url="http://some.aweso.me/cookie/recipe.html",
    cookie_sku="CC01",
    quantity="12",
    unit_cost="0.50")
print(str(ins))

ins = cookies.insert()

# transaction
transaction = connection.begin()
result = connection.execute(
    ins,
    cookie_name='dark chocolate chip',
    cookie_recipe_url='http://some.aweso.me/cookie/recipe_dark.html',
    cookie_sku='CC02',
    quantity='1',
    unit_cost='0.75')
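The example stops before the transaction is resolved; it would normally end with a commit (or a rollback on failure), e.g.:

transaction.commit()   # make the inserted row permanent
# transaction.rollback()  # ...or discard it instead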
Example #53
from __future__ import print_function
from sqlalchemy import insert

# bring in the table defs
import cookies_tables

# connect to our db
connection = cookies_tables.engine.connect()

ins = insert(cookies_tables.cookies).values(
    cookie_name="white chocolate chip and macadamia nut",
    cookie_recipe_url="http://some.aweso.me/cookie/white_choc_maca_nut.html",
    cookie_sku="CC03",
    quantity="1",
    unit_cost="1.00")

result = connection.execute(ins)
                                                                                                                                                                  
# create a new table
newTable = sqlalchemy.Table('newTable', metadata,
                       sqlalchemy.Column('Id', sqlalchemy.Integer()),
                       sqlalchemy.Column('name', sqlalchemy.String(255), nullable=False),
                       sqlalchemy.Column('salary', sqlalchemy.Float(), default=100.0),
                       sqlalchemy.Column('active', sqlalchemy.Boolean(), default=True)
                       )

metadata.create_all(engine)


# INSERT statement
newTable = sqlalchemy.Table('newTable', metadata, autoload=True, autoload_with=engine)

query = sqlalchemy.insert(newTable).values(Id=1, name='Software Ninjaneer', salary=60000.00, active=True)
result_proxy = connection.execute(query)

# INSERT multiple rows at once
query = sqlalchemy.insert(newTable)
new_records = [{'Id': '2', 'name': 'record1', 'salary': 80000, 'active': False},
               {'Id': '3', 'name': 'record2', 'salary': 70000, 'active': True}]
result_proxy = connection.execute(query, new_records)


# UPDATE statement
import sqlalchemy
engine = sqlalchemy.create_engine('mysql+pymysql://username:password@localhost/sakila')
connection = engine.connect()
metadata = sqlalchemy.MetaData()
newTable = sqlalchemy.Table('newTable', metadata, autoload=True, autoload_with=engine)
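The UPDATE statement itself is missing from the scrape; a typical continuation against this table might be (the chosen column and criterion are assumptions):

# Hypothetical UPDATE continuing the section above:
query = sqlalchemy.update(newTable).values(salary=90000.00).where(newTable.columns.Id == 1)
result_proxy = connection.execute(query)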
def add_item(cls, item):
    with engine.connect() as connection:
        if not any(item):
            return None
        insert_query = db.insert(Items)
        connection.execute(insert_query, item)
Example #56
print(second_row[2])

#practice with a where clause
result = result.where(purchases_sqlal.columns.id <= 5)
results = conn.execute(result).fetchall()

#print out info from results after where clause
for result in results:
    print(result.id, result.item_id, result.customer_id)

#get total number of rows (fetchall() returns a plain list, which has no
#rowcount attribute, so use len() instead)
print(len(results))

#insert an item into the table
stmt = insert(purchases_sqlal).values(id=11,
                                      item_id=7,
                                      customer_id=20,
                                      valid=True)

#execute query
new_result = conn.execute(stmt)
##############################################################################################

# getting table from postgres and creating pandas data frame
import pandas as pd
from sqlalchemy import create_engine, MetaData, Table

# establish connection
engine = create_engine(
    'postgresql+psycopg2://postgres:password@localhost:####/Regents Exams DataBase'
)
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 26 02:19:03 2017

@author: Shabaka
"""

# Import insert and select from sqlalchemy
from sqlalchemy import insert, select

# Build an insert statement to insert a record into the data table: stmt
stmt = insert(data).values(name='Anna', count=1, amount=1000.00, valid=True)

# Execute the statement via the connection: results
results = connection.execute(stmt)

# Print result rowcount
print(results.rowcount)

# Build a select statement to validate the insert
stmt = select([data]).where(data.columns.name == 'Anna')

# Print the result of executing the query.
print(connection.execute(stmt).first())
# Imports assumed for this fragment: python-docx (aliased dx) and docx2pdf's
# convert for the Word-to-PDF step, plus the stdlib/other names used below.
import os
from datetime import date

import docx as dx
import pandas as pd
from docx.enum.text import WD_ALIGN_PARAGRAPH
from docx2pdf import convert
from sqlalchemy import insert

#create word doc
doc = dx.Document()
name = doc.add_paragraph('')  #bold and right align name
name.add_run(clsplit.pop(0)).bold = True
name.alignment = WD_ALIGN_PARAGRAPH.RIGHT
for _ in range(4):  #for the rest of the contact info right align
    line = doc.add_paragraph(clsplit.pop(0))
    line.alignment = WD_ALIGN_PARAGRAPH.RIGHT
for _ in range(len(clsplit)):  #rest of the doc left align
    line = doc.add_paragraph(clsplit.pop(0))
word_filename = company + '_coverletter.docx'
doc.save(word_filename)

pdf_filename = word_filename.replace('.docx', '.pdf')
convert(word_filename, pdf_filename)  #convert word to pdf

os.remove(word_filename)  #remove word file

#create new entry in database table
new = insert(applications).values(position=position,
                                  company=company,
                                  date=date.today())
engine.execute(new)


q = 'SELECT * FROM applications'  #see all job apps saved
pd.read_sql(q, engine)  #wow so productive!

Example #59
        # Create a table with the appropriate Columns
        airports = db.Table('Airports', metadata,
              db.Column('ICAO', db.String(255), primary_key=True, nullable=False), 
              db.Column('Name', db.String(255)), db.Column('City', db.String(255)),
              db.Column('Country', db.String(255)), db.Column('IATA', db.String(255)),
              db.Column('Latitude', db.String(10)),db.Column('Longtitude', db.String(10)),
              db.Column('Elevation', db.String(255)),db.Column('TZDB', db.String(255)))

        # Implement the creation
        metadata.create_all(engine)
        #now populate table
        with open(settings.AirportsFile, newline='',encoding='utf-8') as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                print(row)
                query = db.insert(airports).values(
                    ICAO=row['ICAO'], Name=row['Name'], City=row['City'],
                    Country=row['Country'], Latitude=row['Latitude'],
                    Longtitude=row['Longtitude'], Elevation=row['Elevation'],
                    TZDB=row['TZ Database time'])
                ResultProxy = connection.execute(query)
    if not engine.dialect.has_table(engine, 'Stations'):  # If table don't exist, Create.
        print("Creating Stations Table")
        metadata = db.MetaData()
        # Create a table with the appropriate Columns
        stations = db.Table('Stations', metadata,
              db.Column('StationNumber', db.String(255), primary_key=True, nullable=False), 
              db.Column('Name', db.String(255)), db.Column('Latitude', db.String(255)),
              db.Column('Longtitude', db.String(255)))

        # Implement the creation
        metadata.create_all(engine)
        #now populate table
        with open(settings.SoundStationsFile, newline='',encoding='utf-8') as csvfile:
            reader = csv.DictReader(csvfile)
Example #60
async def create(model, column_num=None, **kwargs):
    q = sqlalchemy.insert(model, values=kwargs)
    if column_num is not None:
        return await pg.fetchval(q, column=column_num)
    return await pg.fetchrow(q)
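A hypothetical call site, assuming an async context, a mapped model User, and that column 0 is the generated primary key (all assumptions):

# new_id = await create(User, column_num=0, name='alice')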