Beispiel #1
0
def prepareSqlite(out,featureClass,fileType,includeGeometry):
    """Create a SpatiaLite-style sqlite database for exporting a feature class.

    Builds the geometry_columns / spatial_ref_sys metadata tables plus the
    output table itself.

    :param out: path of the sqlite file to create
    :param featureClass: source feature class to export
    :param fileType: unused here; kept for interface compatibility
    :param includeGeometry: when truthy, add a GEOMETRY blob column
    :return: [table_name, cursor, connection]
    :raises ValueError: if the feature class shape type is unsupported
    """
    [shp, shpType] = getShp(featureClass)
    # Map the shape type onto its OGC geometry type code.
    geometry_codes = {"point": 1, "multipoint": 4, "polyline": 5, "polygon": 6}
    try:
        gType = geometry_codes[shpType]
    except KeyError:
        # Previously an unrecognized shape type left gType undefined and
        # caused a NameError below; fail with a clear message instead.
        raise ValueError("unsupported shape type: {0}".format(shpType))
    fields = listFields(featureClass)
    # OGC_FID primary key first, optional geometry blob, then the source
    # fields (excluding the OID field and the shape field itself).
    fieldNames = ["OGC_FID INTEGER PRIMARY KEY"]
    if includeGeometry:
        fieldNames.append("GEOMETRY blob")
    for field in fields:
        if fields[field] != u'OID' and field.lower() != shp.lower():
            fieldNames.append(parseFieldType(field, fields[field]))

    conn = Connection(out)
    c = conn.cursor()
    # The output table is named after the sqlite file (without extension).
    name = splitext(split(out)[1])[0]
    c.execute("""CREATE TABLE geometry_columns (     f_table_name VARCHAR,      f_geometry_column VARCHAR,      geometry_type INTEGER,      coord_dimension INTEGER,      srid INTEGER,     geometry_format VARCHAR )""")
    c.execute("""insert into geometry_columns( f_table_name, f_geometry_column, geometry_type, coord_dimension, srid, geometry_format) values(?,?,?,?,?,?)""",(name,"GEOMETRY",gType,2,4326,"WKB"))
    c.execute("""CREATE TABLE spatial_ref_sys        (     srid INTEGER UNIQUE,     auth_name TEXT,     auth_srid TEXT,     srtext TEXT)""")
    c.execute("insert into spatial_ref_sys(srid ,auth_name ,auth_srid ,srtext) values(?,?,?,?)",(4326, u'EPSG', 4326, u'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'))
    c.execute("create table {0}({1})".format(name, ", ".join(fieldNames)))
    return [name, c, conn]
Beispiel #2
0
def parse_and_import_cards(
    input_file: pathlib.Path, sql_connection: sqlite3.Connection
) -> None:
    """
    Parse the JSON cards and input them into the database
    :param input_file: AllSets.json file
    :param sql_connection: Database connection
    """
    LOGGER.info("Loading JSON into memory")
    # Use a context manager so the file handle is closed after parsing
    # (the original left it open), and decode it as UTF-8 explicitly.
    with input_file.open("r", encoding="utf-8") as json_file:
        json_data = json.load(json_file)

    LOGGER.info("Building sets")
    for set_code, set_data in json_data.items():
        # Handle set insertion
        LOGGER.info("Inserting set row for {}".format(set_code))
        set_insert_values = handle_set_row_insertion(set_data)
        sql_dict_insert(set_insert_values, "sets", sql_connection)

        # Default to an empty list so a set without cards/tokens no longer
        # crashes with "NoneType is not iterable".
        for card in set_data.get("cards", []):
            LOGGER.debug("Inserting card row for {}".format(card.get("name")))
            card_attr: Dict[str, Any] = handle_card_row_insertion(card, set_code)
            sql_insert_all_card_fields(card_attr, sql_connection)

        for token in set_data.get("tokens", []):
            LOGGER.debug("Inserting token row for {}".format(token.get("name")))
            token_attr = handle_token_row_insertion(token, set_code)
            sql_dict_insert(token_attr, "tokens", sql_connection)

    # One commit at the end keeps the whole import in a single transaction.
    sql_connection.commit()
Beispiel #3
0
 def _fetch_account(self, conn: sqlite3.Connection, account_id: int) -> Account:
     """Load one Account (privileges, stats and story data) by its id.

     :param conn: open database connection
     :param account_id: primary key of the Account row
     :raises ValueError: for an unknown storydata serialization format
     :raises TypeError: when deserialized storydata is not a dict
     :raises AttributeError: when a stored stat column is not a Stats field
     """
     acc = conn.execute("SELECT * FROM Account WHERE id=?", (account_id,)).fetchone()
     priv_result = conn.execute("SELECT privilege FROM Privilege WHERE account=?", (account_id,)).fetchall()
     privileges = {pr["privilege"] for pr in priv_result}
     storydata_result = conn.execute("SELECT format, data FROM StoryData WHERE account=?", (account_id,)).fetchone()
     if storydata_result:
         if storydata_result["format"] == "json":
             # json.loads no longer accepts an ``encoding`` argument
             # (deprecated since 3.1, removed in Python 3.9).
             storydata = json.loads(storydata_result["data"])
         elif storydata_result["format"] == "serpent":
             storydata = serpent.loads(storydata_result["data"])
         else:
             raise ValueError("invalid storydata format in database: " + storydata_result["format"])
         if not isinstance(storydata, dict):
             raise TypeError("storydata should be a dict")
     else:
         storydata = {}
     # CharStat row -> Stats object; id/account are table bookkeeping only.
     # pop(..., None) instead of del: the ``or {}`` above means the dict can
     # be empty when no CharStat row exists, and del would raise KeyError.
     stats_result = dict(conn.execute("SELECT * FROM CharStat WHERE account=?", (account_id,)).fetchone() or {})
     stats_result.pop("id", None)
     stats_result.pop("account", None)
     stats = base.Stats()
     for key, value in stats_result.items():
         if hasattr(stats, key):
             setattr(stats, key, value)
         else:
             raise AttributeError("stats doesn't have attribute: " + key)
     stats.set_stats_from_race()   # initialize static stats from races table
     return Account(acc["name"], acc["email"], acc["pw_hash"], acc["pw_salt"], privileges,
                    acc["created"], acc["logged_in"], bool(acc["banned"]), stats, storydata)
Beispiel #4
0
def load_max_id(db: sqlite3.Connection):
    """Return the smallest id stored in fav_tweets, or None when empty.

    NOTE(review): despite the name, this selects min(id) — presumably the
    oldest favourite, used as a pagination boundary; confirm with callers.

    :param db: open connection to the tweets database
    """
    cur = db.cursor()
    cur.execute('select min(id) from fav_tweets')
    row = cur.fetchone()
    # Read-only query: roll back to release the implicit transaction.
    db.rollback()
    cur.close()
    # min() over an empty table still yields a (None,) row, so this returns
    # None for an empty table via row[0].
    if row:
        return row[0]
    return None
    def __update_pragma(connection: sqlite3.Connection, name: str, value: Any) -> None:
        """
        Updates PRAGMA value

        :param connection:  Database connection
        :param name:        Pragma's name
        :param value:       Pragma's value
        """
        # PRAGMA statements do not accept bound parameters, so the name and
        # value are interpolated directly into the statement text.
        statement = "PRAGMA {0} = {1}".format(name, str(value))
        connection.execute(statement)
 def __init__(self, dbname='test.db', autocommit=1, encoding='utf-8'):
     """Open the database, enabling autocommit mode by default.

     isolation_level=None puts sqlite3 in autocommit mode; the empty
     string selects the default deferred-transaction behaviour.
     """
     isolation_level = None if autocommit else ''
     LocalConnection.__init__(self, dbname, isolation_level=isolation_level)
     # Rows come back as ResultSetRow objects instead of plain tuples.
     self.row_factory = ResultSetRow
Beispiel #7
0
 def _store_stats(self, conn: sqlite3.Connection, account_id: int, stats: base.Stats) -> None:
     """Insert one CharStat row holding the given stats for the account."""
     stat_vars = dict(vars(stats))
     # These attributes are never persisted; they are always re-initialized
     # from the races table when stats are loaded again.
     for transient in ("bodytype", "language", "weight", "size"):
         del stat_vars[transient]
     columns = ["account"] + list(stat_vars.keys())
     values = [account_id] + list(stat_vars.values())
     placeholders = ",".join('?' * len(columns))
     sql = "INSERT INTO CharStat(" + ",".join(columns) + ") VALUES (" + placeholders + ")"
     conn.execute(sql, values)
Beispiel #8
0
def insert(db: sqlite3.Connection, t):
    """Store a favourited tweet as JSON, ignoring duplicates by id.

    :param db: open connection with a fav_tweets(id, tweet, created_at) table
    :param t: tweet payload dict; must contain 'id_str'
    """
    # Renamed from ``id`` to avoid shadowing the builtin.
    tweet_id = int(t['id_str'])
    # Snowflake ids encode their creation time.
    created_at = int(timestamp_from_id(tweet_id))
    cur = db.cursor()
    cur.execute(
        """
        INSERT INTO fav_tweets (`id`, `tweet`, `created_at`)
        VALUES (?, ?, ?)
        ON CONFLICT (`id`) DO NOTHING
        """,
        (tweet_id, json.dumps(t), created_at)
    )
    db.commit()
    cur.close()
def delete_friend(ds_connection: sqlite3.Connection, id: str) -> None:
    """
    Delete a given entry from the friends table in a given SQLite connection.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            sqlite datastore containing a friends table.
        id (str): An `id` value which will be used to find a specific
            datastore row to delete.

    Raises:
        ValueError: If no row matches the given `id`.
    """
    cursor = ds_connection.execute("DELETE  " "from friends where lower(id) = ?", [id.lower()])

    if not cursor.rowcount:
        # Bare ValueError() gave callers no clue which id was missing.
        raise ValueError("no friend found with id {!r}".format(id))

    ds_connection.commit()
Beispiel #10
0
    def init_db(self):
        """Open the configured database and create its tables if missing.

        Also installs a dict row factory so queries return dicts keyed by
        column name instead of plain tuples.
        """
        self.db = Connection(self.config['database'])
        self.db.execute("""
            CREATE TABLE IF NOT EXISTS
            message_log (
                id INTEGER PRIMARY KEY ASC,
                channel TEXT,
                nick TEXT,
                ident TEXT,
                host TEXT,
                message TEXT,
                date INTEGER
            )
        """)
        # Fixed misspelled column type "INNTEGER": unknown type names get
        # NUMERIC affinity in SQLite, INTEGER is what was intended.  Only
        # affects freshly created databases (CREATE TABLE IF NOT EXISTS).
        self.db.execute("""
            CREATE TABLE IF NOT EXISTS
            social_telki (
              id INTEGER PRIMARY KEY ASC,
              rating INTEGER,
              displayed_times INTEGER,
              url TEXT,
              who_added TEXT,
              date_added INTEGER
            )
        """)

        def dict_factory(cursor, row):
            # Map each column name from the cursor description to its value.
            return {col[0]: value for col, value in zip(cursor.description, row)}

        self.db.row_factory = dict_factory
Beispiel #11
0
 def __init__(self, ladder: str, dbh: sqlite3.Connection) -> None:
     """Bind this object to a ladder name and an open database handle."""
     self.ladder = ladder
     self.dbh = dbh
     self.cursor = dbh.cursor()
     self.last_ranking = 0
     # Player cache, keyed by player name.
     self.players: Dict[str, Player] = {}
     # TrueSkill environment; populated elsewhere.
     self.tsh: trueskill.TrueSkill = None
def get_friend(ds_connection: sqlite3.Connection, id: str) -> dict:
    """
    Obtain a specific friend record and return a representation of it.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            sqlite datastore containing a friends table.
        id (str): An `id` value which will be used to find a specific
            datastore row.

    Returns
        A JSON ready dictionary representing a specific
        row of the friends table, or None when no row matches.
    """
    row = ds_connection.execute(
        'select id, first_name, last_name, telephone, email, notes '
        'from friends where lower(id) = ?',
        [id.lower()]).fetchone()

    if not row:
        return None

    field_names = ("id", "first_name", "last_name", "telephone", "email", "notes")
    return dict(zip(field_names, row))
def get_friends(ds_connection: sqlite3.Connection) -> list:
    """
    Return a representation of all rows in the friends table.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            sqlite datastore containing a friends table.

    Returns
        A JSON ready list of dictionaries, one per row of the friends
        table.  (The annotation previously said ``dict``, but a list has
        always been returned.)
    """
    cursor = ds_connection.execute(
        'select id, first_name, last_name, telephone, email, notes '
        'from friends')

    field_names = ("id", "first_name", "last_name", "telephone", "email", "notes")
    return [dict(zip(field_names, row)) for row in cursor.fetchall()]
Beispiel #14
0
def calc_longitudinal_qc(infiles):
    """Aggregate instrument-QC metrics from PSM, peptide and protein tables.

    :param infiles: dict of file paths under the keys 'psmtable',
        'sqltable', 'peptable' and 'prottable'
    :return: dict of QC metrics (boxplot stats, counts, miscleavage tallies)
    """
    qcmap = {}
    qcpsms = []
    # The PSM table is streamed; the first yielded row is the header.
    psms = parse_psms(infiles['psmtable'], is_instrument_qc=True)
    header = next(psms)
    perrorix = header.index('PrecursorError(ppm)')
    qvalix = header.index('QValue')
    msgfix = header.index('MSGFScore')
    rtix = header.index('Retention time(min)')
    misclix = header.index('missed_cleavage')
    for line in psms:
        # FIXME filtering in galaxy? will be incorrect num of peptides
        # Keep only PSMs at or below 1% FDR.
        if float(line[qvalix]) > 0.01:
            continue
        qcpsms.append(line)
        # Tally PSMs per missed-cleavage count, for 0-3 only.
        if int(line[misclix]) < 4:
            mckey = 'miscleav{}'.format(line[misclix])
            try:
                qcmap[mckey] += 1
            except KeyError:
                qcmap[mckey] = 1
    qcmap['perror'] = calc_boxplot([psm[perrorix] for psm in qcpsms])
    qcmap['msgfscore'] = calc_boxplot([psm[msgfix] for psm in qcpsms])
    qcmap['rt'] = calc_boxplot([psm[rtix] for psm in qcpsms])
    con = Connection(infiles['sqltable'])
    qcmap.update({'psms': len(qcpsms), 
                  'scans': con.execute('SELECT COUNT(*) FROM mzml').fetchone()[0]})
    peps = []
    with open(infiles['peptable']) as fp:
        header, lines = table_reader(fp)
        areaix = header.index('MS1 area (highest of all PSMs)')
        protix = header.index('Protein(s)')
        count = 0
        unicount = 0
        for line in lines:
            count += 1
            # A peptide mapping to a single protein (no ';') counts as unique.
            if ';' not in line[protix]:
                unicount += 1
            # NOTE(review): list.append cannot raise ValueError, so this
            # try/except looks like a leftover — confirm before removing.
            try:
                peps.append(line)
            except ValueError:
                pass
    qcmap['peparea'] = calc_boxplot([x[areaix] for x in peps])
    qcmap.update({'peptides': count, 'unique_peptides': unicount})
    with open(infiles['prottable']) as fp:
        # Line count minus the header row = number of proteins.
        qcmap['proteins'] = sum(1 for _ in fp) - 1
    return qcmap
Beispiel #15
0
def import_catalog_into_db(catalog: Dict[str, CatalogEntry], connection: sqlite3.Connection):
    """Insert every catalog entry into the catalog table."""
    insert_sql = 'insert into catalog (item_key, category_name) values (?, ?);'
    cursor = connection.cursor()
    for entry in catalog.values():
        cursor.execute(insert_sql, [entry.item_key, entry.category_name])
Beispiel #16
0
def import_sales_into_db(sales, connection: sqlite3.Connection):
    """Insert each sale record into the sales table.

    Timestamps are stored in ISO-8601 form and prices as strings.
    """
    insert_sql = (
        'insert into sales (item_key, country, city_name, sale_timestamp, price) values (?, ?, ?, ?, ?);'
    )
    cursor = connection.cursor()
    for sale in sales:
        row = [sale.item_key, sale.country, sale.city_name,
               sale.timestamp.isoformat(), str(sale.price)]
        cursor.execute(insert_sql, row)
Beispiel #17
0
 def __init__(self, uri, graphs=()):
     """Open the SQLite db at *uri* and prepare tables for *graphs*.

     The connection runs in autocommit mode (isolation_level=None) and is
     shared across threads, so access is serialized with self.lock.
     """
     self.uri = uri
     self.db = Connection(
         database=uri, check_same_thread=False, isolation_level=None)
     self.lock = Lock()
     self.setup_sql(graphs)
def add_friend(ds_connection: sqlite3.Connection, entry_data: dict):
    """
    Create a new row in the friends table.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            sqlite datastore containing a friends table.
        entry_data (dict): The data needed to create a new entry.
    """
    row = [
        entry_data['id'],
        entry_data['firstName'],
        entry_data['lastName'],
        entry_data['telephone'],
        entry_data['email'],
        entry_data['notes'],
    ]
    ds_connection.execute(
        "insert into friends (id, first_name, last_name, telephone, email, notes) "
        "values (?, ?, ?, ?, ?, ?)",
        row)
    ds_connection.commit()
    def __init__(self, native_connection: sqlite3.Connection):
        """
        Constructor

        :param native_connection: Native connection object
        """
        Connection.__init__(self)

        # This wrapper manages transactions itself, so sqlite3's implicit
        # transaction handling is turned off on the native connection.
        native_connection.isolation_level = None
        self.__in_transaction = False
        self.__native_connection = native_connection
Beispiel #20
0
def refresh_disable_proxies(conn: Connection):
    """Purge disabled proxies that have been stale for more than a day.

    :param conn: database connection
    :return: None
    """
    # Rows older than day_of_seconds (compared via unix epoch seconds,
    # local time) are removed.
    stale_delete_sql = ''' 
        DELETE FROM disable_proxies
        WHERE CAST(strftime('%s', datetime('now', 'localtime')) - strftime('%s', insert_date) as INTEGER) > ?
    '''
    cur = conn.cursor()
    cur.execute(stale_delete_sql, (day_of_seconds,))
Beispiel #21
0
    def _purge_old_search_metadata_communities(self):
        """
        Cleans up all SearchCommunity and MetadataCommunity stuff in dispersy database.
        """
        db_path = os.path.join(self.session.get_state_dir(), u"sqlite", u"dispersy.db")
        # Nothing to do when the dispersy database was never created.
        if not os.path.isfile(db_path):
            return

        communities_to_delete = (u"SearchCommunity", u"MetadataCommunity", u"TunnelCommunity")

        connection = Connection(db_path)
        cursor = connection.cursor()

        for community in communities_to_delete:
            try:
                result = list(cursor.execute(u"SELECT id FROM community WHERE classification == ?;", (community,)))

                # Remove the community row plus all dependent rows keyed on it.
                for community_id, in result:
                    cursor.execute(u"DELETE FROM community WHERE id == ?;", (community_id,))
                    cursor.execute(u"DELETE FROM meta_message WHERE community == ?;", (community_id,))
                    cursor.execute(u"DELETE FROM sync WHERE community == ?;", (community_id,))
            except StopIteration:
                # NOTE(review): list() consumes the cursor without raising
                # StopIteration, so this handler appears defensive only.
                continue

        cursor.close()
        connection.commit()
        connection.close()
Beispiel #22
0
    def _update_dispersy(self):
        """
        Cleans up all SearchCommunity and MetadataCommunity stuff in dispersy database.
        """
        db_path = os.path.join(self.session.get_state_dir(), u"sqlite", u"dispersy.db")
        # Nothing to do when the dispersy database was never created.
        if not os.path.isfile(db_path):
            return

        communities_to_delete = (u"SearchCommunity", u"MetadataCommunity")

        connection = Connection(db_path)
        cursor = connection.cursor()

        # Track whether anything was deleted so we only commit when needed.
        data_updated = False
        for community in communities_to_delete:
            try:
                result = list(cursor.execute(u"SELECT id FROM community WHERE classification == ?", (community,)))

                # Remove the community row plus all dependent rows keyed on it.
                for community_id, in result:
                    self._logger.info(u"deleting all data for community %s...", community_id)
                    cursor.execute(u"DELETE FROM community WHERE id == ?", (community_id,))
                    cursor.execute(u"DELETE FROM meta_message WHERE community == ?", (community_id,))
                    cursor.execute(u"DELETE FROM sync WHERE community == ?", (community_id,))
                    data_updated = True
            except StopIteration:
                # NOTE(review): list() consumes the cursor without raising
                # StopIteration, so this handler appears defensive only.
                continue

        if data_updated:
            connection.commit()
        cursor.close()
        connection.close()
Beispiel #23
0
def insert_disable_proxy(conn: Connection, proxy: tuple):
    """Record a proxy that has stopped working.

    Duplicate proxies are ignored (INSERT OR IGNORE); the insert timestamp
    is the current local time.

    :param conn: database connection
    :param proxy: 1-tuple containing the proxy address
    :return: None
    """
    # The annotation previously was ``proxy: ()`` — an empty-tuple
    # *instance*, not a type.
    insert_sql = '''
        INSERT OR IGNORE INTO disable_proxies (proxy, insert_date)
        VALUES (?, datetime('now', 'localtime'))
    '''
    cur = conn.cursor()
    cur.execute(insert_sql, proxy)
Beispiel #24
0
class Graph(object):
    """
    Initializes a new Graph object.

    :param uri: The URI of the SQLite db.
    :param graphs: Graphs to create.
    """
    def __init__(self, uri, graphs=()):
        # Open the backing SQLite database and create the requested tables.
        self.uri = uri
        self.db = Connection(database=uri)
        self.setup_sql(graphs)

    def setup_sql(self, graphs):
        """
        Sets up the SQL tables for the graph object,
        and creates indexes as well.

        :param graphs: The graphs to create.
        """
        with closing(self.db.cursor()) as cursor:
            for table in graphs:
                # NOTE(review): graph/table names are %-interpolated into
                # the DDL templates; safe only for trusted names.
                cursor.execute(SQL.CREATE_TABLE % (table))
                for index in SQL.INDEXES:
                    cursor.execute(index % (table))
            self.db.commit()

    def close(self):
        """
        Close the SQLite connection.
        """
        self.db.close()

    # The connection is also closed on garbage collection: __del__ simply
    # aliases close().
    __del__ = close

    def __contains__(self, edge):
        """
        Checks if an edge exists within the database
        with the given source and destination nodes.

        :param edge: The edge to query.
        """
        with closing(self.db.cursor()) as cursor:
            cursor.execute(*SQL.select_one(edge.src, edge.rel, edge.dst))
            return bool(cursor.fetchall())

    def find(self, edge_query):
        """
        Returns a Query object that acts on the graph.
        """
        return Query(self.db)(edge_query)

    def transaction(self):
        """
        Returns a Transaction object. All modifying
        operations, i.e. ``store``, ``delete`` must
        then be performed on the transaction object.
        """
        return Transaction(self.db)
Beispiel #25
0
def sql_dict_insert(
    data: Dict[str, Any], table: str, sql_connection: sqlite3.Connection
) -> None:
    """
    Insert a dictionary into a sqlite table
    :param data: Dict to insert
    :param table: Table to insert to
    :param sql_connection: SQL connection
    """
    cursor = sql_connection.cursor()
    columns = ", ".join(data.keys())
    placeholders = ":" + ", :".join(data.keys())
    # Previously this mixed ``+`` concatenation with ``%`` formatting and
    # only worked by operator-precedence coincidence; one format call is
    # unambiguous.  Table/column names come from trusted callers; values
    # are bound as named parameters.
    query = "INSERT INTO {} ({}) VALUES ({})".format(table, columns, placeholders)
    cursor.execute(query, data)
def fully_update_friend(ds_connection: sqlite3.Connection, entry_data: dict):
    """
    Update all aspects of given row in the friends table.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            sqlite datastore containing a friends table.
        entry_data (dict): The data needed to update a given entry.  The
            `id` value of this dictionary is used to identify the entry
            to update.
    """
    new_values = [entry_data[key] for key in
                  ('id', 'firstName', 'lastName', 'telephone', 'email', 'notes')]
    # The row is located by a case-insensitive match on the given id.
    new_values.append(entry_data['id'].lower())
    ds_connection.execute(
        "UPDATE friends "
        "SET id=?, first_name=?, last_name=?, telephone=?, email=?, notes=? "
        "WHERE lower(id) = ?",
        new_values)
    ds_connection.commit()
Beispiel #27
0
def update_proxy_error(conn: Connection, proxy_list: list) -> None:
    """Increment the error counter for each proxy in the list.

    :param conn: database connection
    :param proxy_list: list of 1-tuples [(proxy1,), (proxy2,), ...]
    :return: None
    """
    # The annotations previously used list *instances* (``proxy_list: []``
    # and ``-> []``) rather than types; nothing is returned.
    update_sql = '''
        UPDATE OR IGNORE proxy_list
        SET error_count = error_count + 1
        WHERE proxy = ?
    '''

    cur = conn.cursor()
    cur.executemany(update_sql, proxy_list)
Beispiel #28
0
    def __read_pragma(connection: sqlite3.Connection, name: str) -> Any:
        """
        Reads PRAGMA value

        :param connection:  Database connection
        :param name:        Pragma's name
        """
        # PRAGMA does not accept bound parameters, so the name is
        # interpolated into the statement text.
        cursor = connection.execute("PRAGMA {0}".format(name))
        row = cursor.fetchone()
        # An unrecognized pragma yields no row; report that as None.
        return row[0] if row is not None else None
Beispiel #29
0
def update_tac_status(conn: Connection, tac_data: tuple):
    """Update a TAC record's status and bump its query counter.

    :param conn: database connection
    :param tac_data: (status, tac_id) tuple; status is 2 for "does not
        exist" or 3 for "query error"
    :return: None
    """
    # The annotation previously was ``tac_data: ()`` — an empty-tuple
    # *instance*, not a type.
    update_sql = '''
        UPDATE OR IGNORE tac_info
        SET status = ? ,
        query_count = query_count + 1 ,
        update_date = datetime('now', 'localtime')
        WHERE tac_id = ?
    '''
    cur = conn.cursor()
    cur.execute(update_sql, tac_data)
Beispiel #30
0
def reload_activate_proxies(conn: Connection, data_list: list):
    """Replace the activate_proxies table contents with *data_list*.

    Existing rows are deleted first, then the given proxies are inserted
    with the current local timestamp.

    :param conn: database connection
    :param data_list: list of (proxy, response_time, test_time, test_xp)
        tuples
    :return: None
    """
    # The annotation previously was ``data_list: []`` — a list *instance*,
    # not a type.
    delete_sql = ''' 
        DELETE FROM activate_proxies
    '''
    insert_sql = ''' 
        INSERT INTO activate_proxies (proxy, response_time, test_time, test_xp, insert_date)
        VALUES (?, ?, ?, ?, datetime('now', 'localtime'))
    '''

    cur = conn.cursor()
    cur.execute(delete_sql)
    cur.executemany(insert_sql, data_list)
Beispiel #31
0
def build_sql_schema(sql_connection: sqlite3.Connection) -> None:
    """
    Create the SQLite DB schema
    :param sql_connection: Connection to the database

    Creates the sets, set_translations, foreignData, legalities, rulings,
    cards and tokens tables.  Assumes a fresh database: there are no
    IF NOT EXISTS guards, so re-running against an existing schema fails.
    """
    LOGGER.info("Building SQLite Schema")
    cursor = sql_connection.cursor()

    # Build Set table
    cursor.execute(
        "CREATE TABLE `sets` ("
        "baseSetSize INTEGER,"
        "block TEXT,"
        "boosterV3 TEXT,"
        "code TEXT,"
        "codeV3 TEXT,"
        "isForeignOnly INTEGER NOT NULL DEFAULT 0,"
        "isFoilOnly INTEGER NOT NULL DEFAULT 0,"
        "isOnlineOnly INTEGER NOT NULL DEFAULT 0,"
        "keyruneCode TEXT,"
        "mcmName TEXT,"
        "mcmId INTEGER,"
        "meta TEXT,"
        "mtgoCode TEXT,"
        "name TEXT,"
        "parentCode TEXT,"
        "releaseDate TEXT,"
        "tcgplayerGroupId INTEGER,"
        "totalSetSize INTEGER,"
        "type TEXT"
        ")"
    )

    # Translations for set names
    cursor.execute(
        "CREATE TABLE `set_translations` ("
        "language TEXT,"
        "translation TEXT,"
        "setCode TEXT"
        ")"
    )

    # Build foreignData table
    cursor.execute(
        "CREATE TABLE `foreignData` ("
        "uuid TEXT,"
        "flavorText TEXT,"
        "language TEXT,"
        "multiverseId INTEGER,"
        "name TEXT,"
        "text TEXT,"
        "type TEXT"
        ")"
    )

    # Build legalities table
    cursor.execute(
        "CREATE TABLE `legalities` (" "uuid TEXT," "format TEXT," "status TEXT" ")"
    )

    # Build ruling table
    cursor.execute("CREATE TABLE `rulings` (" "uuid TEXT," "date TEXT," "text TEXT" ")")

    # Build cards table.  uuid is the primary key; rows in foreignData,
    # legalities and rulings reference it (no FK constraints declared).
    cursor.execute(
        "CREATE TABLE `cards` ("
        "artist TEXT,"
        "borderColor TEXT,"
        "colorIdentity TEXT,"
        "colorIndicator TEXT,"
        "colors TEXT,"
        "convertedManaCost FLOAT,"
        "duelDeck TEXT,"
        "faceConvertedManaCost FLOAT,"
        "flavorText TEXT,"
        "frameEffect TEXT,"
        "frameVersion TEXT,"
        "hand TEXT,"
        "hasFoil INTEGER NOT NULL DEFAULT 0,"
        "hasNonFoil INTEGER NOT NULL DEFAULT 0,"
        "isAlternative INTEGER NOT NULL DEFAULT 0,"
        "isOnlineOnly INTEGER NOT NULL DEFAULT 0,"
        "isOversized INTEGER NOT NULL DEFAULT 0,"
        "isReserved INTEGER NOT NULL DEFAULT 0,"
        "isStarter INTEGER NOT NULL DEFAULT 0,"
        "isTimeshifted INTEGER NOT NULL DEFAULT 0,"
        "layout TEXT,"
        "life TEXT,"
        "loyalty TEXT,"
        "manaCost TEXT,"
        "mcmName TEXT DEFAULT NULL,"
        "mcmId INTEGER DEFAULT 0,"
        "mcmMetaId INTEGER DEFAULT 0,"
        "mtgstocksId INTEGER DEFAULT 0,"
        "multiverseId INTEGER,"
        "name TEXT,"
        "names TEXT,"
        "number TEXT,"
        "originalText TEXT,"
        "originalType TEXT,"
        "printings TEXT,"
        "prices TEXT,"
        "power TEXT,"
        "purchaseUrls TEXT,"
        "rarity TEXT,"
        "scryfallId TEXT,"
        "scryfallOracleId TEXT,"
        "scryfallIllustrationId TEXT,"
        "setCode TEXT,"
        "side TEXT,"
        "subtypes TEXT,"
        "supertypes TEXT,"
        "tcgplayerProductId INTEGER,"
        "tcgplayerPurchaseUrl TEXT,"
        "text TEXT,"
        "toughness TEXT,"
        "type TEXT,"
        "types TEXT,"
        "uuid TEXT(36) PRIMARY KEY,"
        "uuidV421 TEXT,"
        "variations TEXT,"
        "watermark TEXT"
        ")"
    )

    # Build tokens table
    cursor.execute(
        "CREATE TABLE `tokens` ("
        "artist TEXT,"
        "borderColor TEXT,"
        "colorIdentity TEXT,"
        "colorIndicator TEXT,"
        "colors TEXT,"
        "duelDeck TEXT,"
        "isOnlineOnly INTEGER NOT NULL DEFAULT 0,"
        "layout TEXT,"
        "loyalty TEXT,"
        "name TEXT,"
        "names TEXT,"
        "number TEXT,"
        "power TEXT,"
        "reverseRelated TEXT,"
        "scryfallId TEXT,"
        "scryfallOracleId TEXT,"
        "scryfallIllustrationId TEXT,"
        "setCode TEXT,"
        "side TEXT,"
        "text TEXT,"
        "toughness TEXT,"
        "type TEXT,"
        "uuid TEXT,"
        "uuidV421 TEXT,"
        "watermark TEXT"
        ")"
    )

    # Execute the commands
    # (The DDL statements above have already run; commit ends the open
    # transaction so the schema is persisted.)
    sql_connection.commit()
Beispiel #32
0
def dbdropall(conn: sqlite3.Connection) -> None:
    """Soft-delete every illustJsonMsg row by forcing its `time` to -1."""
    logger.info("尝试删除全部")
    # Not a real DROP/DELETE: rows are only marked as deleted.
    conn.execute('update `illustJsonMsg` set `time`=-1 where `time`!=-1;')
    conn.commit()
    logger.info("删除全部成功")
Beispiel #33
0
def dbclose(conn: sqlite3.Connection) -> None:
    """Close the database connection, then log the shutdown."""
    # Note: uncommitted changes are rolled back by close().
    conn.close()
    logger.info("关闭数据库成功")
Beispiel #34
0
def insert_metrics_info(
    db: sqlite3.Connection,
    metrics_info: List[MetricInfo],
) -> None:
    """Bulk-insert (name, description) metric rows into metric_names."""
    db.executemany(
        'INSERT INTO metric_names (name, description) VALUES (?, ?)',
        metrics_info,
    )
def set_schema_version(i_db_conn: sqlite3.Connection,
                       i_version_it: int) -> None:
    """Store *i_version_it* as the database's PRAGMA user_version.

    The annotation previously (and wrongly) declared the version as a
    Connection; it is the integer schema version.  PRAGMA does not support
    bound parameters, so the value is formatted into the statement —
    ``{:d}`` rejects anything that is not an integer.
    """
    i_db_conn.execute("PRAGMA user_version={:d}".format(i_version_it))
Beispiel #36
0
def close_connection(connection: sqlite3.Connection):
    """Close the given database connection (uncommitted work is discarded)."""
    connection.close()
Beispiel #37
0
def has_message_been_banned(conn: sqlite3.Connection, guild_id: int,
                            msg_id: int) -> bool:
    """Return True when (guild_id, msg_id) exists in the banned-message table."""
    query = SELECT_BANNED_MSG_SQL.format(identifier=BANNED_MSG_TABLENAME)
    row = conn.cursor().execute(query, (guild_id, msg_id)).fetchone()
    return row is not None
Beispiel #38
0
def close_db(connection: sqlite3.Connection):
    """Commit any pending changes, then close the connection."""
    connection.commit()  # make sure any changes get saved
    connection.close()
Beispiel #39
0
def _init() -> Connection:
    """Open the buffer database in autocommit mode and ensure its schema."""
    conn = Connection(BUFFER_DB, isolation_level=None)
    init_db(conn)
    # Apply pragmas first, then the table definitions.
    for part in ("pragma", "tables"):
        conn.executescript(sql("create", part))
    return conn
Beispiel #40
0
	def __Connect(self):
		"""(Re)establish the connection to the beddoMischer host if needed."""
		# Nothing to do while an active connection exists.
		if self._connection is not None and self._connection.active:
			return
		self._connection = Connection()
		self._connection.connect(self._beddoMischerIP, self._settings.server["port"])
Beispiel #41
0
def update_local_associations(
    sydent: Sydent,
    db: sqlite3.Connection,
    send_email: bool,
    dry_run: bool,
    test: bool = False,
) -> None:
    """Update the DB table local_threepid_associations so that all stored
    emails are casefolded, and any duplicate mxid's associated with the
    given email are deleted.

    Setting dry_run to True means that the script is being run in dry-run mode
    by the user, i.e. it will run but will not send any email nor update the database.
    Setting test to True means that the function is being called as part of an automated
    test, and therefore we shouldn't backoff when sending emails.

    :param sydent: the Sydent instance (used for hashing and email sending)
    :param db: connection to the Sydent database
    :param send_email: whether to notify MXIDs whose rows get deleted
    :param dry_run: when True, change nothing and send no email
    :param test: when True, skip the email backoff (automated tests)

    :return: None
    """

    logger.info("Processing rows in local_threepid_associations")

    # NOTE(review): string concatenation leaves no space between 'email'
    # and ORDER BY; it appears to rely on SQLite tokenizing at the closing
    # quote — confirm and add a space for clarity.
    res = db.execute(
        "SELECT address, mxid FROM local_threepid_associations WHERE medium = 'email'"
        "ORDER BY ts DESC")

    # a dict that associates an email address with correspoinding mxids and lookup hashes
    associations: Dict[str, List[Tuple[str, str, str]]] = {}

    logger.info(
        "Computing new hashes and signatures for local_threepid_associations")

    # iterate through selected associations, casefold email, rehash it, and add to
    # associations dict
    for address, mxid in res.fetchall():
        casefold_address = address.casefold()

        # rehash email since hashes are case-sensitive
        lookup_hash = calculate_lookup_hash(sydent, casefold_address)

        if casefold_address in associations:
            associations[casefold_address].append((address, mxid, lookup_hash))
        else:
            associations[casefold_address] = [(address, mxid, lookup_hash)]

    # Deltas to apply to the database, associated with the casefolded address they're for.
    deltas: Dict[str, Delta] = {}

    # Iterate through the results, to build the deltas.
    for casefold_address, assoc_tuples in associations.items():
        # If the row is already in the right state and there's no duplicate, don't compute
        # a delta for it.
        if len(assoc_tuples) == 1 and assoc_tuples[0][0] == casefold_address:
            continue

        # The most recent row (ORDER BY ts DESC puts it first) is kept and
        # updated in place.
        deltas[casefold_address] = Delta(to_update=UpdateDelta(
            address=assoc_tuples[0][0],
            mxid=assoc_tuples[0][1],
            lookup_hash=assoc_tuples[0][2],
        ))

        if len(assoc_tuples) > 1:
            # Iterate over all associations except for the first one, since we've already
            # processed it.
            deltas[casefold_address].to_delete = []
            for address, mxid, _ in assoc_tuples[1:]:
                deltas[casefold_address].to_delete.append(
                    DeleteDelta(
                        address=address,
                        mxid=mxid,
                    ))

    logger.info(f"{len(deltas)} rows to update in local_threepid_associations")

    # Apply the deltas
    for casefolded_address, delta in deltas.items():
        if not test:
            log_msg = f"Updating {casefolded_address}"
            if delta.to_delete is not None:
                log_msg += (
                    f" and deleting {len(delta.to_delete)} rows associated with it"
                )
            logger.info(log_msg)

        try:
            # Delete each association, and send an email mentioning the affected MXID.
            if delta.to_delete is not None and not dry_run:
                for to_delete in delta.to_delete:
                    if send_email and to_delete.mxid != delta.to_update.mxid:
                        # If the MXID is one that will still be associated with this
                        # email address after this run, don't send an email for it.
                        sendEmailWithBackoff(
                            sydent,
                            to_delete.address,
                            to_delete.mxid,
                            test=test,
                        )

                    logger.debug(
                        "Deleting %s from table local_threepid_associations",
                        to_delete.address,
                    )
                    cur = db.cursor()
                    cur.execute(
                        "DELETE FROM local_threepid_associations WHERE medium = 'email' AND address = ?",
                        (to_delete.address, ),
                    )
                    db.commit()

            # Update the row now that there's no duplicate.
            if not dry_run:
                logger.debug(
                    "Updating table local threepid associations setting address to %s, "
                    "lookup_hash to %s, where medium = email and address = %s and mxid = %s",
                    casefolded_address,
                    delta.to_update.lookup_hash,
                    delta.to_update.address,
                    delta.to_update.mxid,
                )
                cur = db.cursor()
                cur.execute(
                    "UPDATE local_threepid_associations SET address = ?, lookup_hash = ? WHERE medium = 'email' AND address = ? AND mxid = ?",
                    (
                        casefolded_address,
                        delta.to_update.lookup_hash,
                        delta.to_update.address,
                        delta.to_update.mxid,
                    ),
                )
                db.commit()

        except CantSendEmailException:
            # If we failed because we couldn't send an email move on to the next address
            # to de-duplicate.
            # We catch this error here rather than when sending the email because we want
            # to avoid deleting rows we can't warn users about, and we don't want to
            # proceed with the subsequent update because there might still be duplicates
            # in the database (since we haven't deleted everything we wanted to delete).
            # NOTE(review): ``to_delete`` here is the loop variable from the
            # deletion loop above; this exception is only raised inside that
            # loop, but the binding is fragile — confirm.  ``logger.warn`` is
            # a deprecated alias of ``logger.warning``.
            logger.warn("Failed to send email to %s; skipping!",
                        to_delete.address)
            continue
Beispiel #42
0
def insert_error(db: sqlite3.Cursor, conn: sqlite3.Connection, control_number, message):
    """Record one error row (id auto-assigned via NULL) and commit immediately."""
    row = (None, control_number, message)
    db.execute('INSERT INTO errors VALUES (?,?,?)', row)
    conn.commit()
Beispiel #43
0
 def _create_config_table(self, db: sqlite3.Connection) -> None:
     """Create the ``configs`` table (keyed on path) if it does not exist."""
     ddl = (
         'CREATE TABLE IF NOT EXISTS configs ('
         '   path TEXT NOT NULL,'
         '   PRIMARY KEY (path)'
         ');'
     )
     db.executescript(ddl)
Beispiel #44
0
def update_global_associations(
    sydent: Sydent,
    db: sqlite3.Connection,
    dry_run: bool,
) -> None:
    """Update the DB table global_threepid_associations so that all stored
    emails are casefolded, the signed association is re-signed and any duplicate
    mxid's associated with the given email are deleted.

    Setting dry_run to True means that the script is being run in dry-run mode
    by the user, i.e. it will run but will not send any email nor update the database.

    :return: None
    """

    logger.info("Processing rows in global_threepid_associations")

    # get every row where the local server is origin server and medium is email
    origin_server = sydent.config.general.server_name
    medium = "email"

    # ORDER BY ts DESC matters: the first tuple collected per casefolded
    # address below is the most recent association, and it is the one kept.
    res = db.execute(
        "SELECT address, mxid, sgAssoc FROM global_threepid_associations WHERE medium = ?"
        "AND originServer = ? ORDER BY ts DESC",
        (medium, origin_server),
    )

    # dict that stores email address with mxid, email address, lookup hash, and
    # signed association
    associations: Dict[str, List[Tuple[str, str, str, str]]] = {}

    logger.info(
        "Computing new hashes and signatures for global_threepid_associations")

    # iterate through selected associations, casefold email, rehash it, re-sign the
    # associations and add to associations dict
    for address, mxid, sg_assoc in res.fetchall():
        casefold_address = address.casefold()

        # rehash the email since hash functions are case-sensitive
        lookup_hash = calculate_lookup_hash(sydent, casefold_address)

        # update signed associations with new casefolded address and re-sign
        sg_assoc = json_decoder.decode(sg_assoc)
        sg_assoc["address"] = address.casefold()
        sg_assoc = json.dumps(
            signedjson.sign.sign_json(sg_assoc,
                                      sydent.config.general.server_name,
                                      sydent.keyring.ed25519))

        # group tuples by casefolded address; insertion order preserves ts DESC
        if casefold_address in associations:
            associations[casefold_address].append(
                (address, mxid, lookup_hash, sg_assoc))
        else:
            associations[casefold_address] = [(address, mxid, lookup_hash,
                                               sg_assoc)]

    # list of arguments to update db with
    db_update_args: List[Tuple[Any, str, str, str, str]] = []

    # list of mxids to delete
    to_delete: List[Tuple[str]] = []

    for casefold_address, assoc_tuples in associations.items():
        # If the row is already in the right state and there's no duplicate, don't compute
        # a delta for it.
        if len(assoc_tuples) == 1 and assoc_tuples[0][0] == casefold_address:
            continue

        # Tuple order matches the placeholders of the UPDATE statement below:
        # (new address, new lookup_hash, new sgAssoc, old address, mxid).
        db_update_args.append((
            casefold_address,
            assoc_tuples[0][2],
            assoc_tuples[0][3],
            assoc_tuples[0][0],
            assoc_tuples[0][1],
        ))

        if len(assoc_tuples) > 1:
            # Iterate over all associations except for the first one, since we've already
            # processed it.
            for address, mxid, _, _ in assoc_tuples[1:]:
                to_delete.append((address, ))

    logger.info(
        f"{len(to_delete)} rows to delete, {len(db_update_args)} rows to update in global_threepid_associations"
    )
    if not dry_run:
        cur = db.cursor()

        if len(to_delete) > 0:
            cur.executemany(
                "DELETE FROM global_threepid_associations WHERE medium = 'email' AND address = ?",
                to_delete,
            )

            logger.info(
                f"{len(to_delete)} rows deleted from global_threepid_associations"
            )

        if len(db_update_args) > 0:
            cur.executemany(
                "UPDATE global_threepid_associations SET address = ?, lookup_hash = ?, sgAssoc = ? WHERE medium = 'email' AND address = ? AND mxid = ?",
                db_update_args,
            )

            logger.info(
                f"{len(db_update_args)} rows updated in global_threepid_associations"
            )

        # single commit so the deletes and updates land together
        db.commit()
Beispiel #45
0
def delete_all_upload_report(conn: Connection):
    """Remove every row from the uploadreport table and commit the change."""
    conn.execute('DELETE FROM uploadreport')
    conn.commit()
def initial_schema_and_setup(i_db_conn: sqlite3.Connection) -> None:
    """Create the phrases, rest-actions and settings tables, then insert the
    single settings row (id = SINGLE_SETTINGS_ID_INT) if it is not present."""
    # Auto-increment is not needed in our case: https://www.sqlite.org/autoinc.html

    # Breathing phrases: ordered, titled in/out phrases plus short variants.
    i_db_conn.execute("CREATE TABLE " + Schema.PhrasesTable.name + "(" +
                      Schema.PhrasesTable.Cols.id + " INTEGER PRIMARY KEY, " +
                      Schema.PhrasesTable.Cols.vertical_order +
                      " INTEGER NOT NULL, " + Schema.PhrasesTable.Cols.title +
                      " TEXT NOT NULL, " + Schema.PhrasesTable.Cols.ib_phrase +
                      " TEXT NOT NULL, " + Schema.PhrasesTable.Cols.ob_phrase +
                      " TEXT NOT NULL, " +
                      Schema.PhrasesTable.Cols.ib_short_phrase +
                      " TEXT NOT NULL DEFAULT '', " +
                      Schema.PhrasesTable.Cols.ob_short_phrase +
                      " TEXT NOT NULL DEFAULT '', " +
                      Schema.PhrasesTable.Cols.type + " INTEGER NOT NULL" +
                      " DEFAULT " +
                      str(mc_global.BreathingPhraseType.in_out.value) + ")")

    # Rest actions: an ordered list of titles.
    i_db_conn.execute("CREATE TABLE " + Schema.RestActionsTable.name + "(" +
                      Schema.RestActionsTable.Cols.id +
                      " INTEGER PRIMARY KEY, " +
                      Schema.RestActionsTable.Cols.vertical_order +
                      " INTEGER NOT NULL, " +
                      Schema.RestActionsTable.Cols.title + " TEXT NOT NULL" +
                      ")")

    # Settings: one wide row of reminder/audio/startup options, every column
    # carrying an application default so the INSERT below only needs the id.
    i_db_conn.execute(
        "CREATE TABLE " + Schema.SettingsTable.name + "(" +
        Schema.SettingsTable.Cols.id + " INTEGER PRIMARY KEY, " +
        Schema.SettingsTable.Cols.rest_reminder_active + " INTEGER NOT NULL" +
        " DEFAULT " + str(SQLITE_TRUE_INT) + ", " +
        Schema.SettingsTable.Cols.rest_reminder_interval +
        " INTEGER NOT NULL" + " DEFAULT " +
        str(DEFAULT_REST_REMINDER_INTERVAL_MINUTES_INT) + ", " +
        Schema.SettingsTable.Cols.rest_reminder_audio_filename +
        " TEXT NOT NULL" + " DEFAULT '" + mc_global.WIND_CHIMES_FILENAME_STR +
        "'" + ", " + Schema.SettingsTable.Cols.rest_reminder_volume +
        " INTEGER NOT NULL" + " DEFAULT " + str(DEFAULT_VOLUME_INT) + ", " +
        Schema.SettingsTable.Cols.rest_reminder_notification_type +
        " INTEGER NOT NULL" + " DEFAULT " +
        str(mc_global.NotificationType.Both.value) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_active +
        " INTEGER NOT NULL" + " DEFAULT " + str(SQLITE_TRUE_INT) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_interval +
        " INTEGER NOT NULL" + " DEFAULT " +
        str(DEFAULT_BREATHING_REMINDER_INTERVAL_MINUTES_INT) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_audio_filename +
        " TEXT NOT NULL" + " DEFAULT '" +
        mc_global.SMALL_BELL_SHORT_FILENAME_STR + "'" + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_volume +
        " INTEGER NOT NULL" + " DEFAULT " + str(DEFAULT_VOLUME_INT) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_notification_type +
        " INTEGER NOT NULL" + " DEFAULT " +
        str(mc_global.NotificationType.Both.value) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_phrase_setup +
        " INTEGER NOT NULL" + " DEFAULT " +
        str(mc_global.PhraseSetup.Long.value) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_nr_before_dialog +
        " INTEGER NOT NULL" + " DEFAULT " +
        str(DEFAULT_BREATHING_REMINDER_NR_BEFORE_DIALOG_INT) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_dialog_audio_active +
        " INTEGER NOT NULL" + " DEFAULT " + str(SQLITE_TRUE_INT) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_dialog_close_on_hover +
        " INTEGER NOT NULL" + " DEFAULT " + str(SQLITE_FALSE_INT) + ", " +
        Schema.SettingsTable.Cols.breathing_reminder_text + " TEXT NOT NULL" +
        " DEFAULT ''" + ", " +
        Schema.SettingsTable.Cols.breathing_dialog_phrase_selection +
        " INTEGER NOT NULL" + " DEFAULT " +
        str(mc_global.PhraseSelection.random.value) + ", " +
        Schema.SettingsTable.Cols.prep_reminder_audio_filename +
        " TEXT NOT NULL" + " DEFAULT '" +
        mc_global.SMALL_BELL_LONG_FILENAME_STR + "'" + ", " +
        Schema.SettingsTable.Cols.prep_reminder_audio_volume +
        " INTEGER NOT NULL" + " DEFAULT " + str(DEFAULT_VOLUME_INT) + ", " +
        Schema.SettingsTable.Cols.run_on_startup + " INTEGER NOT NULL" +
        " DEFAULT " + str(SQLITE_FALSE_INT) + ")")

    # NOTE(review): the INSERT below goes through Helper.get_db_connection()
    # rather than the i_db_conn argument used for the CREATEs above —
    # presumably both point at the same database; confirm, otherwise the
    # settings row lands on a different connection than the schema.
    db_connection = Helper.get_db_connection()

    db_cursor = db_connection.cursor()
    db_cursor.execute(
        "INSERT OR IGNORE INTO " + Schema.SettingsTable.name + "(" +
        Schema.SettingsTable.Cols.id + ") VALUES (?)",
        (SINGLE_SETTINGS_ID_INT, ))
    # -please note "OR IGNORE"
    db_connection.commit()
Beispiel #47
0
class BeddoFabrik:
	"""Poll the configured MFRC522 RFID readers and forward card events to
	the BeddoMischer server over a persistent connection.

	Fixes over the previous revision:
	* ``__IsWifiConnected`` compared a ``str`` against ``bytes`` output of
	  ``Popen.communicate`` — a guaranteed ``TypeError`` on Python 3; the
	  output is now decoded first.
	* Bare ``except:`` clauses (which also swallowed ``KeyboardInterrupt``
	  and ``SystemExit``) were narrowed to ``except Exception``.
	"""

	def __init__(self):
		self._continueReading = True
		self._settings = None
		self._readers = []
		self._beddoMischerIP = None
		self._connection = None

	def Start(self):
		"""Entry point: install the SIGINT handler, wait for Wifi, discover
		the server, connect, spawn the clear-request listener and poll."""
		# hook the SIGINT
		signal.signal(signal.SIGINT, self.__EndRead)

		# load settings
		self._settings = Settings()
		self._readers = self._settings.getReaders()
		Logger.debug("{} devices registered:".format(len(self._readers)))
		for x in self._readers:
			Logger.debug(x)

		# wait for wifi
		Logger.debug("Waiting for Wifi connection...")
		while not self.__IsWifiConnected():
			time.sleep(1)
		Logger.debug("Wifi connected")

		# discover BeddoMischer IP; keep retrying until the server answers
		while not self._beddoMischerIP:
			try:
				discoverer = ServerDiscover()
				self._beddoMischerIP = discoverer.Discover()
			except Exception:  # narrowed from a bare except so Ctrl+C still works
				self._beddoMischerIP = None
				Logger.error("Discovery failed")
				time.sleep(1)

		# connect with server
		self.__Connect()

		# start thread to listen for incoming clear requests
		start_new_thread(self.__ListenForClearRequests, ())

		self.__MainLoop()

	# Capture SIGINT for cleanup when the script is aborted
	def __EndRead(self, signal, frame):
		Logger.info("Ctrl+C captured, ending read.")
		self._continueReading = False
		GPIO.cleanup()
		self._connection.active = False
		self._connection.close()

	def __IsWifiConnected(self):
		"""Return True when wlan0 reports the 'BeddoLand' network."""
		process = Popen(['/sbin/iwconfig', 'wlan0'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
		output, err = process.communicate(b"")
		returnCode = process.returncode
		# communicate() yields bytes here (no text mode), so decode before the
		# substring test — `"..." in b"..."` raises TypeError on Python 3.
		if returnCode == 0 and "BeddoLand" in output.decode("utf-8", "replace"):
			return True

		return False

	def __Connect(self):
		# (re)connect only when there is no live connection
		if self._connection is None or not self._connection.active:
			self._connection = Connection()
			self._connection.connect(self._beddoMischerIP, self._settings.server["port"])

	def __ListenForClearRequests(self):
		"""Background thread: read clear requests forever; reconnect on failure."""
		while True:
			try:
				requestData = self._connection.read()
				self.__HandleClearRequest(requestData.strip())
			except Exception:  # narrowed from a bare except
				self.__Connect()

	def __HandleClearRequest(self, requestData):
		if requestData == "":
			return

		# each line is an independent JSON object carrying a "key" field
		requestData = requestData.split("\n")
		for line in requestData:
			try:
				obj = json.loads(line)
				key = obj["key"]
				Logger.debug("Received Clear: {}".format(key))
				if key == -1:
					self.__ClearAllHolds()
				else:
					self.__ClearHold(key)
			except Exception:  # narrowed from a bare except
				Logger.error("Error while parsing JSON. Data: {}".format(line))

	def __ClearAllHolds(self):
		for reader in self._readers:
			reader.clearHold()

	def __ClearHold(self, readerID):
		for reader in self._readers:
			if reader.id == readerID:
				reader.clearHold()

	def __MainLoop(self):
		# poll each activated reader in turn until SIGINT clears the flag
		while self._continueReading:
			for currentReader in self._readers:
				if not currentReader.activated:
					continue

				reader = MFRC522.MFRC522(bus=currentReader.bus,
										 subbus=currentReader.subbus,
										 reset=currentReader.reset)

				self.__HandleReader(reader, currentReader)

				reader.Close()
				time.sleep(0.2)

	def __HandleReader(self, reader, currentReader):
		# the request must precede anticollision; the returned tag type is unused
		(_, TagType) = reader.MFRC522_Request(reader.PICC_REQIDL)
		(status, uid) = reader.MFRC522_Anticoll()

		if status != reader.MI_OK:
			return

		uid = "-".join(map(str, uid))

		# check if detected card is manage card
		if self.__HandleManageCard(uid, currentReader):
			return

		self.__HandleNormalCard(uid, currentReader)

	def __HandleManageCard(self, uid, currentReader):
		"""Send a manage-card event; return True when *uid* is a manage card."""
		manageCards = ManageCards()
		manageCard = manageCards.getCardFromUID(uid)

		if manageCard is None:
			return False

		Logger.debug("Reader {} detected manage card: {}".format(currentReader.id, manageCard))

		dataDict = {
			"scope": "READER",
			"command": "manageCard",
			"key": currentReader.id,
			"value": manageCard
		}

		data = json.dumps(dataDict)
		data += "\r\n"
		if not self._connection.send(data):
			Logger.error("Error while sending manage card to server")
			self.__Connect()

		return True

	def __HandleNormalCard(self, uid, currentReader):
		"""Look up an ordinary card and forward it, de-duplicating repeats."""
		if not currentReader.isNewCard(uid):
			Logger.debug("Skipping detected card for reader {}".format(currentReader.id))
			return

		cards = Cards()
		card = cards.getCardFromUID(uid)
		if card is None:
			return

		Logger.debug("Reader {} detected new card: {}".format(currentReader.id, card))

		dataDict = {
			"scope": "READER",
			"command": "card",
			"key": currentReader.id,
			"value": card
		}

		data = json.dumps(dataDict)
		data += "\r\n"
		if not self._connection.send(data):
			Logger.error("Error while sending new card to server")
			self.__Connect()
			# card could not be sent due to connection issues
			currentReader.clearCardFromHold(uid)
Beispiel #48
0
def close_connection(con: Connection):
    """Release the database handle held by *con*."""
    con.close()
Beispiel #49
0
def InsertNewBackupInfo(conn: sqlite3.Connection, _date, _source, _zip_name):
    """Insert one backup record into the ``sync`` table and commit.

    The previous version interpolated the values into the SQL with
    ``str.format``, which broke on values containing quotes and was open to
    SQL injection; bound parameters fix both without changing the interface.
    """
    cur = conn.cursor()
    cur.execute(
        'INSERT INTO sync (date, source, zip_name) VALUES (?, ?, ?)',
        (_date, _source, _zip_name),
    )
    conn.commit()
Beispiel #50
0
def sqlite_cursor(conn: sqlite3.Connection) -> Iterator[sqlite3.Cursor]:
    """Yield a cursor for *conn*, guaranteeing it is closed afterwards."""
    cursor = conn.cursor()
    try:
        yield cursor
    finally:
        cursor.close()
Beispiel #51
0
def init_stock(con: sqlite3.Connection, stocks: List[str],
               stock_default_price: int):
    """Synchronise the ``stocks`` table (and every user's JSON holdings)
    with the configured stock list: names missing from the DB are created at
    the default price, names no longer configured are removed."""
    stocks = list(set(stocks))  # drop duplicates
    if len(stocks) > 10:
        exit("주식 개수는 10개 이하여야합니다.")

    cur = con.cursor()
    cur.execute("SELECT * FROM stocks")
    db_stocks = [row[0] for row in cur.fetchall()]

    # names in the config but not yet in the DB, and vice versa
    create_stocks = [name for name in stocks if name not in db_stocks]
    del_stocks = [name for name in db_stocks if name not in stocks]

    if del_stocks:
        for name in del_stocks:
            print(f"주식 [{name}] 삭제")
            cur.execute("DELETE FROM stocks WHERE name=?", (name, ))

        cur.execute("SELECT * FROM users")

        # strip the removed stocks from each user's holdings (column 4 is JSON)
        for row in cur.fetchall():
            holdings = json.loads(row[4])
            for name in del_stocks:
                if holdings.get(name):
                    del holdings[name]

            cur.execute(
                "UPDATE users SET stock=? WHERE id=?",
                (json.dumps(holdings, ensure_ascii=False), row[0]),
            )

        con.commit()

    if create_stocks:
        for name in create_stocks:
            print(f"주식 [{name}] 생성")
            cur.execute(
                "INSERT INTO stocks VALUES (?, ?, ?, ?)",
                (name, str(stock_default_price), "0", "[]"),
            )

        cur.execute("SELECT * FROM users")

        # give every user an empty position in each newly created stock
        for row in cur.fetchall():
            holdings = json.loads(row[4])
            for name in create_stocks:
                holdings[name] = []

            cur.execute(
                "UPDATE users SET stock=? WHERE id=?",
                (json.dumps(holdings, ensure_ascii=False), row[0]),
            )

        con.commit()
Beispiel #52
0
def count_sql_results(connection: sqlite3.Connection, query: str) -> int:
    """Return how many rows *query* would yield, without fetching them all."""
    wrapped = f"SELECT COUNT(*) FROM ({query})"
    (total,) = connection.execute(wrapped).fetchone()
    return total
Beispiel #53
0
def transmit_transcript(index, transcript: Transcript,
                        connection: sqlite3.Connection):
    """Finalize *transcript* and persist its JSON form under *index*.

    No commit is issued here; the caller owns transaction boundaries.
    """
    transcript.finalize()
    payload = json.dumps(transcript.as_dict(remove_attributes=True))
    connection.execute("INSERT INTO dump VALUES (?, ?)", (index, payload))
Beispiel #54
0
 def __init__(self, conn: Connection, table_name: str):
     """Bind this helper to *conn* and remember the target table name."""
     self.table_name = table_name
     self.conn = conn
     self.curr = conn.cursor()
Beispiel #55
0
 def __init__(self, filename='lang.db'):
     """Open *filename* and run create_db_if_required() to set up the schema."""
     self.conn = Connection(filename)
     self.create_db_if_required()
Beispiel #56
0
def record_banned_message(conn: sqlite3.Connection, guild_id: int,
                          msg_id: int) -> None:
    """Store (guild, message, current unix time) in the banned-messages table."""
    statement = INSERT_BANNED_MSG_SQL.format(identifier=BANNED_MSG_TABLENAME)
    conn.cursor().execute(statement, (guild_id, msg_id, int(time.time())))
    conn.commit()
Beispiel #57
0
def query(connection: Connection, *args: Any) -> Cursor:
    """Run *args* (SQL plus optional parameters) and hand back the cursor."""
    cur = connection.cursor()
    cur.execute(*args)
    return cur
Beispiel #58
0
def get_initial_most_recent_date(db_connection: Connection):
    """Return the newest ``date`` among messages not sent by this account."""
    sql = ("SELECT date FROM message "
           "WHERE is_from_me == 0 "
           "ORDER BY date DESC")
    return db_connection.execute(sql).fetchone()[0]
Beispiel #59
0
def delete_data_db(conn: Connection):
    """Remove every row from the employee table and commit the change."""
    conn.execute('DELETE FROM employee')
    conn.commit()
Beispiel #60
0
def insert_result(db: sqlite3.Cursor, conn: sqlite3.Connection, control_number, azure_extracted_text):
    """Save one extracted-text result row and commit right away."""
    record = (control_number, azure_extracted_text)
    db.execute('INSERT INTO results VALUES (?,?)', record)
    conn.commit()