Example #1
 def _fetch_account(self, conn: sqlite3.Connection, account_id: int) -> Account:
     acc = conn.execute("SELECT * FROM Account WHERE id=?", (account_id,)).fetchone()
     priv_result = conn.execute("SELECT privilege FROM Privilege WHERE account=?", (account_id,)).fetchall()
     privileges = {pr["privilege"] for pr in priv_result}
     storydata_result = conn.execute("SELECT format, data FROM StoryData WHERE account=?", (account_id,)).fetchone()
     if storydata_result:
         if storydata_result["format"] == "json":
             storydata = json.loads(storydata_result["data"])
         elif storydata_result["format"] == "serpent":
             storydata = serpent.loads(storydata_result["data"])
         else:
             raise ValueError("invalid storydata format in database: " + storydata_result["format"])
         if not isinstance(storydata, dict):
             raise TypeError("storydata should be a dict")
     else:
         storydata = {}
     stats_result = dict(conn.execute("SELECT * FROM CharStat WHERE account=?", (account_id,)).fetchone() or {})
     del stats_result["id"]
     del stats_result["account"]
     stats = base.Stats()
     for key, value in stats_result.items():
         if hasattr(stats, key):
             setattr(stats, key, value)
         else:
             raise AttributeError("stats doesn't have attribute: " + key)
     stats.set_stats_from_race()   # initialize static stats from races table
     return Account(acc["name"], acc["email"], acc["pw_hash"], acc["pw_salt"], privileges,
                    acc["created"], acc["logged_in"], bool(acc["banned"]), stats, storydata)
    def __update_pragma(connection: sqlite3.Connection, name: str, value: Any) -> None:
        """
        Updates PRAGMA value

        :param connection:  Database connection
        :param name:        Pragma's name
        :param value:       Pragma's value
        """
        connection.execute("PRAGMA {0} = {1}".format(name, str(value)))
Example #3
 def _store_stats(self, conn: sqlite3.Connection, account_id: int, stats: base.Stats) -> None:
     columns = ["account"]
     values = [account_id]
     stat_vars = dict(vars(stats))
     for not_stored in ["bodytype", "language", "weight", "size"]:
         del stat_vars[not_stored]    # these are not stored, but always initialized from the races table
     for key, value in stat_vars.items():
         columns.append(key)
         values.append(value)
     sql = "INSERT INTO CharStat(" + ",".join(columns) + ") VALUES (" + ",".join('?' * len(columns)) + ")"
     conn.execute(sql, values)
def get_friend(ds_connection: sqlite3.Connection, id: str) -> dict:
    """
    Obtain a specific friend record and return a representation of it.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            SQLite datastore containing a friends table.
        id (str): An `id` value which will be used to find a specific
            datastore row.

    Returns:
        A JSON ready dictionary representing a specific
        row of the friends table.
    """
    cursor = ds_connection.execute(
        'select id, first_name, last_name, telephone, email, notes '
        'from friends where lower(id) = ?',
        [id.lower()])

    friend_row = cursor.fetchone()

    if friend_row:
        return {
            "id": friend_row[0],
            "first_name": friend_row[1],
            "last_name": friend_row[2],
            "telephone": friend_row[3],
            "email": friend_row[4],
            "notes": friend_row[5]}
def get_friends(ds_connection: sqlite3.Connection) -> list:
    """
    Return a representation of all rows in the friends table.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            SQLite datastore containing a friends table.

    Returns:
        A JSON ready dictionary representing all rows of the friends table.
    """
    cursor = ds_connection.execute(
        'select id, first_name, last_name, telephone, email, notes '
        'from friends')

    friends_collection = list()
    for friend_row in cursor.fetchall():
        friends_collection.append(
            {"id": friend_row[0],
             "first_name": friend_row[1],
             "last_name": friend_row[2],
             "telephone": friend_row[3],
             "email": friend_row[4],
             "notes": friend_row[5]})

    return friends_collection
def add_friend(ds_connection: sqlite3.Connection, entry_data: dict):
    """
    Create a new row in the friends table.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            SQLite datastore containing a friends table.
        entry_data (dict): The data needed to create a new entry.
    """
    ds_connection.execute(
        "insert into friends (id, first_name, last_name, telephone, email, notes) "
        "values (?, ?, ?, ?, ?, ?)",
        [entry_data['id'],
         entry_data['firstName'],
         entry_data['lastName'],
         entry_data['telephone'],
         entry_data['email'],
         entry_data['notes']])
    ds_connection.commit()
def fully_update_friend(ds_connection: sqlite3.Connection, entry_data: dict):
    """
    Update all aspects of given row in the friends table.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            SQLite datastore containing a friends table.
        entry_data (dict): The data needed to update a given entry.  The
            `id` value of this dictionary is used to identify the entry
            to update.
    """
    ds_connection.execute(
        "UPDATE friends "
        "SET id=?, first_name=?, last_name=?, telephone=?, email=?, notes=? "
        "WHERE lower(id) = ?",
        [entry_data['id'],
         entry_data['firstName'],
         entry_data['lastName'],
         entry_data['telephone'],
         entry_data['email'],
         entry_data['notes'],
         entry_data['id'].lower()])
    ds_connection.commit()
    def __read_pragma(connection: sqlite3.Connection, name: str) -> Any:
        """
        Reads PRAGMA value

        :param connection:  Database connection
        :param name:        Pragma's name
        """
        cursor = connection.execute("PRAGMA {0}".format(name))

        value = None
        row = cursor.fetchone()

        if row is not None:
            value = row[0]

        return value
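Taken together, __update_pragma and __read_pragma just write and read a single PRAGMA value. A minimal standalone sketch of the same pattern (the database filename is a placeholder; PRAGMA statements cannot be parameterised, which is why both helpers interpolate the name directly):

import sqlite3

conn = sqlite3.connect("example.db")  # placeholder path
conn.execute("PRAGMA journal_mode = WAL")             # what __update_pragma("journal_mode", "WAL") does
row = conn.execute("PRAGMA journal_mode").fetchone()  # what __read_pragma("journal_mode") does
print(row[0] if row is not None else None)            # 'wal' for an on-disk database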
def delete_friend(ds_connection: sqlite3.Connection, id: str) -> None:
    """
    Delete a given entry from the friends table in a given SQLite connection.

    Args:
        ds_connection (sqlite3.Connection): An active connection to a
            SQLite datastore containing a friends table.
        id (str): An `id` value which will be used to find a specific
            datastore row to delete.
    """
    cursor = ds_connection.execute("DELETE  " "from friends where lower(id) = ?", [id.lower()])

    if not cursor.rowcount:
        raise ValueError()

    ds_connection.commit()
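A short end-to-end sketch of the friends helpers above. The CREATE TABLE statement is an assumption inferred from the column lists used in the queries, not taken from the original project:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE friends "
    "(id TEXT PRIMARY KEY, first_name TEXT, last_name TEXT, "
    "telephone TEXT, email TEXT, notes TEXT)")

add_friend(conn, {"id": "ada", "firstName": "Ada", "lastName": "Lovelace",
                  "telephone": "555-0100", "email": "ada@example.com",
                  "notes": "sample entry"})
print(get_friend(conn, "ADA"))   # id lookup is case-insensitive
print(len(get_friends(conn)))    # 1
delete_friend(conn, "ada")       # raises ValueError if nothing was deleted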
Example #10
def calc_longitudinal_qc(infiles):
    qcmap = {}
    qcpsms = []
    psms = parse_psms(infiles['psmtable'], is_instrument_qc=True)
    header = next(psms)
    perrorix = header.index('PrecursorError(ppm)')
    qvalix = header.index('QValue')
    msgfix = header.index('MSGFScore')
    rtix = header.index('Retention time(min)')
    misclix = header.index('missed_cleavage')
    for line in psms:
        # FIXME filtering in galaxy? will be incorrect num of peptides
        if float(line[qvalix]) > 0.01:
            continue
        qcpsms.append(line)
        if int(line[misclix]) < 4:
            mckey = 'miscleav{}'.format(line[misclix])
            try:
                qcmap[mckey] += 1
            except KeyError:
                qcmap[mckey] = 1
    qcmap['perror'] = calc_boxplot([psm[perrorix] for psm in qcpsms])
    qcmap['msgfscore'] = calc_boxplot([psm[msgfix] for psm in qcpsms])
    qcmap['rt'] = calc_boxplot([psm[rtix] for psm in qcpsms])
    con = Connection(infiles['sqltable'])
    qcmap.update({'psms': len(qcpsms), 
                  'scans': con.execute('SELECT COUNT(*) FROM mzml').fetchone()[0]})
    peps = []
    with open(infiles['peptable']) as fp:
        header, lines = table_reader(fp)
        areaix = header.index('MS1 area (highest of all PSMs)')
        protix = header.index('Protein(s)')
        count = 0
        unicount = 0
        for line in lines:
            count += 1
            if ';' not in line[protix]:
                unicount += 1
            try:
                peps.append(line)
            except ValueError:
                pass
    qcmap['peparea'] = calc_boxplot([x[areaix] for x in peps])
    qcmap.update({'peptides': count, 'unique_peptides': unicount})
    with open(infiles['prottable']) as fp:
        qcmap['proteins'] = sum(1 for _ in fp) - 1
    return qcmap
Example #11
def get_table_columns(conn: sqlite3.Connection, table_name):
    """Return the list of columns for the given table

    Args:
        conn (sqlite3.Connection): Sqlite3 Connection
        table_name (str): sqlite table name

    Returns:
        Columns description from table_info
        ((0, 'chr', 'str', 0, None, 1 ... ))

    References:
        used by async_insert_many_variants() to build queries with placeholders
    """
    return [
        c[1] for c in conn.execute(f"pragma table_info({table_name})")
        if c[1] != "id"
    ]
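A quick usage sketch for get_table_columns, using a throwaway table (the table name and columns are made up for illustration):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE variants (id INTEGER PRIMARY KEY, chr TEXT, pos INT, ref TEXT, alt TEXT)")
print(get_table_columns(conn, "variants"))  # ['chr', 'pos', 'ref', 'alt'] -- the id column is skipped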
Example #12
 def _get_all_files(
         transaction: sqlite3.Connection) -> typing.List[typing.Dict]:
     file_infos = list(
         map(
             lambda a: _lbry_file_dict(*a),
             transaction.execute(
                 "select file.rowid, file.*, stream.* "
                 "from file inner join stream on file.stream_hash=stream.stream_hash"
             ).fetchall()))
     stream_hashes = [
         file_info['stream_hash'] for file_info in file_infos
     ]
     claim_infos = get_claims_from_stream_hashes(
         transaction, stream_hashes)
     for index in range(len(file_infos)):  # pylint: disable=consider-using-enumerate
         file_infos[index]['claim'] = claim_infos.get(
             file_infos[index]['stream_hash'])
     return file_infos
Example #13
def get_results_for_start_and_end_date(connection: sqlite3.Connection,
                                       start_datetime: datetime,
                                       end_datetime: datetime) -> List[Dict]:
    """
    Returns all measurements between `start_datetime` and `end_datetime`.
    """
    query = """
    SELECT m.timestamp, d.download_rate, u.upload_rate, p.ping
    FROM measurements m
    JOIN downloads d ON m.measurement_id = d.measurement_id
    JOIN uploads u ON m.measurement_id = u.measurement_id
    JOIN pings p ON m.measurement_id = p.measurement_id
    WHERE m.timestamp >= ? AND m.timestamp <= ?;
    """
    connection.row_factory = sqlite3.Row
    cursor = connection.execute(query, [start_datetime, end_datetime])

    return [dict(row) for row in cursor.fetchall()]
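A possible call site for the function above. The database filename is a placeholder and the schema is assumed to exist; the datetime arguments are passed straight through as query parameters, exactly as the function does:

import sqlite3
from datetime import datetime, timedelta

connection = sqlite3.connect("measurements.db")  # placeholder path
last_week = get_results_for_start_and_end_date(
    connection,
    start_datetime=datetime.now() - timedelta(days=7),
    end_datetime=datetime.now())
for row in last_week:
    print(row["timestamp"], row["download_rate"], row["upload_rate"], row["ping"])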
Example #14
def get_all_lbry_files(
        transaction: sqlite3.Connection) -> typing.List[typing.Dict]:
    files = []
    signed_claims = {}
    for (
            rowid, stream_hash, file_name, download_dir, data_rate, status, _,
            sd_hash, stream_key, stream_name, suggested_file_name, *claim_args
    ) in _batched_select(
            transaction, "select file.rowid, file.*, stream.*, c.* "
            "from file inner join stream on file.stream_hash=stream.stream_hash "
            "inner join content_claim cc on file.stream_hash=cc.stream_hash "
            "inner join claim c on cc.claim_outpoint=c.claim_outpoint "
            "where file.stream_hash in {} "
            "order by c.rowid desc", [
                stream_hash
                for (stream_hash,
                     ) in transaction.execute("select stream_hash from file")
            ]):

        claim = StoredStreamClaim(stream_hash, *claim_args)
        if claim.channel_claim_id:
            if claim.channel_claim_id not in signed_claims:
                signed_claims[claim.channel_claim_id] = []
            signed_claims[claim.channel_claim_id].append(claim)
        files.append({
            "rowid": rowid,
            "stream_hash": stream_hash,
            "file_name": file_name,  # hex
            "download_directory": download_dir,  # hex
            "blob_data_rate": data_rate,
            "status": status,
            "sd_hash": sd_hash,
            "key": stream_key,
            "stream_name": stream_name,  # hex
            "suggested_file_name": suggested_file_name,  # hex
            "claim": claim
        })
    for claim_name, claim_id in _batched_select(
            transaction,
            "select c.claim_name, c.claim_id from claim c where c.claim_id in {}",
            list(signed_claims.keys())):
        for claim in signed_claims[claim_id]:
            claim.channel_name = claim_name
    return files
Example #15
def test_add_censys_scan_data_success(connection: Connection_instance) -> None:
    """
    This test checks if we can successfully put scan data
    results into database (for Censys in this case)
    :param connection: sqlite3.Connection object
    :return: None
    """
    scan_data_values = [
        {
            "query": {"query": "one", "query_confidence": "one"},
            "results_count": 4,
            "results": [{"ip": "44.44.44.44"}]
        },
        {
            "query": {"query": "two", "query_confidence": "two"},
            "results_count": 5,
            "results": [{"ip": "55.55.55.55"}]
        },
        {
            "query": {"query": "three", "query_confidence": "three"},
            "results_count": 6,
            "results": [{"ip": "66.66.66.66"}]
        }
    ]
    for scan_data_value in scan_data_values:
        db.add_censys_scan_data(
            **scan_data_value
        )
    censys_data_results = connection.execute(
        """
        SELECT * FROM censys_results 
        WHERE id = (
            SELECT max(id) 
            FROM censys_results
        )
        """
    ).fetchall()[0]
    assert isinstance(censys_data_results[0], int)
    assert isinstance(censys_data_results[1], int)
    assert isinstance(censys_data_results[2], int)
    assert censys_data_results[3] == "three"
    assert censys_data_results[4] == "three"
    assert censys_data_results[5] == 6
    assert loads(censys_data_results[6]) == [{"ip": "66.66.66.66"}]
Example #16
def test_add_shodan_scan_data_success(connection: Connection_instance) -> None:
    """
    This test checks if we can successfully put scan data
    results into database (for Shodan in this case)
    :param connection: sqlite3.Connection object
    :return: None
    """
    scan_data_values = [
        {
            "query": {"query": "one", "query_confidence": "one"},
            "results_count": 1,
            "results": [{"ip": "11.11.11.11"}]
        },
        {
            "query": {"query": "two", "query_confidence": "two"},
            "results_count": 2,
            "results": [{"ip": "22.22.22.22"}]
        },
        {
            "query": {"query": "three", "query_confidence": "three"},
            "results_count": 3,
            "results": [{"ip": "33.33.33.33"}]
        }
    ]
    for scan_data_value in scan_data_values:
        db.add_shodan_scan_data(
            **scan_data_value
        )
    shodan_data_results = connection.execute(
        """
        SELECT * FROM shodan_results 
        WHERE id = (
            SELECT max(id) 
            FROM shodan_results
        )
        """
    ).fetchall()[0]
    assert isinstance(shodan_data_results[0], int)
    assert isinstance(shodan_data_results[1], int)
    assert isinstance(shodan_data_results[2], int)
    assert shodan_data_results[3] == "three"
    assert shodan_data_results[4] == "three"
    assert shodan_data_results[5] == 3
    assert loads(shodan_data_results[6]) == [{"ip": "33.33.33.33"}]
Example #17
def faker(con: sqlite3.Connection, count=100_000):
    con.execute('BEGIN')
    for _ in range(count):
        age = get_random_age()
        active = get_random_active()
        # switch for area code
        if get_random_bool():
            # random 6 digit number
            area = get_random_area_code()
            con.execute('INSERT INTO user VALUES (NULL,?,?,?)', (area, age, active))
        else:
            con.execute('INSERT INTO user VALUES (NULL,NULL,?,?)', (age, active))
    con.commit()
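A self-contained sketch around faker(). The user schema and the get_random_* stand-ins are assumptions inferred from the INSERT statements; the real project defines its own helpers:

import random
import sqlite3

def get_random_age(): return random.randint(18, 90)                 # stand-in
def get_random_active(): return random.randint(0, 1)                # stand-in
def get_random_bool(): return random.random() < 0.5                 # stand-in
def get_random_area_code(): return random.randint(100000, 999999)   # stand-in: "random 6 digit number"

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE user (id INTEGER PRIMARY KEY, area INT, age INT, active INT)")
faker(con, count=1_000)
print(con.execute("SELECT COUNT(*) FROM user").fetchone()[0])  # 1000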
def get_friend(ds_connection: sqlite3.Connection, id: str) -> dict:
    """
    docstring
    """
    cursor = ds_connection.execute(
        'select id, first_name, last_name, telephone, email, notes '
        'from friends where lower(id) = ?', [id.lower()])

    friend_row = cursor.fetchone()

    if friend_row:
        return {
            "id": friend_row[0],
            "first_name": friend_row[1],
            "last_name": friend_row[2],
            "telephone": friend_row[3],
            "email": friend_row[4],
            "notes": friend_row[5]
        }
def get_friends(ds_connection: sqlite3.Connection) -> list:
    """docstring"""

    cursor = ds_connection.execute(
        "select id, first_name, last_name, telephone, email, notes "
        "from friends")

    friends_collection = list()
    for friend_row in cursor.fetchall():
        friends_collection.append({
            "id": friend_row[0],
            "first_name": friend_row[1],
            "last_name": friend_row[2],
            "telephone": friend_row[3],
            "email": friend_row[4],
            "notes": friend_row[5]
        })

    return friends_collection
def test_database_existing_scan_information_columns(
        connection: Connection_instance) -> None:
    """
    Check column names of 'scan_information' table
    :param connection: sqlite3.Connection object
    :return: None
    """
    assert sorted(
        connection.execute(
            "PRAGMA table_info(scan_information)").fetchall()) == sorted([
                (0, "id", "INTEGER", 0, None, 1),
                (1, "scan_name", "TEXT", 0, None, 0),
                (2, "scan_date", "TEXT", 0, None, 0),
                (3, "scan_start_time", "TEXT", 0, None, 0),
                (4, "scan_end_time", "TEXT", 0, None, 0),
                (5, "scan_duration", "TEXT", 0, None, 0),
                (6, "scan_total_products", "INT", 0, None, 0),
                (7, "scan_total_results", "INT", 0, None, 0),
            ])
def get_friends(ds_connection: sqlite3.Connection) -> list:
    """docstring"""

    cursor = ds_connection.execute("select id, first_name, last_name, telephone, email, notes " "from friends")

    friends_collection = list()
    for friend_row in cursor.fetchall():
        friends_collection.append(
            {
                "id": friend_row[0],
                "first_name": friend_row[1],
                "last_name": friend_row[2],
                "telephone": friend_row[3],
                "email": friend_row[4],
                "notes": friend_row[5],
            }
        )

    return friends_collection
def get_friend(ds_connection: sqlite3.Connection, id: str) -> dict:
    """
    docstring
    """
    cursor = ds_connection.execute(
        "select id, first_name, last_name, telephone, email, notes " "from friends where lower(id) = ?", [id.lower()]
    )

    friend_row = cursor.fetchone()

    if friend_row:
        return {
            "id": friend_row[0],
            "first_name": friend_row[1],
            "last_name": friend_row[2],
            "telephone": friend_row[3],
            "email": friend_row[4],
            "notes": friend_row[5],
        }
Example #23
def link_backup_activities(db: sqlite3.Connection, dir_activities: Path,
                           dir_activities_backup: Path) -> None:
    for activity in db.execute(
            "SELECT id, upload_id FROM activity WHERE upload_id IS NOT NULL"):
        activity_id = int(activity['id'])
        upload_id = int(activity['upload_id'])

        if find_gpx(dir_activities, activity_id):
            continue

        backup = find_gpx(dir_activities_backup, activity_id) or find_gpx(
            dir_activities_backup, upload_id)
        if backup:
            link = Path(dir_activities,
                        str(activity_id) + "".join(backup.suffixes))
            if hasattr(backup, 'link_to'):
                backup.link_to(link)  # type: ignore [attr-defined]
            else:
                os.link(backup, link)  # python 3.7 compat
Example #24
def count(
    conn: Connection,
    *,
    search: str = "",
    playlist_ids: list[int] = [],
    artist_ids: list[int] = [],
    years: list[int] = [],
) -> int:
    """
    Fetch the number of tracks matching the passed-in criteria. Parameters are
    optional; omitted ones are excluded from the matching criteria.

    :param search: A search string. We split this up into individual punctuation-less
                   tokens and return tracks whose titles and artists contain each
                   token.
    :param playlist_ids: A list of playlist IDs. We match tracks by the playlists in
                         this list. For a track to match, it must be in all playlists in
                         this list.
    :param artist_ids: A list of artist IDs. We match tracks by the artists in this
                       list. For a track to match, all artists in this list must be
                       included.
    :param years: A list of years. Filter out tracks that were not released in one of
                  the years in this list.
    :param conn: A connection to the database.
    :return: The number of matching releases.
    """
    filters, params = _generate_filters(search, playlist_ids, artist_ids,
                                        years)

    cursor = conn.execute(
        f"""
        SELECT COUNT(1)
        FROM music__tracks AS trks
        JOIN music__tracks__fts AS fts ON fts.rowid = trks.id
        JOIN music__releases AS rls ON rls.id = trks.release_id
        {"WHERE " + " AND ".join(filters) if filters else ""}
        """,
        params,
    )

    count = cursor.fetchone()[0]
    logger.debug(f"Counted {count} tracks that matched the filters.")
    return count
Example #25
def from_sha256(sha256: bytes, conn: Connection) -> Optional[T]:
    """
    Return the track with the provided sha256 hash.

    WARNING: The sha256 attribute is populated lazily. It may not exist. Only use this
    function in a scenario where the sha256 value is guaranteed to exist.

    :param sha256: The sha256 hash of the track to fetch.
    :param conn: A connection to the database.
    :return: The track with the provided sha256 hash, if it exists.
    """
    cursor = conn.execute(
        "SELECT * FROM music__tracks WHERE sha256 = ?",
        (sha256, ),
    )

    if row := cursor.fetchone():
        logger.debug(f"Fetched track {row['id']} from SHA256 {sha256.hex()}.")
        return from_row(row)
Example #26
def from_sha256_initial(sha256_initial: bytes,
                        conn: Connection) -> Optional[T]:
    """
    Return the track with the provided sha256_initial (hash of the first 1KB) hash.

    :param sha256_initial: The sha256_initial of the track to fetch.
    :param conn: A connection to the database.
    :return: The track with the provided ID, if it exists.
    """
    cursor = conn.execute(
        "SELECT * FROM music__tracks WHERE sha256_initial = ?",
        (sha256_initial, ),
    )

    if row := cursor.fetchone():
        logger.debug(
            f"Fetched track {row['id']} from initial SHA256 {sha256_initial.hex()}."
        )
        return from_row(row)
Example #27
    def get_all_by_date(self, conn: sqlite3.Connection,
                        date: datetime.datetime) -> List[ActivityEntry]:
        from_datetime = datetime.datetime(year=date.year,
                                          month=date.month,
                                          day=date.day)
        to_datetime = from_datetime + datetime.timedelta(days=1)
        cursor = conn.execute(
            "SELECT * FROM activity_entry WHERE"
            " (:from_date <= ae_last_update AND ae_last_update < :to_date)"
            " OR"
            " (:from_date <= ae_start AND ae_start < :to_date)"
            " ORDER BY ae_start ASC", {
                "from_date":
                datetime_helper.datetime_to_timestamp(from_datetime),
                "to_date": datetime_helper.datetime_to_timestamp(to_datetime)
            })
        db_activity_entries = cursor.fetchall()

        return [self._from_dbo(db_ae=db_ae) for db_ae in db_activity_entries]
def test_update_results_count_success(connection: Connection_instance) -> None:
    """
    Check if we can successfully and correctly
    update final counters of scan (when scan
    will be finished)
    :param connection: sqlite3.Connection object
    :return: None
    """
    db.update_results_count(total_products=42, total_results=1337)
    end_results_values = connection.execute("""
        SELECT * FROM scan_information 
        WHERE scan_information.id = (
            SELECT max(id) 
            FROM scan_information
        )
        """).fetchall()
    total_products, total_results = end_results_values[0][6:]
    assert total_products == 42
    assert total_results == 1337
Example #29
def export_series(db: sqlite3.Connection, write_api: influxdb_client.WriteApi,
                  bucket: str, org: str, series_name: str, start_time: int,
                  end_time: int):
    series_id = get_series_id(db, series_name)
    if series_id is None:
        log.error(f'No such series {series_name}')
        return
    cur = db.execute(
        f'SELECT (time/60*60) as time, value AS "{series_name}" FROM samples '
        'WHERE series=? AND time >= ? AND time <= ? ORDER BY time',
        (series_id, start_time, end_time))
    data: List[Tuple[int, float]] = list(cur)
    for timestamp, value in data:
        point = Point('sample') \
            .field(series_name, float(value)) \
            .tag('series', series_name) \
            .time(timestamp, WritePrecision.S)
        write_api.write(bucket, org, point)
    log.info(f'Wrote {len(data)} samples')
Example #30
def _load_cookies(db: sqlite3.Connection,
                  *,
                  now: Optional[float] = None) -> List["Morsel[str]"]:
    if now is None:
        now = time.time()
    if _ensure_schema(db, update=False):
        return []
    cur = db.execute(
        """\
            SELECT name, domain, path, cookie, timestamp FROM cookie_session
            WHERE timestamp >= ?
            ORDER BY name
        """,
        (now - SESSION_COOKIE_MAXAGE, ),
    )
    ret: List[Morsel[str]] = []
    for name, domain, path, value, timestamp in cur:
        ret.append(_make_cookie(name, value, domain, path))
    return ret
Example #31
def search(
    conn: Connection,
    search: str = "",
    page: int = 1,
    per_page: Optional[int] = None,
) -> list[T]:
    """
    Search for artists. Parameters are optional; omitted ones are excluded from the
    matching criteria.

    :param conn: A connection to the database.
    :param search: A search string. We split this up into individual punctuation-less
                   tokens and return artists whose names contain each token. If
                   specified, the returned artists will be sorted by match proximity.
    :param page: Which page of artists to return.
    :param per_page: The number of artists per page. Pass ``None`` to return all
                     artists (this will ignore ``page``).
    :return: All matching artists.
    """
    filters, params = _generate_filters(search)

    if per_page:
        params.extend([per_page, (page - 1) * per_page])

    cursor = conn.execute(
        f"""
        SELECT
            arts.*,
            COUNT(artsrls.release_id) AS num_releases
        FROM music__artists AS arts
        JOIN music__artists__fts AS fts ON fts.rowid = arts.id
        LEFT JOIN music__releases_artists AS artsrls
            ON artsrls.artist_id = arts.id
        {"WHERE " + " AND ".join(filters) if filters else ""}
        GROUP BY arts.id
        ORDER BY {"fts.rank" if search else "arts.name"}
        {"LIMIT ? OFFSET ?" if per_page else ""}
        """,
        params,
    )

    logger.debug(f"Searched artists with {cursor.rowcount} results.")
    return [from_row(row) for row in cursor]
Example #32
def exists_playlist_and_track(
    playlist_id: int, track_id: int, conn: Connection
) -> bool:
    """
    Check whether the given track is in the given playlist.

    :param playlist_id: The ID of the playlist.
    :param track_id: The ID of the track.
    :param conn: A connection to the database.
    :return: Whether the track is in the playlist.
    """
    cursor = conn.execute(
        """
        SELECT 1 FROM music__playlists_tracks
        WHERE playlist_id = ? AND track_id = ?
        """,
        (playlist_id, track_id),
    )
    return bool(cursor.fetchone())
Example #33
def ddl_transaction(db: sqlite3.Connection) -> Iterator[sqlite3.Connection]:
    """Automatically commit/rollback transactions containing DDL statements.

    Usage:

        with ddl_transaction(db):
            db.execute(...)
            db.execute(...)

    Note: ddl_transaction() does not work with executescript().

    Normally, one would expect to be able to use DDL statements in a
    transaction like so:

        with db:
            db.execute(ddl_statement)
            db.execute(other_statement)

    Initially, this worked around https://bugs.python.org/issue10740;
    the sqlite3 transaction handling would trigger an implicit commit
    if the first execute() was a DDL statement, which prevented it from
    being rolled back if there was an exception after it.

    This was fixed in Python 3.6, but there are still some cases that behave
    in the same way, e.g.:

        db = sqlite3.connect(':memory:')
        try:
            with db:
                db.execute("create table t (a, b);")
                1 / 0
        except ZeroDivisionError:
            pass
        # table t exists even if it shouldn't

    https://docs.python.org/3.5/library/sqlite3.html#controlling-transactions

    """
    # initialy from https://github.com/lemon24/boomtime/blob/master/boomtime/db.py
    isolation_level = db.isolation_level
    try:
        db.isolation_level = None
        db.execute("BEGIN;")
        yield db
        db.execute("COMMIT;")
    except Exception:
        db.execute("ROLLBACK;")
        raise
    finally:
        db.isolation_level = isolation_level
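The generator above is evidently meant to be used through contextlib.contextmanager (the docstring shows it as a with-statement). A minimal sketch of the rollback behaviour under that assumption:

import sqlite3
from contextlib import contextmanager

ddl_transaction = contextmanager(ddl_transaction)  # assumption: the decorator is applied where the function is defined

db = sqlite3.connect(":memory:")
try:
    with ddl_transaction(db):
        db.execute("create table t (a, b);")
        raise RuntimeError("boom")
except RuntimeError:
    pass

# Unlike the plain `with db:` example from the docstring, table t does not exist here.
print(db.execute("select name from sqlite_master where type='table'").fetchall())  # []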
Example #34
def select_by_like_site_or_username(site_or_username_like: str,
                                    connection: sqlite3.Connection):
    with connection:
        try:
            row_ls: sqlite3.Cursor = connection.execute(
                "select id,site,username,password from pass ")
        except Exception as e:
            print(repr(e))
        else:
            plain_text_ls = [(row[0], dec(row[1],
                                          gconf.k.aes_key).decode("utf-8"),
                              dec(row[2], gconf.k.aes_key).decode("utf-8"),
                              mask(dec(row[3], gconf.k.aes_key)))
                             for row in row_ls]
            return [
                row for row in plain_text_ls
                if row[1].__contains__(site_or_username_like)
                or row[2].__contains__(site_or_username_like)
            ]
    return None
    def __tables_exist(self, connection: Connection):
        rows = connection.execute(
            """SELECT name FROM sqlite_master WHERE type='table';""")

        existing_tables = []
        for row in rows:
            existing_tables.append(row['name'])

        missing_tables = []
        if len(existing_tables) < len(self.tables):
            for table in self.tables:
                if table not in existing_tables:
                    print("Table: " + table + " not initialized!")
                    missing_tables.append(table)

            connection.commit()
            return False, missing_tables

        print("DB Already initialized...")
        return True, missing_tables
Example #36
def read_sql_generator(connection: sqlite3.Connection,
                       query: str) -> Iterator[Dict[str, Any]]:
    """
    Generator to query data from a SQLite connection as a dictionary.

    Args:
        connection: SQLite3 connection object
        query: SELECT Query

    Yields:
        Row of the query result set as a dictionary
    """
    cur = connection.execute(query)
    columns = [column[0] for column in cur.description]

    while True:
        values = cur.fetchone()
        if values is None:
            break
        yield dict(zip(columns, values))
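A short usage sketch for the generator, with a throwaway table:

import sqlite3

connection = sqlite3.connect(":memory:")
connection.execute("CREATE TABLE t (a INT, b TEXT)")
connection.executemany("INSERT INTO t VALUES (?, ?)", [(1, "x"), (2, "y")])
for row in read_sql_generator(connection, "SELECT a, b FROM t"):
    print(row)  # {'a': 1, 'b': 'x'} then {'a': 2, 'b': 'y'}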
Example #37
def create_tag_page(conn: Connection, tag: str) -> str:
    """Create HTML section that lists all notes with the tag."""
    sql = """
        SELECT id, title FROM Tags NATURAL JOIN Notes WHERE tag = ? ORDER BY id
    """
    items = []
    for nid, title in conn.execute(sql, (tag, )):
        item = Elem("li", f"[{nid}] ", Elem("a", title, href=f"#{nid}"))
        items.append(item)
    section = Elem("section",
                   Elem("h1", Elem("a",
                                   tag,
                                   href="#tags",
                                   title="List of tags")),
                   Elem("ul", *items),
                   id=tag,
                   title=tag,
                   style="display:none",
                   **{"class": "level1"})
    return render(section)
Example #38
def get_synset(db: sqlite3.Connection, word: str) -> Optional[Synset]:
    """Given a word, find its Synset (or None if not found)."""

    matching_synset = None
    upper_word = word.upper()

    # Get all likely synsets with case-insensitive LIKE first.
    # From those results, pick the most appropriate one.
    for row in db.execute("SELECT id, synset FROM synsets WHERE synset LIKE ?",
                          (f"%{word}%", )):
        words = parse_synset_units(row[1])
        if word in words:
            matching_synset = Synset(row[0], words)
            break  # It doesn't get any better, so...
        elif any([x.upper() == upper_word for x in words]):
            matching_synset = Synset(row[0], words)
            # Don't break yet, see if we can get a case match.

    # If we didn't find anything, we'll return None here.
    return matching_synset
Example #39
def check_token(usr: T, token: bytes, conn: Connection) -> bool:
    """
    Check if a given token is valid for a user.

    :param usr: The user to check.
    :param token: The token to check.
    :param conn: A connection to the database.
    :return: Whether the token is valid.
    """
    cursor = conn.execute(
        "SELECT token_hash FROM system__users WHERE id = ?",
        (usr.id, ),
    )

    row = cursor.fetchone()
    if not row:
        logger.debug(f"Did not find token for user {usr.id}.")
        return False

    return check_password_hash(row["token_hash"], token.hex())
Example #40
def test_update_end_time_success(connection: Connection_instance) -> None:
    """
    Check if we can successfully update time
    of scan - time when scan was finished
    :param connection: sqlite3.Connection object
    :return: None
    """
    db.update_end_time()
    end_time_values = connection.execute(
        """
        SELECT * FROM scan_information 
        WHERE scan_information.id = (
            SELECT max(id) 
            FROM scan_information
        )
        """
    ).fetchall()
    end_time, end_duration = end_time_values[0][4:6]
    assert end_time == str(db.scan_end_time.time().strftime("%H:%M:%S"))
    assert end_duration == str(db.scan_duration)
Example #41
def from_id(id: int, conn: Connection) -> Optional[T]:
    """
    Return the playlist entry with the provided ID.

    :param id: The ID of the playlist entry to fetch.
    :param conn: A connection to the database.
    :return: The playlist entry with the provided ID, if it exists.
    """
    cursor = conn.execute(
        """
        SELECT *
        FROM music__playlists_tracks
        WHERE id = ?
        """,
        (id,),
    )

    if row := cursor.fetchone():
        logger.debug(f"Fetched playlist entry {id}.")
        return from_row(row)
Example #42
def search(
    conn: Connection,
    created_by: Optional[int] = None,
    include_expired: bool = False,
    include_used: bool = False,
    page: int = 1,
    per_page: Optional[int] = None,
) -> list[T]:
    """
    Search for invites. Parameters are optional; omitted ones are excluded from the
    matching criteria.

    :param conn: A connection to the database.
    :param created_by: The user that created the invite.
    :param include_expired: Whether to include expired invites.
    :param include_used: Whether to include used invites.
    :param page: Which page of invites to return.
    :param per_page: The number of invites per page. Pass ``None`` to return all
                     invites (this will ignore ``page``).
    :return: All matching invites.
    """
    filters, params = _generate_filters(created_by, include_expired,
                                        include_used)

    if per_page:
        params.extend([per_page, (page - 1) * per_page])

    cursor = conn.execute(
        f"""
        SELECT *
        FROM system__invites
        {"WHERE " + " AND ".join(filters) if filters else ""}
        GROUP BY id
        ORDER BY created_at ASC
        {"LIMIT ? OFFSET ?" if per_page else ""}
        """,
        params,
    )

    logger.debug(f"Searched invites with {cursor.rowcount} results.")
    return [from_row(row) for row in cursor]
Example #43
def create_table(db: sqlite3.Connection):
    c.check_type(db, "db", sqlite3.Connection)

    L.info(s.log_create_table_in_db)
    with db:
        db.execute(
            """
           CREATE TABLE followers
           (
           id       INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
           name     TEXT            NOT NULL,
           in_name  TEXT            NOT NULL
           );
           """
        )

        db.execute(
            """
           CREATE TABLE meta
           (
           id       INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
           name     TEXT            NOT NULL,
           in_name  TEXT            NOT NULL,
           cookies  TEXT            NOT NULL
           );
            """
        )

        db.execute(
            """
           CREATE TABLE log
           (
           id               INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
           time             DATETIME        NOT NULL,
           follower_number  INT             NOT NULL,
           increase         INT             NOT NULL,
           message          TEXT            NOT NULL
           );
            """
        )
    L.info(s.success)
def initial_schema_and_setup(i_db_conn: sqlite3.Connection) -> None:
    # Auto-increment is not needed in our case: https://www.sqlite.org/autoinc.html

    i_db_conn.execute(
        "CREATE TABLE " + Schema.PhrasesTable.name + "("
        + Schema.PhrasesTable.Cols.id + " INTEGER PRIMARY KEY, "
        + Schema.PhrasesTable.Cols.vertical_order + " INTEGER NOT NULL, "
        + Schema.PhrasesTable.Cols.title + " TEXT NOT NULL, "
        + Schema.PhrasesTable.Cols.ib_phrase + " TEXT NOT NULL, "
        + Schema.PhrasesTable.Cols.ob_phrase + " TEXT NOT NULL, "
        + Schema.PhrasesTable.Cols.ib_short_phrase + " TEXT NOT NULL DEFAULT '', "
        + Schema.PhrasesTable.Cols.ob_short_phrase + " TEXT NOT NULL DEFAULT '', "
        + Schema.PhrasesTable.Cols.type + " INTEGER NOT NULL"
        + " DEFAULT " + str(mc_global.BreathingPhraseType.in_out.value)
        + ")"
    )

    i_db_conn.execute(
        "CREATE TABLE " + Schema.RestActionsTable.name + "("
        + Schema.RestActionsTable.Cols.id + " INTEGER PRIMARY KEY, "
        + Schema.RestActionsTable.Cols.vertical_order + " INTEGER NOT NULL, "
        + Schema.RestActionsTable.Cols.title + " TEXT NOT NULL"
        + ")"
    )

    i_db_conn.execute(
        "CREATE TABLE " + Schema.SettingsTable.name + "("
        + Schema.SettingsTable.Cols.id + " INTEGER PRIMARY KEY, "
        + Schema.SettingsTable.Cols.rest_reminder_active + " INTEGER NOT NULL"
        + " DEFAULT " + str(SQLITE_TRUE_INT) + ", "
        + Schema.SettingsTable.Cols.rest_reminder_interval + " INTEGER NOT NULL"
        + " DEFAULT " + str(DEFAULT_REST_REMINDER_INTERVAL_MINUTES_INT) + ", "
        + Schema.SettingsTable.Cols.rest_reminder_audio_filename + " TEXT NOT NULL"
        + " DEFAULT '" + mc_global.WIND_CHIMES_FILENAME_STR + "'" + ", "
        + Schema.SettingsTable.Cols.rest_reminder_volume + " INTEGER NOT NULL"
        + " DEFAULT " + str(DEFAULT_VOLUME_INT) + ", "
        + Schema.SettingsTable.Cols.rest_reminder_notification_type + " INTEGER NOT NULL"
        + " DEFAULT " + str(mc_global.NotificationType.Both.value) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_active + " INTEGER NOT NULL"
        + " DEFAULT " + str(SQLITE_TRUE_INT) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_interval + " INTEGER NOT NULL"
        + " DEFAULT " + str(DEFAULT_BREATHING_REMINDER_INTERVAL_MINUTES_INT) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_audio_filename + " TEXT NOT NULL"
        + " DEFAULT '" + mc_global.SMALL_BELL_SHORT_FILENAME_STR + "'" + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_volume + " INTEGER NOT NULL"
        + " DEFAULT " + str(DEFAULT_VOLUME_INT) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_notification_type + " INTEGER NOT NULL"
        + " DEFAULT " + str(mc_global.NotificationType.Both.value) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_phrase_setup + " INTEGER NOT NULL"
        + " DEFAULT " + str(mc_global.PhraseSetup.Long.value) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_nr_before_dialog + " INTEGER NOT NULL"
        + " DEFAULT " + str(DEFAULT_BREATHING_REMINDER_NR_BEFORE_DIALOG_INT) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_dialog_audio_active + " INTEGER NOT NULL"
        + " DEFAULT " + str(SQLITE_TRUE_INT) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_dialog_close_on_hover + " INTEGER NOT NULL"
        + " DEFAULT " + str(SQLITE_FALSE_INT) + ", "
        + Schema.SettingsTable.Cols.breathing_reminder_text + " TEXT NOT NULL"
        + " DEFAULT ''" + ", "
        + Schema.SettingsTable.Cols.breathing_dialog_phrase_selection + " INTEGER NOT NULL"
        + " DEFAULT " + str(mc_global.PhraseSelection.random.value) + ", "
        + Schema.SettingsTable.Cols.prep_reminder_audio_filename + " TEXT NOT NULL"
        + " DEFAULT '" + mc_global.SMALL_BELL_LONG_FILENAME_STR + "'" + ", "
        + Schema.SettingsTable.Cols.prep_reminder_audio_volume + " INTEGER NOT NULL"
        + " DEFAULT " + str(DEFAULT_VOLUME_INT) + ", "
        + Schema.SettingsTable.Cols.run_on_startup + " INTEGER NOT NULL"
        + " DEFAULT " + str(SQLITE_FALSE_INT)
        + ")"
    )

    db_connection = Helper.get_db_connection()

    db_cursor = db_connection.cursor()
    db_cursor.execute(
        "INSERT OR IGNORE INTO " + Schema.SettingsTable.name + "("
        + Schema.SettingsTable.Cols.id
        + ") VALUES (?)", (SINGLE_SETTINGS_ID_INT,)
    )
    # -please note "OR IGNORE"
    db_connection.commit()
def get_schema_version(i_db_conn: sqlite3.Connection) -> int:
    t_cursor = i_db_conn.execute("PRAGMA user_version")
    return t_cursor.fetchone()[0]
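A hypothetical write counterpart to get_schema_version (not part of the original project); PRAGMA statements cannot take placeholders, so the value is validated and interpolated:

def set_schema_version(i_db_conn: sqlite3.Connection, i_version: int) -> None:
    # int() guards against interpolating anything other than a number
    i_db_conn.execute("PRAGMA user_version = {}".format(int(i_version)))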
 def drop_db_table(i_db_conn: sqlite3.Connection, i_table_name: str) -> None:
     i_db_conn.execute("DROP TABLE IF EXISTS " + i_table_name)
Example #47
class SimpleBot(Client):
    def on_connected(self):
        self.init_db()

        self.authorized_users = list()
        self.command_prefix = "*"

        for channel in self.config['channels']:
            self.join(channel)

        self.aliases = {
            "telka": ["телка", "телочка"]
        }

    def on_disconnected(self):
        if self.db:
            self.db.close()

    def init_db(self):
        self.db = Connection(self.config['database'])
        self.db.execute("""
            CREATE TABLE IF NOT EXISTS
            message_log (
                id INTEGER PRIMARY KEY ASC,
                channel TEXT,
                nick TEXT,
                ident TEXT,
                host TEXT,
                message TEXT,
                date INTEGER
            )
        """)
        self.db.execute("""
            CREATE TABLE IF NOT EXISTS
            social_telki (
              id INTEGER PRIMARY KEY ASC,
              rating INTEGER,
              displayed_times INTEGER,
              url TEXT,
              who_added TEXT,
              date_added INTEGER
            )
        """)
        def dict_factory(cursor, row):
            d = {}
            for idx, col in enumerate(cursor.description):
                d[col[0]] = row[idx]
            return d

        self.db.row_factory = dict_factory

    def get_unix_timestamp(self):
        return int(time.mktime(datetime.datetime.now().timetuple()))

    def log_message(self, nick_name, ident, host_name, message, channel):
        self.db.execute("""
        INSERT INTO message_log (channel, nick, ident, host, message, date)
        VALUES (?,?,?,?,?,?)
        """, (channel, nick_name, ident, host_name, message, self.get_unix_timestamp()))
        self.db.commit()

    def on_private(self, nick, ident, host, message):
        print u"pm from:%s: %s" % (nick, message)

    def on_channel(self, nick, ident, host, message, channel):
        print u"on %s from %s: %s" % (channel, nick, message)


    def on_privmsg(self, nick, ident, host, params, trailing):

        channel = None
        message = trailing

        if params == self.config['nick']:
            self.on_private(nick, ident, host, message)
        else:
            channel = params.decode('utf-8')
            self.on_channel(nick, ident, host, message, channel)

        self.log_message(nick.decode('utf-8'), ident.decode('utf-8'), host.decode('utf-8'), message.decode('utf-8'), channel)

        if message.startswith(self.command_prefix):
            self.handle_command(nick, ident, host, message[len(self.command_prefix):], channel)

    def on_nick(self, old_nick, ident, host, params, new_nick):
        if old_nick == self.nick_name:
            self.print_debug("Yay! My name changed to: %s" % new_nick)
            self.nick_name = new_nick

    def is_authorized(self, nick, ident, host):
        for login_data in self.authorized_users:
            if login_data[0] == nick and login_data[1] == ident and login_data[2] == host:
                return True
        return False

    def authorize(self, nick, ident, host, password):
        for bot_oper in self.config['allowed_ops']:
            if bot_oper['nick'] == nick and bot_oper['password'] == password:
                self.authorized_users.append((nick, ident, host))
                return True

    def handle_command(self, nick, ident, host, command, channel):
        args = None
        if " " in command:
            command, args = command.split(" ", 1)

        command_name = "cmd_%s" %  command.lower()

        try:
            meth = getattr(self, command_name, None)
            if meth is None:
                for cmdname, aliases in self.aliases.items():
                    for alias in aliases:
                        if command == alias:
                            command_name = "cmd_%s" % cmdname.lower()
                            meth = getattr(self, command_name)
                            break
            if meth:
                meth(nick, ident, host, channel, args)

            ameth = getattr(self, "a%s" % command_name, None)
            if ameth is not None:
                if self.is_authorized(nick, ident, host):
                    ameth(nick, ident, host, channel, args)
                    return
                else:
                    self.say_to(nick, "Nope!")
        except Exception as e:
            print(e)
            self.say_to(nick, "Some error occurred while your command being executed :[")


    def cmd_auth(self, nick, ident, host, channel, args):
        if channel:
            self.say_to(nick, 'Noob :D', channel)

        if not self.is_authorized(nick, ident, host):
            if self.authorize(nick, ident, host, args):
                self.say_to(nick, "Auth successed!")
            else:
                self.say_to(nick, "Auth failed!")
        else:
            self.say_to(nick, "You're already authorized!")

    def acmd_die(self, nick, ident, host, channel, args):
        self.say_to(nick, "Quitting....")
        self._shutdown_pending = True

    def acmd_join(self, nick, ident, host, channel, args):
        self.join(args)

    def acmd_part(self, nick, ident, host, channel, args):
        self.part(args)

    def cmd_lastlog(self, nick, ident, host, channel, args):

        known_args = ["nick", "channel", "message", "date", "limit"]
        if not args:
            self.say_to(nick, "Known args are: %s" % ", ".join(known_args), channel)
            return

        arg_dict = dict()

        if args:
            arg_pairs = args.split(" ")
            wrong_attrs = list()

            for p in arg_pairs:
                if "=" in p:
                    k,v = p.split("=")
                    if k in known_args:
                        arg_dict[k] = v
                    else:
                        wrong_attrs.append(k)
                else:
                    wrong_attrs.append(p)
            if wrong_attrs:
                self.say_to(nick, "Wrong or unknown attributes: %s" % ",".join(wrong_attrs), channel)

            if not arg_dict:
                return

        has_limit = 30
        if "limit" in arg_dict.keys():
            has_limit = arg_dict.pop("limit")

        query = [
            "SELECT * FROM ("
            "SELECT id, channel, nick, ident, host, message, date FROM message_log"
        ]
        if arg_dict:
            query.append("WHERE")

        query_conditions = list()
        placeholders = list()

        parsed = parse_data(arg_dict)
        #print parsed
        for attr, value in parsed:
            query_conditions.append("%s %s"% (attr, value[0]))
            placeholders.append(value[1])

        query.append(" AND ".join(query_conditions))

        query.append("LIMIT ?")
        placeholders.append(has_limit)

        query.append(") ORDER BY id ASC")

        self.print_debug("querying: %s" % " ".join(query))
        cur = self.db.execute(" ".join(query), placeholders)

        message = ["chan\tnick\tmessage"]
        for row in cur.fetchall():
            datime = datetime.datetime.fromtimestamp(row['date'])
            row['date'] = datime.strftime("%Y.%m.%d %H:%M:%S")
            message.append("%(date)s\t%(channel)s\t%(nick)s\t%(message)s" % row)

        if not message:
            self.say_to(nick, "Nothing found :[")
            return

        resp = requests.post("https://clbin.com", dict(clbin="\n".join(message)))
        if resp.status_code == 200:
            self.say_to(nick, resp.text, channel)
        else:
            self.say_to(nick, "Post failed. Code %d" % resp.status_code,channel)

    def acmd_nick(self, nick, ident, host, channel, args):
        self.sendMessage("NICK :%s" % args)

    def acmd_telki_loadf(self, nick, ident, host, channel, args):
        self.print_debug("Loading telki from plain file %s" % args)
        duplicated_rows = 0
        added_rows = 0
        processed_rows = 0
        incorrect_urls = 0
        if os.path.exists(args):
            args = open(args)
        elif "," in args:
            args = args.split(",")
        elif args.startswith("http"):
            args = [args]

        for line in args:
            clean_url = line.strip()
            if not clean_url.startswith("http"):
                incorrect_urls +=1
                continue
            cur = self.db.execute("SELECT COUNT(*) count FROM social_telki WHERE URL=?", (clean_url, ))
            count = cur.fetchone()
            if int(count['count']) <=0:
                self.db.execute("""INSERT INTO social_telki (
                    rating,
                    displayed_times,
                    url,
                    who_added,
                    date_added
                    )
                    VALUES
                    (?,?,?,?,?)""", (0, 0, clean_url, nick, self.get_unix_timestamp()))
                added_rows +=1
            else:
                duplicated_rows += 1
            processed_rows+=1
        self.db.commit()
        self.say_to(nick, "File loaded; total rows: %d; added: %s; doubles ignored: %s; incorrect urls: %d" % (processed_rows, added_rows, duplicated_rows, incorrect_urls), channel)

    def cmd_telka(self, nick, ident, host, channel, args):
        cur = self.db.execute("""
        SELECT id, url FROM social_telki
          WHERE displayed_times<= (SELECT MIN(displayed_times) FROM social_telki)
          ORDER BY RANDOM()
          LIMIT 1
        """)
        row = cur.fetchone()
        if row:
            cur.execute("UPDATE social_telki SET displayed_times=displayed_times+1 WHERE id=?", (row['id'], ))
            url = row['url']
            resp = requests.get(url)
            if resp.status_code == 200:
                self.say_to(args or nick, row['url'], channel)
                self.db.commit()
            else:
                self.say_to(nick, "Еще разок попробуй, ссыль битая :(", channel)
                self.db.rollback()
            return

        self.say_to(nick, "Nothing found O_o", channel)