Example no. 1
def writeStravaDetailedActivitySQL(result):
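    """Persist one Strava detailed-activity API result into SQLite.

    Per-split speeds, heart rates and start dates are flattened into
    comma-separated strings, and every column value is zlib-compressed and
    wrapped in sqlite3.Binary so it is stored as a BLOB.  Assumes the time,
    sqlite3 and zlib modules are imported and that kDatabaseName and
    log_time_string() are defined elsewhere in the module.
    """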

    activity_id = result['id']
    activity_start_date_local = time.strptime(result['start_date_local'],
                                              '%Y-%m-%dT%H:%M:%SZ')
    activity_start_date_local_seconds = time.mktime(activity_start_date_local)
    activity_type = result['type']
    activity_average_speed = None
    activity_distance = None

    activity_split_speeds = []
    activity_split_hearts = []
    activity_split_start_dates_local = []

    if activity_type in ('Run', 'Walk'):
        activity_average_speed = result['average_speed']
        activity_distance = result['distance']

    elapsed_time = 0
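    # Walk the metric splits, collecting per-split speed and heart-rate values
    # and reconstructing each split's local start time from the accumulated
    # elapsed time.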
    if 'splits_metric' in result:
        activity_splits_metrics = result['splits_metric']
        for activity_splits_metric in activity_splits_metrics:
            activity_split_speeds.append(
                activity_splits_metric['average_speed'])
            if "average_heartrate" in activity_splits_metric:
                activity_split_hearts.append(
                    activity_splits_metric['average_heartrate'])
            else:
                activity_split_hearts.append("0")
            activity_split_start_dates_local.append(
                time.strftime(
                    '%Y-%m-%dT%H:%M:%SZ',
                    time.localtime(activity_start_date_local_seconds +
                                   elapsed_time)))
            elapsed_time += int(activity_splits_metric['elapsed_time'])

    print(activity_id,
          time.strftime('%Y-%m-%dT%H:%M:%SZ', activity_start_date_local),
          activity_type, activity_average_speed, activity_distance)
    # print(activity_split_speeds, activity_split_hearts, activity_split_start_dates_local)

    activity_split_speeds_string = ",".join(
        [str(x) for x in activity_split_speeds])
    activity_split_hearts_string = ",".join(
        [str(x) for x in activity_split_hearts])
    activity_split_start_dates_local = ",".join(
        [str(x) for x in activity_split_start_dates_local])
    # Connect to the database
    try:
        connection = sqlite3.connect(kDatabaseName)
        cursor = connection.cursor()
    except sqlite3.Error as error:
        print(log_time_string() + "Error: " + error.args[0])
        return False

    # Create table (would error if already exists)
    try:
        create_stravaDetailedActivities_table_query = """CREATE TABLE stravaDetailedActivities(
            activity_id varchar(400),
            activity_start_date_local varchar(400),
            activity_type varchar(400),
            activity_average_speed varchar(400),
            activity_distance varchar(400),
            activity_split_speeds varchar(1000),
            activity_split_hearts varchar(1000),
            activity_split_start_dates_local varchar(1000),
            PRIMARY KEY (activity_id)
        );"""

        cursor.execute(create_stravaDetailedActivities_table_query)
    except sqlite3.Error as error:
        # Ignore the "already exists" error; warn about anything else.
        if not error.args[0].startswith(
                "table stravaDetailedActivities already exists"):
            print(log_time_string() + "Warning: " + error.args[0])

    # Insert the record to the table
    try:
        cursor.execute(
            "INSERT INTO stravaDetailedActivities VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
            (sqlite3.Binary(zlib.compress(str(activity_id).encode("utf-8"))),
             sqlite3.Binary(
                 zlib.compress((time.strftime(
                     '%Y-%m-%dT%H:%M:%SZ',
                     activity_start_date_local)).encode("utf-8"))),
             sqlite3.Binary(zlib.compress(activity_type.encode("utf-8"))),
             sqlite3.Binary(
                 zlib.compress(str(activity_average_speed).encode("utf-8"))),
             sqlite3.Binary(
                 zlib.compress(str(activity_distance).encode("utf-8"))),
             sqlite3.Binary(
                 zlib.compress(activity_split_speeds_string.encode("utf-8"))),
             sqlite3.Binary(
                 zlib.compress(activity_split_hearts_string.encode("utf-8"))),
             sqlite3.Binary(
                 zlib.compress(
                     activity_split_start_dates_local_string.encode("utf-8")))))
        connection.commit()
        print(log_time_string() + "Inserted " + str(activity_id) +
              " into the stravaDetailedActivities table.")
    except sqlite3.Error as error:
        # Ignore duplicate inserts; report any other error and fail.
        if not error.args[0].startswith("UNIQUE constraint failed"):
            print(log_time_string() + "Error: " + error.args[0])
            return False

    connection.close()
    return True
Example no. 2
def checksums_to_blob(checksums):
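    # Pack the checksums into a typed array (array and CHECKUMS_ARRAY_TYPE are
    # assumed to come from the surrounding module) and wrap the raw bytes in
    # sqlite3.Binary so they are stored as a single BLOB.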
    blob = array(CHECKUMS_ARRAY_TYPE, checksums)
    data = blob.tobytes()
    return sqlite3.Binary(data)
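
The reverse conversion is not part of this example; a minimal sketch, assuming
the same CHECKUMS_ARRAY_TYPE type code and the array class from Python's
array module, would be:

def blob_to_checksums(blob):
    # Rebuild the typed array from the BLOB's raw bytes and return plain values.
    checksums = array(CHECKUMS_ARRAY_TYPE)
    checksums.frombytes(bytes(blob))
    return list(checksums)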
Example no. 3
class _ConnectionState(object):
    def reset(self):
        self.conn = None
        self.closed = True

    def set_connection(self, conn):
        self.conn = conn
        self.closed = False


class _ConnectionLocal(_ConnectionState, threading.local):
    pass


# Python 2.x may return <buffer> object for BLOB columns.
to_bytes = lambda b: bytes(b) if not isinstance(b, bytes) else b
to_blob = lambda b: sqlite3.Binary(b)


class SqliteStorage(BaseStorage):
    table_kv = ('create table if not exists kv ('
                'queue text not null, key text not null, value blob not null, '
                'primary key(queue, key))')
    table_sched = ('create table if not exists schedule ('
                   'id integer not null primary key, queue text not null, '
                   'data blob not null, timestamp real not null)')
    index_sched = ('create index if not exists schedule_queue_timestamp '
                   'on schedule (queue, timestamp)')
    table_task = ('create table if not exists task ('
                  'id integer not null primary key, queue text not null, '
                  'data blob not null, priority real not null default 0.0)')
    index_task = ('create index if not exists task_priority_id on task '
Example no. 4
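    # Pickle the value and wrap it in sqlite.Binary (here sqlite is assumed to
    # be the sqlite3 module imported under that alias) so the pickled payload
    # is stored as a BLOB.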
    def __setitem__(self, key, item):
        super(DbPickleDict, self).__setitem__(
            key, sqlite.Binary(pickle.dumps(item)))
Example no. 5
def encode(obj):
    """Serialize an object using pickle to a binary format accepted by SQLite."""
    return sqlite3.Binary(dumps(obj, protocol=PICKLE_PROTOCOL))
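
The matching decoder is not shown in this example; a minimal sketch, assuming
loads is imported from pickle alongside dumps, would be:

def decode(blob):
    """Deserialize an object stored by encode() back from its SQLite BLOB."""
    return loads(bytes(blob))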
Example no. 6
__author__ = 'Muhammad'

import sqlite3
conn = sqlite3.connect('database.db')
cur = conn.cursor()


def create_table():
    cur.execute(
        "CREATE TABLE IF NOT EXISTS notes (id INTEGER PRIMARY KEY UNIQUE NOT NULL, note VARCHAR(1024), file BLOB, tags VARCHAR(256))"
    )


create_table()

with open("أ.mp3", "rb") as input_file:
    ablob = input_file.read()
    cur.execute("INSERT INTO notes (id, file) VALUES(0, ?)",
                [sqlite3.Binary(ablob)])
    conn.commit()

with open("Output.mp3", "wb") as output_file:
    cur.execute("SELECT file FROM notes WHERE id = 0")
    ablob = cur.fetchone()
    output_file.write(ablob[0])

cur.close()
conn.close()
Example no. 7
def _get_claims(cols, for_count=False, **constraints) -> Tuple[str, Dict]:
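    # Build the SELECT statement and bound parameters for a claim search.
    # Binary values (claim hashes, channel hashes, txo hashes) are wrapped in
    # sqlite3.Binary so they are bound as BLOBs rather than text.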
    if 'order_by' in constraints:
        sql_order_by = []
        for order_by in constraints['order_by']:
            is_asc = order_by.startswith('^')
            column = order_by[1:] if is_asc else order_by
            if column not in ORDER_FIELDS:
                raise NameError(f'{column} is not a valid order_by field')
            if column == 'name':
                column = 'normalized'
            sql_order_by.append(
                f"claim.{column} ASC" if is_asc else f"claim.{column} DESC")
        constraints['order_by'] = sql_order_by

    ops = {'<=': '__lte', '>=': '__gte', '<': '__lt', '>': '__gt'}
    for constraint in INTEGER_PARAMS:
        if constraint in constraints:
            value = constraints.pop(constraint)
            postfix = ''
            if isinstance(value, str):
                if len(value) >= 2 and value[:2] in ops:
                    postfix, value = ops[value[:2]], value[2:]
                elif len(value) >= 1 and value[0] in ops:
                    postfix, value = ops[value[0]], value[1:]
            if constraint == 'fee_amount':
                value = Decimal(value) * 1000
            constraints[f'claim.{constraint}{postfix}'] = int(value)

    if constraints.pop('is_controlling', False):
        if {'sequence', 'amount_order'}.isdisjoint(constraints):
            for_count = False
            constraints['claimtrie.claim_hash__is_not_null'] = ''
    if 'sequence' in constraints:
        constraints['order_by'] = 'claim.activation_height ASC'
        constraints['offset'] = int(constraints.pop('sequence')) - 1
        constraints['limit'] = 1
    if 'amount_order' in constraints:
        constraints['order_by'] = 'claim.effective_amount DESC'
        constraints['offset'] = int(constraints.pop('amount_order')) - 1
        constraints['limit'] = 1

    if 'claim_id' in constraints:
        claim_id = constraints.pop('claim_id')
        if len(claim_id) == 40:
            constraints['claim.claim_id'] = claim_id
        else:
            constraints['claim.claim_id__like'] = f'{claim_id[:40]}%'
    elif 'claim_ids' in constraints:
        constraints['claim.claim_id__in'] = constraints.pop('claim_ids')

    if 'reposted_claim_id' in constraints:
        constraints['claim.reposted_claim_hash'] = sqlite3.Binary(
            unhexlify(constraints.pop('reposted_claim_id'))[::-1])

    if 'name' in constraints:
        constraints['claim.normalized'] = normalize_name(
            constraints.pop('name'))

    if 'public_key_id' in constraints:
        constraints['claim.public_key_hash'] = sqlite3.Binary(
            ctx.get().ledger.address_to_hash160(
                constraints.pop('public_key_id')))
    if 'channel_hash' in constraints:
        constraints['claim.channel_hash'] = sqlite3.Binary(
            constraints.pop('channel_hash'))
    if 'channel_ids' in constraints:
        channel_ids = constraints.pop('channel_ids')
        if channel_ids:
            constraints['claim.channel_hash__in'] = [
                sqlite3.Binary(unhexlify(cid)[::-1]) for cid in channel_ids
            ]
    if 'not_channel_ids' in constraints:
        not_channel_ids = constraints.pop('not_channel_ids')
        if not_channel_ids:
            not_channel_ids_binary = [
                sqlite3.Binary(unhexlify(ncid)[::-1])
                for ncid in not_channel_ids
            ]
            if constraints.get('has_channel_signature', False):
                constraints[
                    'claim.channel_hash__not_in'] = not_channel_ids_binary
            else:
                constraints['null_or_not_channel__or'] = {
                    'claim.signature_valid__is_null': True,
                    'claim.channel_hash__not_in': not_channel_ids_binary
                }
    if 'blocklist_channel_ids' in constraints:
        blocklist_ids = constraints.pop('blocklist_channel_ids')
        if blocklist_ids:
            blocking_channels = [
                sqlite3.Binary(unhexlify(channel_id)[::-1])
                for channel_id in blocklist_ids
            ]
            constraints.update({
                f'$blocking_channel{i}': a
                for i, a in enumerate(blocking_channels)
            })
            blocklist = ', '.join([
                f':$blocking_channel{i}' for i in range(len(blocking_channels))
            ])
            constraints[
                'claim.claim_hash__not_in#blocklist_channel_ids'] = f"""
                SELECT reposted_claim_hash FROM claim WHERE channel_hash IN ({blocklist})
            """
    if 'signature_valid' in constraints:
        has_channel_signature = constraints.pop('has_channel_signature', False)
        if has_channel_signature:
            constraints['claim.signature_valid'] = constraints.pop(
                'signature_valid')
        else:
            constraints['null_or_signature__or'] = {
                'claim.signature_valid__is_null': True,
                'claim.signature_valid': constraints.pop('signature_valid')
            }
    elif constraints.pop('has_channel_signature', False):
        constraints['claim.signature_valid__is_not_null'] = True

    if 'txid' in constraints:
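        # The stored txo_hash is the little-endian (byte-reversed) tx hash
        # followed by the 4-byte little-endian output index.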
        tx_hash = unhexlify(constraints.pop('txid'))[::-1]
        nout = constraints.pop('nout', 0)
        constraints['claim.txo_hash'] = sqlite3.Binary(tx_hash +
                                                       struct.pack('<I', nout))

    if 'claim_type' in constraints:
        constraints['claim.claim_type'] = CLAIM_TYPES[constraints.pop(
            'claim_type')]
    if 'stream_types' in constraints:
        stream_types = constraints.pop('stream_types')
        if stream_types:
            constraints['claim.stream_type__in'] = [
                STREAM_TYPES[stream_type] for stream_type in stream_types
            ]
    if 'media_types' in constraints:
        media_types = constraints.pop('media_types')
        if media_types:
            constraints['claim.media_type__in'] = media_types

    if 'fee_currency' in constraints:
        constraints['claim.fee_currency'] = constraints.pop(
            'fee_currency').lower()

    _apply_constraints_for_array_attributes(constraints, 'tag', clean_tags,
                                            for_count)
    _apply_constraints_for_array_attributes(constraints, 'language',
                                            lambda _: _, for_count)
    _apply_constraints_for_array_attributes(constraints, 'location',
                                            lambda _: _, for_count)

    if 'text' in constraints:
        constraints["search"] = constraints.pop("text")
        constraints["order_by"] = FTS_ORDER_BY
        select = f"SELECT {cols} FROM search JOIN claim ON (search.rowid=claim.rowid)"
    else:
        select = f"SELECT {cols} FROM claim"
    if not for_count:
        select += " LEFT JOIN claimtrie USING (claim_hash)"
    return query(select, **constraints)
Example no. 8
def add_person():
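    # Read the uploaded image's bytes and wrap them in sqlite3.Binary so they
    # are inserted into the BLOB column as binary data.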
    name = request.form.get('name')
    img = sqlite3.Binary(request.files.get('img').read())
    db_helpers.run_db('insert into users(name, image) values(?,?)',
                      (name, img))
    return redirect('/people/edit')