Example #1
def native_tx_to_bin_sqlite(tx):
    """
    Convert a native transaction tuple into a bin tuple for sqlite.

    :param tx: native transaction tuple with float amounts and
               base64-encoded signature, public key and block hash
    :return: tuple with integer amounts and BLOB-wrapped binary fields
    """
    return (tx[0], tx[1], tx[2], tx[3], f8_to_int(tx[4]),
            Binary(b64decode(tx[5])), Binary(b64decode(tx[6])),
            Binary(b64decode(tx[7])), f8_to_int(tx[8]), f8_to_int(tx[9]),
            tx[10], tx[11])
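A bin tuple like this binds directly into a parameterized INSERT. A minimal sketch, assuming a hypothetical transactions table with the twelve columns listed in Example #3:

import sqlite3

con = sqlite3.connect("ledger.db")  # hypothetical database file
con.execute("INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
            native_tx_to_bin_sqlite(tx))
con.commit()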
Example #2
def akazer(pics=None, akaze=None, columns='ak_points,ak_desc'):
    """
    Not just for AKAZE any more!

    Parameters
    ----------
    pics = {id: local-path-to-pic, ...}
    akaze = cv2.AKAZE_create(), cv2.ORB_create(), cv2.KAZE_create(), etc.
    columns = two comma separated column names for the new data
    Returns
    -------
    list of image data formatted for entering into cards database
    """
    new_data = []
    if pics is None:
        pics = cards()
    if akaze is None:
        akaze = cv2.AKAZE_create()
    c1, c2 = columns.split(',')
    for kk, vv in pics.items():
        if vv:
            im = cv2.imread(vv)
        else:
            im = None
        if im is not None:
            akps, adesc = akaze.detectAndCompute(im, None)
            jk = [(a.pt, a.angle, a.class_id, a.octave, a.response, a.size) for a in akps]
            new_data.append({'id': kk, c1: jk, c2: Binary(adesc.dumps())})
        else:
            print("for id: {}, akazer failed to find pic on path: {}".format(kk, vv))
    return new_data
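The descriptors are stored with ndarray.dumps(), which is just a pickle of the array. A sketch of the read side, assuming the default ak_desc column name and a row fetched back from the cards database:

import pickle

adesc = pickle.loads(bytes(row['ak_desc']))  # BLOB/memoryview back to the ndarray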
Example #3
    def to_bin_tuple(self, sqlite_encode=False):
        """
        The transaction object as a bin tuple in the following order:
        'block_height', 'timestamp', 'address', 'recipient', 'amount', 'signature', 'public_key', 'block_hash',
        'fee', 'reward', 'operation', 'openfield'

        Bin format means amounts will be integers, and all content unencoded.
        """
        if sqlite_encode:
            # sqlite needs Binary() to encode blobs
            return (self.block_height, self.timestamp, self.address, self.recipient, self.amount,
                    Binary(self.signature), Binary(self.public_key), Binary(self.block_hash),
                    self.fee, self.reward, self.operation, self.openfield)

        return (self.block_height, self.timestamp, self.address, self.recipient, self.amount, self.signature,
                self.public_key, self.block_hash, self.fee, self.reward, self.operation, self.openfield)
Example #4
    def match(self, s, db, soffset=0, offsets=None):
        # a mutable default dict would be shared across calls; create a
        # fresh one unless the caller passes accumulated offsets in
        if offsets is None:
            offsets = {}

        y = self.sparse(s)
        # keys for the signal
        keys = self.extractKeys(y)

        # quantization factor for the offsets
        qt = max(self.dictionary.sizes)
        for hash_key, offset in keys:
            result = db.selectFingerprints(Binary(hash_key)[0:5])
            for r in result:
                if r[0] not in offsets:
                    offsets[r[0]] = []
                offsets[r[0]].append((r[1] - offset - soffset) // qt * qt)

        maxsongid = None
        max_offset = 0.
        second_max = 0.
        # build the histogram of offsets
        for song, o in offsets.items():
            # extract the most common offset
            tmp_max = o.count(max(set(o), key=o.count))
            if tmp_max >= max_offset:
                second_max = max_offset
                max_offset = tmp_max
                maxsongid = song
            elif tmp_max > second_max:
                second_max = tmp_max

        songid = None
        if max_offset > 4 and (second_max == 0 or max_offset / second_max >
                               1 + 0.6 / np.log(max_offset)):
            songid = maxsongid
        return songid, offsets
Example #5
    def stopOperation(self):
        self.toggleState(False)
        self.resourceWorker.terminate()
        self.resourceWorker.runThread = False
        self.geneticAlgorithm.terminate()
        self.timer.stop()
        if self.topChromosomes:
            self.parent.btnStop.setText('View Result')
            self.parent.btnStop.clicked.disconnect(self.stopOperation)
            self.parent.btnStop.clicked.connect(self.dialog.close)
            self.parent.lblCPU.setText('CPU Usage: Stopped')
            self.parent.lblMemory.setText('Memory Usage: Stopped')
            self.parent.lblStatus.setText('Status: Stopped')
            self.totalResource['cpu'] = mean(self.totalResource['cpu'])
            self.totalResource['memory'] = mean(self.totalResource['memory'])
            self.meta = [[chromosome[1], chromosome[0].fitnessDetails]
                         for chromosome in self.topChromosomes]
            conn = db.getConnection()
            cursor = conn.cursor()
            cursor.execute('INSERT INTO results (content) VALUES (?)', [Binary(
                pickle.dumps({'data': [chromosome[0].data for chromosome in self.topChromosomes],
                              'meta': self.meta,
                              'time': self.time.toString('hh:mm:ss'),
                              'resource': self.totalResource,
                              'rawData': self.data},
                             pickle.HIGHEST_PROTOCOL))])
            conn.commit()
            conn.close()
        else:
            self.dialog.close()
Example #6
    def test_insert_image_blob(self):
        img = new("RGB", (256, 256), "red")
        data = img_to_buf(img, 'jpeg').read()
        tempDB = self.__make_tempDB()
        tempDB.insert_image_blob(0, 0, 0, Binary(data))
        result = tempDB.cursor.execute("select count(*) from tiles;")
        assert result.fetchone()[0] == 1
Example #7
def sign_up_user():
    """
    Register user.

    Checks that the login and password lengths are in range and that the user
    does not already exist.
    Hashes and encrypts password using :func:`codec`.
    Stores user's data in database.

    request: {
        "username": str,
        "password": str
    }
    :return: dict of execution status
    :rtype: {
        "login_out_of_range": bool,
        "password_out_of_range": bool,
        "ok": bool
    }
    """

    username = request.authorization.username
    password = request.authorization.password

    # Make sure credential lengths are in range (4 to 19 characters).
    if not 4 <= len(username) < 20:
        return {"login_out_of_range": True}
    elif not 4 <= len(password) < 20:
        return {"login_out_of_range": False, "password_out_of_range": True}

    connection = create_connection("data.sqlite3")

    select_user = "SELECT id FROM users WHERE username LIKE :username"
    query_data = execute_read_query(connection, select_user, 0,
                                    {'username': username})

    # If user isn't registered, encrypt password and store in database.
    if query_data is None:
        password_hash = codec(password, 1)
        password_hash = Binary(password_hash)

        data_dict = {'username': username, 'password_hash': password_hash}
        create_user = ("INSERT INTO users (username, password_hash, registered) "
                       "VALUES (:username, :password_hash, strftime('%s','now'))")
        execute_query(connection, create_user, data_dict)

    else:
        connection.close()
        return {
            "login_out_of_range": False,
            "password_out_of_range": False,
            'ok': False
        }

    connection.close()
    return {
        "login_out_of_range": False,
        "password_out_of_range": False,
        'ok': True
    }
Example #8
def data2db(datas: List[str]):  # insert the data files into the DB
    now = datetime.datetime.today().strftime("%Y-%m-%d %H:%M")
    for data in datas:
        file = data.split('\\')[-1]
        file_name, file_extension = file.rsplit('.', 1)
        with open(data, 'rb') as blob:
            blob_data = blob.read()
            DbWriter(db_path).insert(file_name, file_extension, Binary(blob_data), now)
Example #9
    def processLog(self, file_path):
        # process sequence log

        if isfile(file_path):
            with open(file_path, 'rb') as log_file:
                ablob = log_file.read()
                return Binary(ablob)
        else:
            return None
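The returned Binary drops straight into a BLOB parameter. A short usage sketch, assuming a hypothetical logs table and an open cursor:

blob = self.processLog('/var/log/sequence.log')  # hypothetical path
if blob is not None:
    cursor.execute("INSERT INTO logs (name, content) VALUES (?, ?)",
                   ('sequence', blob))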
Example #10
    async def save(self, task: TaskInterface):
        async with aiosqlite.connect(self.storage_path) as con:
            await con.execute(
                "INSERT INTO tasks(ID, NAME, URL, COMMAND, OBJECT) VALUES (?, ?, ?, ?, ?)",
                [
                    str(task.task_id), task.name, task.url, task.command_type,
                    Binary(pickle.dumps(task))
                ])
            await con.commit()
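Loading reverses the pickling. A sketch of a hypothetical companion method, assuming the same tasks schema:

    async def load(self, task_id) -> TaskInterface:
        async with aiosqlite.connect(self.storage_path) as con:
            async with con.execute("SELECT OBJECT FROM tasks WHERE ID = ?",
                                   [str(task_id)]) as cursor:
                row = await cursor.fetchone()
        return pickle.loads(row[0])  # the BLOB comes back as bytes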
Example #11
    def dump_resource(self, resource):
        """ Dump the resource to the database. Check for redirects.
        """
        url_id = self._select_url(resource.url)
        for name, value in resource.headers.items():
            self.execute("INSERT INTO header(url_id, name, value) "
                         "VALUES (?, ?, ?)", url_id, name, value)
        content = Binary(resource.content)
        self.execute("INSERT INTO content(url_id, content, hash) "
                     "VALUES (?, ?, ?)", url_id, content, resource.hash)
Example #12
    def insert_tile(self, zoom, row, col, data):
        with self.db_connection as db_connection:
            cursor = db_connection.cursor()
            compression = self.compression
            data_type = self.data_type
            if compression and (data_type == "xray"):
                db_connection.text_factory = str
                data = blosc.pack_array(data, cname=compression)
            if data_type == "image/TIFF":
                if data.dtype != "uint8":
                    raise TypeError("dtype %s not supported" % data.dtype)
                image = Image.fromarray(np.uint8(data))
                buf = ioBuffer()
                if compression == "tiff_lzw":
                    TiffImagePlugin.WRITE_LIBTIFF = True
                    image.save(buf, "TIFF", compression=compression)
                    TiffImagePlugin.WRITE_LIBTIFF = False
                else:
                    image.save(buf, "TIFF", compression=compression)
                buf.seek(0)
                data = Binary(buf.read())
            if data_type == "image/JPEG2000":
                if data.dtype != "uint8":
                    raise TypeError("dtype %s not supported" % data.dtype)
                image = Image.fromarray(np.uint8(data))
                buf = ioBuffer()
                image.save(buf, "j2k")
                buf.seek(0)
                data = Binary(buf.read())
            cursor.execute("""
                INSERT INTO tiles
                    (zoom_level, tile_row, tile_column, tile_data)
                    VALUES (?,?,?,?)
            """, (zoom, row, col, data))
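For the blosc-packed xray branch, reading a tile back reverses pack_array. A sketch, assuming the tiles schema above and hypothetical lookup values:

row = cursor.execute(
    "SELECT tile_data FROM tiles WHERE zoom_level=? AND tile_row=? AND tile_column=?",
    (zoom, tile_row, tile_col)).fetchone()
array = blosc.unpack_array(bytes(row[0]))  # bytes() in case the BLOB arrives as a memoryview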
Example #13
def makeMultiLineString(c):
    if c.partCount == 1:
        values = ["<BI", 1, 2]
        ptrn, coords = lineSt(c.getPart(0))
    elif c.partCount > 1:
        values = ["<BI", 1, 5]
        ptrn, coords = multiLine(c)
    else:
        return False
    values[0] += ptrn
    values.extend(coords)
    return Binary(pack(*values))
Example #14
def makeMultiPolygon(c):
    if c.partCount == 1:
        values = ["<BI", 1, 3]
        ptrn, coords = linearRing(c.getPart(0))
    elif c.partCount > 1:
        values = ["<BI", 1, 6]
        ptrn, coords = multiRing(c)
    else:
        return False
    values[0] += ptrn
    values.extend(coords)
    return Binary(pack(*values))
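In both helpers the "<BI" prefix is the standard WKB header: a byte-order flag (1 = little endian) followed by a uint32 geometry type (2 = LineString, 3 = Polygon, 5 = MultiLineString, 6 = MultiPolygon). A minimal standalone sketch for a 2D point (type 1), independent of the helpers above:

from struct import pack
from sqlite3 import Binary

def make_point_wkb(x, y):
    # byte order 1 (little endian), geometry type 1 (Point), then two doubles
    return Binary(pack('<BIdd', 1, 1, float(x), float(y)))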
Example #15
    def _itercompress(self,
                      content,
                      isinstance=isinstance,
                      str=str,
                      bytes=bytes,
                      Binary=Binary):
        """Iteratively compress chunks generated by callable `content`"""
        compressor = compressobj()
        for uncompressed_chunk in content():
            if isinstance(uncompressed_chunk, str):
                chunk = compressor.compress(uncompressed_chunk.encode())
            elif isinstance(uncompressed_chunk, bytes):
                chunk = compressor.compress(uncompressed_chunk)
            else:
                _type = type(uncompressed_chunk).__name__
                raise TypeError("Content chunk is not str or bytes", _type)
            if chunk:
                yield Binary(chunk)
        chunk = compressor.flush(Z_FINISH)
        if chunk:
            yield Binary(chunk)
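Because all chunks come from a single zlib stream, the read side just concatenates them in order before decompressing. A sketch, assuming the Binary chunks were stored as consecutive rows in a hypothetical blobs table:

from zlib import decompress

chunks = [bytes(row[0]) for row in
          cursor.execute("SELECT chunk FROM blobs ORDER BY rowid")]
original = decompress(b"".join(chunks))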
Example #16
    def store_spatial_index(self, geofield):
        spindex = self.workspace.spatial_indexes.pop((self.name, geofield))
        spindex.close()
        temppath = spindex._temppath
        # get the byte data from the files
        with open(temppath + '.idx', 'rb') as fobj:
            idx = Binary(fobj.read())
        with open(temppath + '.dat', 'rb') as fobj:
            dat = Binary(fobj.read())
        # delete the temporary files from disk
        os.remove(temppath + '.idx')
        os.remove(temppath + '.dat')
        # update the idx and dat columns in the spatial_indexes table
        idxtable = self.workspace.table('spatial_indexes')
        cur = idxtable._cursor()
        cur.execute(
            '''UPDATE {rtree} SET rtree_idx=:idx, rtree_dat=:dat
                     WHERE tbl = '{table}' AND col = '{geofield}' '''.format(
                rtree=idxtable.name, table=self.name, geofield=geofield),
            dict(idx=idx, dat=dat))
        cur.close()
Example #17
    def car_info(self, url):
        soup = self.get_response(url)
        left_index = soup.find("div", {"class": "xq-left"}).findAll('p')
        name = left_index[0].text
        image_byte = requests.get(left_index[1].img['src']).content
        right_index = soup.find("ul", {"class": "xq-right"}).findAll('li')
        founded = right_index[3].span.text
        models = right_index[5].span.text
        website = right_index[7].span.text
        print("Insert Car Logo {}".format(name))
        _sql = "insert into car_logo(name,image,founded,models,website) values (?,?,?,?,?)"
        self.db.insert(_sql, (name, Binary(image_byte), founded, models, website))
Example #18
def test_combine_worker_dbs():
    session_folder = make_session_folder()
    # make a random number of tempdbs with dummy data
    img = new("RGB", (256, 256), "red")
    data = img_to_buf(img, 'jpeg').read()
    z = randint(2, 5)
    for x in range(z):
        TempDB(session_folder).insert_image_blob(x, 0, 0, Binary(data))
    # confirm that combine_worker_dbs assimilates all tempdb's into gpkg
    chdir(session_folder) # necessary to put gpkg in session_folder
    gpkg = Geopackage("test.gpkg", 4326)
    combine_worker_dbs(gpkg)
    result = gpkg.execute("select count(*) from tiles;")
    assert (result.fetchone())[0] == z
Example #19
    def update(self, desc="blobs/update"):
        """Run `self.__download_as_blob()` and insert result (optionally compressed) into `self.table` as BLOB"""
        blob = Binary(bytes(self.compressor(self.__download_as_blob())))
        retrieved_at = int(datetime.now().timestamp())
        with self.sqltransactions.exclusive(desc) as (connection, execute):
            if self.is_stale(ignore_conflicts=True) is False:
                return  # data was updated while waiting to acquire lock
            self.drop(connection=connection)
            execute(f"""INSERT INTO `{self.table}`
                (`identifier`,`blob`,`timestamp`,`retrieved_at`)
                VALUES(?,?,?,?)""", [
                self.identifier, blob, self.timestamp, retrieved_at])
            msg = f"Inserted new blob into {self.table}"
            GeneFabLogger.info(f"{msg}:\n  {self.identifier}")
Example #20
    def dump_wkb(self):
        # geometry memoryview to sqlite3 db blob
        # py2: db requires buffer, py3: db requires memoryview
        if self._wkb:
            # use existing wkb
            wkb_mem = self._wkb
        elif self._shp is not None:
            # if no wkb, then dump from shp
            if self._shp.is_empty:
                return None
            wkb_mem = memoryview(self._shp.wkb)
        else:
            raise Exception('Geometry must have _wkb or _shp, but has neither')

        if PY2:
            wkb_mem = buffer(wkb_mem.tobytes())
        return Binary(wkb_mem)
Example #21
def compressXelapedia(source, dest):
    createXelapedia(dest)

    # connect with the database with the compressed articles
    con = sqlite.connect(dest)
    con.text_factory = str

    # connect the database with the uncompressed articles
    con.execute('ATTACH ? AS source', (source, ))

    # update the configuration
    con.execute("UPDATE config SET value='lzma' WHERE key='type'")

    # empty the destination database
    con.execute('DELETE FROM articles')
    con.execute('DELETE FROM titles')
    con.execute('DELETE FROM redurects')

    # copy the titles
    con.execute('INSERT INTO titles(title, article_id) ' +
                'SELECT title, article_id FROM source.titles')
    con.commit()

    # we don't need the table attached directly anymore
    con.execute('DETACH source')
    conSource = sqlite.connect(source)
    conSource.text_factory = str

    # now copy and compress the articles

    #con.create_function('compress', 1, compressFunction)
    #con.execute('INSERT INTO articles(id, contents) ' +
    #  'SELECT id, compress(contents) FROM source.articles ORDER BY id')
    cur = conSource.execute('SELECT id, contents FROM articles ORDER BY id')
    for id, uncompressed in cur:
        compressed = Binary(compress(uncompressed))
        con.execute('INSERT INTO articles(id, contents) VALUES(?,?)', (
            id,
            compressed,
        ))

        stdout.write('.')
        stdout.flush()
        con.commit()
Example #22
def sqlite_geoms():
    print('load shapefile')
    t = time()
    #data = pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\global_urban_extent_polygons_v1.01.shp", encoding='latin')
    #data = pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\atlas_urban.geojson", encoding='latin')
    data = pg.VectorData(
        r"C:\Users\kimok\Desktop\gazetteer data\raw\global_settlement_points_v1.01.shp",
        encoding='latin')
    #data = pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\ne_10m_admin_0_countries.shp", encoding='latin')
    print(time() - t)

    print('making shapely')
    t = time()
    # crucial speedup: build shapely geometries with shape(), not asShape(),
    # which wraps the source coordinates in an indirect proxy
    shapelys = [shape(f.geometry) for f in data]
    print(time() - t)

    print('dump wkb')
    t = time()
    wkbs = [s.wkb for s in shapelys]
    print(time() - t)

    print('convert to binary')
    from sqlite3 import Binary
    t = time()
    blobs = [Binary(wkb) for wkb in wkbs]
    print(time() - t)

    print('insert wkb into db')
    fields = ['ID', 'geom']
    typs = ['int', 'BLOB']
    w = Writer('testgeodb::data', fields=zip(fields, typs), replace=True)
    t = time()
    for i, blb in enumerate(blobs):
        w.add([i, blb])
    print(time() - t)

    print('load wkb from db')
    t = time()
    shapelys = [wkb_loads(bytes(blb)) for ID, blb in w.select('*')]
    print(time() - t)
Example #23
def rast_to_wkb(rast):
    # raster to wkb buffer
    wkb = rast.wkb
    buf = Binary(wkb)
    return buf
Example #24
    def test_execute_query(self):
        passwords = {'pwd1': Binary(b'alb'), 'pwd2': Binary(b'ing'), 'pwd3': Binary(b'dan'), 'pwd4': Binary(b'sus')}
        self.assertIsNone(execute_query(self.connection, queries['add_users'], passwords))
        self.assertIsNone(execute_query(self.connection, queries['add_messages']))
Example #25
def get_pickle(data):
    mypickle = pickle.dumps(data, protocol=2)
    mybinary = Binary(mypickle)
    return mybinary
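Reading such a pickled blob back is symmetric; a short sketch:

def from_pickle(blob):
    # bytes() normalizes a memoryview or buffer back to bytes before unpickling
    return pickle.loads(bytes(blob))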
Example #26
    def _dict_to_blob(self, d):
        d = json.dumps(d).encode('utf-8')
        return Binary(d)
Example #27
def shapely_to_wkb(shp):
    # shapely to wkb buffer
    wkb = shp.wkb
    buf = Binary(wkb)
    return buf
Example #28
def geoj_to_wkb(geoj):
    # geojson to wkb buffer
    wkb = shape(geoj).wkb
    buf = Binary(wkb)
    return buf
Example #29
def _write_uuid_to_sqlite3(uuid_):
    #return buffer(uuid_.bytes_le)
    return Binary(uuid_.bytes_le)
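The stored bytes reconstruct the UUID on the way out. A sketch of the matching reader, assuming the blob arrives as bytes from sqlite3:

import uuid

def _read_uuid_from_sqlite3(blob):
    # bytes_le round-trips through the little-endian constructor argument
    return uuid.UUID(bytes_le=bytes(blob))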
Example #30
def _write_numpy_to_sqlite3(arr):
    out = io.BytesIO()
    np.save(out, arr)
    out.seek(0)
    #return buffer(out.read())
    return Binary(out.read())
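And the matching read side, a sketch using np.load to parse the .npy header that np.save wrote:

import io
import numpy as np

def _read_numpy_from_sqlite3(blob):
    return np.load(io.BytesIO(bytes(blob)))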