def write():
    """Seed the database with one story per unique first line.

    Reads module globals DB_FILE, FIRST_LINES and MAX_LINES. Creates the
    'users' and 'stories' collections if missing, then stores a fresh
    unlocked story for each first line that no existing story starts with.
    """
    db = UnQLite(DB_FILE)
    with db.transaction():
        db.collection('users').create()
    # Bug fix: the collection handle was fetched and create() called on
    # every loop iteration; create it once up front instead.
    stories = db.collection('stories')
    with db.transaction():
        stories.create()
    for line in FIRST_LINES:
        with db.transaction():
            # Are there stories that have the same first line?
            same_first_line = stories.filter(
                lambda story: story['lines'][0].get('text') == line)
            if same_first_line:
                continue
            stories.store([{
                "max_lines": MAX_LINES,
                "locked": False,
                "locked_by": None,
                "locked_at": None,
                "lines": [{"text": line}],
            }])
def getTimesFromDB(self, dbPath):
    """Load every record from the 'times' collection into self.dbData.

    Parameters
    ----------
    dbPath : string
        Path of the UnQLite database file to read.

    Returns
    -------
    self, to allow chaining.

    Raises
    ------
    Exception
        If the 'times' collection has never been created.
    """
    # Bug fix: the dbPath argument was accepted but ignored — the method
    # always opened the module-level DB_PATH instead.
    db = UnQLite(dbPath)
    times = db.collection("times")
    if not times.exists():
        raise Exception("No data to retrieve")
    self.dbData = times.all()
    return self
def __init__(
    self,
    *,
    mydomains: List[str],
    local_delivery_handler: LocalDeliveryHandler,
    database: UnQLite,
    smtpd_auth_handler: SMTPAuthHandler,
    hostname: str,
    self_name: str = "mailboat.transfer_agent",
    smtpd_port: int = 8025,
    custom_queue: Optional[EmailQueue] = None,
) -> None:
    """Wire up the transfer agent: message queue, SMTP server, delivery loop.

    Keyword-only arguments:
        mydomains: domains this agent considers its own.
        local_delivery_handler: handler invoked for local delivery.
        database: UnQLite database backing the default message queue.
        smtpd_auth_handler: authentication handler for the SMTP server.
        hostname: host the SMTP controller binds to.
        self_name: agent name; also prefixes the queue collection name.
        smtpd_port: listening port for the SMTP controller (default 8025).
        custom_queue: optional pre-built queue; when None a queue backed by
            the UnQLite collection "<self_name>.queue" is created.
    """
    self.mydomains = mydomains
    self.database = database
    self.name = self_name
    self.hostname = hostname
    # Fall back to an UnQLite-backed queue stored in "<self_name>.queue".
    self.queue = (custom_queue
                  if custom_queue else UnQLiteEmailMessageQueue(
                      database.collection("{}.queue".format(self_name))))
    self.delivery_tasks: List[DeliveryTask] = []
    # SMTP server; incoming messages are routed to self.handle_message.
    self.smtpd_controller = Controller(
        _SMTPDHandler(
            self.handle_message,
            smtp_auth_handler=smtpd_auth_handler,
        ),
        port=smtpd_port,
        hostname=hostname,
    )
    self.local_delivery_handler = local_delivery_handler
    # Background coroutine that drains the queue and performs deliveries.
    self._task_deliveryman = asyncio.ensure_future(
        self._cothread_deliveryman())
def consolidateToDB(self):
    """Persist the accumulated self.timeMap into the 'times' collection.

    Opens the database at the module-level DB_PATH, creates the collection
    on first use, stores one record per (key, value) pair and commits.
    """
    db = UnQLite(DB_PATH)
    times = db.collection("times")
    if not times.exists():
        times.create()
    # Iterate key/value pairs directly instead of re-indexing the dict
    # for every key (for t in d.keys(): d[t]).
    for key, value in self.timeMap.items():
        times.store({key: value})
    db.commit()
class UnQDb:
    """NoSQL data store based on UnQLite (values are dict-literal strings)."""

    def __init__(self, dbpath='data.db'):
        # Open (or create) the database file at dbpath.
        self.db = UnQLite(dbpath)
        self.dbpath = dbpath

    def __del__(self):
        self.db.close()

    def add(self, key, value):
        """Store value under key, e.g. add('2eas', {...})."""
        self.db[key] = value

    def reload(self):
        """Re-open the database from self.dbpath."""
        self.db = UnQLite(self.dbpath)

    def get(self, key):
        """Fetch key and parse the stored text back into a Python object.

        Returns None when the key is missing or the value cannot be
        decoded/parsed.
        """
        try:
            raw = str(self.db[key], "utf-8")
            return ast.literal_eval(raw)
        # Narrowed from a bare `except:`: missing key (KeyError),
        # undecodable bytes, or a value that is not a Python literal.
        except (KeyError, UnicodeDecodeError, ValueError, SyntaxError):
            return None

    def get_all(self):
        """Yield (key, parsed_value) pairs for every record."""
        with self.db.cursor() as cursor:
            for key, value in cursor:
                yield key, self.get(key)

    def delete(self, key):
        """Remove key from the database."""
        self.db.delete(key)

    def col(self, key):
        """Return the named collection, creating it if it does not exist.

        Bug fix: the original assigned the collection to `self.col`,
        replacing this method with the collection object on first call
        and making every subsequent `col()` call raise TypeError.
        """
        collection = self.db.collection(key)
        collection.create()  # no-op if the collection already exists
        return collection
def insert_unqlite_items(number):
    """Store `number` sample records into the 'items' collection of tmp.unqlite."""
    database = UnQLite('tmp.unqlite')
    collection = database.collection('items')
    collection.create()
    for index in xrange(number):
        record = {'a': str(index), 'b': '2', 'c': '3'}
        # One store() call per record, matching the original write pattern.
        collection.store([record])
def get_db(source='orders', suffix='', directory=''):
    """Open a file-backed UnQLite collection, creating path and collection.

    Parameters
    ----------
    source : string
        Base name of the collection (and database file).
    suffix : string
        Optional suffix appended to the collection name.
    directory : string
        Directory that holds the .db file; created if missing.

    Returns
    -------
    (collection, db) tuple.
    """
    col_path = source + suffix
    base_path = directory
    if not base_path.endswith('/'):
        base_path = base_path + '/'
    if not os.path.isdir(base_path):
        # Bug fix: os.mkdir fails when intermediate directories are
        # missing; makedirs creates the whole chain.
        os.makedirs(base_path)
    # File-backed database (the original comment wrongly said in-memory).
    db = UnQLite(base_path + col_path + '.db')
    col = db.collection(col_path)
    col.create()
    return col, db
def __init__(self, master, local): """define UnQLite database""" db = UnQLite() """ define collection where we'll insert local files content""" self.db_local = db.collection("db_local") self.db_local.create() """ define collection where we'll insert master files content""" self.db_master = db.collection("db_master") self.db_master.create() with open(local) as bibtex_file: bibtex_str = bibtex_file.read() """create bibtex database for local file""" self.bibdb_local = bibtexparser.loads(bibtex_str) self.db_local.store(self.bibdb_local.entries) with open(master) as bibtex_file: bibtex_str = bibtex_file.read() """create bibtex database for master file""" bibdb_master = bibtexparser.loads(bibtex_str) self.db_master.store(bibdb_master.entries)
def __init__( self, configFileName, dbPath, timeout ):
    """Read nameservers from a resolv.conf-style file and set up the history DB.

    configFileName -- file listing 'nameserver a.b.c.d' lines
    dbPath         -- directory that holds (or will hold) history.db
    timeout        -- DNS query lifetime/timeout in seconds
    """
    self.dnsArray = list()
    self.responseTimes = dict()
    # Abort immediately if the config file cannot be read.
    if os.access( configFileName, os.R_OK ) is False:
        print "Can not read "+configFileName+" !"
        exit(-1)
    # Create the database directory on first run.
    if os.access( dbPath, os.F_OK ) is False:
        print "Created DB Directory at "+dbPath
        os.mkdir( dbPath )
    db = UnQLite( dbPath+"/history.db" )
    self.dnsDB = db.collection('dnsResponses')
    if ( self.dnsDB.exists() is False ):
        print "Initial DB Created."
        self.dnsDB.create()
    # Dedicated resolver with the caller's timeout; queries made through
    # dns.resolver bypass the system default from here on.
    self.recursor = dns.resolver.Resolver()
    self.recursor.lifetime = timeout
    self.recursor.timeout = self.recursor.lifetime
    dns.resolver.override_system_resolver(self.recursor)
    with open(configFileName) as configFile:
        contents = configFile.readlines()
    # Matches 'nameserver' followed by a dotted quad whose octets are 0-255;
    # the four groups capture the individual octets.
    validNSRegex = re.compile(r'nameserver (1[0-9][0-9]|2[0-4][0-9]|25[0-5]|[0-9][0-9]|[0-9])\.(1[0-9][0-9]|2[0-4][0-9]|25[0-5]|[0-9][0-9]|[0-9])\.(1[0-9][0-9]|2[0-4][0-9]|25[0-5]|[0-9][0-9]|[0-9])\.(1[0-9][0-9]|2[0-4][0-9]|25[0-5]|[0-9][0-9]|[0-9])')
    for line in contents:
        match = validNSRegex.search(line)
        if ( match is not None ):
            self.dnsArray.append(match.group(1)+"."+match.group(2)+"."+match.group(3)+"."+match.group(4))
            print "=> Added "+match.group(1)+"."+match.group(2)+"."+match.group(3)+"."+match.group(4)+" into the list."
    # NOTE(review): raises IndexError when no nameserver lines matched —
    # confirm the config file is guaranteed to contain at least one.
    self.currentPreferredDNS = self.dnsArray[0]
    return
# coding: utf-8 # In[12]: from unqlite import UnQLite db = UnQLite('sample.db') data = db.collection('data') # In[13]: # Graded Cell, PartID: o1flK def FindBusinessBasedOnCity(): pass def FindBusinessBasedOnLocation(): pass # In[1]: true_results = [ "VinciTorio's Restaurant$1835 E Elliot Rd, Ste C109, Tempe, AZ 85284$Tempe$AZ", "P.croissants$7520 S Rural Rd, Tempe, AZ 85283$Tempe$AZ", "Salt Creek Home$1725 W Ruby Dr, Tempe, AZ 85284Tempe$AZ" ] opf = '' try:
class UnQLiteTest(unittest.TestCase):
    """Tests for the bottle UnQLite plugin."""

    def setUp(self):
        # Fresh bottle app with the plugin bound to a temp database file.
        self.app = bottle.Bottle(catchall=False)
        _, filename = tempfile.mkstemp(suffix='.unqlite')
        self.plugin = self.app.install(unqlite.Plugin(filename=filename))
        # Pre-create the 'todo' collection, then close so routes reopen it.
        self.conn = UnQLite(filename)
        self.conn.collection('todo').create()
        self.conn.close()

    def tearDown(self):
        pass
        # os.unlink(self.plugin.filename)

    def test_with_keyword(self):
        # A route argument named 'db' receives an UnQLite connection.
        @self.app.get('/')
        def test(db):
            self.assertEqual(type(db), type(UnQLite(':mem:')))
        self._request('/')

    def test_without_keyword(self):
        # Routes without a 'db' argument must be left untouched.
        @self.app.get('/')
        def test_1():
            pass
        self._request('/')

        @self.app.get('/2')
        def test_2(**kw):
            self.assertFalse('db' in kw)
        self._request('/2')

    def test_install_conflicts(self):
        # Two plugin instances may coexist under different keywords.
        self.app.install(unqlite.Plugin(keyword='db2'))

        @self.app.get('/')
        def test(db, db2):
            pass
        # I have two plugins working with different names
        self._request('/')

    def test_commit_on_redirect(self):
        # A bottle redirect must still commit pending writes.
        @self.app.get('/')
        def test(db):
            self._insert_into(db)
            bottle.redirect('/')
        self._request('/')
        self.assert_records(1)

    def test_commit_on_abort(self):
        # A bottle abort must roll pending writes back.
        @self.app.get('/')
        def test(db):
            self._insert_into(db)
            bottle.abort()
        self._request('/')
        self.assert_records(0)

    def _request(self, path, method='GET'):
        # Drive the WSGI app directly; the response body is discarded.
        return self.app({
            'PATH_INFO': path,
            'REQUEST_METHOD': method
        }, lambda x, y: None)

    def _insert_into(self, db):
        db.collection('todo').store({'task': 'PASS'})

    def assert_records(self, count):
        # Reopen the database to observe what was actually committed.
        self.conn.open()
        actual_count = len(self.conn.collection('todo').all())
        self.conn.close()
        self.assertEqual(count, actual_count)
# -*- coding: utf-8 -*- import hashlib import os from unqlite import UnQLite import exifread import datetime db = UnQLite('./memories.db') pictures = db.collection('pictures') pictures.create() pictures.store({'name': 'Leslie', 'color': 'also green'}) print pictures.fetch(0) if __name__ == '__main__': for dirpath, dirs, files in os.walk('/home/stephan/Images/2015'): for filename in files: with open(os.path.join(dirpath, filename), 'rb') as fname: exif = exifread.process_file(fname, details=False) photo_date = datetime.datetime.strptime(exif['EXIF DateTimeOriginal'].values, '%Y:%m:%d %H:%M:%S') resolution = (exif['EXIF ExifImageWidth'].values[0], exif['EXIF ExifImageLength'].values[0]) inserted = modified = datetime.datetime.now() md5 = hashlib.md5(fname.read()).hexdigest() print filename print photo_date print resolution print inserted print modified print md5
#Basic udb['a'] = '1' #like a dict content = ['%s=%s' % item for item in udb] #db iterable print(content) #Cursor C = udb.cursor() content2 = ['%s=%s' % (k, v) for k, v in C] #iter differently C.seek('a') print(C.value()) #seems not only an iteritor, it can go backward #VM #Collecton -- Storage dicts in list (I think...) usr = udb.collection('usr') usr.create() #this is necesary u1 = {'name': 'neet', 'uuid': 'D7B810FD'} #serialize and save ''' ('usr', b'a\x1e\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02L\\\x0b\xb5') ('usr_0', '\x01\x08\x00\x00\x00\x04name\x05\x08\x00\x00\x00\x04neet\x06\x08\x00\x00\x00\x04uuid\x05\x08\x00\x00\x00\x08D7B810FD\x06\x08\x00\x00\x00\x04__id\x05\n\x00\x00\x00\x00\x00\x00\x00\x00\x06\x02') ('usr_1', '\x01\x08\x00\x00\x00\x04name\x05\x08\x00\x00\x00\x05limbo\x06\x08\x00\x00\x00\x04uuid\x05\x08\x00\x00\x00\x08D7B810FC\x06\x08\x00\x00\x00\x04__id\x05\n\x00\x00\x00\x00\x00\x00\x00\x01\x06\x02') Well, 'collection' is also serialized ''' usr.store(u1) n = usr.filter(lambda o: o['name'] == 'neet') #filter? seems doesn't have searching function #but i can give it that~ print(n)
__author__ = 'James Veitch'

'''
Example to connect and download all issues on the remote server
to a local cache nosql database using UnQLite
'''

from unqlite import UnQLite
from pprint import pprint

import pythreesixty.core.mobile as mobile

db = UnQLite()  # Creates an in-memory database.
issues = db.collection('issues')
issues.create()  # Create the collection if it does not exist.

projects = mobile.projects()
print("User has access to {count} projects".format(count=len(projects.json())))

for p in projects.json():
    print("\n{id}: {name}".format(
        id=p['project_id'],
        name=p['name'])
    )

    # Get issues
    issues_request = mobile.get_issues(p['project_id'], count_only=True)  # use count_only to just return the number
    if issues_request.status_code == 200:
        print(" - {count} issues found.".format(count=issues_request.json()['count']))
        # NOTE(review): fetches the full issue list in a second request and
        # stores it verbatim; assumes that second call also succeeds.
        issues.store(mobile.get_issues(p['project_id']).json())
class UnQLiteDatabase():
    """UnQLite database wrapper.

    Parameters
    ----------
    location : string
        Path to store the database file. If not given, make in-memory
        database.
    """

    def __init__(self, location=None):
        try:
            from unqlite import UnQLite
        except ImportError:
            raise ImportError(
                "The unqlite library is required for this feature.")
        self.location = location
        # isinstance replaces the non-idiomatic `type(...) == str` check.
        if isinstance(self.location, str) and len(self.location) > 0:
            logger.debug("Connecting to database at {}".format(
                os.path.abspath(location)))
            self.db = UnQLite(self.location)
        else:
            # in-memory database
            logger.debug("Creating an in-memory database.")
            self.db = UnQLite()
        # Cache of collection handles, keyed by collection name.
        self.collections = dict()

    def add_collection(self, name):
        """Add collection to database and create it if it doesn't yet exist.

        Parameters
        ----------
        name : string
            Collection name.
        """
        if name in self.collections:
            # assume already exists
            return
        collection = self.db.collection(name)
        if collection.exists() is False:
            # does not exist at all yet
            collection.create()
            logger.debug("({}) Created collection {}".format(
                self.location, name))
            self._commit()
        self.collections[name] = collection

    def _get_collection(self, name):
        """Get collection with name from database, creating it when missing.

        Parameters
        ----------
        name : string
            Collection name.

        Returns
        -------
        Collection
        """
        if name not in self.collections:
            self.add_collection(name)
        return self.collections[name]

    def add_row(self, collection, row):
        """Add a single row (dict) to the named collection."""
        self.add_rows(collection, [row])

    def add_rows(self, collection, rows):
        """Adds rows to collection.

        Parameters
        ----------
        collection : string
            Collection name.
        rows : list of dicts
            Rows to store.
        """
        coll = self._get_collection(collection)
        coll.store(rows)
        self._commit()

    def filter_rows(self, collection, filt):
        """Returns the rows matching filter.

        Parameters
        ----------
        collection : string
            Collection name.
        filt : function(row) -> bool
            Filter function that returns True for items to return.

        Returns
        -------
        List of matching rows
        """
        coll = self._get_collection(collection)
        return coll.filter(filt)

    def _commit(self):
        """Commits changes to database, retries few times if database locked."""
        maxtries = 10
        while True:
            try:
                self.db.commit()
                return
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are not swallowed by the retry loop.
            except Exception:
                if maxtries < 1:
                    # Bug fix: this warning previously sat after the loop
                    # and was unreachable; emit it before surfacing the
                    # original commit error.
                    logger.warning(
                        "({}) Database error: could not commit!".format(
                            self.location))
                    raise
                self.db.rollback()
                delay = max(0.5, min(random.expovariate(3.0), 10.0))
                logger.debug("({}) Database locked, waiting {:.1f}s..".format(
                    self.location, delay))
                time.sleep(delay)
                maxtries -= 1
def get_collection(collection):
    """Return the named collection from db_file, creating it on first use."""
    database = UnQLite(db_file)
    handle = database.collection(collection)
    if not handle.exists():
        handle.create()
    return handle
class XlsxReader(object):
    """Read an .xlsx workbook into Python dicts and an in-memory UnQLite store."""

    def __init__(self, filepath):
        self.filepath = filepath
        self.name = os.path.basename(filepath)
        # In memory JSON store
        self.db = UnQLite()
        self.workbook = self.load_from_file(filepath)
        self.data = self.extract_as_python()
        self.to_db()

    def __repr__(self):
        return '<XlsxReader: {}>'.format(self.name)

    @staticmethod
    def load_from_file(filepath):
        # read_only: optimised read, data_only: ignores formulae
        return load_workbook(filename=filepath, read_only=True,
                             data_only=True)

    def extract_as_python(self):
        """Convert every worksheet into a list of row dicts keyed by column name."""
        sheets = self.workbook.worksheets
        sheet_data = {}
        columns_data = {}
        for sheet in sheets:
            sheet_title = sheet.title
            rows, columns = self.extract_sheet(sheet)
            sheet_data[sheet_title] = rows
            columns_data[sheet_title] = columns
        # '_columns' records each sheet's column ordering alongside the data.
        sheet_data.update({'_columns': [columns_data]})
        return sheet_data

    def extract_sheet(self, sheet):
        """Return (extracted_rows, columns) for one worksheet."""
        rows = sheet.rows
        # Assume the first row of the sheet is the columns
        column_cells = next(rows)
        columns = [cell.value for cell in column_cells]
        extracted_rows = []
        for row_idx, row in enumerate(rows, start=1):
            row_dict = {'row': row_idx}
            # Index into the columns list.
            # Assumes the order is always the same.
            for column_idx, cell in enumerate(row):
                cell_value = self.extract_cell(cell)
                row_dict[columns[column_idx]] = cell_value
            extracted_rows.append(row_dict)
        return extracted_rows, columns

    def extract_cell(self, cell):
        """
        Workarounds for OpenPyXL live here.
        If the cell value has a comma, we assume it is a list and split it.
        All dates in Excel are TZ naive, so add one.
        """
        # TODO: test me directly
        val = cell.value
        if isinstance(val, unicode):
            # Takes 'foo', bar'
            # Returns ['foo', 'bar']
            if ',' in val:
                return [i.lstrip() for i in val.split(',')]
        if isinstance(val, datetime):
            # Just assume everything is UTC for the time being.
            return UTC.localize(val)
        return val

    def to_json(self):
        # json_serialiser handles values the default encoder cannot.
        return json.dumps(self.data, default=json_serialiser)

    def to_db(self):
        # One UnQLite collection per sheet (plus the '_columns' entry).
        self.collections = {
            name: self.create_collection(name, documents)
            for name, documents in self.data.items()
        }

    def create_collection(self, name, documents):
        # Create the collection, sanitise the documents, then store them.
        collection = self.db.collection(name)
        collection.create()
        docs = self.pre_store(documents)
        collection.store(docs)
        return collection

    def pre_store(self, documents):
        """
        We can't change UnQLite's json serialiser, so we need to check
        for datetime objs here.
        We drop the 'row' key as it's of no use once stored.
        Also, we try to naively call json.dumps() to raise errors, since
        UnQLite silently stores a None if it can't serialise. Doh!
        """
        checked_docs = []
        for doc in documents:
            checked_doc = {}
            for k, v in doc.items():
                if k == 'row':
                    continue
                if isinstance(v, datetime):
                    checked_doc[k] = v.isoformat()
                    continue
                json.dumps(v)
                checked_doc[k] = v
            checked_docs.append(checked_doc)
        return checked_docs