class Compilation_error_db:
    """In-memory cache of compilation results, keyed by program source text.

    `db_path` is accepted for backward compatibility but the store is kept
    in memory: loading the db from disk freezes the process (original note).
    """

    # Commit the backing store every COMMIT_INTERVAL newly-cached records.
    COMMIT_INTERVAL = 250

    def __init__(self, db_path=''):
        self.db_path = db_path
        # NOTE: intentionally in-memory — loading db from databases freezes the process!!
        self.store = UnQLite()
        self.hits = 0             # cache hits
        self.misses = 0           # cache misses (fresh compilations)
        self.uncommited_recs = 0  # records added since last commit

    # keeping prog_id for backward compatibility
    def get_errors(self, prog_id, prog):
        """Return the list of compilation errors for `prog`, caching the raw
        error message so repeated programs are not recompiled."""
        if prog in self.store:
            err_msg = self.store[prog]
            errs = get_error_list(err_msg)
            self.hits += 1
        else:
            errs, err_msg = compilation_errors(prog)
            self.store[prog] = err_msg
            self.misses += 1
            self.uncommited_recs += 1
            # Periodic commit so a crash loses at most COMMIT_INTERVAL records.
            # (fix: dropped the redundant `> 0` guard — the counter was just
            # incremented, so it is always positive here)
            if self.uncommited_recs % self.COMMIT_INTERVAL == 0:
                self.commit()
        return errs

    def close(self):
        """Close the underlying store."""
        self.store.close()

    def commit(self):
        """Flush pending records to the store.

        fix: removed the unused local `cnt` the original assigned here.
        """
        self.uncommited_recs = 0
        self.store.commit()

    def __len__(self):
        return len(self.store)
def consolidateToDB(self):
    """Persist every (key, value) pair of self.timeMap into the 'times'
    collection of the UnQLite database at DB_PATH."""
    database = UnQLite(DB_PATH)
    collection = database.collection("times")
    # Create the collection on first use.
    if not collection.exists():
        collection.create()
    # Store each timing entry as a single-key record.
    for key, value in self.timeMap.items():
        collection.store({key: value})
    database.commit()
def index_by(index_dir: str,
             index_extension: str,
             data_iter: iter,
             key_fn: Callable,
             value_fn: Callable,
             checkpoint: int,
             object_name: str):
    """
    Generate UnQLite data indices for each entity.

    :param index_dir: index directory
    :param index_extension: index file extension
    :param data_iter: iterable on data; each item is a dict with an '@type' key
    :param key_fn: function to use on data to get the index key
    :param value_fn: function to use on data to get the index value
    :param checkpoint: commit every index after this many records
    :param object_name: label used in checkpoint log messages
    :return: dict of index paths by entity name
    """
    count = 0
    index_path_by_entity = {}
    index_by_entity = {}
    for data in data_iter:
        entity = data['@type']
        # Lazily open one index (with an open transaction) per entity.
        if entity not in index_path_by_entity:
            index_path = get_file_path([index_dir, entity], ext=index_extension)
            index_path_by_entity[entity] = index_path
            new_index = UnQLite(index_path)
            new_index.begin()
            index_by_entity[entity] = new_index
        # fix: single lookup (the original re-read the dict it had just set)
        index = index_by_entity[entity]

        # Index
        index[str(key_fn(data))] = value_fn(data)
        count += 1

        # Log
        if count % 50000 == 0:
            print(f'checkpoint: {count} {object_name}')

        # Checkpoint: flush every open index and restart its transaction.
        # fix: loop variable renamed so it no longer shadows `index` above.
        if count % checkpoint == 0:
            for idx in index_by_entity.values():
                idx.commit()
                idx.begin()
            print(f'checkpoint: {count} {object_name}')

    # Close indices
    for idx in index_by_entity.values():
        idx.commit()
        idx.close()

    # Output all indices
    return index_path_by_entity
class PyCachEngine:
    """Drives a UCI chess engine via subprocess pipes and caches its best
    move per position in an UnQLite database, keyed by the position FEN
    with the move counters stripped.

    NOTE: the constructor enters an endless self-play loop and never
    returns — instantiation IS the program.
    """

    def __init__(self, path, db_path, options=dict()):
        self.board = Board()
        self.db = UnQLite(db_path)
        self.engine = Popen(path, universal_newlines=True,
                            stdin=PIPE, stdout=PIPE)
        self._put('uci')
        self._ready()
        for option, val in options.items():
            self._set_option(option, val)
        self.num_games = 1
        # Endless self-play: reset the board and learn a full game, forever.
        while True:
            self.board.reset()
            self.learn(200)

    def __del__(self):
        self.db.close()
        self.engine.kill()

    def _put(self, line):
        """Send one line to the engine's stdin."""
        if not self.engine.stdin:
            raise BrokenPipeError()
        self.engine.stdin.write(line + '\n')
        self.engine.stdin.flush()

    def _read(self):
        """Read one stripped line from the engine's stdout."""
        if not self.engine.stdout:
            raise BrokenPipeError()
        return self.engine.stdout.readline().strip()

    def _ready(self):
        """Block until the engine answers 'readyok'."""
        self._put('isready')
        while self._read() != 'readyok':
            continue

    def _bestmove(self):
        """Return (move, depth) from the engine's search output.

        fix: `depth` is initialised to 0 — it was unbound (NameError) when
        the engine printed 'bestmove' before any 'info depth' line.
        """
        depth = 0
        while True:
            line = self._read()
            if 'depth' in line:
                depth = int(line.split()[2])
            if 'bestmove' in line:
                move = line.split()[1]
                return (move, depth)

    def _set_option(self, option, value):
        self._put(f'setoption option {option} value {value}')

    def _store(self, new_fen, move, depth):
        """Store (move, depth) for `new_fen`, keeping the deepest analysis.

        fix: parses the stored value with ast.literal_eval instead of
        eval() — the db contents should not be executed as code.
        """
        from ast import literal_eval
        with self.db.transaction():
            if new_fen in self.db:
                _move, _depth = literal_eval(self.db[new_fen].decode('utf-8'))
                print(_move, _depth)
                if int(_depth) >= depth:
                    return  # existing entry is at least as deep; keep it
            self.db[new_fen] = (move, depth)
            self.db.commit()

    def learn(self, movetime):
        """Play out the current game, analysing each position for
        `movetime` ms and caching the result.

        fix: iterative instead of self-recursive — the original recursed
        once per ply and risked RecursionError on very long games.
        """
        while True:
            fen = self.board.fen()
            # Strip halfmove/fullmove counters so transpositions share a key.
            new_fen = ' '.join(fen.split()[:-2])
            self._put(f'position fen {fen}')
            self._put(f'go movetime {movetime}')
            move, depth = self._bestmove()
            self.board.push_uci(move)
            self._store(new_fen, move, depth)
            system('clear')
            print(self.board)
            print()
            print('new_fen:', new_fen)
            print('depth:', depth)
            print('move:', move)
            print('db_size:', len(self.db))
            print('num_games:', self.num_games)
            if self.board.is_game_over():
                result = self.board.outcome().result()
                self.num_games += 1
                print(result)
                break
class Settings(metaclass=Singleton):
    """Singleton application settings backed by an UnQLite key/value file.

    If the backing file already contains a master password hash, the app's
    INIT_STATE is advanced to 2 on construction.
    """

    def __init__(self, filename=None):
        assert filename is not None
        if not os.path.exists(filename):
            # fix: derive the parent directory portably. The original slice
            # filename[:filename.rfind('/')] dropped the last character of
            # the whole path when it contained no '/' (rfind returns -1)
            # and did not handle Windows separators.
            Path(filename).parent.mkdir(parents=True, exist_ok=True)
        self.__db = UnQLite(filename)
        if self.__db_get('master_password_hash'):
            current_app.config['INIT_STATE'] = 2

    def write(self):
        """Flush pending changes to disk."""
        self.__db.commit()

    # NOTE(review): UnQLite typically returns bytes for stored values — the
    # `-> str` annotations below mirror the original; confirm against callers.
    @property
    def master_password_hash(self) -> str:
        return self.__db_get('master_password_hash')

    @master_password_hash.setter
    def master_password_hash(self, v: str):
        self.__db['master_password_hash'] = v

    @property
    def master_password_hash_salt(self) -> str:
        return self.__db_get('master_password_hash_salt')

    @master_password_hash_salt.setter
    def master_password_hash_salt(self, v: str):
        self.__db['master_password_hash_salt'] = v

    @property
    def ethereum_address(self) -> str:
        return self.__db_get('ethereum_address')

    @ethereum_address.setter
    def ethereum_address(self, v: str):
        self.__db['ethereum_address'] = v

    @property
    def blockchain_length(self) -> int:
        return int(self.__db_get('blockchain_length', 0))

    @blockchain_length.setter
    def blockchain_length(self, v: int):
        # Stored as a string; read back through int() above.
        self.__db['blockchain_length'] = str(v)

    @property
    def blockchain(self) -> list:
        return json.loads(self.__db_get('blockchain', '[]'))

    @blockchain.setter
    def blockchain(self, v: list):
        self.__db['blockchain'] = json.dumps(v)

    def __del__(self):
        self.__db.close()

    def __db_get(self, key, default=None):
        """Return the stored value for `key`, or `default` if absent."""
        if key in self.__db:
            return self.__db[key]
        return default
class UnQLiteDatabase():
    """UnQLite database wrapper.

    Parameters
    ----------
    location : string
        Path to store the database file. If not given, make in-memory database.
    """

    def __init__(self, location=None):
        try:
            from unqlite import UnQLite
        except ImportError:
            raise ImportError(
                "The unqlite library is required for this feature.")

        self.location = location
        # fix: isinstance() instead of type(...) == str
        if isinstance(self.location, str) and len(self.location) > 0:
            logger.debug("Connecting to database at {}".format(
                os.path.abspath(location)))
            self.db = UnQLite(self.location)
        else:
            # in-memory database
            logger.debug("Creating an in-memory database.")
            self.db = UnQLite()
        self.collections = dict()

    def add_collection(self, name):
        """Add collection to database and create it if it doesn't yet exist.

        Parameters
        ----------
        name : string
            Collection name.
        """
        if name in self.collections:
            # assume already exists
            return
        collection = self.db.collection(name)
        # fix: `not ...` instead of `... is False`
        if not collection.exists():
            # does not exist at all yet
            collection.create()
            logger.debug("({}) Created collection {}".format(
                self.location, name))
            self._commit()
        self.collections[name] = collection

    def _get_collection(self, name):
        """Get collection with name from database, creating it on demand.

        Parameters
        ----------
        name : string
            Collection name.

        Returns
        -------
        Collection
        """
        if name not in self.collections:
            self.add_collection(name)
        return self.collections[name]

    def add_row(self, collection, row):
        """Add a single row to collection."""
        self.add_rows(collection, [row])

    def add_rows(self, collection, rows):
        """Adds rows to collection.

        Parameters
        ----------
        collection : string
            Collection name.
        rows : list of dicts
            Rows to store.
        """
        coll = self._get_collection(collection)
        coll.store(rows)
        self._commit()

    def filter_rows(self, collection, filt):
        """Returns the rows matching filter.

        Parameters
        ----------
        collection : string
            Collection name.
        filt : function(row) -> bool
            Filter function that returns True for items to return.

        Returns
        -------
        List of matching rows
        """
        coll = self._get_collection(collection)
        return coll.filter(filt)

    def _commit(self):
        """Commits changes to database, retries few times if database locked.

        fix: bare `except:` narrowed to `except Exception:` (the original
        also swallowed SystemExit/KeyboardInterrupt), and the failure
        warning — unreachable after the original `while True` loop — is
        now emitted on the path that actually gives up and re-raises.
        """
        maxtries = 10
        while True:
            try:
                self.db.commit()
                return
            except Exception:
                if maxtries < 1:
                    logger.warning("({}) Database error: could not commit!".format(
                        self.location))
                    raise
                self.db.rollback()
                delay = max(0.5, min(random.expovariate(3.0), 10.0))
                logger.debug("({}) Database locked, waiting {:.1f}s..".format(
                    self.location, delay))
                time.sleep(delay)
                maxtries -= 1
print "Task Submitted =>", pn, '->', pn + TWEET_CHUNK_SIZE, "<=" _tweet(list(tweet_id_list), pn) pn += TWEET_CHUNK_SIZE db[input_file] = pn save_ct += 1 if save_ct % 50 == 0: db.commit() if not all(t.isAlive() for t in threads): raise Exception("Threads are dead.") print "Finished." except KeyboardInterrupt, e: stop_evt.set() print "Stopped." stop_flag = True finally: db.commit() print "Progress Saved." return not stop_flag if __name__ == "__main__": import random db.begin() tokens = [ map(tk.__getitem__, ('consumer_key', 'consumer_secret', 'access_token', 'access_secret')) for tk in json.load(open('config/tokens.json')) ] for f in glob('llt/twitter-events-2012-2016/*.ids'): print f if not retrieve_tweets(