def with_transaction(cursor: Cursor, closing: bool = True) -> Iterator[Cursor]:
    """Yield *cursor* inside an explicit SQL transaction.

    ``BEGIN TRANSACTION`` is issued before the cursor is handed to the
    caller and ``END TRANSACTION`` afterwards, even if the caller raises.
    When *closing* is true the cursor itself is also closed on exit.
    """
    guard = _closing(cursor) if closing else nullcontext(cursor)
    with guard:
        cursor.execute("BEGIN TRANSACTION")
        try:
            yield cursor
        finally:
            cursor.execute("END TRANSACTION")
def ts_last(self, ns_key):
    """Return timestamp of most recent data point, or None if key is unknown."""
    with _closing(self._conn.cursor()) as cur:
        cur.execute("SELECT MAX(_timestamp) FROM _ts WHERE _key=?", (ns_key,))
        row = cur.fetchone()
    if row is None:
        return None
    return row[0]
def ts_delete(self, ns_key, start, end):
    """Delete data points between *start* and *end* time."""
    statement = "DELETE FROM _ts WHERE _key=%s AND _timestamp>=%s AND _timestamp<=%s"
    with _closing(self._conn.cursor()) as cur:
        cur.execute(statement, (ns_key, start, end))
        self._conn.commit()
def kv_delete(self, *ns_keys):
    """Delete value(s) from store."""
    placeholders = ','.join('?' * len(ns_keys))
    with _closing(self._conn.cursor()) as cur:
        cur.execute(f"DELETE FROM _kv WHERE _key IN ({placeholders})", ns_keys)
        self._conn.commit()
def ts_first(self, ns_key):
    """Return timestamp of oldest data point, or None if key is unknown."""
    with _closing(self._conn.cursor()) as cur:
        cur.execute("SELECT MIN(_timestamp) FROM _ts WHERE _key=?", (ns_key,))
        oldest = cur.fetchone()
        return oldest[0] if oldest is not None else None
def es_events(self, ns_key, start, end, raw_ops):
    """Return [(timestamp,{item:(op,value)})] sorted by ascending timestamp,
    optionally filtered by time and ops."""
    with _closing(self._conn.cursor()) as c:
        # Build the WHERE clause dynamically: the key match is mandatory,
        # the time bounds and the op filter are each optional.
        conds, condv = ['_key=%s'], [ns_key]
        if start is not None:
            conds.append('_timestamp>=%s')
            condv.append(start)
        if end is not None:
            conds.append('_timestamp<=%s')
            condv.append(end)
        if raw_ops:
            # One %s placeholder per requested op.
            conds.append(f"_op IN ({','.join(['%s']*len(raw_ops))})")
            condv.extend(raw_ops)
        # conds always holds at least the key condition, so the empty
        # branch is defensive only.
        wheres = '' if not conds else ' WHERE ' + ' AND '.join(conds)
        c.execute("SELECT _timestamp, _item, _op, _value FROM _es " + wheres
                  + " ORDER BY _timestamp ASC", condv)
        events, tstate = [], None

        def append_event(tstate, rstate):
            # Flush one completed (timestamp, state-dict) group.
            events.append((tstate, rstate))

        # Rows arrive ordered by timestamp; fold consecutive rows that
        # share a timestamp into a single {item: (op, value)} dict.
        for erow in c.fetchall():
            t, item, ro, rv = erow[0], erow[1], bytes(erow[2]), bytes(erow[3])
            if t != tstate:
                if tstate is not None:
                    append_event(tstate, rstate)
                tstate, rstate = t, {}
            rstate[item] = (ro, rv)
        # Flush the trailing group (loop only flushes on timestamp change).
        if tstate is not None:
            append_event(tstate, rstate)
        return events
def es_keys(self):
    """Return [es keys]."""
    # NOTE(review): '_' is a single-character wildcard in LIKE, so this
    # pattern matches slightly more than a literal "namespace__" prefix —
    # confirm the key format rules out false positives.
    query = f"SELECT DISTINCT _key FROM _es WHERE _key LIKE '{self._namespace}__%'"
    with _closing(self._conn.cursor()) as cur:
        cur.execute(query)
        return [row[0] for row in cur.fetchall()]
def ts_extend(self, ns_key, raw_data_points):
    """Add/replace [(timestamp,raw value)] to time series."""
    with _closing(self._conn.cursor()) as cur:
        cur.executemany("INSERT OR REPLACE INTO _ts VALUES (?,?,?)",
                        ((ns_key, t, rv) for t, rv in raw_data_points))
        self._conn.commit()
def es_delete(self, ns_key, raw_events):
    """Delete [(timestamp,item,raw_op,_)]."""
    with _closing(self._conn.cursor()) as cur:
        for timestamp, item, raw_op, *_ignored in raw_events:
            cur.execute(
                "DELETE FROM _es WHERE _key=? AND _timestamp=? AND _item=? AND _op=?",
                (ns_key, timestamp, item, raw_op))
        self._conn.commit()
def read(self, pathfilename):
    """Load a sectionless "key = value" settings file into this parser.

    The file content is parsed under ``self._default_section`` by
    prepending a synthetic section header, since RawConfigParser refuses
    option lines that appear before any ``[section]``.
    """
    with _codecs.open(pathfilename, 'r', encoding='utf-8') as input_file:
        config_pairs = input_file.read()
    with _closing(_StringIO(u"[{0}]{1}{2}".format(self._default_section,
                                                  _os.linesep,
                                                  config_pairs))) \
            as default_section:
        # NOTE(review): readfp() is deprecated since Python 3.2 and removed
        # in 3.12; the u"" literal suggests Python 2 compatibility is
        # intended — confirm target runtime before switching to read_file().
        _RawConfigParser.readfp(self, default_section)
def read(self, pathfilename): with _codecs.open(pathfilename, 'r', encoding='utf-8') as input_file: config_pairs = input_file.read() with _closing(_StringIO("[{0}]{1}{2}".format(self._default_section, _os.linesep, config_pairs))) \ as default_section: _RawConfigParser.readfp(self, default_section)
def ts_range(self, ns_key, start, end):
    """Return [(timestamp,raw value,{})] between *start* and *end* time."""
    with _closing(self._conn.cursor()) as cur:
        cur.execute(
            "SELECT _timestamp, _value FROM _ts WHERE _key=%s AND _timestamp>=%s AND _timestamp<=%s",
            (ns_key, start, end))
        return [(t, bytes(rv)) for t, rv in cur.fetchall()]
def es_apply(self, ns_key, timestamp, raw_state):
    """Store raw_state {item:(raw_op,raw_value)}."""
    with _closing(self._conn.cursor()) as cur:
        # Refuse to overwrite an existing event state at this timestamp.
        cur.execute("SELECT COUNT(*) FROM _es WHERE _key=%s AND _timestamp=%s",
                    (ns_key, timestamp))
        (existing,) = cur.fetchone()
        if existing > 0:
            raise KeyError(timestamp)
        for item, raw_ov in raw_state.items():
            cur.execute("INSERT INTO _es VALUES (%s,%s,%s,%s,%s)",
                        (ns_key, timestamp, item, *raw_ov))
        self._conn.commit()
def kv_set(self, ns_key, raw_value):
    """Store value."""
    with _closing(self._conn.cursor()) as cur:
        try:
            cur.execute("INSERT INTO _kv VALUES (%s,%s)", (ns_key, raw_value))
        except _psycopg2.IntegrityError:
            # Key already present: abort the failed insert, then update.
            self._conn.rollback()
            cur.execute("UPDATE _kv SET _value=%s WHERE _key=%s",
                        (raw_value, ns_key))
        self._conn.commit()
def ts_extend(self, ns_key, raw_data_points):
    """Add/replace [(timestamp,raw value)] to time series.

    Each (timestamp, raw value) pair either inserts a new row or
    overwrites the value of an existing one; the whole batch is
    committed as a single transaction.
    """
    with _closing(self._conn.cursor()) as c:
        for t, rv in raw_data_points:
            # BUGFIX: the previous try-INSERT/except-rollback pattern
            # rolled back the *entire* transaction on a duplicate key,
            # silently discarding every earlier point of this batch.
            # Update first and fall back to insert instead, which needs
            # no rollback. (Still racy against concurrent inserters,
            # like the original; INSERT ... ON CONFLICT would close that
            # gap on PostgreSQL >= 9.5.)
            c.execute("UPDATE _ts SET _value=%s WHERE _key=%s AND _timestamp=%s",
                      (rv, ns_key, t))
            if c.rowcount == 0:
                c.execute("INSERT INTO _ts VALUES (%s,%s,%s)", (ns_key, t, rv))
        self._conn.commit()
def read(self, pathfilename):
    """Load a sectionless "key = value" settings file into this parser.

    The path may be relative or use ``~``; it is expanded and resolved
    first. Content is parsed under ``self._default_section`` by
    prepending a synthetic section header, since RawConfigParser
    refuses option lines that appear before any ``[section]``.
    """
    # seb: expand path to allow using homedir and relative paths
    pathfilename = os.path.realpath(os.path.expanduser(pathfilename))
    # ServiceDefault.CHAR_CODEC is the project-wide file encoding —
    # presumably UTF-8; confirm against ServiceDefault.
    with codecs.open(pathfilename, 'r', encoding=ServiceDefault.CHAR_CODEC) as input_file:
        config_pairs = input_file.read()
    with _closing(_StringIO("[{0}]{1}{2}".format(self._default_section,
                                                 os.linesep,
                                                 config_pairs))) \
            as default_section:
        # NOTE(review): readfp() is deprecated since Python 3.2 and removed
        # in 3.12 — consider read_file() if the target runtime is Python 3.
        _RawConfigParser.readfp(self, default_section)
def dump_into_file(capes, filepath):
    """Serialize *capes* into the file *filepath* as an `SQLite
    <http://www.sqlite.org/>`_ database (see :mod:`sqlite3`).

    :param capes: the CAPE to serialize
    :type capes: list of :class:`CourseAndProfessorEvaluation`
    :param filepath: path to file to write SQLite data to
    :type filepath: string
    """
    with _closing(_sqlite_connect(filepath)) as db:
        create_cape_tables(db)
        for single_cape in capes:
            dump_into_db(single_cape, db)
def open(self):
    """A context in which this settings object is open for access.

    Entering the returned context has no further effect (:meth:`sync`
    is already called here); exiting it calls :meth:`close`.

    :rtype: context
    """
    self._isopen = True
    self.sync()
    return _closing(self)
def discard__(self):
    """Delete the entire namespace from the store. Remove file if database is empty."""
    tables = ('_kv', '_ts', '_es')
    with _closing(self._conn.cursor()) as cur:
        for table in tables:
            cur.execute(f"DELETE FROM {table} WHERE _key LIKE '{self._namespace}__%'")
        self._conn.commit()
    # Drop the backing file once the store holds no rows at all.
    with _closing(self._conn.cursor()) as cur:
        total = 0
        for table in tables:
            cur.execute(f"SELECT COUNT(_key) FROM {table}")
            total += cur.fetchone()[0]
    if total == 0:
        self._conn.close()
        _os.remove(self._filename)
def enter__(self, frozen):
    """Connect to database. In write mode, the store schema will be created if necessary."""
    self._conn = _psycopg2.connect(self._dsn)
    if not frozen:
        try:
            self._conn.cursor().execute(f'CREATE SCHEMA {self._namespace}')
        except _psycopg2.ProgrammingError:
            self._conn.rollback()  # Schema already exists
    with _closing(self._conn.cursor()) as c:
        # Verify the schema exists (it must, in frozen mode) before
        # pointing the session at it.
        c.execute("SELECT schema_name FROM information_schema.schemata WHERE schema_name=%s", (self._namespace,))
        row = c.fetchone()
        if row is None:
            # Mirrors the file-backed store: a missing namespace is
            # reported as a missing "file".
            raise FileNotFoundError((self._dsn, self._namespace))
        # All subsequent unqualified table names resolve inside the
        # namespace's schema.
        c.execute(f'SET search_path TO {self._namespace}')
    if not frozen:
        with _closing(self._conn.cursor()) as c:
            # Key/value store: one opaque blob per key.
            c.execute(
                "CREATE TABLE IF NOT EXISTS _kv ("
                "_key TEXT NOT NULL PRIMARY KEY,"
                "_value BYTEA NOT NULL)")
            # Time series: one blob per (key, timestamp).
            c.execute(
                "CREATE TABLE IF NOT EXISTS _ts ("
                "_key TEXT NOT NULL,"
                "_timestamp BIGINT NOT NULL,"
                "_value BYTEA NOT NULL,"
                "PRIMARY KEY (_key, _timestamp))")
            # Event store: (op, value) items keyed by key/timestamp/item/op.
            c.execute(
                "CREATE TABLE IF NOT EXISTS _es ("
                "_key TEXT NOT NULL,"
                "_timestamp BIGINT NOT NULL,"
                "_item TEXT NOT NULL,"
                "_op BYTEA NOT NULL,"
                "_value BYTEA NOT NULL,"
                "PRIMARY KEY (_key, _timestamp, _item, _op))")
            self._conn.commit()
def get_page(url: str, user_agent='Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'):
    """Fetch *url* with an HTTP GET request.

    Returns the response body as bytes when the content-type looks like
    HTML/XML (per ``_is_good_response``); otherwise ``None``. Request
    failures are logged and also yield ``None``.
    """
    request_headers = {'User-Agent': user_agent}
    try:
        with _closing(_get(url, stream=True, headers=request_headers)) as resp:
            return resp.content if _is_good_response(resp) else None
    except _RequestException as e:
        _log_error('Error during requests to {0} : {1}'.format(url, str(e)))
        return None
def enter__(self, frozen):
    """Open database. In write mode, the store file will be created if
    necessary. Raise FileNotFoundError in frozen mode if file does not
    exist."""
    # Frozen (read-only) mode must not create the file as a side effect
    # of connecting, so check existence up front.
    if not _os.path.isfile(self._filename) and frozen:
        raise FileNotFoundError(self._filename)
    self._conn = _sqlite3.connect(self._filename)
    if not frozen:
        with _closing(self._conn.cursor()) as c:
            # Enforce referential integrity for this connection.
            c.execute("PRAGMA foreign_keys = ON")
            # Key/value store: one opaque blob per key.
            c.execute("CREATE TABLE IF NOT EXISTS _kv ("
                      "_key TEXT NOT NULL PRIMARY KEY,"
                      "_value BLOB NOT NULL)")
            # Time series: one blob per (key, timestamp).
            c.execute("CREATE TABLE IF NOT EXISTS _ts ("
                      "_key TEXT NOT NULL, "
                      "_timestamp BIGINT NOT NULL, "
                      "_value BLOB NOT NULL, "
                      "PRIMARY KEY (_key, _timestamp))")
            # Event store: (op, value) items keyed by key/timestamp/item/op.
            c.execute("CREATE TABLE IF NOT EXISTS _es ("
                      "_key TEXT NOT NULL,"
                      "_timestamp BIGINT NOT NULL,"
                      "_item TEXT NOT NULL,"
                      "_op BLOB NOT NULL,"
                      "_value BLOB NOT NULL,"
                      "PRIMARY KEY (_key, _timestamp, _item, _op))")
def closing(sock):
    """Wrap *sock* in a context manager that closes it on exit."""
    managed = _closing(sock)
    return managed
def collection_to_string(collection, depth=3):
    """Render *collection* as a string via ``_itr_printer``, descending
    up to *depth* nesting levels."""
    with _closing(_StringIO()) as buffer_:
        _itr_printer(collection, depth=depth, stream=buffer_)
        return buffer_.getvalue()
def error(self, error_message):
    """Raise ArgumentError carrying *error_message* plus the usage text."""
    with _closing(_StringIO()) as usage_buf:
        self.print_usage(usage_buf)
        usage_text = usage_buf.getvalue()
    raise ArgumentError(EBSCliAttr.ErrorMsg.format(error_message, usage_text, self.prog))
def find_free_port():
    """Return a TCP port number that was free at call time.

    Binds an ephemeral socket to port 0 so the OS picks a free port,
    then returns that port. The socket is closed on exit, so the port
    may be taken again before the caller binds it (inherent race).
    """
    with _closing(_socket.socket(_socket.AF_INET, _socket.SOCK_STREAM)) as probe:
        probe.bind(('', 0))
        probe.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1)
        _, port = probe.getsockname()
        return port
def kv_get(self, ns_key):
    """Return raw value from store, or None if key not contained in store."""
    with _closing(self._conn.cursor()) as cur:
        cur.execute("SELECT _value FROM _kv WHERE _key=%s", (ns_key,))
        row = cur.fetchone()
    return bytes(row[0]) if row is not None else None
def discard__(self): """Delete the entire namespace from the store.""" with _closing(self._conn.cursor()) as c: c.execute(f"DROP SCHEMA {self._namespace} CASCADE") self._conn.commit()
def kv_set(self, ns_key, raw_value):
    """Store value."""
    params = (ns_key, raw_value)
    with _closing(self._conn.cursor()) as cur:
        cur.execute("INSERT OR REPLACE INTO _kv VALUES (?,?)", params)
        self._conn.commit()
def collection_to_string(collection, depth=3):
    """Return the printable representation of *collection*, descending
    *depth* nesting levels via ``_itr_printer``."""
    with _closing(_StringIO()) as out_stream:
        _itr_printer(collection, depth=depth, stream=out_stream)
        rendered = out_stream.getvalue()
    return rendered