def init(self):
    """Finish initialisation: delegate to the parent, then lazily build
    the database connection and the two storage back-ends if they have
    not been injected already.
    """
    super(LoadPictures, self).init()
    if not self._dbc:
        conn_string = self._settings['DATABASE_CONNECTION_STRING']
        self._dbc = db.DatabaseConnection(conn_string)
    if not self._storage:
        self._storage = storage.get_storage(self._settings)
    if not self._storage_local:
        # Same settings, but forced onto the 'local' storage driver.
        local_settings = dict(self._settings.copy(), STORAGE='local')
        self._storage_local = storage.get_storage(local_settings)
def lag(self, nid, threshold=_threshold):
    """Return the average travel time for node *nid*, expressed in
    minutes (rounded by the database).

    NOTE(review): *threshold* is accepted but never used here --
    presumably kept for interface compatibility; confirm with callers.
    """
    query = ' '.join([
        'SELECT ROUND(AVG(travel_time) / {0}) AS lag',
        'FROM reading',
        'WHERE node = {1}',
    ]).format(constant.minute, nid)
    with db.DatabaseConnection() as connection:
        with db.DatabaseCursor(connection) as cursor:
            cursor.execute(query)
            row = cursor.fetchone()
            return row['lag']
def __init__(self, nid, connection=None, freq='T'):
    """Build the node view: fetch neighbors and resampled readings.

    :param nid: node identifier
    :param connection: optional open DB connection; when omitted, a
        private one is opened and closed before returning
    :param freq: pandas-style resample frequency (default minutely)
    """
    self.nid = nid
    close = not connection
    if close:
        connection = db.DatabaseConnection().resource
    # try/finally guarantees a privately opened connection is closed
    # even when the queries below raise (the original leaked it).
    try:
        self.neighbors = nd.get_neighbors(self.nid, connection)
        self.readings = self.__get_readings(self.neighbors, connection, freq)
    finally:
        if close:
            connection.close()
def __init__(self, nid, connection=None, freq='T'):
    """Build the node view: fetch the node name and its readings.

    :param nid: node identifier
    :param connection: optional open DB connection; when omitted, a
        private one is opened and closed before returning
    :param freq: pandas-style resample frequency (default minutely)
    """
    self.nid = nid
    self.freq = freq
    close = not connection
    if close:
        connection = db.DatabaseConnection().resource
    # try/finally guarantees a privately opened connection is closed
    # even when the queries below raise (the original leaked it).
    try:
        self.name = self.__get_name(connection)
        self.readings = self.__get_readings(connection)
    finally:
        if close:
            connection.close()
def getnodes(connection=None):
    """Yield node identifiers produced by the ``getnode`` stored procedure.

    When *connection* is omitted, a temporary connection is opened for
    the duration of the iteration.
    """
    if not connection:
        with db.DatabaseConnection() as conn:
            yield from getnodes(conn)
        return
    result = '@id'
    with db.DatabaseCursor(connection) as cursor:
        while True:
            cursor.execute('CALL getnode({0})'.format(result))
            cursor.execute('SELECT {0}'.format(result))
            row = cursor.fetchone()
            if not row[result]:
                # PEP 479: raising StopIteration inside a generator is a
                # RuntimeError on Python 3.7+; a plain return ends iteration.
                return
            yield row[result]
def __init__(self, nid, level, cluster, lag=0, db_conn=None, seen=None):
    """Build a tree node and recursively expand its children.

    :param nid: node identifier for this tree node
    :param level: remaining depth to expand
    :param cluster: clustering parameter forwarded to ``__build``
    :param lag: lag value attached to this node
    :param db_conn: optional open DB connection; when omitted, a private
        one is opened and closed before returning
    :param seen: set of already-visited node ids (cycle guard)
    """
    self.lag = lag
    self.children = set()
    close = not db_conn
    if close:
        db_conn = db.DatabaseConnection().resource
    # try/finally guarantees a privately opened connection is closed
    # even when node construction or __build raises (original leaked it).
    try:
        self.node = nd.Node(nid, db_conn)
        if not seen:
            # NOTE(review): an explicitly passed *empty* set is replaced
            # here, so the caller would not observe visited ids -- confirm
            # whether that is intended before tightening to `seen is None`.
            seen = set()
        seen.add(nid)
        self.__build(level, cluster, db_conn, seen)
    finally:
        if close:
            db_conn.close()
cargs = cli.CommandLine(cli.optsfile('storage'))
args = cargs.args

#
# Open and parse the data file
#
# NOTE(review): pickle.load can execute arbitrary code -- only ever feed
# this script trusted input files.
with open(args.input, mode='rb') as fp:
    data = pickle.load(fp)

keys = []
values = []
for i in data:
    if not keys:
        # Snapshot the column names from the first record; list() avoids
        # holding a live dict view tied to that record. All records are
        # assumed to share the same keys.
        keys = list(i.keys())
    values.append([i[x] for x in keys])
assert (keys and values)

#
# Create the SQL statement and execute!
#
s = ['%s'] * len(keys)
opts = [','.join(x) for x in (keys, s)]
sql = ['INSERT IGNORE INTO reading ({0})', 'VALUES ({1})']
sql = db.process(sql, opts)

db.EstablishCredentials(user='******')
with db.DatabaseConnection() as connection:
    with db.DatabaseCursor(connection) as cursor:
        # Bulk insert; see http://stackoverflow.com/a/18245311
        cursor.executemany(sql, values)
def init(self):
    """Finish initialisation: delegate to the parent, then lazily build
    the database connection if one has not been injected already.
    """
    super(WriteIndex, self).init()
    if self._dbc:
        return
    conn_string = self._settings['DATABASE_CONNECTION_STRING']
    self._dbc = db.DatabaseConnection(conn_string)
def init(self, index_file):
    """Finish initialisation: delegate to the parent, lazily build the
    database connection, then initialise the search index from
    *index_file*.
    """
    super(LoadFeatures, self).init()
    if not self._dbc:
        conn_string = self._settings['DATABASE_CONNECTION_STRING']
        self._dbc = db.DatabaseConnection(conn_string)
    index.init(index_file)
def nodegen(*args):
    """Yield ``(index, node_id)`` tuples for every node in the database,
    appending *args* as a third element when any extra arguments were
    supplied (preserving the historical two-element shape otherwise).
    """
    with db.DatabaseConnection() as conn:
        for (i, j) in enumerate(getnodes(conn)):
            # Truthiness replaces the unidiomatic `if len(args)`;
            # behaviour is identical for a tuple.
            yield (i, j, args) if args else (i, j)
def init(self):
    """Finish initialisation: delegate to the parent, then lazily build
    the database connection and storage back-end if they have not been
    injected already.
    """
    super(ExtractFeatures, self).init()
    if not self._dbc:
        conn_string = self._settings['DATABASE_CONNECTION_STRING']
        self._dbc = db.DatabaseConnection(conn_string)
    if not self._storage:
        self._storage = storage.get_storage(self._settings)
def init(self):
    """Finish initialisation: delegate to the parent, then lazily build
    the database connection if one has not been injected already.
    """
    super(ImportDatafeed, self).init()
    if self._dbc:
        return
    conn_string = self._settings['DATABASE_CONNECTION_STRING']
    self._dbc = db.DatabaseConnection(conn_string)