def get_data_from_storage(channels, time_start, time_end):
    """Fetch per-channel samples from PostgreSQL.

    :param channels: iterable of channel identifiers to query.
    :param time_start: datetime lower bound (inclusive) of the interval.
    :param time_end: datetime upper bound of the interval.
    :return: dict mapping each channel to a list of (datetime, value) tuples.
    """
    storage = PostgresqlStorage(
        database=POSTGRESQL_DB,
        user=POSTGRESQL_USER,
        password=POSTGRESQL_PASSWORD,
        tablename=POSTGRESQL_TABLE,
        host=POSTGRESQL_HOST,
    )
    time_fmt = '%Y-%m-%d %H:%M:%S'
    rows = storage.get_data(
        channels=channels,
        time_start=time_start.strftime(time_fmt),
        time_end=time_end.strftime(time_fmt),
    )
    data_of_channels = {channel: [] for channel in channels}
    for row in rows:
        stamp = row[0]
        # Hack: force a non-zero microsecond so the timestamp matches the
        # '%Y-%m-%d %H:%M:%S.%f' format expected downstream.
        if not stamp.microsecond:
            stamp = stamp.replace(microsecond=1)
        # row layout: (time, value, ..., channel_id) — channel id is last.
        data_of_channels[row[-1]].append((stamp, row[1]))
    return data_of_channels
def update_list():
    """Reload the channel_name -> channel_id mapping from PostgreSQL
    into memcache, one key per channel."""
    table_name = 'channels'
    storage = PostgresqlStorage(
        database=POSTGRESQL_DB,
        user=POSTGRESQL_USER,
        password=POSTGRESQL_PASSWORD,
        tablename=POSTGRESQL_TABLE,
        host=POSTGRESQL_HOST,
    )
    rows = storage.raw_sql('SELECT * FROM %s ' % table_name)
    assert rows
    cache = ultramemcache.Client([MEMCACHE_SERVER], debug=0)
    for channel_id, channel_name in rows:
        cache.set(channel_name, channel_id)
def main():
    """Insert channel names from res/channels/to_add.txt into the
    'channels' table, then mirror the name -> id mapping into memcache.

    :raises Exception: if the input file does not exist.
    """
    file_path = os.path.join(RES_FOLDER, 'channels', 'to_add.txt')
    if not os.path.exists(file_path):
        raise Exception('not found file: %s' % file_path)
    with open(file_path, 'r') as fio:
        # Fix: readlines() keeps trailing newlines, which were previously
        # interpolated straight into the SQL text. Strip and drop blanks.
        channels = [line.strip() for line in fio if line.strip()]
    table_name = 'channels'
    storage = PostgresqlStorage(database=POSTGRESQL_DB, user=POSTGRESQL_USER,
                                password=POSTGRESQL_PASSWORD,
                                tablename=POSTGRESQL_TABLE,
                                host=POSTGRESQL_HOST)
    for name in channels:
        # SECURITY: string-built SQL is injectable; raw_sql() exposes no
        # parameter binding here, so at minimum quote the value (the old
        # code emitted the name unquoted, which is invalid SQL for text)
        # and escape embedded quotes.
        storage.raw_sql("INSERT INTO %s (channel_name) VALUES ('%s')"
                        % (table_name, name.replace("'", "''")))
    results = storage.raw_sql('SELECT * FROM %s ' % table_name)
    assert results
    # Fix: the original called the builtin set(unicode(name), channel_id),
    # which is a TypeError. The intent (see update_list) is a memcache write.
    base = ultramemcache.Client([MEMCACHE_SERVER], debug=0)
    for channel_id, name in results:
        base.set(name, channel_id)
def __init__(self, filename=os.path.join(DB_FOLDER, 'berkeley.db'), read=False, sql_storage=None):
    """Open (or create) the Berkeley DB file and wire up backing stores.

    NOTE(review): this top-level function appears to be a byte-for-byte
    duplicate of BerkeleyStorage.__init__ — confirm it is still needed.

    :param filename: path to the Berkeley DB file.
    :param read: when True open read-only with DB_READ_COMMITTED,
        otherwise open with DB_CREATE.
    :param sql_storage: optional PostgresqlStorage instance; when None one
        is built from the module-level POSTGRESQL_* config constants.
    """
    self.filename = filename
    self.database = db.DB()
    self.generator = GeneratorId()
    # todo
    # move_number == 1000
    self.sync_number = BERKELEY_SYNC_NUMBER
    self.move_number = BERKELEY_MOVE_NUMBER
    self.stored_id = 0
    # B-tree access method; alternatives below were tried and left disabled.
    self.db_type = db.DB_BTREE
    # self.db_type = db.DB_HASH
    # self.db_type = db.DB_QUEUE
    if read:
        self.database.open(self.filename, None, self.db_type, db.DB_READ_COMMITTED)
    else:
        self.database.open(self.filename, None, self.db_type, db.DB_CREATE)
    if sql_storage is None:
        self.sql_storage = PostgresqlStorage(database=POSTGRESQL_DB, user=POSTGRESQL_USER, password=POSTGRESQL_PASSWORD, tablename=POSTGRESQL_TABLE, host=POSTGRESQL_HOST)
    else:
        self.sql_storage = sql_storage
    # Seed the id generator from the number of records already on disk.
    self.id = len(self.database)
    self.generator.setId(self.id)
    # todo
    # maybe the list should be refreshed dynamically
    update_list()
    self.kv_storage = ultramemcache.Client([MEMCACHE_SERVER], debug=0)
class BerkeleyStorage(Singleton):
    """Buffering store: appends tab-separated samples to a local Berkeley
    DB file and periodically moves them in batches into PostgreSQL."""

    def __init__(self, filename=os.path.join(DB_FOLDER, 'berkeley.db'), read=False, sql_storage=None):
        """Open (or create) the Berkeley DB file and wire up backing stores.

        :param filename: path to the Berkeley DB file.
        :param read: when True open read-only with DB_READ_COMMITTED,
            otherwise open with DB_CREATE.
        :param sql_storage: optional PostgresqlStorage instance; when None
            one is built from the module-level POSTGRESQL_* constants.
        """
        self.filename = filename
        self.database = db.DB()
        self.generator = GeneratorId()
        # todo
        # move_number == 1000
        self.sync_number = BERKELEY_SYNC_NUMBER
        self.move_number = BERKELEY_MOVE_NUMBER
        self.stored_id = 0
        # B-tree access method; alternatives below were tried and disabled.
        self.db_type = db.DB_BTREE
        # self.db_type = db.DB_HASH
        # self.db_type = db.DB_QUEUE
        if read:
            self.database.open(self.filename, None, self.db_type, db.DB_READ_COMMITTED)
        else:
            self.database.open(self.filename, None, self.db_type, db.DB_CREATE)
        if sql_storage is None:
            self.sql_storage = PostgresqlStorage(database=POSTGRESQL_DB, user=POSTGRESQL_USER, password=POSTGRESQL_PASSWORD, tablename=POSTGRESQL_TABLE, host=POSTGRESQL_HOST)
        else:
            self.sql_storage = sql_storage
        # Seed the id generator from the number of records already on disk.
        self.id = len(self.database)
        self.generator.setId(self.id)
        # todo
        # maybe the list should be refreshed dynamically
        update_list()
        self.kv_storage = ultramemcache.Client([MEMCACHE_SERVER], debug=0)

    def get_id(self, name):
        """Resolve a channel name to its id via memcache (None if absent)."""
        return self.kv_storage.get(name)

    def __del__(self):
        # Best-effort: close the Berkeley DB handle on garbage collection.
        self.database.close()

    def add_json(self, data_dict):
        """Store every sample from a nested dict.

        :param data_dict: presumably {channel_name: [{'value': ..,
            'time': ..}, ...]} — each entry becomes one tab-joined record.
        """
        for key, values in data_dict.items():
            [self.add('%s\t%s\t%s' % (key, val.get('value'), val.get('time'))) for val in values]

    def add(self, value):
        """Append one raw record under a freshly generated id and trigger
        a flush check every sync_number-th insert."""
        self.id = GeneratorId().getid()
        self.database.put(str(self.id), value)
        if not self.id % self.sync_number:
            # self.database.sync()
            self.check()

    def length(self):
        """Return the number of records currently in the Berkeley DB."""
        return len(self.database)

    def check(self):
        """Check how many values are already in the local database and,
        once a full batch of move_number records has accumulated, move
        that batch into PostgreSQL and delete it locally.

        :return: True when a batch was flushed, False otherwise.
        """
        # Integer division: flush only when id has crossed into the next
        # move_number-sized bucket past the last flushed position.
        if self.id / self.move_number > self.stored_id / self.move_number:
            values = []
            for x in xrange(self.stored_id, self.stored_id + self.move_number):
                try:
                    if self.database.get('%s' % x):
                        # Record format: channel_name \t value \t time.
                        info = self.database.get('%s' % x).split('\t')
                        channel_id = self.get_id(info[0])
                        if channel_id is None:
                            # from scripts.python.update_channels import update_list
                            # update_list()
                            # self.kv_storage = load()
                            # channel_id = self.get_id(info[0])
                            # Reading from disk while handling a heavy
                            # input stream causes disk-bound blocking.
                            raise Exception(
                                'Not found channel_name in kvstorage: %s' % info[0]
                            )
                        values.append([info[2], info[1], channel_id])  # time, value, channel_id
                        self.database.delete('%s' % x)
                except AttributeError as e:
                    log_error(e, x, self.database.get('%s' % x))
            self.sql_storage.add(*values)
            self.stored_id += self.move_number
            return True
        return False