class Conexao(object):
    """Open a pooled MySQL connection using settings read from config.ini."""

    def conecta(self):
        """Read config.ini next to the entry script and connect to the DB.

        On success ``self.dbhandler`` holds the pooled database and the
        schema has been prepared; on any failure ``self.erro`` is set to
        "err" (callers check that flag rather than catching exceptions).
        """
        # config.ini lives next to the script that started the process.
        self.path = os.path.abspath(os.path.dirname(sys.argv[0]))
        config = configparser.ConfigParser()
        # Attributes are only set when the file actually exists/parses.
        if config.read(os.path.join(self.path, 'config.ini')):
            self.DbHost = config['DEFAULT']['DbHost']
            self.DbName = config['DEFAULT']['DbName']
            self.DbUser = config['DEFAULT']['DbUser']
            self.DbPassword = config['DEFAULT']['DbPassword']
        try:
            self.dbhandler = PooledMySQLDatabase(self.DbName,
                                                 user=self.DbUser,
                                                 password=self.DbPassword,
                                                 host=self.DbHost)
            self.dbhandler.connect()
            print("Sucesso")
            with self.dbhandler.connection_context():
                self.prepare_database()
        except Exception:
            # Narrowed from a bare except: keep the best-effort error flag,
            # but no longer swallow KeyboardInterrupt/SystemExit.
            self.erro = "err"

    def prepare_database(self):
        """Create the application tables via CriarTabelas."""
        try:
            tabelas = CriarTabelas()
            tabelas.teste = "andre"
            tabelas.tabelas()
        except InternalError:
            print("INrter")
def __init__(self, db=None):
    """Resolve the DB config section for the given environment and build
    the sync/async peewee handles plus a redis connection pool.
    """
    if not db:
        db = CONFIG.get('env', {}).get('env', None)
    # Map the environment label onto its config section; anything
    # unrecognised falls back to the default 'db' section.
    section = {'root': 'db_root',
               'test': 'db_test',
               'analysis': 'db_analysis'}.get(db, 'db')
    self.db = CONFIG.get(section)
    self.redis_config = CONFIG.get('redis')
    logger.debug('db setting: ' + str(self.db))

    cfg = self.db
    # Connection parameters shared by all four database handles.
    common = dict(host=cfg.get('host'),
                  port=cfg.get('port', 3306),
                  user=cfg.get('user'))

    self.database = peewee.MySQLDatabase(cfg.get('database'),
                                         passwd=cfg.get('passwd'),
                                         charset=cfg.get('charset'),
                                         **common)
    self.database_async = peewee_async.MySQLDatabase(
        cfg.get('database'),
        password=cfg.get('passwd'),
        charset=cfg.get('charset'),
        **common)
    # NOTE(review): unlike the other handles, this pool passes no charset —
    # confirm whether that is intentional.
    self.pool = PooledMySQLDatabase(
        cfg.get('database'),
        passwd=cfg.get('passwd'),
        max_connections=cfg.get('max_connections', 10),
        **common)
    self.pool_async = peewee_async.PooledMySQLDatabase(
        cfg.get('database'),
        password=cfg.get('passwd'),
        charset=cfg.get('charset'),
        max_connections=cfg.get('max_connections', 10),
        **common)

    logger.debug('redis setting: ' + str(self.redis_config))
    rc = self.redis_config
    self.redis = redis.ConnectionPool(host=rc['host'],
                                      port=rc['port'],
                                      password=rc['password'],
                                      db=rc['db'],
                                      max_connections=rc['maxsize'],
                                      decode_responses=True)
def db_pool(self):
    """Create the process-wide pooled MySQL handle and return it.

    The handle is also published through the module-level
    ``_MySQLDatabasePool`` global.
    """
    global _MySQLDatabasePool
    cfg = self.conf
    _MySQLDatabasePool = PooledMySQLDatabase(
        cfg["db"],
        host=cfg["host"],
        user=cfg["user"],
        passwd=cfg["passwd"],
        max_connections=10,
        stale_timeout=300,
    )
    return _MySQLDatabasePool
def __init__(self):
    """Pick the backing database from WORK_MODE: sqlite for standalone
    (when USE_LOCAL_DATABASE is set), pooled MySQL otherwise.
    """
    cfg = DATABASE.copy()
    name = cfg.pop("name")  # remaining keys are connection kwargs
    if WORK_MODE == WorkMode.STANDALONE:
        if USE_LOCAL_DATABASE:
            self.database_connection = APSWDatabase('fate_flow_sqlite.db')
            RuntimeConfig.init_config(USE_LOCAL_DATABASE=True)
            stat_logger.info('init sqlite database on standalone mode successfully')
        else:
            self.database_connection = PooledMySQLDatabase(name, **cfg)
            stat_logger.info('init mysql database on standalone mode successfully')
            RuntimeConfig.init_config(USE_LOCAL_DATABASE=False)
        return
    if WORK_MODE == WorkMode.CLUSTER:
        self.database_connection = PooledMySQLDatabase(name, **cfg)
        stat_logger.info('init mysql database on cluster mode successfully')
        RuntimeConfig.init_config(USE_LOCAL_DATABASE=False)
        return
    raise Exception('can not init database')
def __get_pooled_db():
    """Build a pooled MySQL handle from the module-level credentials."""
    return PooledMySQLDatabase(
        database=__db,
        host=__host,
        port=int(__port),
        user=__user,
        passwd=__passwd,
        charset='utf8',
        max_connections=128,
        stale_timeout=120,
    )
def __init__(self):
    """Connect the pooled 'PersonalHomepage' database using the password
    from the config store.

    NOTE(review): the original also built a ``PATH`` lambda here that was
    never used; it was dead code and has been removed.
    """
    DB_PASS = ConfigHelper().get('DB_PASS')
    self.db = PooledMySQLDatabase('PersonalHomepage',
                                  user='******',
                                  password=DB_PASS,
                                  host='localhost',
                                  port=3306)
def __init__(self):
    """Select the backend by WORK_MODE: sqlite file when standalone,
    pooled MySQL when clustered; anything else is an error.
    """
    cfg = DATABASE.copy()
    name = cfg.pop("name")
    if WORK_MODE == WorkMode.STANDALONE:
        self.database_connection = APSWDatabase('fate_flow_sqlite.db')
        return
    if WORK_MODE == WorkMode.CLUSTER:
        self.database_connection = PooledMySQLDatabase(name, **cfg)
        return
    raise Exception('can not init database')
def prod_database():
    """Open the pooled connection to the production MySQL database."""
    log('connecting to prod db')
    pool = PooledMySQLDatabase(
        ENVARS.database_name,
        host=ENVARS.host,
        user=ENVARS.username,
        password=ENVARS.password,
        max_connections=32,  # 50 max on t2.micro
        stale_timeout=300,   # 5 minutes.
    )
    return pool
class Db(object):
    """Thin wrapper around a pooled peewee MySQL database.

    ``kw`` must contain a ``db`` key (the database name); every remaining
    key is forwarded to PooledMySQLDatabase as a connection parameter.
    """

    fn = peewee.fn
    JOIN_LEFT_OUTER = peewee.JOIN_LEFT_OUTER

    def __init__(self, kw):
        # NOTE: load_database() pops 'db' out of this dict, so the caller's
        # mapping is mutated.
        self.config = kw
        self.load_database()
        self.Model = self.get_model_class()

    def load_database(self):
        """Create the pooled database from the stored config."""
        self.database = PooledMySQLDatabase(self.config.pop('db'),
                                            threadlocals=True,
                                            **self.config)

    def get_model_class(self):
        """Build a base model class bound to this instance's database."""
        class BaseModel(_model):
            def __str__(self):
                r = {}
                for k in self._data.keys():
                    try:
                        r[k] = str(getattr(self, k))
                    except Exception:
                        # Narrowed from a bare except: fall back to JSON for
                        # values without a usable str().
                        r[k] = simplejson.dumps(getattr(self, k))
                return str(r)

            class Meta:
                database = self.database

        return BaseModel

    def connect(self):
        self.database.connect()

    def close(self):
        # Best-effort close: failures are ignored, but KeyboardInterrupt/
        # SystemExit are no longer swallowed (was a bare except).
        try:
            self.database.close()
        except Exception:
            pass

    @property
    def handle(self):
        """The underlying PooledMySQLDatabase."""
        return self.database
def __init__(self):
    """Build the pooled 'PersonalHomepage' connection from stored settings."""
    cfg = ConfigHelper()
    self.db = PooledMySQLDatabase(
        'PersonalHomepage',
        user=cfg.get('DB_USER'),
        password=cfg.get('DB_PASS'),
        host=cfg.get('DB_HOST'),
        port=int(cfg.get('DB_PORT')),
    )
def __init__(self):
    """Choose sqlite (standalone) or pooled MySQL from the DATABASE dict."""
    cfg = DATABASE.copy()
    name = cfg.pop("name")  # remaining keys are MySQL connection kwargs
    if is_standalone:
        from playhouse.apsw_ext import APSWDatabase
        self.database_connection = APSWDatabase(
            file_utils.get_project_base_directory("fate_sqlite.db"))
    else:
        from playhouse.pool import PooledMySQLDatabase
        self.database_connection = PooledMySQLDatabase(name, **cfg)
def getDatabaseByCfg(cfgname):
    """Build a pooled MySQL handle from the scrapy.<cfgname>.* properties."""
    def prop(suffix):
        # Every setting lives under the "scrapy.<cfgname>.<suffix>" key.
        return pt.getConfigProperties(cfgname,
                                      "scrapy.%s.%s" % (cfgname, suffix))

    return PooledMySQLDatabase(
        max_connections=10,
        database=prop("name"),
        passwd=prop("password"),
        user=prop("user"),
        host=prop("host"),
        port=int(prop("port")),
    )
def __init__(self):
    """Standalone mode uses a sqlite file under fate_flow; cluster mode
    uses pooled MySQL.
    """
    cfg = DATABASE.copy()
    name = cfg.pop("name")
    if WORK_MODE == WorkMode.STANDALONE:
        sqlite_path = os.path.join(file_utils.get_python_base_directory(),
                                   'fate_flow', 'fate_flow_sqlite.db')
        self.database_connection = APSWDatabase(sqlite_path)
    elif WORK_MODE == WorkMode.CLUSTER:
        self.database_connection = PooledMySQLDatabase(name, **cfg)
    else:
        raise Exception('can not init database')
def get_new_db(max_connections=20):
    """(Re)create the module-level pooled MySQL database and return it."""
    global database
    mysql_cfg = g_conf["mysql"]
    database = PooledMySQLDatabase(
        database=mysql_cfg["database"],
        host=mysql_cfg["host"],
        port=int(mysql_cfg["port"]),
        user=mysql_cfg["uname"],
        passwd=mysql_cfg["password"],
        charset="utf8mb4",
        stale_timeout=300,
        max_connections=max_connections,
    )
    return database
def db_config():
    """Initialise the global db_connection from the DATABASE settings."""
    global db_connection  # pylint: disable=W0603
    cfg = DATABASE.copy()
    db_type = cfg.pop("db_type")
    db_name = cfg.pop("db_name")
    if db_type == DBTYPE.SQLITE:
        db_connection = APSWDatabase(cfg.get('db_path', 'fedlearner.db'))
        logging.debug('init sqlite database on standalone mode successfully')
    elif db_type == DBTYPE.MYSQL:
        db_connection = PooledMySQLDatabase(db_name, **cfg)
        logging.debug('init mysql database on standalone mode successfully')
    else:
        raise Exception('can not init database')
def __init__(self):
    """Standalone → project sqlite file; otherwise → pooled MySQL."""
    cfg = DATABASE.copy()
    name = cfg.pop("name")
    if IS_STANDALONE:
        from playhouse.apsw_ext import APSWDatabase
        self.database_connection = APSWDatabase(
            file_utils.get_project_base_directory("fate_sqlite.db"))
        RuntimeConfig.init_config(USE_LOCAL_DATABASE=True)
        stat_logger.info(
            'init sqlite database on standalone mode successfully')
    else:
        self.database_connection = PooledMySQLDatabase(name, **cfg)
        stat_logger.info(
            'init mysql database on cluster mode successfully')
def get_MysqlConnection(db_host, db_port, db_user, db_password, db_name,
                        max_connection=100, stale_timeout=60):
    """Bind the shared database_proxy to a pooled MySQL backend.

    Returns the proxy so callers can use it directly.
    """
    pool = PooledMySQLDatabase(host=db_host,
                               port=db_port,
                               user=db_user,
                               password=db_password,
                               database=db_name,
                               max_connections=max_connection,
                               stale_timeout=stale_timeout)
    database_proxy.initialize(pool)
    return database_proxy
def get_mysql(mysql1):
    """Smoke-test a MySQL config given as a Python dict-literal string.

    Opens one pooled connection with the parsed parameters and closes it
    immediately.
    """
    import ast  # local import; only needed for parsing the config string

    # SECURITY: the original used eval() on the incoming string, which
    # executes arbitrary code. literal_eval accepts the same dict literals
    # but nothing executable.
    mysql2 = ast.literal_eval(mysql1)
    database = PooledMySQLDatabase(
        database=mysql2['db'],
        max_connections=300,
        host=mysql2['host'],
        port=int(mysql2['port']),
        user=mysql2['user'],
        passwd=mysql2['pass'],
        charset='utf8',
    )
    database.connect()
    database.close()
def get_database(cls, refresh=False):
    """Return the singleton pooled MySQL database.

    Classic double-checked locking: the flag is tested once without the
    lock (fast path) and again after acquiring it, so only one thread
    ever builds the pool.

    :param refresh: when True, rebuild the pool even if one exists.
    :return: the shared PooledMySQLDatabase instance.
    """
    if refresh or MysqlManage.__database is None:
        with MysqlManage._instance_lock:
            mysql_config = get_system_config()['mysql']
            # Re-check under the lock: another thread may have built the
            # pool while we were waiting.
            if refresh or MysqlManage.__database is None:
                MysqlManage.__database = PooledMySQLDatabase(
                    database=mysql_config["database"],
                    host=mysql_config['host'],
                    port=int(mysql_config['port']),
                    user=mysql_config['user'],
                    passwd=mysql_config['password'],
                    max_connections=mysql_config["max_connections"],
                    stale_timeout=mysql_config["stale_timeout"])
    return MysqlManage.__database
def __init__(self):
    """Open the pooled DB described by dsn.conf and reset all caches."""
    self.log = logging.getLogger(__name__)
    self.config = ConfigParser.ConfigParser()
    self.config.read('dsn.conf')
    db_opt = self.config.get  # all connection settings live in [db]
    self.db = PooledMySQLDatabase(
        db_opt('db', 'database'),
        host=db_opt('db', 'host'),
        user=db_opt('db', 'user'),
        password=db_opt('db', 'password'),
        stale_timeout=300,  # 5 minutes
    )
    # Lazily-populated reference/history caches.
    self.existingSites = None
    self.existingStates = None
    self.existingDishes = None
    self.spacecraftById = None
    self.spacecraftByName = None
    self.targetHist = None
    self.dishHist = None
    self.signalHist = None
    self.detailHist = None
    self.last_time = 0
def init_database(db_name, db_host, db_port, db_user, db_pass):
    """Bind the module-level proxy ``db`` to a pooled MySQL database.

    Exits the process if the schema/encoding verification fails.
    """
    log.info('Connecting to MySQL database on %s:%i...', db_host, db_port)
    pool = PooledMySQLDatabase(db_name,
                               user=db_user,
                               password=db_pass,
                               host=db_host,
                               port=db_port,
                               stale_timeout=60,
                               max_connections=None,  # unbounded pool
                               charset='utf8mb4')
    # Initialize Database Proxy
    db.initialize(pool)
    try:
        verify_database_schema()
        verify_table_encoding(db_name)
    except Exception as e:
        log.exception('Failed to verify database schema: %s', e)
        sys.exit(1)
    return db
def get_database(config_file=None):
    """Open a pooled connection using the application settings.

    Parameters
    ----------
    config_file : str, optional
        yaml file containing configuration settings.
        NOTE(review): currently ignored — get_settings() is called without
        it. Confirm whether it should be forwarded.

    Returns
    -------
    Database : object
        A Peewee MySQLDatabase object.
    """
    settings = get_settings()
    return PooledMySQLDatabase(settings['database'],
                               host=settings['host'],
                               port=settings['port'],
                               user=settings['user'],
                               passwd=settings['password'],
                               stale_timeout=150)
def check():
    """Probe reading_pro_db with one pooled connection.

    Returns True when a connection can be opened and closed, False (after
    printing a diagnostic) otherwise.
    """
    try:
        # Same pooled parameters the application itself will use.
        database = PooledMySQLDatabase(
            database=config.rp_db['db'],
            max_connections=300,
            host=config.rp_db['host'],
            port=int(config.rp_db['port']),
            user=config.rp_db['user'],
            passwd=config.rp_db['pass'],
            charset='utf8',
        )
        database.connect()
        database.close()
    except Exception:
        print('***** reading_pro_db database error')
        return False
    print("***** reading_pro_db database ok")
    return True
from peewee import *
from playhouse.pool import PooledMySQLDatabase
from os import path
import json
from datetime import date, datetime

# config.json sits one directory above this module.
_config_path = path.join(path.dirname(path.dirname(__file__)), 'config.json')
# Context manager guarantees the handle closes even if json.load raises
# (the original left it open on failure).
with open(_config_path, 'r') as f:
    config = json.load(f)

db = PooledMySQLDatabase(
    max_connections=None,  # unbounded pool
    stale_timeout=300,     # 5 minutes.
    **config,
)
db.connect(reuse_if_open=True)


class BaseModel(Model):
    class Meta:
        database = db


class Status(BaseModel):
    id = IntegerField(primary_key=True)
    card_id = CharField(18)
    target_temp = IntegerField()
    cur_temp = FloatField()
    speed = IntegerField()
    energy = FloatField()
    amount = FloatField()
"""데이터베이스 모델""" import datetime import peewee from playhouse.pool import PooledMySQLDatabase db = PooledMySQLDatabase( 'book_db', max_connections=8, stale_timeout=10, user='******') class BaseModel(peewee.Model): """공통 부모 모델""" created_at = peewee.DateTimeField(default=datetime.datetime.utcnow) updated_at = peewee.DateTimeField() def save(self, *args, **kwargs): self.updated_at = datetime.datetime.utcnow() super().save(*args, **kwargs) class Meta: database = db class Publisher(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField() is_active = peewee.BooleanField() class Meta:
from peewee import *
from playhouse.pool import PooledMySQLDatabase

# Pooled connection to the local 'account' schema: at most 8 live
# connections, each recycled after 5 idle minutes.
database = PooledMySQLDatabase('account', max_connections=8, stale_timeout=300,
                               host='127.0.0.1', user='******', passwd='',
                               charset='utf8', port=3306)
# database = SqliteDatabase('account.db', pragmas={'journal_mode': 'wal', 'cache_size': -1024 * 64})


class UnknownField(object):
    """Placeholder field that accepts and ignores any constructor args."""

    def __init__(self, *_, **__):
        pass


class BaseModel(Model):
    class Meta:
        database = database


class BillModel(Model):
    """Base for bill tables; subclasses must convert rows via to_xbill()."""

    class Meta:
        database = database

    def to_xbill(self) -> "XBill":
        # Abstract hook: concrete bill models implement the conversion.
        raise NotImplementedError
# -*-coding:utf-8 -*- from peewee import Model, CharField from playhouse.pool import PooledMySQLDatabase mysql_db = PooledMySQLDatabase("tornado_auth", user='******', password='******', host='127.0.0.1', port=3306, max_connections=20, stale_timeout=300) class AuthenticationModel(Model): class Meta: database = mysql_db class User(AuthenticationModel): username = CharField(unique=True, null=False) password = CharField(null=False) salt = CharField() mysql_db.connect() mysql_db.create_tables([ User, ], safe=True)
from peewee import SelectQuery, CharField, IntegerField, fn, Model, FloatField, MySQLDatabase, TextField,\ DateTimeField,TextField from playhouse.shortcuts import model_to_dict as to_dict import playhouse as ph from playhouse.pool import PooledMySQLDatabase from infoLogConfig import logger users = [{'name': '', 'pwd': ''}] for user in users: try: db = PooledMySQLDatabase( database='myblog', max_connections=5, stale_timeout=60, # 1 min timeout=0, user=user['name'], host='127.0.0.1', passwd=user['pwd'], ) with db.execution_context(): pass break except: logger.warning("this mysql username is not " + user['name']) def applyConnect(func): def applyFunc(cls, *args, **kwargs): with db.execution_context(): return func(cls, *args, **kwargs)
def __init__(self):
    """Always connect to pooled MySQL; 'name' is split out of DATABASE
    and the rest are connection kwargs.
    """
    cfg = DATABASE.copy()
    name = cfg.pop("name")
    self.database_connection = PooledMySQLDatabase(name, **cfg)
    stat_logger.info('init mysql database on cluster mode successfully')
# encoding=utf-8
from playhouse.pool import PooledMySQLDatabase

# Pool for the 'any_time' schema; idle connections are recycled after
# five minutes.
db = PooledMySQLDatabase(
    'any_time',
    host="192.168.100.243",
    user="******",
    passwd="abcd.1234",
    max_connections=32,
    stale_timeout=300,
)
db.connect()
from peewee import * from playhouse.pool import PooledMySQLDatabase from configs import common_db_config max_connections = common_db_config.get('max_connections') if max_connections is not None: database = PooledMySQLDatabase('cmdb', **common_db_config) else: database = MySQLDatabase('cmdb', **common_db_config) class UnknownField(object): pass class BaseModel(Model): class Meta: database = database class AssetNicMac(BaseModel): assets = IntegerField(db_column='assets_id', index=True, null=True) mac = CharField(null=True) nic = CharField(null=True) nic_id = PrimaryKeyField() class Meta: db_table = 'asset_nic_mac' class Rack(BaseModel):
#!/usr/bin/env python # -*- coding: utf-8 -*- from playhouse.pool import PooledMySQLDatabase from peewee import * database = PooledMySQLDatabase(None) class BaseModel(Model): class Meta: database = database class Pmc(BaseModel): name = CharField() class Meta: table_name = 'pmcs' class Park(BaseModel): pmc = ForeignKeyField(Pmc, backref='parks') spaces = IntegerField(column_name='parking_lots', null=True) vistor_spaces = IntegerField(column_name='parking_visitor_lots', null=True) total_remaining_spaces = IntegerField(column_name='total_remain_lots', null=True) vip_remaining_spaces = IntegerField(column_name='vip_remain_lots', null=True) visitor_remaining_spaces = IntegerField(column_name='visitor_remain_lots', null=True)
class DBSync(object): def __init__(self): self.log = logging.getLogger(__name__) self.config = ConfigParser.ConfigParser() self.config.read('dsn.conf') #self.db = MySQLDatabase(self.config.get('db', 'database'), **{ self.db = PooledMySQLDatabase(self.config.get('db', 'database'), host = self.config.get('db', 'host'), user = self.config.get('db', 'user'), password = self.config.get('db', 'password'), stale_timeout = 300 # 5 minutes ) self.existingSites = None self.existingStates = None self.existingDishes = None self.spacecraftById = None self.spacecraftByName = None self.targetHist = None self.dishHist = None self.signalHist = None self.detailHist = None self.last_time = 0 def sync_config(self, sites, spacecrafts): self.flush_ref() self.sync_sites(sites) self.sync_spacecraft(spacecrafts) def sync_sites(self, sites): with Using(self.db, [ConfigSite, ConfigDish]): with self.db.atomic(): existingSites = {} for site in ConfigSite.select(): existingSites[site.name] = site existingDishes = {} for dish in ConfigDish.select(): existingDishes[dish.name] = dish for siteName in sites: siteData = sites[siteName] site = existingSites.get(siteName, None) if site: if (site.friendlyname != siteData['friendly_name'] or site.latitude != siteData['latitude'] or site.longitude != siteData['longitude']): self.log.info("Updating config: site %s" % siteName) cmd = (ConfigSite.update( friendlyname=siteData['friendly_name'], latitude=siteData['latitude'], longitude=siteData['longitude']) .where(ConfigSite.id == site.id)) cmd.execute() else: self.log.info("Creating config: site %s" % siteName) site = ConfigSite.create( name=siteName, friendlyname=siteData['friendly_name'], latitude=siteData['latitude'], longitude=siteData['longitude']) for dishName in siteData['dishes']: dishData = siteData['dishes'][dishName] dish = existingDishes.get(dishName, None) if dish: if (dish.configsiteid != site.id or dish.friendlyname != dishData['friendly_name'] or dish.type != dishData['type']): 
self.log.info("Updating config: dish %s" % dishName) cmd = (ConfigDish.update( configsiteid=site.id, friendlyname=dishData['friendly_name'], type=dishData['type']) .where(ConfigDish.id == dish.id)) cmd.execute() else: self.log.info("Creating config: dish %s" % dishName) ConfigDish.create( name=dishName, configsiteid=site.id, friendlyname=dishData['friendly_name'], type=dishData['type']) def sync_spacecraft(self, spacecrafts): with Using(self.db, [ConfigSpacecraft]): with self.db.atomic(): existingSpacecraft = {} for spacecraft in ConfigSpacecraft.select(): existingSpacecraft[spacecraft.name] = spacecraft for spacecraftName in spacecrafts: spacecraftDescr = spacecrafts[spacecraftName] spacecraft = existingSpacecraft.get(spacecraftName, None) if spacecraft: if spacecraft.friendlyname != spacecraftDescr or spacecraft.flags != '': self.log.info("Updating config: spacecraft %s" % spacecraftName) cmd = (ConfigSpacecraft.update(friendlyname=spacecraftDescr, lastid=None, flags='') .where(ConfigSpacecraft.id == spacecraft.id)) cmd.execute() else: self.log.info("Creating config: spacecraft %s" % spacecraftName) ConfigSpacecraft.create(name=spacecraftName, friendlyname=spacecraftDescr) def flush_ref(self): self.existingSites = None self.existingStates = None self.existingDishes = None self.spacecraftById = None self.spacecraftByName = None def load_ref(self): if not self.existingSites: with Using(self.db, [ConfigSite]): self.existingSites = {} for site in ConfigSite.select(): self.existingSites[site.name] = site if not self.existingStates: with Using(self.db, [ConfigState]): self.existingStates = {} for state in ConfigState.select(): self.existingStates[state.name] = state if not self.existingDishes: with Using(self.db, [ConfigDish]): self.existingDishes = {} for dish in ConfigDish.select(): self.existingDishes[dish.name] = dish if not self.spacecraftByName: with Using(self.db, [ConfigSpacecraft]): self.spacecraftById = {} self.spacecraftByName = {} for spacecraft in 
ConfigSpacecraft.select(): self.spacecraftByName[spacecraft.name.lower()] = spacecraft if spacecraft.lastid: self.spacecraftById[spacecraft.lastid] = spacecraft def punch_data(self, data): this_time = data['time'] if not this_time: self.log.warning('Ignoring response with no time') pprint(data) return False self.log.info('Received data for time %d' % ((int)(this_time/5000))) self.load_ref() #self.log.info('ref loaded') is_backdated = None if this_time == self.last_time: self.log.info('Ignoring duplicate time %d' % this_time) return False elif this_time < self.last_time: self.log.info('Handling backdated data (%d < %d)' % (this_time, self.last_time)) is_backdated = this_time else: self.last_time = this_time with self.db.atomic(): # handle station data for stationName in data['stations']: stationData = data['stations'][stationName] site = self.existingSites[stationName] if stationData['time_zone_offset'] != site.timezoneoffset: with Using(self.db, [ConfigSite]): site.timezoneoffset = stationData['time_zone_offset'] site.save() #self.log.info('stations loaded') # create the time event with Using(self.db, [DataEvent]): if DataEvent.select(DataEvent.time).where(DataEvent.time == this_time).exists(): self.log.info('Ignoring previously logged time %d' % this_time) return False event = DataEvent.create(time=this_time) # do everything else self.punch_dishes(event.id, is_backdated, data['dishes']) return True def discover_arrays(self, dishes): # go through the signals being specified by arrayed dishes and try to find the "primary" signal, # if there is one (yet) minorSignals = {} majorSignals = {} for dishName in dishes: dishData = dishes[dishName] if not('Array' in dishData['flags']): continue for signalData in dishData['down_signal']: spacecraft = self.get_spacecraft(signalData) state = self.get_state(signalData['debug'], False, signalData['type']) if state.valuetype in ('data','carrier+'): majorSignals[spacecraft.id] = signalData else: minorSignals[spacecraft.id] = 
signalData results = {} for spacecraftId in majorSignals: if spacecraftId in minorSignals: results[spacecraftId] = majorSignals[spacecraftId] return results def punch_dishes(self, eventid, is_backdated, dishes): with Using(self.db, [ConfigDish, ConfigSpacecraft, ConfigState, DataDish, DataDishPos, DataTarget, DataSignal, DataSignalDet]): arrayedSignals = self.discover_arrays(dishes) dishOut = [] targetOut = [] signalOut = [] for dishName in dishes: if not (dishName in self.existingDishes): dish = ConfigDish.create(name=dishName) self.existingDishes[dishName] = dish else: dish = self.existingDishes[dishName] dishData = dishes[dishName] flags = ','.join(sorted(dishData['flags'])) created = dishData['created'] updated = dishData['updated'] created_time = calendar.timegm(created.utctimetuple())*1000 + created.microsecond / 1000 updated_time = calendar.timegm(updated.utctimetuple())*1000 + updated.microsecond / 1000 # collect the list of current spacecraft targets from the signal reports targets = {} isTesting = False for signalData in (dishData.get('down_signal', []) + dishData.get('up_signal', [])): if signalData['spacecraft_id']: spacecraft = self.get_spacecraft(signalData) if spacecraft: targets[spacecraft.id] = True if 'Testing' in spacecraft.flags: isTesting = True if isTesting: if flags != '': flags += ',' flags += 'Testing' targetList = targets.keys() # find the previous version of this record if not self.dishHist: self.dishHist = {} if is_backdated: histEntry = (DataDish.select(DataDish) .join(DataEvent, on=(DataDish.eventid==DataEvent.id)) .where((DataDish.configdishid==dish.id)&(DataEvent.time < is_backdated)) .order_by(DataEvent.time.desc()) .limit(1).first()) elif dish.id in self.dishHist: histEntry = self.dishHist[dish.id] else: histEntry = (DataDish.select(DataDish) .join(DataEvent, on=(DataDish.eventid==DataEvent.id)) .where(DataDish.configdishid==dish.id) .order_by(DataEvent.time.desc()) .limit(1).first()) if histEntry: self.dishHist[dish.id] = 
histEntry # has there been any change to this record? if not self.signalHist: self.signalHist = {} if not self.detailHist: self.detailHist = {} if (not histEntry or histEntry.createdtime != created_time or histEntry.updatedtime != updated_time or histEntry.flags != flags or histEntry.targetspacecraft1 != (targetList[0] if len(targetList) > 0 else None) or histEntry.targetspacecraft2 != (targetList[1] if len(targetList) > 1 else None) or histEntry.targetspacecraft3 != (targetList[2] if len(targetList) > 2 else None)): histEntry = DataDish.create( configdishid = dish.id, eventid = eventid, createdtime = created_time, updatedtime = updated_time, flags = flags, targetspacecraft1 = targetList[0] if len(targetList) > 0 else None, targetspacecraft2 = targetList[1] if len(targetList) > 1 else None, targetspacecraft3 = targetList[2] if len(targetList) > 2 else None ) self.dishHist[dish.id] = histEntry signalHist = {} self.signalHist[dish.id] = signalHist detailHist = {} self.detailHist[dish.id] = detailHist elif is_backdated: (signalHist, detailHist) = self.collect_signals(histEntry, is_backdated) elif not dish.id in self.signalHist: (signalHist, detailHist) = self.collect_signals(histEntry, None) self.signalHist[dish.id] = signalHist self.detailHist[dish.id] = detailHist else: signalHist = self.signalHist[dish.id] detailHist = self.detailHist[dish.id] # grab target history, if we have any if not self.targetHist: self.targetHist = {} if is_backdated: targetHist = {} else: if not(dish.id in self.targetHist): self.targetHist[dish.id] = {} targetHist = self.targetHist[dish.id] self.punch_targets(eventid, is_backdated, dish, dishData['targets'], targetOut, targetHist) self.punch_signals(eventid, dish, histEntry, dishData['down_signal'], dishData['up_signal'], arrayedSignals, signalHist, detailHist, signalOut) if (dishData['azimuth_angle'] is not None and dishData['elevation_angle'] is not None and dishData['wind_speed'] is not None): dishOut.append({ 'configdishid': dish.id, 
'eventid': eventid, 'azimuthangle': dishData['azimuth_angle'], 'elevationangle': dishData['elevation_angle'], 'windspeed': dishData['wind_speed'] }) if len(dishOut) > 0: cmd = DataDishPos.insert_many(dishOut) cmd.execute() if len(targetOut) > 0: cmd = DataTarget.insert_many(targetOut) cmd.execute() if len(signalOut) > 0: cmd = DataSignalDet.insert_many(signalOut) cmd.execute() def punch_targets(self, eventid, is_backdated, dish, targets, targetOut, targetHist): for targetName in targets: targetData = targets[targetName] # identify the target spaceship spacecraft = self.get_spacecraft(targetData) if not spacecraft: return if targetData['down_range'] != -1 or targetData['up_range'] != -1 or targetData['rtlt'] != -1: # find the previous version of this record if is_backdated: histEntry = (DataTarget.select(DataTarget) .join(DataEvent, on=(DataTarget.eventid==DataEvent.id)) .where( (DataTarget.configdishid==dish.id) & (DataTarget.configspacecraftid==spacecraft.id) & (DataEvent.time < is_backdated) ) .order_by(DataEvent.time.desc()) .limit(1).dicts().first()) if histEntry: targetHist[spacecraft.id] = histEntry elif spacecraft.id in targetHist: histEntry = targetHist[spacecraft.id] else: histEntry = (DataTarget.select(DataTarget) .join(DataEvent, on=(DataTarget.eventid==DataEvent.id)) .where((DataTarget.configdishid==dish.id) & (DataTarget.configspacecraftid==spacecraft.id)) .order_by(DataEvent.time.desc()) .limit(1).dicts().first()) if histEntry: targetHist[spacecraft.id] = histEntry # if there has been a change, create a new record if (not histEntry or histEntry['downlegrange'] != targetData['down_range'] or histEntry['uplegrange'] != targetData['up_range'] or histEntry['rtlt'] != targetData['rtlt']): newTarget = { 'configdishid': dish.id, 'eventid': eventid, 'configspacecraftid': spacecraft.id, 'downlegrange': targetData['down_range'], 'uplegrange': targetData['up_range'], 'rtlt': targetData['rtlt'] } targetOut.append(newTarget) targetHist[spacecraft.id] = newTarget 
def collect_signals(self, dataDish, is_backdated): query = (DataSignal.select(DataSignal.eventid) .join(DataEvent, on=(DataSignal.eventid==DataEvent.id))) if is_backdated: query = query.where((DataSignal.datadishid == dataDish.id) & (DataEvent.time < is_backdated)) else: query = query.where(DataSignal.datadishid == dataDish.id) eventId = query.order_by(DataEvent.time.desc()).limit(1).scalar() results = {} details = {} if eventId: prevEntry = None for entry in (DataSignal.select().where( (DataSignal.datadishid == dataDish.id) & (DataSignal.eventid == eventId) ).order_by( DataSignal.updown.asc(), DataSignal.configspacecraftid.asc(), DataSignal.flags.desc(), DataSignal.id.asc() )): if (not prevEntry or prevEntry.configspacecraftid != entry.configspacecraftid or prevEntry.updown != entry.updown): seq = 1 elif prevEntry.configspacecraftid == entry.configspacecraftid and prevEntry.flags < entry.flags: pass # slave connection else: seq = seq + 1 key = (unicode(entry.configspacecraftid) + ('u' if entry.updown == 'up' else 'd') + ':' + unicode(seq) + ('s' if 'slave' in entry.flags.lower() else '')) results[key] = entry query = (DataSignalDet.select(DataSignalDet.datarate, DataSignalDet.frequency, DataSignalDet.power) .join(DataEvent, on=(DataSignalDet.eventid==DataEvent.id))) if is_backdated: query = query.where((DataSignalDet.datasignalid == entry.id) & (DataEvent.time < is_backdated)) else: query = query.where(DataSignalDet.datasignalid == entry.id) details[entry.id] = query.order_by(DataEvent.time.desc()).limit(1).dicts().first() prevEntry = entry return (results, details) def collect_parsed_signals(self, signals, isUp, arrayedSignals, ourSignals): # collect and identify the signals so we can match them up to previous signal reports signalList = [] for signalData in signals: # identify the target spacecraft if not signalData['debug']: continue spacecraft = self.get_spacecraft(signalData) if not spacecraft: continue state = self.get_state(signalData['debug'], isUp, 
signalData['type']) #collect signal flags flags = set() flagsort = 0 if arrayedSignals and spacecraft.id in arrayedSignals: if arrayedSignals[spacecraft.id] == signalData: flags.add('master') flagsort = 1 else: flags.add('slave') flagsort = 2 # side effect - punch the spacecraft with its last known protocol if spacecraft and state.encoding and state.encoding != 'UNC' and spacecraft.encoding != state.encoding: spacecraft.encoding = state.encoding spacecraft.save() signalList.append({ 'isUp': isUp, 'spacecraft': spacecraft, 'spacecraft_id': spacecraft.id, 'state': state, 'flags': flags, 'data': signalData, 'flagsort': flagsort, 'frequency': signalData['frequency'] }) seq = 1 prevEntry = None for entry in sorted(signalList, key=itemgetter('spacecraft_id', 'flagsort', 'frequency')): if (not prevEntry or prevEntry['spacecraft_id'] != entry['spacecraft_id']): seq = 1 elif (prevEntry['spacecraft_id'] == entry['spacecraft_id'] and prevEntry['flagsort'] < entry['flagsort']): pass # slave connection else: seq = seq + 1 # create a unique id for this signal key = (unicode(entry['spacecraft_id']) + ('u' if isUp else 'd') + ':' + unicode(seq) + ('s' if 'slave' in entry['flags'] else '')) ourSignals[key] = entry prevEntry = entry def punch_signals(self, eventid, dish, dataDish, signalDown, signalUp, arrayedSignals, signalHist, detailHist, signalOut): # collect the signals ourSignals = {} self.collect_parsed_signals(signalDown, False, arrayedSignals, ourSignals) self.collect_parsed_signals(signalUp, True, None, ourSignals) # has anything changed in these signals? 
isChanged = False if len(ourSignals) != len(signalHist): self.log.info('CHANGED: %d != %d' % (len(ourSignals), len(signalHist))) isChanged = True if not isChanged: for key in ourSignals: entry = ourSignals[key] state = entry['state'] # if we don't have a corresponding entry in the history, or the entry is different, # then we have a change histEntry = signalHist.get(key, None) if (not histEntry) or (histEntry.stateid != state.id): if not histEntry: self.log.info('CHANGED: no entry %s' % key) pprint(signalHist) else: self.log.info('CHANGED(%s on %d): %d -> %s[%d]' % (key, histEntry.configspacecraftid, histEntry.stateid, state.name, state.id)) isChanged = True break # now process the signals, including any changes if necessary if isChanged: signalHist.clear() detailHist.clear() for key in ourSignals: entry = ourSignals[key] state = entry['state'] if isChanged: # our state has changed, create a new signal record spacecraft = entry['spacecraft'] self.log.info('new signal(%s on %d) has state=%s[%d], flags=%s' % (key, spacecraft.id, state.name, state.id, ','.join(entry['flags']))) baseSignal = DataSignal.create( eventid = eventid, datadishid = dataDish.id, configdishid = dish.id, updown = 'up' if entry['isUp'] else 'down', stateid = state.id, configspacecraftid = spacecraft.id, flags = ','.join(entry['flags']) ) signalHist[key] = baseSignal else: baseSignal = signalHist[key] # create a new signal report if not(state.valuetype in ('none','idle','idle+')): signalData = entry['data'] lastSignalData = detailHist.get(baseSignal.id, None) if (not lastSignalData or lastSignalData['datarate'] != signalData['data_rate'] or lastSignalData['frequency'] != signalData['frequency'] or lastSignalData['power'] != signalData['power']): #self.log.info('signal %s: rate=%s, freq=%s, power=%s' % (key, signalData.get('data_rate',''), signalData.get('frequency',''), signalData.get('power',''))) newRecord = { 'eventid': eventid, 'datasignalid': baseSignal.id, 'datarate': 
signalData['data_rate'], 'frequency': signalData['frequency'], 'power': signalData['power'] } detailHist[baseSignal.id] = newRecord signalOut.append(newRecord) def get_state(self, debug, isUp, signalType): state = self.existingStates.get(debug, None) if state: return state parsed = parse_debug(debug, isUp) state = ConfigState.create( name = debug, updown = 'UP' if isUp else 'down', signaltype = signalType, decoder1 = parsed.get('decoder1', None), decoder2 = parsed.get('decoder2', None), encoding = parsed.get('encoding', None), task = parsed.get('task', None), flags = ','.join(parsed.get('flags', set())), valuetype = parsed.get('valueType', None) ) self.existingStates[state.name] = state return state def get_spacecraft(self, targetData): targetName = targetData['spacecraft'] or '' targetId = targetData['spacecraft_id'] # first look it up by name spacecraft = self.spacecraftById.get(targetId, None) if spacecraft: return spacecraft if spacecraft.name.lower() == targetName.lower() else None # couldn't find it by id, find it by name spacecraft = self.spacecraftByName.get(targetName.lower(), None) if spacecraft: # If we don't know the official spacecraft_id, push it to the ConfigSpacraft table if self.spacecraftById.get(targetId, None) != spacecraft: spacecraft.lastid = targetId spacecraft.save() self.spacecraftById[targetId] = spacecraft return spacecraft # couldn't find it anywhere, let's create it as a testing "spacecraft" spacecraft = ConfigSpacecraft.create(lastid=targetId, name=targetName, flags='Testing') self.spacecraftByName[targetName.lower()] = spacecraft self.spacecraftById[targetId] = spacecraft return spacecraft