def handle(self, *args, **options):
    """Generate one day of per-second fake concurrent-call records.

    Note that records created this way are only for devel purposes.
    Inserts 86400 documents (one per second) into the MONGO_CDRSTATS
    "CONC_CALL" collection, back-dated by ``delta-day`` days (default 1),
    and feeds each record to set_concurrentcall_analytic(). Finally makes
    sure the unique (call_date, switch_id, accountcode) index exists.
    """
    no_of_record = 86400  # seconds in one day
    # Optional "delta-day" option: how many days in the past the generated
    # day starts. Falsy/missing or non-integer values fall back to 1.
    try:
        day_delta_int = int(options.get("delta-day") or 1)
    except ValueError:
        day_delta_int = 1
    # Devel data uses a fixed accountcode. (The original randomly generated
    # a 4-digit code and then immediately overwrote it — dead code, removed.)
    accountcode = "12345"
    # Midnight of (today - day_delta_int). Computed from a single now()
    # snapshot: the original re-called datetime.now() for hour/minute/second
    # separately, which could straddle a second/minute boundary.
    now = datetime.datetime.now()
    date_today = (datetime.datetime(now.year, now.month, now.day)
                  - datetime.timedelta(days=day_delta_int))
    numbercall = 10
    switch_id = 1
    for second in range(no_of_record):
        call_date = date_today + datetime.timedelta(seconds=second)
        # Random walk of the concurrent-call count, clamped at zero; the
        # clamped value carries over into the next iteration.
        numbercall = max(0, numbercall + random.randint(-2, 2))
        print("%s (accountcode:%s, switch_id:%d) ==> %d"
              % (call_date, accountcode, switch_id, numbercall))
        call_json = {
            "switch_id": switch_id,
            "call_date": call_date,
            "numbercall": numbercall,
            "accountcode": accountcode,
        }
        settings.DBCON[settings.MONGO_CDRSTATS["CONC_CALL"]].insert(call_json)
        # Create collection for Analytics
        set_concurrentcall_analytic(call_date, switch_id, accountcode, numbercall)
    # Add unique index with sorting. Narrowed from a bare except: we still
    # tolerate failure (index may already exist / Mongo unavailable in devel)
    # but no longer swallow KeyboardInterrupt/SystemExit.
    try:
        settings.DBCON[settings.MONGO_CDRSTATS["CONC_CALL"]].ensure_index(
            [("call_date", -1), ("switch_id", 1), ("accountcode", 1)],
            unique=True)
    except Exception:
        print("Error: Adding unique index")
def run(self, **kwargs): logger = self.get_logger() logger.info('TASK :: get_channels_info') totalcall = 0 # Get calldate now = datetime.today() date_now = datetime(now.year, now.month, now.day, now.hour, now.minute, now.second, 0) #key_date / minute precision key_date = "%d-%d-%d-%d-%d" % (now.year, now.month, now.day, now.hour, now.minute) # Retrieve SwitchID try: switch = Switch.objects.get(ipaddress=settings.LOCAL_SWITCH_IP) switch_id = switch.id except: logger.error("Cannot retrieve Switch %s" % settings.LOCAL_SWITCH_IP) return False if settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['cdr_type'] == 'freeswitch': con = False try: con = sqlite3.connect('/usr/local/freeswitch/db/core.db') cur = con.cursor() cur.execute('SELECT accountcode, count(*) FROM channels') rows = cur.fetchall() for row in rows: if not row[0]: accountcode = '' else: accountcode = row[0] numbercall = row[1] totalcall = totalcall + numbercall logger.debug('%s (accountcode:%s, switch_id:%d) ==> %s' % (date_now, accountcode, switch_id, str(numbercall))) call_json = { 'switch_id': switch_id, 'call_date': date_now, 'numbercall': numbercall, 'accountcode': accountcode, } settings.DBCON[settings.MONGO_CDRSTATS['CONC_CALL']].insert(call_json) #Save to cache key = "%s-%d-%s" % (key_date, switch_id, str(accountcode)) cache.set(key, numbercall, 1800) # 30 minutes #Create collection for Analytics set_concurrentcall_analytic(date_now, switch_id, accountcode, numbercall) except sqlite3.Error, e: logger.error('Error %s:' % e.args[0]) finally:
def run(self, **kwargs): logger = self.get_logger() logger.info('TASK :: get_channels_info') totalcall = 0 # Get calldate now = datetime.today() date_now = datetime(now.year, now.month, now.day, now.hour, now.minute, now.second, 0) #key_date / minute precision key_date = "%d-%d-%d-%d-%d" % (now.year, now.month, now.day, now.hour, now.minute) # Retrieve SwitchID try: switch = Switch.objects.get(ipaddress=settings.LOCAL_SWITCH_IP) switch_id = switch.id except: logger.error("Cannot retrieve Switch %s" % settings.LOCAL_SWITCH_IP) return False if settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['cdr_type'] == 'freeswitch': con = False try: con = sqlite3.connect('/usr/local/freeswitch/db/core.db') cur = con.cursor() cur.execute('SELECT accountcode, count(*) FROM channels') rows = cur.fetchall() for row in rows: if not row[0]: accountcode = '' else: accountcode = row[0] numbercall = row[1] totalcall = totalcall + numbercall logger.debug('%s (accountcode:%s, switch_id:%d) ==> %s' % (date_now, accountcode, switch_id, str(numbercall))) call_json = { 'switch_id': switch_id, 'call_date': date_now, 'numbercall': numbercall, 'accountcode': accountcode, } settings.DBCON[settings.MONGO_CDRSTATS['CONC_CALL']].insert(call_json) #Save to Redis cache key = "%s-%d-%s" % (key_date, switch_id, str(accountcode)) cache.set(key, numbercall, 1800) # 30 minutes #Create collection for Analytics set_concurrentcall_analytic(date_now, switch_id, accountcode, numbercall) except sqlite3.Error, e: logger.error('Error %s:' % e.args[0]) finally:
def handle(self, *args, **options):
    """Generate one day of per-second fake concurrent-call records.

    Note that records created this way are only for devel purposes.
    Writes 86400 documents (one per second of the chosen day) to the
    MONGO_CDRSTATS 'CONC_CALL' collection, starting ``delta-day`` days in
    the past (default 1), mirrors each record into the analytics collection
    via set_concurrentcall_analytic(), then ensures the unique index.
    """
    no_of_record = 86400  # seconds in one day
    # Parse the optional 'delta-day' option; any missing/invalid value
    # degrades to a 1-day offset.
    day_delta_int = 1
    if options.get('delta-day'):
        try:
            day_delta_int = int(options.get('delta-day'))
        except ValueError:
            day_delta_int = 1
    # Fixed accountcode for devel data. The original generated a random
    # 4-digit code and immediately discarded it (dead code) — removed.
    accountcode = '12345'
    # Single now() snapshot, truncated to midnight, then shifted back.
    # The original called datetime.now() three separate times for
    # hour/minute/second, which can race across a time-unit boundary.
    now = datetime.datetime.now()
    midnight = datetime.datetime(now.year, now.month, now.day)
    date_today = midnight - datetime.timedelta(days=day_delta_int)
    numbercall = 10
    switch_id = 1
    for delta_duration in range(no_of_record):
        call_date = date_today + datetime.timedelta(seconds=delta_duration)
        # Random-walk step, never dropping below zero; the clamped value is
        # the starting point of the next step.
        numbercall = max(0, numbercall + random.randint(-2, 2))
        print('%s (accountcode:%s, switch_id:%d) ==> %d'
              % (call_date, accountcode, switch_id, numbercall))
        call_json = {'switch_id': switch_id,
                     'call_date': call_date,
                     'numbercall': numbercall,
                     'accountcode': accountcode}
        settings.DBCON[settings.MONGO_CDRSTATS['CONC_CALL']].insert(call_json)
        # Create collection for Analytics
        set_concurrentcall_analytic(call_date, switch_id, accountcode, numbercall)
    # Add unique index with sorting; failure is tolerated (index may already
    # exist) but the bare except is narrowed so Ctrl-C still propagates.
    try:
        settings.DBCON[settings.MONGO_CDRSTATS['CONC_CALL']].ensure_index(
            [('call_date', -1), ('switch_id', 1), ('accountcode', 1)],
            unique=True)
    except Exception:
        print("Error: Adding unique index")
totalcall = totalcall + numbercall logger.debug('%s (accountcode:%s, switch_id:%d) ==> %s' % (date_now, accountcode, switch_id, str(numbercall))) call_json = { 'switch_id': switch_id, 'call_date': date_now, 'numbercall': numbercall, 'accountcode': accountcode, } settings.DBCON[settings.MONGO_CDRSTATS['CONC_CALL']].insert(call_json) #Save to cache key = "%s-%d-%s" % (key_date, switch_id, str(accountcode)) cache.set(key, numbercall, 1800) # 30 minutes #Create collection for Analytics set_concurrentcall_analytic(date_now, switch_id, accountcode, numbercall) #For any switches #There is no calls if totalcall == 0: accountcode = '' numbercall = 0 call_json = { 'switch_id': switch_id, 'call_date': date_now, 'numbercall': numbercall, 'accountcode': accountcode, } settings.DBCON[settings.MONGO_CDRSTATS['CONC_CALL']].insert(call_json) key = "%s-%d-%s" % (key_date, switch_id, str(accountcode))
def run(self, **kwargs): logger = self.get_logger() logger.info('TASK :: get_channels_info') totalcall = 0 # Get calldate now = datetime.today() date_now = datetime(now.year, now.month, now.day, now.hour, now.minute, now.second, 0) # key_date / minute precision key_date = "%d-%d-%d-%d-%d" % (now.year, now.month, now.day, now.hour, now.minute) # Retrieve SwitchID try: switch = Switch.objects.get(ipaddress=settings.LOCAL_SWITCH_IP) switch_id = switch.id except: logger.error("Cannot retrieve Switch %s" % settings.LOCAL_SWITCH_IP) return False if settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['cdr_type'] == 'freeswitch': con = False if settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['internal_db_engine'] == 'pgsql': user = settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['internal_db_user'] password = settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['internal_db_password'] db_name = settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['internal_db_name'] host = settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['internal_db_host'] port = settings.CDR_BACKEND[settings.LOCAL_SWITCH_IP]['internal_db_port'] try: connection = PgDatabase.connect(user=user, password=password, database=db_name, host=host, port=port) connection.autocommit = True cur = connection.cursor() cur.execute('SELECT accountcode,COUNT(*) FROM channels GROUP BY accountcode;') rows = cur.fetchall() for row in rows: if not row[0]: accountcode = '' else: accountcode = row[0] numbercall = row[1] totalcall = totalcall + numbercall logger.debug('%s (accountcode:%s, switch_id:%d) ==> %s' % (date_now, accountcode, switch_id, str(numbercall))) call_json = { 'switch_id': switch_id, 'call_date': date_now, 'numbercall': numbercall, 'accountcode': accountcode, } mongodb.conc_call.insert(call_json) # Save to cache key = "%s-%d-%s" % (key_date, switch_id, str(accountcode)) cache.set(key, numbercall, 1800) # 30 minutes # Create collection for Analytics set_concurrentcall_analytic(date_now, switch_id, accountcode, numbercall) except 
PgDatabase.Error, e: logger.error('Error %s:' % e.args[0]) finally: if con:
def run(self, **kwargs): logger = self.get_logger() logger.info('TASK :: get_channels_info') totalcall = 0 # Get calldate now = datetime.today() date_now = datetime(now.year, now.month, now.day, now.hour, now.minute, now.second, 0) # key_date / minute precision key_date = "%d-%d-%d-%d-%d" % (now.year, now.month, now.day, now.hour, now.minute) # Retrieve SwitchID try: switch = Switch.objects.get(ipaddress=settings.LOCAL_SWITCH_IP) switch_id = switch.id except: logger.error("Cannot retrieve Switch %s" % settings.LOCAL_SWITCH_IP) return False if settings.CDR_BACKEND[ settings.LOCAL_SWITCH_IP]['cdr_type'] == 'freeswitch': con = False if settings.CDR_BACKEND[ settings.LOCAL_SWITCH_IP]['internal_db_engine'] == 'pgsql': user = settings.CDR_BACKEND[ settings.LOCAL_SWITCH_IP]['internal_db_user'] password = settings.CDR_BACKEND[ settings.LOCAL_SWITCH_IP]['internal_db_password'] db_name = settings.CDR_BACKEND[ settings.LOCAL_SWITCH_IP]['internal_db_name'] host = settings.CDR_BACKEND[ settings.LOCAL_SWITCH_IP]['internal_db_host'] port = settings.CDR_BACKEND[ settings.LOCAL_SWITCH_IP]['internal_db_port'] try: connection = PgDatabase.connect(user=user, password=password, database=db_name, host=host, port=port) connection.autocommit = True cur = connection.cursor() cur.execute( 'SELECT accountcode,COUNT(*) FROM channels GROUP BY accountcode;' ) rows = cur.fetchall() for row in rows: if not row[0]: accountcode = '' else: accountcode = row[0] numbercall = row[1] totalcall = totalcall + numbercall logger.debug( '%s (accountcode:%s, switch_id:%d) ==> %s' % (date_now, accountcode, switch_id, str(numbercall))) call_json = { 'switch_id': switch_id, 'call_date': date_now, 'numbercall': numbercall, 'accountcode': accountcode, } mongodb.conc_call.insert(call_json) # Save to cache key = "%s-%d-%s" % (key_date, switch_id, str(accountcode)) cache.set(key, numbercall, 1800) # 30 minutes # Create collection for Analytics set_concurrentcall_analytic(date_now, switch_id, accountcode, numbercall) 
except PgDatabase.Error, e: logger.error('Error %s:' % e.args[0]) finally: if con:
call_json = { 'switch_id': switch_id, 'call_date': date_now, 'numbercall': numbercall, 'accountcode': accountcode, } mongodb.conc_call.insert(call_json) # Save to cache key = "%s-%d-%s" % (key_date, switch_id, str(accountcode)) cache.set(key, numbercall, 1800) # 30 minutes # Create collection for Analytics set_concurrentcall_analytic(date_now, switch_id, accountcode, numbercall) except sqlite3.Error, e: logger.error('Error %s:' % e.args[0]) finally: if con: con.close() elif settings.CDR_BACKEND[ settings.LOCAL_SWITCH_IP]['cdr_type'] == 'asterisk': manager = asterisk.manager.Manager() listaccount = {} try: # connect to the manager try: manager.connect(settings.ASTERISK_MANAGER_HOST) manager.login(settings.ASTERISK_MANAGER_USER,