class proxystatisticsService(object):
    logger = log.get_logger("proxystatisticsService")

    @classmethod
    def getUsersLogins(cls):
        pgConn = proxystatisticsPgConnector()
        result = pgConn.execute_select(
            "SELECT sum(count) AS total FROM statistics")
        pgConn.close()
        timestamp = int(time.time())
        Metric.save(
            Metric(None, "logins", result[0][0],
                   datetime.fromtimestamp(timestamp)))
        cls.logger.info("{0} total logins".format(result[0][0]))
        return result[0][0]
class nginxlogsService(object):
    logger = log.get_logger("nginxlogsService")

    @classmethod
    def getApiRequests(cls):
        pgConn = destinationPgConnector()
        metrics_names = ["token", "authorize", "userinfo", "devicecode",
                         "introspect"]
        metrics_results = []
        api_requests = 0
        for metric in metrics_names:
            # Get the last date we processed data
            lastMetric = Metric.getLastDateForMetric(metric)
            # Initialize whereClause
            whereClause = ''
            # Check if we have already stored metrics
            if lastMetric:
                dayFrom = lastMetric[0][0]
                previousValue = lastMetric[0][1]
                whereClause = " AND created > '{0}'".format(dayFrom)
            else:
                previousValue = 0
            # Get the date of the last log processed
            lastDate = pgConn.execute_select("SELECT max(created) FROM syslogs")
            if lastDate[0][0] is not None:
                lastDate = lastDate[0][0]
            else:
                lastDate = datetime.now()
            # Get the new metrics
            metrics_value = pgConn.execute_select(
                "SELECT count(*) FROM syslogs WHERE log_message LIKE '%/oidc/{0}%' "
                "AND service='nginx' {1}".format(metric, whereClause))
            metrics_results.append(Metric(
                None, metric, metrics_value[0][0] + previousValue, lastDate))
            # Add metric to the total api requests
            api_requests += metrics_value[0][0] + previousValue
        cls.logger.info("{0} total api requests".format(api_requests))
        metrics_results.append(
            Metric(None, "api_requests", api_requests, lastDate))
        pgConn.close()
        # Save all metrics to the database
        Metric.saveAll(metrics_results)
        return api_requests
def log_decorator_wrapper(self, *args, **kwargs):
    # Build logger object
    logger_obj = log.get_logger(log_file_name=self.log_file_name,
                                log_sub_dir=self.log_file_dir)

    """ Create a list of the positional arguments passed to the function.
        - repr() is used for the string representation of each argument.
          repr() is similar to str(), the only difference being that it prints
          the value with a pair of quotes and, for computed values, gives a
          more precise representation than str(). """
    args_passed_in_function = [repr(a) for a in args]

    """ Create a list of the keyword arguments. The f-string formats each
        argument as key=value, where the !r specifier means that repr() is
        used to represent the value. """
    kwargs_passed_in_function = [f"{k}={v!r}" for k, v in kwargs.items()]

    """ The lists of positional and keyword arguments are joined together to
        form the final string. """
    formatted_arguments = ", ".join(args_passed_in_function +
                                    kwargs_passed_in_function)

    """ Generate the file name and function name of the calling function.
        By default the log record would carry the name of this wrapper
        (log_decorator_wrapper) and the file name of the decorator module
        (e.g. log-decorator.py) instead of the decorated function's.
        - In order to log the actual function and file name we use the
          'extra' parameter.
        - To get the caller's file name we use the built-in
          inspect.getframeinfo, which returns the calling file name. """
    py_file_caller = getframeinfo(stack()[1][0])
    extra_args = {
        'func_name_override': func.__name__,
        'file_name_override': os.path.basename(py_file_caller.filename)
    }

    """ Before the function executes, log the function details. """
    logger_obj.info(f"Arguments: {formatted_arguments} - Begin function",
                    extra=extra_args)
    try:
        """ Log the return value from the function. """
        value = func(self, *args, **kwargs)
        logger_obj.info(f"Returned: - End function {value!r}",
                        extra=extra_args)
    except:
        """ Log the exception if one occurs in the function. """
        logger_obj.error(f"Exception with Args: {formatted_arguments}",
                         exc_info=True, extra=extra_args)
        raise
    # Return function value
    return value
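# The wrapper above is only the inner piece of a decorator: `func` comes from the
# enclosing closure, and `self` is the instance whose method is being decorated,
# which is expected to expose `log_file_name` and `log_file_dir` attributes.
# Below is a minimal sketch of how such a wrapper is typically enclosed and applied;
# the `log_decorator` factory and the `ExampleService` class are illustrative
# assumptions, not part of the original code.
import functools


def log_decorator(func):
    @functools.wraps(func)
    def log_decorator_wrapper(self, *args, **kwargs):
        # ... logging statements from the wrapper body above go here ...
        return func(self, *args, **kwargs)
    return log_decorator_wrapper


class ExampleService:
    # Attributes the wrapper reads to configure its logger (assumed values)
    log_file_name = "example"
    log_file_dir = "logs"

    @log_decorator
    def do_work(self, x):
        return x * 2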
class ipToCountry:
    logger = log.get_logger("ipToCountry")

    @classmethod
    def mapIpToCountry(cls):
        # Handler for the ip databases
        ipDatabaseHandler = geoipDatabase()
        ipData = countryStatisticsController.getDataNotMapped()
        countryStatsList = []
        usercountryStatsList = []
        savedItems = 0
        for item in ipData:
            # Get the network address
            ipaddr = ipaddress.ip_network(item.ip).network_address
            # Get the country code/name
            countryData = ipDatabaseHandler.getCountryFromIp(
                str(ipaddr), item.ipVersion)
            if countryData[0] is not None:
                countryStatisticsItem = countryStatistics(
                    None, item.accessed, item.sourceIdp, item.service,
                    countryData[0], countryData[1], 1)
                countryStatsList.append(countryStatisticsItem)
                usercountryStatisticsItem = userCountryStatistics(
                    None, item.accessed, item.userid, countryData[0],
                    countryData[1], 1)
                usercountryStatsList.append(usercountryStatisticsItem)
                savedItems += 1
            else:
                cls.logger.warning(
                    "ip {0} not found at database".format(ipaddr))
        # Save data to the tables
        countryStatistics.saveAll(countryStatsList)
        userCountryStatistics.saveAll(usercountryStatsList)
        cls.logger.info("{0} ips mapped to countries".format(savedItems))
# -*- coding: utf-8 -*-
"""
ProxyPoolApi: returns usable proxy IPs. All external callers should use only this interface.
"""
import os
import sys

sys.path.append('..')

from Logger.log import get_logger, get_folder

_logger = get_logger(__name__)
_file_path = get_folder()


class ProxyPool(object):

    def _get_txt_usable(self):
        # Pop the first proxy from the text-file pool and rewrite the pool without it
        path = _file_path + "/usable_proxy_pool.txt"
        new_path = _file_path + "/text.txt"
        if not os.path.exists(path):
            return None
        with open(path, "r") as file:
            proxy = file.readline()
        # Copy every remaining line (skipping the proxy just taken) into a temporary file
        with open(path, "r") as source, open(new_path, "w+") as target:
            target.writelines(x for x in source if x != proxy)
        proxy = proxy.replace("\n", "")
        # Replace the old pool file with the rewritten one
        os.remove(path)
        os.rename(new_path, path)
        return proxy
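# A minimal usage sketch for the text-file pool above, assuming the pool file holds
# one proxy address per line in the folder returned by get_folder(). The seeding
# step and the sample addresses are illustrative only.
if __name__ == "__main__":
    pool_file = _file_path + "/usable_proxy_pool.txt"
    with open(pool_file, "w") as f:
        f.write("10.0.0.1:8080\n10.0.0.2:3128\n")

    pool = ProxyPool()
    # Pops "10.0.0.1:8080"; the file is rewritten with only the second entry left
    print(pool._get_txt_usable())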
class pgConnector:
    logger = log.get_logger("pgConnector")
    conn = None

    def __init__(self, filename="config.py", section="source_database"):
        self.filename = filename
        self.section = section
        self.params = self.config(filename, section)
        if self.conn is None:
            try:
                self.logger.debug(
                    'Connecting to the PostgreSQL database...{0}'.format(
                        section))
                self.conn = psycopg2.connect(**self.params)
            except psycopg2.OperationalError as err:
                self.logger.error(str(err).strip())
                sys.exit(1)

    def config(self, filename='config.py', section='source_database'):
        # Create a parser
        parser = ConfigParser()
        # Read the config file
        parser.read(filename)
        # Get the requested section, defaulting to source_database
        db = {}
        if parser.has_section(section):
            params = parser.items(section)
            for param in params:
                db[param[0]] = param[1]
        else:
            self.logger.error('Section {0} not found in the {1} file'.format(
                section, filename))
            raise Exception('Section {0} not found in the {1} file'.format(
                section, filename))
        return db

    def execute_select(self, query):
        # Re-establish the connection if it has been closed, reusing the same
        # config file and section
        if not hasattr(self.conn, 'cursor'):
            self.__init__(self.filename, self.section)
        # Create a cursor and execute the statement
        cur = self.conn.cursor()
        cur.execute(query)
        return cur.fetchall()

    def close(self):
        self.conn.close()
        self.conn = None
        self.logger.debug('Database connection "{0}" closed'.format(
            self.section))
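# The service classes above refer to connectors such as proxystatisticsPgConnector,
# destinationPgConnector and comanagePgConnector, which are not shown in this listing.
# A plausible minimal sketch is given below, assuming they simply reuse pgConnector
# with a different section of the same INI-style configuration file; the section
# names and settings are assumptions, e.g.:
#
#   [proxystatistics_database]
#   host=localhost
#   dbname=proxystatistics
#   user=metrics
#   password=secret


class proxystatisticsPgConnector(pgConnector):
    def __init__(self):
        super().__init__(filename="config.py",
                         section="proxystatistics_database")


class comanagePgConnector(pgConnector):
    def __init__(self):
        super().__init__(filename="config.py",
                         section="comanage_database")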
class comanageService(object):
    logger = log.get_logger("comanageService")

    @classmethod
    def getRegisteredUsers(cls):
        pgConn = comanagePgConnector()
        result = pgConn.execute_select(
            "SELECT count(*) AS total FROM cm_co_people "
            "WHERE co_person_id IS NULL AND NOT deleted AND status='A'")
        pgConn.close()
        timestamp = int(time.time())
        Metric.save(
            Metric(None, "registered_users", result[0][0],
                   datetime.fromtimestamp(timestamp)))
        cls.logger.info("{0} total registered users".format(result[0][0]))
        return result[0][0]

    @classmethod
    def getUsersMembershipsInCOUs(cls):
        pgConn = comanagePgConnector()
        # Check that the 'metric_users_memberships_in_cous' section exists in the
        # configuration file and that its 'name' attribute is not empty
        if not configParser.hasSection('metric_users_memberships_in_cous') \
                or 'name' not in configParser.getConfig('metric_users_memberships_in_cous').keys() \
                or configParser.getConfig('metric_users_memberships_in_cous')['name'] == '':
            return None
        metric_name = configParser.getConfig(
            'metric_users_memberships_in_cous')['name']
        coSubQuery = ''
        subCouSubQuery = ''
        # If the 'co_id' attribute exists and is not empty
        if 'co_id' in configParser.getConfig('metric_users_memberships_in_cous').keys() and \
                configParser.getConfig('metric_users_memberships_in_cous')['co_id'] != '':
            coSubQuery = ' AND "CoPeople"."co_id"=' + configParser.getConfig(
                'metric_users_memberships_in_cous')['co_id']
        # If the 'regex_cou_name' attribute exists and is not empty
        if 'regex_cou_name' in configParser.getConfig('metric_users_memberships_in_cous').keys() and \
                configParser.getConfig('metric_users_memberships_in_cous')['regex_cou_name'] != '':
            subCouSubQuery = ' "Cous"."name" ~ \'' + configParser.getConfig(
                'metric_users_memberships_in_cous')['regex_cou_name'] + '\' AND '
        result = pgConn.execute_select(
            'SELECT count("CoPersonRole"."co_person_id") AS "CoPersonRole__co_person_id" \
            FROM "public"."cm_co_person_roles" AS "CoPersonRole" \
            INNER JOIN "public"."cm_co_people" AS "CoPeople" \
            ON("CoPeople"."id"="CoPersonRole"."co_person_id" AND "CoPeople"."co_person_id" IS NULL AND \
            "CoPeople"."status"=\'A\' AND "CoPeople"."deleted" IS NOT true ' + coSubQuery + ') \
            INNER JOIN "public"."cm_cous" AS "Cous" \
            ON("Cous"."id"="CoPersonRole"."cou_id" AND "Cous"."cou_id" IS NULL \
            AND "Cous"."deleted" IS NOT true) \
            WHERE ' + subCouSubQuery + '\
            ("CoPersonRole".status=\'A\' OR "CoPersonRole".status=\'GP\') \
            AND "CoPersonRole"."co_person_role_id" IS NULL \
            AND "CoPersonRole"."deleted" IS NOT true \
            ')
        timestamp = int(time.time())
        pgConn.close()
        Metric.save(
            Metric(None, metric_name, result[0][0],
                   datetime.fromtimestamp(timestamp)))
        cls.logger.info("{0} users' memberships in COUs".format(result[0][0]))
        return result[0][0]