def log_all(msg):
    """Emit *msg* at every standard severity level, repeated eleven times.

    Useful as a log-volume / formatting smoke helper: each pass through the
    loop writes the same message via debug, info, warning, error and critical.
    """
    level_emitters = (
        logger.debug,
        logger.info,
        logger.warning,
        logger.error,
        logger.critical,
    )
    for _ in range(11):
        for emit in level_emitters:
            emit(msg)
def get(cls, urls: list):
    """Fetch each URL in *urls* in order, expanding templated parameters.

    Each entry of *urls* is expected to be a dict with at least the keys
    'url' and 'params' (a mapping of request parameters).  A parameter value
    matching ``cls.RE_PARSE_PARAM`` is treated as a directive; for the
    ``eval``/``json`` combination, the *previous* response body is parsed as
    JSON (bound to the local name ``json``) and the directive's content is
    evaluated against it, so later requests can reference data returned by
    earlier ones.  Returns the last response obtained, or None; stops early
    on the first falsy response.

    Raises:
        TypeError: if *urls* is not a list.
    """
    if not isinstance(urls, list):
        raise TypeError('urls is not of type list')
    response = None
    for url in urls:
        params = url['params']
        for param, value in params.items():
            # RE_PARSE_PARAM presumably exposes named groups 'action',
            # 'attributes' and 'content' -- TODO confirm against the
            # class-level pattern definition (not visible in this chunk).
            match = cls.RE_PARSE_PARAM.match(str(value))
            if match:
                action = match.group('action')
                attributes = match.group('attributes')
                content = match.group('content')
                log.debug(
                    f"params: action={action}, attributes={attributes}, content={content}"
                )
                if action == 'eval':
                    if attributes == 'json':
                        # Only meaningful once a prior request has produced
                        # a response to evaluate against.
                        if response:
                            try:
                                log.debug(f"response={response}")
                                # NOTE: deliberately shadows any module-level
                                # 'json' name so the eval'd expression can
                                # reference the parsed body as `json`.
                                json = loads(response)
                                log.debug(f"json={json}")
                                # SECURITY: eval() on content extracted from a
                                # URL/param template.  Safe only if templates
                                # are trusted configuration, never
                                # user-supplied input -- review callers.
                                content = eval(content)
                            except Exception as ex:
                                # Best-effort: a failed directive degrades to
                                # an empty parameter value rather than
                                # aborting the whole request chain.
                                log.warning(
                                    f"Cannot evaluate {content}: {ex}")
                                traceback.print_exc(file=stdout)
                                content = ''
                # Replace the directive with its resolved value (raw group
                # content when no eval was performed).
                params[param] = content
        response = Requester.get(url['url'], **params)
        # A falsy response aborts the chain; subsequent URLs are skipped.
        if not response:
            break
    return response
def test_non_str_logging(self):
    """Non-string payloads must be stringified and logged without raising."""
    logger.info(10)
    self.assertIn("10", self.last_line())
    # Those should not throw any error.
    logger.debug([10, 20, 30])
    logger.critical({})
    # Set literal instead of set([...]) -- same value, idiomatic (C405).
    logger.warning({-1, 4})
def test_message_is_logged(self):
    """The message text must appear in the last output line for every level."""
    cases = [
        (logger.debug, "message 1"),
        (logger.info, "message 2"),
        (logger.warning, "message 3"),
        (logger.error, "message 4"),
        (logger.critical, "message 5"),
    ]
    for emit, text in cases:
        emit(text)
        self.assertIn(text, self.last_line())
def test_filepath_is_logged(self):
    """Every level's output line must include the emitting file's name."""
    cases = [
        (logger.debug, "message 1"),
        (logger.info, "message 2"),
        (logger.warning, "message 3"),
        (logger.error, "message 4"),
        (logger.critical, "message 5"),
    ]
    for emit, text in cases:
        emit(text)
        self.assertIn("test_colored_logger.py", self.last_line())
def test_level_is_logged(self):
    """Every output line must carry the matching level name in upper case."""
    cases = [
        (logger.debug, "message 1", "DEBUG"),
        (logger.info, "message 2", "INFO"),
        (logger.warning, "message 3", "WARNING"),
        (logger.error, "message 4", "ERROR"),
        (logger.critical, "message 5", "CRITICAL"),
    ]
    for emit, text, level_name in cases:
        emit(text)
        self.assertIn(level_name, self.last_line())
def highlight(rec: dict, fields: list, terms: list):
    """Wrap whole-word occurrences of each term in ``<em>`` markup.

    For every *field* present in *rec*, adds a ``<field>_highlighted`` key
    whose value is the field text with each of *terms* (matched as a whole
    word) wrapped in ``<em class='highlight'>...</em>``.  Fields listed but
    absent from *rec* are logged as a warning.  Mutates and returns *rec*;
    returns *rec* unchanged (possibly None) when any argument is None.
    """
    if rec is not None and fields is not None and terms is not None:
        for field in fields:
            if field in rec:
                rec[field + '_highlighted'] = rec[field]
                for term in terms:
                    # FIX: terms are literal search strings, not regexes.
                    # Previously the raw term was interpolated into the
                    # pattern, so metacharacters ('+', '(', '*', ...) broke
                    # or silently corrupted matching.
                    pattern = rf"\b{re.escape(term)}\b"
                    rec[field + '_highlighted'] = re.sub(
                        pattern,
                        # Callable replacement: avoids backslash / group-ref
                        # interpretation of the term inside the replacement.
                        lambda m: f"<em class='highlight'>{m.group(0)}</em>",
                        rec[field + '_highlighted'])
            else:
                log.warning(
                    f"Field '{field}' is not in highlighting record.")
    return rec
def log_all():
    """Write one sample message at each of the five standard log levels."""
    for emit, text in (
        (logger.debug, "message 1"),
        (logger.info, "message 2"),
        (logger.warning, "message 3"),
        (logger.error, "message 4"),
        (logger.critical, "message 5"),
    ):
        emit(text)
def run(self):
    """Consume the MongoDB change stream and queue change records.

    Watches the configured collections for insert/update events, normalises
    each change document and hands the ones whose fields pass ``__match``
    to ``__add``.  Resumes after transient errors using the stream's resume
    token.  Returns immediately when no ``mongo_uri`` is configured.
    Runs until ``self.running`` is cleared by another thread -- presumably
    a stop() method elsewhere in this class; confirm.
    """
    if self.mongo_uri is None:
        return
    db = pymongo.MongoClient(self.mongo_uri).get_database()
    # Aggregation pipeline applied to the change stream: filter to the
    # tracked collections, then flatten the raw change event into the
    # fields this tracker cares about.
    pipeline = [{
        "$match": {
            "ns.coll": {
                "$in": self.collections
            }
        }
    }, {
        "$set": {
            "timestamp": "$clusterTime",
            # CT_USER_FIELD names the fullDocument field holding the acting
            # user -- assumed set in the environment; TODO confirm.
            "user": f"$fullDocument.{environ.get('CT_USER_FIELD')}",
            "db": "$ns.db",
            "coll": "$ns.coll",
            "doc_id": "$fullDocument._id",
            "type": "$operationType",
            "updatedFields": "$updateDescription.updatedFields",
            "removedFields": "$updateDescription.removedFields",
            "fullDocument": "$fullDocument"
        }
    }, {
        "$project": {
            "timestamp": 1,
            "user": 1,
            "db": 1,
            "coll": 1,
            "type": 1,
            "doc_id": 1,
            "updatedFields": 1,
            "removedFields": 1,
            "fullDocument": 1
        }
    }]
    resume_token = None
    self.running = True
    for flusher in self.flushers:
        flusher.start()
    self.__status = 'running'
    while self.running:
        try:
            # 'updateLookup' asks the server to attach the full document to
            # update events; resume_after picks up where the previous stream
            # left off after an error.
            with db.watch(pipeline, 'updateLookup',
                          resume_after=resume_token) as stream:
                if not self.running:
                    # NOTE(review): "Closeing" typo in the log text --
                    # left as-is here since this is a runtime string.
                    log.debug("Closeing stream...")
                    stream.close()
                for change in stream:
                    if not self.running:
                        break
                    createDoc = False
                    ignoredFields = []
                    # General changes: clusterTime is a BSON Timestamp;
                    # convert to an ISO-like string.
                    change['timestamp'] = change[
                        'timestamp'].as_datetime().strftime(
                            '%Y-%m-%dT%H:%M:%S.%f')
                    # Mask the user when the projected field was absent.
                    if 'user' not in change:
                        change['user'] = '******'
                    else:
                        # No-op self-assignment kept for byte-identity.
                        change['user'] = change['user']
                    # Type specific changes
                    if change['type'] == 'insert':
                        # No-op self-assignment kept for byte-identity.
                        change['fullDocument'] = change['fullDocument']
                        createDoc = True
                        if environ.get('CT_DEBUG'):
                            log.debug(
                                "{timestamp}: user={user} db={db} coll={coll} type={type} doc_id={doc_id}"
                                .format(**change))
                    elif change['type'] == 'update':
                        updatedFields = {}
                        removedFields = []
                        # Keep only fields accepted by the (private) __match
                        # filter; flatten nested dict/list values into
                        # dotted keys.
                        for field, value in change[
                                'updatedFields'].items():
                            if self.__match(field):
                                # json_value = json.loads(value)
                                if isinstance(value, (dict, list)):
                                    flat_value = flatten_json(value)
                                    for _field, _value in flat_value.items(
                                    ):
                                        updatedFields[
                                            f"{field}.{_field}"] = _value
                                else:
                                    updatedFields[field] = value
                                createDoc = True
                            else:
                                ignoredFields.append(field)
                        for field in change['removedFields']:
                            if self.__match(field):
                                removedFields.append(field)
                                createDoc = True
                            else:
                                ignoredFields.append(field)
                        change['updatedFields'] = updatedFields
                        change['removedFields'] = removedFields
                        # Updates only record the delta, not the whole doc.
                        del change['fullDocument']
                        if environ.get('CT_DEBUG'):
                            log_msg = "{timestamp}: user={user} db={db} coll={coll} type={type} doc_id={doc_id} updatedFields={updatedFields} removedFields={removedFields}".format(
                                **change)
                            # Truncate very long debug lines.
                            log_msg = (
                                log_msg[:500] + '...'
                            ) if len(log_msg) > 500 else log_msg
                            log.debug(log_msg)
                    # If we need to create a change entry
                    if createDoc:
                        self.__add(change)
                    else:
                        if change['type'] in ['insert', 'update']:
                            log.debug(
                                "Not tracking change for: {timestamp}: user={user} db={db} coll={coll} type={type} doc_id={doc_id} ignoredFields={ignoredFields}"
                                .format(**change,
                                        ignoredFields=ignoredFields))
                        else:
                            log.warning(
                                "Not tracking change for: {0}".format(
                                    change))
                    # Remember progress so a reconnect resumes here.
                    resume_token = stream.resume_token
        except Exception as ex:
            # Broad catch is deliberate: record the error state, log it and
            # loop around to re-open the stream while self.running holds.
            self.__status = 'error'
            log.error(ex)
            traceback.print_exc(file=sys.stdout)
            pass