def configure_logdna_logging(logdna_key, logdna_app):
    """Build a DEBUG-level LogDNA handler tagged with *logdna_app* and register it.

    logdna_key: LogDNA ingestion key.
    logdna_app: value for the handler's 'app' option (labels log lines).
    """
    # index_meta=True makes the meta objects searchable in LogDNA.
    handler = LogDNAHandler(logdna_key, {'index_meta': True, 'app': logdna_app})
    handler.setLevel(logging.DEBUG)
    add_handler_to_logger(handler)
def messages_preserved_if_excp(self):
    """When the ingestion server fails, the message stays in the handler's buffer."""
    options = {
        'hostname': 'localhost',
        'url': 'http://localhost:8080',
        'ip': '10.0.1.1',
        'mac': 'C0:FF:EE:C0:FF:EE'
    }
    server_address = ('localhost', 8080)
    # Stub server that answers exactly one request with a failure response
    # (failed_RequestHandler is defined elsewhere in this test module).
    httpd = HTTPServer(server_address, failed_RequestHandler)
    failed_case_logger = LogDNAHandler(key, options)
    log.addHandler(failed_case_logger)
    line = "second test. server fails"

    def send_log_to_fail():
        log.info(line)

    # Run the one-shot server and the logging call on parallel daemon threads
    # so the handler's HTTP POST has a live endpoint to hit.
    server_thread = threading.Thread(target=httpd.handle_request)
    logdna_thread = threading.Thread(target=send_log_to_fail)
    server_thread.daemon = True
    logdna_thread.daemon = True
    server_thread.start()
    logdna_thread.start()
    server_thread.join()
    logdna_thread.join()
    # After the failed flush the single line must remain in the retry buffer.
    self.assertEqual(len(failed_case_logger.buf), 1)
def set_options(self, task_keys=None, var_options=None, direct=None):
    """Ansible callback hook: read plugin options and attach a LogDNA handler."""
    super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
    self.conf_key = self.get_option('conf_key')
    self.plugin_ignore_errors = self.get_option('plugin_ignore_errors')
    self.conf_hostname = self.get_option('conf_hostname')
    self.conf_tags = self.get_option('conf_tags')
    self.mac = get_mac()
    self.ip = get_ip()
    # Fall back to the local hostname when the option is not set.
    if self.conf_hostname is None:
        self.conf_hostname = get_hostname()
    # NOTE(review): assumes conf_tags resolves to a comma-separated string;
    # this raises AttributeError if the option can be None — confirm the
    # option declares a string default.
    self.conf_tags = self.conf_tags.split(',')
    if HAS_LOGDNA:
        self.log = logging.getLogger('logdna')
        self.log.setLevel(logging.INFO)
        # index_meta=True makes meta objects searchable in LogDNA.
        self.options = {
            'hostname': self.conf_hostname,
            'mac': self.mac,
            'index_meta': True
        }
        self.log.addHandler(LogDNAHandler(self.conf_key, self.options))
        self.disabled = False
    else:
        # Without the logdna package the callback disables itself and warns.
        self.disabled = True
        self._display.warning(
            'WARNING:\nPlease, install LogDNA Python Package: `pip install logdna`'
        )
def stops_retention_when_buf_is_full(self):
    """Once buf_retention_limit is exceeded, newer lines are dropped, not buffered."""
    port = get_port()
    options = {
        'hostname': 'localhost',
        'url': 'http://localhost:{0}'.format(port),
        'ip': '10.0.1.1',
        'mac': 'C0:FF:EE:C0:FF:EE',
        # Tiny limit so the second (longer) line overflows the buffer.
        'buf_retention_limit': 50,
        # BUG FIX: this option was misspelled 'equest_timeout', so the handler
        # silently ignored it and used its default timeout instead.
        'request_timeout': 10,
        'flush_interval': 1,
        'retry_interval_secs': 1
    }
    handler = LogDNAHandler(LOGDNA_API_KEY, options)
    logger.addHandler(handler)
    line = "when buffer grows bigger than we want"
    lineTwo = "when buffer grows bigger than we want. And more and more"
    # start_server/info (test helpers) run the failing stub server and the two
    # logging calls on background threads.
    server_thread = start_server(port, FailedRequestHandler)
    logdna_thread = info(line, lineTwo)
    server_thread.join()
    logdna_thread.join()
    # Only the first line fits within the retention limit; the overflowing
    # second line must not be retained.
    self.assertEqual(len(handler.buf), 1)
    self.assertNotEqual(handler.buf[0]['line'], lineTwo)
    logger.removeHandler(handler)
def server_recieves_messages(self):
    """A logged line reaches the stub ingestion server exactly once."""
    options = {
        'hostname': 'localhost',
        'url': 'http://localhost:8081',
        'ip': '10.0.1.1',
        'mac': 'C0:FF:EE:C0:FF:EE'
    }
    server_address = ('localhost', 8081)
    # One-shot stub server; successful_RequestHandler is defined elsewhere in
    # this test module and presumably appends received lines to expectedLines.
    httpd = HTTPServer(server_address, successful_RequestHandler)
    test = LogDNAHandler(key, options)
    log.addHandler(test)
    line = "python python python"

    def send_log():
        log.info(line)

    # Serve the single request and emit the log line on parallel daemon threads.
    server_thread = threading.Thread(target=httpd.handle_request)
    logdna_thread = threading.Thread(target=send_log)
    server_thread.daemon = True
    logdna_thread.daemon = True
    server_thread.start()
    logdna_thread.start()
    server_thread.join()
    logdna_thread.join()
    # Exactly one line delivered, and it is the one we sent.
    self.assertEqual(len(expectedLines), 1)
    self.assertIn(line, expectedLines)
def __init__(self, options: Dict[str, Any], instance: str):
    """Wrap a LogDNAHandler attached to the root logger.

    options: must carry the LogDNA ingestion key under 'token'; all other
        incoming keys are deliberately discarded.
    instance: used as the handler's 'app' label.
    """
    key = options['token']
    # Rebind *options* to a fixed handler configuration — only 'token' from
    # the caller's dict is used.
    options = {
        "app": instance,
        "hostname": get_device_id(),
        "include_standard_meta": True
    }
    self.hdlr = LogDNAHandler(key, options)
    self.meta = {}
    # patch_emit is a sibling method; presumably customizes the handler's
    # emit — TODO confirm against its definition.
    self.patch_emit(self.hdlr)
    # Attach to the root logger so every propagating logger ships to LogDNA.
    logging.getLogger('').addHandler(self.hdlr)
def log(data, handler_name):
    """Ship *data* to LogDNA at 'warn' level, tagged with the originating handler.

    data: the message to log.
    handler_name: stored in the searchable meta under 'handler_name'.
    """
    log = logging.getLogger('logdna')
    log.setLevel(logging.INFO)
    # BUG FIX: previously a fresh LogDNAHandler was attached on every call, so
    # the n-th call shipped each message n times. Attach the handler only once.
    if not log.handlers:
        # index_meta defaults to False; True makes meta objects searchable.
        options = {'hostname': 'Leetcode_API', 'index_meta': True}
        log.addHandler(LogDNAHandler(INGESTION_KEY, options))
    meta = {"handler_name": handler_name}
    # LogDNAHandler accepts a per-call options dict as the second positional
    # argument ('level' overrides the record level, 'meta' attaches metadata).
    opts = {'level': 'warn', 'meta': meta}
    log.info(data, opts)
def get_logger(key, ingestion_endpoint):
    """add the logdna handler and a stream handler that prints stuff to stderr"""
    print(f"ingestion_endpoint: {ingestion_endpoint}")
    # print(f"key: {key}")
    root = logging.getLogger()
    root.setLevel(logging.INFO)

    # Console output goes to stdout with the project's formatter.
    console = logging.StreamHandler(sys.stdout)
    console.setFormatter(FormatterArgs())
    root.addHandler(console)

    # Ship the same records to LogDNA as well.
    dna_options = {
        'hostname': 'flowfunc',
        'ip': '10.0.1.1',
        'mac': 'C0:FF:EE:C0:FF:EE',
        'index_meta': True,  # Defaults to False; when True meta objects are searchable
        'url': f'{ingestion_endpoint}/logs/ingest',
    }
    root.addHandler(LogDNAHandler(key, dna_options))
    return root
def messages_preserved_if_excp(self):
    """When the ingestion server fails, the message stays in the handler's buffer."""
    port = get_port()
    options = {
        'hostname': 'localhost',
        'url': 'http://localhost:{0}'.format(port),
        'ip': '10.0.1.1',
        'mac': 'C0:FF:EE:C0:FF:EE'
    }
    handler = LogDNAHandler(LOGDNA_API_KEY, options)
    logger.addHandler(handler)
    line = "second test. server fails"
    # Test helpers: start_server runs the failing stub on a thread, info()
    # emits the line on a thread; both return the thread objects.
    server_thread = start_server(port, FailedRequestHandler)
    logdna_thread = info(line)
    server_thread.join()
    logdna_thread.join()
    # After the failed flush the single line must remain in the retry buffer.
    self.assertEqual(len(handler.buf), 1)
    logger.removeHandler(handler)
def server_recieves_messages(self):
    """A logged line reaches the stub ingestion server exactly once."""
    port = get_port()
    options = {
        'hostname': 'localhost',
        'url': 'http://localhost:{0}'.format(port),
        'ip': '10.0.1.1',
        'mac': 'C0:FF:EE:C0:FF:EE'
    }
    handler = LogDNAHandler(LOGDNA_API_KEY, options)
    logger.addHandler(handler)
    line = "python python python"
    # Test helpers: start_server runs the succeeding stub on a thread, info()
    # emits the line on a thread; both return the thread objects.
    server_thread = start_server(port, SuccessfulRequestHandler)
    logdna_thread = info(line)
    server_thread.join()
    logdna_thread.join()
    # expectedLines is filled by SuccessfulRequestHandler — exactly one
    # delivery, and it is the line we sent.
    self.assertEqual(len(expectedLines), 1)
    self.assertIn(line, expectedLines)
    logger.removeHandler(handler)
def stops_retention_when_buf_is_full(self):
    """Once buf_retention_limit is exceeded, newer lines are dropped, not buffered."""
    options = {
        'hostname': 'localhost',
        'url': 'http://localhost:1337',
        'ip': '10.0.1.1',
        'mac': 'C0:FF:EE:C0:FF:EE',
        # Tiny limit so the second (longer) line overflows the buffer.
        'buf_retention_limit': 50,
        # BUG FIX: this option was misspelled 'equest_timeout', so the handler
        # silently ignored it and used its default timeout instead.
        'request_timeout': 10,
        'flush_interval': 1,
        'retry_interval_secs': 1
    }
    server_address = ('localhost', 1337)
    # One-shot stub server that answers with a failure response.
    httpd = HTTPServer(server_address, failed_RequestHandler)
    failed_case_logger = LogDNAHandler(key, options)
    log.addHandler(failed_case_logger)
    line = "when buffer grows bigger than we want"
    lineTwo = "when buffer grows bigger than we want. And more and more"

    def send_log_to_fail():
        log.info(line)
        log.info(lineTwo)

    # Serve the request and emit both lines on parallel daemon threads.
    server_thread = threading.Thread(target=httpd.handle_request)
    logdna_thread = threading.Thread(target=send_log_to_fail)
    server_thread.daemon = True
    logdna_thread.daemon = True
    server_thread.start()
    logdna_thread.start()
    server_thread.join()
    logdna_thread.join()
    # Only the first line fits within the retention limit; the overflowing
    # second line must not be retained.
    self.assertEqual(len(failed_case_logger.buf), 1)
    self.assertNotEqual(failed_case_logger.buf[0]['line'], lineTwo)
def get_mac_address():
    """Format this host's MAC (presumably from uuid.getnode-style get_mac) as 'aa:bb:cc:...'."""
    # Pull pairs of hex digits from a single iterator: the comprehension's `i`
    # takes digits 0,2,4,... while next(h) consumes 1,3,5,... zfill(12) pads
    # to a full 6-byte address when the leading byte is zero.
    h = iter(hex(get_mac())[2:].zfill(12))
    return ":".join(i + next(h) for i in h)


# Module-level LogDNA wiring: one handler on a logger named after this module.
logdna_options = {
    "app": __name__,
    "index_meta": True,  # make meta objects searchable in LogDNA
    "hostname": node(),
    "ip": get_my_ip_address(),
    "mac": get_mac_address(),
}
logdna_handler = LogDNAHandler(getenv("LOGDNA_API_KEY"), options=logdna_options)
logdna = logging.getLogger(__name__)
logdna.setLevel(logging.INFO)
logdna.addHandler(logdna_handler)
app = Sanic(__name__)


@app.middleware
def log_request(request: Request):
    # Log every incoming request URL to LogDNA.
    logdna.info("I was Here with a new Request to URL: {}".format(request.url))


# NOTE(review): handler body continues beyond this excerpt.
@app.route("/")
def default(request):
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import logging
from logdna import LogDNAHandler
from hidden import SECRET_KEY

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

logger = logging.getLogger("Blog Log")
key = os.environ.get("LOGDNA")  # ingestion key; None when the env var is unset
dnaHandler = LogDNAHandler(key)
logger.addHandler(dnaHandler)
# NOTE(review): this call goes through the root logger, not the "Blog Log"
# logger configured above, so the LogDNA handler does not receive it — confirm
# whether logger.info(...) was intended.
logging.info("Logger configured", {"app": "Django Blog"})

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = SECRET_KEY

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []
# Module-level fixture: a 'logdna' logger with one LogDNA handler attached.
import logging
import os
from logdna import LogDNAHandler

# Ingestion key must be present in the environment (raises KeyError otherwise).
key = os.environ['LOGDNA_KEY']

log = logging.getLogger('logdna')
log.setLevel(logging.INFO)

options = {
    'hostname': 'pytest',
    'ip': '10.0.1.1',
    'mac': 'C0:FF:EE:C0:FF:EE'
}
# Defaults to False; when True meta objects are searchable
options['index_meta'] = True

test = LogDNAHandler(key, options)
log.addHandler(test)
############## logging ################### # importing module import logging from logdna import LogDNAHandler options = {'hostname': 'desktop', 'ip': '10.0.0.5', 'mac': 'C0:FF:EE:C0:FF:EE'} options['index_meta'] = True # Creating an object logger = logging.getLogger('logdna') logger.setLevel(logging.INFO) test = LogDNAHandler(settings.LOGDNA_INGEST_KEY, options) # #print settings.LOGDNA_INGEST_KEY # #print test # log.addHandler(test) # log.warn("Warning message", {'app': 'bloop'}) # log.info("Info message") # Setting the threshold of logger to DEBUG logger.setLevel(logging.DEBUG) ########## # Create your views here. def allview(request):
from short_text_screen.handlers import short_text_conversation_handler
from file_screen.handlers import file_conversation_handler
from help_screen.handlers import help_command_handler, help_handler
from telegram.ext import Updater
from decouple import config
import sentry_sdk
import logging
from logdna import LogDNAHandler
from sentry_sdk.integrations.logging import LoggingIntegration

# Console logging at DEBUG with timestamps.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(message)s')

# LogDNA handler on the root logger; hostname comes from env (default 'localhost').
logDNAoptions = dict()
logDNAoptions['index_meta'] = True  # make meta objects searchable
logDNAoptions['hostname'] = config('HOSTNAME', default='localhost')
logDNAhandler = LogDNAHandler(config('LOGDNA_KEY'), options=logDNAoptions)
logger = logging.getLogger()
logger.addHandler(logDNAhandler)

# Sentry: DEBUG records become breadcrumbs, ERROR and above become events.
sentry_logging = LoggingIntegration(level=logging.DEBUG,
                                    event_level=logging.ERROR)
sentry_sdk.init(config('SENTRY_URL'),
                traces_sample_rate=1.0,
                integrations=[sentry_logging])


class AnalyzeBot:
    # NOTE(review): class continues beyond this excerpt.
    def __init__(self, token: str):
        self.__api_token = token
class BooksSpider(scrapy.Spider):
    """Scrapy spider crawling books and chapters from 69shu.com into Redis-deduped items."""

    name = 'Books'
    allowed_domains = ['69shu.com']
    # Redis db=1 holds the 'books' hash used for chapter de-duplication.
    db = redis.Redis(db=1)
    # Ship spider logs to LogDNA, tagged with the spider name.
    logdna_fn = LogDNAHandler(get_project_settings()['LOGDNA_KEY'], {
        "index_meta": True,
        "tags": [name]
    })
    logging.getLogger().addHandler(logdna_fn)
    custom_settings = {
        'ITEM_PIPELINES': {
            'Fiction.pipelines.FictionPipelineBooks': 100,
        }
    }

    # Book index: start from page 1 of the "all visited" listing.
    def start_requests(self):
        yield scrapy.Request('https://www.69shu.com/allvisit_1.htm',
                             callback=self.start_requests_list)

    # Book catalogue: read the max page number, then fan out to every listing page.
    def start_requests_list(self, response):
        max_page = int(
            response.xpath(
                '/html/body/div[2]/div[3]/div/div[2]/div/div/div/a[14]/text()'
            ).extract()[0])
        book_urls = response.xpath(
            '//*[@id="content"]/div/div[2]/div/ul/li/span[3]/a/@href').extract(
            )
        for book_url in book_urls:
            yield Request(book_url, callback=self.parse_read)
        for num in range(1, max_page + 1):
            yield scrapy.Request('https://www.69shu.com/allvisit_' + str(num) +
                                 '.htm',
                                 callback=self.parse)

    # Collect the URL of every book on a listing page.
    def parse(self, response):
        book_urls = response.xpath(
            '//*[@id="content"]/div/div[2]/div/ul/li/span[3]/a/@href').extract(
            )
        for book_url in book_urls:
            # Skip the /230.htm entry.
            if '/230.htm' not in book_url:
                yield Request(book_url, callback=self.parse_read)

    # Follow the "read now" button into the chapter list.
    def parse_read(self, response):
        read_url_slice = response.xpath('//html/body/div[2]/div[4]/div[2]')
        read_url = read_url_slice.xpath('a/@href').extract()[0]
        yield Request(read_url, callback=self.parse_chapter)

    # Collect the chapter URLs of a book.
    def parse_chapter(self, response):
        chapter_urls = response.xpath(
            '/html/body/div[2]/div[4]/ul/li/a/@href').extract()
        for chapter_url in chapter_urls:
            if "newmessage" not in chapter_url:
                # De-dup happens here (lookup only — writes are done in the pipeline).
                uuid = chapter_url.split('/')[4] + '-' + chapter_url.split(
                    '/')[5]
                if self.db.hexists('books', uuid) == False:
                    yield Request(chapter_url, callback=self.parse_content)

    # Extract the book title plus chapter name and content.
    def parse_content(self, response):
        try:
            # book title
            title = response.xpath(
                '/html/body/div[2]/div[2]/div[1]/a[3]/text()').extract_first()
            # chapter name
            chapter_name = response.xpath(
                '/html/body/div[2]/table/tbody/tr/td/h1/text()').extract_first(
                )
            # chapter content
            chapter_content = response.xpath(
                '/html/body/div[2]/table/tbody/tr/td/div[1]/text()').extract()
            chapter_content_full = ''
            item = BooksItem()
            item['id_primary'] = response.url.split('/')[4]
            item['id_subset'] = response.url.split('/')[5]
            item['title'] = title
            item['chapter_name'] = chapter_name
            item['chapter_content'] = chapter_content_full.join(
                chapter_content)
            yield item
        except:
            # Better to handle failures case by case — usually some content is
            # malformed and part of it could be fixed manually later (not implemented).
            pass
hostname = config_json['hostname'] endpoint_url = config_json['dynamodb_endpoint'] users_table = config_json['users_table'] except Exception as e: print(e) exit() # Set up logging to LogDNA log = logging.getLogger('logdna') log.setLevel(logging.INFO) options = { 'app': 'Discord Gacha Reset', 'hostname': hostname, 'index_meta': True } handler = LogDNAHandler(ingestion_key, options) log.addHandler(handler) # Also log to stdout log.addHandler(logging.StreamHandler()) # Reset all last_pack_opened to the specified time new_time = sys.argv[2] + ' ' + sys.argv[3] try: dynamodb = boto3.resource("dynamodb", region_name="us-west-1", endpoint_url=endpoint_url) table = dynamodb.Table(users_table) # Get all users
import requests
import os
import logging
from logdna import LogDNAHandler

# NOTE(review): json is used below but not imported in this excerpt — presumably
# imported earlier in the file; confirm.
with open('config.json') as config_file:
    config = json.load(config_file)

# LogDNA logger for the bot; meta carries the app name and is searchable.
log = logging.getLogger('logdna')
log.setLevel(logging.INFO)
options = {'hostname': 'Zuydbot', 'index_meta': True, 'meta': {
    'app': 'API'
}}
handler = LogDNAHandler(config['logging']['key'], options)
log.addHandler(handler)


class School(commands.Cog):
    """Integrate Untis and Moodle with Discord"""

    def __init__(self, bot):
        self.bot = bot
        self.api = APIConnection()
        self.date = None
        self.time = None
        # Kick off the periodic update/check loops on the bot's event loop.
        self.bot.loop.create_task(self.update())
        self.bot.loop.create_task(self.check())

    # NOTE(review): method continues beyond this excerpt.
    def get_time(self):
        now = datetime.datetime.now()
from flask import Flask, request, Response #apiKey = '6161d5eed2ebb9cdc8add24e6e9b0c98' apiKey = secrets.logdna_api_key logger = logging.getLogger('logdna') logger.setLevel(logging.INFO) options = { 'hostname': 'aixbot.pp.ua', 'ip': '100.25.183.46', 'mac': '06:e6:89:04:7f:97' #'url':'https://logs.eu-gb.logging.cloud.ibm.com/logs/ingest' } options['index_meta'] = True handler = LogDNAHandler(apiKey, options) logger.addHandler(handler) #logger = logging.getLogger() #logger.setLevel(logging.DEBUG) #handler = logging.StreamHandler() #formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') #handler.setFormatter(formatter) #logger.addHandler(handler) app = Flask(__name__) userrole = [0] #0 - guest, 1 - student, 2 - teacher securityCode = [0] from queue import Queue rootId = "x9sFVcYKD2F74K+E1ruH7w=="
from logging import getLogger, INFO from logdna import LogDNAHandler from livecheck import * from botprefix import * from keep_alive import keep_alive from threading import Thread key = 'myapiKey' logs = getLogger('logdna') logs.setLevel(INFO) options = { 'hostname': 'Twitch_Log', 'index_meta': True, 'include_standard_meta': False } logInput = LogDNAHandler(key, options) logs.addHandler(logInput) temp3 = False hasBeenLive = False startTimerP = time() promptStop = 600 toCommit = [] chanID = { 'forsen': 22484632, 'lirik': 23161357, 'drdisrespect': 17337557, 'mizkif': 94753024, 'trainwreckstv': 71190292, 'xqcow': 71092938,
parser.add_argument("--logdna-url", "-u", dest="logdna_url", default="https://logs.logdna.com/logs/ingest", help="URL for Log DNA Loghost url e.g. https://logs.logdna.com/logs/ingest'") args = parser.parse_args() log = logging.getLogger('logdna') log.setLevel(logging.INFO) # Probably change these options = { 'hostname': 'pytest', 'ip': '10.0.1.1', 'mac': 'C0:FF:EE:C0:FF:EE', 'url': args.logdna_url } options['index_meta'] = True test = LogDNAHandler(args.logdna_key, options) log.addHandler(test) c = falco.Client( endpoint="unix:///var/run/falco.sock", output_format=args.output_format, ) for event in c.sub(): print(event) event_options = { "level": event.priority.value, "app": "falco", "meta": json.loads(event.to_json())
'username': '******', 'password': '******', } aws_secrets = aws.get_secret(app.config['aws_secret_name']) for key, value in aws_secrets.items(): if key in secret_mapping: app.config[secret_mapping[key]][key] = value # if enabled logs will be sent to LogDNA via ingest api. if 'LOGDNA_INGESTION_KEY' in os.environ: import logging from logdna import LogDNAHandler logdna_handler = LogDNAHandler( os.getenv('LOGDNA_INGESTION_KEY'), {'app': 'Nebula', 'include_standard_meta': True}) log_level = logging.DEBUG if 'DEBUG' in os.environ else logging.INFO logdna_handler.setLevel(log_level) app.logger.addHandler(logdna_handler) app.logger.info('added logdna handler..') if 'general' not in app.config: app.config['general'] = { 'filecache': '/tmp/nebula', 'secret_key': 'changeme' } if 'site_name' not in app.config['general']: app.config['general']['site_name'] = 'nebula'
from ldr import LDR
from redis_client import RedisCache
import delegator

randomFlag = True
app = Flask(__name__)

# Initialize logdna
log = logging.getLogger('logdna')
log.setLevel(logging.INFO)
options = {
    'hostname': 'roomAutomation',
}
# Defaults to False; when True meta objects are searchable
options['index_meta'] = True
test = LogDNAHandler(os.environ.get("LOGDNA_KEY"), options)
log.addHandler(test)

# Initialize sentry
sentry_sdk.init(dsn=os.environ.get('SENTRY_DSN'),
                integrations=[FlaskIntegration()])

redis_connection = RedisCache()
# rbgObject = rgbStrip.rgb(GPIO)

# Default LED states: RGB strips start blue-ish; monitor edges at blue 125.
rgbSmall = {"r": 0, "g": 0, "b": 1}
rgbLarge = {"r": 0, "g": 0, "b": 1}
monitorTop = [0, 0, 125]
monitorBottom = [0, 0, 125]
monitorLeft = [0, 0, 125]
monitorRight = [0, 0, 125]
environ['SCRIPT_NAME'] = self.prefix return self.app(environ, start_response) else: start_response('404', [('Content-Type', 'text/plain')]) return ["This url does not belong to the app.".encode()] # Make the WSGI interface available at the top level so wfastcgi can get it. # wsgi_app = app.wsgi_app app.wsgi_app = PrefixMiddleware(app.wsgi_app, prefix='/api') ingestionKey = 'b7c813e09f26938d8bcd7c4f38be2a40' logdna_options = {'app': 'ibmcos', 'level': 'Debug', 'index_meta': True} logging.basicConfig(handlers=[ logging.FileHandler(filename='log.log', encoding='utf-8', mode='a+'), LogDNAHandler(ingestionKey, logdna_options) ], level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', datefmt='%Y%m%d.%H%M%S') # Create resource def fn_cos_create_resource(endpoint, apikey, instanceid): return ibm_boto3.resource("s3", ibm_api_key_id=apikey, ibm_service_instance_id=instanceid, config=Config(signature_version="oauth"), endpoint_url=endpoint)
table.put_item(Item={'rarity': rarity, 'cards': cards}) print('Done') info = {'table_name': table_name, 'rarity': rarity, 'channel': channel} log.info('Added cards to database', {'meta': info}) try: if len(sys.argv) != 5: print('USAGE: channel_scraper.py path/to/config channel_id set rarity') exit() config = open(sys.argv[1]) config_json = json.load(config) token = config_json['token'] key = config_json['ingestion_key'] # Set up logging log = logging.getLogger('logdna') log.setLevel(logging.INFO) options = {'app': 'Discord Gacha Channel Scraper', 'hostname': 'Local Dev'} options['index_meta'] = True handler = LogDNAHandler(key, options) log.addHandler(handler) channel = str(sys.argv[2]) table_name = str(sys.argv[3]) rarity = str(sys.argv[4]) client.run(token) except Exception as e: print(e)
log = logging.getLogger('logdna') log.setLevel(logging.INFO) timestamp = [] def format_number(number: str): z = phonenumbers.parse("+" + number, None) return (phonenumbers.format_number( z, phonenumbers.PhoneNumberFormat.INTERNATIONAL)) options = { 'hostname': 'SMSService', 'ip': '10.0.1.1', 'mac': 'C0:FF:EE:C0:FF:EE' } options['index_meta'] = True test = LogDNAHandler(logkey, options) log.addHandler(test) print("Starting up...") def isInt(s): try: int(s) return True except ValueError: return False def genkey(length: int): keys = redis.lrange("sms_keys", 0, -1) x = ''.join( random.choice(string.ascii_uppercase + string.ascii_lowercase +
from .commands.career_stats import CareerStats
from .commands.save_id import SaveId
from .commands.leaderboard import Leaderboard
from .commands.iratings import Iratings
from .commands.all_series import AllSeries
from .commands.current_series import CurrentSeries
from .commands.set_fav_series import SetFavSeries
from .commands.add_fav_series import AddFavSeries
from .commands.remove_fav_series import RemoveFavSeries

dotenv.load_dotenv()

# LogDNA at DEBUG; the hostname label comes from LOG_LOCATION.
logdna_key = os.getenv("LOGDNA_INGESTION_KEY")
log = logging.getLogger('logdna')
log.setLevel(logging.DEBUG)
handler = LogDNAHandler(logdna_key, {'hostname': os.getenv("LOG_LOCATION")})
log.addHandler(handler)

username = '******'
password = '******'


class Iracing(commands.Cog):
    """A cog that can give iRacing data about users"""

    # NOTE(review): class (and this __init__) continues beyond this excerpt.
    def __init__(self):
        super().__init__()
        self.pyracing = Client(username, password)
        self.all_series = []
        # Command helpers share the pyracing client and the LogDNA logger.
        self.update_user = UpdateUser(self.pyracing, log)
        self.updater = Update(self.pyracing, log, self.update_user)
        self.recent_races = RecentRaces(self.pyracing, log)
        self.last_series = LastSeries(self.pyracing, log)
CORS(app) app.config.from_pyfile('config.py') manager = Manager(app) logdna_key = app.config['LOGDNA_KEY'] log = logging.getLogger('logdna') log.setLevel(logging.INFO) options = {'hostname': 'dapp', 'ip': '127.0.0.1', 'index_meta': True} console = logging.StreamHandler() root = logging.getLogger('') root.addHandler(console) if logdna_key != "": root.addHandler(LogDNAHandler(logdna_key, options)) def download(url): h = {"Accept-Encoding": "identity"} r = requests.get(url, stream=True, verify=False, headers=h) try: r.raise_for_status() except requests.exceptions.HTTPError as e: log.exception("IPFS Server Error! url:{0}, exception:{1}".format( url, str(e))) return "IPFS Server Error! \n", 503 if "content-type" in r.headers: return send_file(r.raw, r.headers["content-type"])
import logging from logdna import LogDNAHandler app = Flask(__name__) app.config.from_pyfile('settings.cfg') # Flask Bootstrap extension Bootstrap(app) # Set-up LogDNA for logging logdnakey = os.environ['LOGDNA_KEY'] logger = logging.getLogger('logdna') # Disabled the level so that we can have more verbose logging send to LogDNA # logger.setLevel(logging.INFO) options = {} logdna = LogDNAHandler(logdnakey, options) root = logging.getLogger() root.addHandler(logdna) @app.route('/') def hello_world(): return 'Hello, World!' @app.route('/accounts') def accounts_search(): """Gets an account's Recurly ID based on the provided 'account_code' """ client = recurly.Client(os.environ['RECURLY_KEY']) recurly_email = request.args.get('email') recurly_code = request.args.get('code') account_list = [] try: