def __init__(self, directory=None, whitelist=None, blacklist=None):
    self.logger = config.getLogger(__name__)
    if directory:
        self.directory = directory + os.sep
    else:
        self.directory = directory
    self.refresh(whitelist, blacklist)
def __init__(self, type="AZURE"):
    logger = config.getLogger()
    # Initialize either the Azure or the Google version of the recognizer
    if type == "AZURE":
        logger.info("Initializing Azure version...")
        from .azure_raccon_recognizer import AzureRaccoonRecognizer
        self.recognizer = AzureRaccoonRecognizer()
    elif type == "GCP":
        logger.info("Initializing GCP version...")
        from .gcp_raccoon_recognizer import GCPRaccoonRecognizer
        self.recognizer = GCPRaccoonRecognizer()
    else:
        logger.info("Invalid type...")
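# A minimal, self-contained sketch (not from the original source) of the same
# dispatch-by-string pattern used in the constructor above. AzureStub/GCPStub and
# make_recognizer are hypothetical stand-ins for the real recognizer classes.
class AzureStub:
    def recognize(self, image):
        return "azure:" + image

class GCPStub:
    def recognize(self, image):
        return "gcp:" + image

def make_recognizer(type="AZURE"):
    # mirror the if/elif dispatch in the constructor above
    if type == "AZURE":
        return AzureStub()
    elif type == "GCP":
        return GCPStub()
    raise ValueError("Invalid type: {}".format(type))

print(make_recognizer("GCP").recognize("capture.jpg"))  # -> gcp:capture.jpg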
def lambda_handler(event, context):
    ## Create the retVal dict, which will act as the body of the returnResponse function (JSON)
    retVal = {}
    headers = {}

    ## Initialize logger
    logger = config.getLogger()

    ## Initialize DB connection
    conn = getDbConfig(logger)
    if conn is None:
        retVal['message'] = "Whoops, something went wrong at our end! Please try again later!"
        return returnResponse(502, json.dumps(retVal), headers, logger)

    ## Check that the HTTP request is complete
    try:
        qsp = event['queryStringParameters']
        header = {k.lower(): v for k, v in event['headers'].items()}
        httpMethod = str(event['httpMethod'])
    except Exception as e:
        retVal['message'] = "Invalid / Incomplete HTTP Request"
        return returnResponse(400, json.dumps(retVal), headers, logger, conn)

    # Debug the event
    logger.debug('Event = {}'.format(json.dumps(event)))

    ## Check if a publisher key has been passed in as part of the header
    publisherId = verifyPublisher(conn, header, logger)
    if publisherId is None:
        retVal['message'] = "Error: PublisherKey invalid or missing"
        return returnResponse(403, json.dumps(retVal), headers, logger, conn)

    ## Validate the user based on the Infomo user key
    userId = verifyUsers(conn, header, logger)

    if httpMethod == 'POST':
        return post(conn, header, event['body'], publisherId, userId, headers, retVal, logger)
    else:
        retVal = errString("Invalid Request!", retVal)
        return returnResponse(403, json.dumps(retVal), headers, logger, conn)
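# Hedged local smoke test for the handler above (not part of the original file).
# API Gateway's Lambda proxy integration delivers events with these top-level keys;
# getDbConfig/verifyPublisher/verifyUsers/post would need to be stubbed out, and the
# 'Publisher-Key' header name is an assumption.
if __name__ == '__main__':
    fake_event = {
        'queryStringParameters': {},
        'headers': {'Publisher-Key': 'example-key'},
        'httpMethod': 'POST',
        'body': '{}',
    }
    print(lambda_handler(fake_event, None))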
__date__ = "2016-08-18" import yaml import sys import json import os import time import math from lib2to3.pgen2 import driver import requests from bs4 import BeautifulSoup from phantomJsUtils import PhantomJsUtils from selenium import webdriver import config logger = config.getLogger("test") class Scraping: def scraping_requests(self, url): """ requests利用してスクレイピング """ timeStart = time.clock(); # get a HTML response response = requests.get(url) html = response.text.encode(response.encoding) # prevent encoding errors # parse the response timeTotal = round(time.clock() - timeStart, 3); logger.debug("scraping {0} {1}s".format( url, timeTotal)) return html
import config

moduleLogger = config.getLogger('services')


class Backend(object):
    def __init__(self, host, priority=0, **kwargs):
        self.host = host
        self.priority = priority

    def __eq__(self, other):
        try:
            if self.host == other.host:
                return True
        except AttributeError:
            pass
        return False

    def __hash__(self):
        # Hash on the same field used by __eq__ so that equal backends
        # collapse to a single entry when stored in a set.
        return hash(type(self)) ^ hash(self.host)


class Service(object):
    def __init__(self, name):
        self.name = name
        self.backends = set()
        self.added = []
        self.removed = []
        self.last_pushed = 0
        self.changed = True
        self.logger = moduleLogger.getChild(name)
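# Hedged sketch (not in the original file, and it assumes the local config module
# is importable): because __eq__ and __hash__ key on host, re-registering the same
# host collapses to a single entry in Service.backends.
if __name__ == '__main__':
    svc = Service('web')
    svc.backends.add(Backend('10.0.0.1', priority=0))
    svc.backends.add(Backend('10.0.0.1', priority=5))  # same host: deduplicated
    svc.backends.add(Backend('10.0.0.2'))
    print(len(svc.backends))  # -> 2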
from whoosh.index import create_in
from whoosh import index, sorting
from whoosh.qparser import QueryParser
from whoosh.fields import *
from radix_tree import RadixTree
import pymongo
from ac_trie import Trie
import json
import uuid
import config
import logging
from multiprocessing import Process, Queue
import time
import re

logger = config.getLogger('ad_service', "./data/ad_service.log")


class update_task(Process):
    def __init__(self, task_queue, ix):
        Process.__init__(self)
        self.task_queue = task_queue
        self.ix = ix
        self.tagsParser = Trie(config.SKILL_FILE)

    def cut(self, value):
        value = value.lower().replace(' ', '')
        value = value.encode('UTF-8')
        terms = self.tagsParser.parse(value)
        v = {}
        for i in terms:
import pull
import push
from time import sleep
import config

moduleLogger = config.getLogger('flue')

services = {}  # maps service name -> Service

while True:
    moduleLogger.debug('Watching %s for registered services' % config.ETCD_ROOT_KEY)
    updated = pull.getServices(services)
    if updated is None:
        continue
    removed = []
    for name, service in services.items():
        if name not in updated:
            removed.append(name)
    # moduleLogger.debug(updated, removed)
    push.updateDNS(updated, removed)
    services = updated
    sleep(config.POLL_TIME)
## Paranoid Pirate worker
#
# Author: Daniel Lundin <dln(at)eintr(dot)org>
#
from random import randint
import time
import zmq
from ad_service import ADIndex, update_task
import sys
import config
from multiprocessing import Process, Queue
from whoosh import index
from whoosh.fields import *

logger = config.getLogger('ppworker', "./data/ppworker.log")

HEARTBEAT_LIVENESS = config.HEARTBEAT_LIVENESS  # 3..5 is reasonable
HEARTBEAT_INTERVAL = config.HEARTBEAT_INTERVAL  # Seconds
INTERVAL_INIT = config.INTERVAL_INIT
INTERVAL_MAX = config.INTERVAL_MAX

# Paranoid Pirate Protocol constants
PPP_READY = config.PPP_READY          # Signals worker is ready
PPP_HEARTBEAT = config.PPP_HEARTBEAT  # Signals worker heartbeat

INDEX_PATH = "indexdir"
HOST = 'localhost'
WORKER_HOST = "tcp://localhost:5556"
SUBSCRIBER_HOST = "tcp://localhost:5557"
import re
from config import getLogger
from bots import RedditBot

logger = getLogger()


class TicketBot(RedditBot):
    def __init__(self, user_name, *args, **kwargs):
        super().__init__(user_name, *args, **kwargs)
        self.COMMAND_PATTERN = r"!FAUbot (buy|sell) (\d{1,2})"

    def work(self):
        logger.info("Getting unread messages")
        inbox = self.r.get_unread(unset_has_mail=True)
        for message in inbox:
            command = re.search(self.COMMAND_PATTERN, message.body)
            if command:
                logger.info("Found message with a command")
                operation, number = command.groups()
                logger.info("Command: operation=[{}], number=[{}]".format(operation, number))
                subject = "FAUbot received your command"
                reply = """Hello! You have sent me a command. According to the message you sent me, you want to:
`{} {}` ticket{}.
Right now I'm just a prototype, so I will not process your request.""".format(
                    operation, number, 's' if int(number) > 1 else '')
                logger.info("Sending reply to: recipient=[{}]".format(message.author))
                self.r.send_message(message.author, subject, reply)
                logger.info("Message sent.")
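# Hedged illustration (not from the original file) of how COMMAND_PATTERN matches
# a message body:
#
#   >>> import re
#   >>> m = re.search(r"!FAUbot (buy|sell) (\d{1,2})", "hey !FAUbot sell 2 please")
#   >>> m.groups()
#   ('sell', '2')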
import requests
import datetime
from cachetools import ttl_cache
from collections import namedtuple
from bs4 import BeautifulSoup
from random import randint
from config import getLogger
from config.bot_config import get_interval
from bots import RedditBot

# region constants
SUBMISSION_INTERVAL_HOURS = get_interval('submission_interval_hours')
# endregion

# region globals
logger = getLogger()
Link = namedtuple('Link', 'url title')
# endregion


# region helpers
def clean_dir(obj):
    """
    When you want to call dir() on something but don't want to see any private attributes/methods.
    This is just a helper function used to figure out what attributes/methods an object has.

    :param obj: The thing to call dir() on, e.g. dir(obj)
    :return: A list of public methods and/or attributes of the object.
    """
    return [d for d in dir(obj) if not d.startswith('_') and not d.endswith('_')]
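# Example of clean_dir in action (illustrative, not from the original file): it
# filters dunder and private names out of dir().
#
#   >>> clean_dir(datetime.date(2016, 8, 18))
#   ['ctime', 'day', ...]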
import etcd
import config
import json
from services import Backend, Service

moduleLogger = config.getLogger('etcd')


def etcdClientConfig():
    clientConfig = {
        'host': config.ETCD_HOST,
        'port': config.ETCD_PORT
    }
    return clientConfig


def createClient():
    return etcd.Client(**etcdClientConfig())


etcdClient = createClient()


def getServices(services=None):
    # avoid a mutable default argument; a shared dict would leak state between calls
    if services is None:
        services = {}
    try:
        raw = etcdClient.read(config.ETCD_ROOT_KEY, recursive=True, sorted=True)
    except Exception:
        moduleLogger.error('Failed to connect to etcd host %s:%s' % (config.ETCD_HOST, config.ETCD_PORT))
        return services

    rawServices = [x for x in raw.get_subtree() if x.dir and not x.key == config.ETCD_ROOT_KEY]
    for service in rawServices:
        # split the key to obtain the service name
        name = service.key[1:].split('/')[1]
        # load the available backends
        try:
import threading
import praw
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from config import bot_config
from config import getLogger

logger = getLogger()  # you will need this to use logger functions

BotSignature = namedtuple('BotSignature', 'classname username permissions')
DEFAULT_SLEEP_INTERVAL = bot_config.get_sleep_interval('default')
RUN_BOTS_ONCE = bot_config.should_run_once()


# region EXCEPTIONS
class MissingRefreshTokenError(ValueError):
    pass


class InvalidBotClassName(ValueError):
    pass
# endregion


# region BASECLASSES
class Bot(threading.Thread, metaclass=ABCMeta):
    """
    Base class for all bots. It is a Thread that will continue to do work until it is told to stop.
import dns.update
import dns.query
import dns.tsigkeyring
from socket import gethostbyname
import config

moduleLogger = config.getLogger('ddns')


def loadKey():
    try:
        return dns.tsigkeyring.from_text(config.RNDC_KEY)
    except Exception:
        raise Exception('Error loading tsig key')


def updateDNS(services, removed):
    update = dns.update.Update(config.DNS_ZONE, keyring=loadKey())
    for name in removed:
        moduleLogger.debug('Removing records for nonexistent service %s' % name)
        update.delete(name)
    for name, service in services.items():
        service.diff()
        if service.shouldUpdate():
            moduleLogger.debug('Updating records for %s' % name)
            update.delete(name)
            for backend in service.backends:
                update.add(name, config.RECORD_TTL, 'A', gethostbyname(backend.host))
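# Hedged sketch of the send step (the excerpt above builds the Update message but
# is cut off before transmitting it). dnspython sends dynamic updates with
# dns.query.tcp/udp; config.DNS_SERVER is an assumed setting name, not one
# confirmed by the original source.
#
#   response = dns.query.tcp(update, config.DNS_SERVER)
#   moduleLogger.debug('DNS update rcode: %s' % response.rcode())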
# -*- coding: UTF-8 -*-
__author__ = 'klein'

import config
import requests
from BeautifulSoup import BeautifulSoup

logger = config.getLogger("name1", config.logging.DEBUG)

if __name__ == '__main__':
    response = requests.get(config.url)
    soup = BeautifulSoup(response.text)
    print soup
def __init__(self):
    self._TEMP_FILENAME = "capture.jpg"
    self.client = vision.ImageAnnotatorClient()
    self.logger = config.getLogger()
"""Read information about firefox heaps from Core Dumps. - modified jemalloc ver. """ from __future__ import absolute_import from __future__ import division import os import math import ctypes from pwnlib.elf.elf import ELF import config logname = "pwnlib." + __name__ log = config.getLogger(logname) from utils import unpack from datatypes import * class JEHeap(object): """Encapsulates information about a memory mapping in a :class:`Corefile`. """ def __init__(self, core): self._core=core #: : class:`int`: misc info on bin size, etc. self.kMinTinyClass = core.capsz self.kMaxTinyClass = 8 self.kMinQuantumClass = self.kMaxTinyClass * 2 self.kMaxQuantumClass = 512
## Paranoid Pirate queue
#
# Author: Daniel Lundin <dln(at)eintr(dot)org>
#
from collections import OrderedDict
import time
import sys
import zmq
from hashlib import md5
from cache.lrucache import LRUCache
import config

logger = config.getLogger('ppqueue', "./data/ppqueue.log")

HEARTBEAT_LIVENESS = config.HEARTBEAT_LIVENESS  # 3..5 is reasonable
HEARTBEAT_INTERVAL = config.HEARTBEAT_INTERVAL  # Seconds

# Paranoid Pirate Protocol constants
PPP_READY = config.PPP_READY          # Signals worker is ready
PPP_HEARTBEAT = config.PPP_HEARTBEAT  # Signals worker heartbeat


class Worker(object):
    def __init__(self, address):
        self.address = address
        self.expiry = time.time() + HEARTBEAT_INTERVAL * HEARTBEAT_LIVENESS


class WorkerQueue(object):
import threading
from abc import ABCMeta
from time import sleep
from argparse import ArgumentParser

import newsbot  # you must import your bot file here, even if you don't use it
import eventbot
import ticketbot
import config
from config import praw_config, bot_config
from bots import InvalidBotClassName, BotSignature, RedditBot

# If you declare your own RedditBot subclass in its own file,
# you must import it or else it will not be added to BOT_CLASSES.
BOT_CLASSES = {cls.__name__: cls for cls in RedditBot.get_subclasses()}

logger = config.getLogger()

parser = ArgumentParser(description="FAUbot options")
parser.add_argument("-a", "--account", dest='account', choices=praw_config.get_all_site_names(),
                    help="Specify which Reddit account configured in praw.ini will be used to launch bots.")


# region DISPATCH
class Dispatch(threading.Thread, metaclass=ABCMeta):
    """
    An object used to create, launch, and terminate bots.
    """
    def __init__(self, bot_signatures, stop_event=None):
        """
        Initializes a Dispatch object, and creates a pool of bots.

        :param bot_signatures: A list of BotSignatures used to create the new bots
        :param stop_event: A threading.Event used to keep the Dispatch alive and tell it when to close.