def start(host="localhost", port=24224, app="engine"):
    """
    Listen to salt events and forward them to fluent

    args:
        host (str): Host running fluentd agent. Default is localhost
        port (int): Port of fluentd agent. Default is 24224
        app (str): Text sent as fluentd tag. Default is "engine". This text
            is appended to "saltstack." to form a fluentd tag, ex:
            "saltstack.engine"
    """
    SENDER_NAME = "saltstack"
    sender.setup(SENDER_NAME, host=host, port=port)
    # Masters and minions use different event buses; pick by the node id suffix.
    if __opts__.get("id").endswith("_master"):
        event_bus = salt.utils.event.get_master_event(
            __opts__, __opts__["sock_dir"], listen=True
        )
    else:
        event_bus = salt.utils.event.get_event(
            "minion",
            transport=__opts__["transport"],
            opts=__opts__,
            sock_dir=__opts__["sock_dir"],
            listen=True,
        )
    log.info("Fluent engine started")
    # Block forever, forwarding every salt event as one fluentd record.
    while True:
        salt_event = event_bus.get_event_block()
        if salt_event:
            event.Event(app, salt_event)
def initialize(self, shard_id):
    '''
    Called once by a KCLProcess before any calls to process_records

    :type shard_id: str
    :param shard_id: The shard id that this processor is going to be working on.
    '''
    self.largest_seq = None        # highest sequence number seen so far
    self.send_record_errcnt = 0    # count of failed record sends
    self.last_checkpoint_time = time.time()
    # Shorten 'shardId' to 'sid' so the value fits in file names and tags.
    self.shard_id = shard_id.replace('shardId', 'sid')
    import logging
    # Per-shard log file under /tmp; one processor instance per shard.
    logging.basicConfig(
        filename='/tmp/akca-{}.log'.format(self.shard_id),
        level=logging.INFO,
        format=
        '[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s',
        datefmt='%H:%M:%S')
    self._logging = logging
    from fluent import sender, event
    # Tag fluentd records per shard, e.g. "akca.sid-000000000001".
    sender.setup('akca.{}'.format(self.shard_id))
    self.evt = event
    self.log_critical('processor initialized for {}'.format(self.shard_id))
def retrieve_tenant(tenant_id):
    """Fetch the USEMGMNT row for *tenant_id* and return it as a Tenant.

    Returns None when the tenant does not exist or a database error occurs.
    Emits fluentd 'follow' events tracing the DB connection lifecycle.
    """
    sender.setup('fluentd.test', host='localhost', port=24224)
    conn = None
    tn = None
    try:
        conn = psycopg2.connect(database="postgres", user="******",
                                password="******", host="127.0.0.1", port="5432")
        event.Event('follow', {'function': 'retrieve_tenant', 'status': 'DB_conn_opened'})
        cur = conn.cursor()
        # Parameterized query: the original concatenated tenant_id straight
        # into the SQL text, which is an injection risk.
        cur.execute(
            "SELECT TENANT_ID, USERS_ID, DEVICES, ADMIN_USER_ID, APPS_ID, RULES "
            "FROM USEMGMNT WHERE TENANT_ID = %s;", (tenant_id,))
        for row in cur.fetchall():
            # Map the selected columns positionally onto the Tenant fields.
            # (The original passed row[0] for every field, discarding the rest.)
            tn = Tenant(row[0], row[1], row[2], row[3], row[4], row[5])
            print(tn.tenant_id)
            print(tn.users_id)
            print(tn.devices)
            print(tn.admin_user_id)
            print(tn.apps_id)
            print(tn.rules)
        conn.commit()
        # Report success before returning; the original emitted this event
        # after the return statement, so it never fired.
        event.Event('follow', {'function': 'retrieve_tenant', 'status': 'successful'})
        return tn
    except Exception:
        # The original referenced an undefined `self` here, which would have
        # raised a NameError inside the handler.
        event.Event('follow', {'function': 'retrieve_tenant', 'status': 'exception occurred'})
    finally:
        # Guard: the original's `conn: psycopg2.connect()` was a bare
        # annotation, so conn could be unbound here when connect() failed.
        if conn is not None:
            conn.close()
        event.Event('follow', {'function': 'retrieve_tenant', 'status': 'DB_conn_closed'})
def update_logger(self, logger: logging.Logger = None):
    # Default to the root logger when none is given.
    if logger is None:
        logger = logging.root
    # Accept either a numeric level or a level-name string.
    self._log_level = self.log_level if isinstance(self.log_level, int) else logging.getLevelName(self.log_level)
    if logger is logging.root:
        # The root logger owns the full format/handler/color configuration.
        self._log_format = self.log_format.format(
            PEER_ID=self.peer_id[:8] if self.peer_id != "RadioStation" else self.peer_id,
            CHANNEL_NAME=self.channel_name,
            SCORE_PACKAGE=self.score_package and f"({self.score_package})"
        )
        self._update_log_output_type()
        self._update_handlers(logger)
        if self.log_color:
            self._update_log_color_set(logger)
        # Console handlers additionally get the stream filter.
        for handler in logger.handlers:
            if isinstance(handler, logging.StreamHandler):
                handler.addFilter(self._root_stream_filter)
    else:
        # Non-root loggers only need their level set; records propagate up.
        logger.setLevel(self._log_level)
    if self.log_monitor:
        # Mirror logs to an external fluentd-based monitor.
        sender.setup('loopchain', host=self.log_monitor_host, port=self.log_monitor_port)
def _tryToStart(self):
    # Resolve the robot name first; retry with linearly growing back-off
    # until the preference service is ready.  (Python 2 only: `qi.async`
    # is a syntax error on Python 3, where `async` is a keyword.)
    self.robotName = self._getRobotName()
    if self.robotName is None:
        self.retryCount += 1
        # delay is in microseconds: 5s * retryCount
        qi.async(self._tryToStart, delay=5 * 1000 * 1000 * self.retryCount)
        return
    with self.lock:
        # Restart cleanly if a previous session is still running.
        if self.running:
            self.stop()
        host = self._get_pref('host')
        if host is not None:
            tag = self._get_pref('tag', 'pepper')
            sender.setup(tag, host=host, port=int(self._get_pref('port', '24224')))
            self.running = True
            interval = self._get_pref('metrics_interval', str(DEFAULT_METRICS_INTERVAL))
            # Clamp the polling interval to a sane minimum.
            self.metricsInterval = max(int(interval), MIN_METRICS_INTERVAL)
            metrics_conf = {'interval_sec': self.metricsInterval}
            self.sendEvent('service', {'status': 'started', 'config': metrics_conf, 'retried': self.retryCount})
            self.sendEvent('cpu_info', cpu_stat.cpu_info())
            self._startWatchingLogs()
            self._sendMetrics()
def start(host='localhost', port=24224, app='engine'):
    '''
    Forward the local salt event stream to a fluentd agent.

    args:
        host (str): Host running fluentd agent. Default is localhost
        port (int): Port of fluentd agent. Default is 24224
        app (str): Text sent as fluentd tag. Default is "engine". This text
            is appended to "saltstack." to form a fluentd tag, ex:
            "saltstack.engine"
    '''
    sender.setup('saltstack', host=host, port=port)
    # Pick the right event bus for this node type.
    node_id = __opts__.get('id')
    if node_id.endswith('_master'):
        bus = salt.utils.event.get_master_event(
            __opts__, __opts__['sock_dir'], listen=True)
    else:
        bus = salt.utils.event.get_event(
            'minion',
            transport=__opts__['transport'],
            opts=__opts__,
            sock_dir=__opts__['sock_dir'],
            listen=True)
    log.info('Fluent engine started')
    # Forward every event forever.
    while True:
        payload = bus.get_event_block()
        if payload:
            event.Event(app, payload)
def get(self, request):
    # Read all Friend rows from the DB.
    data = Friend.objects.all()
    for item in data:
        if item.id == 1:
            # NOTE(review): this mutates the object in memory only —
            # item.save() is never called, so the change is not persisted.
            item.name += 'M'
    friend = Friend.objects.get(id=1)
    print(vars(friend))
    # Read a value from redis.
    pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
    r = redis.StrictRedis(connection_pool=pool)
    bvalue = r.get('test')
    value = ''
    if bvalue is None:
        value = 'None'
    else:
        value = bvalue.decode()
    # Send a log record to fluentd.
    sender.setup('debug', host='localhost', port=24224)
    event.Event('follow', {'from': 'userA', 'to': 'userB'})
    # Build the template context.
    params = {
        'title': 'Hello/Index',
        'message': 'all friend',
        'data': [friend],
        'value': value,
    }
    return render(request, 'hello/index.html', params)
def __init__(self, config_path, cla_dict):
    # Load trulia configuration and the relational DB manager unconditionally.
    trulia_conf = TruliaConfLoader.TruliaConfLoader(config_path)
    self.load_trulia_params(trulia_conf)
    self.db_mgr = DatabaseManager.DatabaseManager(config_path)
    # Optional key/value store selected by command-line flag:
    # '' = none, 'h' = HBase, 'r' = Redis.
    # NOTE(review): kv_mgr stays unset for any other flag value — confirm
    # callers never pass anything else.
    kv_store = cla_dict['kv_store']
    if kv_store == '':
        self.kv_mgr = None
    elif kv_store == 'h':
        print "loading HBase manager",
        import HBaseManager
        self.kv_mgr = HBaseManager.HBaseManager()
        print "completed"
    elif kv_store == 'r':
        print "loading Redis manager",
        import RedisManager
        self.kv_mgr = RedisManager.RedisManager()
        print "completed"
    # Optional fluentd receiver: '' = disabled, 'f' = local fs, 'h' = HDFS.
    fluentd_rx = cla_dict['fluentd_rx']
    if fluentd_rx == '':
        self.fluentd_enabled = False
        print "FluentD not enabled"
    elif fluentd_rx == 'f':
        print "loading fluentd for local fs"
        sender.setup('fs')
        self.fluentd_enabled = True
        print "FluentD enabled for local filesystem"
    elif fluentd_rx == 'h':
        print "loading fluentd for hdfs"
        sender.setup('hdfs')
        self.fluentd_enabled = True
        print "FluentD enabled for HDFS"
def daemon_task():
    # Poll docker for running containers and fork one pidstat worker per
    # container; loops forever.
    cli = docker.Client()
    sender.setup('log.pidstat', host=FLUENTD_HOST, port=24224)
    while True:
        cids = []
        for c in cli.containers():
            image = c['Image']
            # Skip our own infrastructure containers.
            if image.count('docker_agent') or image.count('fluentd'):
                continue
            cids.append(c['Id'])
        if len(cids) == 0:
            time.sleep(INTERVAL_SECONDS)
            continue
        procs = []
        now = datetime.now()
        for cid in cids:
            print("%s CONTAINER ID:%s" % (now, cid[:12]))
            p = multiprocessing.Process(target=pidstat, args=(cid,))
            procs.append(p)
            p.start()
        # Wait for all workers to finish before the next poll.
        [p.join() for p in procs]
def __init__(self, tag, host='127.0.0.1', port=24224):
    self.host = host
    self.port = port
    self.tag = tag
    # Skeleton of every log record sent through this logger.
    self.msg = {
        'uuid': self.get_uuid(),  # rule: hostname-timestamp-rand(10000,99999)
        'level': 0,               # 0/10/20 -> info/warning/error
        'client_ip': '',          # ip the user connected from
        'category': '',           # project identifier
        'logtime': 0,             # unix timestamp, second precision
        'service': '',            # service/module identifier
        'status': 0,              # 0 means success; other values flag problems
        'machine': socket.gethostname(),           # host name
        'server_ip': self.get_ip_address('eth0'),  # ip of eth0 by default
        'user_info': {},          # user information
        'logmsg': {},             # free-form extension fields
        'userRequest': {
            'get': {},
            'post': {},
            'cookie': {},
            'server': {}
        },  # the http request issued by the user; recommended to include
            # when recording error logs
    }
    sender.setup(self.tag, host=self.host, port=self.port)
def plugin_loaded():
    """Load plugin settings and configure the fluentd sender from them."""
    global SETTINGS
    SETTINGS = sublime.load_settings(SETTINGS_FILE)
    tag = str(SETTINGS.get('tagprefix'))
    host = str(SETTINGS.get('host'))
    port = int(SETTINGS.get('port'))
    sender.setup(tag, host=host, port=port)
def run():
    """Fetch the hospital list from the HIRA open API and forward every
    item to fluentd (tag td.hira), retrying with exponential back-off."""
    max_retries = 7
    retries = 0
    today = datetime.today().strftime("%Y%m%d")
    while True:
        try:
            if retries >= max_retries:
                print(today, '-> Data.go.kr API Call Error!!!')
                return
            hira_url = 'http://apis.data.go.kr/B551182/hospInfoService/getHospBasisList'
            # NOTE(review): the service key is hard-coded; consider loading
            # it from configuration/environment instead of source control.
            ServiceKey = '2UWvNIvNmrdJQKq4OhO3sipd2S7NO36m1bCqJi61wdShwJIp1Wg5D%2FAEanZ97vZAydIS%2FN9eL418xqyN7J0vzw%3D%3D'
            queryParams = '?' + 'serviceKey=' + ServiceKey + '&_type=json&numOfRows=10'
            request = Request(hira_url + queryParams)
            request.get_method = lambda: 'GET'
            response = urlopen(request)
            if response.getcode() == 200:
                # Reuse the response already in hand; the original called
                # urlopen(request) a second time, issuing a duplicate request.
                response_body = response.read()
                hosp_json_rt = response_body.decode('utf-8')
                hosp_items = json.loads(hosp_json_rt)
                print(today, '-> 병원 API호출통해 받은 건수 : ',
                      hosp_items['response']['body']['totalCount'])
            else:
                print(today, '-> 병원 API호출 result : ', response.getcode())
                retries += 1
                wait = 2**(retries - 1)
                time.sleep(wait)
                continue
            sender.setup('td.hira', host='localhost', port=24224)
            print(today, '-> sender.setup ok ')
            tot = 0
            # Forward every record, reporting progress every 5000 items.
            for item in hosp_items['response']['body']['items']['item']:
                tot = tot + 1
                item['inputDate'] = today
                item['inputType'] = '병원'
                event.Event('getParmacyBasisList_getHospBasisList', item)
                if tot % 5000 == 0:
                    print(today, '-> TD 전송 병원진행건수 : ', tot)
            print(today, '-> TD 전송 병원완료건수 : ', tot)
            sender.close()
            break
        except Exception:
            # Back off exponentially on any failure and retry.
            retries += 1
            wait = 2**(retries - 1)
            time.sleep(wait)
def __init__(self):
    """Configure the fluentd sender (tag 'rk') and cache common paths."""
    fluent_opts = dict(
        host=common_config.fluentd_host,
        port=common_config.fluentd_port,
        bufmax=5 * 1024 * 1024,  # 5 MiB send buffer
        timeout=5.0,
        verbose=False,
        buffer_overflow_handler=buffer_overflow_handler,
    )
    sender.setup('rk', **fluent_opts)
    self.root_dir = common_config.bi_root_path
    self.zone = common_config.zone
def submit_record(self, recordType, record):
    """Send one record to fluentd under self.tag, best-effort.

    recordType becomes the event label; record is the payload dict.
    Failures (missing fluent package, connection problems) are swallowed
    after printing a diagnostic, preserving the original best-effort intent.
    """
    try:
        from fluent import sender
        from fluent import event
        # The original rebound the module name (`sender = sender.setup(...)`)
        # in the host-less branch; setup() configures a global sender, so the
        # return value does not need to be kept.
        if self.host is None:
            sender.setup(self.tag)
        else:
            sender.setup(self.tag, host=self.host, port=self.port)
        event.Event(recordType, record)
    except Exception:
        print("Fluentd: Failed to submit a record")
def set_log_level(): logging.basicConfig(handlers=[ logging.FileHandler(conf.LOG_FILE_PATH, 'w', 'utf-8'), logging.StreamHandler() ], format=conf.LOG_FORMAT, level=conf.LOG_LEVEL) # monitor setting if conf.MONITOR_LOG: sender.setup('loopchain', host=conf.MONITOR_LOG_HOST, port=conf.MONITOR_LOG_PORT)
def __init__(self, cmd, config):
    """Store the command and, when *config* is given, configure fluentd.

    cmd: list of command tokens; a single-element list is unwrapped to its
        bare string (original behavior preserved).
    config: 'key:value;key:(value);...' formatted string, or None to skip
        fluentd setup.
    """
    # Unwrap single-token commands. Guard len(cmd) == 1 explicitly: the
    # original `<= 1` raised IndexError on an empty list.
    if len(cmd) == 1:
        cmd = cmd[0]
    self.command = cmd
    if config is not None:
        # key:value pairs, value optionally parenthesized, ';' separated.
        pattern = re.compile(r'(\w+):(\w+|\([^)]+\));?')
        param = dict(pattern.findall(config))
        param.setdefault('host', 'localhost')
        param.setdefault('port', 24224)
        sender.setup(param['tag'], host=param['host'], port=param['port'])
def post(cls):
    # Validate the form input; fall back to the GET view when incomplete.
    name = request.form.get("name")
    age = request.form.get("age")
    if not name or not age:
        return cls.get()
    data = {
        "name": name,
        "age": age,
        "created": datetime.now().strftime("%Y/%m/%d %H/%M/%S")
    }
    # Forward the submission to fluentd (record tag "fluent.test.local").
    sender.setup("fluent.test", host="0.0.0.0", port=24224)
    event.Event("local", data)
    # FluentdTestMongoObj.insert(data)
    return render_template(cls.TEMPLATE, name=name, age=age)
def __init__(self):
    # Pull redis connection info from the [REDIS] section of the config.
    host = core.parser.getValue('REDIS', 'host')
    port = core.parser.getValue('REDIS', 'port')
    self.log = core.parser.getValue('REDIS', 'log')
    self.fluent = core.parser.getValue('REDIS', 'fluent')
    self.redis = None
    self.strict_redis = None
    if host and port:
        self.redis = redis.Redis(host=str(host), port=int(port), db=0)
        self.strict_redis = redis.StrictRedis(host=str(host), port=int(port), db=0)
        log.debug(self.redis.info())
        # Start from an empty database on every construction.
        self.redis.flushdb()
    else:
        log.warn("no redis")
    if self.fluent:
        # NOTE(review): fluentd is pointed at the *redis* host on port 24224
        # — confirm a fluentd agent really runs on that machine.
        sender.setup('scnm', host=host, port=24224)
def update_tenant_admin(admin_user_id: int, tenant_id: int):
    """Set ADMIN_USER_ID for the given tenant in USEMGMNT.

    Emits fluentd 'follow' events tracing the DB connection lifecycle.
    """
    sender.setup('fluentd.test', host='localhost', port=24224)
    # The original `conn: psycopg2.connect()` was a bare annotation — it never
    # created a connection, and finally could hit an unbound name.
    conn = None
    try:
        conn = psycopg2.connect(database="postgres", user="******",
                                password="******", host="127.0.0.1", port="5432")
        event.Event('follow', {'function': 'update_tenant_admin', 'status': 'DB_conn_opened'})
        cur = conn.cursor()
        # Parameterized to avoid SQL injection via string concatenation.
        cur.execute("UPDATE USEMGMNT SET ADMIN_USER_ID = %s WHERE TENANT_ID = %s;",
                    (admin_user_id, tenant_id))
        conn.commit()
        event.Event('follow', {'function': 'update_tenant_admin', 'status': 'successful'})
    except Exception:
        event.Event('follow', {'function': 'update_tenant_admin', 'status': 'exception occurred'})
    finally:
        if conn is not None:
            conn.close()
        event.Event('follow', {'function': 'update_tenant_admin', 'status': 'DB_conn_closed'})
def __call__(self, *args, **kwargs):
    """Basic behavior of the aspect: configure fluentd, run the wrapped
    call via around(), then close the sender and return the result."""
    sender.setup(fluent.project_name,
                 host=fluent.fluent_url,
                 port=int(fluent.fluent_port))
    response = self.around(*args, **kwargs)
    # The original wrote `sender.close` without parentheses, which only
    # referenced the function and never actually closed the sender.
    sender.close()
    return response
def main():
    # Forward results to fluentd/treasure-data under the 'td' tag.
    sender.setup('td')
    totalsums = 0
    mp = MongoOp('localhost')
    # Walk the category tree at depth 3.
    r = GetItems.get_cat(mp, 3)
    for c in r:
        cid = c['CategoryId']
        msg = "cid={} aucnum={} catpath={}".format(cid, c['NumOfAuctions'], c['CategoryPath'].encode('utf-8'))
        my_logger.info(msg)
        gi = GetItems(mp, cid, c)
        pages = gi.get_pages(cid)
        msg = "pages={},totalaccess={}".format(pages, GetItems.TotalAccess)
        my_logger.info(msg)
        # Fetch each result page until get_items signals an empty page.
        for i in range(1, pages):
            b = gi.get_items(cid, i)
            if b == 0:
                msg = "page {} break ,totalaccess={}".format(i, GetItems.TotalAccess)
                my_logger.debug(msg)
                break
def create_user(self):
    """Insert this user's attributes into the USERS table.

    Emits fluentd 'follow' events tracing the DB connection lifecycle.
    """
    # The original `conn: psycopg2.connect()` was a bare annotation — no
    # connection was made and finally could hit an unbound name.
    conn = None
    sender.setup('fluentd.test', host='localhost', port=24224)
    try:
        conn = psycopg2.connect(database="postgres", user="******",
                                password="******", host="127.0.0.1", port="5432")
        event.Event('follow', {'function': 'create_user', 'status': 'DB_conn_opened'})
        cur = conn.cursor()
        # Parameterized insert; the original concatenated attribute values
        # straight into the SQL string (injection-prone and quote-fragile).
        cur.execute(
            "INSERT INTO USERS (USER_ID, USER_PWD, FIRST_NAME, LAST_NAME, "
            "PH_NUMBER, API_ACL, TENANT_ID) VALUES (%s, %s, %s, %s, %s, %s, %s);",
            (self.user_id, self.user_pwd, self.first_name, self.last_name,
             self.ph_number, self.api_acl, self.tenant_id))
        conn.commit()
        event.Event('follow', {'function': 'create_user', 'status': 'successful'})
    except Exception:
        event.Event('follow', {'function': 'create_user', 'status': 'exception occurred'})
    finally:
        if conn is not None:
            conn.close()
        event.Event('follow', {'function': 'create_user', 'status': 'DB_conn_closed'})
def main():
    """Crawl auction categories (depth 3), logging per-category and
    per-page progress; records go to fluentd under the 'td' tag."""
    sender.setup('td')
    totalsums = 0
    mongo = MongoOp('localhost')
    categories = GetItems.get_cat(mongo, 3)
    for cat in categories:
        cat_id = cat['CategoryId']
        my_logger.info("cid={} aucnum={} catpath={}".format(
            cat_id, cat['NumOfAuctions'], cat['CategoryPath'].encode('utf-8')))
        fetcher = GetItems(mongo, cat_id, cat)
        page_count = fetcher.get_pages(cat_id)
        my_logger.info("pages={},totalaccess={}".format(
            page_count, GetItems.TotalAccess))
        # Stop at the first empty page.
        for page in range(1, page_count):
            if fetcher.get_items(cat_id, page) == 0:
                my_logger.debug("page {} break ,totalaccess={}".format(
                    page, GetItems.TotalAccess))
                break
def trace_to_log():
    """Run traceroute against conf.TARGET_HOST, parse each hop line and
    ship the hop map to fluentd as one event."""
    output = {}
    n = 1
    result = os.popen('traceroute ' + conf.TARGET_HOST)
    for line in iter(result):
        # Skip the traceroute banner and warning lines. The original tested
        # `("traceroute" or "Warning") in line`, which evaluates to
        # `"traceroute" in line` and never matched warnings.
        if "traceroute" in line or "Warning" in line:
            pass
        elif conf.FILTER != '':
            # Only keep hops matching the configured substring filter.
            if conf.FILTER in line:
                output['hop_' + str(n)] = transform_to_dict(line)
                n += 1
        else:
            output['hop_' + str(n)] = transform_to_dict(line)
            n += 1
    sender.setup(conf.FLUENT_TAG, host=conf.FLUENT_HOST, port=conf.FLUENT_PORT)
    # Use the target host (dots stripped) as the event sub-tag.
    subtag = conf.TARGET_HOST.replace(".", "")
    event.Event(subtag, output)
    log.info("Message %s was sent!" % output)
def add_tenant_user(user_id: int, tenant_id: int):
    """Append *user_id* to the USERS_ID list of the given tenant.

    Emits fluentd 'follow' events tracing the DB connection lifecycle.
    """
    sender.setup('fluentd.test', host='localhost', port=24224)
    # The original `conn: psycopg2.connect()` was a bare annotation — it never
    # created a connection, and finally could hit an unbound name.
    conn = None
    try:
        conn = psycopg2.connect(database="postgres", user="******",
                                password="******", host="127.0.0.1", port="5432")
        event.Event('follow', {'function': 'add_tenant_user', 'status': 'DB_conn_opened'})
        cur = conn.cursor()
        # Parameterized queries: the original concatenated ids into the SQL
        # text (injection risk) and spliced the Python list repr (str(list))
        # into the UPDATE statement, which is not valid SQL.
        cur.execute("SELECT USERS_ID FROM USEMGMNT WHERE TENANT_ID = %s;", (tenant_id,))
        for row in cur.fetchall():
            users = list(row[0])
            users.append(str(user_id))
            cur.execute("UPDATE USEMGMNT SET USERS_ID = %s WHERE TENANT_ID = %s;",
                        (users, tenant_id))
        conn.commit()
        event.Event('follow', {'function': 'add_tenant_user', 'status': 'successful'})
    except Exception:
        event.Event('follow', {'function': 'add_tenant_user', 'status': 'exception occurred'})
    finally:
        if conn is not None:
            conn.close()
        event.Event('follow', {'function': 'add_tenant_user', 'status': 'DB_conn_closed'})
def detect_logging(self):
    """Pick a logging backend from the LOGGING environment variable and
    build self.logger accordingly (Stackdriver or fluentd)."""
    if 'LOGGING' in os.environ:
        if os.environ['LOGGING'] == STACKDRIVER:
            self.logging = os.environ['LOGGING']
            self.logger = logging.Client().logger(self.logger_name)
        if os.environ['LOGGING'] == FLUENTD:
            self.logging = os.environ['LOGGING']
            # defaults for fluentd
            f_host = 'fluentd'
            f_port = 24224
            if 'FLUENTD_HOST' in os.environ:
                f_host = os.environ['FLUENTD_HOST']
            if 'FLUENTD_PORT' in os.environ:
                # Environment values are strings; the sender expects an
                # integer port (the original passed the raw string through).
                f_port = int(os.environ['FLUENTD_PORT'])
            self.logger = sender.setup(self.logger_name, host=f_host, port=f_port)
# -*- coding: utf-8 -*- import unittest from mock import patch from fluent import event, sender sender.setup(server='localhost', tag='app') class TestEvent(unittest.TestCase): def test_logging(self): # XXX: This tests succeeds even if the fluentd connection failed # send event with tag app.follow event.Event('follow', { 'from': 'userA', 'to': 'userB' }) def test_logging_with_timestamp(self): # XXX: This tests succeeds even if the fluentd connection failed # send event with tag app.follow, with timestamp event.Event('follow', { 'from': 'userA', 'to': 'userB' }, time=int(0)) def test_no_last_error_on_successful_event(self):
def send_logline(data):
    """Forward one supervisor log line to fluentd, tagged
    'supervisor.<processname>.<channel>'."""
    subtag = '{0}.{1}'.format(data['processname'], data['channel'])
    sender.setup('supervisor', host=config.FLUENTD_HOST, port=config.FLUENTD_PORT)
    event.Event(subtag, {'logline': data['logline']})
'gmail': { 'key': alg_utils.get_secret('gsuite', 'gmail_robot_keyfile'), 'scopes': ['https://www.googleapis.com/auth/gmail.modify'] }, 'admin': { 'key': alg_utils.get_secret('gsuite', 'admin_robot_keyfile'), 'scopes': ['https://www.googleapis.com/auth/admin'] } } logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR) server_email = alg_utils.get_config('gsuite', 'server_account') discovery_url = 'https://www.googleapis.com/discovery' drive_discovery_url = 'https://www.googleapis.com/discovery/v1/apis/drive/v3/rest' sender.setup(host=alg_utils.get_config('fluent', 'host'), port=alg_utils.get_config('fluent', 'port'), tag='alg.worker.pickles') def fire_batch_value_clear(email: str, spreadsheet_id: str, ranges: list): http = get_authorized_http('drive', email=email) body = {'ranges': ranges} return build('sheets', 'v4', http=http) \ .spreadsheets() \ .values() \ .batchClear(spreadsheetId=spreadsheet_id, body=body) \ .execute() @retry(wait_exponential_multiplier=1000, wait_exponential_max=10000,
# Minimal fluent-logger example: send one record, tagged
# 'td.test_db.follow', to a local fluentd agent.
from fluent import sender, event
sender.setup('td.test_db', host='localhost', port=24224)
event.Event('follow', {'from': 'XXX', 'to': 'YYY'})
import http.client as http_client except ImportError: # Python 2 import httplib as http_client http_client.HTTPConnection.debuglevel = 1 from fluent import sender, event # Connect to Redis # redis = Redis(host="redis", db=0, socket_connect_timeout=2, socket_timeout=2) app = Flask(__name__) # from flask_bootstrap import Bootstrap # Bootstrap(app) sender.setup('helloworld', host='192.168.181.99', port=30224) # sender.setup('helloworld', host='fluentd-es.logging', port=24224) @app.route("/") def hello(): event.Event('follow-event-base', {'from': 'userA', 'to': 'userB'}) visits = "hahahahahhahahahahahhahahahahhaahhaha..." html = "<h3>Hello {name}!</h3>" \ "<b>Hostname:</b> {hostname}<br/>" \ "<b>Visits:</b> ${visits}" return html.format(name=os.getenv("NAME", "world"), hostname=socket.gethostname(), visits=visits)
def setUp(self):
    # Point the global fluent sender at an in-process mock server so tests
    # can inspect what was emitted without a real fluentd.
    self._server = mockserver.MockRecvServer('localhost')
    sender.setup('app', port=self._server.port)
from raven import Client
from fluent import sender
from raven.contrib.celery import register_signal

from scrapi.settings.defaults import *

logging.basicConfig(level=logging.INFO)
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARNING)
logger = logging.getLogger(__name__)

# Local settings override the defaults when present.
try:
    from scrapi.settings.local import *
except ImportError as error:
    # logger.warn is a deprecated alias of logger.warning.
    logger.warning("No scrapi local.py settings file found. Try running $ cp scrapi/settings/local-dist.py scrapi/settings/local.py. Defaulting to scrapi/settings/defaults.py")

# Optional integrations, toggled by settings flags.
if USE_FLUENTD:
    sender.setup(**FLUENTD_ARGS)

if SENTRY_DSN:
    client = Client(SENTRY_DSN)
    register_signal(client)

# Celery configuration.
CELERY_ENABLE_UTC = True
CELERY_RESULT_BACKEND = None
CELERY_TASK_SERIALIZER = 'pickle'
CELERY_ACCEPT_CONTENT = ['pickle']
CELERY_RESULT_SERIALIZER = 'pickle'
CELERY_IMPORTS = ('scrapi.tasks', 'scrapi.migrations')
__author__ = 'hoangnn'
from fluent import sender, event
from logging import warning, info, basicConfig, INFO

if __name__ == "__main__":
    # Smoke test: send one event to a remote fluentd, then log locally.
    sender.setup('fluentd.test', host='sachcu.mobi', port=3008)
    event.Event('follow', {'from': 'userA', 'to': 'userB'})
    # NOTE(review): basicConfig runs after the event is sent, so only the
    # info() call below goes through the configured logging.
    basicConfig(level=INFO)
    info("Test warning log")
def __init__(self):
    # Configure the global fluent sender with tag 'FM' (default host
    # localhost:24224).
    sender.setup('FM')
# if need to use connectionpool http://www.cnblogs.com/englefly/p/4579863.html BATCH_INTERVAL = 1.2; def getSqlContextInstance(sparkContext): if ('sqlContextSingletonInstance' not in globals()): globals()['sqlContextSingletonInstance'] = SQLContext(sparkContext) return globals()['sqlContextSingletonInstance'] conf = SparkConf().setAppName("PySpark Cassandra Text Bids Join").set("spark.es.host", "ec2-52-88-7-3.us-west-2.compute.amazonaws.com").set("spark.streaming.receiver.maxRate",2000).set("spark.streaming.kafka.maxRatePerPartition",1000).set("spark.streaming.backpressure.enabled",True).set("spark.cassandra.connection.host","172.31.1.138") kafkaBrokers = {"metadata.broker.list": "52.33.29.117:9092,52.33.248.41:9092,52.35.99.109:9092,52.89.231.174:9092"} sc = SparkContext(conf=conf) ssc = StreamingContext(sc, BATCH_INTERVAL) sender.setup('spark.out', host='localhost', port=24224) zkQuorum, topic = sys.argv[1:] # streams = [KafkaUtils.createStream(ssc, zkQuorum, "spark-streaming-consumer", {topic: 1}) for x in range(0,3)] # stream = ssc.union(streams) # stream = KafkaUtils.createStream(ssc, zkQuorum, "spark-streaming-consumer", {topic: 1}) stream = KafkaUtils.createDirectStream(ssc, [topic], kafkaBrokers) # streams2 = [KafkaUtils.createStream(ssc, zkQuorum, "spark-streaming-consumer2", {"pharma_bids_prices2": 1}) for x in range(0,3)] # stream2 = ssc.union(streams2) # stream2 = KafkaUtils.createStream(ssc, zkQuorum, "spark-streaming-consumer2", {"pharma_bids_prices2": 1}) stream2 = KafkaUtils.createDirectStream(ssc, ["pharma_bids_prices2"], kafkaBrokers) datasourcetype = "TT" if topic=="TT_raw" else "RD"
from celery import Celery from fluent import sender, event app = Celery('gt-leaf') app.config_from_object('celeryconfig') sender.setup('greenthumb') @app.task def photoresistor(): # todo: calibration import RPi.GPIO as GPIO, time GPIO.setmode(GPIO.BCM) def RCtime(RCpin): reading = 0 TIMEOUT = 2000 GPIO.setup(RCpin, GPIO.OUT) GPIO.output(RCpin, GPIO.LOW) time.sleep(0.1) GPIO.setup(RCpin, GPIO.IN) while (GPIO.input(RCpin) == GPIO.LOW and reading < TIMEOUT): reading += 1 return reading result = RCtime(18) event.Event('sensor', {'name': 'photoresistor', 'value': result}) return result @app.task def pump():
# Recreate the HBase stats table from scratch (drop + create with column
# families 'cf' and 'i').
conn.delete_table('city_stats_26june14', disable=True)
conn.create_table('city_stats_26june14', {'cf': {}, 'i': {}})
table = conn.table('city_stats_26june14')
#table = conn.table('city_stats')

# TODO cleanup
# Make the bundled fluent-logger package importable.
fluent_path = "../extern/fluent-logger-python"
sys.path.append(fluent_path)
from fluent import sender
from fluent import event
sender.setup('hdfs')

# Parse every stats file for this location type.
file_list = get_file_list(data_dir, loc_type)
for file_name in file_list:
    print 'Parsing', file_name
    file_handle = open(file_name, 'r')
    text = file_handle.read()
    file_handle.close()
    parse_get_city_stats_resp(text)
    #city_dict = TruliaDataFetcher.parse_get_city_stats_resp(text)
import random
import time

from flyplanner.colony import Colony
from fluent import sender

# All colony events go to a local fluentd agent under the "flyplanner" tag.
sender.setup("flyplanner", host="localhost", port=24224, nanosecond_precision=False)


def main():
    # Simulate a fly colony until it runs out of energy, then report.
    initial_energy = 30_000
    colony = Colony([], initial_energy)
    while colony.is_active():
        # Jitter each simulated day a little.
        time.sleep(random.uniform(0.001, 0.01))
        colony.new_day()
    print(
        f"A colony live {colony.days_alive} days with {initial_energy} energy")
    print(
        f"{colony.left} leaved, {colony.born} born and {colony.died} flies died during colony life"
    )
    # Flush any buffered fluentd records before exiting.
    sender.close()


if __name__ == "__main__":
    main()
import os
import datetime as dt
import time

from flask import Flask
from fluent import sender
from fluent import event

app = Flask(__name__)
# All events go to the local fluentd agent under tag "helloflask.*".
sender.setup('helloflask', host='localhost', port=24224)


@app.route("/")
def hello():
    host = os.environ["HOSTNAME"]
    now = dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # Record the request before answering.
    event.Event('hello', {'host': host, 'datetime': now})
    # Artificial latency (~34 ms) to make request timing observable.
    time.sleep(.034)
    return "It's something-running-on-{}!\nIt's now {}.".format(host, now)


if __name__ == "__main__":
    # Only for debugging while developing
    app.run(host='0.0.0.0', debug=True, port=80)
def __init__(self, config):
    """Configure the global fluent sender from a config mapping.

    Recognized keys (all optional): 'host', 'port', 'tag_name'.
    """
    sender.setup(
        config.get('tag_name', 'django.actionlog'),
        host=config.get('host', 'localhost'),
        port=config.get('port', 24224),
    )
return (int(out0, 16) + (int(out1, 16) * 0x100) + (int(out2, 16) * 0x10000)) / 4096.0 def send_metrics(): # read atmospheric pressure from LPS atmos = read_lps() # read humidity and temp from DHT humidity, temp = Adafruit_DHT.read_retry(Adafruit_DHT.DHT22, DHT22_GPIO) # write metrics to local fluentd event.Event("metrics", { "atmos": atmos, "hum": humidity, "temp": temp }) # init LPS init_lps() # init fluentd sender.setup(FL_TAG) # measure and send the metrics periodically last_checked = 0 while True: if time.time() - last_checked > SEND_INTERVAL: last_checked = time.time() send_metrics() time.sleep(0.5)
# Batch-size tuning knobs and hotword lists, all loaded from config.
UPDATE_CLIENT_BATCH_SIZE = alg_utils.get_config('tuning', 'update_client_batch_size')
VC_BATCH_SIZE = alg_utils.get_config('tuning', 'vc_batch_size')
NOTE_ARBITRATION_BATCH_SIZE = alg_utils.get_config('tuning', 'note_arbitration_batch_size')
CLINICAL_HOTWORDS = alg_utils.get_config('overwatch', 'clinical_hotwords')
IGNORE_WORDS = alg_utils.get_config('overwatch', 'ignore_words')

# NOTE(review): os.path.join never raises IOError, so the except branch is
# dead code; it also open()s the file read-only and then write()s to it,
# which would fail. Presumably the intent was to (re)create the word list
# when missing — confirm and rework.
try:
    IGNORE_WORDLIST_PATH = os.path.join(os.path.dirname(__file__), "ignore_words.txt")
except IOError:
    IGNORE_WORDLIST = open(os.path.join(os.path.dirname(__file__), "ignore_words.txt"))
    for word in IGNORE_WORDS:
        IGNORE_WORDLIST.write(word)
    IGNORE_WORDLIST_PATH = os.path.join(os.path.dirname(__file__), "ignore_words.txt")

# Worker events go to fluentd under the overwatch tag.
sender.setup(
    host=alg_utils.get_config('fluent', 'host'),
    port=alg_utils.get_config('fluent', 'port'),
    tag='alg.worker.overwatch')


@app.task
def vc_single(email):
    # Page through one user's gmail message list, stashing each result.
    service = gsuite_tasks.get_email_request(email)
    request = service.users().messages().list(userId=email)
    results = request.execute()
    for result in results:
        mongo_tasks.store_email(result)
    next_results = service.users().messages().list_next(request, results)
    while next_results:
        for result in next_results:
            mongo_tasks.store_email(result)
        # NOTE(review): list_next is called with the ORIGINAL request/results
        # pair each iteration, so this may loop over the same page forever —
        # confirm it should advance with the latest request/response instead.
        next_results = service.users().messages().list_next(request, results)
#!/usr/bin/python #rock, scissors, paper with fluentd logging #[email protected] import random from fluent import sender from fluent import event tie= "is a tie" p1 = "goes to you; the computer loses." p2 = "goes to the computer; you lose." sender.setup('td.rsp_db', host='localhost', port=24224) myDict = {('rock', 'rock') : tie, ('rock', 'paper') : p2, ('rock', 'scissors') : p1, ('paper', 'rock') : p1, ('paper', 'paper') : tie, ('paper', 'scissors') : p2, ('scissors', 'rock') : p2, ('scissors', 'paper') : p1, ('scissors', 'scissors') : tie } def throw(player1, player2): verdict = myDict[(player1, player2)] print "The game " + verdict event.Event('game_data', { 'verdict': verdict }) def lets_get_started(): try: player1 = raw_input("Throw! Enter 'rock', 'paper', or 'scissors': " ) print "You chose: " + player1 event.Event('game_data', { 'player': 'Player 1', 'choice': player1
def init_fluentd():
    # Configure the global fluent sender from application config.
    sender.setup(conf.FLUENTD_TAG, host=conf.FLUENTD_ADDRESS, port=conf.FLUENTD_PORT)
# coding: utf-8 from __future__ import absolute_import, division, print_function import arrow from fluent.sender import setup from fluent.event import Event from tweepy.api import API from tweepy.auth import AppAuthHandler from kotodama.settings import TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET setup('kotodama') auth = AppAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) def seek(db, query): now = arrow.utcnow().timestamp latest_seeked = db.get('%s:latest' % query) if not latest_seeked: tweets = API(auth)\ .search(q=query, result_type='recent', count=1) db.set('%s:latest' % query, tweets.since_id) return tweets = API(auth)\ .search(q=query, result_type='recent', count=100, since_id=int(latest_seeked)) if len(tweets) > 0:
def run():
    """Fetch pharmacy and hospital lists from the HIRA open API and forward
    every item to fluentd (tag td.hira), retrying with exponential back-off."""
    max_retries = 7
    retries = 0
    today = datetime.today().strftime("%Y%m%d")
    # NOTE(review): the service key is hard-coded; consider loading it from
    # configuration/environment instead of source control.
    service_key = '0iL736RveVYyQp%2F99eV2vwEyvArFlIW4cPcPfSZd7sRb0tDE3wbwQTZrmZt3mxB55AOU9HQC3uiA6e%2F8oqt%2FCQ%3D%3D'
    while True:
        try:
            if retries >= max_retries:
                print(today, '-> Data.go.kr API Call Error!!!')
                return
            # --- pharmacy list ---
            url = 'http://apis.data.go.kr/B551182/pharmacyInfoService/getParmacyBasisList'
            queryParams = '?' + 'serviceKey=' + service_key + '&_type=json&numOfRows=100000'
            request = Request(url + queryParams)
            request.get_method = lambda: 'GET'
            response = urlopen(request)
            if response.getcode() == 200:
                response_body = response.read()
                pharm_items = json.loads(response_body.decode('utf-8'))
                print(today, '-> 약국 API호출통해 받은 건수 : ',
                      pharm_items['response']['body']['totalCount'])
            else:
                print(today, '-> 약국 API호출 result : ', response.getcode())
                retries += 1
                time.sleep(2**(retries - 1))
                continue
            # --- hospital list ---
            url = 'http://apis.data.go.kr/B551182/hospInfoService/getHospBasisList'
            queryParams = '?' + 'serviceKey=' + service_key + '&_type=json&numOfRows=100000'
            request = Request(url + queryParams)
            request.get_method = lambda: 'GET'
            response = urlopen(request)
            if response.getcode() == 200:
                # Reuse the response already in hand; the original called
                # urlopen(request) a second time here, issuing a duplicate
                # request for ~100k records.
                response_body = response.read()
                hosp_items = json.loads(response_body.decode('utf-8'))
                print(today, '-> 병원 API호출통해 받은 건수 : ',
                      hosp_items['response']['body']['totalCount'])
            else:
                print(today, '-> 병원 API호출 result : ', response.getcode())
                retries += 1
                time.sleep(2**(retries - 1))
                continue
            sender.setup('td.hira', host='localhost', port=24224)
            print(today, '-> sender.setup ok ')
            # Forward every pharmacy record, then every hospital record,
            # reporting progress every 5000 items.
            tot = 0
            for item in pharm_items['response']['body']['items']['item']:
                tot += 1
                item['inputDate'] = today
                item['inputType'] = '약국'
                event.Event('getParmacyBasisList_getHospBasisList', item)
                if tot % 5000 == 0:
                    print(today, '-> TD 전송 약국진행건수 : ', tot)
            print(today, '-> TD 전송 약국완료건수 : ', tot)
            tot = 0
            for item in hosp_items['response']['body']['items']['item']:
                tot += 1
                item['inputDate'] = today
                item['inputType'] = '병원'
                event.Event('getParmacyBasisList_getHospBasisList', item)
                if tot % 5000 == 0:
                    print(today, '-> TD 전송 병원진행건수 : ', tot)
            print(today, '-> TD 전송 병원완료건수 : ', tot)
            sender.close()
            break
        except Exception:
            # Back off exponentially on any failure and retry.
            retries += 1
            time.sleep(2**(retries - 1))
#!/usr/bin/env python3 import logging import time from fluent import event from fluent import sender from . import alg_utils from . import database_tasks from . import gsuite_tasks from . import report_tasks sender.setup( host=alg_utils.get_config('fluent', 'host'), port=alg_utils.get_config('fluent', 'port'), tag='alg.worker.pickles') server_email = alg_utils.get_config('gsuite', 'server_account') def create_team_book(team_id, team_name, template_file_id, template_sheets): requested_changes = [] gsuite_tasks.create_workbook(server_email, team_name) team_bookfile = gsuite_tasks.get_file_by_name(server_email, team_name) summary_page = gsuite_tasks.fire_spreadsheet_copy( email=server_email, src_workbook_id=template_file_id, sheet_id=template_sheets['summary']['properties']['properties']['sheetId'], dst_workbook_id=team_bookfile['id'] ) requested_changes.append({ 'updateCells': {
# -*- coding: utf-8 -*- from datetime import datetime from time import sleep from fluent import sender from fluent import event # for local fluent sender.setup('alog') # for remote fluent # sender.setup('app', host='127.0.0.1', port=24224) # send event to fluentd, with 'app.follow' tag while True: event.Event('parsed', { 'host': 'api.anjuke.com', 'datetime': str(datetime.now()) })
from celery import Celery from celery.schedules import crontab from fluent import sender, event from .alg_tasks import alg_utils sender.setup(host='fluentd', port=24224, tag='alg.api.app') event.Event('event', { 'task': 'app', 'info': { 'message': 'started up the main application for the api eye' } }) RABBITMQ_USERNAME = alg_utils.get_secret('rabbitmq', 'username') RABBITMQ_PASSWORD = alg_utils.get_secret('rabbitmq', 'password') RABBITMQ_ADDRESS = alg_utils.get_config('rabbit', 'address') REDIS_ADDRESS = alg_utils.get_config('redis', 'address') REDIS_PASSWORD = alg_utils.get_secret('redis', 'password') REDIS_DATABASE = alg_utils.get_config('redis', 'database') RABBIT_URL = 'amqp://' + RABBITMQ_USERNAME + ':' + RABBITMQ_PASSWORD + '@' + RABBITMQ_ADDRESS REDIS_URL = 'redis://:' + REDIS_PASSWORD + '@' + REDIS_ADDRESS + REDIS_DATABASE event.Event('event', { 'task': 'app', 'info': { 'message': 'created the configurations for redis and rabbit', 'params': { 'rabbitmq_username': RABBITMQ_USERNAME, 'rabbitmq_password': RABBITMQ_PASSWORD,