def build_credible_report(report_package):
    """Pull one Credible report and tag it for later workbook assembly.

    Args:
        report_package: dict with keys 'key_id', 'param1', 'param2',
            'param3' identifying the report to run.

    Returns:
        dict with 'report_tag' (key_id/param string that uniquely names
        the report) and 'report_data' (Google-formatted report rows).
    """
    event.Event(
        'event', {
            'pickle_task': 'started building credible report',
            'report_package': str(report_package)
        })
    key_id = report_package['key_id']
    params = [
        report_package['param1'],
        report_package['param2'],
        report_package['param3'],
    ]
    # fixed: original message ran the key id and "with params" together
    # ("...key id: 42with params: ..."); lazy %-style args for logging
    logging.debug('starting report for key id: %s with params: %s',
                  key_id, params)
    report_data = report_tasks.get_google_formatted_report(
        report_id=key_id,
        params=params,
    )
    # the tag format must match the report keys built by the mailer
    # (key_id_param1_param2_param3)
    report_tag = '_'.join(
        str(part) for part in (key_id, report_package['param1'],
                               report_package['param2'],
                               report_package['param3']))
    event.Event(
        'event', {
            'task': 'pickle_tasks',
            'info': {
                'message': 'finished building credible report',
                # fixed log-key typo: was 'report_pacakge'
                'report_package': str(report_package)
            }
        })
    return {'report_tag': report_tag, 'report_data': report_data}
def build_credible_lists():
    """Assemble the master report list and per-employee mailing list.

    Returns:
        dict with 'report_list' (de-duplicated report packages to build)
        and 'mailing_list' (emp_id -> email address + ordered report keys).
    """
    event.Event('event', {'pickle_task': 'started credible lists for the mailer'})
    report_list = []
    email_list = {}
    mailing_lists = database_tasks.get_mailing_list()
    for emp_id, mailed_reports in mailing_lists.items():
        report_keys = []
        for mailed_report in mailed_reports:
            # key format must match the report_tag produced when reports
            # are built: key_id_param1_param2_param3
            report_keys.append('_'.join([
                str(mailed_report['key_id']),
                str(mailed_report['param1']),
                str(mailed_report['param2']),
                str(mailed_report['param3']),
            ]))
            # keep a single de-duplicated list of reports across all users
            if mailed_report not in report_list:
                report_list.append(mailed_report)
        email_list[emp_id] = {
            'email_address': report_tasks.get_emp_email(emp_id),
            'reports': report_keys,
        }
    event.Event('event', {'pickle_task': 'built credible lists for the mailer'})
    return {
        'report_list': report_list,
        'mailing_list': email_list,
    }
def get_sheet_values(email, workbook_id, sheet_name):
    """Read every cell value from one sheet of a Google Sheets workbook.

    Args:
        email: account to authorize the request as.
        workbook_id: spreadsheet id.
        sheet_name: sheet (tab) name, used directly as the A1 range.

    Returns:
        list of row lists; empty list when the sheet has no data.
    """
    event.Event(
        'event', {
            'gsuite_task': 'started the extraction of workbook data',
            'workbook_id': str(workbook_id),
            'email': str(email),
            'sheet name': str(sheet_name)
        })
    http = get_authorized_http('drive', email)
    service = build("sheets", "v4", http=http, discoveryServiceUrl=discovery_url)
    response = service.spreadsheets().values().get(
        spreadsheetId=workbook_id,
        range=str(sheet_name),
    ).execute()
    # fixed: the Sheets API omits the 'values' key entirely for an empty
    # range, so the original response['values'] raised KeyError on
    # empty sheets
    values = response.get('values', [])
    event.Event(
        'event', {
            'gsuite_task': 'completed the extraction of workbook data',
            'workbook_id': str(workbook_id),
            'email': str(email),
            'sheet name': str(sheet_name)
        })
    return values
def package_single_field_similarities(results, field_name):
    """Filter similarity results for one field down to strong matches.

    Args:
        results: dict mapping service_id -> similarity score (0..1).
        field_name: name of the field the scores belong to.

    Returns:
        {field_name: {service_id: score}} containing only scores > 0.9,
        or {} when nothing exceeded the threshold.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started package_single_field_similarities',
                'results': results,
                'field_name': field_name
            }
        })
    returned_data = {}
    for service_id in results:
        result = results[service_id]
        # only similarities above 0.9 are considered meaningful matches
        if result > 0.9:
            returned_data[service_id] = result
    if returned_data:
        event.Event(
            'event', {
                'task': 'overwatch_tasks',
                'info': {
                    'message': 'finished package_single_field_similarities',
                    # fixed: original logged field_name[returned_data],
                    # which indexes a string with a dict -> TypeError
                    'field_name': str({field_name: returned_data})
                }
            })
        return {field_name: returned_data}
    else:
        event.Event(
            'event', {
                'task': 'overwatch_tasks',
                'info': {
                    'message': 'finished package_single_field_similarities',
                    'field_name': '{}'
                }
            })
        return {}
def check_for_field_similarities(fields):
    """Fan out one similarity task per field, gathered by a Celery chord.

    Note: removes 'clientvisit_id' from the caller's dict, as the
    original implementation did.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started check_for_field_similarities',
                'fields': fields
            }
        })
    # pop == read + delete: the service id gates the similarity lookups
    service_id = fields.pop('clientvisit_id')
    header = [
        calculate_field_similarity.s(name, value, service_id)
        for name, value in fields.items()
    ]
    result = chord(header)(package_all_field_similarities.s())
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished check_for_field_similarities',
                'result': result
            }
        })
    return result
def store_field_similarities(field_name, field_value, service_id):
    """Dispatch LCS-percentage tasks comparing one field against all peers.

    Each comparison is fired immediately via .delay(); the collected
    AsyncResults are not consumed here.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started store_field_similarity',
                'field_name': field_name,
                'field_value': field_value,
                'service_id': service_id
            }
        })
    # Credible field ids for the comparable note sections
    field_ids = {
        'presentation': '514015',
        'interventions': '514018',
        'response': '514019'
    }
    field_id = field_ids[field_name]
    foreign_fields = report_tasks.get_given_field(field_id, service_id)
    header = [
        get_lcs_percentage.delay(foreign_service_id, service_id, field_id,
                                 package['field'], field_value)
        for foreign_service_id, package in foreign_fields.items()
    ]
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished store_field_similarity',
                'field_name': field_name,
                'field_value': field_value,
                'service_id': service_id
            }
        })
def retrieve_tenant(tenant_id):
    """Fetch one tenant row from the USEMGMNT table.

    Args:
        tenant_id: tenant primary key to look up.

    Returns:
        Tenant built from the matching row, or None when no row matched
        or the query raised.
    """
    # fixed: the original used annotations (`conn: psycopg2.connect()`)
    # instead of assignments, leaving both names unbound
    conn = None
    tn = None
    sender.setup('fluentd.test', host='localhost', port=24224)
    try:
        conn = psycopg2.connect(database="postgres", user="******",
                                password="******", host="127.0.0.1",
                                port="5432")
        event.Event('follow', {'function': 'retrieve_tenant',
                               'status': 'DB_conn_opened'})
        cur = conn.cursor()
        # fixed: parameterized query; the original concatenated tenant_id
        # straight into the SQL string (injection risk)
        cur.execute(
            "SELECT TENANT_ID, USERS_ID, DEVICES, ADMIN_USER_ID, APPS_ID,"
            " RULES from USEMGMNT WHERE TENANT_ID = %s;", (str(tenant_id),))
        for row in cur.fetchall():
            # fixed: the original passed row[0] for all six columns
            tn = Tenant(row[0], row[1], row[2], row[3], row[4], row[5])
            print(tn.tenant_id)
            print(tn.users_id)
            print(tn.devices)
            print(tn.admin_user_id)
            print(tn.apps_id)
            print(tn.rules)
        conn.commit()
        # fixed: the success event sat after `return` and never ran
        event.Event('follow', {'function': 'retrieve_tenant',
                               'status': 'successful'})
        return tn
    except Exception:
        # fixed: bare `except:` and a bogus `self.` in a module function
        event.Event('follow', {'function': 'retrieve_tenant',
                               'status': 'exception occurred'})
    finally:
        # only close what was actually opened
        if conn is not None:
            conn.close()
            event.Event('follow', {'function': 'retrieve_tenant',
                                   'status': 'DB_conn_closed'})
def synchronize_groups():
    """Add each Credible user with a gsuite account to their team's group."""
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started synchronize_groups'
            }
        })
    group_map = gsuite_tasks.get_group_name_map()
    credible_user_dict = report_tasks.get_credible_email_report()
    for user_email, user_record in credible_user_dict.items():
        # skip Credible users with no gsuite account
        if not gsuite_tasks.check_user(user_email):
            continue
        user_groups = gsuite_tasks.get_emp_group(user_email)
        team_email = group_map[user_record['team_name']]
        if team_email not in user_groups:
            gsuite_tasks.add_google_group_user(
                user_email, team_email, user_record['is_admin'])
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished synchronize_groups'
            }
        })
def write_values(email, workbook_id, sheet_name, sheet_data):
    """Batch-write row data into one sheet of a workbook.

    Args:
        email: account to authorize the request as.
        workbook_id: spreadsheet id.
        sheet_name: target sheet name (used as the write range).
        sheet_data: list of row lists to write.
    """
    event.Event(
        'event', {
            'gsuite_task': 'started writing values to a workbook sheet',
            'workbook_id': str(workbook_id),
            'email': str(email),
            'sheet_name': str(sheet_name),
            'sheet_data': str(sheet_data)
        })
    # USER_ENTERED lets Sheets parse values as though typed by a user
    request_body = {
        'valueInputOption': 'USER_ENTERED',
        'data': {
            'range': sheet_name,
            'values': sheet_data
        }
    }
    http = get_authorized_http('drive', email)
    service = build("sheets", "v4", http=http, discoveryServiceUrl=discovery_url)
    write_response = service.spreadsheets().values().batchUpdate(
        spreadsheetId=workbook_id,
        body=request_body,
    ).execute()
    event.Event(
        'event', {
            'gsuite_task': 'finished writing values to a workbook sheet',
            'workbook_id': str(workbook_id),
            'email': str(email),
            'sheet_name': str(sheet_name),
            'sheet_data': str(sheet_data),
            'write return': str(write_response)
        })
def check_for_hotwords(fields):
    """Scan each field for clinical hotwords.

    Args:
        fields: dict mapping field name -> field text.

    Returns:
        dict mapping field name -> list of hotwords found in that field.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started check_for_hotwords',
                'fields': fields
            }
        })
    tagged_hotwords = {}
    for field_name, raw_text in fields.items():
        # normalize: lowercase, drop commas, split on single spaces
        words = raw_text.lower().replace(',', '').split(' ')
        for hotword in CLINICAL_HOTWORDS:
            if hotword in words:
                tagged_hotwords.setdefault(field_name, []).append(hotword)
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished check_for_hotwords',
                'tagged_hotwords': tagged_hotwords
            }
        })
    return tagged_hotwords
def lp_unapproved_commsupt():
    """Queue arbitration for unapproved commsupt notes in fixed-size batches.

    Skips services already pending arbitration, then dispatches
    arbitrate_note tasks in chords of NOTE_ARBITRATION_BATCH_SIZE.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started lp_unapproved_commsupt'
            }
        })
    unapproved_service_ids = report_tasks.get_unapproved_pilot_whales()
    if unapproved_service_ids:
        pending_service_ids = database_tasks.check_arbitration_status(
            unapproved_service_ids)
        ids_for_arbitration = [
            x for x in unapproved_service_ids if x not in pending_service_ids
        ]
        batch_count = 0
        header = []
        for unapproved_service_id in ids_for_arbitration:
            # flush a full batch first, THEN enqueue the current id.
            # fixed: the original if/else dropped the current id on every
            # flush iteration, silently skipping one note per batch
            if batch_count >= NOTE_ARBITRATION_BATCH_SIZE:
                chord(header)(adjust_notes.s())
                header = []
                batch_count = 0
            header.append(arbitrate_note.s(unapproved_service_id))
            batch_count += 1
        # dispatch the final partial batch
        if len(header) > 0:
            chord(header)(adjust_notes.s())
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished lp_unapproved_commsupt'
            }
        })
def lp_clinical_team():
    """Push client profile changes to Credible in fixed-size batches."""
    event.Event('event', {
        'task': 'overwatch_tasks',
        'info': {
            'message': 'started lp_clinical_team'
        }
    })
    update_data = report_tasks.get_profile_changes()
    count = 0
    header = []
    batch = {}
    for client_id in update_data:
        # flush a full batch first, THEN add the current client.
        # fixed: the original if/else dropped the current client's update
        # on every flush iteration, losing one profile change per batch
        if count >= UPDATE_CLIENT_BATCH_SIZE:
            header.append(update_client_profile_batch.s(batch))
            batch = {}
            count = 0
        batch[client_id] = update_data[client_id]
        count += 1
    # enqueue the final partial batch
    if len(batch) > 0:
        header.append(update_client_profile_batch.s(batch))
    group(header)()
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished lp_clinical_team'
            }
        })
def get_lcs_percentage(foreign_service_id, local_service_id, field_id,
                       foreign_field, local_field):
    """Score how much of local_field is shared with foreign_field via LCS.

    Stores the rounded similarity in the database and returns it.

    Returns:
        {foreign_service_id: similarity rounded to 2 places}.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started get_lcs_percentage',
                'foreign_service_id': foreign_service_id,
                'local_service_id': local_service_id,
                'field_id': field_id,
                'foreign_field': foreign_field,
                'local_field': local_field
            }
        })
    result = lcs(local_field, foreign_field)
    # fixed: an empty local_field divided by zero; an empty field shares
    # nothing, so score it 0.0
    if local_field:
        percentage_match = float(len(result)) / float(len(local_field))
    else:
        percentage_match = 0.0
    database_tasks.store_similarity(local_service_id=local_service_id,
                                    foreign_service_id=foreign_service_id,
                                    field_id=field_id,
                                    similarity=round(percentage_match, 2))
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished get_lcs_percentage',
                'return': {
                    foreign_service_id: round(percentage_match, 2)
                }
            }
        })
    return {foreign_service_id: round(percentage_match, 2)}
def select_audit_targets(num_targets):
    """Generate audit targets and email them to each team's clinical command.

    Args:
        num_targets: how many audit targets to generate.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started select_audit_targets',
                'num_targets': num_targets
            }
        })
    target_package = generate_audit_targets(num_targets)
    dates = target_package['dates']
    targets = target_package['targets']
    # subject names the audit window
    subject = ('audit targets for ' + dates['start_date'] + ' - ' +
               dates['end_date'])
    for team_name, team_targets in targets.items():
        clinical_command = report_tasks.get_clinical_command(team_name)
        recipients = [
            clinical_command[emp_id]['email'] for emp_id in clinical_command
        ]
        gsuite_tasks.send_email_to_multiple(recipients, subject,
                                            str(team_targets))
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished select_audit_targets',
                'num_targets': num_targets
            }
        })
def create_workbook(email, workbook_name):
    """Create an empty Google Sheets workbook owned by the given account.

    Args:
        email: account to authorize the request as.
        workbook_name: name for the new workbook.

    Returns:
        id of the newly created workbook.
    """
    event.Event(
        'event', {
            'task': 'gsuite_tasks',
            'info': {
                'message': 'started creating a workbook',
                'email': str(email),
                'workbook_name': str(workbook_name)
            }
        })
    http = get_authorized_http('drive', email)
    service = build("drive", "v3", http=http,
                    discoveryServiceUrl=drive_discovery_url)
    # creating a Drive file with the spreadsheet mime type yields a workbook
    response = service.files().create(body={
        'mimeType': 'application/vnd.google-apps.spreadsheet',
        'name': workbook_name,
    }).execute()
    workbook_id = response['id']
    event.Event(
        'event', {
            'task': 'gsuite_tasks',
            'info': {
                'message': 'finished creating a workbook',
                'email': str(email),
                'workbook_name': str(workbook_name),
                'new_workbook_id': str(workbook_id)
            }
        })
    return workbook_id
def build_single_workbook(reports, receipt):
    """Copy a user's reports out of the master workbook into their own.

    Args:
        reports: dict with 'email_address' and ordered 'reports' tags.
        receipt: dict with 'master_workbook_id' and 'report_receipt'
            (report tag -> sheet name in the master workbook).

    Returns:
        dict with a success 'message' and the new 'workbook_id'.
    """
    event.Event(
        'event', {
            'pickle_task': 'started building user workbook',
            'report_package': str(reports)
        })
    email = reports['email_address']
    ordered_report_tags = reports['reports']
    workbook_name = time.strftime("%x") + '_daily report'
    master_workbook_id = receipt['master_workbook_id']
    report_receipt = receipt['report_receipt']
    workbook_id = gsuite_tasks.create_workbook(email=email,
                                               workbook_name=workbook_name)
    for report_tag in ordered_report_tags:
        sheet_name = report_receipt.get(report_tag)
        # read the sheet out of the master copy as the server account,
        # then recreate it in the user's workbook
        values = gsuite_tasks.get_sheet_values(email=server_email,
                                               workbook_id=master_workbook_id,
                                               sheet_name=sheet_name)
        gsuite_tasks.create_sheet(email=email,
                                  workbook_id=workbook_id,
                                  sheet_name=sheet_name)
        gsuite_tasks.write_values(email=email,
                                  workbook_id=workbook_id,
                                  sheet_name=sheet_name,
                                  sheet_data=values)
    event.Event(
        'event', {
            'pickle_task': 'finished building user workbook',
            'report_package': str(reports)
        })
    return {
        'message': email + ' mailed successfully',
        'workbook_id': workbook_id
    }
def build_master_workbook(reports):
    """Write every report into a single master workbook, one sheet each.

    Args:
        reports: iterable of dicts with 'report_tag' and 'report_data'.

    Returns:
        dict with 'master_workbook_id' and 'report_receipt'
        (report tag -> sheet name).
    """
    event.Event(
        'event', {
            'task': 'pickle_tasks',
            'info': {
                'message': 'started building master workbook',
                'reports': str(reports)
            }
        })
    reports_receipt = {}
    workbook_name = time.strftime("%x") + '_daily report'
    workbook_id = gsuite_tasks.create_workbook(email=server_email,
                                               workbook_name=workbook_name)
    for report in reports:
        event.Event(
            'event', {
                'task': 'pickle_tasks',
                'info': {
                    'message': 'started building master workbook worksheet',
                    'report': str(report)
                }
            })
        report_tag = report['report_tag']
        sheet_name = get_sheet_name(report_tag)
        # one sheet per report, filled with that report's data
        gsuite_tasks.create_sheet(email=server_email,
                                  workbook_id=workbook_id,
                                  sheet_name=sheet_name)
        gsuite_tasks.write_values(
            email=server_email,
            workbook_id=workbook_id,
            sheet_name=sheet_name,
            sheet_data=report['report_data'],
        )
        reports_receipt[report_tag] = sheet_name
        event.Event(
            'event', {
                'task': 'pickle_tasks',
                'info': {
                    'message': 'finished building master workbook',
                    'report': str(report),
                    'sheet_name': sheet_name
                }
            })
    # drop the default blank sheet the workbook was created with
    gsuite_tasks.delete_first_sheet_id(email=server_email,
                                       workbook_id=workbook_id)
    receipt = {
        'master_workbook_id': workbook_id,
        'report_receipt': reports_receipt,
    }
    event.Event(
        'event', {
            'task': 'pickle_tasks',
            'info': {
                'message': 'finished building master workbook',
                'reports': str(reports)
            }
        })
    return receipt
def arbitrate_note(service_id, ordered_checks=None):
    """Run the configured checks against one service's note and package
    the arbitration result.

    Args:
        service_id: id of the service (note) to arbitrate.
        ordered_checks: optional dict mapping a check name to its
            argument list; each named check must resolve to a callable
            visible in this function's local scope (see NOTE below).

    Returns:
        {service_id: {'service_package': ..., 'arbitration': ...}}

    Raises:
        SyntaxError: when a requested check is given the wrong number
            of arguments.
        KeyError: when a requested check name cannot be resolved.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started arbitrate_note',
                'service_id': service_id,
                'ordered_checks': ordered_checks
            }
        })
    # avoid a mutable default argument
    if ordered_checks is None:
        ordered_checks = {}
    go_ahead = database_tasks.check_single_service_status(service_id)
    if go_ahead:
        # NOTE(review): only the arbitration-start timestamp appears to be
        # gated on go_ahead; the checks below run regardless — confirm this
        # is intended and not a lost indentation level
        database_tasks.record_arbitration_start(service_id, time.time())
    service_package = report_tasks.get_service_package(service_id)
    fields = report_tasks.get_commsupt_fields(service_id)
    header = []
    for check in ordered_checks:
        # NOTE(review): resolving check names via locals() only finds names
        # already bound in THIS call frame (go_ahead, fields, ...), not
        # module-level check functions — presumably globals() was intended;
        # verify before relying on ordered_checks
        if check in locals():
            called_function = locals()[check]
            arguments = ordered_checks[check]
            sig = signature(called_function)
            # every check is assumed to take service_id plus its own args,
            # hence the `- 1` on the parameter count
            if len(arguments) != len(sig.parameters) - 1:
                raise SyntaxError('incorrect arguments passed for ' +
                                  str(called_function))
            header.append(called_function.s(*arguments))
        else:
            raise KeyError('requested check does not exist!')
    # built-in checks always run: field-level checks plus the tx-date check
    field_check = check_fields(service_id,
                               fields=fields,
                               ghost=True,
                               hotword=True,
                               clone=True,
                               similarities=False,
                               restrict_to_full_clones=True)
    tx_date_check = check_tx_date(service_id)
    arbitration = package_arbitration(field_check, tx_date_check)
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished arbitrate_note',
                'service_id': service_id,
                'arbitration': {
                    'service_package': service_package,
                    'arbitration': arbitration
                }
            }
        })
    return {
        service_id: {
            'service_package': service_package,
            'arbitration': arbitration
        }
    }
def emit(self, messages):
    """Forward a message dict to fluent, stripping routing flags first.

    Middleware messages go out under the 'footprint' tag; everything
    else uses the message's own (truthy) label_name, defaulting to ''.
    """
    is_middleware = messages['is_middleware']
    del messages['is_middleware']
    if is_middleware:
        event.Event('footprint', messages)
        return
    label_name = ''
    # only a truthy label is consumed; a falsy one stays in the payload
    if messages.get('label_name'):
        label_name = messages.pop('label_name')
    event.Event(label_name, messages)
def testLogging(self):
    """Send two app.follow events: one plain, one with an explicit timestamp."""
    for extra_kwargs in ({}, {'time': int(0)}):
        event.Event('follow', {
            'from': 'userA',
            'to': 'userB'
        }, **extra_kwargs)
def delete_first_sheet_id(email, workbook_id):
    """Remove the default first sheet (sheet id 0) from a workbook."""
    event.Event(
        'event', {
            'gsuite_task': 'started the deletion of the first workbook sheet',
            'workbook_id': str(workbook_id),
            'email': str(email)
        })
    # sheet id 0 is the blank sheet every new workbook starts with
    delete_sheet(email, workbook_id, 0)
    event.Event(
        'event', {
            'gsuite_task': 'completed the deletion of the first workbook sheet',
            'workbook_id': str(workbook_id),
            'email': str(email)
        })
def check_for_ghosts(fields):
    """Flag fields that look ghost-written: too short, too few words, or
    with a high ratio of misspelled words.

    Args:
        fields: dict of field name -> text; 'clientvisit_id' is skipped.

    Returns:
        dict possibly containing 'character_results', 'word_results',
        and 'spelling' findings (each keyed by field name).
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started check_for_ghosts',
                'fields': fields
            }
        })
    ghost_results = {}
    checker = enchant.DictWithPWL('en_US', IGNORE_WORDLIST_PATH)
    # single-pass equivalent of the chained .replace(..., ' ') calls
    separators = str.maketrans({ch: ' ' for ch in ',"\'/\n-.()&:'})
    for field_name, raw_value in fields.items():
        if field_name == 'clientvisit_id':
            continue
        field = raw_value.lower()
        # very short fields (< 11 chars) are suspicious on their own
        if len(field) < 11:
            ghost_results['character_results'] = {
                field_name: str(len(field))
            }
        words = field.translate(separators).split(' ')
        # so are fields with fewer than 6 words
        if len(words) < 6:
            ghost_results['word_results'] = {field_name: str(len(words))}
        off_words = [
            word for word in words
            if word and not alg_utils.is_int(word) and not checker.check(word)
        ]
        # more than 1-in-6 misspelled words flags the field
        if len(off_words) / len(words) > 1 / 6:
            ghost_results['spelling'] = {field_name: off_words}
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished check_for_ghosts',
                'ghost_results': ghost_results
            }
        })
    return ghost_results
def portalRoute():
    """Fetch product reviews for the current request and log the payload."""
    forwarded = getForwardHeaders(request)
    portal = getProductReviews(forwarded)
    event.Event('portalRoute', {'data': portal})
    return portal
def bi(self, date, server_id, operation, str_data):
    """Send one BI log record to fluentd, then retry any queued failures.

    The tag encodes root dir, zone, date, server, and operation:
    bi.<root_dir>.<zone>.<date>.<server_id>.<operation>.
    """
    tag = '.'.join(
        ['bi.' + self.root_dir, self.zone, date, server_id, operation])
    # internally this ends up in the emit function
    event.Event(tag, {'bi_log': str_data})
    # retry anything event.Event failed to deliver
    self.send_retry()
def log():
    """Accept a JSON log message, forward it to fluentd, and echo a receipt.

    Returns:
        (JSON response, status): 400 with {'status': 'fail'} when no JSON
        body was sent, otherwise 200 with the logged message echoed back.
    """
    data = request.get_json()
    if not data:
        return jsonify({'status': 'fail'}), 400
    # hostname inside a container is the (short) Docker id
    container_id = os.environ.get('HOSTNAME')
    # emit the event to the fluentd microservice
    event.Event(
        'debug', {
            'log': data['message'],
            'container_name': '/arc_fluent_python_1',
            'container_id': container_id,
            'source': 'python'
        })
    # echo the payload back so the caller can confirm it was accepted
    response = {
        'status': 'success',
        'log': data['message'],
        'container_id': container_id,
    }
    return jsonify(response), 200
def test_logging(self):
    """Send one event under the app.follow tag."""
    # XXX: This tests succeeds even if the fluentd connection failed
    payload = {'from': 'userA', 'to': 'userB'}
    event.Event('follow', payload)
def get(self, request):
    """Render hello/index.html from DB and redis data, logging to fluentd.

    Note: the in-memory name edit on the queryset items is never saved.
    """
    # read from the DB
    for item in Friend.objects.all():
        if item.id == 1:
            item.name += 'M'
    friend = Friend.objects.get(id=1)
    print(vars(friend))
    # read the 'test' key from redis
    pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
    bvalue = redis.StrictRedis(connection_pool=pool).get('test')
    value = 'None' if bvalue is None else bvalue.decode()
    # send a log line to fluentd
    sender.setup('debug', host='localhost', port=24224)
    event.Event('follow', {'from': 'userA', 'to': 'userB'})
    # build the template context
    params = {
        'title': 'Hello/Index',
        'message': 'all friend',
        'data': [friend],
        'value': value,
    }
    return render(request, 'hello/index.html', params)
def log(self, child_tag, msg):
    """Send a log record to the central node.

    Args:
        child_tag: tag suffix for log classification, e.g.
            projectname.user.login.
        msg: dict of log content, e.g. level (0/10/20 for
            info/warning/error), client_ip, logtime (unix timestamp,
            defaults to send time), service, status, server_ip,
            user_info, logmsg, user_request.

    Returns:
        The log's uuid (from the accumulated message), or None.

    Raises:
        AssertionError: when msg is not a dict.
    """
    assert isinstance(msg, dict), 'msg must be a dict.'
    # default logtime to "now" when the caller did not supply one.
    # fixed: dict.has_key() was removed in Python 3; use `in`
    if 'logtime' not in msg:
        msg['logtime'] = int(time.time())
    self.msg.update(msg)
    event.Event(child_tag, self.msg)
    uuid = self.msg.get('uuid')
    # the accumulator is reused across calls, so clear it after sending
    self.msg.clear()
    return uuid
def check_for_clones(fields, restrict_to_full_clones=False):
    """Group clone matches for a service by match type.

    Args:
        fields: dict with 'presentation', 'response', 'clientvisit_id'.
        restrict_to_full_clones: when True, only report services that
            matched on both presentation and response.

    Returns:
        dict mapping match type -> list of matching service ids; empty
        dict when restricted and no full clone exists.
    """
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'started check_for_clones',
                'fields': fields,
                'restrict_to_full_clones': restrict_to_full_clones
            }
        })
    clone_package = {}
    presentation = fields['presentation']
    response = fields['response']
    clientvisit_id = fields['clientvisit_id']
    clones = report_tasks.check_service_for_clones(presentation, response,
                                                   clientvisit_id)
    for service_id in clones:
        match = clones[service_id]['match']
        clone_package.setdefault(match, []).append(service_id)
    if restrict_to_full_clones:
        full_match = 'presentation & response'
        # fixed: the original iterated the keys of `clones` (service ids)
        # while comparing them to a match-type string, so a full clone was
        # never detected and clone_package[match_type] could KeyError
        if full_match in clone_package:
            event.Event(
                'event', {
                    'task': 'overwatch_tasks',
                    'info': {
                        'message': 'finished check_for_clones',
                        'match_type': clone_package[full_match]
                    }
                })
            return {full_match: clone_package[full_match]}
        return {}
    event.Event(
        'event', {
            'task': 'overwatch_tasks',
            'info': {
                'message': 'finished check_for_clones',
                # fixed log-key typo: was 'clone_packgage'
                'clone_package': clone_package
            }
        })
    return clone_package
def test_logging_with_timestamp(self):
    """Send one app.follow event carrying an explicit (epoch) timestamp."""
    # XXX: This tests succeeds even if the fluentd connection failed
    payload = {'from': 'userA', 'to': 'userB'}
    event.Event('follow', payload, time=int(0))