def run_mcp_download_job(minutes_before):
    """Download MCP logs for each configured MCP tenant.

    Fetches credentials for the normal, MADJV and QA MCP tenants from the
    secret store, then attempts a download for each one.  A failure for one
    tenant is logged and does not stop the remaining tenants (same behavior
    as the original per-tenant try/except blocks).

    Args:
        minutes_before: how many minutes back the download window starts
            (forwarded unchanged to ``download_mcp_log``).
    """
    logger.info('retrieving secrets for MCP')
    mcp_secrets = secret.get_secret('ngsiem-aca-logstash-api', [
        'mcp_username', 'mcp_password', 'mcp_customer_id',
        'mcp_madjv_username', 'mcp_madjv_password', 'mcp_madjv_customer_id',
        'mcp_qa_username', 'mcp_qa_password', 'mcp_qa_customer_id'
    ])
    # (secret key infix, target directory) per tenant; '' is the normal
    # tenant whose secret keys carry no extra infix.
    targets = [
        ('', '/mcp/'),
        ('madjv_', '/mcp_madjv/'),
        ('qa_', '/mcp_qa/'),
    ]
    for infix, directory in targets:
        try:
            download_mcp_log(mcp_secrets[f'mcp_{infix}username'],
                             mcp_secrets[f'mcp_{infix}password'],
                             mcp_secrets[f'mcp_{infix}customer_id'],
                             directory, minutes_before)
        except Exception as e:
            # BUG FIX: the three original handlers all logged the bare
            # message "Error: ..." — include the tenant directory so
            # failures are distinguishable in the logs.
            logger.error(f"Error downloading MCP logs for {directory}: {str(e)}")
def pull_pp_siem_logs():
    """Query the Proofpoint TAP SIEM "all" endpoint for the last 5 minutes.

    Returns:
        The decoded JSON payload on success, or ``None`` when the request,
        HTTP status or JSON decoding fails; failures also raise an SNS alert
        via ``sns.generate_sns``.
    """
    url = 'https://tap-api-v2.proofpoint.com/v2/siem/all'
    headers = {'content-type': 'application/json', 'Accept': 'application/json'}
    qs = {"sinceSeconds": 300, "format": "JSON"}
    logger.info('retrieving secrets for pp_siem')
    secrets = secret.get_secret('ngsiem-aca-logstash-api', [
        'proofpoint_tap_user', 'proofpoint_tap_password', 'sns_api_error_arn'
    ])
    try:
        r = requests.get(url,
                         auth=(secrets['proofpoint_tap_user'],
                               secrets['proofpoint_tap_password']),
                         headers=headers, params=qs)
        # BUG FIX: surface HTTP errors instead of JSON-decoding an error
        # body; also removed the leftover debug print(r.content).
        r.raise_for_status()
        return r.json()
    except Exception as e:
        sns.generate_sns("proofpoint_siem")
        logger.error(f"Error for SIEM API call: {str(e)}")
        return None
def set_creds():
    """Export Kafka producer credentials into the process environment."""
    creds = secret.get_secret(
        'kafka_tgrc_team_producer',
        ['KAFKA_USERNAME', 'KAFKA_PASSWORD', 'BOOTSTRAP_URL'])
    # Environment variable -> secret key it is populated from.
    for env_var, secret_key in (('KAFKA_USERNAME', 'KAFKA_USERNAME'),
                                ('KAFKA_PASSWORD', 'KAFKA_PASSWORD'),
                                ('KAFKA_HOSTS', 'BOOTSTRAP_URL')):
        os.environ[env_var] = creds[secret_key]
def pull_okta_logs(minutes_before):
    """Fetch Okta system-log events for a five-minute window.

    The window covers 20 to 15 minutes before "now"; when ``minutes_before``
    is positive, "now" is shifted back by that many minutes (used to replay
    older windows).

    Returns:
        The decoded JSON payload on success, otherwise ``None`` (any failure
        also raises an SNS alert).
    """
    logger.info('retrieving secrets for Okta')
    secrets = secret.get_secret('ngsiem-aca-logstash-api',
                                ['okta_auth', 'sns_api_error_arn', 'okta_url'])
    reference = datetime.datetime.utcnow()
    if minutes_before > 0:
        reference -= datetime.timedelta(minutes=minutes_before)
    window_end = (reference - datetime.timedelta(minutes=15)).isoformat()
    window_start = (reference - datetime.timedelta(minutes=20)).isoformat()
    url = f"{secrets['okta_url']}/api/v1/logs?since={window_start}&until={window_end}"
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Authorization': f'SSWS {secrets["okta_auth"]}',
    }
    try:
        r = requests.get(url, headers=headers)
        if r.status_code != 200:
            logger.error(f"The API query for Okta is not returning a 200: {r.status_code}")
            sns.generate_sns("okta")
            return None
        return r.json()
    except Exception as e:
        logger.error(f"Error occurred when querying for Okta logs: {e}")
        sns.generate_sns("okta")
        return None
def replace_docs(db_name, col_name, docs):
    """Upsert each document into the collection, keyed by its thread_num."""
    collection = mc(get_secret()).get_database(db_name)[col_name]
    for document in docs:
        collection.find_one_and_replace(
            {"thread_num": document['thread_num']}, document, upsert=True)
def main():
    """Define tornado options, build the SockJS-backed application and
    start the IO loop (listens on port 8000)."""
    tornado.options.define("static_path", default = "../static", help = "path to static files directory", type = str)
    tornado.options.define("templates_path", default = "../templates", help = "path to template files directory", type = str)
    tornado.options.define("sctp_port", default = 55770, help = "port of sctp server", type = int)
    tornado.options.define("sctp_host", default = "localhost", help = "host of sctp server", type = str)
    tornado.options.define("event_wait_timeout", default = 10, help = "time to wait commands processing", type = int)
    # BUG FIX: this option is a numeric limit (default 30) but was declared
    # with type = str; the sibling server declares the same option as int.
    tornado.options.define("idtf_serach_limit", default = 30, help = "number of maximum results for searching by identifier", type = int)
    tornado.options.define("redis_host", default = "localhost", help = "host of redis server", type = str)
    tornado.options.define("redis_port", default = 6379, help = "port of redis server", type = int)
    tornado.options.define("redis_db_idtf", default = 0, help = "number of redis database to store identifiers", type = int)
    tornado.options.define("redis_db_user", default = 1, help = "number of redis database to store user info", type = int)

    tornado.options.parse_command_line()
    tornado.options.parse_config_file("server.conf")

    socketRouter = sockjs.tornado.SockJSRouter(ws.SocketHandler, '/sctp')

    rules = [
        (r"/", MainHandler),
        # api
        (r"/api/init/", api.Init),
        (r"/api/cmd/do/", api.CmdDo),
        (r"/api/question/answer/translate/", api.QuestionAnswerTranslate),
        (r"/api/link/content/", api.LinkContent),
        (r"/api/link/format/", api.LinkFormat),
        (r"/api/languages/", api.Languages),
        (r"/api/languages/set/", api.LanguageSet),
        (r"/api/idtf/find/", api.IdtfFind),
        (r"/api/idtf/resolve/", api.IdtfResolve),
        (r"/api/addr/resolve/", api.AddrResolve),
        (r"/api/info/tooltip/", api.InfoTooltip),
        (r"/api/user/", api.User),
    ]
    rules.extend(socketRouter.urls)

    application = tornado.web.Application(
        rules,
        cookie_secret = secret.get_secret(),
        login_url = "/auth/login",
        template_path = tornado.options.options.templates_path,
        static_path = tornado.options.options.static_path,
        xsrf_cookies = False,
        gzip = True
    )

    application.listen(8000)
    tornado.ioloop.PeriodicCallback(try_exit, 1000).start()
    tornado.ioloop.IOLoop.instance().start()
def insert_new_docs(db_name, col_name, docs):
    """Bulk-upsert documents into the collection, keyed by thread_num.

    Each incoming document updates the stored one with the same
    ``thread_num`` via ``$set`` (upsert, so new thread_nums are inserted).
    """
    # BUG FIX: bulk_write raises InvalidOperation on an empty request list.
    if not docs:
        return
    client = mc(get_secret())
    col = client.get_database(db_name)[col_name]
    # Renamed from ``requests``, which shadowed the commonly imported
    # ``requests`` HTTP module name.
    operations = []
    for doc in docs:
        operations.append(UpdateOne({"thread_num": doc['thread_num']},
                                    {'$set': doc}, upsert=True))
    col.bulk_write(operations)
def Run(self, data: str, action_id: str):
    """Run an action; ``action_id`` is empty for the default action.

    ``data`` is either the UNLOCK marker or the path of a secret item
    whose password is put on the clipboard.
    """
    log_secret.debug('activate %s (%s)', data, action_id)
    if data == UNLOCK:
        # UNLOCK entry chosen — presumably re-runs the search with the
        # keyring unlocked (TODO confirm update_terms semantics).
        self.update_terms(unlock=True)
    else:
        secret.unlock_all()
        password = secret.get_secret(item_path=data)
        clipboard.put(password)
        # Cancel any pending clear-clipboard timer before scheduling a new
        # one, so only a single countdown is ever live.
        if self.clear_clipboard_timer is not None:
            GLib.source_remove(self.clear_clipboard_timer)
        self.clear_clipboard_timer = GLib.timeout_add_seconds(
            CLIPBOARD_TIMEOUT, self.clear_clipboard)
def setup_module():
    """pytest module-level setup: load site config, the site secret and
    command-line test options into module globals used by the tests."""
    global CONF
    CONF = site_conf()
    global SECRET
    SECRET = secret.get_secret(CONF.get('files', 'secret'))
    global WEB_ADDRESS
    WEB_ADDRESS = CONF.get('web', 'address')
    global WEB_ROOT
    WEB_ROOT = CONF.get('web', 'root')
    global API_URI
    API_URI = WEB_ADDRESS + '/aiohttp'
    global TEST_USER
    # NOTE(review): ``pytest.config`` was removed in modern pytest — this
    # relies on an old pinned pytest version; confirm before upgrading.
    TEST_USER = pytest.config.getoption('--test_user')
    global TEST_USER_EMAIL
    TEST_USER_EMAIL = pytest.config.getoption('--test_user_email')
    logging.debug('using test user ' + TEST_USER + ' email ' + TEST_USER_EMAIL)
    global TEST_HUNTER
    TEST_HUNTER = pytest.config.getoption('--test_hunter')
    global user_data
    user_data = None
    global loop
    loop = asyncio.get_event_loop()
while True:
    # Query the TRAP API (JSON format) starting from minutes_before, send
    # logs to Kafka, reduce minutes_before on the next iteration and
    # repeat; once the window catches up (now - 20 minutes), run the job
    # once every 5 minutes.
    logger.info(f'minutes before: {minutes_before}')
    if minutes_before <= 0:
        # Caught up with real time: throttle to one query per 5 minutes.
        logger.info('waiting for 5 minutes')
        time.sleep(300)
    logger.info('TRAP query started')
    secrets = secret.get_secret('ngsiem-aca-logstash-api', ['pp_trap_cluster'])
    logs = pull_pp_trap_logs(minutes_before, secrets['pp_trap_cluster'])
    logger.info('TRAP query finished')
    minutes_before = minutes_before - 5
    if logs:
        logger.info('TRAP_produce started')
        # BUG FIX: a stray argument-less run_kafka_producer_job() call was
        # made here before the real call; removed.
        kafka_producer.run_kafka_producer_job(
            logs, 'test_log_security_proofpoint.trap_weekly')
        logger.info('TRAP_produce finished')
    else:
        logger.info("No logs for TRAP call.")
    # Persist progress so a restart resumes from the right offset.
    with open(minutes_before_file, 'w') as minutes_file:
        minutes_before = 0 if minutes_before < 0 else minutes_before
        minutes_file.write(str(minutes_before))
https://docs.djangoproject.com/en/3.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""

from secret import get_secret
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# The key is loaded from the secret store rather than hard-coded here.
SECRET_KEY = get_secret()

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'content.apps.ContentConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
threatIDs.append(inner_map_item["threatID"]) # Getting threatIds from messagesBlocked if response["messagesBlocked"]: for item in response["messagesBlocked"]: for inner_map_item in item["threatsInfoMap"]: threatIDs.append(inner_map_item["threatID"]) return threatIDs def query_siem_api(principal, password, seconds): headers = {'content-type': 'application/json', 'Accept': 'application/json'} qs = {"sinceSeconds": seconds, "format": "JSON"} r = requests.get('https://tap-api-v2.proofpoint.com/v2/siem/all', auth=(principal, password), headers=headers, params=qs) logger.info(f"Output from the SIEM API: {str(r.json())}") return r.json() if __name__ == "__main__": while True: time.sleep(299) secrets = secret.get_secret('ngsiem-aca-logstash-api', ['proofpoint_tap_user', 'proofpoint_tap_password', 'sns_api_error_arn']) siem_logs = query_siem_api(secrets['proofpoint_tap_user'], secrets['proofpoint_tap_password'], 300) threat_ids = parse_for_threatIds(siem_logs) unique_ids = tuple(threat_ids) forensics_logs = query_forensics_api(secrets["proofpoint_tap_user"], secrets["proofpoint_tap_password"], unique_ids) if forensics_logs["forensics"]: kafka_producer.run_kafka_producer_job(forensics_logs, "test_log_security_proofpoint.forensics_api_monthly") logger.info(f"pp_forensics produce finished")
def main():
    """Define tornado options, prepare the database, register handlers
    (api, google auth, admin, sctp websocket) and start the IO loop."""
    tornado.options.define("static_path", default = "../client/static", help = "path to static files directory", type = str)
    tornado.options.define("templates_path", default = "../client/templates", help = "path to template files directory", type = str)
    tornado.options.define("sctp_port", default = 55770, help = "port of sctp server", type = int)
    tornado.options.define("sctp_host", default = "localhost", help = "host of sctp server", type = str)
    tornado.options.define("event_wait_timeout", default = 10, help = "time to wait commands processing", type = int)
    tornado.options.define("idtf_serach_limit", default = 20, help = "number of maximum results for searching by identifier", type = int)
    tornado.options.define("redis_host", default = "localhost", help = "host of redis server", type = str)
    tornado.options.define("redis_port", default = 6379, help = "port of redis server", type = int)
    tornado.options.define("redis_db_idtf", default = 0, help = "number of redis database to store identifiers", type = int)
    tornado.options.define("redis_db_user", default = 1, help = "number of redis database to store user info", type = int)
    tornado.options.define("host", default = "localhost", help = "host name", type = str)
    tornado.options.define("port", default = 8000, help = "host port", type = int)
    tornado.options.define("google_client_id", default = "", help = "client id for google auth", type = str)
    tornado.options.define("google_client_secret", default = "", help = "client secret for google auth", type = str)
    tornado.options.define("apiai_subscription_key", default = "", help = "subscription key for api.ai", type = str)
    tornado.options.define("apiai_client_access_token", default = "", help = "client access token for api.ai", type = str)
    tornado.options.define("user_key_expire_time", default = 600, help = "user key expire time in seconds", type = int)
    tornado.options.define("super_emails", default = "", help = "email of site super administrator (maximum rights)", type = list)
    tornado.options.define("db_path", default = "data.db", help = "path to database file", type = str)
    tornado.options.define("cfg", default = "server.conf", help = "path to configuration file", type = str)

    tornado.options.parse_command_line()
    # The config file is optional; command-line values stand if it is absent.
    if os.path.exists(tornado.options.options.cfg):
        tornado.options.parse_config_file(tornado.options.options.cfg)

    # prepare database
    database = db.DataBase()
    database.init()

    rules = [
        (r"/", MainHandler),
        (r"/static/(.*)", NoCacheStaticHandler, {"path": tornado.options.options.static_path}),
        # api
        (r"/api/init/", api.Init),
        (r"/api/context/", api.ContextMenu),
        (r"/api/cmd/do/", api.CmdDo),
        (r"/api/cmd/text/", NaturalLanguageSearch),
        (r"/api/question/answer/translate/", api.QuestionAnswerTranslate),
        (r"/api/link/content/", api.LinkContent),
        (r"/api/link/format/", api.LinkFormat),
        (r"/api/languages/", api.Languages),
        (r"/api/languages/set/", api.LanguageSet),
        (r"/api/idtf/find/", api.IdtfFind),
        (r"/api/idtf/resolve/", api.IdtfResolve),
        (r"/api/addr/resolve/", api.AddrResolve),
        (r"/api/info/tooltip/", api.InfoTooltip),
        (r"/api/user/", api.User),
        (r"/auth/google$", auth.GoogleOAuth2LoginHandler),
        (r"/auth/logout$", auth.LogOut),
        (r"/admin$", admin.MainHandler),
        (r"/admin/users/get$", admin_users.UsersInfo),
        (r"/admin/users/set_rights$", admin_users.UserSetRights),
        (r"/admin/users/list_rights$", admin_users.UserListRights),
        (r"/sctp", ws.SocketHandler),
    ]

    application = tornado.web.Application(
        handlers = rules,
        cookie_secret = secret.get_secret(),
        login_url = "/auth/google",
        template_path = tornado.options.options.templates_path,
        xsrf_cookies = False,
        gzip = True,
        google_oauth = {"key": tornado.options.options.google_client_id,
                        "secret": tornado.options.options.google_client_secret
                        }
    )

    application.listen(tornado.options.options.port)
    tornado.ioloop.PeriodicCallback(try_exit, 1000).start()
    tornado.ioloop.IOLoop.instance().start()
import time

from flask import Flask, request, jsonify
from werkzeug.exceptions import InternalServerError

from validator import validate, bad_request
from db import DBConn, splice_params
from conf import CONF, APP_NAME, start_logging
from secret import get_secret, create_token
import send_email
from elog import ELog
from upload_srv import upload_client

# Flask application; config and secret key come from the site configuration.
APP = Flask(APP_NAME)
APP.config.update(CONF['flask'])
APP.secret_key = get_secret(CONF['files']['secret'])

with APP.app_context():
    start_logging('srv', CONF['logs']['srv_level'])
    logging.debug('starting in debug mode')

# Shared database connection, also exposed to request handlers as APP.db.
DB = DBConn(CONF.items('db'))
DB.connect()
DB.verbose = True
APP.db = DB


def _create_token(data):
    # Sign tokens with the application's secret key.
    return create_token(data, APP.secret_key)


@APP.errorhandler(InternalServerError)
def internal_error(exception):
from secret import get_secret, create_token
from db import DBConn, splice_params
from conf import CONF

sys.path.append('test')

# Direct database connection for test fixtures and assertions.
DB = DBConn(CONF.items('db'))
DB.verbose = True
DB.connect()

API_URI = 'https://dev.lenfer.ru/api/'
#API_URI = 'http://my.lenfer.ru/api/'

LOGGER = logging.getLogger(__name__)

# Site secret: used both for signing tokens and as the Hashids salt.
SECRET = get_secret(CONF.get('files', 'secret'))


def _create_token(data):
    # Sign a token with the site secret.
    return create_token(data, SECRET)


# Test credentials (redacted in source control).
LOGIN = '******'
PASSWORD = '******'

HASHIDS = Hashids(salt=SECRET.decode('utf-8'), min_length=6)


def rnd_string(length=8):
    """Return a random string of ``length`` characters drawn from CHARS."""
    return ''.join(choice(CHARS) for _ in range(length))
def insert_docs(db_name, col_name, docs):
    """Insert every document in ``docs`` into the given collection."""
    connection = mc(get_secret())
    connection.get_database(db_name)[col_name].insert_many(docs)
import urllib.request as url_request

from bs4 import BeautifulSoup as Soup
from pymongo import UpdateOne, MongoClient as mc

from secret import get_secret

# Mongo connection string, fetched once at import time.
connect_string = get_secret()


class SneakyURLopener(url_request.FancyURLopener):
    # Report a browser-like UA string instead of the Python default.
    version = "Mozilla/5.0"


def get_element(parent, el, attributes, i=0):
    """Return the i-th child of ``parent`` matching ``el``/``attributes``,
    or None (with a diagnostic print) when nothing matches."""
    container = parent.find_all(el, attributes)
    if len(container) > 0:
        return container[i]
    print('Failed to find element.\nel: %s\nattributes: %s' % (el, attributes))
    return None


def insert_docs(db_name, col_name, docs):
    """Insert every document in ``docs`` into the given collection."""
    client = mc(get_secret())
    db = client.get_database(db_name)
    col = db[col_name]
    col.insert_many(docs)


def insert_new_docs(db_name, col_name, docs):
    client = mc(get_secret())
    db = client.get_database(db_name)
    col = db[col_name]
'Sync-App-Token': syncplicity_secrets['syncplicity_app_key'], 'Content-Type': 'application/x-www-form-urlencoded' } try: sync_post = requests.post(url, data='grant_type=client_credentials', headers=headers) token = sync_post.json()["access_token"] return (token) except Exception as e: logger.error(f"Exception occurred in get_access_token: {str(e)}") return None if __name__ == "__main__": yesterday = datetime.date.today() - datetime.timedelta(days=1) yesterday.strftime('%yyyy-%mm-%dd') syncplicity_secrets = secret.get_secret( 'ngsiem-aca-logstash-api', ['syncplicity_app_key', 'syncplicity_client_id', 'syncplicity_secret']) # while True: directory = '/syncplicity_admin/' token = get_access_token(syncplicity_secrets) # print(token) if token is not None: filenames = get_filenames(token, syncplicity_secrets) for file in filenames: if "Audit administrator actions" in file["Filename"] and str(yesterday) in file["Filename"]: logger.info(f"Found this file: {file['Filename']}") get_logs(token, file, directory, yesterday) produce_csv_to_kafka("log_audit_syncplicity.adm_monthly", directory) delete_files(directory)
def main():
    """Send cfm (QSO confirmation) request emails.

    Old-style ``yield from`` coroutine: selects pending confirmation
    requests grouped by correspondent, emails each correspondent their QSO
    list with confirm/blacklist links (retrying delivery up to 3 times),
    then marks the sent rows in the database.
    """
    start_logging('send_cfm_requests')
    logging.debug('start send cfm requests')
    conf = site_conf()
    secret = get_secret(conf.get('files', 'secret'))
    db_params = conf.items('db')
    _db = DBConn(db_params)
    yield from _db.connect()
    # One row per correspondent with all of their unsent QSOs aggregated
    # into a json array; blacklisted correspondents are excluded.
    data = yield from _db.execute(
        """ select correspondent, correspondent_email, json_agg(json_build_object('callsign', callsign, 'stationCallsign', station_callsign, 'rda', rda, 'band', band, 'mode', mode, 'tstamp', to_char(tstamp, 'DD mon YYYY HH24:MI'), 'rcvRST', rec_rst, 'sntRST', sent_rst)) as qso from (select * from cfm_request_qso where not sent and correspondent not in (select callsign from cfm_request_blacklist)) as data group by correspondent, correspondent_email""",
        None, True)
    if not data:
        return
    sent_to = []
    for row in data:
        # The token authorizes the correspondent's confirm/blacklist pages.
        token = create_token(secret, {'callsign': row['correspondent']})
        link_cfm = conf.get('web', 'address') + '/#/cfm_qso/?token=' + token + '&callsign=' + row['correspondent']
        link_blacklist = conf.get('web', 'address') + '/#/cfm_blacklist/?token=' + token
        qso_txt = format_qsos(row['qso'])
        # Russian-language email body — runtime string, left untranslated.
        text = (""" Здравствуйте, {correspondent}. Просим Вас поддержать проект CFMRDA для создания единой базы по программе диплома RDA. Вы можете подтвердить конкретные связи, которые очень важны Вашим корреспондентам, приславшим запросы или залить полностью свой лог. """ + qso_txt + """ Для подтверждения QSO зайдите на эту страницу - {link_cfm} Если указанные данные верны, поставьте отметки "Подтвердить" в каждом QSO и нажмите кнопку "OK" Было бы удобнее, если бы вы зарегистрировались на CFMRDA.ru и загрузили бы свои логи в базу данных сайта. Если Вы не хотите регистрироваться или у Вас возникли какие-то трудности при загрузке, пришлите свой лог, желательно в формате ADIF на адрес техподдержки [email protected] Спасибо. 73! Команда CFMRDA.ru Если вы не хотите в дальнейшем получать подобные запросы на подтверждение QSO, пройдите по этой ссылке - {link_blacklist} И нажмите кнопку "Не присылать мне больше запросов от CFMRDA.ru" """).format_map({'correspondent': row['correspondent'], 'link_cfm': link_cfm, 'link_blacklist': link_blacklist})
        retries = 0
        while retries < 3:
            if send_email(text=text,
                          fr=conf.get('email', 'address'),
                          to=row['correspondent_email'],
                          subject="Запрос на подтверждение QSO от CFMRDA.ru"):
                # NOTE(review): success paths here and below log at error
                # level — looks like they should be logging.info; confirm.
                logging.error('cfm request email sent to ' + row['correspondent'])
                sent_to.append(row)
                break
            else:
                retries += 1
                yield from asyncio.sleep(10)
        if retries == 3:
            logging.error('Email delivery failed. Correspondent: ' + row['correspondent'] + ', address: ' + row['correspondent_email'])
        # Pace outgoing mail to one correspondent every 10 seconds.
        yield from asyncio.sleep(10)
    logging.error('all requests were sent')
    if sent_to:
        # Mark the emailed QSOs as sent.
        yield from _db.execute("""
            update cfm_request_qso set sent = true, status_tstamp = now() where correspondent = %(correspondent)s and not sent""",
            sent_to)
        logging.error('cfm_request_qso table updated')
        # Upsert the per-correspondent request timestamp.
        yield from _db.execute(
            """ update cfm_requests set tstamp = now() where callsign = %(correspondent)s; insert into cfm_requests select %(correspondent)s, now() where not exists (select 1 from cfm_requests where callsign = %(correspondent)s) """,
            sent_to)
        logging.error('cfm_requests table updated')
def wipe_col(db_name, col_name):
    """Delete every document in the named collection."""
    mc(get_secret()).get_database(db_name)[col_name].delete_many({})
#!/usr/bin/python3
#coding=utf-8
"""Dev utility: print a signed cfm token for callsign R7CL."""
from common import site_conf
from secret import get_secret, create_token

_conf = site_conf()
_secret = get_secret(_conf.get('files', 'secret'))
print(create_token(_secret, {'callsign': 'R7CL'}))
from secret import get_secret, create_token
from conf import CONF
from db import DBConn

# CLI: lenfer device creator — device type required, user login optional.
PARSER = argparse.ArgumentParser(description="lenfer device creator")
PARSER.add_argument('type')
PARSER.add_argument('user', nargs='?')
ARGS = PARSER.parse_args()

DB = DBConn(CONF.items('db'))
DB.connect()

if not ARGS.type:
    sys.exit('Device type is required.')

PARAMS = {'device_type_id': ARGS.type, 'login': ARGS.user, 'props': []}

# The site secret doubles as the Hashids salt.
SECRET = get_secret(CONF['files']['secret']).decode('utf-8')
HASHIDS = Hashids(salt=SECRET, min_length=6)

DEVICE_TYPE_DATA = DB.execute(
    """
        select id, software_type, updates, props, modes
            from devices_types
            where id = %(device_type_id)s
    """, PARAMS)
if not DEVICE_TYPE_DATA:
    sys.exit('Invalid device type.')
if DEVICE_TYPE_DATA['modes']:
    # Default to the device type's first available mode.
    PARAMS['mode'] = DEVICE_TYPE_DATA['modes'][0]['id']
def main():
    """Define tornado options, build the SockJS-backed application and
    start the IO loop (listens on port 8000)."""
    tornado.options.define("static_path", default="../static",
                           help="path to static files directory", type=str)
    tornado.options.define("templates_path", default="../templates",
                           help="path to template files directory", type=str)
    tornado.options.define("sctp_port", default=55770,
                           help="port of sctp server", type=int)
    tornado.options.define("sctp_host", default="localhost",
                           help="host of sctp server", type=str)
    tornado.options.define("event_wait_timeout", default=10,
                           help="time to wait commands processing", type=int)
    # BUG FIX: this option is a numeric limit (default 30) but was declared
    # with type=str; the sibling server declares the same option as int.
    tornado.options.define(
        "idtf_serach_limit", default=30,
        help="number of maximum results for searching by identifier",
        type=int)
    tornado.options.define("redis_host", default="localhost",
                           help="host of redis server", type=str)
    tornado.options.define("redis_port", default=6379,
                           help="port of redis server", type=int)
    tornado.options.define(
        "redis_db_idtf", default=0,
        help="number of redis database to store identifiers", type=int)
    tornado.options.define("redis_db_user", default=1,
                           help="number of redis database to store user info",
                           type=int)

    tornado.options.parse_command_line()
    tornado.options.parse_config_file("server.conf")

    socketRouter = sockjs.tornado.SockJSRouter(ws.SocketHandler, '/sctp')

    rules = [
        (r"/", MainHandler),
        # api
        (r"/api/init/", api.Init),
        (r"/api/cmd/do/", api.CmdDo),
        (r"/api/question/answer/translate/", api.QuestionAnswerTranslate),
        (r"/api/link/content/", api.LinkContent),
        (r"/api/link/format/", api.LinkFormat),
        (r"/api/languages/", api.Languages),
        (r"/api/languages/set/", api.LanguageSet),
        (r"/api/idtf/find/", api.IdtfFind),
        (r"/api/idtf/resolve/", api.IdtfResolve),
        (r"/api/addr/resolve/", api.AddrResolve),
        (r"/api/info/tooltip/", api.InfoTooltip),
        (r"/api/user/", api.User),
    ]
    rules.extend(socketRouter.urls)

    application = tornado.web.Application(
        rules,
        cookie_secret=secret.get_secret(),
        login_url="/auth/login",
        template_path=tornado.options.options.templates_path,
        static_path=tornado.options.options.static_path,
        xsrf_cookies=False,
        gzip=True)

    application.listen(8000)
    tornado.ioloop.PeriodicCallback(try_exit, 1000).start()
    tornado.ioloop.IOLoop.instance().start()
def main():
    """Define tornado options, prepare the database and logger, register
    handlers (api, google auth, admin, sctp websocket) and start the IO
    loop."""
    tornado.options.define("static_path", default="../client/static",
                           help="path to static files directory", type=str)
    tornado.options.define("templates_path", default="../client/templates",
                           help="path to template files directory", type=str)
    tornado.options.define("sctp_port", default=55770,
                           help="port of sctp server", type=int)
    tornado.options.define("sctp_host", default="localhost",
                           help="host of sctp server", type=str)
    tornado.options.define("event_wait_timeout", default=10,
                           help="time to wait commands processing", type=int)
    tornado.options.define(
        "idtf_serach_limit", default=100,
        help="number of maximum results for searching by identifier",
        type=int)
    tornado.options.define("redis_host", default="localhost",
                           help="host of redis server", type=str)
    tornado.options.define("redis_port", default=6379,
                           help="port of redis server", type=int)
    tornado.options.define(
        "redis_db_idtf", default=0,
        help="number of redis database to store identifiers", type=int)
    tornado.options.define("redis_db_user", default=1,
                           help="number of redis database to store user info",
                           type=int)
    tornado.options.define("host", default="localhost", help="host name",
                           type=str)
    tornado.options.define("port", default=8000, help="host port", type=int)
    tornado.options.define("google_client_id", default="",
                           help="client id for google auth", type=str)
    tornado.options.define("google_client_secret", default="",
                           help="client secret for google auth", type=str)
    tornado.options.define("apiai_subscription_key", default="",
                           help="subscription key for api.ai", type=str)
    tornado.options.define("apiai_client_access_token", default="",
                           help="client access token for api.ai", type=str)
    tornado.options.define("user_key_expire_time", default=600,
                           help="user key expire time in seconds", type=int)
    tornado.options.define(
        "super_emails", default="",
        help="email of site super administrator (maximum rights)",
        type=list)
    tornado.options.define("db_path", default="data.db",
                           help="path to database file", type=str)
    tornado.options.define("cfg", default="server.conf",
                           help="path to configuration file", type=str)

    tornado.options.parse_command_line()
    # The config file is optional; command-line values stand if it is absent.
    if os.path.exists(tornado.options.options.cfg):
        tornado.options.parse_config_file(tornado.options.options.cfg)

    # prepare database
    database = db.DataBase()
    database.init()

    # prepare logger
    logger_sc.init()

    rules = [
        (r"/", MainHandler),
        (r"/static/(.*)", NoCacheStaticHandler, {
            "path": tornado.options.options.static_path
        }),
        # api
        (r"/api/init/", api.Init),
        (r"/api/context/", api.ContextMenu),
        (r"/api/cmd/do/", api.CmdDo),
        (r"/api/cmd/text/", NaturalLanguageSearch),
        (r"/api/question/answer/translate/", api.QuestionAnswerTranslate),
        (r"/api/link/content/", api.LinkContent),
        (r"/api/link/format/", api.LinkFormat),
        (r"/api/languages/", api.Languages),
        (r"/api/languages/set/", api.LanguageSet),
        (r"/api/idtf/find/", api.IdtfFind),
        (r"/api/idtf/resolve/", api.IdtfResolve),
        (r"/api/addr/resolve/", api.AddrResolve),
        (r"/api/info/tooltip/", api.InfoTooltip),
        (r"/api/user/", api.User),
        (r"/auth/google$", auth.GoogleOAuth2LoginHandler),
        (r"/auth/logout$", auth.LogOut),
        (r"/admin$", admin.MainHandler),
        (r"/admin/users/get$", admin_users.UsersInfo),
        (r"/admin/users/set_rights$", admin_users.UserSetRights),
        (r"/admin/users/list_rights$", admin_users.UserListRights),
        (r"/sctp", ws.SocketHandler),
    ]

    application = tornado.web.Application(
        handlers=rules,
        cookie_secret=secret.get_secret(),
        login_url="/auth/google",
        template_path=tornado.options.options.templates_path,
        xsrf_cookies=False,
        gzip=True,
        google_oauth={
            "key": tornado.options.options.google_client_id,
            "secret": tornado.options.options.google_client_secret
        })

    application.listen(tornado.options.options.port)
    tornado.ioloop.PeriodicCallback(try_exit, 1000).start()
    tornado.ioloop.IOLoop.instance().start()
    additionalInfo
    '''
    list_dicts = []
    for log in logs["value"]:
        print(f"LOG WITHOUT FLATTENING: {log}")
        final_dict = dict(log)
        user_agent = []
        # additionalInfo arrives as a JSON-encoded list of entries each
        # carrying a "Value" field; collect just the values.
        additional_info = json.loads(log["additionalInfo"])
        for item in additional_info:
            user_agent.append(item["Value"])
        final_dict["user_agent"] = user_agent
        del final_dict['additionalInfo']
        list_dicts.append(final_dict)
        print(f"Log after flattening: {final_dict}")
    return list_dicts


if __name__ == "__main__":
    secrets = secret.get_secret('ngsiem-aca-logstash-api', [
        'azure_graph_secret', 'azure_graph_client_id', 'azure_graph_scope',
        'azure_graph_tenant'
    ])
    token = get_auth_token(secrets)
    logs = pull_graph_alerts(secrets, token)
    # Only produce to Kafka when the Graph API returned any alerts.
    if logs["value"]:
        flattened_logs = flatten_objects(logs)
        kafka_producer.run_kafka_producer_job(
            flattened_logs,
            "log_security_azure.graph_identity_protection_api_monthly")
def set_creds():
    """Export Kafka connection credentials into the process environment."""
    kafka_conf = secret.get_secret('ngsiem-aca-kafka-config',
                                   ['username', 'password', 'kafka_hosts'])
    # Environment variable -> secret key it is populated from.
    mapping = {'KAFKA_USERNAME': 'username',
               'KAFKA_PASSWORD': 'password',
               'KAFKA_HOSTS': 'kafka_hosts'}
    for env_name, key in mapping.items():
        os.environ[env_name] = kafka_conf[key]
        'kafka_tgrc_team_producer',
        ['KAFKA_USERNAME', 'KAFKA_PASSWORD', 'BOOTSTRAP_URL'])
    os.environ['KAFKA_USERNAME'] = secrets['KAFKA_USERNAME']
    os.environ['KAFKA_PASSWORD'] = secrets['KAFKA_PASSWORD']
    os.environ['KAFKA_HOSTS'] = secrets['BOOTSTRAP_URL']


class PollingException(Exception):
    """Exception carrying a message for polling failures."""
    def __init__(self, message):
        self.message = message


if __name__ == '__main__':
    set_creds()
    pod_secrets = secret.get_secret('ngsiem-aca-logstash-api', [
        'proofpoint_pod_api_key_original_prod', 'proofpoint_websocket_key',
        'proofpoint_pod_hosted_name'
    ])
    PRODUCTION_HOST = 'logstream.proofpoint.com'
    # Websocket deflate extension with context takeover disabled on both
    # the client and server sides.
    compression = 'permessage-deflate; client_no_context_takeover; server_no_context_takeover'
    filename = "/opt/scripts/proofpoint_client_prod/sample.txt"
    config = Config(PRODUCTION_HOST, websocket_extensions=compression,
                    ping_interval=5, msg_type="message", trace=True)
    # Authenticate with the hosted cluster name and the prod API key.
    client = KafkaClient(config, [
        pod_secrets["proofpoint_pod_hosted_name"],
        pod_secrets["proofpoint_pod_api_key_original_prod"]
    ], logger)
    # time.sleep(30)