def bbscan_parse_uri(scheme, ip, port, title, content, status_code, header,
                     task_name, task_id, tag_name):
    """Dispatch a scanned web target to the vuln queue or to URI parsing.

    If the response content matches the whitelist, push a vuln record
    onto the "VulScan" redis list; otherwise build a Web object and run
    second-level path parsing (BBScan) against the target.  Whitelist
    checking comes first so it applies to every target.

    :param content: response body used for whitelist matching / crawling
    """
    try:
        if check_white_list(content):
            data = {"ip": ip, "port": port, "vul_title": title}
            redis_conn.lpush("VulScan", json.dumps(data))
            # TODO store the BBScan whitelist result into the database
        else:
            log.info("starting parse uri for %s://%s:%s", scheme, ip, port)
            web = Web(scheme, ip, port, title, content, status_code, header,
                      task_name, task_id, tag_name)
            web.init_run()
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # (e.g. celery worker shutdown) are not swallowed.
        log.error("celery task parse_uri error for %s://%s:%s",
                  scheme, ip, port, exc_info=True)
def _https_probe(host, port, probe):
    """Send *probe* over TLS (certificate checks disabled) and return the
    decoded response when it looks like HTTP, else None.

    Extracted because the identical retry appeared twice in http_detect.
    """
    context = ssl.create_default_context()
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    with socket.create_connection((host, port), timeout=SOCKET_TIMEOUT) as conn:
        with context.wrap_socket(conn, server_hostname=host) as sconn:
            sconn.send(probe)
            time.sleep(1)
            data = sconn.recv(102400).decode()
            if data[:5] == 'HTTP/':
                return data
    return None


def http_detect(host, port):
    """Detect whether host:port speaks HTTP, HTTPS or something else.

    :param host: target hostname or IP
    :param port: target port
    :return: (service, banner) where service is 'http', 'https' or
        'unknown'; ports 80/443 are assumed without probing.
    """
    probe = b"GET / HTTP/1.0\r\n\r\n"
    service = 'unknown'
    content = ''
    if int(port) == 80:
        return 'http', ''
    if int(port) == 443:
        return 'https', ''
    socket.setdefaulttimeout(SOCKET_TIMEOUT)
    try:
        with socket.create_connection((host, port), timeout=SOCKET_TIMEOUT) as conn:
            conn.send(probe)
            time.sleep(1)
            banner = conn.recv(102400).decode()
        if banner[:5] == 'HTTP/':
            return 'http', banner
        # Plaintext reply that isn't HTTP -- some servers only answer
        # over TLS, so retry the probe through SSL.
        banner = _https_probe(host, port, probe)
        if banner is not None:
            return 'https', banner
    except ConnectionResetError:
        # Server rejected the plaintext probe outright; retry over TLS.
        # BUG FIX: an error raised during this retry previously escaped
        # the function uncaught; now it degrades to 'unknown'.
        try:
            banner = _https_probe(host, port, probe)
            if banner is not None:
                return 'https', banner
        except Exception:
            pass
    except Exception:
        log.info("http detect service unknown for %s:%s ", host, port)
    return service, content
def wait_for_callback():
    """Block until a bunq callback arrives or the refresh period expires.

    Accepts connections on the module-level serversocket, ignores
    sources outside the bunq IP range, and throttles synchronization to
    at most once per 30 seconds.  Returns when the refresh deadline
    (refresh_callback_minutes) has passed.
    """
    last_sync = time.time()
    next_refresh = time.time() + refresh_callback_minutes * 60
    next_sync = next_refresh
    while True:
        time_left = max(min(next_sync, next_refresh) - time.time(), 0)
        log.info("Waiting for callback for {}...".format(
            helpers.format_seconds(time_left)))
        serversocket.settimeout(time_left)
        try:
            (clientsocket, address) = serversocket.accept()
            clientsocket.close()
            if not network.is_bunq_server(address[0]):
                # BUG FIX: the message had no {} placeholder, so the
                # offending IP was never included in the log line.
                log.warning("Source IP {} not in BUNQ range".format(
                    address[0]))
                continue
            log.info("Incoming call from {}...".format(address[0]))
        except socket.timeout:
            pass
        if next_refresh <= time.time():
            return
        elif time.time() < last_sync + 30:
            # Rate-limit: postpone the sync until 30s after the last one.
            next_sync = last_sync + 30
        else:
            synchronize()
            last_sync = time.time()
            next_sync = next_refresh
def upload_transactions(budget_id, transactions):
    """Push new and dirty transactions to YNAB in batches of 100.

    :param budget_id: YNAB budget identifier
    :param transactions: transaction dicts, newest first
    :return: (created, duplicates, patched) counts
    """
    if config["dry"]:
        log.info("Dry run, skipping upload to YNAB...")
        return 0, 0, 0

    endpoint = "v1/budgets/" + budget_id + "/transactions"
    ordered = list(reversed(transactions))
    created = duplicates = patched = 0

    # Brand-new transactions are POSTed.
    to_create = [t for t in ordered if t.get("new")]
    for batch in chunker(to_create, 100):
        log.info("Creating transactions up to {}...".format(
            batch[-1]["date"]))
        result = post(endpoint, {"transactions": batch})
        created += len(result["transaction_ids"])
        duplicates += len(result["duplicate_import_ids"])

    # Existing-but-modified transactions are PATCHed.
    to_patch = [t for t in ordered if t.get("dirty") and not t.get("new")]
    for batch in chunker(to_patch, 100):
        log.debug("Patching transactions up to {}...".format(
            batch[-1]["date"]))
        result = patch(endpoint, {"transactions": batch})
        patched += len(result["transaction_ids"])

    return created, duplicates, patched
def write_state(self):
    """Persist the in-memory state: to SSM on AWS, else to a local file."""
    if os.environ.get("AWS_REGION"):
        # Running on AWS (Lambda): keep state in the parameter store.
        log.info('Writing SSM state {0}'.format(self.ssm_path))
        serialized = json.dumps(self.state, indent=4)
        parameter_store.put_parameter(self.ssm_path, serialized)
    else:
        with open(self.state_fn, "w") as fh:
            json.dump(self.state, fh, indent=4)
def portmap_search():
    """Discover and select a upnp internet gateway; log on failure."""
    if not upnp:
        return
    log.info("Searching for upnp gateway...")
    try:
        upnp.discover()
        upnp.selectigd()
    except Exception as exc:
        log.error("Error searching for upnp gateway: {0}".format(exc))
def get_public_ip():
    """Return the host's public IP address.

    Prefers the local interface address (when already public), then the
    upnp gateway's external address, then an external lookup service.
    """
    candidate = get_local_ip()
    if not is_private_ip(candidate):
        return candidate
    candidate = get_portmap_external_ip()
    if candidate:
        return candidate
    log.info("Retrieving public IP from {}...".format(public_ip_url))
    return requests.get(public_ip_url).text
def add_callbacks(sync):
    """Register the stored AWS callback URL on every synced bunq account."""
    url = state.get("aws_callback")
    log.info("SSM callback = \"{}\"".format(url))
    if not url:
        return
    log.info("Adding callbacks...")
    for account in sync.get_bunq_accounts():
        bunq_api.add_callback(account["bunq_user_id"],
                              account["bunq_account_id"],
                              "bunq2ynab-lambda", url)
def get_portmap_external_ip():
    """Return the upnp gateway's external IP, or None when unavailable."""
    if not upnp:
        return None
    try:
        external_ip = upnp.externalipaddress()
        log.info("Retrieved external IP {} from upnp gateway...".format(
            external_ip))
        return external_ip
    except Exception:
        # Narrowed from a bare except: this is a best-effort lookup, so
        # any gateway error just means "no external IP known", but
        # SystemExit/KeyboardInterrupt must still propagate.
        return None
def portmap_remove(port):
    """Best-effort removal of the TCP upnp mapping for *port*."""
    if not upnp or not port:
        return
    log.info("Removing upnp port {} mapping...".format(port))
    try:
        if not upnp.deleteportmapping(port, 'TCP'):
            log.error("Failed to remove upnp port mapping.")
    except Exception as exc:
        log.error("Error removing upnp port mapping: {0}".format(exc))
def get_private_key():
    """Return the RSA private key from state, generating one if absent.

    A newly generated 2048-bit key is stored PEM-encoded under
    "private_key", together with the api_token in use at creation time.
    """
    pem_str = state.get("private_key")
    if pem_str:
        return crypto.load_privatekey(crypto.FILETYPE_PEM, pem_str)
    log.info("Generating new private key...")
    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)
    pem_bytes = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)
    state.set("private_key", pem_bytes.decode("utf-8"))
    # Record which API token was active when this key was generated.
    state.set("private_key_for_api_token", config.get("api_token"))
    return key
def read_ssm_config(self):
    """Load self.config from the SSM parameter at self.ssm_path.

    Exits the process when the parameter cannot be fetched or parsed,
    since the application cannot run without configuration.
    """
    # BUG FIX: the message had no {} placeholder, so .format() silently
    # dropped ssm_path from the log line.
    log.info('Reading config from SSM {}'.format(self.ssm_path))
    try:
        resp = parameter_store.fetch_parameter(self.ssm_path)
        self.config = json.loads(resp)
        log.debug('Fetched configuration')
    except Exception as e:
        log.critical(
            "Error loading configuration from SSM Parameter: {}: {}".
            format(self.ssm_path, e))
        sys.exit(1)
def crawl(self, path): """ 从redis里面取,分离出来content,content是bytes类型 :param path: str :return: """ try: if path == '/': self.content = self.content else: headers = dict(HEADERS, Range='bytes=0-204800') self.content = requests.get(self.url + path, headers=headers, verify=False, allow_redirects=False, timeout=HTTP_TIMEOUT).content # print("content is %s" % self.content) log.info("BBSCan: parse link from content") soup = BeautifulSoup(self.content, "html.parser") for link in soup.find_all(['a', 'link', 'script', 'img']): url = link.get('href', '').strip() or link.get('src', '').strip() if url.startswith('..'): continue # print ("url is %s" % url) if not url.startswith('/') and url.find('//') < 0: url = '/' + url url, depth = self.cal_depth(url) if depth <= MAX_DEPTH: # child = url.strip('/').split('/') # for i in range(len(child)): # self.get_all_uri('/'+ '/'.join(child[:i+1])) # uri.append(url) # log.info("get first url %s", url) # if url.strip('/').find( '/') > 0: #解析/1/2/3/为/1/,/1/2/,/1/2/3/ log.info("get url %s", url) child = url.strip('/').split('/') for i in range(len(child)): self.get_all_uri('/' + '/'.join(child[:i + 1])) else: self.get_all_uri(url) # print ("url is %s:%s" % (url, depth)) # url 放入到队列里面 # print("URIS is %s" % list(set(url))) # 爬取页面的路径之后,去重 except Exception as e: print("Error %s" % str(e)) log.error("BBScan: crawl %s%s failed", self.base_url, path, exc_info=True)
def teardown_callback():
    """Remove registered bunq callbacks and the upnp port mapping."""
    log.info("Cleaning up...")
    for account in sync_obj.get_bunq_accounts():
        try:
            bunq_api.remove_callback(account["bunq_user_id"],
                                     account["bunq_account_id"],
                                     "bunq2ynab-autosync")
        except Exception as exc:
            log.info("Error removing callback: {}".format(exc))
    try:
        network.portmap_remove(portmap_port)
    except Exception as exc:
        log.error("Error removing upnp port mapping: {}".format(exc))
def register_device():
    """Register this installation as a bunq device server.

    When "single_ip" is configured the API token is restricted to the
    host's public IP; otherwise any IP ('*') is permitted.  Marks the
    registration in state on success.
    """
    if config.get("single_ip"):
        permitted_ips = [network.get_public_ip()]
    else:
        permitted_ips = ['*']
    log.info("Registering permitted IPs {}".format(",".join(permitted_ips)))
    post("v1/device-server", {
        "description": "bunq2ynab on " + network.get_hostname(),
        "secret": config.get("api_token"),
        "permitted_ips": permitted_ips,
    })
    state.set("device_registered", "True")
def fingerprint_scan(ip, port):
    """Probe ip:port with the raw PROBE payloads and match SIGNS patterns.

    :param ip: target IP address
    :param port: target port
    :return: matched service name, or "unknown" when nothing matches
    """
    ret = "unknown"
    if int(port) not in [80, 443]:
        # NOTE(review): the original logged "port in 80 or 443, return"
        # here without returning, and the condition looks inverted
        # relative to the message.  Behavior kept: all ports still scan.
        log.info("port in 80 or 443, return")
    response = b''  # BUG FIX: was unbound when connect_ex failed
    sock = None
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if sock.connect_ex((ip, int(port))) == 0:
            # Send probes until one elicits a response.
            for payload in PROBE:
                sock.sendall(payload.encode())
                response = sock.recv(256)
                if response:
                    break
            log.info('Fingerprint response for %s:%s is %s',
                     ip, port, response)
            if response:
                # SIGNS entries are b'...|name|regex'; match the regex
                # and report the name field.
                for pattern in SIGNS:
                    pattern = pattern.split(b'|')
                    if re.search(pattern[-1], response, re.I | re.M):
                        ret = pattern[1].decode()
                        log.info('get fingerprint success for %s:%s is %s',
                                 ip, port, ret)
                        break
    except socket.timeout:
        log.info("get fingerprint timeout")
    except Exception:
        log.error('get fingerprint failed', exc_info=True)
    finally:
        # BUG FIX: the socket was closed inside the probe loop, so any
        # second probe hit a closed socket; close exactly once here.
        if sock is not None:
            sock.close()
    return ret
def get_all_uri(self, uri='/'):
    """Record a discovered path and recurse into crawling it.

    Skips paths already processed, and stops once self.limit processed
    URLs is reached.  Otherwise the full URL is pushed onto the
    "bbscan_uri" redis list for the scanner workers, the result is
    saved, and the path is crawled for further links.

    :param uri: path to process, defaults to '/'
    """
    try:
        url = str(uri)
        log.info("get path for %s%s", self.base_url, url)
        url_pattern = url
        if (url_pattern in self.urls_processed
                or len(self.urls_processed) >= self.limit):
            log.info("BBScan STOP: processed Max Url limit: %s, Get: %s",
                     self.urls_processed, url_pattern)
            return
        log.info("add url to processed %s", url_pattern)
        self.urls_processed.add(url_pattern)
        # Queue the discovered URL for the scanner workers.
        redis_conn_byte.lpush("bbscan_uri", self.url + url)
        log.info('save url %s', url)
        self.save_res_to_redis(url)
        self.crawl(url)
    except Exception:
        # BUG FIX: failures were print()ed and silently swallowed; log
        # them with a stack trace like the rest of the module does.
        log.error("get_all_uri failed for %s", uri, exc_info=True)
def load(self):
    """Populate self.state once: from SSM on AWS, else from a local file.

    Creates an empty state file when none exists.  Subsequent calls are
    no-ops.
    """
    if self.loaded:
        return
    if os.environ.get("AWS_REGION"):
        log.info('Fetching SSM state {0}'.format(self.ssm_path))
        resp = parameter_store.fetch_parameter(self.ssm_path)
        self.state.update(json.loads(resp))
    elif os.path.exists(self.state_fn):
        # Open read/write so a missing write permission fails early.
        with open(self.state_fn, "r+") as fh:
            self.state.update(json.load(fh))
    else:
        # No state yet: create an empty state file.
        self.write_state()
    self.loaded = True
def check_http_status(scheme, host, port, task_name, task_id, tag_name):
    """Probe scheme://host:port and queue valid targets for BBScan.

    Requests the URL, follows a single 301/302 redirect to https, pulls
    the page title and headers, and pushes the bundle onto the
    "BBScan_First" redis list for the BBScan workers.

    :param scheme: 'http' or 'https'
    :param host: target hostname or IP
    :param port: target port
    :return: True when the target was queued, False otherwise
    """
    url = "%s://%s:%s" % (scheme, host, port)
    try:
        log.info('Checking Http Status Valid: %s', url)
        status_code, headers, content = http_request(url)
        if status_code:
            location = headers.get('Location', '')
            # Skip status codes the scanner is configured to ignore.
            if status_code not in HTTP.IGNORE_CODE:
                # Follow one http->https redirect and re-request the
                # final location.
                if status_code in [301, 302
                                   ] and location.startswith("https://"):
                    scheme, host, port = get_hostname_port(location)
                    status_code, headers, content = http_request(location)
                m = re.search('<title>(.*?)</title>', decode_text(content))
                title = m.group(1) if m else ''
                header = get_headers(headers)
                banner = header + decode_text(content)
                rdata = {
                    'scheme': scheme,
                    'port': port,
                    'ip': host,
                    'title': title,
                    'status_code': status_code,
                    "banner": banner,
                    'task_name': task_name,
                    'task_id': task_id,
                    'tag_name': tag_name
                }
                # Queue for BBScan to parse and scan.
                redis_conn.lpush("BBScan_First", json.dumps(rdata))
                return True
    except Exception as e:
        log.error('[Warning] Get http connection failed %s:%s' % (host, port),
                  exc_info=True)
    return False
def get_installation_token():
    """Return the bunq installation token, requesting one if needed.

    The token is cached in state under "installation_token".

    :raises Exception: when the installation reply carries no token
    """
    token = state.get("installation_token")
    if token:
        return token
    log.info("Requesting installation token...")
    public_key = get_public_key()
    pem = crypto.dump_publickey(crypto.FILETYPE_PEM, public_key)
    method = "v1/installation"
    data = {"client_public_key": pem.decode("utf-8")}
    reply = post(method, data)
    token = None
    for row in reply:
        if "Token" in row:
            token = row["Token"]["token"]
    if not token:
        raise Exception("No token returned by installation")
    state.set("installation_token", token)
    # BUG FIX: a freshly requested token was never returned (the cached
    # path above returns; compare get_session_token).
    return token
def portmap_add(try_port, local_port):
    """Create a TCP upnp mapping from an external port to local_port.

    Starts at try_port (falling back to local_port) and, on
    "already mapped" conflicts, advances to the next candidate for up
    to 128 attempts.

    :return: the mapped external port on success, None otherwise
    """
    if not upnp:
        return
    if not try_port:
        try_port = local_port
    log.info("Adding upnp port mapping...")
    for _ in range(128):
        try:
            upnp.addportmapping(try_port, 'TCP', upnp.lanaddr, local_port,
                                'bynq2ynab-autosync', '')
            return try_port
        except Exception as exc:
            # Any error other than a mapping conflict is fatal.
            if "ConflictInMappingEntry" not in str(exc):
                log.error("Failed to map port: {}".format(exc))
                return
            log.info("Port {} is already mapped, trying next port...".format(
                try_port))
            try_port = next_port(try_port)
def get_session_token():
    """Return a bunq session token, registering prerequisites as needed.

    Ensures an installation token and device registration exist before
    opening a session.  The token is cached in state.

    :raises Exception: when the session-server reply carries no token
    """
    check_stale_api_token()
    cached = state.get("session_token")
    if cached:
        return cached
    if not state.get("installation_token"):
        get_installation_token()
    if not state.get("device_registered"):
        register_device()
    log.info("Requesting session token...")
    reply = post("v1/session-server", {"secret": config.get("api_token")})
    session_token = None
    for row in reply:
        if "Token" in row:
            session_token = row["Token"]["token"]
    if not session_token:
        raise Exception("No token returned by session-server")
    state.set("session_token", session_token)
    return session_token
def check_port_open(host, port):
    """Check TCP reachability of host:port with a 5 second timeout.

    :param host: hostname or IP address
    :param port: port number (int or numeric string)
    :return: True when the connect succeeds, False otherwise
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(5.0)
    try:
        if sock.connect_ex((host, int(port))) == 0:
            log.info("Check port open %s:%s is Open", host, port)
            return True
    except Exception:
        log.error('[Warning] Fail to connect to %s:%s' % (host, port),
                  exc_info=True)
    finally:
        # Always release the socket, whatever the outcome.
        sock.close()
    log.info("Check port Close %s:%s is Close", host, port)
    return False
def get_iban_from_event(event):
    """Extract the payment IBAN from a bunq MUTATION callback event.

    :param event: Lambda-style event dict whose "body" holds the JSON
        callback payload
    :return: the IBAN string, or None when it cannot be determined
    """
    body_str = event.get("body")
    if not body_str:
        log.info("No request body found")
        return
    try:
        body = json.loads(body_str)
    except json.JSONDecodeError as e:
        # BUG FIX: message said "quest body" instead of "request body".
        log.error("Error decoding request body as JSON: {}".format(e))
        return
    nu = body.get("NotificationUrl", {})
    category = nu.get("category")
    if category != "MUTATION":
        # BUG FIX: this formatted the undefined name "e" (a NameError at
        # runtime, since the except-clause name is unbound here); log
        # the offending category instead.
        log.error("Category is not MUTATION but {}".format(category))
        return
    iban = nu.get("object", {}).get("Payment", {}).get("alias", {}).get("iban")
    if not iban:
        log.error("No IBAN found in request body")
        return
    log.info("Found IBAN {} in request body".format(iban))
    return iban
def populate(self):
    """Match bunq accounts to ynab accounts using the "accounts" config.

    Fills self.bunq_accounts, self.ynab_accounts and self.syncpairs.
    Each config rule may use only the four known name keys; a missing
    or empty name defaults to the "*" wildcard.

    :raises Exception: on a second call, a non-list "accounts" config,
        or an unknown key in a rule
    """
    if self.populated:
        raise Exception("Sync object is already populated")
    log.info("Retrieving bunq accounts...")
    self.bunq_accounts = list(bunq_api.get_accounts())
    log.info("Retrieving ynab accounts...")
    self.ynab_accounts = list(ynab.get_accounts())
    self.confpairs = config.get("accounts", [{}])
    if not isinstance(self.confpairs, list):
        raise Exception('Configuration "accounts" must be a list')
    for conf in self.confpairs:
        # Reject unknown keys, then default missing/empty names to "*".
        for k in conf:
            if k not in ("bunq_user_name", "bunq_account_name",
                         "ynab_budget_name", "ynab_account_name"):
                raise Exception('Accounts cannot contain "{}"'.format(k))
        if conf.get("bunq_user_name", "") == "":
            conf["bunq_user_name"] = "*"
        if conf.get("bunq_account_name", "") == "":
            conf["bunq_account_name"] = "*"
        if conf.get("ynab_budget_name", "") == "":
            conf["ynab_budget_name"] = "*"
        if conf.get("ynab_account_name", "") == "":
            conf["ynab_account_name"] = "*"
    # Cartesian product of bunq x ynab accounts, keeping each pair that
    # satisfies at least one config rule.
    self.syncpairs = [
        {
            **ba,
            **ya
        } for ba in self.bunq_accounts for ya in self.ynab_accounts
        if [True for cp in self.confpairs if matching_pairs(ba, ya, cp)]
    ]
    # NOTE(review): this assumes matching_pairs sets "matched" on rules
    # it uses -- not visible here, confirm against its definition.
    for cp in self.confpairs:
        if "matched" not in cp:
            log.warning("No matches for rule {}.".format(pair_to_str(cp)))
        else:
            del cp["matched"]
    self.populated = True
def synchronize():
    """Run one full synchronization, logging timestamps and any errors."""
    try:
        log.info("Starting sync at " + time.strftime('%Y-%m-%d %H:%M:%S'))
        sync_obj.synchronize()
        log.info("Finished sync at " + time.strftime('%Y-%m-%d %H:%M:%S'))
        log.info("")
    except Exception as exc:
        # Keep the loop alive: report the error with its traceback.
        log.error("Error during synching: {}".format(exc))
        log.error(traceback.format_exc())
def scheduler_poc_scan():
    """Drain the "Task_Poc_Scan" redis queue and fan out vuln_scan tasks.

    Each queued JSON object describes a target host/port, its detected
    service and the POCs to run; one celery task is launched per POC.
    """
    while redis_conn.llen("Task_Poc_Scan"):
        job = json.loads(redis_conn.lpop("Task_Poc_Scan"))
        task_name = job["task_name"]
        task_id = job["task_id"]
        tag_name = job["tag_name"]
        hostname = job["hostname"]
        port = job["port"]
        pocs = job["pocs"]
        # POCs may arrive as a comma-separated string or a list.
        if isinstance(pocs, str):
            pocs = pocs.split(',')
        service = job['service']
        log.info('target is %s service is %s'
                 % (hostname + ':' + str(port), service))
        if service and pocs:
            for poc in pocs:
                vuln_scan.delay(hostname, port, service, poc,
                                task_id, task_name, tag_name)
def setup_callback(): global serversocket, callback_ip, callback_port, local_port, portmap_port # Don't try to map ports if we have a public IP callback_ip = callback_port = None using_portmap = False local_ip = network.get_local_ip() if not network.is_private_ip(local_ip): log.info("Host has a public IP...") callback_ip = local_ip elif config.get("port"): log.info("Host has a private IP, port specified, configure forward " + "manually...") callback_ip = network.get_public_ip() else: log.info("Host has a private IP, trying upnp port mapping...") network.portmap_setup() network.portmap_search() callback_ip = network.get_public_ip() using_portmap = True if not callback_ip: log.error("No public IP found, not registering callback.") return if not serversocket: serversocket, local_port = bind_port() log.info("Listening on port {0}...".format(local_port)) serversocket.listen(5) # max incoming calls queued if not using_portmap: callback_port = local_port else: portmap_port = network.portmap_add(portmap_port, local_port) if not portmap_port: log.error("Failed to map port, not registering callback.") return callback_port = portmap_port sync_obj.populate() for acc in sync_obj.get_bunq_accounts(): url = "https://{}:{}/bunq2ynab-autosync".format( callback_ip, callback_port) bunq_api.add_callback(acc["bunq_user_id"], acc["bunq_account_id"], "bunq2ynab-autosync", url)
log.info("Error removing callback: {}".format(e)) try: network.portmap_remove(portmap_port) except Exception as e: log.error("Error removing upnp port mapping: {}".format(e)) # ----- Main loop try: while True: try: sync_obj = sync.Sync() setup_callback() log.info("Starting periodic synchronization...") synchronize() if callback_ip and callback_port: wait_for_callback() else: log.warning("No callback, waiting for {} minutes...".format( refresh_nocallback_minutes)) time.sleep(refresh_nocallback_minutes * 60) except Exception as e: log.error("Error: {}".format(e)) log.error(traceback.format_exc()) log.error("Error occured, waiting 10 seconds.") time.sleep(10) finally: teardown_callback()
def synchronize_account(self, syncpair):
    """Synchronize a single bunq/ynab account pair.

    Reads recent ynab transactions and bunq payments, extends and
    merges them (ZeroFX), uploads the result to ynab, and returns a
    human-readable summary string.
    """
    log.info("Synching {}...".format(pair_to_str(syncpair)))
    fetch_everything = config.get("all", False)
    if fetch_everything:
        start_dt = "2000-01-01"
    else:
        cutoff = datetime.datetime.now() - datetime.timedelta(days=35)
        start_dt = cutoff.strftime("%Y-%m-%d")
    log.info("Reading ynab transactions from {}...".format(start_dt))
    transactions = ynab.get_transactions(syncpair["ynab_budget_id"],
                                         syncpair["ynab_account_id"],
                                         start_dt)
    log.info("Retrieved {} ynab transactions...".format(len(transactions)))
    if not fetch_everything:
        # Widen the window back to cover the oldest relevant YNAB entry.
        start_dt = min(start_dt, get_last_transaction_date(transactions))
    log.info("Reading bunq payments from {}...".format(start_dt))
    payments = bunq_api.get_payments(syncpair["bunq_user_id"],
                                     syncpair["bunq_account_id"],
                                     start_dt)
    log.info("Retrieved {} bunq payments...".format(len(payments)))
    self.extend_transactions(transactions, payments, syncpair)
    zerofx.merge(transactions)
    created, duplicates, patched = ynab.upload_transactions(
        syncpair["ynab_budget_id"], transactions)
    msg = "{}: Created {} and patched {} transactions.{}\n".format(
        pair_to_str(syncpair), created, patched,
        " There were {} duplicates.".format(duplicates) if duplicates else "")
    log.info(msg)
    return msg