def addDefense(node, proto, dTime):
    """Persist one DoS-defense event (node, protocol, timestamp) to the database."""
    connection = init()
    connection.cursor().execute(
        "insert into defense(node, proto, time) values(?, ?, ?)",
        (node, proto, dTime))
    connection.commit()
    log.success('[+] DoS Defense data logged')
def parse(path):
    """Lex and syntax-parse the program at *path*, then report classes/methods or errors."""
    # Load the entire program text up front.
    with open(path) as source:
        program_string = source.read()
    # Lex character-by-character, then syntax-parse the token stream.
    tokens, lex_errors = tokenise(program_string)
    program, parse_errors = syntax_parser.parse(tokens)
    all_errors = lex_errors + parse_errors
    if all_errors:
        logger.error('Errors found')
        logger.info('\n'.join(all_errors))
    else:
        logger.success('No errors found')
        # Walk the program AST and print each class with its method names.
        for class_def in program:
            logger.header(class_def.class_type, end=' - ')
            logger.info(', '.join(feature.identifier for feature in class_def))
def lambda_handler(event, context):
    """AWS Lambda entry point: run the scraper and push the resulting CSV to S3."""
    csv_name = scrape_d.get_content()
    # upload the csv to s3
    aws_session = boto3.Session(aws_access_key_id=AWS_KEY,
                                aws_secret_access_key=AWS_SECRET,
                                region_name=REGION_NAME)
    s3 = aws_session.resource('s3')
    bucket = s3.Bucket(S3_BUCKET)
    try:
        # Raises ClientError if the bucket is missing or inaccessible.
        s3.meta.client.head_bucket(Bucket=S3_BUCKET)
        logger.info('Uploading result of scrape to S3')
        bucket.upload_file('/tmp/%s.csv' % csv_name, '%s.csv' % csv_name,
                           ExtraArgs={'ACL': 'public-read'})
        logger.success('Uploaded %s.csv to S3' % csv_name)
    except ClientError as exc:
        # 404 error, bucket does not exist.
        if exc.response['Error']['Code'] == '404':
            logger.err('Bucket(%s) does not exist' % S3_BUCKET)
        else:
            logger.err(exc)
def packetHandler(pkt):
    """Answer padded ARP requests with a spoofed reply that echoes the padding back.

    Only packets carrying both an ARP layer and a Padding layer whose payload
    differs from fx.defaultPadding are handled; everything else is ignored.
    """
    global mac
    global iface
    # Guard clause: bail out early on packets we do not care about.
    if not (pkt.haslayer(ARP) and pkt.haslayer(Padding)
            and pkt.getlayer(Padding).load != fx.defaultPadding):
        return
    lg.warning(pformat(pkt))
    print('')
    # Build the reply by reversing dst -> src from the request.
    # fx.arpPacket(srcIP, srcMac, dstIP, dstMac, opCode, payload); op code 2 = is-at.
    padding = pkt.getlayer(Padding)
    request = pkt.getlayer(ARP)
    reply = fx.arpPacket(request.pdst, mac, request.psrc, request.hwsrc, 2,
                         padding.load)
    fx.sendPacket(iface, reply)
    lg.success(pformat(reply))
    print('')
def online(sid, data):
    """Handle a device 'online' event; disconnect the session if the device status is 0."""
    l.success(str(data))
    info = db.getDeviceInfo(data['devId'])
    if info['status'] == 0:
        # Status 0: refuse the connection and stop here.
        sio.disconnect(sid)
        return
    l.default('Device: {0} is online'.format(data['devId']))
def _evaluate_model(model, label, prefix, title, results_path,
                    X_train, X_test, y_train, y_test):
    """Fit one classifier, log its accuracy, and save detail + confusion-matrix plots.

    Args:
        model: unfitted scikit-learn estimator.
        label: name used in the success log line.
        prefix: filename prefix for the saved plots (e.g. 'DT' -> 'DT.png').
        title: human-readable title for the confusion-matrix figure.
        results_path: directory (with trailing '/') the plots are written to.
        X_train, X_test, y_train, y_test: train/test split.
    """
    model.fit(X_train, y_train)
    print(model)
    score = model.score(X_test, y_test)  # computed once, reused in log + title
    lg.success('{}: {:.2f}\n'.format(label, score))
    y_pred = model.predict(X_test)
    fx.cm_analysis(y_test, y_pred, fx.class_names,
                   results_path + prefix + '_Detail.png')
    plot_confusion_matrix(model, X_test, y_test, normalize='true',
                          display_labels=fx.class_names, cmap=plt.cm.Blues,
                          include_values=True)
    plt.title('{} - {:.2f}'.format(title, score))
    #plt.show()
    plt.savefig(results_path + prefix + '.eps')
    plt.savefig(results_path + prefix + '.png', dpi=1200)


def generateResults(random_state=20, path='all'):
    """Train and evaluate a suite of classifiers on the module-level dataset.

    Splits `inputs`/`targets` 80/20, then for each model logs its test score
    and writes '<prefix>_Detail.png', '<prefix>.eps' and '<prefix>.png' under
    data_path + path + '/'.

    Args:
        random_state: seed for the train/test split.
        path: subdirectory of data_path to write results into.
    """
    results_path = data_path + path + '/'
    X_train, X_test, y_train, y_test = train_test_split(
        inputs, targets, test_size=0.2, random_state=random_state)
    print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)
    # (estimator, log label, file prefix, plot title) — one row per model,
    # replacing five copy-pasted evaluation stanzas.
    model_table = [
        (LinearSVC(random_state=0), 'LinearSVC', 'LinearSVC', 'Linear SVC'),
        (SVC(random_state=0), 'SVC', 'SVC', 'SVC'),
        (KNeighborsClassifier(n_neighbors=1), 'KNN', 'KNN', 'KNN'),
        (DecisionTreeClassifier(random_state=0), 'DecisionTree', 'DT',
         'Decision Tree'),
        (LogisticRegression(random_state=0), 'LogisticRegression', 'LR',
         'Logistic Regression'),
    ]
    for model, label, prefix, title in model_table:
        _evaluate_model(model, label, prefix, title, results_path,
                        X_train, X_test, y_train, y_test)
def addData(data):
    """Insert one row of per-port switch/flow statistics into the data table."""
    insert_sql = ("insert into data(source_ip, destination_ip, protocol, switch_mac, p1, p2, p3, p4, p1_rx_packets, p1_tx_packets, p1_rx_bytes, p1_tx_bytes, p2_rx_packets, p2_tx_packets, p2_rx_bytes, p2_tx_bytes, p3_rx_packets, p3_tx_packets, p3_rx_bytes, p3_tx_bytes, p4_rx_packets, p4_tx_packets, p4_rx_bytes, p4_tx_bytes, label) values(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")
    connection = init()
    connection.cursor().execute(insert_sql, tuple(data))
    connection.commit()
    log.success('[+] Added new data')
def updateKeys():
    """Regenerate the default key pair and overwrite the stored keys row."""
    # NOTE(review): `devId` is not defined in this function — presumably a
    # module-level global; confirm it is set before this is called.
    privKey, pubKey = func.genDefaultKeys(1)
    connection = init()
    connection.cursor().execute(
        "update keys set privKey=?, pubKey=?, devId=?",
        (privKey, pubKey, devId))
    connection.commit()
    l.success("Priv/Pub Keys updated")
def registerDevice(devId, pubKey):
    """Register a fleet server's public key, with status initialised to 0."""
    connection = init()
    connection.cursor().execute(
        "insert into fleets(fleetId, pubKey, status) values(?, ?, ?)",
        (devId, pubKey, 0))
    connection.commit()
    l.success('Fleet server registered')
def registerDevice(devId, pubKey):
    """Register an IoT device's public key, with status initialised to 0."""
    connection = init()
    connection.cursor().execute(
        "insert into devices(devId, pubKey, status) values(?, ?, ?)",
        (devId, pubKey, 0))
    connection.commit()
    l.success("IoT Device public key registered")
def addDD(table, data):
    """Insert a (protocol, time, model) row into *table*.

    A table name cannot be bound as a SQL parameter, so `table` is validated
    as a plain Python identifier first — this blocks SQL injection through
    the concatenated table name while accepting every legitimate table.

    Args:
        table: destination table name (must be a bare identifier).
        data: iterable of (protocol, time, model) values.

    Raises:
        ValueError: if `table` is not a valid identifier.
    """
    if not table.isidentifier():
        raise ValueError('invalid table name: {!r}'.format(table))
    db = init()
    cursor = db.cursor()
    cursor.execute(
        "insert into " + table + "(protocol, time, model) values(?, ?, ?)",
        tuple(data))
    db.commit()
    log.success('[+] {} data added'.format(table))
def add_entry(iface, ip, mac):
    """Add a static system ARP entry mapping *ip* to *mac* on *iface*.

    Entries for the unspecified address 0.0.0.0 are skipped.
    """
    iface, ip, mac = str(iface), str(ip), str(mac)
    # Guard clause instead of wrapping the body in the condition.
    if ip == '0.0.0.0':
        return
    res = subprocess.check_output(['arp', '-i', iface, '-s', ip, mac],
                                  shell=False)
    log.success('Added Sys ARP Entry => ' + ip + ' = ' + mac)
def addCPU(node, scenario, cpu, mem):
    """Record one CPU/memory usage sample for *node* under *scenario*."""
    connection = init()
    connection.cursor().execute(
        "insert into cpu(node, scenario, cpu, mem) values(?, ?, ?, ?)",
        (node, scenario, cpu, mem))
    connection.commit()
    log.success('[+] CPU data added: Node => {}, Scenario => {}'.format(
        node, scenario))
def add_data(emb, model, folds, fold, kappa, weight):
    """Store one cross-validation result row (embedding, model, fold, kappa, weight)."""
    connection = init()
    log.default("[+] Adding data...")
    connection.cursor().execute(
        "insert into results(emb, model, folds, fold, kappa, weight) values(?, ?, ?, ?, ?, ?)",
        (emb, model, folds, fold, kappa, weight))
    connection.commit()
    log.success("[+] Data added...")
def renderFile(path: str, templateName: str):
    """Gets the template and renders the substitutions to a new file.

    Args:
        path (str): path to where the rendered file will be saved.
        templateName (str): template name.

    Default substitution keys:
        'now' (str): ISO Date and time of rendering.
        'fileName' (str): Filename automatically detected from 'path'.

    Raises:
        FileRenderError: if the target file already exists, the path is
            invalid, or the template contains a substitution key that has no
            value defined.  (The original docstring claimed NameError, but
            the code raises FileRenderError in every case.)
    """
    # Define constants
    _substitutions['now'] = datetime.datetime.now().isoformat()
    fileNameResult = re.search(r'/?([a-zA-Z_\-\d ]+)$', path)

    # File checks
    # NOTE(review): assumes `Path` exposes an os.path-style isfile() — confirm import.
    if Path.isfile(path):
        raise FileRenderError(f'File {path} already exists.')
    if fileNameResult is None:
        raise FileRenderError(f'"path=[{path}]" is not a valid path.')
    fileName = fileNameResult.groups()[0]
    _substitutions['fileName'] = fileName

    # Start rendering file
    logger.info(f'Started rendering files...')
    with open(f'file_templates/{templateName}') as template:
        with open(path, 'wt') as output:
            lineNum = 0
            for line in template:
                lineNum += 1
                for name in _substitutions:
                    # Plain string replace instead of re.sub: substitution
                    # values are inserted literally (re.sub would interpret
                    # backslash escapes in the value and regex metacharacters
                    # in the key).
                    line = line.replace(f'<%{name}%>', _substitutions[name])
                # Check if there was a missed substitution key
                has_left = re.search(r'<%([a-zA-Z\d]*)%>', line)
                if has_left is not None:
                    key = has_left.groups()[0]
                    start, end = has_left.span()
                    # Remove the partial output file before failing.
                    output.close()
                    remove(path)
                    raise FileRenderError(
                        f'Key "{key}" appeared in template but was not given a substitution value [@line {lineNum}: cols {start} -> {end}]'
                    )
                output.write(line)
                print('Rendering' + ('.' * (lineNum % 4) + (' ' * (3 - lineNum % 4))),
                      f'{lineNum} lines rendered', end='\r\033[K')
    logger.success(f'File rendered!')
def download_file(host, file_path, file_name):
    """Download file_path/file_name from *host* over anonymous FTP into the CWD.

    Logs success or failure; every ftplib error is caught and reported
    rather than propagated.
    """
    try:
        ftp = ftplib.FTP(host)
        ftp.login()
        ftp.cwd(file_path)
        # `with` guarantees the local file handle is closed even if the
        # transfer fails (the original leaked the handle returned by open()).
        with open(file_name, 'wb') as local_file:
            ftp.retrbinary('RETR ' + file_path + file_name, local_file.write)
        logger.success('Download: %s' % host + file_path + file_name)
        ftp.quit()
    except ftplib.all_errors as e:
        logger.error('%s\nCannot download file: %s.' % (e, host + file_path + file_name))
def logData(table, ip, mac, seq, tm, scn, binValue, category):
    """Insert one capture record into *table* and log the scenario/bin pair."""
    row = (ip, mac, seq, tm, scn, binValue, category)
    connection = init()
    connection.cursor().execute(
        'insert into ' + table +
        '(ip, mac, seq, time, scenario, bin, category) values(?, ?, ?, ?, ?, ?, ?)',
        row)
    connection.commit()
    lg.success('Added data to {}: {} <==> {}'.format(table, scn, binValue))
def fetch_and_store_dictionary():
    """Download the dictionary from config.DICTIONARY_LINK and write it to
    config.DICTIONARY_FILE."""
    logr.log('Fetching dictionary...')
    r = requests.get(config.DICTIONARY_LINK)
    with open(config.DICTIONARY_FILE, 'wb') as words_file:
        # Stream the body in chunks instead of materialising the response
        # iterator as a throwaway list (`list(r)`) first.
        for chunk in r.iter_content(chunk_size=8192):
            words_file.write(chunk)
    logr.success('Dictionary fetched and stored')
def erasePhotos(photos, threadId, uid):
    """Delete every photo in *photos* (after shift()) via the VK API, logging each one."""
    for photo in shift(photos):
        response = api_call(
            "photos.delete", {
                "owner_id": uid,
                "photo_id": photo["pid"],
                "version": "5.63",
                "access_token": access_token
            })
        logger.success("[%s]: Фото %s удалено" % (threadId, str(photo["pid"])))
def addAllData(data, tsize, y_test, y_pred, model):
    """Compute classification metrics for (y_test, y_pred) and store a results row."""
    metrics = (precision_score(y_test, y_pred),
               recall_score(y_test, y_pred),
               accuracy_score(y_test, y_pred),
               f1_score(y_test, y_pred))
    connection = init()
    connection.cursor().execute(
        "insert into all_data(data, tsize, precision, recall, accuracy, f1, model) values(?, ?, ?, ?, ?, ?, ?)",
        (data, tsize) + metrics + (model,))
    connection.commit()
    log.success('[+] {} <=> {} <=> {}'.format(data, tsize, model))
def download_file(host, file_path, file_name):
    """Download file_path/file_name from *host* over anonymous FTP into the CWD.

    Logs success or failure; every ftplib error is caught and reported
    rather than propagated.
    """
    try:
        ftp = ftplib.FTP(host)
        ftp.login()
        ftp.cwd(file_path)
        # `with` guarantees the local file handle is closed even if the
        # transfer fails (the original leaked the handle returned by open()).
        with open(file_name, 'wb') as local_file:
            ftp.retrbinary('RETR ' + file_path + file_name, local_file.write)
        logger.success('Download: %s' % host + file_path + file_name)
        ftp.quit()
    except ftplib.all_errors as e:
        logger.error('%s\nCannot download file: %s.' % (e, host + file_path + file_name))
def rsa(length):
    """Benchmark plain RSA key generation for a 1024*length-bit key and log the time.

    Args:
        length: key-size multiplier (key is 1024 * length bits).
    """
    l.default('RSA, Generating key {0}'.format(str(length)))
    start = time.time()
    # Bug fix: key size must scale with `length` — it was hard-coded to
    # 1024 * 1, so every benchmark row recorded a 1024-bit key regardless
    # of the `length` value stored with it (cf. oaep(), which scales).
    rsakey = RSA.generate(1024 * length, Random.new().read)
    pubCipher = rsakey.publickey()
    privCipher = rsakey
    end = time.time()
    db = init()
    cursor = db.cursor()
    cursor.execute("insert into algo(category, len, time) values(?, ?, ?)",
                   ('rsa', length, end - start))
    db.commit()
    l.success('done...')
    print('\n')
def oaep(length):
    """Benchmark RSA-OAEP key and cipher setup for a 1024*length-bit key, logging the time."""
    l.default('OAEP, Generating key {0}'.format(str(length)))
    begin = time.time()
    rng = Random.new().read
    key = RSA.generate(1024 * length, rng)
    pubCipher = PKCS1_OAEP.new(key.publickey())
    privCipher = PKCS1_OAEP.new(key)
    elapsed = time.time() - begin
    connection = init()
    connection.cursor().execute(
        "insert into algo(category, len, time) values(?, ?, ?)",
        ('oaep', length, elapsed))
    connection.commit()
    l.success('Done...')
    print('\n')
def compile_dependecy(self, sym, code):
    """Compile the C definition of *sym* out of *code*, inject the machine code
    into the patched binary, and register/return its address as a symbol.

    Raises:
        Exception: if no definition of *sym* is found in the preprocessed code.
    """
    preprocessed = self.prec(code)
    match = re.search(funcdef_re % sym, preprocessed, re.MULTILINE)
    if match is None:
        raise Exception("Function definition not found!")
    # Extract just the matched function definition text.
    func_code = match.groupdict()["all"]
    ccode = "\n".join(self.header) + func_code
    asm = self.preasm(compiler.compile(ccode, self.arch, self.extra_cflags))
    bincode = self.assembler.asm(asm, addr=self.patcher.binary.next_alloc)
    addr = self.patcher.inject(raw=bincode)
    self.addsym(sym, addr)
    logger.success("Resolved %s @ 0x%x" % (sym, addr))
    return addr
def main():
    """Connect to the configured target (optionally with a user-specified pipe)
    and run the exploit, logging the outcome."""
    target = args.target
    logger.target(args.target)
    logger.alert('CONNECTING TO TARGET: {}'.format(logger.ORANGE(args.target)))
    if args.pipe:
        pipe_name = args.pipe
        logger.action('SKIPPING PIPE DISCOVERY')
        logger.alert('USING SPECIFIED PIPE: {}'.format(logger.ORANGE(args.pipe)))
    else:
        # No pipe given: let exploit() discover one.
        pipe_name = None
    try:
        exploit(target, pipe_name)
        logger.success('FINISHED!')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate instead of being reported as connection failures.
        logger.error('COULD NOT CONNECT TO {}'.format(target))
def startPyload():
    """Start pyload via its service command, then invoke it directly with Python."""
    # start pyload
    logger.log("Starting PyLoad")
    proc = subprocess.Popen(startPyloadCmd.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    if proc.wait() > 0:
        logger.error(
            "Could not start pyload with command: {}".format(startPyloadCmd))
    else:
        logger.success("Pyload started.")
    # NOTE(review): source formatting was collapsed — this direct-python start
    # is placed after the if/else (runs in both cases); confirm against the
    # original indentation.
    startPyCmd = startPyloadCmdWithPython.format(installationPath)
    logger.log(
        "Trying to start pyload directly with python: {}".format(startPyCmd))
    os.system(startPyCmd)
def checker(host):
    """Check whether *host* is vulnerable to MS17-010 and probe its named pipes.

    Logs whether the host appears patched (via the TRANS_PEEK_NMPIPE status)
    and which named pipes accept a DCE/RPC bind, and at what word size.
    """
    try:
        conn = MYSMB(host)
        try:
            conn.login(USERNAME, PASSWORD)
        except smb.SessionError as e:
            logger.error('LOGIN FAILED: ' +
                         nt_errors.ERROR_MESSAGES[e.error_code][0])
            sys.exit()
        finally:
            logger.info('CONNECTED TO {}'.format(logger.BLUE(host)))
            logger.info('TARGET OS: ' + conn.get_server_os())
        # Bug fix: this function's parameter is `host`; the original body
        # referenced an undefined name `target` from here on.
        tid = conn.tree_connect_andx('\\\\' + host + '\\' + 'IPC$')
        conn.set_default_tid(tid)
        # test if target is vulnerable
        TRANS_PEEK_NMPIPE = 0x23
        recvPkt = conn.send_trans(pack('<H', TRANS_PEEK_NMPIPE),
                                  maxParameterCount=0xffff,
                                  maxDataCount=0x800)
        status = recvPkt.getNTStatus()
        if status == 0xC0000205:  # STATUS_INSUFF_SERVER_RESOURCES
            logger.success('{} IS NOT PATCHED!'.format(logger.GREEN(host)))
        else:
            logger.error('{} IS PATCHED!'.format(host))
            sys.exit()
        logger.action('CHECKING NAMED PIPES...')
        for pipe_name, pipe_uuid in pipes.items():
            try:
                dce = conn.get_dce_rpc(pipe_name)
                dce.connect()
                try:
                    # A successful 64-bit NDR bind implies a 64-bit endpoint.
                    dce.bind(pipe_uuid, transfer_syntax=NDR64Syntax)
                    logger.success('{}: OK (64 bit)'.format(pipe_name))
                except DCERPCException as e:
                    if 'transfer_syntaxes_not_supported' in str(e):
                        logger.success('{}: OK (32 bit)'.format(pipe_name))
                    else:
                        logger.success('{}: OK ({})'.format(pipe_name, str(e)))
                dce.disconnect()
            except smb.SessionError as e:
                logger.error('{}: {}'.format(
                    pipe_name, nt_errors.ERROR_MESSAGES[e.error_code][0]))
            except smbconnection.SessionError as e:
                logger.error('{}: {}'.format(
                    pipe_name, nt_errors.ERROR_MESSAGES[e.error][0]))
        conn.disconnect_tree(tid)
        conn.logoff()
        conn.get_socket().close()
    except Exception:
        # Narrowed from a bare `except:` so the sys.exit() calls above
        # actually terminate instead of being reported as connection errors.
        logger.error('COULD NOT CONNECT TO {}'.format(logger.RED(host)))
def ms17_010(target):
    """Probe *target* for the MS17-010 (SMBv1) vulnerability.

    Connects over SMB, decides patched/unpatched from the response status to a
    TRANS_PEEK_NMPIPE transaction, then enumerates which named pipes accept a
    DCE/RPC bind and at what word size (64-bit vs 32-bit transfer syntax).
    """
    try:
        logger.info('Attempting to connect to: {}'.format(logger.BLUE(target)))
        conn = MYSMB(target, timeout=5)
        try:
            conn.login(USERNAME, PASSWORD)
        except smb.SessionError as e:
            logger.error('Login failed, got error: ' +
                         logger.RED(nt_errors.ERROR_MESSAGES[e.error_code][0]))
            sys.exit()
        finally:
            # Runs on both success and failure (before sys.exit() unwinds).
            logger.info('Found target OS: ' + logger.BLUE(conn.get_server_os()))
        tid = conn.tree_connect_andx('\\\\' + target + '\\' + 'IPC$')
        conn.set_default_tid(tid)
        # test if target is vulnerable
        TRANS_PEEK_NMPIPE = 0x23
        recvPkt = conn.send_trans(pack('<H', TRANS_PEEK_NMPIPE),
                                  maxParameterCount=0xffff,
                                  maxDataCount=0x800)
        status = recvPkt.getNTStatus()
        if status == 0xC0000205:  # STATUS_INSUFF_SERVER_RESOURCES
            # Unpatched hosts return this status for the oversized peek.
            logger.success('{} IS NOT PATCHED!'.format(logger.GREEN(target)))
        else:
            logger.error('{} IS PATCHED!'.format(logger.RED(target)))
            sys.exit()
        logger.action('Looking for the named pipes...')
        for pipe_name, pipe_uuid in pipes.items():
            try:
                dce = conn.get_dce_rpc(pipe_name)
                dce.connect()
                try:
                    # A successful 64-bit NDR bind implies a 64-bit endpoint.
                    dce.bind(pipe_uuid, transfer_syntax=NDR64Syntax)
                    logger.success('{}: OK (64 bit)'.format(logger.GREEN(pipe_name)))
                except DCERPCException as e:
                    if 'transfer_syntaxes_not_supported' in str(e):
                        logger.success('{}: OK (32 bit)'.format(logger.GREEN(pipe_name)))
                    else:
                        logger.success('{}: OK ({})'.format(logger.GREEN(pipe_name), str(e)))
                dce.disconnect()
            except smb.SessionError as e:
                logger.error('{}: {}'.format(logger.RED(pipe_name),
                                             logger.RED(nt_errors.ERROR_MESSAGES[e.error_code][0])))
            except smbconnection.SessionError as e:
                logger.error('{}: {}'.format(logger.RED(pipe_name),
                                             logger.RED(nt_errors.ERROR_MESSAGES[e.error][0])))
        conn.disconnect_tree(tid)
        conn.logoff()
        conn.get_socket().close()
    except (KeyboardInterrupt, SystemExit):
        logger.error('Keyboard interrupt received..')
        sys.exit(-1)
    except:
        # NOTE(review): bare except — any other failure (DNS, timeout, parse)
        # is reported as a connection failure.
        logger.error('Connection failed to: {}'.format(logger.RED(str(target))))
def start_server():
    """Run the Bluetooth RFCOMM command server.

    Advertises the service, blocks for one client, then dispatches JSON
    commands ("power_off", "setup_wifi", "check_connectivity", "restart")
    until the client disconnects or an error occurs.
    """
    logger.info("**Server Started**")
    server_sock = BluetoothSocket(RFCOMM)
    server_sock.bind(("", CHANNEL))
    server_sock.listen(1)
    port = server_sock.getsockname()[1]
    advertise_service(server_sock, "BluenetServer",
                      service_id=UUID,
                      service_classes=[UUID, SERIAL_PORT_CLASS],
                      profiles=[SERIAL_PORT_PROFILE])
    logger.info("Waiting for connection on RFCOMM channel %d" % port)
    # Blocks until a client connects.
    client_sock, client_info = server_sock.accept()
    logger.success("Accepted connection from " + str(client_info))
    data = None
    try:
        while True:
            data = client_sock.recv(1024)
            if len(data) == 0:
                # Zero-length read: peer closed the connection.
                break
            logger.info("Received [%s]" % data)
            data_json = json.loads(data)
            if (data_json["action"] == "power_off"):
                action.power_off()
            elif (data_json["action"] == "setup_wifi"):
                action.setup_wifi(data_json)
            elif (data_json["action"] == "check_connectivity"):
                client_sock.send(
                    json.dumps({"connectivity": action.check_connectivity()}))
            elif (data_json["action"] == "restart"):
                action.restart()
    except Exception as e:
        logger.error("Exception: " + str(e))
    # NOTE(review): source formatting was collapsed — close_connections() is
    # placed here as unconditional cleanup; confirm it was not intended to run
    # only inside the except handler.
    close_connections()
def loop(project: dict):
    """Repeatedly cast a vote for *project* until its success count reaches num_iter.

    Each round opens a fresh Chrome session, clicks the vote confirmation, and
    waits up to 5 s for the completion marker.  Rewritten from self-recursion
    to a while-loop so long retry runs cannot hit Python's recursion limit.

    Args:
        project: dict with at least 'url' and 'num_iter' keys.
    """
    while True:
        driver = webdriver.Chrome(executable_path="./chromedriver.exe",
                                  chrome_options=chrome_options)
        driver.get(project["url"])
        try:
            driver.find_element_by_class_name("confirmVote").click()
            WebDriverWait(driver, 5).until(
                EC.presence_of_element_located((By.CLASS_NAME, "voteComplete")))
            logger.success(project)
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C still stops the loop.
            logger.fail(project)
        driver.close()
        #TODO: count successes on file copy (so write acces wont fail in logs_counter)
        counted_successes = count_check.get_success_num(project["url"])
        print(counted_successes, "/", project["num_iter"])
        if counted_successes >= project["num_iter"]:
            print("Finished!")
            return
def cFlare(hostname):
    """Point every A record across all CloudFlare zones at the IP given by *hostname*.

    Iterates all zone pages, then every DNS record per zone, PUT-updating each
    A record's content to the new IP while preserving name/proxied/ttl.
    """
    # Initialize
    email = "*****@*****.**"
    key = "109da6ce9eb354c49fff3b55ab6f152721e5a"
    ip = hostname
    cf = CloudFlare(email, key)
    pages = cf.apiCall('/zones')['result_info']['total_pages']
    # Bug fix: `range` replaces Python-2-only `xrange` (this file uses
    # f-strings elsewhere, so it runs on Python 3), and CloudFlare result
    # pages are 1-based — starting at 0 re-fetched the first page twice.
    for page in range(1, pages + 1):
        zones = cf.apiCall('/zones', 'GET', {'page': page})['result']
        for zone in zones:
            zone_id = zone['id']
            records = cf.apiCall("/zones/" + zone_id + "/dns_records",
                                 "GET")['result']
            for record in records:
                if record['type'] != 'A':
                    continue
                identifier = record['id']
                data = {
                    'type': 'A',
                    'name': record['name'],
                    'content': ip,
                    'proxied': record['proxied'],
                    'ttl': record['ttl']
                }
                res = cf.apiCall(
                    "/zones/" + zone_id + "/dns_records/" + identifier,
                    "PUT", data)['result']
                if res is not None:
                    logger.success(res['name'])
                else:
                    logger.fail(record['name'])