def render_noose(count=0):
    """Print the hangman gallows for *count* incorrect guesses.

    Each miss (1-7) reveals one more element: noose, head, left arm,
    torso, right arm, left leg, right leg. At 7 misses the head is
    replaced with an "X". Relies on the module-level color helpers
    ``magenta``/``yellow``/``cyan``.

    Args:
        count (int): number of incorrect guesses so far (0-7).
    """
    # All parts start as a single blank placeholder.
    noose = head = left_arm = torso = right_arm = left_leg = right_leg = " "
    if count > 0:
        noose = "|"
    if count > 1:
        head = magenta("0")
    if count > 2:
        left_arm = yellow("/")
    if count > 3:
        torso = yellow("|")
    if count > 4:
        right_arm = yellow("\\")
    if count > 5:
        left_leg = cyan("/")
    if count > 6:
        right_leg = cyan("\\")
        head = magenta("X")  # game over: dead face
    top_row = " +---+"
    # The original wrapped these in str.format() for no effect; plain
    # %-formatting produces identical strings.
    noose_row = "\n %s |" % noose
    head_row = "\n %s |" % head
    torso_row = "\n %s%s%s |" % (left_arm, torso, right_arm)
    leg_row = "\n %s %s |" % (left_leg, right_leg)
    bottom_row = "\n |\n==========\n"
    print(top_row + noose_row + head_row + torso_row + leg_row + bottom_row)
def run_all(app, analyzers):
    """Run each analyzer script against *app*, framing its output in banners.

    Each analyzer is expected to be an executable in the current
    directory, invoked as ``./<analyzer> <app>``.
    """
    pretty_app = color.cyan(app)
    for tool in analyzers:
        opening = '----- Begin %s report for %s -----' % (tool, pretty_app)
        print(color.bright(opening))
        subprocess.call(['./%s' % tool, app])
        closing = '----- End %s report for %s -------' % (tool, pretty_app)
        print(color.bright(closing))
        print('')
def _format_review(self): if not self.reviews['reviews']: return '' review = self.reviews['reviews'][self._current_review] return '%s\n User: %s\n Rating: %s\n Review: %s\n' % ( color.green('Review'), color.cyan(review['user']['name']), color.yellow('*' * review['rating']), review['excerpt'])
def moveVersionDir(info, root):
    '''
    Adds source files to the lab repository.

    Creates the lab version directory if needed and rsyncs the lab
    files into it, excluding the contents of the Support_Docs folder.
    If the directory already exists, asks the user whether to update
    its contents and exits the program if they decline.

    Args:
        info (dict) information about new lab object; reads the
            "directory" and "originalDir" keys
        root (str) root path of lab repository
    Return:
        none
    '''
    versionDir = root + info["directory"]
    if not os.path.isdir(versionDir):
        # os.makedirs avoids shelling out for mkdir and creates any
        # missing intermediate directories.
        os.makedirs(versionDir)
        # NOTE(review): the rsync command line is built by string
        # concatenation; paths containing spaces or shell metacharacters
        # will break (or be misinterpreted by) the shell. Consider
        # subprocess.run([...]) with a list argument.
        os.system("sudo rsync -avz --exclude Support_Docs " +
                  info["originalDir"] + " " + versionDir)
    else:
        print("")
        print(color.cyan("Lab folder " + versionDir + " Already Exists."))
        print("")
        if input(
                color.yellow("Do you want to update the folder contents? N/y: "
                             )).lower() == "y":
            print("")
            os.system("rsync -avz --exclude Support_Docs " +
                      info["originalDir"] + " " + versionDir)
        else:
            print("Exiting...")
            exit()
def getNewDisciplines(masterList):
    '''
    Prompt the user for a discipline and validate it against masterList.

    Matching is case-insensitive and the canonical spelling from
    masterList is returned. On an invalid entry the user may retry; if
    they give up, the invalid text is returned unchanged (historical
    behaviour preserved for callers, which should re-validate).

    Args:
        masterList (list) complete pool of valid disciplines
    Return:
        item (str) single valid discipline for new lab
    '''
    valid = False
    while not valid:
        item = input("Enter new discipline: ")
        for i in masterList:
            if i.lower() == item.lower():
                valid = True
                item = i
                print("Adding " + i + " to disciplines")
                print("")
                # First match wins; previously the scan continued and could
                # re-compare against the freshly-canonicalized item.
                break
        if not valid:
            print(color.cyan(item + " is an invalid discipline."))
            if not input(color.yellow(
                    "Would you like to try again? Y/n ")).lower() == "n":
                continue
            else:
                break
    return item
def getNewTopic(topicSource, masterList):
    '''
    Prompt the user for a topic and validate it against masterList.

    Matching is case-insensitive and ignores apostrophes; the canonical
    spelling from masterList is returned. On an invalid entry the user
    may retry; if they give up, the invalid text is returned unchanged
    (historical behaviour preserved for callers).

    Args:
        topicSource (string) path to file containing list of disciplines
            (NOTE(review): currently unused by this function)
        masterList (list) complete pool of valid topics
    Return:
        item (str) single valid topic
    '''
    valid = False
    while not valid:
        item = input(color.yellow("Enter new topic: "))
        for i in masterList:
            if i.replace("'", "").lower() == item.replace("'", "").lower():
                valid = True
                item = i
                # Removed a stray debug print(item) that leaked into output.
                print("Adding " + i + " to topics")
                print("")
                break  # first match wins
        if not valid:
            print(color.cyan(item + " is an invalid topic."))
            if not input(color.yellow(
                    "Would you like to try again? Y/n ")).lower() == "n":
                continue
            else:
                break
    return item
def diff(n1, n2):
    """Format n1 colorized by its direction relative to the prior value n2.

    cyan when n2 is None (no prior value), blue '+' when rising,
    red '-' when falling, yellow when unchanged. Returns the
    %-formatted, ANSI-colorized string.
    """
    if n2 is None:
        template = color.cyan(' %9f')
    elif n1 > n2:
        template = color.blue('+%9f')
    elif n1 < n2:
        template = color.red('-%9f')
    else:
        template = color.yellow(' %9f')
    # Interpolating after colorization matches the original: the %9f
    # placeholder sits inside the escape-code wrapper either way.
    return template % n1
def menu(self):
    """Return a printable summary of the business details plus one review.

    Fix: `%` binds tighter than `or`, so the original evaluated
    ("Phone: %s" % d.get('display_phone')) or d.get('phone', '') and
    rendered "Phone: None" when display_phone was missing instead of
    falling back to the plain 'phone' field.
    """
    d = self.details
    loc = d['location']
    return '\n' + '\n'.join([
        color.cyan('Name: %s' % d['name']),
        'Address: %s' % ', '.join(loc['display_address']),
        'Phone: %s' % (d.get('display_phone') or d.get('phone', '')),
        'Reviews: %s' % d['review_count'],
        'Text: %s' % d.get('snippet_text'),
    ]) + '\n' + self._format_review()
def _format_review(self): if not self.reviews['reviews']: return '' review = self.reviews['reviews'][self._current_review] return '%s\n User: %s\n Rating: %s\n Review: %s\n' % ( color.green('Review'), color.cyan(review['user']['name']), color.yellow('*' * review['rating']), review['excerpt'] )
def _format_results(self): formatted_biz = [] for idx, biz in enumerate(self.results): cat_string = '' if biz.get('categories'): cat_string = '(%s)' % (','.join(cat[0] for cat in biz['categories'])) formatted_biz.append( "%2d. %-45s %-30s %s" % (idx, color.cyan(biz['name'][:29]), biz['location']['address'] [0][:29] if biz['location']['address'] else '', cat_string)) return '\n'.join(formatted_biz)
def _format_results(self): formatted_biz = [] for idx, biz in enumerate(self.results): cat_string = '' if biz.get('categories'): cat_string = '(%s)' % (','.join(cat[0] for cat in biz['categories'])) formatted_biz.append( "%2d. %-45s %-30s %s" % (idx, color.cyan(biz['name'][:29]), biz['location']['address'][0][:29] if biz['location']['address'] else '', cat_string ) ) return '\n'.join(formatted_biz)
def validDir(info, root):
    '''
    Check that this version has not already been added to the
    repository file structure.

    Args:
        info (dict) information about new lab object; reads "directory"
        root (str) root path of lab repository
    Return:
        (bool) True if the lab directory does not exist yet
    '''
    target = root + info["directory"]
    if os.path.isdir(target):
        print(color.cyan("Lab folder " + target + " Already Exists."))
        print("Exiting...")
        return False
    return True
def generate_report(app, fullpacket=False, pcapfile=''):
    ''' Print report based on collected data

    Summarizes hosts contacted and DNS queries gathered in the
    module-level `net` object, prints them to stdout, and dumps the
    same data as JSON next to the pcap (or as <app>.<json_output> when
    `app` is itself a pcap path).

    Args:
        app (str): app name from Tapioca results, or a .pcap path.
        fullpacket (bool): when True, protocols are derived via
            get_protos_full() instead of get_protos().
        pcapfile (str): path to the capture; its mtime becomes the
            reported test time.
    '''
    report = {}
    report['app'] = app
    report['testtime'] = os.path.getmtime(pcapfile)
    # This is an un-failable test
    report['failedtest'] = False
    report['targets'] = net.targets
    report['dnsreqs'] = net.dnsreqs
    if app.endswith('.pcap'):
        app_or_pcap = 'pcap'
        jsonfile = '%s.%s' % (app, json_output)
    else:
        app_or_pcap = 'application'
        # NOTE(review): this branch hard-codes 'net.json' rather than
        # using json_output — confirm that is intentional.
        jsonfile = os.path.join(os.path.dirname(pcapfile), 'net.json')
    print('')
    print('Summary for %s: %s' % (app_or_pcap, color.bright(color.cyan(app))))
    print('')
    print(color.bright('Hosts contacted:'))
    # For each target (unsorted)
    for target in net.targets:
        # Get protocols used
        if fullpacket:
            protos = get_protos_full(net.targets[target])
        else:
            protos = get_protos(net.targets[target])
        # Get host name
        host = net.get_hostname(target)
        protolist = ', '.join(protos)
        print('%s : %s : %s' % (color.bright('CONNECT'), host, protolist))
    print('')
    print(color.bright('DNS queries made:'))
    for dnsreq in net.dnsreqs:
        print('%s : %s' % (color.bright('LOOKUP'), dnsreq))
    # Persist the machine-readable version of the report.
    with open(jsonfile, 'w') as fp:
        json.dump(report, fp)
def prepare(file1_name, file2_name, file3_name):
    """Load three price CSVs and build sliding-window training data.

    Each CSV is expected to have 'date', 'close' and 'volume' columns
    (assumption from the code below — TODO confirm). 'close' and
    'volume' are min-max scaled per file, the frames are indexed by
    date, and rows present in all three files are zipped into a
    6-feature history. Once `backtrack` (30) rows of history exist,
    each step appends a 30-row window to `data` and file1's scaled
    close to `target`.

    Returns:
        (data, target): lists of 30x6 windows and 1-element targets.
    """
    scaler = MinMaxScaler()
    # print(chart.plot(file1['close'][:130], {'height': 4, 'format':'{:8.0f}'}))
    file1 = pd.read_csv(file1_name)
    file1[['close']] = scaler.fit_transform(file1[['close']])
    file1[['volume']] = scaler.fit_transform(file1[['volume']])
    file1 = file1.set_index(['date'])
    file2 = pd.read_csv(file2_name)
    file2[['close']] = scaler.fit_transform(file2[['close']])
    file2[['volume']] = scaler.fit_transform(file2[['volume']])
    file2 = file2.set_index(['date'])
    file3 = pd.read_csv(file3_name)
    file3[['close']] = scaler.fit_transform(file3[['close']])
    file3[['volume']] = scaler.fit_transform(file3[['volume']])
    file3 = file3.set_index(['date'])
    history = []   # one 6-feature row per date seen in all three files
    data = []      # 30-row windows of history
    target = []    # file1 scaled close aligned with each window
    file1_close = None
    file2_close = None
    file3_close = None
    file1_volume = None
    file2_volume = None
    file3_volume = None
    i = 0
    backtrack = 30  # window length in rows
    for index, file1_row in file1.iterrows():
        # print(color.cyan(index))
        #print(row)
        try:
            file2_row = file2.loc[index]
            file3_row = file3.loc[index]
            # print(color.yellow('%s found') % index)
        except:
            # NOTE(review): bare except — should be `except KeyError:`;
            # as written it also swallows unrelated errors.
            print(color.red('%s not found') % index)
            continue
        if len(history) >= backtrack:
            data.append(history[i-backtrack:i])
            print(color.green(str(i-backtrack)+':'+str(i)))
            #target.append(outcome(file1_row['close'], file1_close))
            target.append([file1_row['close']])
        # if len(history) >= backtrack+1:
        #     print(data)
        #     print(target)
        #     sys.exit()
        # Progress/debug line showing deltas vs. the previous row.
        print(
            str(i).rjust(3)+':',
            outcome(file1_row['close'], file1_close),
            diff(file1_row['close'], file1_close),
            diff(file1_row['volume'], file1_volume),
            diff(file2_row['close'], file2_close),
            diff(file2_row['volume'], file2_volume),
            diff(file3_row['close'], file3_close),
            diff(file3_row['volume'], file3_volume),
            ['%9f' % h[0] for h in data[i-backtrack]] if len(history) >= backtrack else '',
            target[i-backtrack] if len(history) >= backtrack else ''
        )
        # Remember this row's values for the next iteration's diffs.
        file1_close = file1_row['close']
        file1_volume = file1_row['volume']
        file2_close = file2_row['close']
        file2_volume = file2_row['volume']
        file3_close = file3_row['close']
        file3_volume = file3_row['volume']
        history.append([file1_close, file1_volume,
                        file2_close, file2_volume,
                        file3_close, file3_volume])
        i += 1
    print(data[len(data)-1])
    print(color.cyan(target[len(target)-1]))
    return data, target
# Compare model predictions against actual targets, labelling each step
# as up/down/steady relative to the previous target, and plot both series.
# Relies on module-level `model`, `data`, `target`, `np`, `color`, `plt`.
pred = model.predict(np.array(data, dtype=float))
i = 0
for item in data:
    if i > 0:
        # Direction of the real series vs. the previous actual value.
        real_text = 'steady'
        if target[i] > target[i - 1]:
            real_text = 'up'
        elif target[i] < target[i - 1]:
            real_text = 'down'
        # Direction of the prediction vs. the previous ACTUAL value
        # (not the previous prediction).
        pred_text = 'steady'
        if pred[i] > target[i - 1]:
            pred_text = 'up'
        elif pred[i] < target[i - 1]:
            pred_text = 'down'
        # Highlight agreeing rows in cyan.
        if real_text == pred_text:
            print('real', color.cyan(target[i]), real_text,
                  '| predict', color.cyan(pred[i]), pred_text)
        else:
            print('real', target[i], real_text,
                  '| predict', pred[i], pred_text)
    i += 1
plt.plot(target, color='blue')
plt.plot(pred, color='red')
plt.show()
def addEquipItem(eqdb, itemId):
    '''
    Adds a new piece of equipment to a lab object.

    Interactively validates the main item id, an optional alternate
    item, and a unit count, re-prompting on invalid input until the
    user gives up (answering "n").

    Args:
        eqdb (pjlDB.EquipDB) entire equipment inventory database object
            generated by pjlDB
        itemId (str) equipment id number entered by user; the sentinel
            value "retry" forces an immediate re-prompt
    Return:
        equipItem (dict) dictionary for a single equipment item with
            keys id/name/alt-id/alt-name/amount (keys may be missing if
            the user aborts a stage — callers should validate)
    '''
    if debug:
        print(color.green("[Debug - addEquipIten] entering"))
    equipItem = {}
    validItem = False
    validAlt = False
    validNum = False
    itemName = ""
    altName = ""
    amount = ""
    # adds main item
    while not validItem:
        if itemId == "retry":
            itemId = input(color.yellow("Enter the equipment id number: "))
        validItem, itemName, itemError = equipValid(eqdb, itemId)
        if not validItem:
            print(itemError)
            if input(color.yellow(
                    "Do you wish to try again? Y/n: ")).lower() == "n":
                break
            else:
                itemId = "retry"
        else:
            equipItem['id'] = itemId
            equipItem['name'] = itemName
            validItem = True
    # adds alternate item
    while not validAlt:
        altId = input(
            color.yellow(
                "Enter id number of an alternate for this item. If none hit Enter. "
            ))
        if not altId == "":
            validAlt, altName, altError = equipValid(eqdb, altId)
            if not validAlt:
                print(altError)
                if input(color.yellow(
                        "Do you wish to try again? Y/n: ")).lower() == "n":
                    break
                else:
                    altId = ""
                    altName = ""
                    validAlt = False
            else:
                equipItem['alt-name'] = altName
                equipItem['alt-id'] = altId
                validAlt = True
        else:
            # Empty input means "no alternate".
            equipItem['alt-name'] = ""
            equipItem['alt-id'] = ""
            validAlt = True
    # adds the number of units needed
    while not validNum:
        amount = input(
            color.yellow("Please enter how many " + itemName +
                         "(s) are needed? "))
        if amount.isdigit():
            equipItem['amount'] = amount
            validNum = True
        else:
            print(color.cyan(amount + " is not a valid number."))
            if input(color.yellow(
                    "Do you wish to try again? Y/n: ")).lower() == "n":
                break
    # NOTE(review): this unconditionally overwrites 'id' — if the user
    # aborted the first loop, itemId may still be "retry" or an invalid
    # id. Confirm callers handle that case.
    equipItem["id"] = itemId
    return equipItem
def generate_report(app, pcapfile=''):
    ''' Print report based on collected data

    Summarizes TLS/SSL and DTLS protocol versions and cipher suites
    collected in the module-level `net` object, flags anything other
    than (D)TLS 1.2 or a missing mandatory cipher as a test failure,
    prints a colorized report, and writes the same data as JSON.
    '''
    global sslpacketcount
    if app.endswith('.pcap'):
        app_or_pcap = 'pcap'
        jsonfile = '%s.%s' % (pcapfile, json_output)
    else:
        app_or_pcap = 'application'
        jsonfile = os.path.join(os.path.dirname(pcapfile), json_output)
    report = {}
    report['app'] = app
    report['testtime'] = os.path.getmtime(pcapfile)
    report['sslversions'] = net.sslversions
    report['requestedciphers'] = net.requestedciphers
    report['negotiatedciphers'] = net.negotiatedciphers
    report['dtlsversions'] = net.dtlsversions
    report['negotiateddtlsciphers'] = net.negotiateddtlsciphers
    # NOTE(review): these three lists are shared between the TLS and
    # DTLS sections below without being cleared in between, so the DTLS
    # report also contains the TLS-observed ciphers — confirm intended.
    seen_mandatory_ciphers = []
    seen_optional_ciphers = []
    seen_other_ciphers = []
    failedtest = False
    failedreasons = []
    print('')
    print('Summary for application: %s' % color.bright(color.cyan(app)))
    print('')
    if net.sslpacketcount > 0:
        print(color.bright('TLS/SSL protocols used:'))
        # For each target (unsorted)
        for sslversion in net.sslversions:
            # Only TLS 1.2 passes; anything else fails the test.
            if sslversion == 'TLS 1.2':
                sslversion = color.bright(color.green(sslversion))
            else:
                failedtest = True
                failedreasons.append('%s is used, rather than TLS 1.2' %
                                     sslversion)
                sslversion = color.bright(color.red(sslversion))
            print(sslversion)
            print(
                color.bright('Hosts using %s:' % color.decolorize(sslversion)))
            # decolorize() recovers the plain dict key from the ANSI string.
            for host in net.sslversions[color.decolorize(sslversion)]:
                print(host)
            print('')
        # Bucket each client-requested cipher into mandatory/optional/other.
        for ciphersuite in net.requestedciphers:
            if ciphersuite in net.mandatory_ciphers:
                #ciphersuite = color.bright(color.green(ciphersuite))
                seen_mandatory_ciphers.append(ciphersuite)
            elif ciphersuite in net.optional_ciphers:
                #ciphersuite = color.bright(ciphersuite)
                seen_optional_ciphers.append(ciphersuite)
            else:
                #ciphersuite = color.dim(ciphersuite)
                seen_other_ciphers.append(ciphersuite)
        if len(seen_mandatory_ciphers) == 0:
            failedtest = True
            failedreasons.append('%s is not supported by client' %
                                 net.mandatory_ciphers[0])
        print(
            color.bright(
                'Observed mandatory ciphers in TLS/SSL client requests:'))
        for cipher in seen_mandatory_ciphers:
            print(color.bright(color.green(cipher)))
        report['seen_mandatory_ciphers'] = seen_mandatory_ciphers
        print('')
        print(
            color.bright(
                'Observed optional ciphers in TLS/SSL client requests:'))
        for cipher in seen_optional_ciphers:
            print(cipher)
        report['seen_optional_ciphers'] = seen_optional_ciphers
        print('')
        print(
            color.bright('Observed other ciphers in TLS/SSL client requests:'))
        for cipher in seen_other_ciphers:
            print(color.dim(cipher))
        report['seen_other_ciphers'] = seen_other_ciphers
        print('')
        print(color.bright('Negotiated TLS/SSL ciphers:'))
        for ciphersuite in net.negotiatedciphers:
            if ciphersuite in net.mandatory_ciphers:
                ciphersuite = color.bright(color.green(ciphersuite))
            elif ciphersuite in net.optional_ciphers:
                pass
                #ciphersuite = color.bright(ciphersuite)
            else:
                ciphersuite = color.dim(ciphersuite)
            print(ciphersuite)
            print(
                color.bright('Hosts using %s:' %
                             color.decolorize(ciphersuite)))
            for host in net.negotiatedciphers[color.decolorize(ciphersuite)]:
                print(host)
            print('')
        print('')
    else:
        print(color.bright(color.green('No TLS/SSL traffic seen')))
    print('')
    if net.dtlspacketcount > 0:
        print(color.bright('DTLS protocols used:'))
        # For each target (unsorted)
        for dtlsversion in net.dtlsversions:
            if dtlsversion == 'DTLS 1.2':
                dtlsversion = color.bright(color.green(dtlsversion))
            else:
                failedtest = True
                failedreasons.append('%s is used, rather than DTLS 1.2' %
                                     dtlsversion)
                dtlsversion = color.bright(color.red(dtlsversion))
            print(dtlsversion)
            print(
                color.bright('Hosts using %s:' %
                             color.decolorize(dtlsversion)))
            for host in net.dtlsversions[color.decolorize(dtlsversion)]:
                print(host)
            print('')
        report['dtlsciphers'] = net.requesteddtlsciphers
        for ciphersuite in net.requesteddtlsciphers:
            if ciphersuite in net.mandatory_ciphers:
                #ciphersuite = color.bright(color.green(ciphersuite))
                seen_mandatory_ciphers.append(ciphersuite)
            elif ciphersuite in net.optional_ciphers:
                #ciphersuite = color.bright(ciphersuite)
                seen_optional_ciphers.append(ciphersuite)
            else:
                #ciphersuite = color.dim(ciphersuite)
                seen_other_ciphers.append(ciphersuite)
        if len(seen_mandatory_ciphers) == 0:
            failedtest = True
            failedreasons.append('%s is not supported by client' %
                                 net.mandatory_ciphers[0])
        print(
            color.bright(
                'Observed mandatory ciphers in DTLS client requests:'))
        for cipher in seen_mandatory_ciphers:
            print(color.bright(color.green(cipher)))
        print('')
        report['seen_mandatory_dtls_ciphers'] = seen_mandatory_ciphers
        print(
            color.bright('Observed optional ciphers in DTLS client requests:'))
        for cipher in seen_optional_ciphers:
            print(cipher)
        print('')
        report['seen_optional_dtls_ciphers'] = seen_optional_ciphers
        print(color.bright('Observed other ciphers in DTLS client requests:'))
        for cipher in seen_other_ciphers:
            print(color.dim(cipher))
        print('')
        report['seen_other_dtls_ciphers'] = seen_other_ciphers
        print(color.bright('Negotiated DTLS ciphers:'))
        for ciphersuite in net.negotiateddtlsciphers:
            if ciphersuite in net.mandatory_ciphers:
                ciphersuite = color.bright(color.green(ciphersuite))
            elif ciphersuite in net.optional_ciphers:
                pass
                #ciphersuite = color.bright(ciphersuite)
            else:
                ciphersuite = color.dim(ciphersuite)
            print(ciphersuite)
            print(
                color.bright('Hosts using %s:' %
                             color.decolorize(ciphersuite)))
            for host in net.negotiateddtlsciphers[color.decolorize(
                    ciphersuite)]:
                print(host)
            print('')
        print('')
    else:
        print(color.bright(color.green('No DTLS traffic seen')))
    report['failedtest'] = failedtest
    report['failedreasons'] = failedreasons
    if failedtest:
        print(
            color.bright(
                color.red('App %s failed crypto checking because:' % app)))
        for reason in failedreasons:
            print(color.bright(color.red(reason)))
    else:
        print(color.bright(color.green('App %s passed crypto checking' % app)))
    # print(report)
    with open(jsonfile, 'w') as fp:
        json.dump(report, fp)
def check_app(app, force=False):
    '''
    Check application based on app name in Tapioca results.

    Locates the tcpdump capture for `app` (or treats `app` itself as a
    pcap path), redirects stdout through Logger to also write the
    report file, extracts DNS / SSL / DTLS information from the capture
    via pyshark into the module-level `net` object, then calls
    generate_report(). DNS results are cached in a .dnsmap.pkl pickle
    beside the capture; `force=True` ignores that cache.
    '''
    dnscacheloaded = False
    largewarned = False
    # Get pcap file location
    if app.endswith('.pcap'):
        pcapfile = app
        if os.path.exists(pcapfile):
            sys.stdout = Logger('%s.%s' % (pcapfile, report_output))
    else:
        pcapfile = os.path.join('results', app, 'tcpdump.pcap')
        if os.path.exists(pcapfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))
    if os.path.exists(pcapfile):
        pcapdir = os.path.dirname(pcapfile)
        dnspkl = os.path.join(pcapdir, '.dnsmap.pkl')
        eprint(color.bright('Checking app %s...' % color.cyan(app)))
        if os.path.exists(dnspkl) and not force:
            eprint('Loading cached DNS info...')
            with open(dnspkl, 'rb') as pklhandle:
                try:
                    net.dnsmap = pickle.load(pklhandle)
                    dnscacheloaded = True
                except:
                    # NOTE(review): bare except silently falls back to
                    # re-parsing on a corrupt cache; consider narrowing
                    # to (pickle.UnpicklingError, EOFError).
                    pass
        if not dnscacheloaded:
            if os.path.getsize(pcapfile) > 100000000:
                # Over 100MB
                eprint(
                    color.bright(
                        color.yellow(
                            'Warning: capture size is large. Please be patient.'
                        )))
                largewarned = True
            # Get captured DNS info for IP addresses
            eprint('Getting DNS info...')
            dnspackets = pyshark.FileCapture(pcapfile,
                                             keep_packets=False,
                                             display_filter='dns')
            dnspackets.apply_on_packets(net.get_dns_info, timeout=1000)
            with open(dnspkl, 'wb') as pklhandle:
                pickle.dump(net.dnsmap,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)
        if os.path.getsize(pcapfile) > 100000000 and not largewarned:
            # Over 100MB
            eprint(
                color.bright(
                    color.yellow(
                        'Warning: capture size is large. Please be patient.')))
            largewarned = True
        sslpackets = pyshark.FileCapture(pcapfile,
                                         keep_packets=False,
                                         display_filter='ssl')
        eprint('Getting SSL info from capture...')
        # get_indexed_ssl_info(cap)
        sslpackets.apply_on_packets(net.get_ssl_info, timeout=1000)
        dtlspackets = pyshark.FileCapture(pcapfile,
                                          keep_packets=False,
                                          display_filter='dtls')
        eprint('Getting DTLS info from capture...')
        dtlspackets.apply_on_packets(net.get_dtls_info, timeout=1000)
        # Print report
        generate_report(app, pcapfile=pcapfile)
        # Reset globals
        net.clear()
def check_app(app, fullpacket=False, force=False):
    '''
    Check application based on app name in Tapioca results.

    Locates the tcpdump capture for `app` (or treats `app` itself as a
    pcap path), redirects stdout through Logger to also write the
    report file, extracts DNS info and hosts-contacted data from the
    capture via pyshark into the module-level `net` object, then calls
    generate_report(). DNS results are cached in a .dnsmap.pkl pickle
    beside the capture; `force=True` ignores that cache. When
    `fullpacket` is True, full packets are parsed instead of summaries.
    '''
    dnscacheloaded = False
    targetscacheloaded = False
    largewarned = False
    # load local network from config
    net.set_local()
    # Get pcap file location
    if app.endswith('.pcap'):
        pcapfile = app
        if os.path.exists(pcapfile):
            sys.stdout = Logger('%s.%s' % (pcapfile, report_output))
    else:
        pcapfile = os.path.join('results', app, 'tcpdump.pcap')
        if os.path.exists(pcapfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))
    if os.path.exists(pcapfile):
        pcapdir = os.path.dirname(pcapfile)
        dnspkl = os.path.join(pcapdir, '.dnsmap.pkl')
        targetspkl = os.path.join(pcapdir, '.targets.pkl')
        eprint(color.bright('Checking app %s...' % color.cyan(app)))
        if os.path.exists(dnspkl) and not force:
            eprint('Loading cached DNS info...')
            with open(dnspkl, 'rb') as pklhandle:
                try:
                    # The pickle holds two objects written in this order.
                    net.dnsmap = pickle.load(pklhandle)
                    net.dnsreqs = pickle.load(pklhandle)
                    dnscacheloaded = True
                except:
                    # NOTE(review): bare except silently falls back to
                    # re-parsing on a corrupt cache.
                    pass
        if not dnscacheloaded:
            if os.path.getsize(pcapfile) > 100000000:
                # Over 100MB
                eprint(
                    color.bright(
                        color.yellow(
                            'Warning: capture size is large. Please be patient.'
                        )))
                largewarned = True
            # Get captured DNS info for IP addresses
            eprint('Getting DNS info...')
            dnspackets = pyshark.FileCapture(pcapfile,
                                             keep_packets=False,
                                             display_filter='dns')
            dnspackets.apply_on_packets(net.get_dns_info, timeout=1000)
            with open(dnspkl, 'wb') as pklhandle:
                pickle.dump(net.dnsmap,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)
                pickle.dump(net.dnsreqs,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)
        # Targets caching is currently disabled; targetscacheloaded
        # therefore always stays False and the capture is re-parsed.
        # if os.path.exists(targetspkl) and not force:
        #     eprint('Loading cached targets...')
        #     with open(targetspkl, 'rb') as pklhandle:
        #         try:
        #             net.targets = pickle.load(pklhandle)
        #             targetscacheloaded = True
        #         except:
        #             pass
        if not targetscacheloaded:
            if fullpacket:
                packets = pyshark.FileCapture(pcapfile, keep_packets=False)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted_fullpacket,
                                         timeout=1000)
            else:
                packets = pyshark.FileCapture(pcapfile,
                                              keep_packets=False,
                                              only_summaries=True)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted,
                                         timeout=1000)
            # with open(targetspkl, 'wb') as pklhandle:
            #     pickle.dump(
            #         net.targets, pklhandle, protocol=pickle.HIGHEST_PROTOCOL)
        # Print report
        generate_report(app, fullpacket=fullpacket, pcapfile=pcapfile)
        # Reset globals
        net.clear()
# Train the module-level `model` on (data, target), save it, and plot the
# loss curve. Relies on module-level `model`, `data`, `target`, `plt`, `sys`.
model.summary()
print(color.green('Starting training'))
history = model.fit(data, target, epochs=100)
model.save('train.model')
plt.plot(history.history['loss'])
plt.show()
sys.exit()
# NOTE(review): everything below is unreachable because of the
# sys.exit() above — dead experimentation code kept for reference.
#print((y_test*100).tolist())
#print(x_test.tolist())
# print(color.cyan(int(model.predict(np.array([[[101./100], [101./100], [103./100], [104./100], [105./100]]], dtype=float))[0][0]*100)))
# print(model.predict(x_test).tolist())
result = model.predict(data)
print(color.yellow(data.tolist()))
print(color.cyan(target.tolist()))
print(color.cyan(result.tolist()))
plt.style.use('fivethirtyeight')
plt.scatter(range(95), result, c='r')
plt.scatter(range(95), target, c='b')
plt.show()
plt.plot(history.history['loss'])
plt.show()
def manage(conf, args):
    '''
    Move a file to the base directory and leave a link pointing to its new
    location in its place.

    Raises ValueError if the path is already a link, is not a descendant
    of the configured destination, or is already managed (unless
    args.force). With args.save, also commits and pushes the new file(s)
    to the repository.
    '''
    # bail if the file is already a link
    if os.path.islink(args.path):
        raise ValueError('Unable to manage ' + color.cyan(args.path) +
                         " since it's already a link!")
    # make sure the path is a descendant of the destination directory
    if not util.is_descendant(args.path, conf['destination']):
        raise ValueError("Unable to manage files that aren't descendants of " +
                         'the destination directory (' +
                         color.cyan(conf['destination']) + ')')
    # mark files that aren't direct descendants of the root as such
    unrooted = os.path.dirname(args.path) != conf['destination']
    # get the path of the file if it will be copied into the repo directory
    dest_path = os.path.join(constants.REPO_DIR, os.path.basename(args.path))
    # rename the file as appropriate to to its original name
    dest_path, config_file_path = config.configify_file_name(dest_path)
    # give unrooted files a config file path so they'll go to the correct place
    if unrooted and config_file_path is None:
        config_file_path = util.toggle_hidden(dest_path, True)
    # bail if the file is already managed and we're not overwriting
    dest_exists = os.path.exists(dest_path)
    config_exists = (config_file_path is not None and
                     os.path.exists(config_file_path))
    if (dest_exists or config_exists) and not args.force:
        raise ValueError("Can't manage " + color.cyan(args.path) +
                         " since it already appears to be managed (use --force to override)")
    # replace any existing dest file with a copy of the new one
    util.rm(dest_path, force=True)
    util.cp(args.path, dest_path, recursive=True)
    # create a file config if necessary (guarded: open(None) would raise)
    if config_file_path is not None:
        # replace any existing config file with our new one
        util.rm(config_file_path, force=True)
        # build a config for this file
        file_config = config.normalize_file_config({
            'paths': [args.path],
        }, conf['destination'])
        # create a config file from our config dict
        with open(config_file_path, 'w') as f:
            json.dump(file_config, f, indent=2)
    # create a link to the new location, overwriting the old file
    util.symlink(args.path, dest_path, overwrite=True)
    print(color.cyan(args.path), 'copied and linked')
    # add and commit the file to the repo if --save is specified
    if args.save:
        files = [color.cyan(os.path.basename(dest_path))]
        if config_file_path:
            files.append(color.cyan(os.path.basename(config_file_path)))
        # BUG FIX: lists have no .join() method — the original
        # `files.join(' and ')` raised AttributeError whenever --save
        # was used; join via the separator string instead.
        files = ' and '.join(files)
        print('Adding', files, 'to the repository...')
        # move us to the current repo directory so all git commands start there
        os.chdir(constants.REPO_DIR)
        # alert the user if we have uncommitted changes (git exits non-0 in this case)
        if git.diff(exit_code=True, quiet=True, _ok_code=(0, 1)).exit_code != 0:
            raise ValueError('The repository has uncommitted changes - the '
                             'newly-managed file will have to be added to the repo manually.')
        # add the new files to the staging area
        git.add(dest_path)
        if config_file_path is not None:
            git.add(config_file_path)
        print('Successfully added', files, 'to the repository')
        print('Committing changes...')
        # commit the file to the repository
        commit_message = 'Manage %s' % os.path.basename(args.path)
        git.commit(m=commit_message, quiet=True)
        print('Commit successful!')
        print('Pushing committed changes...')
        # pull any changes down from upstream, then push our new addition
        git.pull(rebase=True, quiet=True)
        git.push(quiet=True)
        print('Push successful!')
def check_app(app, searchterm, encoding='string'):
    '''
    Check application based on app name in Tapioca results.

    Searches the capture artifacts associated with `app` for
    `searchterm`: a lone ssltest/fullmitm flow log or a .pcap path is
    searched directly; otherwise all three capture files under
    results/<app>/ are searched. Writes a JSON summary of what was
    found, using module-level result flags.

    Args:
        app (str): app name, flow-log path, or pcap path.
        searchterm (str): term handed to searchmitmflow()/searchtcpdump().
        encoding (str): label echoed in the search banner
            (presumably the searched encoding of the term — confirm).
    '''
    global pcapfile, ssltestfile, fullmitmfile
    global found, foundunprot, foundprot, foundunenc
    ssltesttime = None
    fullmitmtime = None
    pcaptime = None
    appbase = os.path.basename(app)
    # Get pcap file location
    if appbase == ssltestfile or app == fullmitmfile:
        # Check mitmproxy log
        logfile = app
        jsonfile = '%s.%s' % (app, json_output)
        if os.path.exists(logfile):
            if appbase == ssltestfile:
                ssltesttime = os.path.getmtime(app)
            elif appbase == fullmitmfile:
                fullmitmtime = os.path.getmtime(app)
            print_header(logfile)
            print(
                color.bright('searching %s for %s (%s)') %
                (color.cyan(logfile), searchterm, encoding))
            searchmitmflow(logfile, searchterm)
            print('')
    elif appbase.endswith('.pcap'):
        # Check tcpdump pcap
        logfile = app
        jsonfile = '%s.%s' % (app, json_output)
        if os.path.exists(logfile):
            pcaptime = os.path.getmtime(app)
            print_header(logfile)
            print(
                color.bright('searching %s for %s (%s)') %
                (color.cyan(logfile), searchterm, encoding))
            searchtcpdump(logfile, searchterm)
            print('')
    else:
        # check app (all captures availabale)
        appdir = os.path.join('results', app)
        jsonfile = os.path.join(appdir, json_output)
        # app name, so check all three
        logfile = os.path.join('results', app, pcapfile)
        if os.path.exists(logfile):
            pcaptime = os.path.getmtime(logfile)
            print_header(logfile)
            print(
                color.bright('searching %s for %s (%s)') %
                (color.cyan(logfile), searchterm, encoding))
            searchtcpdump(logfile, searchterm)
            print('')
        logfile = os.path.join('results', app, ssltestfile)
        if os.path.exists(logfile):
            ssltesttime = os.path.getmtime(logfile)
            print_header(logfile)
            print(
                color.bright('searching %s for %s (%s)') %
                (color.cyan(logfile), searchterm, encoding))
            searchmitmflow(logfile, searchterm)
            print('')
        logfile = os.path.join('results', app, fullmitmfile)
        if os.path.exists(logfile):
            fullmitmtime = os.path.getmtime(logfile)
            print_header(logfile)
            print(
                color.bright('searching %s for %s (%s)') %
                (color.cyan(logfile), searchterm, encoding))
            searchmitmflow(logfile, searchterm)
            print('')
    # Persist the search outcome; the found* flags are set as side
    # effects by the search helpers above.
    report = {}
    report['app'] = app
    report['pcaptime'] = pcaptime
    report['ssltesttime'] = ssltesttime
    report['fullmitmtime'] = fullmitmtime
    report['searchterm'] = searchterm
    report['found'] = found
    report['foundunenc'] = foundunenc
    report['foundunprot'] = foundunprot
    report['foundprot'] = foundprot
    with open(jsonfile, 'w') as fp:
        json.dump(report, fp)
def predict(*args):
    """Run the model on the first five args (scaled by 1/100) and print
    the raw inputs, the expected value (args[5]) and the rounded,
    rescaled prediction."""
    scaled_window = [[value / 100] for value in args[:5]]
    result = model.predict(np.array([scaled_window], dtype=float))
    raw_window = [[value] for value in args[:5]]
    print(color.yellow([raw_window]),
          color.red(args[5]),
          color.cyan(round(result[0][0] * 100)))
def wait(word):
    """Log *word* as an in-progress ('[.]') status line in cyan."""
    message = "[.] %s\n" % word
    Log._print(color.cyan(message))