def send_tool(self):
    """Upload the keychain_dumper helper binary to the device over SFTP."""
    Utils.sftp_put(
        ip=config.mobile_ip,
        port=config.ssh_port,
        username=config.mobile_user,
        password=config.mobile_password,
        local_file="./tools/keychain_dumper",
        remote_path='./keychain_dumper',
    )
class CreateDomain:
    """Create a VxRail workload domain through the SDDC Manager public API."""

    def __init__(self):
        print('Create Domain')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP.
        self.hostname = sys.argv[1]

    def create_workload_domain(self):
        """Validate the domain spec, then trigger domain creation and poll its task."""
        # Validations
        data = self.utils.read_input(
            os.path.abspath(__file__ + '/../') + '/domain_creation_spec_vxrail.json')
        validations_url = 'https://' + self.hostname + '/v1/domains/validations'
        response = self.utils.post_request(data, validations_url)
        # BUG FIX: user-facing message was misspelled ('Validatin', 'valdidation id').
        print('Validation started for domain. The validation id is: ' + response['id'])
        validate_poll_url = ('https://' + self.hostname
                             + '/v1/domains/validations/' + response['id'])
        print('Polling on validation api ' + validate_poll_url)
        # Give the validation a moment to register before the first poll.
        time.sleep(10)
        validation_status = self.utils.poll_on_id(validate_poll_url, False)
        print('Validate domain ended with status: ' + validation_status)
        if validation_status != 'SUCCEEDED':
            print('Validation Failed.')
            sys.exit(1)
        # Domain Creation
        domain_creation_url = 'https://' + self.hostname + '/v1/domains'
        response = self.utils.post_request(data, domain_creation_url)
        print('Creating Domain...')
        task_url = 'https://' + self.hostname + '/v1/tasks/' + response['id']
        print("Domain creation task completed with status: "
              + self.utils.poll_on_id(task_url, True))
class CreateCluster:
    """Create a VxRail cluster through the SDDC Manager public API."""

    def __init__(self):
        # BUG FIX: banner was misspelled ('Create Clusterr').
        print('Create Cluster')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP.
        self.hostname = sys.argv[1]

    def create_cluster(self):
        """Validate the cluster spec, then trigger cluster creation and poll its task."""
        data = self.utils.read_input(
            os.path.abspath(__file__ + '/../') + '/cluster_creation_spec_vxrail.json')
        validations_url = 'https://' + self.hostname + '/v1/clusters/validations'
        response = self.utils.post_request(data, validations_url)
        # BUG FIX: user-facing message was misspelled ('Validatin', 'valdidation id').
        print('Validation started for cluster. The validation id is: ' + response['id'])
        validate_poll_url = ('https://' + self.hostname
                             + '/v1/clusters/validations/' + response['id'])
        print('Polling on validation api ' + validate_poll_url)
        # Give the validation a moment to register before the first poll.
        time.sleep(10)
        validation_status = self.utils.poll_on_id(validate_poll_url, False)
        print('Validate cluster ended with status: ' + validation_status)
        if validation_status != 'SUCCEEDED':
            print('Validation Failed.')
            sys.exit(1)
        create_cluster_url = 'https://' + self.hostname + '/v1/clusters'
        response = self.utils.post_request(data, create_cluster_url)
        print('Creating Cluster...')
        task_url = 'https://' + self.hostname + '/v1/tasks/' + response['id']
        print('Create cluster ended with status: '
              + self.utils.poll_on_id(task_url, True))
def launch(self): sem = Semaphore(0) try: scanner = VulnscanManager(config.omp_ip, config.omp_user, config.omp_password) task_id, target_id = scanner.launch_scan(target='https://log.cmbchina.com', profile="Full and fast", callback_end=partial(lambda x: x.release(), sem) ) print task_id, target_id sem.acquire() print("finished") report_id = scanner.get_report_id(task_id) self.result_name = task_id + '.csv' cmd = 'omp --get-report ' + report_id + ' --format ' + data.format_csv + ' > ' + self.result_name Utils.cmd_block(self.client, cmd) Utils.sftp_get(config.server_ip, config.port, config.server_user, config.server_password, self.result_name, './temp/server/'+self.result_name) except VulnscanException as e: print("Error:") print(e)
class CreateDomain:
    """Create a workload domain through the SDDC Manager public API."""

    def __init__(self):
        print('Create Domain')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP.
        self.hostname = sys.argv[1]

    def create_workload_domain(self):
        """Validate the domain creation spec, then create the domain and poll its task."""
        # Validations
        payload = self.utils.read_input(
            os.path.abspath(__file__ + '/../') + '/domain_creation_spec.json')
        validations_url = 'https://' + self.hostname + '/v1/domains/validations/creations'
        print('Validating the input....')
        response = self.utils.post_request(payload, validations_url)
        if response['resultStatus'] != 'SUCCEEDED':
            print('Validation Failed.')
            sys.exit(1)
        # Domain Creation
        # BUG FIX: URL was missing the 'https://' scheme, unlike every other
        # request in this class, so the creation POST could not reach the API.
        domain_creation_url = 'https://' + self.hostname + '/v1/domains'
        response = self.utils.post_request(payload, domain_creation_url)
        print('Creating Domain...')
        task_url = 'https://' + self.hostname + '/v1/tasks/' + response['id']
        print("Domain creation task completed with status: "
              + self.utils.poll_on_id(task_url, True))
def crawl_item_details(queue, i): shopall = ShopAllItemDb() agentIp = Utils.GetAgentIp() # agentIp = "127.0.0.1:8888" driver = PhantomDriver(1, agentIp, 60).return_driver() while (True): try: nid = None nid = queue.get(block=False) except: pass try: if nid: item = CrawlItem.crawl(driver, nid) if item: print("进程%s抓取%s宝贝详情成功, 使用Ip:%s" % (i, nid, agentIp)) shopall.insert_or_update_details(item) else: # 如果抓取失败就更换代理ip并重新放到队列中 agentIp = Utils.GetAgentIp() try: queue.put(nid, block=False) except: pass print("进程%s抓取%s宝贝详情失败,重试 Ip:%s" % (i, nid, agentIp)) except Exception, e: agentIp = Utils.GetAgentIp() driver.quit() driver = PhantomDriver(1, agentIp, 60).return_driver() try: queue.put(nid, block=False) except: pass print("进程%s抓取%s宝贝详情失败:%s,退出浏览器重试" % (i, nid, e.message)) driver.quit()
class ComissionHosts:
    """Commission new hosts into SDDC Manager via the public API.

    NOTE(review): class name is misspelled ('Comission') but kept as-is for
    backward compatibility with existing callers.
    """

    def __init__(self):
        print('Commission Hosts')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP.
        self.hostname = sys.argv[1]

    def commission_hosts(self):
        """Validate the commission spec, then commission the hosts and poll the task."""
        spec = self.utils.read_input(
            os.path.abspath(__file__ + '/../') + '/commission_hosts_spec.json')
        validations_url = 'https://' + self.hostname + '/v1/hosts/validations/commissions'
        validation = self.utils.post_request(spec, validations_url)
        print('Validating the input....')
        validation_poll_url = ('https://' + self.hostname
                               + '/v1/hosts/validations/' + validation['id'])
        if self.utils.poll_on_id(validation_poll_url, False) != 'SUCCEEDED':
            print('Validation Failed.')
            exit(1)
        commission_url = 'https://' + self.hostname + '/v1/hosts'
        task = self.utils.post_request(spec, commission_url)
        print('Commissioning hosts...')
        pprint.pprint(task)
        task_url = 'https://' + self.hostname + '/v1/tasks/' + task['id']
        print("Host Commission task completed with status: "
              + self.utils.poll_on_id(task_url, True))
def fuzz(self):
    """Open each fuzz URL in the target app, give it time to react, kill the app,
    and record whether that input produced a crash report."""
    for target_url in self.fuzz_inputs:
        # Clear old reports so any report found afterwards belongs to this input.
        self.delete_old_reports()
        Utils.openurl(target_url)
        time.sleep(2)
        Utils.kill_by_name(self.app)
        self.results[target_url] = self.crashed()
    print(self.results)
def openvas_start(self): cmd = "sudo service openvas-scanner restart" out = Utils.cmd_block(self.client, cmd).split("\n") print "1", out cmd = "sudo service openvas-manager restart" out = Utils.cmd_block(self.client, cmd).split("\n") print "2", out cmd = "sudo openvasmd --rebuild --progress" out = Utils.cmd_block(self.client, cmd).split("\n") print "3", out
def sql_check():
    """Locate SQL/db files on the device, record their data-protection class,
    and scan their contents for sensitive data. Returns the checker results,
    or None when no files were found."""
    db_files = get_files()
    if not db_files:
        Utils.printy("No SQL files found ", 2)
        return
    # Record each file's data-protection class for the final report.
    data.local_file_protection.extend(Utils.get_dataprotection(db_files))
    checker = Checker(db_files, 'SQL')
    checker.start()
    Utils.printy_result('Database Check.', 1)
    return checker.results
def dump(self):
    """Run keychain_dumper on the device and collect every non-null keychain value."""
    out = Utils.cmd_block(self.client, './keychain_dumper')
    for line in out.split('\n'):
        # Lines look like "Keychain Data: <value>"; skip null entries.
        if line.startswith('Keychain Data:') and '(null)' not in line:
            value = line[15:]
            if value:
                self.all_keychain_values.append(value)
    self.filter()
    Utils.printy_result('Keychain Dump', 1)
    return self.results
def get_data_from_api(self, year):
    """Fetch the companies attending the event for the given two-digit `year`
    (e.g. '21') and cache the JSON to a local file.

    Returns the parsed JSON, or None if the fetch/save failed.
    """
    print("Getting data from API...")
    # year = '21'
    url = f'https://www.siliconmilkroundabout.com/api/companies-attending?event_id={year}'
    # BUG FIX: companies_json was unbound when getJson raised, turning any
    # network error into a NameError at the return statement below.
    companies_json = None
    try:
        companies_json = getJson(url)
        # Save retrieved data to file
        Utils.save_to_json(f'hiringcompanies20{year}.json', companies_json)
    except Exception as e:
        print(e)
    return companies_json
class VxRailAuthAutomator:
    """Interactively collect VxRail Manager root and admin SSH credentials."""

    def __init__(self, args):
        self.utils = Utils(args)
        self.description = "VxRail Manager authentication details"

    def main_func(self):
        """Prompt for both credential sets and return them as API-ready objects."""
        separator = ['', '', '']
        self.utils.printCyan(
            "Please input VxRail Manager's preconfigure root credentials")
        root_user = "******"
        root_password = self.__handle_password_input()
        print(*separator, sep='\n')
        self.utils.printCyan(
            "Please input VxRail Manager's preconfigured admin credentials")
        admin_user = self.utils.valid_input(
            "\033[1m Enter username (mystic): \033[0m", "mystic")
        admin_password = self.__handle_password_input()
        print(*separator, sep='\n')
        return {
            "rootCredentials": self.__to_credential_obj(root_user, root_password),
            "adminCredentials": self.__to_credential_obj(admin_user, admin_password),
        }

    def __to_credential_obj(self, user, pwd):
        """Wrap a username/password pair in the SSH credential payload shape."""
        return {"credentialType": "SSH", "username": user, "password": pwd}

    def __valid_password(self, inputstr):
        """Delegate password-strength validation to the shared Utils helper."""
        return self.utils.password_check(inputstr)

    def __valid_pwd_match(self, inputstr, ext_args):
        """Return True when `inputstr` equals the expected value in `ext_args`."""
        matched = inputstr == ext_args
        if not matched:
            self.utils.print_error("Password doesn't match")
        return matched

    def __handle_password_input(self):
        """Prompt for a password twice until both entries agree, then return it."""
        while True:
            pwd = getpass.getpass("\033[1m Enter password: \033[0m")
            confirm = getpass.getpass("\033[1m Confirm password: \033[0m")
            if pwd == confirm:
                return pwd
            self.utils.printRed("Passwords don't match")
def check(self):
    """Scan device plist files: record their data-protection class, then check
    their contents for sensitive data."""
    plist_files = self.get_files()
    if not plist_files:
        Utils.printy("No Plist files found ", 2)
        return
    # Record each file's data-protection class for the final report.
    data.local_file_protection.extend(Utils.get_dataprotection(plist_files))
    # Scan plist contents for sensitive data.
    checker = Checker(plist_files, 'PLIST')
    checker.start()
    data.plist_file_results = checker.results
    Utils.printy_result('Plist Check.', 1)
def __init__(self):
    """Collect SSO credentials interactively and wire up all workflow automators."""
    sso_username = input("\033[1m Enter the SSO username: \033[0m")
    sso_password = getpass.getpass("\033[1m Enter the SSO password: \033[0m")
    # args layout expected by the automators: [hostname, username, password]
    args = ["localhost", sso_username, sso_password]
    self.utils = Utils(args)
    self.utils.printGreen('Welcome to VxRail Workload Automator')
    self.domains = DomainsAutomator(args)
    self.hosts = HostsAutomator(args)
    self.clusters = ClustersAutomator(args)
    self.nsxt = NSXTAutomator(args)
    self.vxrailmanager = VxRailAuthAutomator(args)
    self.licenses = LicenseAutomator(args)
    self.hostname = args[0]
class DecomissionHosts:
    """Decommission hosts from SDDC Manager via the public API.

    NOTE(review): class name is misspelled ('Decomission') but kept as-is for
    backward compatibility with existing callers.
    """

    def __init__(self):
        print('Decommission Hosts')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP.
        self.hostname = sys.argv[1]

    def decommission_hosts(self):
        """Submit the decommission spec and poll the resulting task to completion."""
        url = 'https://' + self.hostname + '/v1/hosts'
        spec = self.utils.read_input(
            os.path.abspath(__file__ + '/../') + '/decommission_hosts_spec.json')
        print('Decommissioning hosts...')
        response = self.utils.delete_request(spec, url)
        pprint.pprint(response)
        task_url = 'https://' + self.hostname + '/v1/tasks/' + response['id']
        print('Decommissioning of hosts is ' + self.utils.poll_on_id(task_url, True))
def creat_target(self): cmd = 'omp -X "<create_target><name>' + self.target_name + '</name><hosts>' + self.ip + '</hosts></create_target>"' out = Utils.cmd_block(self.client, cmd).split("\n") print "4", out[0] status = ET.fromstring(out[0]).attrib['status'] print status if status == "400": cmd = "omp -T | grep " + self.target_name out = Utils.cmd_block(self.client, cmd).split("\n") self.target_id = out[0].split(" ")[0] if status == "201": self.target_id = ET.fromstring(out[0]).attrib['id'] print self.target_id
def crashed(self):
    """Return True when a crash report mentioning the app exists on the device."""
    find_cmd = 'find {} -type f | grep {}'.format(data.crash_report_folder, self.app)
    first_line = Utils.cmd_block(data.client, find_cmd).split("\n")[0]
    # grep output is empty when nothing matched -> falsy first line.
    return bool(first_line)
class DeleteCluster:
    """Mark a cluster for deletion, delete it, and poll the deletion task."""

    def __init__(self):
        print('Delete Cluster')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP; sys.argv[4] the cluster id.
        self.hostname = sys.argv[1]
        self.cluster_id = sys.argv[4]

    def delete_cluster(self):
        """PATCH the deletion mark, DELETE the cluster, and report task status."""
        cluster_url = 'https://' + self.hostname + '/v1/clusters/' + self.cluster_id
        # A cluster must be marked for deletion before DELETE is accepted.
        self.utils.patch_request({"markForDeletion": True}, cluster_url)
        response = self.utils.delete_request({}, cluster_url)
        task_url = 'https://' + self.hostname + '/v1/tasks/' + response['id']
        print("Cluster deletion Status:" + self.utils.poll_on_id(task_url, True))
def get_files():
    """Find all .sqlite and .db files under the app's bundle and data directories."""
    search_dirs = ' '.join([data.metadata['bundle_directory'],
                            data.metadata['data_directory']])
    found = []
    for pattern in ('*.sqlite', '*.db'):
        cmd = '{bin} {dirs} -type f -name "{pat}"'.format(
            bin=data.DEVICE_TOOLS['FIND'], dirs=search_dirs, pat=pattern)
        found.extend(Utils.cmd_block(data.client, cmd).split("\n"))
    # split("\n") leaves empty strings behind; drop them.
    return [path for path in found if path != '']
def get_clusters(self, points_df, kernel='gaussian', bandwidth=0.005, test_bl=None,
                 test_tr=None, test_resolution=100, threshold=80, test_original=False):
    """
    Thresholds a 3d map generated from KDE and labels connected clusters.

    Parameters
    ----------
    points_df : pandas.DataFrame
        An (n,2) dataframe containing columns 'lat', 'lng'
    kernel : string
        The kernel to use. Valid kernels are
        ['gaussian'|'tophat'|'epanechnikov'|'exponential'|'linear'|'cosine']
        Default is 'gaussian'.
    bandwidth : float
        The bandwidth of the kernel.
    test_bl : tuple
        (lat, lng) bottom left corner of the matrix/grid to test on.
        If None, defaults to min(lat), min(lng) values from points_df
    test_tr : tuple
        (lat, lng) top right corner of the matrix/grid to test on.
        If None, defaults to max(lat), max(lng) values from points_df
    test_resolution : int
        Resolution of the test grid. Default is 100 (a 100*100 grid)
    threshold : int
        Percentile of density above which a cell belongs to a cluster.
    test_original : bool
        When True, infer the grid shape from the heatmap size instead of
        assuming a square test_resolution x test_resolution grid.

    Returns
    -------
    result : pandas.DataFrame
        DataFrame with rows <= n and columns 'lat', 'lng', 'density',
        'cluster_number'
    """
    heatmap = self.get_heatmap(points_df, kernel=kernel, bandwidth=bandwidth,
                               test_bl=test_bl, test_tr=test_tr,
                               test_resolution=test_resolution,
                               test_original=test_original)
    heatmap_df = pd.DataFrame(heatmap, columns=['lat', 'lng', 'density'])
    heatmap_df['thresholded'] = (
        heatmap_df['density'] > np.percentile(heatmap_df['density'], threshold))
    if test_original:
        # Pick the middle factor pair so the grid is as square as possible.
        factors = sorted(Utils.factors(heatmap_df.shape[0]))
        rows = factors[math.floor(len(factors) / 2)]
        # BUG FIX: '/' yields a float in Python 3 and reshape rejects float
        # dimensions; integer division is what was intended.
        cols = heatmap_df.shape[0] // rows
        grid_shape = (rows, cols)
    else:
        grid_shape = (test_resolution, test_resolution)
    # BUG FIX: the original used Series[:, np.newaxis], i.e. 2-D indexing on a
    # pandas Series, which is not supported; convert to a numpy array first.
    X = heatmap_df['thresholded'].to_numpy().reshape(grid_shape)
    cluster_numbers, num = measurements.label(X)
    heatmap_df['cluster_number'] = cluster_numbers.ravel()
    # .copy() avoids mutating a view of heatmap_df (SettingWithCopy) below.
    clusters = heatmap_df[heatmap_df['thresholded'] == True].copy()
    del clusters['thresholded']
    return clusters
def get(self): # Compose cmd string dirs = [data.metadata['bundle_directory'], data.metadata['data_directory']] dirs_str = ' '.join(dirs) cmd = '{bin} {dirs_str} -type f -name "*sql*"'.format(bin=data.DEVICE_TOOLS['FIND'], dirs_str=dirs_str) out = Utils.cmd_block(self.client, cmd).split("\n") # No files found if not out: print("No SQL files found") return # Add data protection class retrieved_files = Utils.get_dataprotection(out) for file_lable in retrieved_files: print file_lable[0], "protection:", file_lable[1]
def parse_plist(self, plist):
    """Given a plist file, copy it to the temp folder, convert it to XML with
    plutil on-device, and parse the result with plistlib."""
    # Work on a copy so the original file on the device is untouched.
    temp_path = self.build_temp_path_for_file(plist.strip("'"))
    plist_copy = Utils.escape_path(temp_path)
    self.file_copy(plist, plist_copy)
    # Convert the (possibly binary) plist to xml1 in place.
    convert_cmd = '{plutil} -convert xml1 {plist}'.format(
        plutil=data.DEVICE_TOOLS['PLUTIL'], plist=plist_copy)
    Utils.cmd_block(self.client, convert_cmd)
    # Read the converted content back.
    out = Utils.cmd_block(self.client, 'cat {}'.format(plist_copy))
    # Normalize to a byte-compatible string before handing it to plistlib.
    out = str(''.join(out).encode('utf-8'))
    return plistlib.readPlistFromString(out)
class DeleteDomain:
    """Mark a domain for deletion, delete it, and poll the deletion task."""

    def __init__(self):
        print('Delete Domain')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP; sys.argv[4] the domain id.
        self.hostname = sys.argv[1]
        self.domain_id = sys.argv[4]

    def delete_domain(self):
        """PATCH the deletion mark, DELETE the domain, and report task status."""
        domain_url = 'https://' + self.hostname + '/v1/domains/' + self.domain_id
        # A domain must be marked for deletion before DELETE is accepted.
        self.utils.patch_request({"markForDeletion": True}, domain_url)
        response = self.utils.delete_request({}, domain_url)
        print("Deleting Domain ...")
        tasks_url = 'https://' + self.hostname + '/v1/tasks/' + response['id']
        print("Domain deletion Status:" + self.utils.poll_on_id(tasks_url, True))
def __run_otool(self, query, grep=None):
    """Run otool with `query` against the app binary; when `grep` is given,
    pipe the output through a case-insensitive extended grep. Returns the
    output split into lines."""
    cmd = '{bin} {query} {app}'.format(bin=data.DEVICE_TOOLS['OTOOL'],
                                       query=query,
                                       app=data.metadata['binary_path'])
    if grep:
        cmd = '{0} | grep -Ei "{1}"'.format(cmd, grep)
    return Utils.cmd_block(self.client, cmd).split("\n")
class LicenseAutomator:
    """Prompt the user to pick VSAN and NSX-T license keys from SDDC Manager."""

    def __init__(self, args):
        self.utils = Utils(args)
        self.description = "Select license"
        # args[0] is the SDDC Manager hostname/IP.
        self.hostname = args[0]

    def main_func(self):
        """Interactively select one license per product and return the choices."""
        licenses = self.__get_licenses()
        selected = {}
        separator = ['', '', '']
        for product in licenses.keys():
            self.utils.printCyan("Please choose a {} license:".format(product))
            options = {}
            # Present a 1-based numbered menu of the available keys.
            for idx, lic in enumerate(licenses[product], 1):
                self.utils.printBold("{}) {}".format(
                    idx, self.__output_license_info(lic)))
                options[str(idx)] = lic
            choice = self.__valid_option(
                input("\033[1m Enter your choice(number): \033[0m"),
                options.keys())
            selected[product] = options[choice]["key"]
            print(*separator, sep='\n')
        return {"licenseKeys": selected}

    def __output_license_info(self, licenseobj):
        """Format a license as 'KEY (validity-status)' for display."""
        return "{} ({})".format(licenseobj["key"], licenseobj["validity"])

    def __get_licenses(self):
        """Fetch license keys from the API, grouped by product type."""
        self.utils.printGreen("Getting license information...")
        url = 'https://' + self.hostname + '/v1/license-keys?productType=VSAN,NSXT'
        response = self.utils.get_request(url)

        def keys_for(product_type):
            # Keep only the fields the menu needs.
            return [{"key": ele["key"],
                     "validity": ele["licenseKeyValidity"]["licenseKeyStatus"]}
                    for ele in response["elements"]
                    if ele["productType"] == product_type]

        return {"VSAN": keys_for("VSAN"), "NSX-T": keys_for("NSXT")}

    def __valid_option(self, inputstr, choices):
        """Return the user's choice if valid; otherwise fall back to the first."""
        choice = str(inputstr).strip().lower()
        if choice in choices:
            return choice
        self.utils.printYellow("**Use first choice by default")
        return list(choices)[0]
class GetClusters:
    """Fetch and pretty-print all clusters known to SDDC Manager."""

    def __init__(self):
        print('Get Clusters')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP.
        self.hostname = sys.argv[1]

    def get_clusters(self):
        url = 'https://' + self.hostname + '/v1/clusters/'
        pprint.pprint(self.utils.get_request(url))
def dump_binary():
    """Decrypt the target app binary on-device via dumpdecrypted and fetch the
    decrypted copy back to the host."""
    documents_dir = data.metadata['data_directory'] + '/Documents'
    dylib_remote = documents_dir + '/dumpdecrypted.dylib'
    # Push the dumpdecrypted dylib into the app's Documents folder.
    Utils.sftp_put(ip=config.mobile_ip, port=config.ssh_port,
                   username=config.mobile_user, password=config.mobile_password,
                   remote_path=dylib_remote,
                   local_file='./tools/dumpdecrypted.dylib')
    # Launch the binary with the dylib injected so it writes a decrypted copy.
    dump_cmd = 'DYLD_INSERT_LIBRARIES={} {}'.format(
        dylib_remote, data.metadata['binary_path'])
    Utils.cmd_block(data.client, dump_cmd)
    # get decrypted file from iphone
    remote_file = './{}.decrypted'.format(data.metadata['binary_name'])
    data.static_file_path = bin_get.via_sftp(remote_file)
def _detect_architectures(self, binary):
    """Use lipo to detect which architectures `binary` supports. Returns the
    list of architecture names parsed from lipo's output."""
    lipo_cmd = '{lipo} -info {binary}'.format(lipo=data.DEVICE_TOOLS['LIPO'],
                                              binary=binary)
    output = Utils.cmd_block(self.client, lipo_cmd).strip()
    # lipo prints "...: <arch1> <arch2>"; take everything after the last ': '.
    return output.rsplit(': ')[-1].split(' ')
class GetNetworkPools:
    """Fetch and pretty-print the network pools configured in SDDC Manager."""

    def __init__(self):
        print('Get Network Pools')
        self.utils = Utils(sys.argv)
        # sys.argv[1] is the SDDC Manager hostname/IP.
        self.hostname = sys.argv[1]

    def get_network_pools(self):
        url = 'https://' + self.hostname + '/v1/network-pools'
        pprint.pprint(self.utils.get_request(url))
def get_files(self):
    """Find all .plist files under the app's bundle and data directories."""
    dirs_str = ' '.join([data.metadata['bundle_directory'],
                         data.metadata['data_directory']])
    cmd = '{bin} {dirs_str} -type f -name "*.plist"'.format(
        bin=data.DEVICE_TOOLS['FIND'], dirs_str=dirs_str)
    output = Utils.cmd_block(self.client, cmd).split("\n")
    # split("\n") leaves empty strings behind; drop them.
    return [path for path in output if path != '']
def get_heatmap(self, variables_arr, kernel='gaussian', bandwidth=0.005,
                test_bl=None, test_tr=None, test_resolution=100):
    """
    Returns estimation on specified test grid after using Kernel Density
    Estimation, mapping three variables' densities onto RGB channels.

    Parameters
    ----------
    variables_arr : list of Pandas DataFrame objects
        Each dataframe should contain lat, long points and have the shape (n,2)
    kernel : string
        The kernel to use. Valid kernels are
        ['gaussian'|'tophat'|'epanechnikov'|'exponential'|'linear'|'cosine']
        Default is 'gaussian'.
    bandwidth : float
        The bandwidth of the kernel.
    test_bl : tuple
        (lat, lng) bottom left corner of the matrix/grid to test on.
        If None, defaults to min(lat), min(lng) values from points_df
    test_tr : tuple
        (lat, lng) top right corner of the matrix/grid to test on.
        If None, defaults to max(lat), max(lng) values from points_df
    test_resolution: int
        Resolution of the test grid. Default is 100 (a 100*100 grid)

    Returns
    -------
    result : pandas.DataFrame
        Columns 'lat', 'lng', 'color' (hex color code per grid point).
    """
    # The RGB mapping below consumes exactly three density channels.
    if len(variables_arr) > 3:
        raise NotImplementedError("As of now, only 3 variables are supported")
    kde = KDE()
    densities_arr = []
    for var in variables_arr:
        points_and_density = kde.get_heatmap(var, kernel=kernel,
                                             bandwidth=bandwidth,
                                             test_bl=test_bl, test_tr=test_tr,
                                             test_resolution=test_resolution)
        density = points_and_density[:, 2]
        # Linearly rescale each channel's densities into the 0..255 range.
        mapping = interp1d([density.min(), density.max()], [0, 255])
        densities_arr.append(mapping(density))
    # BUG FIX: removed a stray ipshell() debugging call (interactive IPython
    # shell) that was left in the execution path.
    rgb_arr = zip(densities_arr[0], densities_arr[1], densities_arr[2])
    hex_arr = [Utils.rgb_to_hex(rgb) for rgb in rgb_arr]
    # list(...) so from_records receives a materialized sequence (the Python 3
    # zip iterator is consumed exactly once).
    return pd.DataFrame().from_records(
        list(zip(points_and_density[:, 0], points_and_density[:, 1], hex_arr)),
        columns=['lat', 'lng', 'color'])
def get_heatmap(data, outpath, bandwidth):
    """Compute a KDE heatmap over the (lat, lng) points in `data`, color the
    density quantiles, and write the result to `outpath` as CSV."""
    sc = SpatialClusters()
    # BUG FIX: DataFrame.as_matrix() was deprecated in pandas 0.23 and removed
    # in 1.0; .values is the equivalent that works across all versions.
    heatmap = sc.get_heatmap(data[['lat', 'lng']].dropna().values, bandwidth)
    heatmap['colors'] = Utils.assign_colors_o(
        heatmap['density'], quantiles=[0.6, 0.75, 0.9, 0.95, 0.99])
    heatmap.to_csv(outpath)