def report_ope(ope, scan_id, file_id):
    """Dispatch a Tenable.io scan-report operation.

    Args:
        ope: Operation name: 'status' or 'download'. Anything else is
            treated as an invalid access.
        scan_id: Tenable.io scan identifier.
        file_id: Export file identifier returned by the export request.

    Returns:
        For 'status': the index page rendered with the export status.
        For 'download': the raw response of the S3 upload helper.
        Otherwise: the index page rendered with an invalid-access message.
    """
    # Initialize the API client.
    client = TenableIOClient()

    # Temporary report CSV name keyed by timestamp and scan id; used as
    # the S3 object key in the 'download' branch.
    todaynow = datetime.today().strftime('%Y%m%d%H%M%S')
    tenable_csv_folder = 'csv_report/'
    tenable_csv_file = (tenable_csv_folder + str(todaynow)
                        + '_scanid_' + str(scan_id) + '.csv')

    if ope == 'status':
        request_uri = 'scans/' + str(scan_id) + '/export/' + str(file_id) + '/status'
        resp = client.get(request_uri,
                          path_params={'scan_id': scan_id, 'file_id': file_id})
        # The status endpoint returns JSON such as {"status": "ready"}.
        str_msg = json.loads(resp.text).get('status')
        return render_template('index.html', message=str_msg)

    if ope == 'download':
        request_uri = 'scans/' + str(scan_id) + '/export/' + str(file_id) + '/download'
        resp = client.get(request_uri,
                          path_params={'scan_id': scan_id, 'file_id': file_id},
                          stream=True)
        # Persist the exported CSV to AWS S3 and return the upload response.
        s3 = S3Api()
        return s3.response(tenable_csv_file, resp.text)

    # Unknown operation: log-style message shown to the user (kept verbatim).
    return render_template('index.html', message=u"REPORT_OPE|不正アクセスを記録しました。")
def downloadVulnsFromTio(filename, cust):
    """Export the vulnerability workbench for one customer to a CSV file.

    Args:
        filename: Path of the local CSV file to write.
        cust: Customer record dict with 'name', '_id' (used as the access
            key) and 'secretkey' keys.

    Returns:
        True on success, False if the connection or the export request
        failed.
    """
    DEBUG = True

    # Open the connection to Tio for the particular customer.
    print("Connecting to Tenable.io for:", cust['name'])
    try:
        tioconn = TenableIOClient(access_key=cust['_id'],
                                  secret_key=cust['secretkey'])
    except Exception:
        # Narrowed from a bare except so Ctrl-C/SystemExit still propagate.
        print("Problem connecting to Tenable.io")
        return False

    # Request a CSV export of the vulnerability workbench.
    requesturl = ("workbenches/export?format=csv&report=vulnerabilities"
                  "&chapter=vuln_by_plugin&date_range=0")
    resp = tioconn.get(requesturl)
    if DEBUG:
        print("Raw response text:", resp.text)
    respdata = json.loads(resp.text)

    try:
        downloadid = str(respdata['file'])
    except KeyError:
        # No 'file' id in the response means the export was not started.
        print("Unable to start download")
        return False

    statusurl = "workbenches/export/" + downloadid + "/status"
    downloadurl = "workbenches/export/" + downloadid + "/download"
    if DEBUG:
        print("Waiting for download ID", downloadid)
        print("status URL :", statusurl)

    # Poll until the export file is ready for download.
    downloadstatus = ""
    while downloadstatus != "ready":
        resp = tioconn.get(statusurl)
        respdata = json.loads(resp.text)
        downloadstatus = respdata['status']
        if DEBUG:
            print("Raw response", resp.text)
        time.sleep(2)

    resp = tioconn.get(downloadurl)
    if DEBUG:
        print("Raw response", resp)

    # Write the whole payload in one call; the original iterated resp.text
    # character by character, issuing one write() per character.
    with open(filename, 'wb') as fp:
        fp.write(resp.text.encode('utf-8'))
    sys.stdout.flush()
    return True
def s3_api(scan_id, file_id, filename='20170213_tenable.csv'):
    """Download a scan export from Tenable.io and store it in S3.

    Args:
        scan_id: Tenable.io scan identifier.
        file_id: Export file identifier for the scan.
        filename: S3 object key for the stored CSV. The default keeps the
            previously hard-coded name so existing callers are unaffected.

    Returns:
        The response from the S3 upload helper.
    """
    # Initialize the API client.
    client = TenableIOClient()
    request_uri = 'scans/' + str(scan_id) + '/export/' + str(file_id) + '/download'
    resp = client.get(request_uri,
                      path_params={'scan_id': scan_id, 'file_id': file_id},
                      stream=True)
    s3 = S3Api()
    return s3.response(filename, resp.text)
def GenerateAssetCSV(accesskey, secretkey, filename):
    """Download the asset list from Tenable.io and write it to a CSV file.

    Args:
        accesskey: Tenable.io API access key.
        secretkey: Tenable.io API secret key.
        filename: Path of the CSV file to create (overwritten if present).

    Returns:
        True. Raises KeyError if an asset record lacks one of the expected
        fields, and whatever the client raises on a failed request.
    """
    # Create the connection to Tenable.io.
    client = TenableIOClient(access_key=accesskey, secret_key=secretkey)

    # Gather the list of assets.
    resp = client.get("assets")
    respdata = json.loads(resp.text)

    # CSV column order; also the exact keys copied from each asset record.
    fieldnames = [
        'id',
        'has_agent',
        'last_seen',
        'sources',
        'ipv4',
        'ipv6',
        'fqdn',
        'netbios_name',
        'operating_system'
    ]

    # The 'with' block closes the file; the original's extra close() after
    # it was redundant and has been dropped.
    with open(filename, "w") as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        # Each row is built from fieldnames itself, so the header and the
        # copied keys can never drift apart.
        for asset in respdata['assets']:
            writer.writerow({name: asset[name] for name in fieldnames})

    return True
def test_s3(scan_id, file_id):
    """Fetch a scan export from Tenable.io and upload it to S3 (smoke test).

    Downloads the export identified by scan_id/file_id and PUTs it into the
    bucket named by the S3_BUCKET environment variable, then returns the S3
    response as JSON.
    """
    tio = TenableIOClient()
    download_uri = 'scans/' + str(scan_id) + '/export/' + str(file_id) + '/download'
    export = tio.get(download_uri,
                     path_params={'scan_id': scan_id, 'file_id': file_id},
                     stream=True)

    # Target bucket comes from the environment; the object key is fixed.
    bucket = boto3.resource('s3').Bucket(os.environ.get('S3_BUCKET'))
    target = bucket.Object('20170212_test.csv')
    result = target.put(
        Body=export.text,
        ContentEncoding='utf-8',
        ContentType='text/csv'
    )
    return jsonify(result)
def FindNewHosts(accesskey,secretkey,searchhours):
    """Write assets first seen within the last `searchhours` hours to newhosts.csv.

    NOTE(review): this function uses Python 2 print statements, unlike the
    rest of the file — it will not run under Python 3 as-is.

    Args:
        accesskey: Tenable.io API access key.
        secretkey: Tenable.io API secret key.
        searchhours: Look-back window in hours (anything int() accepts).

    Returns:
        The number of new assets found. Side effect: creates newhosts.csv,
        then deletes it again if no new assets were found.
    """
    DEBUG=False
    #Start counting the number of new assets
    newcount=0
    #Connect to Tenable.io
    client = TenableIOClient(access_key=accesskey, secret_key=secretkey)
    #Send API call to gather the list of assets
    resp=client.get("workbenches/assets")
    respdata=json.loads(resp.text)
    if DEBUG:
        print "Response",respdata
        print "\n\n"
    #Figure out the time delta based on the supplied hours. This will be used
    # as the cut-off time.
    # NOTE(review): uses naive local time on both sides of the comparison;
    # assumes the API's first_seen timestamps are in the same timezone — confirm.
    hoursago=datetime.datetime.now()-datetime.timedelta(hours=int(searchhours))
    #Open a CSV file
    with open("newhosts.csv","w") as csvfile:
        fieldnames=['id','first_seen','FQDNs','IP Addresses','NetBIOS Names']
        writer=csv.DictWriter(csvfile,fieldnames=fieldnames)
        writer.writeheader()
        #Parse the data from Tenable.io
        for i in respdata['assets']:
            if DEBUG:
                print "Asset ID:",i['id']
                for x in i['sources']:
                    print "First seen",x['first_seen'],'by a',x['name']
                for x in i['fqdn']:
                    print "FQDN:",x
                for x in i['ipv4']:
                    print "IPv4:",x
                for x in i['netbios_name']:
                    print "NetBIOS name:",x
            #For this asset, go through all the vulnerability data sources and determine the
            # first time this asset was seen. Seeded with now() so any source
            # timestamp can only move it earlier.
            first_seen=datetime.datetime.now()
            for x in i['sources']:
                # Only the first 19 chars are parsed ('YYYY-MM-DDTHH:MM:SS'),
                # dropping any fractional seconds / timezone suffix.
                this_time=datetime.datetime.strptime(x['first_seen'][0:19], '%Y-%m-%dT%H:%M:%S')
                if first_seen > this_time:
                    first_seen=this_time
            if DEBUG:
                print "First seen:",first_seen
            #See if the first time the asset was seen is within the time range we are looking for.
            # If it is within the range, it is a new host and will be written to the CSV file.
            if first_seen >= hoursago:
                newcount+=1
                # Comma-joined accumulators for the CSV cells of this asset.
                fqdns=""
                ipv4=""
                netbios=""
                if DEBUG:
                    print "New host!!!"
                    print "First seen:",first_seen
                    print "Asset ID:",i['id']
                for x in i['sources']:
                    if DEBUG:
                        print "First seen",x['first_seen'],'by a',x['name']
                for x in i['fqdn']:
                    if DEBUG:
                        print "FQDN:",x
                    if fqdns == "":
                        fqdns=x
                    else:
                        fqdns=fqdns+","+x
                for x in i['ipv4']:
                    if DEBUG:
                        print "IPv4:",x
                    if ipv4 == "":
                        ipv4=x
                    else:
                        ipv4=ipv4+","+x
                for x in i['netbios_name']:
                    if DEBUG:
                        print "NetBIOS name:",x
                    if netbios == "":
                        netbios=x
                    else:
                        netbios=netbios+","+x
                if DEBUG:
                    # Bare print: blank separator line in debug output.
                    print
                rowdict={'id':i['id'], 'first_seen': first_seen, 'FQDNs': fqdns,'IP Addresses': ipv4, 'NetBIOS Names': netbios}
                writer.writerow(rowdict)
    # Redundant: the 'with' block above already closed the file.
    csvfile.close()
    print "Total new hosts in the specified time range:",newcount
    #If there were no new assets, then delete the CSV file.
    if newcount == 0:
        os.remove("newhosts.csv")
    return(newcount)
def getFolders():
    """Fetch the folder list from Tenable.io and render it on the index page."""
    tio = TenableIOClient()
    folders_resp = tio.get('folders')
    return render_template('index.html', message=folders_resp.text)