def main():
    """Run the Google Safe Browsing lookup daemon.

    Loops forever: every hour it syncs the local GSB list, then looks up
    every queryable SLD and URL from the database and stores each lookup
    result (stringified) together with the query timestamp.
    """
    sbl = SafeBrowsingList(config.gsb_key, db_path=config.gsb_db_path)
    dbo = db_operations.DBOperator()
    while True:
        slds = dbo.get_gsb_queryable_slds()
        urls = dbo.get_gsb_queryable_urls()
        domain_urls = slds + urls
        print(len(domain_urls))
        query_time = datetime.now()
        print("GSB Update time:", str(query_time))
        # Refresh the local copy of the GSB list before doing any lookups.
        run_sync(sbl)
        print("Got updated GSB list. Now looking up %s domains: %s" % (
            len(domain_urls), str(datetime.now())))
        for d in domain_urls:
            print(d)
            try:
                result = sbl.lookup_url(d)
                print(result)
                result = "%s" % (result,)  # stringify for DB storage
                dbo.update_gsb_table(d, result, query_time)
            except Exception as e:
                # Best-effort: one bad domain must not stop the batch.
                print("Exception. Skipping this domain: ", d, e)
        print("Done inserting into DB. Will update GSB list again", str(datetime.now()))
        time.sleep(3600)  # re-sync the GSB list hourly
def setup():
    """Schedule one immediate run_run_event per feeder and run the scheduler.

    scheduler.run() blocks until every scheduled event (and anything those
    events re-schedule) has completed.
    """
    scheduler = sched.scheduler(time.time, time.sleep)
    db_operator = db_operations.DBOperator()
    for feeder in feeders:
        print("Scheduling an event")
        print(feeder)
        # delay=0: fire immediately; all events share priority 1.
        scheduler.enter(0, 1, run_run_event, (scheduler, feeder, db_operator))
    scheduler.run()
def main():
    """Poll the feed forever, inserting new domains (plus IP data) and file hashes.

    Each cycle fetches a dict with 'domains' and 'file_hashes' keys, stores
    every entry in the DB, then sleeps QUERY_FREQUENCY seconds.
    """
    dbo = db_operations.DBOperator()
    while True:
        data_dict = get_data()
        for domain in data_dict['domains']:
            dbo.insert_domain_gsb(domain)
            insert_ip_data(domain, dbo)
        for file_hash in data_dict['file_hashes']:
            dbo.insert_file_hash_vt(file_hash)
        print("Got and inserted %s domains and %s file_hashes:" % (
            len(data_dict['domains']), len(data_dict['file_hashes'])))
        time.sleep(QUERY_FREQUENCY)
def main():
    """Poll the DB for pending file hashes and fetch their VirusTotal reports forever.

    Sleeps SLEEP_INTERVAL between individual VT queries (rate limiting) and
    again between polling cycles.
    """
    dbo = db_operations.DBOperator()
    while True:
        file_hashes = dbo.get_vt_file_hashes()
        if file_hashes:
            print("Sending %s file_hashes to VT" % (len(file_hashes),))
            for file_hash in file_hashes:
                json_resp, pos, total = get_vt_report(file_hash)
                dbo.update_vt_table(file_hash, json_resp, pos, total)
                time.sleep(SLEEP_INTERVAL)  # rate-limit per-hash VT queries
        time.sleep(SLEEP_INTERVAL)
def main():
    """Fetch VT reports for previously uploaded file hashes (single pass).

    For each hash: skip if VT returned no report yet; persist the report only
    when the positives count is a real value (truthy and not the -1 sentinel).
    Sleeps SLEEP_INTERVAL between VT queries to respect rate limits.
    """
    dbo = db_operations.DBOperator()
    file_hashes = dbo.get_vt_uploads_file_hashes_2()
    print(len(file_hashes))
    if file_hashes:
        print("Sending %s file_hashes to VT" % (len(file_hashes),))
        for file_hash in file_hashes:
            resp = get_vt_report(file_hash)
            if not resp:
                continue  # no report available yet; try again next run
            json_resp, pos, total = resp
            if pos and pos != -1:
                dbo.update_vt_uploads_table(file_hash, json_resp, pos, total)
            time.sleep(SLEEP_INTERVAL)  # rate-limit per-hash VT queries
def main():
    """Upload milked files that VT has neither received nor reported on.

    Builds the candidate set from the milked-files directory minus hashes
    already sent and hashes that already have a report, then uploads one
    file per SLEEP_INTERVAL, recording each successful upload in the DB.
    """
    dbo = db_operations.DBOperator()
    file_hashes = set(os.listdir(MILKED_FILES_PATH))
    file_hashes_with_report = dbo.get_vt_file_hashes_with_report()
    sent_file_hashes = dbo.get_vt_uploads_file_hashes()
    # Skip anything already uploaded or already reported on.
    file_hashes = file_hashes.difference(sent_file_hashes)
    file_hashes = file_hashes.difference(file_hashes_with_report)
    print(len(file_hashes))
    for file_hash in file_hashes:
        try:
            response = send_file_to_vt(file_hash)
            if response['response_code'] != 1:
                # VT refused the upload (presumably quota exhausted) --
                # abort the whole run rather than hammer the API.
                print(response)
                return
        except Exception as e:
            # Best-effort: one failed upload must not stop the batch.
            print("Exception", e)
            continue
        dbo.insert_file_hash_vt_uploads(file_hash)
        time.sleep(SLEEP_INTERVAL)  # rate-limit uploads
pick = random.choice( the_list ) the_list.remove( pick ) if thedic[pick][1] is None : thedic[pick][1] = dt.now() if thedic[pick][1] <= dt.now() : return pick # check if the prtogram is started as worker, start an infinite loop try : counter =0 dbo = db_operations.DBOperator() while (True): print('started') #urls = ['https://disweb.deploys.io/channels/index.html?_sw-precache=527f447f356db3d634a7f7ca0aec7efe'] urls = dbo.get_vt_check_urls() if args['check'] else dbo.get_vt_check_urls() print(len(urls)) if( len(urls) > 0 ): key_index =0 for url in urls : key = getKey(key_timing_dict ) #key = 'b04043a00233894aa558e7d3c6801080b09de85d90f6129cfc25f9702146e220' print(url) headers = { "Accept-Encoding": "gzip, deflate", "User-Agent": random.choice( userAgents )