def subdomain_scan(domain, ret, now_time):
    """Enumerate subdomains of *domain* with every configured engine.

    Found subdomains are added to *ret* (a set, mutated in place) and
    persisted to srcscan.db with *now_time* as the update timestamp.
    Returns *ret*.
    """
    db_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        'srcscan.db')
    database = Database(db_path)
    database.connect()
    database.init()

    logger.sysinfo("Scanning domain %s." % domain)

    # One engine instance per registered engine class.
    searchers = [engine_cls(domain) for engine_cls in engines.values()]
    loop = asyncio.get_event_loop()
    if debug:
        loop.set_debug(True)

    # All futures are scheduled up front, so they run concurrently even
    # though the loop is driven to completion one future at a time.
    pending = [asyncio.ensure_future(searcher.run()) for searcher in searchers]
    for task in pending:
        loop.run_until_complete(task)
    # loop.close()

    for searcher in searchers:
        logger.sysinfo("{engine} Found {num} sites".format(
            engine=searcher.engine_name,
            num=len(searcher.results['subdomain'])))
        ret.update(searcher.results['subdomain'])
    logger.sysinfo("Found %d subdomains of %s." % (len(ret), domain))

    # New subdomains start with no URL/title and zeroed status/length.
    for subdomain in ret:
        database.insert_subdomain(subdomain, None, None, 0, 0, now_time, domain)
    database.disconnect()
    return ret
def title_scan(domain, ret, now_time):
    """Fetch title/status/length for each subdomain in *ret* concurrently.

    :param domain  : parent domain, used only for logging
    :param ret     : iterable of subdomains to check
    :param now_time: timestamp written to the database for each update
    """
    ret = list(ret)
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()
    logger.sysinfo('Checking %d subdomains of %s.' % (len(ret), domain))

    # Guard: asyncio.wait() raises ValueError on an empty task list, which
    # the original code would have hit when no subdomains were found.
    if ret:
        loop = asyncio.get_event_loop()
        thread_num = int(conf['config']['basic']['thread_num'])
        # BUG FIX: the original expression
        #   thread_num if len(ret) > thread_num else thread_num
        # returned thread_num in both branches (a no-op). The intent was to
        # never spawn more workers than there are targets.
        thread_num = min(thread_num, len(ret))
        tasks = []
        # Worker i takes every thread_num-th subdomain starting at offset i,
        # striping the work evenly across workers.
        for i in range(0, thread_num):
            tasks.append(
                asyncio.ensure_future(
                    get_title([ret[x] for x in range(0 + i, len(ret), thread_num)])))
        loop.run_until_complete(asyncio.wait(tasks))
        for task in tasks:
            for subdomain, url, title, status, content_length in task.result():
                database.update_subdomain_status(subdomain, url, title, status,
                                                 content_length, now_time)

    database.disconnect()
    logger.sysinfo("Checked subdomains' status of %s." % domain)
def vul_scan(domain, now_time):
    """Run crawlergo vulnerability scanning over every live subdomain of *domain*.

    Subdomains with status 0 (never responded) are skipped.
    """
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()

    logger.sysinfo("Scaning vul for: %s " % (domain))

    # Materialize rows first so the scan loop below doesn't hold a cursor open.
    records = []
    for row in database.select_mondomain(domain):
        records.append({
            "subdomain": row[0],
            "url": row[1],
            "title": row[2],
            "status": row[3],
            "len": row[4],
            "update_time": row[5],
            "domain": row[6],
        })

    for record in records:
        if record['status'] != 0:
            logger.sysinfo("Scaning vul for %s." % (record['url']))
            crawlergo_scan(record['url'], record['domain'], now_time, database)

    logger.sysinfo("Scaned vul for: %s " % (domain))
    database.disconnect()
def server_loop():
    """Accept TCP clients forever, handing each connection to a daemon thread.

    Listens on (bind_ip, bind_port); each accepted connection is served by
    handle_client, which shares the single Database connection.
    """
    global bind_ip
    global bind_port
    global threads
    db = Database()
    db.connect()
    #db.init()
    db.commit()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((bind_ip, bind_port))
    s.listen(20)
    while True:
        connection, address = s.accept()
        t = threading.Thread(target=handle_client, args=(connection, db))
        threads.append(t)
        # FIX: Thread.setDaemon() is deprecated (removal-pending since 3.10);
        # assign the daemon attribute instead.
        t.daemon = True
        t.start()
    # NOTE(review): unreachable — the accept loop above never exits, so the
    # database is never disconnected. Left in place to preserve behavior.
    db.disconnect()
    return
def server_loop():
    """Accept TCP clients forever, handing each connection to a daemon thread.

    Listens on (bind_ip, bind_port); each accepted connection is served by
    handle_client, which shares the single Database connection.
    """
    global bind_ip
    global bind_port
    global threads
    db = Database()
    db.connect()
    #db.init()
    db.commit()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((bind_ip, bind_port))
    s.listen(20)
    while True:
        connection, address = s.accept()
        t = threading.Thread(target=handle_client, args=(connection, db))
        threads.append(t)
        # FIX: Thread.setDaemon() is deprecated (removal-pending since 3.10);
        # assign the daemon attribute instead.
        t.daemon = True
        t.start()
    # NOTE(review): unreachable — the accept loop above never exits, so the
    # database is never disconnected. Left in place to preserve behavior.
    db.disconnect()
    return
def save(domains, path, filename, key):
    """Export every stored record for *domains* to a CSV/XLSX file via tocsv().

    :param domains : iterable of monitored domains to export
    :param path    : output directory
    :param filename: output file name
    :param key     : sheet/group key passed through to tocsv()
    """
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()

    rows = []
    for domain in domains:
        for record in database.select_mondomain(domain):
            rows.append({
                "subdomain": record[0],
                "url": record[1],
                "title": record[2],
                "status": record[3],
                "len": record[4],
                "update_time": record[5],
                "domain": record[6],
            })

    tocsv(rows, path, filename, key)
    database.disconnect()
def read_and_insert_bids(bids_dir, config_file, verbose, createcand, createvisit):
    """
    Read the provided BIDS structure and import it into the database.

    :param bids_dir   : path to the BIDS directory
     :type bids_dir   : str
    :param config_file: path to the config file with database connection information
     :type config_file: str
    :param verbose    : flag for more printing if set
     :type verbose    : bool
    :param createcand : allow database candidate creation if it did not exist already
     :type createcand : bool
    :param createvisit: allow database visit creation if it did not exist already
     :type createvisit: bool
    """
    # database connection
    db = Database(config_file.mysql, verbose)
    db.connect()

    # grep config settings from the Config module
    default_bids_vl = db.get_config('default_bids_vl')
    data_dir = db.get_config('dataDirBasepath')
    # making sure that there is a final / in data_dir
    if not data_dir.endswith('/'):
        data_dir = data_dir + "/"

    # load the BIDS directory
    bids_reader = BidsReader(bids_dir, verbose)
    parsed_ok = (bids_reader.participants_info
                 and bids_reader.cand_sessions_list
                 and bids_reader.cand_session_modalities_list)
    if not parsed_ok:
        message = '\n\tERROR: could not properly parse the following' \
                  'BIDS directory:' + bids_dir + '\n'
        print(message)
        sys.exit(lib.exitcode.UNREADABLE_FILE)

    # create the LORIS_BIDS directory in data_dir based on Name and BIDS version
    loris_bids_root_dir = create_loris_bids_directory(bids_reader, data_dir,
                                                     verbose)

    # loop through subjects
    for bids_subject_info in bids_reader.participants_info:

        # greps BIDS information for the candidate
        bids_id = bids_subject_info['participant_id']
        bids_sessions = bids_reader.cand_sessions_list[bids_id]

        # greps BIDS candidate's info from LORIS (creates the candidate if it
        # does not exist yet in LORIS and the createcand flag is set to true)
        loris_cand_info = grep_or_create_candidate_db_info(
            bids_reader, bids_id, db, createcand, loris_bids_root_dir, verbose)
        cand_id = loris_cand_info['CandID']
        center_id = loris_cand_info['RegistrationCenterID']

        # greps BIDS session's info for the candidate from LORIS (creates the
        # session if it does not exist yet in LORIS and the createvisit is set
        # to true. If no visit in BIDS structure, then use default visit_label
        # stored in the Config module)
        loris_sessions_info = grep_candidate_sessions_info(
            bids_sessions, bids_id, cand_id, loris_bids_root_dir,
            createvisit, verbose, db, default_bids_vl, center_id)

        # read list of modalities per session / candidate and register data
        for row in bids_reader.cand_session_modalities_list:
            bids_session = row['bids_ses_id']
            visit_label = bids_session if bids_session else default_bids_vl
            loris_bids_visit_rel_dir = ('sub-' + row['bids_sub_id'] + '/'
                                        + 'ses-' + visit_label)
            for modality in row['modalities']:
                loris_bids_modality_rel_dir = (loris_bids_visit_rel_dir + '/'
                                               + modality + '/')
                lib.utilities.create_dir(
                    loris_bids_root_dir + loris_bids_modality_rel_dir, verbose)
                if modality == 'eeg':
                    Eeg(bids_reader=bids_reader,
                        bids_sub_id=row['bids_sub_id'],
                        bids_ses_id=row['bids_ses_id'],
                        bids_modality=modality,
                        db=db,
                        verbose=verbose,
                        data_dir=data_dir,
                        default_visit_label=default_bids_vl,
                        loris_bids_eeg_rel_dir=loris_bids_modality_rel_dir,
                        loris_bids_root_dir=loris_bids_root_dir)
                elif modality in ['anat', 'dwi', 'fmap', 'func']:
                    Mri(bids_reader=bids_reader,
                        bids_sub_id=row['bids_sub_id'],
                        bids_ses_id=row['bids_ses_id'],
                        bids_modality=modality,
                        db=db,
                        verbose=verbose,
                        data_dir=data_dir,
                        default_visit_label=default_bids_vl,
                        loris_bids_mri_rel_dir=loris_bids_modality_rel_dir,
                        loris_bids_root_dir=loris_bids_root_dir)

    # disconnect from the database
    db.disconnect()
# Load connection settings and sync QuickBooks data into the MySQL mirror.
conf = ConfigParser()
# FIX: the original `conf.read_file(open('config.ini'))` leaked the file
# handle; a with-block closes it deterministically.
with open('config.ini') as config_fh:
    conf.read_file(config_fh)

# configuration
mysql_host = conf.get('mysql', 'host')
mysql_port = conf.getint('mysql', 'port')
mysql_user = conf.get('mysql', 'user')
mysql_password = conf.get('mysql', 'password')
mysql_db = conf.get('mysql', 'schema')
qodbc_dsn = conf.get('qodbc', 'dsn')

# set defaults
Entity.company_file = conf.get('company', 'file_number')
if(conf.get('company', 'refresh_from')):
    Entity.last_entry_datetime = conf.get('company', 'refresh_from')

rackspace = Database(pymysql.connect(host=mysql_host,
                                     port=mysql_port,
                                     user=mysql_user,
                                     passwd=mysql_password,
                                     db=mysql_db,
                                     use_unicode=True,
                                     charset="utf8"))
quickbooks = Database(pypyodbc.connect('DSN='+qodbc_dsn, autocommit=True))
print('connected')

SalesReceipt(quickbooks,rackspace).sync()
SalesReceiptItem(quickbooks,rackspace).sync()
Inventory(quickbooks,rackspace).sync()

rackspace.disconnect()
quickbooks.disconnect()
print('disconnected')
# Remaining connection settings (host/port are read earlier in the script).
mysql_user = conf.get('mysql', 'user')
mysql_password = conf.get('mysql', 'password')
mysql_db = conf.get('mysql', 'schema')
qodbc_dsn = conf.get('qodbc', 'dsn')

# set defaults
Entity.company_file = conf.get('company', 'file_number')
refresh_from = conf.get('company', 'refresh_from')
if refresh_from:
    Entity.last_entry_datetime = refresh_from

# MySQL mirror ("rackspace") and the QuickBooks ODBC source.
mysql_conn = pymysql.connect(host=mysql_host,
                             port=mysql_port,
                             user=mysql_user,
                             passwd=mysql_password,
                             db=mysql_db,
                             use_unicode=True,
                             charset="utf8")
rackspace = Database(mysql_conn)
quickbooks = Database(pypyodbc.connect('DSN=' + qodbc_dsn, autocommit=True))
print('connected')

# Sync each entity type from QuickBooks into the mirror.
SalesReceipt(quickbooks, rackspace).sync()
SalesReceiptItem(quickbooks, rackspace).sync()
Inventory(quickbooks, rackspace).sync()

rackspace.disconnect()
quickbooks.disconnect()
print('disconnected')
def _run(domains_dic):
    """Scan every domain group in *domains_dic*, export results, and email them.

    :param domains_dic: mapping of group key -> list of domains; each group's
                        results are exported to the same xlsx file under its key
    """
    database = Database(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'submon.db'))
    database.connect()
    database.init()
    now_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    filename = 'SubMon_subdomain_check_' + time.strftime("%Y%m%d_%H%M%S", time.localtime()) + '.xlsx'
    path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "data")
    if not os.path.exists(path):
        os.makedirs(path)

    for key in domains_dic.keys():
        # De-duplicate the group's domain list before scanning.
        domains = list(set(domains_dic[key]))
        if len(domains) > 0:
            logger.sysinfo("Scanning %d domains at %s."
                           % (len(domains), time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            for domain in domains:
                logger.sysinfo("Scanning domain %s." % domain)
                _engines = [_(domain) for _ in engines.values()]
                loop = asyncio.get_event_loop()
                if debug:
                    loop.set_debug(True)
                # Futures are all scheduled up front, so they progress
                # concurrently while the loop drains them one by one.
                for task in [asyncio.ensure_future(_engine.run()) for _engine in _engines]:
                    loop.run_until_complete(task)
                # loop.close()
                ret = set()
                for _engine in _engines:
                    logger.sysinfo("{engine} Found {num} sites".format(
                        engine=_engine.engine_name,
                        num=len(_engine.results['subdomain'])))
                    ret.update(_engine.results['subdomain'])
                logger.sysinfo("Found %d subdomains of %s." % (len(ret), domain))
                for subdomain in ret:
                    database.insert_subdomain(subdomain, None, None, 0, 0, now_time, domain)

                # Probe each discovered subdomain for URL/title/status/length.
                logger.sysinfo('Checking %d subdomains of %s.' % (len(ret), domain))
                curl = Curl()
                curl.load_targets(ret)
                for subdomain, url, title, status, content_length in curl.run():
                    database.update_subdomain_status(subdomain, url, title, status, content_length, now_time)
                logger.sysinfo("Checked subdomains' status of %s." % domain)

            # Export everything recorded for this group to the xlsx report.
            datas = []
            for domain in domains:
                for _row in database.select_mondomain(domain):
                    data = {
                        "subdomain": _row[0],
                        "url": _row[1],
                        "title": _row[2],
                        "status": _row[3],
                        "len": _row[4],
                        "update_time": _row[5],
                        "domain": _row[6]
                    }
                    datas.append(data)
            tocsv(datas, path, filename, key)
            # FIX: corrected "Fineshed" typo in the log message.
            logger.sysinfo("Finished scan %d domains at %s."
                           % (len(domains), time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
        else:
            logger.error("Loading %d domains." % (len(domains)))

    send_smtp(path, filename)
    database.disconnect()
    print()
    print()
class RunTestCase(unittest.TestCase):
    """Tests for Run: construction registers a run id, _save persists scores."""

    def setUp(self):
        # Locate manifest.json / config.json in the repository root
        # (one level above this test file).
        parent_dir = os.path.abspath(
            os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir)
        )
        manifest_path = os.path.join(parent_dir, 'manifest.json')
        config_path = os.path.join(parent_dir, 'config.json')

        with open(config_path, 'r') as file_:
            rawsettings = json.load(file_)
        self.database = Database(rawsettings['options']['datasource'])

        with open(manifest_path, 'r') as file_:
            rawmanifest = json.load(file_)
        self.attributes = Attributes(rawmanifest['attributes'], self.database)

        self.threshold = rawsettings['options']['threshold']
        self.processes = 2

    def test_init(self):
        with tempfile.TemporaryDirectory() as directory:
            try:
                # Act
                run = Run(
                    directory, self.attributes, self.database,
                    self.threshold, self.processes
                )

                # Assert: constructing a Run registers it in the database.
                self.assertIsNotNone(run.run_id)
            finally:
                # Clean up the registered run regardless of outcome.
                self.database.post(
                    'DELETE FROM reaper_runs WHERE id = {0}'.format(run.run_id)
                )
                self.database.disconnect()

    def test_save(self):
        with tempfile.TemporaryDirectory() as directory:
            # Arrange
            rresults = {
                'architecture': 9.9,
                'continuous_integration': True,
                'community': 9,
                'documentation': 9.9,
                'history': 9.9,
                'license': True,
                'management': 9.9,
                'unit_test': 9.9,
                'state': 'active'
            }
            run = Run(
                directory, self.attributes, self.database,
                self.threshold, self.processes
            )

            # Act
            run._save(10868464, 99.99, rresults)

            # Assert: the persisted row round-trips every score field.
            try:
                self.database.connect()
                actual = self.database.get(
                    '''
                        SELECT project_id, architecture,
                            continuous_integration, community,
                            documentation, history, license, management,
                            unit_test, state, score
                        FROM reaper_results
                        WHERE run_id = {0}
                    '''.format(run.run_id)
                )

                expected = [
                    10868464, 9.9, True, 9, 9.9, 9.9,
                    True, 9.9, 9.9, 'active',
                ]
                for index, value in enumerate(expected):
                    self.assertEqual(value, actual[index])
                # Score is stored as a 32-bit float, hence the rounded value.
                self.assertEqual(99.989998, actual[10])
            finally:
                self.database.post(
                    'DELETE FROM reaper_runs WHERE id = {0}'.format(run.run_id)
                )
                self.database.disconnect()