# Standard-library imports these constructors rely on; MPLogger, clientsocket,
# and the get_version/get_configuration_string helpers used below are
# OpenWPM-internal and assumed to be imported elsewhere in the module.
import copy
import os
import sqlite3
import threading


def __init__(self, db_path, browser_params, num_browsers,
             log_file='~/openwpm.log', process_watchdog=False,
             task_description=None):
    # Flow control
    self.closing = False
    self.failure_flag = False
    self.threadlock = threading.Lock()
    self.failurecount = 0

    # sets up the information needed to write to the database
    self.desc = task_description
    self.db_path = db_path
    self.log_file = log_file
    self.process_watchdog = process_watchdog

    # sets up the crawl data database
    self.db = sqlite3.connect(db_path)
    with open(os.path.join(os.path.dirname(__file__), 'schema.sql'), 'r') as f:
        self.db.executescript(f.read())

    # prepares browser settings
    self.num_browsers = num_browsers
    # special case: for a singleton dictionary, we perform deep copies so
    # that the number of dicts is <num_browsers>
    if type(browser_params) is not list:
        browser_params = [copy.deepcopy(browser_params)
                          for i in xrange(0, num_browsers)]
    if len(browser_params) != num_browsers:
        raise Exception("Number of browser parameter dictionaries is "
                        "not the same as <num_browsers>")

    # sets up the logging server + connects a client
    self.logging_status_queue = None
    self.loggingserver = self._launch_loggingserver()
    # socket location: (address, port)
    self.logger_address = self.logging_status_queue.get()
    self.logger = MPLogger.loggingclient(*self.logger_address)

    # sets up the DataAggregator + associated queues
    # queue used for sending a graceful KILL command to the DataAggregator
    self.aggregator_status_queue = None
    self.data_aggregator = self._launch_data_aggregator()
    # socket location: (address, port)
    self.aggregator_address = self.aggregator_status_queue.get()

    # open client socket
    self.sock = clientsocket()
    self.sock.connect(self.aggregator_address[0],
                      self.aggregator_address[1])

    # update task table
    cur = self.db.cursor()
    cur.execute("INSERT INTO task (description) VALUES (?)", (self.desc,))
    self.db.commit()
    self.task_id = cur.lastrowid

    # sets up the BrowserManager(s) + associated queues
    self.browsers = self._initialize_browsers(browser_params)  # List of the Browser(s)
    self._launch_browsers()

    # start the manager watchdog
    thread = threading.Thread(target=self._manager_watchdog, args=())
    thread.daemon = True
    thread.start()
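# A minimal usage sketch for the early constructor above. The import path and
# the browser_params key are assumptions for illustration; only the signature
# (db_path, browser_params, num_browsers, ...) comes from the snippet itself.
# Passing a single dict exercises the deep-copy branch that replicates it
# once per browser.

from automation.TaskManager import TaskManager  # assumed module path

browser_params = {'headless': True}  # hypothetical parameter key
manager = TaskManager('crawl-data.sqlite',  # crawl database path
                      browser_params,       # deep-copied per browser
                      num_browsers=2,
                      task_description='example crawl')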
def __init__(self, manager_params, browser_params,
             process_watchdog=False, task_description=None):
    # Make paths absolute in manager_params
    manager_params['data_directory'] = os.path.expanduser(
        manager_params['data_directory'])
    manager_params['log_directory'] = os.path.expanduser(
        manager_params['log_directory'])
    manager_params['database_name'] = os.path.join(
        manager_params['data_directory'], manager_params['database_name'])
    manager_params['log_file'] = os.path.join(
        manager_params['log_directory'], manager_params['log_file'])
    self.manager_params = manager_params

    # Flow control
    self.closing = False
    self.failure_flag = False
    self.threadlock = threading.Lock()
    self.failurecount = 0
    self.desc = task_description
    self.process_watchdog = process_watchdog

    # sets up the crawl data database
    db_path = manager_params['database_name']
    if not os.path.exists(manager_params['data_directory']):
        os.mkdir(manager_params['data_directory'])
    self.db = sqlite3.connect(db_path)
    with open(os.path.join(os.path.dirname(__file__), 'schema.sql'), 'r') as f:
        self.db.executescript(f.read())

    # check size of parameter dictionary
    self.num_browsers = manager_params['num_browsers']
    if len(browser_params) != self.num_browsers:
        raise Exception("Number of <browser_params> dicts is not the same "
                        "as manager_params['num_browsers']")

    # sets up the logging server + connects a client
    self.logging_status_queue = None
    self.loggingserver = self._launch_loggingserver()
    # socket location: (address, port)
    self.manager_params['logger_address'] = self.logging_status_queue.get()
    self.logger = MPLogger.loggingclient(
        *self.manager_params['logger_address'])

    # Mark if LDBAggregator is needed (if js is enabled on any browser)
    self.ldb_enabled = False
    for params in browser_params:
        if params['save_javascript']:
            self.ldb_enabled = True
            break

    # Initialize the data aggregators
    self._launch_aggregators()

    # open client socket
    self.sock = clientsocket()
    self.sock.connect(*self.manager_params['aggregator_address'])

    # update task table
    cur = self.db.cursor()
    cur.execute("INSERT INTO task (description) VALUES (?)", (self.desc,))
    self.db.commit()
    self.task_id = cur.lastrowid

    # sets up the BrowserManager(s) + associated queues
    self.browsers = self._initialize_browsers(browser_params)  # List of the Browser(s)
    self._launch_browsers()

    # start the manager watchdog
    thread = threading.Thread(target=self._manager_watchdog, args=())
    thread.daemon = True
    thread.start()
def __init__(self, manager_params, browser_params, process_watchdog=False):
    # Make paths absolute in manager_params
    for path in ['data_directory', 'log_directory']:
        if manager_params[path] is not None:
            manager_params[path] = os.path.expanduser(manager_params[path])
    manager_params['database_name'] = os.path.join(
        manager_params['data_directory'], manager_params['database_name'])
    manager_params['log_file'] = os.path.join(
        manager_params['log_directory'], manager_params['log_file'])
    manager_params['screenshot_path'] = os.path.join(
        manager_params['data_directory'], 'screenshots')
    manager_params['source_dump_path'] = os.path.join(
        manager_params['data_directory'], 'sources')
    self.manager_params = manager_params

    # Create data directories if they do not exist
    if not os.path.exists(manager_params['screenshot_path']):
        os.makedirs(manager_params['screenshot_path'])
    if not os.path.exists(manager_params['source_dump_path']):
        os.makedirs(manager_params['source_dump_path'])

    # check size of parameter dictionary
    self.num_browsers = manager_params['num_browsers']
    if len(browser_params) != self.num_browsers:
        raise Exception("Number of <browser_params> dicts is not the same "
                        "as manager_params['num_browsers']")

    # Flow control
    self.closing = False
    self.failure_status = None
    self.threadlock = threading.Lock()
    self.failurecount = 0
    if manager_params['failure_limit'] is not None:
        self.failure_limit = manager_params['failure_limit']
    else:
        self.failure_limit = self.num_browsers * 2 + 10
    self.process_watchdog = process_watchdog

    # sets up the crawl data database
    db_path = manager_params['database_name']
    if not os.path.exists(manager_params['data_directory']):
        os.mkdir(manager_params['data_directory'])
    self.db = sqlite3.connect(db_path)
    with open(os.path.join(os.path.dirname(__file__), 'schema.sql'), 'r') as f:
        self.db.executescript(f.read())
    self.db.commit()

    # sets up the logging server + connects a client
    self.logging_status_queue = None
    self.loggingserver = self._launch_loggingserver()
    # socket location: (address, port)
    self.manager_params['logger_address'] = self.logging_status_queue.get()
    self.logger = MPLogger.loggingclient(
        *self.manager_params['logger_address'])

    # Mark if LDBAggregator is needed (if js is enabled on any browser)
    self.ldb_enabled = False
    for params in browser_params:
        if params['save_javascript'] or params['save_javascript_proxy']:
            self.ldb_enabled = True
            break

    # Initialize the data aggregators
    self._launch_aggregators()

    # open client socket
    self.sock = clientsocket(serialization='dill')
    self.sock.connect(*self.manager_params['aggregator_address'])

    self._save_configuration(browser_params)

    # read the last used site visit id
    cur = self.db.cursor()
    cur.execute("SELECT MAX(visit_id) FROM site_visits")
    last_visit_id = cur.fetchone()[0]
    if last_visit_id is None:
        last_visit_id = 0
    self.next_visit_id = last_visit_id + 1

    # sets up the BrowserManager(s) + associated queues
    self.browsers = self._initialize_browsers(browser_params)  # List of the Browser(s)
    self._launch_browsers()

    # start the manager watchdog
    thread = threading.Thread(target=self._manager_watchdog, args=())
    thread.daemon = True
    thread.start()
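# The dictionary shapes below are inferred from the keys this version reads;
# the concrete values are placeholders, not defaults from the source. Each
# browser_params dict must at least carry the 'save_javascript' and
# 'save_javascript_proxy' flags that gate the LDBAggregator check above.

manager_params = {
    'data_directory': '~/openwpm',    # expanded via os.path.expanduser
    'log_directory': '~/openwpm',
    'database_name': 'crawl-data.sqlite',
    'log_file': 'openwpm.log',
    'num_browsers': 2,
    'failure_limit': None,            # None -> num_browsers * 2 + 10
}
browser_params = [{'save_javascript': False, 'save_javascript_proxy': False}
                  for _ in range(manager_params['num_browsers'])]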
def __init__(self, manager_params, browser_params, process_watchdog=False):
    # Make paths absolute in manager_params
    for path in ['data_directory', 'log_directory']:
        if manager_params[path] is not None:
            manager_params[path] = os.path.expanduser(manager_params[path])
    manager_params['database_name'] = os.path.join(
        manager_params['data_directory'], manager_params['database_name'])
    manager_params['log_file'] = os.path.join(
        manager_params['log_directory'], manager_params['log_file'])
    manager_params['screenshot_path'] = os.path.join(
        manager_params['data_directory'], 'screenshots')
    manager_params['source_dump_path'] = os.path.join(
        manager_params['data_directory'], 'sources')
    self.manager_params = manager_params
    self.browser_params = browser_params

    # Create data directories if they do not exist
    if not os.path.exists(manager_params['screenshot_path']):
        os.makedirs(manager_params['screenshot_path'])
    if not os.path.exists(manager_params['source_dump_path']):
        os.makedirs(manager_params['source_dump_path'])

    # check size of parameter dictionary
    self.num_browsers = manager_params['num_browsers']
    if len(browser_params) != self.num_browsers:
        raise Exception("Number of <browser_params> dicts is not the same "
                        "as manager_params['num_browsers']")

    # Flow control
    self.closing = False
    self.failure_status = None
    self.threadlock = threading.Lock()
    self.failurecount = 0
    if manager_params['failure_limit'] is not None:
        self.failure_limit = manager_params['failure_limit']
    else:
        self.failure_limit = self.num_browsers * 2 + 10
    self.process_watchdog = process_watchdog

    # sets up the logging server + connects a client
    self.logging_status_queue = None
    self.loggingserver = self._launch_loggingserver()
    # socket location: (address, port)
    self.manager_params['logger_address'] = self.logging_status_queue.get()
    self.logger = MPLogger.loggingclient(
        *self.manager_params['logger_address'])

    # Initialize the data aggregators
    self._launch_aggregators()

    # sets up the BrowserManager(s) + associated queues
    self.browsers = self._initialize_browsers(browser_params)
    self._launch_browsers()

    # start the manager watchdog
    thread = threading.Thread(target=self._manager_watchdog, args=())
    thread.daemon = True
    thread.start()

    # Save crawl config information to database
    openwpm_v, browser_v = get_version()
    self.data_aggregator.save_configuration(openwpm_v, browser_v)
    self.logger.info(
        get_configuration_string(self.manager_params, browser_params,
                                 (openwpm_v, browser_v)))
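# Putting the pieces together for this latest signature: a hedged sketch that
# reuses the manager_params/browser_params shapes sketched earlier. TaskManager
# and its import path stand in for whatever class owns this __init__; they are
# assumptions, not confirmed by the snippet.

from automation.TaskManager import TaskManager  # assumed module path

manager = TaskManager(manager_params, browser_params, process_watchdog=False)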