def doLogReport(self, jobName, jobToken, collection, reportName, reportDate,
                withResults, topCount, diagnosticTerms, update):
  """Worker-thread body: build and run the summary-report command.

  Generates a summary report for `collection` via log_report.py, writing
  into fresh temp files first, then hands the outcome to handleResult().
  """
  # Final destinations for the report and its validity marker.
  (html_file, valid_file) = liblog.get_report_filenames(
      self.entConfig, liblog.SUMMARY_REPORT, reportName, collection)
  liblog.MakeGoogleDir(self.entConfig, os.path.dirname(html_file))

  # Scratch files the report generator writes before promotion.
  new_html_file = tempfile.mktemp('.report')
  new_valid_file = tempfile.mktemp('.report_valid')

  # Shell-quote everything user-influenced; withResults/topCount are
  # passed through unquoted exactly as before.
  args = [
      commands.mkarg(self.entConfig.GetEntHome()),
      commands.mkarg(collection),
      commands.mkarg(reportDate),
      withResults,
      topCount,
      commands.mkarg(diagnosticTerms),
      commands.mkarg(html_file),
      commands.mkarg(valid_file),
      commands.mkarg(new_html_file),
      commands.mkarg(new_valid_file),
  ]

  cmd = ('. %s && cd %s/enterprise/legacy/logs && '
         'alarm %s nice -n 15 ./log_report.py %s' %
         (self.cfg.getGlobalParam('ENTERPRISE_BASHRC'),
          self.cfg.getGlobalParam('MAIN_GOOGLE3_DIR'),
          COMMAND_TIMEOUT_PERIOD,
          string.join(args, ' ')))
  logging.info('doLogReport(): CMD = %s' % cmd)

  returnCode = E.system(cmd)
  self.handleResult(jobName, jobToken, returnCode, liblog.SUMMARY_REPORT,
                    collection, reportName, update, html_file, valid_file,
                    new_html_file, new_valid_file)
def doCaptureCrawlQueue(self, encQueueName, captionTime, numUrlRequested,
                        nextHours, host):
  """The actual work done in a worker thread to capture crawl queue."""
  # Output locations for the captured queue and its index/stat file.
  result_file = self.getCrawlQueueFileName(encQueueName)
  stat_file = self.getCrawlQueueIndexFileName(encQueueName)
  # Resolve the backend servers to a comma-separated host:port list.
  backend_server_name = 'supergsa_main'
  servers = (self.entConfig.GetServerManager().
             Set(backend_server_name).Servers())
  # NOTE: Python 2 tuple-parameter lambda syntax; invalid in Python 3.
  server_str = ','.join(map(lambda(x): '%s:%d' % (x.host(), x.port()),
                            servers))
  # Build the crawlmanager_client.py argument vector; mkarg shell-quotes
  # each value. numUrlRequested/nextHours are passed through unquoted.
  args = []
  args.append(commands.mkarg(self.entConfig.GetEntHome()))
  args.append('crawlqueue')
  args.append(commands.mkarg(server_str))
  args.append(commands.mkarg(result_file))
  args.append(commands.mkarg(stat_file))
  args.append(commands.mkarg(encQueueName))
  args.append(commands.mkarg(captionTime))
  args.append(numUrlRequested)
  args.append(nextHours)
  args.append(commands.mkarg(host))
  # `alarm 18000` bounds the capture at 5 hours of wall-clock time.
  cmd = ('. %s && cd %s/enterprise/legacy/util && alarm 18000 ' +
         './crawlmanager_client.py %s') % (
      self.cfg.getGlobalParam('ENTERPRISE_BASHRC'),
      self.cfg.getGlobalParam('MAIN_GOOGLE3_DIR'),
      string.join(args, ' '))
  logging.info('doCaptureCrawlQueue cmd: %s' % cmd)
  result = E.system(cmd)
  # E.system returns a raw wait status; reduce it to the exit code only
  # if the child actually exited (as opposed to being signalled).
  exited = os.WIFEXITED(result)
  if exited:
    result = os.WEXITSTATUS(result)
  # Update the shared job table under the lock. An entry still present in
  # self.runningJob means the job was not cancelled while we ran; a
  # missing entry means it was aborted/orphaned and we only log.
  self.joblock.acquire()
  try:
    if exited and result == SUCCESS:
      if self.runningJob.has_key(encQueueName):
        del self.runningJob[encQueueName]
        self.setQueueCompleteState(encQueueName,
                                   C.CRAWLQUEUE_STATUS_COMPLETE)
        logging.info('Crawl queue %s generated correctly' % encQueueName)
      else:
        logging.error(('Running job for queue %s complete, ' +
                       'but it was aborted.') % encQueueName)
    else:
      # Failure (or killed by signal): discard partial output and, if the
      # job is still registered, record the failure state.
      self.RemoveOldQueue(encQueueName)
      if self.runningJob.has_key(encQueueName):
        del self.runningJob[encQueueName]
        self.setQueueCompleteState(encQueueName,
                                   C.CRAWLQUEUE_STATUS_FAILURE)
        logging.error('Error running command [%s]' % cmd)
      else:
        logging.info(('Running job for queue %s failed, ' +
                      'but it was orphaned.') % encQueueName)
  finally:
    self.joblock.release()
def __init__(self, cfg, box_key_dir, license_key_dir):
  """Initialize license state: counters, scratch dir, and decryptor.

  Args:
    cfg: global configuration object (provides getGlobalParam).
    box_key_dir: directory holding the box public/private keyring files.
    license_key_dir: directory holding the Google license public keyring.
  """
  self.cfg = cfg
  # `true`/`false` are project-level constants defined elsewhere.
  self.inInitialization = true
  self.killingIsRunning = false

  # License-counter files, guarded by counter_lock.
  self.counter_lock = threading.Lock()
  self.counter_file = cfg.getGlobalParam('ENT_LICENSE_COUNTER_FILE')
  self.counter_back_file = cfg.getGlobalParam(
      'ENT_LICENSE_COUNTER_FILE_BACKUP')

  self.parser = ent_license.LicenseParser()

  # Fresh scratch directory under DATADIR; wipe any stale ones first.
  data_dir = cfg.getGlobalParam('DATADIR')
  working_dir = E.mktemp(data_dir, "license_manager")
  E.system('rm -rf %s/@*.0license_manager; mkdir -p %s' % (
      data_dir, working_dir))

  # Keyring locations for the decryptor.
  pub_keyring = E.joinpaths([box_key_dir, "ent_box_key.pub"])
  pri_keyring = E.joinpaths([box_key_dir, "ent_box_key.pri"])
  google_keyring = E.joinpaths([license_key_dir, "google_license_key.pub"])
  self.decryptor = ent_license.LicenseDecryptor(
      working_dir, pub_keyring, pri_keyring, google_keyring)
def doLogDump(self, jobName, jobToken, collection, reportName, reportDate,
              update):
  """Worker-thread body: build and run the raw-log-report command.

  Runs apache_log.py for `collection`, redirecting its stdout into a
  scratch HTML file under /export/hda3, then hands the outcome to
  handleResult().
  """
  (html_file, valid_file) = liblog.get_report_filenames(
      self.entConfig, liblog.RAW_REPORT, reportName, collection)
  liblog.MakeGoogleDir(self.entConfig, os.path.dirname(html_file))

  # TODO: switch to tempfile.mkstemp(dir=...) once on python2.4 — mktemp
  # is racy. The /tmp-based name is prefixed with /export/hda3 because
  # the / partition is tiny; since mktemp only checks /tmp, re-draw until
  # the prefixed path is actually unused.
  new_html_file = '/export/hda3' + tempfile.mktemp('.log')
  while os.path.exists(new_html_file):
    new_html_file = '/export/hda3' + tempfile.mktemp('.log')
  new_valid_file = tempfile.mktemp('.log_valid')

  # Shell-quoted positional arguments for apache_log.py.
  args = [
      commands.mkarg(collection),
      commands.mkarg(reportDate),
      commands.mkarg(html_file),
      commands.mkarg(valid_file),
      commands.mkarg(new_valid_file),
  ]

  cmd = ('(. %s && cd %s/enterprise/legacy/logs && '
         'alarm %s nice -n 15 ./apache_log.py %s %s) > %s' %
         (self.cfg.getGlobalParam('ENTERPRISE_BASHRC'),
          self.cfg.getGlobalParam('MAIN_GOOGLE3_DIR'),
          COMMAND_TIMEOUT_PERIOD,
          commands.mkarg(self.cfg.globalParams.GetEntHome()),
          string.join(args, ' '),
          commands.mkarg(new_html_file)))
  logging.info('doLogDump(): CMD = %s' % cmd)

  returnCode = E.system(cmd)
  self.handleResult(jobName, jobToken, returnCode, liblog.RAW_REPORT,
                    collection, reportName, update, html_file, valid_file,
                    new_html_file, new_valid_file)