def run(self):
    """Entry point for the background sensor-processing task.

    Fetches and processes record batches until none remain, chaining a
    continuation task when the request deadline approaches, and finishing
    with an error status on instance shutdown or unexpected failure.
    """
    self.start = datetime.now()
    self.setup()
    logging.debug("Starting run %s" % self)
    try:
        while True:
            batch = self.fetchBatch()
            if batch:
                self.runBatch(batch)
                # Raises TooLongError when we are close to the request deadline.
                self.checkDeadline()
            else:
                # No more records -- normal completion.
                self.finish()
                break
    except (TooLongError, DeadlineExceededError):
        logging.debug("Deadline expired, creating new request... Records: %s, Continuations: %s, Last record: %s" % (self.records_processed, self.continuations, self.last_record))
        self.continuations += 1
        # Name the continuation task uniquely so re-adding it is safe.
        task_name = self.sensorprocess.process_task_name(subset="cont_%s" % tools.unixtime())
        tools.safe_add_task(self.run, _name=task_name, _queue="processing-queue-new")
    except Shutdown:
        logging.debug("Finishing because instance shutdown...")
        self.finish(result=PROCESS.ERROR, narrative="Instance shutdown")
    except Exception as e:
        # logging.exception records the traceback in the app log instead of
        # only printing to stderr (consistent with the sibling worker's run()).
        logging.exception("Uncaught error: %s" % e)
        self.finish(result=PROCESS.ERROR, narrative="Processing Error: %s" % e)
def run(self, start_cursor=None):
    """Run (or resume) the sectioned report export.

    Args:
        start_cursor: datastore cursor from a prior continuation task, or
            None on the first pass (headers are only written then).
    """
    self.worker_start = tools.unixtime()
    if self.has_section_files() and len(self.section_gcs_files) != len(self.repeat_sections):
        # Open one GCS file per repeat section. Only create the files that
        # are still missing: the original appended the full set whenever the
        # counts differed, which duplicated handles for any sections that
        # were already open.
        for section_name, _section_questions in self.repeat_sections[len(self.section_gcs_files):]:
            self.section_gcs_files.append(gcs.open(self.getGCSFilename(suffix=section_name), 'w'))
    self.cursor = start_cursor
    self.setProgress({'max': self.count(), 'report': self.report.json()})
    if not start_cursor:
        self.writeHeaders()
    try:
        # This is heavy
        self.writeData()
    except TooLongError:
        # Ran out of request time: flush progress and chain a continuation
        # task that resumes from the saved cursor.
        logging.debug("TooLongError: Going to the next batch")
        if self.report:
            self.finish(reportDone=False)
            tools.safe_add_task(self.run, start_cursor=self._get_cursor(), _queue="worker-queue")
    except Exception as e:  # including DeadlineExceededError
        traceback.print_exc()
        logging.error("Error: %s" % e)
        self.setProgress({'error': "Error occurred: %s" % e, 'status': REPORT.ERROR})
        return
def run(self, start_cursor=None):
    """Execute the report build, resuming from start_cursor when given."""
    self.worker_start = tools.unixtime()
    self.cursor = start_cursor
    # Headers are written on the very first pass only, not on continuations.
    if not start_cursor:
        self.writeHeaders()
    try:
        # Heavy lifting: stream all rows into the report output.
        self.writeData()
    except TooLongError:
        logging.debug("TooLongError: Going to the next batch")
        if self.report:
            # Flush what we have, then chain a continuation task from the
            # current cursor on the report queue.
            self.finish(reportDone=False)
            tools.safe_add_task(self.run,
                                start_cursor=self._get_cursor(),
                                _queue="report-queue")
    except Exception as e:  # including DeadlineExceededError
        traceback.print_exc()
        logging.error("Error: %s" % e)
        self.setProgress({
            'error': "Error occurred: %s" % e,
            'status': REPORT.ERROR
        })
        return
def generate(self, d):
    """Create a Report for the enterprise in d and enqueue its generation task."""
    report_type = self.request.get_range('type', default=REPORT.SENSOR_DATA_REPORT)
    file_type = self.request.get_range('ftype', default=REPORT.CSV)
    target = self.request.get('target')
    specs = tools.getJson(self.request.get('specs_json'))
    report = Report.Create(d['enterprise'], type=report_type, specs=specs, ftype=file_type)
    report.put()
    # Generation is heavy -- hand it off to a background worker task.
    tools.safe_add_task(backgroundReportRun, str(report.key()), target=target, _queue="worker-queue")
    self.json_out(success=True, message="%s generating..." % report.title)
def generate(self, d):
    """Create a Report entity for the current user and enqueue its generation.

    Raises:
        APIError: if no report type is supplied in the request.
    """
    from constants import REPORT
    from handlers import APIError
    from tasks import backgroundReportRun
    report_type = self.request.get_range('type')
    if not report_type:
        raise APIError("No type in report request")
    file_type = self.request.get_range('ftype', default=REPORT.CSV)
    specs = tools.getJson(self.request.get('specs_json'))
    report = Report.Create(self.user, type=report_type, specs=specs, ftype=file_type)
    report.put()
    # Generation runs in a background task on the report queue.
    tools.safe_add_task(backgroundReportRun, report.key.urlsafe(), _queue="report-queue")
    self.set_response(success=True,
                      message="%s generating..." % report.title,
                      data={'report': report.json() if report else None})
def testSafeAddTask(self):
    """safe_add_task should enqueue single tasks and batches of task dicts."""
    # Single task on the default queue (warmup handler doubles as a no-op).
    tools.safe_add_task("/_ah/warmup")
    self.assertTasksInQueue(n=1, queue_names=['default'])
    self.execute_tasks_until_empty()
    # Batch form: a list of {'url', 'params'} dicts on a named queue.
    batch = [
        {'url': "/_ah/warmup", 'params': {'foo': 'bar'}},
        {'url': "/_ah/warmup", 'params': {'foo': 'baz'}},
    ]
    tools.safe_add_task(batch, queue_name='report-queue')
    self.assertTasksInQueue(n=2, queue_names=['report-queue'])
    self.execute_tasks_until_empty()
    self.assertTasksInQueue(n=0)
def run(self):
    """Process batches until exhausted, chaining a new task near the deadline."""
    self.start = datetime.now()
    self.setup()
    try:
        batch = self.fetchBatch()
        while batch:
            self.runBatch(batch)
            # Raises TooLongError when the request deadline is close.
            self.checkDeadline()
            batch = self.fetchBatch()
        # No more batches -- normal completion.
        self.finish()
    except (TooLongError, DeadlineExceededError):
        logging.debug("Deadline expired, creating new request...")
        tools.safe_add_task(self.run, _queue="worker-queue")
    except Exception as e:
        logging.exception("Uncaught error: %s" % e)
        self.finish(result=PROCESS.ERROR, narrative="Processing Error: %s" % e)
def run(self, start_cursor=None):
    """Build the report, continuing from a prior cursor when resuming."""
    self.worker_start = tools.unixtime()
    self.cursor = start_cursor
    first_pass = not start_cursor
    if first_pass:
        # Header row only goes out once, at the start of the export.
        self.writeHeaders()
    try:
        self.writeData()  # the heavy part
    except TooLongError:
        logging.debug("TooLongError: Going to the next batch")
        if self.report:
            # Persist progress and re-enqueue ourselves from the saved cursor.
            self.finish(reportDone=False)
            tools.safe_add_task(
                self.run, start_cursor=self._get_cursor(), _queue="report-queue")
    except Exception as e:  # including DeadlineExceededError
        traceback.print_exc()
        logging.error("Error: %s" % e)
        self.setProgress({
            'error': "Error occurred: %s" % e,
            'status': REPORT.ERROR
        })
        return
try:
    # This is heavy: streams every row of the report.
    self.writeData()
except TooLongError:
    # Out of request time: flush progress and chain a continuation task
    # that resumes from the saved cursor.
    logging.debug("TooLongError: Going to the next batch")
    if self.report:
        self.finish(reportDone=False)
        # NOTE(review): continuation scheduling assumed to be guarded by
        # self.report (source was flattened) -- confirm against history.
        tools.safe_add_task(self.run, start_cursor=self._get_cursor(), _queue="worker-queue")
except Exception, e:  # including DeadlineExceededError
    traceback.print_exc()
    logging.error("Error: %s" % e)
    self.setProgress({'error': "Error occurred: %s" % e, 'status': REPORT.ERROR})
    return
else:
    # Completed without hitting the deadline -- finalize in a follow-up task.
    tools.safe_add_task(self.finish)

def writeHeaders(self):
    # Write the header row(s) for the configured export format.
    if self.report.ftype == REPORT.CSV:
        string = tools.normalize_to_ascii('"'+'","'.join(self.headers)+'"\n')
        self.gcs_file.write(string)
        if self.has_section_files():
            # One header row per repeat-section GCS file.
            for section_gcs_file, section_headers in zip(self.section_gcs_files, self.section_headers):
                string = tools.normalize_to_ascii('"'+'","'.join(section_headers)+'"\n')
                section_gcs_file.write(string)
    elif self.report.ftype == REPORT.XLS:
        for i, header in enumerate(self.headers):
            self.ws.write(0, i, header, self.xls_styles['bold'])
        if self.has_section_files():
            # NOTE(review): assumed to be a sibling of the loop above, not
            # nested inside it (source was flattened) -- confirm.
            for i, header in enumerate(self.section_headers):
                self.section_ws.write(0, i, header, self.xls_styles['bold'])
try:
    # This is heavy: streams every row of the report.
    self.writeData()
except TooLongError:
    # Out of request time: flush progress and chain a continuation task.
    logging.debug("TooLongError: Going to the next batch")
    if self.report:
        self.finish(reportDone=False)
        # NOTE(review): continuation scheduling assumed to be guarded by
        # self.report (source was flattened) -- confirm against history.
        tools.safe_add_task(self.run, start_cursor=self._get_cursor(), _queue="worker-queue")
except Exception, e:  # including DeadlineExceededError
    traceback.print_exc()
    logging.error("Error: %s" % e)
    self.setProgress({'error': "Error occurred: %s" % e, 'status': REPORT.ERROR})
    return
else:
    # Completed without hitting the deadline -- finalize in a follow-up task.
    tools.safe_add_task(self.finish)

def writeHeaders(self):
    # Emit the header row for the chosen export format.
    if self.report.ftype == REPORT.CSV:
        string = tools.normalize_to_ascii('"'+'","'.join(self.headers)+'"\n')
        self.gcs_file.write(string)
    elif self.report.ftype == REPORT.XLS:
        for i, header in enumerate(self.headers):
            self.ws.write(0, i, header, self.xls_styles['bold'])

def writeData(self):
    # Batch-fetch loop over the report query.
    # NOTE(review): this method is truncated at the edge of the visible
    # source -- the loop body continues beyond this chunk.
    total_i = self.counters['run']
    while True:
        self.query = self._get_query()
        if self.query:
            entities = self.query.fetch(limit=self.batch_size)