def fetch_builder(buildbot_root, builder):
    """Fetches data about builder, if not already fetched."""
    response = urlfetch.fetch('%s/builders/%s/builds/_all' % (
        buildbot_root, urllib.quote(builder)), deadline=URLFETCH_DEADLINE)
    if not response or response.status_code != 200:
        return
    for build in json.loads(response.content).itervalues():
        # Only process complete builds.
        if ('times' not in build or len(build['times']) < 2 or
                not build['times'][0] or not build['times'][1]):
            continue
        key_name = Build.to_key_name(buildbot_root, builder,
                                     build['number'], build['times'])

        def tx_build():
            if not Build.get_by_key_name(key_name):
                deferred.defer(fetch_build, buildbot_root, builder,
                               build['number'], _transactional=True,
                               _queue='buildbotfetch')

        db.run_in_transaction_custom_retries(10, tx_build)

def emit(self, record):
    """Log an error to the datastore, if applicable.

    Args:
      record: The logging.LogRecord object.
        See http://docs.python.org/library/logging.html#logging.LogRecord
    """
    try:
        if not record.exc_info:
            return
        signature = self.__GetSignature(record.exc_info)
        old_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace('')
            # memcache.add only succeeds for the first report of this
            # signature within log_interval, throttling datastore writes.
            if not memcache.add(signature, None, self.log_interval):
                return
            db.run_in_transaction_custom_retries(
                1, self.__EmitTx, signature, record.exc_info)
        finally:
            namespace_manager.set_namespace(old_namespace)
    except Exception:
        self.handleError(record)

def mail_queue_expander(request):
    BATCH_SIZE = 5
    edition = db.get(request.form['edition'])
    if not edition:
        # Nothing to expand if the edition is gone.
        return
    page = int(request.form.get('page', 0))
    subscriber_q = Query(subscriptions.models.Subscription,
                         keys_only=True).filter(
                             'site =', edition.site).filter('active =', True)
    if 'cursor' in request.form:
        subscriber_q = subscriber_q.with_cursor(request.form['cursor'])
    subscribers = subscriber_q.fetch(BATCH_SIZE)
    if not subscribers:
        edition.status = 'complete'
        edition.put()
        return
    # Chain a task for the next page before queueing sends for this batch.
    task = Task(params={'edition': edition.key(),
                        'cursor': subscriber_q.cursor(),
                        'page': page + 1},
                name="%s-%s-%s-%s" % (edition.site.slug, edition.issue_num,
                                      edition.publish_after.strftime(
                                          "%Y%j%H%M-%S"),
                                      page + 1))
    try:
        MailQueueExpander.add(task)
    except (TaskAlreadyExistsError, TombstonedTaskError):
        raise
    for sub in subscribers:
        def _tx():
            pending_email = PendingEmail(subscription=sub, edition=edition)
            db.put(pending_email)
            SendNewsletter.add(
                Task(params={'pending_email': pending_email.key()}),
                transactional=True)
        db.run_in_transaction_custom_retries(10, _tx)

def post(self):
    reqstart = time.time()
    keys = self.request.get('keys')
    amount = int(self.request.get('value'))
    keys = keys.split(':')
    error = False
    total = 0
    timings = []
    for key in keys:
        try:
            start = time.time()
            db.run_in_transaction_custom_retries(
                1, increment_counter, key, amount, False)
            end = time.time()
            timings.append(end - start)
            total += end - start
        except Exception:
            error = True
    reqend = time.time()
    if error:
        self.response.out.write("Success:False")
    else:
        self.response.out.write("Success:True\n")
        self.response.out.write("Start time:" + str(reqstart) + '\n')
        self.response.out.write("End time:" + str(reqend) + '\n')
        self.response.out.write("Total Time Taken:" + str(reqend - reqstart) +
                                '\n')
        self.response.out.write("Cumulative DB Time Taken:" + str(total) +
                                "\n")
        self.response.out.write("Number of Keys:" + str(len(timings)) + "\n")
        self.response.out.write("Timings:")
        self.response.out.write(','.join(str(t) for t in timings))
        self.response.out.write('\n')

def log(self, error):
    """Log an error to the datastore, if applicable."""
    signature = self.__GetSignature(error)
    # memcache.add only succeeds for the first report of this signature
    # within log_interval, throttling how often the same error is written.
    if not memcache.add(signature, None, self.log_interval):
        return
    db.run_in_transaction_custom_retries(1, self.__EmitTx, signature, error)

def post(self):
    parent_key = self.request.get('parent')
    child_key = self.request.get('child')
    success = True
    start = time.time()
    try:
        db.run_in_transaction_custom_retries(
            1, create_parent_child, parent_key, child_key)
    except Exception:
        success = False
    end = time.time()
    self.response.out.write("Success: %s\n" % str(success))
    self.response.out.write("Time Taken: " + str(end - start) + "\n")

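The create_parent_child function isn't shown in this snippet; a minimal sketch of what it might look like, assuming hypothetical Parent and Child models where the child is keyed under the parent so that one transaction can cover both writes:

from google.appengine.ext import db

class Parent(db.Model):
    pass  # hypothetical model, for illustration only

class Child(db.Model):
    pass  # hypothetical model, for illustration only

def create_parent_child(parent_key, child_key):
    # Keying the child under the parent keeps both entities in a single
    # entity group, which is what lets one datastore transaction write both.
    parent = Parent(key_name=parent_key)
    parent.put()
    child = Child(key_name=child_key, parent=parent)
    child.put()
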
def draw_static_tile(user, mapimage_key, zoom, northlat, westlng,
                     offset_x_px, offset_y_px):
    # Do the hard work of drawing the tiles in parallel.
    new_tile = tile.CustomTile(user, zoom, northlat, westlng,
                               offset_x_px, offset_y_px)

    def compose_and_save(key, tile, x, y):
        # But this has to be done in a transaction - otherwise the different
        # threads will overwrite each other's progress on the shared mapimage.
        mapimage = db.get(key)
        input_tuples = [(tile.image_out(), x, y, 1.0, images.TOP_LEFT)]
        if mapimage.img:
            input_tuples.append((mapimage.img, 0, 0, 1.0, images.TOP_LEFT))
        img = images.composite(inputs=input_tuples, width=mapimage.width,
                               height=mapimage.height, color=0,
                               output_encoding=images.PNG)
        # Redraw main image every time to show progress.
        mapimage.img = db.Blob(img)
        mapimage.tiles_remaining -= 1
        mapimage.last_updated = datetime.now()
        mapimage.put()

    db.run_in_transaction_custom_retries(10, compose_and_save, mapimage_key,
                                         new_tile, offset_x_px, offset_y_px)

def post(self):
    key_name = self.request.get('key')
    amount = int(self.request.get('value'))
    error = False
    start = time.time()
    try:
        db.run_in_transaction_custom_retries(
            1, increment_counter, key_name, amount, False)
    except Exception:
        error = True
    end = time.time()
    if error:
        self.response.out.write("Success:False" + "\n")
    else:
        self.response.out.write("Start Time:" + str(start) + "\n")
        self.response.out.write("End Time:" + str(end) + "\n")
        self.response.out.write("Time Taken:" + str(end - start) + "\n")

def increment(name):
    """Increment the value for a given sharded counter.

    Parameters:
      name - The name of the counter
    """
    config = GeneralCounterShardConfig.get_or_insert(name, name=name)

    def txn():
        # Pick a shard at random; spreading writes across shards avoids
        # contention on any single entity group.
        index = random.randint(0, config.num_shards - 1)
        shard_name = name + str(index)
        counter = GeneralCounterShard.get_by_key_name(shard_name)
        if counter is None:
            counter = GeneralCounterShard(key_name=shard_name, name=name)
        counter.count += 1
        counter.put()

    db.run_in_transaction_custom_retries(100, txn)
    memcache.incr(name, initial_value=0)

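A read-side companion sketch for the sharded counter above, assuming the same GeneralCounterShard model and the memcache entry that memcache.incr keeps warm; the shards are only summed on a cache miss:

from google.appengine.api import memcache

def get_count(name):
    # Serve the cached total when available; otherwise sum every shard
    # for this counter name and repopulate the cache for a short while.
    total = memcache.get(name)
    if total is None:
        total = 0
        for counter in GeneralCounterShard.all().filter('name =', name):
            total += counter.count
        memcache.add(name, total, 60)
    return total
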
def GetNextCounter(cls, counterName):
    def tx():
        temp = cls.get_by_key_name(counterName)
        if not temp:
            temp = cls(key_name=counterName, counter=0)
        temp.counter += 1
        temp.put()
        return temp.counter

    return db.run_in_transaction_custom_retries(10, tx)

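A self-contained usage sketch; the InvoiceCounter model below is hypothetical, standing in for whatever cls this classmethod is defined on. Because the read-increment-write cycle runs inside the transaction, concurrent callers always receive distinct values:

from google.appengine.ext import db

class InvoiceCounter(db.Model):
    # Hypothetical model for illustration; mirrors the cls used above.
    counter = db.IntegerProperty(default=0)

    @classmethod
    def GetNextCounter(cls, counterName):
        def tx():
            temp = cls.get_by_key_name(counterName)
            if not temp:
                temp = cls(key_name=counterName, counter=0)
            temp.counter += 1
            temp.put()
            return temp.counter
        return db.run_in_transaction_custom_retries(10, tx)

# Up to 10 retries absorb contention on the single counter entity, so two
# simultaneous requests get e.g. 1 and 2, never the same number.
invoice_id = InvoiceCounter.GetNextCounter('invoice')
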
def update_suppression_summary(suppression_result):
    key_name = MemorySuppressionSummary.to_key_name(suppression_result)
    MemorySuppressionSummary.get_or_insert(
        key_name,
        monthly_timestamp=suppression_result.time_finished.date().replace(
            day=1),
        buildbot_root=suppression_result.build_step.buildbot_root,
        builder=suppression_result.build_step.builder,
        step_name=suppression_result.build_step.step_name,
        name=suppression_result.name,
        count=0)

    def tx_summary():
        summary = MemorySuppressionSummary.get_by_key_name(key_name)
        summary.count += 1
        summary.put()

    db.run_in_transaction_custom_retries(10, tx_summary)

def fetch_step(step_key, stdio_url, parse_gtest, parse_suppression):
    """Fetches data about a single build step."""
    step = BuildStep.get(step_key)
    if step.is_fetched:
        return
    step.fetch_timestamp = datetime.datetime.now()
    step.put()
    try:
        stdio_response = urlfetch.fetch(stdio_url, deadline=URLFETCH_DEADLINE)
    except urlfetch.ResponseTooLargeError:
        # Workaround for
        # http://code.google.com/p/googleappengine/issues/detail?id=5686
        step.is_fetched = True
        step.is_too_large = True
        step.put()
        return
    if not stdio_response or stdio_response.status_code != 200:
        return
    gs_filename = '/chromium-build-logs/logs/%d/%d/%s' % (
        step.fetch_timestamp.year, step.fetch_timestamp.month, str(step_key))
    with cloudstorage.open(gs_filename, 'w',
                           content_type='text/html') as gs_file:
        gs_file.write(stdio_response.content)

    def tx_step():
        step = BuildStep.get(step_key)
        if step.is_fetched:
            return
        step.log_gs = gs_filename
        step.is_fetched = True
        step.put()
        if parse_gtest and step.status in PARSEABLE_STATUSES:
            deferred.defer(insert_gtest_results, step.key(),
                           _transactional=True, _queue='slow')
        if parse_suppression and step.status in PARSEABLE_STATUSES:
            deferred.defer(reparse_suppression_results, step.key(),
                           step.step_name, _transactional=True,
                           _queue='slow')

    db.run_in_transaction_custom_retries(10, tx_step)

def _finalize_job(cls, mapreduce_spec, mapreduce_state, base_path):
    """Finalize job execution.

    Finalizes output writer, invokes done callback, saves mapreduce state
    in a transaction, and schedules necessary clean-ups.

    Args:
      mapreduce_spec: an instance of MapreduceSpec
      mapreduce_state: an instance of MapreduceState
      base_path: handler base path.
    """
    config = util.create_datastore_write_config(mapreduce_spec)
    if (mapreduce_spec.mapper.output_writer_class() and
        mapreduce_state.result_status ==
            model.MapreduceState.RESULT_SUCCESS):
        mapreduce_spec.mapper.output_writer_class().finalize_job(
            mapreduce_state)
    queue_name = mapreduce_spec.params.get(
        model.MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE, "default")
    done_callback = mapreduce_spec.params.get(
        model.MapreduceSpec.PARAM_DONE_CALLBACK)
    done_task = None
    if done_callback:
        done_task = taskqueue.Task(
            url=done_callback,
            headers={"Mapreduce-Id": mapreduce_spec.mapreduce_id},
            method=mapreduce_spec.params.get("done_callback_method", "POST"))

    def put_state(state):
        state.put(config=config)
        # Enqueue the done callback transactionally so it only fires if the
        # final state actually commits.
        if done_task and not _run_task_hook(mapreduce_spec.get_hooks(),
                                            "enqueue_done_task",
                                            done_task, queue_name):
            done_task.add(queue_name, transactional=True)

    logging.info("Final result for job '%s' is '%s'",
                 mapreduce_spec.mapreduce_id, mapreduce_state.result_status)
    db.run_in_transaction_custom_retries(5, put_state, mapreduce_state)
    cls._clean_up_mr(mapreduce_spec, base_path)

def saveValues(values, saver):
    """Saves the specified popularities."""
    import logging
    from google.appengine.ext import db
    from soc.models.organization import Organization

    def txn(key, value):
        org = Organization.get_by_key_name(key)
        saver(org, value)
        org.put()

    for key, value in sorted(values.iteritems()):
        print key
        db.run_in_transaction_custom_retries(10, txn, key, value)
    print "done"

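A hypothetical call, to show how the saver callback decouples which field gets written from the transactional read-modify-write loop; the 'popularity' property, key names, and values below are assumptions for illustration:

# Illustrative only: assumes Organization defines an IntegerProperty
# named 'popularity'; the org key names and values are made up.
def save_popularity(org, value):
    org.popularity = value

saveValues({'beatbox': 17, 'melange': 42}, save_popularity)
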
def insert_gtest_results(build_step_key):
    """Inserts GTest results into the datastore, replacing any existing ones.

    Also records used parser version."""
    step = BuildStep.get(build_step_key)
    log_contents = ''
    if step.log_gs:
        with cloudstorage.open(step.log_gs) as gs_file:
            log_contents = html2text(gs_file.read().decode('utf-8',
                                                           'replace'))
    else:
        try:
            blob_reader = blobstore.BlobReader(step.log_stdio)
            log_contents = html2text(blob_reader.read().decode('utf-8',
                                                               'replace'))
        except (ValueError, blobstore.BlobNotFoundError) as e:
            raise deferred.PermanentTaskFailure(e)
    gtest_results = gtest_parser.parse(log_contents)
    to_put = []
    for fullname, result in gtest_results.iteritems():
        # Only store failure results.
        if result['is_successful']:
            continue
        if isinstance(result['log'], unicode):
            log = db.Text(result['log'])
        else:
            log = db.Text(result['log'], encoding='utf-8')
        result_entity = GTestResult(
            parent=db.Key.from_path('GTestResult', str(step.key())),
            build_step=step,
            time_finished=step.time_finished,
            gtest_parser_version=gtest_parser.VERSION,
            is_crash_or_hang=result['is_crash_or_hang'],
            fullname=fullname,
            run_time_ms=result['run_time_ms'],
            log=log)
        to_put.append(result_entity)
    for chunk in chunks(to_put, BATCH_SIZE):
        db.put(chunk)

    def tx_parser_version():
        step = BuildStep.get(build_step_key)
        orig_parser_version = step.gtest_parser_version
        if step.gtest_parser_version < gtest_parser.VERSION:
            step.gtest_parser_version = gtest_parser.VERSION
            step.put()
        return (orig_parser_version, step.gtest_parser_version)

    _, parser_version = db.run_in_transaction_custom_retries(
        10, tx_parser_version)
    query = GTestResult.all(keys_only=True)
    query.filter('build_step =', build_step_key)
    query.filter('gtest_parser_version <', parser_version)
    db.delete(query)

def _finalize_job(mapreduce_spec, mapreduce_state, base_path):
    """Finalize job execution.

    Finalizes output writer, invokes done callback and schedules finalize
    job execution.

    Args:
      mapreduce_spec: an instance of MapreduceSpec
      mapreduce_state: an instance of MapreduceState
      base_path: handler base path.
    """
    config = util.create_datastore_write_config(mapreduce_spec)
    if (mapreduce_spec.mapper.output_writer_class() and
        mapreduce_state.result_status ==
            model.MapreduceState.RESULT_SUCCESS):
        mapreduce_spec.mapper.output_writer_class().finalize_job(
            mapreduce_state)

    def put_state(state):
        state.put(config=config)
        done_callback = mapreduce_spec.params.get(
            model.MapreduceSpec.PARAM_DONE_CALLBACK)
        if done_callback:
            done_task = taskqueue.Task(
                url=done_callback,
                headers={"Mapreduce-Id": mapreduce_spec.mapreduce_id},
                method=mapreduce_spec.params.get("done_callback_method",
                                                 "POST"))
            queue_name = mapreduce_spec.params.get(
                model.MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE, "default")
            if not _run_task_hook(mapreduce_spec.get_hooks(),
                                  "enqueue_done_task", done_task, queue_name):
                done_task.add(queue_name, transactional=True)
        FinalizeJobHandler.schedule(base_path, mapreduce_spec)

    db.run_in_transaction_custom_retries(5, put_state, mapreduce_state)

def post(self):
    reqstart = time.time()
    parent_key = self.request.get('parent')
    child_key = self.request.get('child')
    tries = int(self.request.get('tries'))
    amount = int(self.request.get('amount'))
    timings = []
    total = 0
    #try:
    success = True
    for ii in range(0, tries):
        start = time.time()
        try:
            db.run_in_transaction_custom_retries(
                1, pay_allowance, parent_key, child_key, amount)
        except Exception:
            success = False
        end = time.time()
        total += (end - start)
        timings.append(end - start)
        if not success:
            break
    reqend = time.time()
    #except:
    #    self.response.out.write("error")
    self.response.out.write("Success:%s\n" % str(success))
    self.response.out.write("Start time:" + str(reqstart) + '\n')
    self.response.out.write("End time:" + str(reqend) + '\n')
    self.response.out.write("Total Time Taken:" + str(reqend - reqstart) +
                            '\n')
    self.response.out.write("Cumulative DB Time Taken:" + str(total) + "\n")
    self.response.out.write("Number of Keys:" + str(len(timings)) + "\n")
    self.response.out.write("Timings:")
    self.response.out.write(','.join(str(t) for t in timings))
    self.response.out.write('\n')

def post(self):
    id = db.Key(self.request.get('id'))
    rid = self.request.get('rid')
    user = users.get_current_user()
    component = ProjectComponent.gql(
        "where user = :user_id and __key__ = :component_id",
        user_id=user, component_id=id).fetch(1)[0]
    if component:
        self.response.out.write(simplejson.dumps({
            'row': db.run_in_transaction_custom_retries(
                1000, self.incRow, component),
            'rid': rid,
            'id': self.request.get('id'),
        }))

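self.incRow isn't shown; a minimal sketch of what a transactional row increment might look like, assuming a hypothetical 'row' IntegerProperty on ProjectComponent. Re-fetching the entity inside the transaction is what makes the heavily retried increment atomic:

def incRow(self, component):
    # Hypothetical sketch: re-read the entity inside the transaction so the
    # increment can't clobber a concurrent update; 'row' is an assumed
    # IntegerProperty on ProjectComponent.
    fresh = db.get(component.key())
    fresh.row += 1
    fresh.put()
    return fresh.row
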
def send_newsletter(request):
    message = db.get(request.form['pending_email'])
    if message.sent_on:
        # Already sent; this task is a duplicate.
        return
    edition = message.edition
    to = message.subscription.parent().email
    sender = "%s <%s@%s.appspotmail.com>" % (edition.site.name,
                                             edition.site.slug,
                                             os.environ['APPLICATION_ID'])
    subject = edition.subject
    body = edition.body_txt
    html = edition.body_html

    def _tx():
        # Mark as sent first so a retry of this task cannot double-send.
        message.sent_on = datetime.now()
        message.put()

    db.run_in_transaction_custom_retries(10, _tx)
    mail.send_mail(sender, to, subject, body, html=html)

def create(self):
    """Create a new entry in the datastore."""
    def _create(self):
        # If the key does not exist yet...
        if memcache.get(self.skey, namespace=SESSION_NAMESPACE) is None:
            # ...register it in memcache.
            memcache.add(self.skey, self._sp, time=MEMCACHE_TIME,
                         namespace=SESSION_NAMESPACE)
            return True
        # Fail if it already exists.
        return False

    # Run inside a transaction.
    return db.run_in_transaction_custom_retries(1, _create, self)

def create(self):
    """Create a new entry in the datastore."""
    def _create(self):
        # Check whether it already exists.
        model = DataStoreSessionModel.get_by_key_name(self.skey)
        if model is None:
            # Create a new one.
            m = DataStoreSessionModel(key_name=self.skey)
            m.data = str(self._sp)
            m.put()
            return True
        return False

    # Run inside a transaction.
    return db.run_in_transaction_custom_retries(1, _create, self)

def create_new_account(id, pw):
    """Create a new Account."""
    # Check whether the given ID already exists, and create it if not.
    def check_and_create(id):
        model = Account.get_by_key_name(id)
        # Return None if it already exists.
        if model is None:
            # Create the model in the datastore.
            model = Account(key_name=id)
            model.id = id
            model.pw = pw
            model.put()
            return model
        return None

    # Run inside a transaction.
    return db.run_in_transaction_custom_retries(1, check_and_create, id)

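A short sign-up sketch (the helper names below are assumptions): because get_by_key_name and put happen inside one transaction keyed on the id, two simultaneous registrations for the same id cannot both succeed; exactly one caller gets the Account back and the other gets None:

# Illustrative flow; hashed_pw, show_error and log_in are hypothetical.
account = create_new_account('alice', hashed_pw)
if account is None:
    # Another request claimed this id first, or it already existed.
    show_error('That user id is already taken.')
else:
    log_in(account)
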
def post(self):
    commit_list = {'puts': [], 'deletes': [], 'tasks': []}
    logging.info('Prepping transaction worker...')
    if self.request.get('_tx_ticket', default_value=None) is not None:
        mode = self.request.get('_tx_worker',
                                default_value='QueuedTransaction')
        worker = getattr(data, self.request.get(
            '_tx_worker', default_value='EntityCreateTask'))
        logging.info('_tx_ticket: ' + str(self.request.get('_tx_ticket')))
        ticket = worker.get_by_key_name(self.request.get('_tx_ticket'))
        if isinstance(ticket, list):
            ticket = ticket[0]
        logging.info('ticket key: ' + str(self.request.get('_tx_ticket')))
        logging.info('Retrieved ticket. ' + str(ticket))
        if ticket is not None:
            ## tag ticket as started
            mode = self.request.get('_tx_mode', default_value=None)
            logging.info('Beginning work with mode ' + str(mode) + '...')
            try:
                if mode == 'writeOperation':
                    def txn(ticket):
                        commit_list['puts'] = ticket.subject
                        db.put(commit_list['puts'])
                elif mode == 'deleteOperation':
                    def txn(ticket):
                        commit_list['deletes'] = ticket.subject
                        db.delete(commit_list['deletes'])
                elif mode == 'entityCreate':
                    logging.info('Defining entityCreate txn...')

                    def txn(ticket):
                        logging.info('Beginning transaction.')
                        entity, natural = DataController.generateNaturalKind(
                            ticket.subject)
                        logging.info('Split natural kind. N: ' +
                                     str(natural) + ', E: ' + str(entity))
                        if ticket.queue_indexing:
                            logging.info('Indexing requested. '
                                         'Queueing request...')
                            index_queue, index_task = (
                                IndexController.queueNewEntity(
                                    entity, return_task=True))
                            index_task.add(index_queue.name,
                                           transactional=True)
                        if ticket.queue_caching:
                            logging.info('Caching requested. '
                                         'Queueing request...')
                            cache_queue, cache_task = (
                                CacheController.queueNewEntity(
                                    entity, return_task=True))
                            cache_task.add(cache_queue.name,
                                           transactional=True)
                        ## @TODO: figure out way to merge descriptors in
                        ## and put them too
                        #if ticket.attachments is not None:
                        #    for item in ticket.attachments:
                        #        pass
                        logging.info('Putting commit list...')
                        return True
                elif mode == 'entityUpdate':
                    def txn(ticket):
                        commit_list['puts'] = ticket.subject
                        db.put(commit_list['puts'])
                elif mode == 'entityDelete':
                    def txn(ticket):
                        ## @TODO: Fill out procedures for entity delete
                        ## besides deleting entire entity group
                        q = db.Query().ancestor(ticket.subject)
                        q.keys_only = True
                        c = q.count()
                        commit_list['deletes'] = q.fetch(c)
                        db.delete(commit_list['deletes'])
                elif mode is None:
                    ## @TODO: error handling
                    self.response.set_status(404)
                    self.render_raw('404 Fail: No Mode Given')
                else:
                    ## @TODO: error handling
                    self.response.set_status(404)
                    self.render_raw('404 Fail: Invalid Mode')
                logging.info('Ticket dump: ' + str(ticket))
                logging.info('Ticket properties: ' + str(ticket.properties()))
                db.run_in_transaction_custom_retries(3, txn, ticket)
                logging.info('Transaction complete.')
            #except:
            #    ## @TODO: error handling
            #    logging.critical('Error 500 during transaction processing.')
            #    self.response.set_status(500)
            #    self.render_raw('<b>500 Error:</b> shit failed and blew up '
            #                    'no idea why')
            finally:
                ## @TODO: update ticket
                ticket.status = 'complete'
                ticket.put()
                self.render_raw('<b>Transaction success</b>')
        else:
            ## @TODO: error handling
            logging.critical('Ticket not found.')
            self.response.set_status(404)
            self.render_raw('404 Ticket Invalid: Not Found '
                            '(get_by_key_name returned None)')
    else:
        ## @TODO: error handling
        logging.critical('Ticket not provided.')
        self.response.set_status(404)
        self.render_raw('404 Ticket Invalid: Not Found')

    log_contents = ''
    if step.log_gs:
        with cloudstorage.open(step.log_gs) as gs_file:
            log_contents = html2text(gs_file.read().decode('utf-8',
                                                           'replace'))
    else:
        try:
            blob_reader = blobstore.BlobReader(step.log_stdio)
            log_contents = html2text(blob_reader.read().decode('utf-8',
                                                               'replace'))
        except (ValueError, blobstore.BlobNotFoundError) as e:
            raise deferred.PermanentTaskFailure(e)
    suppression_results = suppression_parser.parse(
        log_contents.splitlines(True))

    def tx_reparse():
        step = BuildStep.get(build_step_key)
        insert_suppression_results(step, suppression_results)
        step.put()

    db.run_in_transaction_custom_retries(10, tx_reparse)


def update_parsed_data(_param, chunk):
    """Ensures that all build steps' parsed data is in sync with current
    settings."""
    parse_gtest = dict((s.name, s.parse_gtest) for s in StepName.all())
    parse_suppression = dict(
        (s.name, s.parse_suppression) for s in StepName.all())
    for build_step_key in chunk:
        build_step = BuildStep.get(build_step_key)
        if not build_step:
            continue

def fetch_build(buildbot_root, builder, build_number):
    """Fetches data about a single build."""
    build_url = '%s/builders/%s/builds/%d?filter=1' % (
        buildbot_root, urllib.quote(builder), build_number)
    build_response = urlfetch.fetch(build_url, deadline=URLFETCH_DEADLINE)
    if not build_response or build_response.status_code != 200:
        return
    build_json = json.loads(build_response.content)
    # Register the step names.
    for step in build_json['steps']:
        StepName.get_or_insert(key_name=step['name'], name=step['name'],
                               parse_gtest=False, parse_suppression=False)
    key_name = Build.to_key_name(buildbot_root, builder, build_number,
                                 build_json['times'])
    Build.get_or_insert(
        key_name,
        buildbot_root=buildbot_root,
        builder=builder,
        build_number=build_number,
        time_started=datetime.datetime.fromtimestamp(build_json['times'][0]),
        time_finished=datetime.datetime.fromtimestamp(build_json['times'][1]),
        is_fetched=False,
        status=build_json.get('results', SUCCESS))

    def tx_build():
        build = Build.get_by_key_name(key_name)
        if build.is_fetched:
            return
        to_put = []
        for step in build_json['steps']:
            # Skip steps that didn't run (e.g. when the previous step
            # failed).
            if 'isFinished' not in step:
                continue
            if 'results' in step:
                status = step['results'][0]
            else:
                status = SUCCESS
            if 'logs' not in step:
                # This can happen with steps like 'trigger' that have no
                # logs.
                continue
            logs_dict = dict(step['logs'])
            if 'stdio' in logs_dict:
                stdio_url = logs_dict['stdio']
                log = BuildStep(
                    parent=build,
                    step_name=step['name'],
                    step_number=step.get('step_number', 0),
                    status=status,
                    time_started=datetime.datetime.fromtimestamp(
                        step['times'][0]),
                    time_finished=datetime.datetime.fromtimestamp(
                        step['times'][1]),
                    is_fetched=False,
                    is_too_large=False,
                    fetch_timestamp=datetime.datetime.now(),
                    stdio_url=stdio_url,
                    gtest_parser_version=-1,
                    suppression_parser_version=-1,
                    buildbot_root=build.buildbot_root,
                    builder=build.builder,
                    build_number=build.build_number)
                to_put.append(log)
        build.is_fetched = True
        db.put(to_put)

    db.run_in_transaction_custom_retries(10, tx_build)

    parse_gtest = dict((s.name, s.parse_gtest) for s in StepName.all())
    parse_suppression = dict(
        (s.name, s.parse_suppression) for s in StepName.all())

    def tx_steps():
        build = Build.get_by_key_name(key_name)
        return BuildStep.all().ancestor(build)

    for build_step in db.run_in_transaction_custom_retries(10, tx_steps):
        fetch_step(build_step.key(), build_step.stdio_url,
                   parse_gtest[build_step.step_name],
                   parse_suppression[build_step.step_name])