def delete(self, **kwargs):
    """Delete a shelter identified by kwargs['id'], cascading to its pets.

    Responds 406 unless the client accepts JSON, 400 when the id is
    missing or refers to no shelter.
    """
    if 'application/json' not in self.request.accept:
        self.response.status = 406
        self.response.status_message = "Not Acceptable, API only supports application/json."
        return
    if 'id' not in kwargs:
        self.response.status = 400
        self.response.status_message = "Invalid request, id is required"
        self.response.write(json.dumps({'Failed': "Invalid request, id is required"}))
        return
    shelter = ndb.Key(db_defs.Shelter, int(kwargs['id'])).get()
    if shelter is None:
        self.response.status = 400
        self.response.status_message = "Invalid request, shelter unknown"
        self.response.write(json.dumps({'Failed': "Invalid request, shelter unknown"}))
        return
    shelter.key.delete()
    # Cascade: remove the dogs and cats that belonged to this shelter.
    dog_keys = db_defs.Dog.query(db_defs.Dog.shelter == shelter.key).fetch(keys_only=True)
    if dog_keys:
        ndb.delete_multi(dog_keys)
    cat_keys = db_defs.Cat.query(db_defs.Cat.shelter == shelter.key).fetch(keys_only=True)
    if cat_keys:
        ndb.delete_multi(cat_keys)
def handleRequest(self):
    """Dispatch on the 'action' query parameter.

    Actions: 'updateData' (upsert a MiscData row), 'cleanup' (delete all
    rows), 'getData' (return one value as text/plain), otherwise render
    an HTML listing of all stored data.
    """
    action = self.request.get("action")
    if action == "updateData":
        key = self.request.get("key")
        data = MiscData.query(MiscData.key == key).fetch(1)
        if len(data) == 0:
            data = MiscData(key=key, value=self.request.get("value"))
            data.put()
        else:
            data = data[0]
            data.value = self.request.get("value")
            data.put()
    elif action == "cleanup":
        ndb.delete_multi(MiscData.query().fetch(keys_only=True))
        self.response.write('Data cleaned successfully!')
        self.response.headers["Content-Type"] = "text/plain"
    elif action == "getData":
        key = self.request.get("key")
        data = MiscData.query(MiscData.key == key).fetch(1)
        if len(data) != 0:
            self.response.headers["Content-Type"] = "text/plain"
            self.response.write(data[0].value)
        else:
            self.error(404)
            # Fix: the original called self.write(), which is not a
            # RequestHandler method and raised AttributeError on 404.
            self.response.write("<h1>The key was not found!</h1>")
    else:
        data = MiscData.query().fetch()
        self.response.write('<html><head><title>Horatiu Misc Stuff</title></head><body>')
        self.response.write('<h1>Welcome to horatiu misc stuff!</h1> <h2>Available data:</h2><ul>')
        for d in data:
            self.response.write('<li><b>' + d.key + '</b> - ' + str(d.date if d.date is not None else 'none') + '<br><pre>' + d.value + '</pre></li>')
        self.response.write('</body></html>')
def get(self):
    """Rebuild the demo Category tree and sample Events, then redirect."""
    # Wipe and recreate the category hierarchy.
    ndb.delete_multi(Category.query().iter(keys_only=True))
    event_root_key = Category(title='Event').put()
    music_key = Category(title='Music', parent=event_root_key).put()
    concert_key = Category(title='Concert', parent=music_key).put()
    musical_key = Category(title='Musical', parent=music_key).put()
    sport_key = Category(title='Sport', parent=event_root_key).put()
    football_key = Category(title='Football', parent=sport_key).put()
    wintersport_key = Category(title='Wintersports', parent=sport_key).put()
    slalom_key = Category(title='Slalom', parent=wintersport_key).put()
    biathlon_key = Category(title='Biathlon', parent=wintersport_key).put()
    # Wipe and recreate the sample events.
    ndb.delete_multi(Event.query().iter(keys_only=True))
    for title, category_key in [("Cats", musical_key),
                                ("Wicked", musical_key),
                                ("St. Pauli vs. HSV", football_key),
                                ("Schanzenslalom 2015", slalom_key),
                                ("Broilers", concert_key)]:
        Event(title=title, category=category_key).put()
    return self.redirect('/ndb/noancestor')
def post(self):
    """Delete every swim and related record for the posted ASA number."""
    asa_number_str = self.request.get('asa_number')
    if not asa_number_str:
        logging.error("Missing asa_number in swimmer nuke request.")
        self.response.set_status(400)
        return
    asa_number = int(asa_number_str)
    logging.info("Nuking " + asa_number_str)
    doomed_keys = []

    # Local helpers that queue keys for a single batched delete.
    def collect_swims(swims):
        for swim in swims:
            doomed_keys.append(swim.key)

    def collect_model(model):
        if model is not None:
            doomed_keys.append(model.key)

    # Nuke all the swims, short and long course alike.
    for event in short_course_events:
        collect_swims(Swim.fetch_all(asa_number, event))
    for event in long_course_events:
        collect_swims(Swim.fetch_all(asa_number, event))
    # And everything else.
    collect_model(SwimList.get(asa_number))
    collect_model(Swimmer.get("Winsford", asa_number))
    collect_model(SwimmerCat1.get("Winsford", asa_number))
    ndb.delete_multi(doomed_keys)
def insert(cls, source, syndication, original):
    """Insert a new (non-blank) syndication -> original relationship.

    Does a check-and-set within a transaction to avoid duplicate
    relationships.  Blank placeholder rows (syndication -> None or
    original -> None) are removed first; non-blank rows are retained.

    Args:
      source: models.Source subclass
      syndication: string (not None)
      original: string (not None)

    Return:
      the new SyndicatedPost or a preexisting one if it exists
    """
    # Check for an exact-match duplicate first.
    existing = cls.query(cls.syndication == syndication,
                         cls.original == original,
                         ancestor=source.key).get()
    if existing:
        return existing
    # Delete blanks (expect at most 1 of each).  NB: '== None' is required
    # by the ndb query DSL; 'is None' would not build a filter node.
    blank_keys = cls.query(
        ndb.OR(ndb.AND(cls.syndication == syndication, cls.original == None),
               ndb.AND(cls.original == original, cls.syndication == None)),
        ancestor=source.key).fetch(keys_only=True)
    ndb.delete_multi(blank_keys)
    relationship = cls(parent=source.key, original=original,
                       syndication=syndication)
    relationship.put()
    return relationship
def refreshData():
    """Rebuild the cityDB datastore from a fresh citySpider crawl.

    Raises:
        Exception: when two crawled cities normalize to the same name.
    """
    spider = citySpider()
    city_list = spider.parse()
    # First empty the data-store.
    ndb.delete_multi([entity.key for entity in cityDB.query().fetch()])
    # Store fresh cities.
    entity_list = []
    for city in city_list:
        # Slug: lowercase with spaces replaced by '-' (str.replace instead
        # of the original character-by-character rebuild loop).
        city_name = city[0].replace(' ', '-').lower()
        if cityDB.get_by_id(city_name) is not None:
            raise Exception("Assertion Fail. Two cities with same name encountered")
        entity_list.append(cityDB(key=ndb.Key('cityDB', city_name),
                                  alt_name=city[1],
                                  timestamp=dt.datetime(2000, 1, 1, 0, 0, 0, 0)))
    ndb.put_multi(entity_list)
def get(self):
    """Refresh the follower records for the cookie-identified Twitter user."""
    screen_name = self.request.cookies.get('screen_name')
    ui = Twittero.query(Twittero.screen_name == screen_name).get()
    # Purge followers previously flagged as lost.
    ndb.delete_multi([lost.key for lost in ui.lost_foll()])
    auth.set_access_token(ui.access_k, ui.access_s)
    api = tweepy.API(auth)
    old_list = ui.followers_ids()
    new_list = api.followers_ids()
    pending = []
    # Record brand-new followers.
    for follower_id in [nf for nf in new_list if nf not in old_list]:
        pending.append(Followers(ui=ui.key, user_id=follower_id,
                                 screen_name=api.get_user(follower_id).screen_name,
                                 new=True))
    # Flag followers that disappeared since the last refresh.
    for follower_id in old_list:
        if follower_id not in new_list:
            gone = Followers.query(Followers.user_id == follower_id).get()
            gone.lost = True
            pending.append(gone)
    # Clear the 'new' flag on followers seen two refreshes in a row.
    for follower_id in old_list:
        if follower_id in new_list:
            kept = Followers.query(Followers.user_id == follower_id).get()
            if kept.new is True:
                kept.new = False
                pending.append(kept)
    ndb.put_multi(pending)
    self.redirect('/twitter/')
def get(self):
    """Wipe every geomancer model kind and flush memcache."""
    from geomancer.model import Cache, Locality, Clause, Georef
    from google.appengine.ext import ndb
    from google.appengine.api import memcache
    for model_cls in (Cache, Locality, Clause, Georef):
        ndb.delete_multi(model_cls.query().iter(keys_only=True))
    memcache.flush_all()
def handle_get(self, owner, repo, version):
    """Delete a library version's descendant entities and refresh caches."""
    # FIXME: Make deletion transactional with check on library that tag is excluded.
    version_key = ndb.Key(Library, Library.id(owner, repo), Version, version)
    descendant_keys = ndb.Query(ancestor=version_key).iter(keys_only=True)
    ndb.delete_multi(descendant_keys)
    # Reindex only when the cached version set actually changed.
    if VersionCache.update(version_key.parent()):
        task_url = util.update_indexes_task(owner, repo)
        util.new_task(task_url, target='manage')
def get(self):
    """Purge bookmarks that were trashed more than seven days ago."""
    cutoff = datetime.datetime.now() - datetime.timedelta(days=7)
    trashed = Bookmarks.query(Bookmarks.trashed == True,
                              Bookmarks.data < cutoff)
    # keys_only avoids materializing full entities just to read their keys.
    ndb.delete_multi(trashed.fetch(keys_only=True))
def add_by_keys(cls, tag_rel_keys, _incr_step=1):
    """Add relation by keys

    Toplevels are added automatically.

    For each key: create the relation row if missing, mirror it into the
    top-level collection, bump its counter by _incr_step, and delete rows
    whose counter dropped to (or below) zero.
    """
    keys = tag_rel_keys
    dbs = ndb.get_multi(keys)
    dbs_new = []   # entities to persist
    keys_del = []  # entities whose count reached zero -> delete
    for db, key in zip(dbs, keys):
        if not db:
            # Relation does not exist yet; create it with a zero count.
            tag_name, related_to, collection = cls.from_key(key)
            db = cls.get_or_insert(key.id(), tag_name=tag_name, related_to=related_to,
                                   collection=collection, cnt=0)
        if collection != Collection.top_key():
            # Mirror every non-top relation into the top-level collection
            # so aggregate counts exist there too.
            top_key = cls.to_key(tag_name, related_to, Collection.top_key())
            db.toplevel = top_key
            cls.get_or_insert(top_key.id(), tag_name=tag_name, related_to=related_to,
                              collection=Collection.top_key(), cnt=0)
        db.incr(_incr_step)
        if db.count <= 0:
            keys_del.append(db.key)
            if getattr(db, "toplevel", None):
                db_top = db.toplevel.get()
                # Threshold is 1 here because the toplevel counter is only
                # decremented when this entity's put() lands ("its 0 after put()").
                if db_top.count <= 1:
                    keys_del.append(db.toplevel)
        else:
            dbs_new.append(db)
    ndb.delete_multi(keys_del)  # TODO async delete
    return ndb.put_multi(dbs_new)
def parse_autocomplete_update_data():
    """Consolidate raw autocomplete job data into sharded hash maps.

    Clears previous consolidation shards, flattens the raw data records,
    writes NUM_SHARDS _ConsolidationHashMap entities, then defers
    chunked persistence of the consolidated data.
    """
    # Clear past shards.  keys_only: the entities are only being deleted,
    # so there is no need to materialize them (the original fetched them).
    ndb.delete_multi(
        _ConsolidationHashMap.query().fetch(1000, keys_only=True))
    # go
    past_data_records = _AutocompleteUpdateJobRawData.query().fetch(1000)
    if len(past_data_records) >= 1000:
        logging.warning("There are more than 1000 data records.")
    past_data = []
    for data_record in past_data_records:
        assert isinstance(data_record, _AutocompleteUpdateJobRawData)
        past_data.append(data_record.rows)
    # Flatten the list of row lists into one list.
    data = [item for sublist in past_data for item in sublist]
    del past_data
    consolidated_books = consolidate_books(data)
    NUM_SHARDS = 20
    for shard_num in range(NUM_SHARDS):
        # Each shard holds the keys whose modulus selects this shard.
        _ConsolidationHashMap(
            id='shard{}'.format(shard_num),
            hashmap={k: v for k, v in consolidated_books.iteritems()
                     if k % NUM_SHARDS == shard_num}
        ).put()
    per_subprocess = 1000
    for offset in range(0, len(data), per_subprocess):
        next_offset = min(offset + per_subprocess, len(data))
        # Stagger the deferred tasks so they do not all start at once.
        deferred.defer(save_consolidated_autocomplete_data,
                       data[offset:next_offset], offset,
                       _countdown=int(offset / 100))
def remove_all_existing():
    """Removes all existing Photos owned by the test user."""
    query = models.Photo.query(models.Photo.owner == TEST_USER)
    while True:
        # Each pass deletes the current first page, so re-fetching page
        # one walks through the whole result set.
        keys, _, more = query.fetch_page(10, keys_only=True)
        ndb.delete_multi(keys)
        if not more:
            break
def deleteAllResources(self):
    """Delete every entity returned by this model's query."""
    try:
        resource_keys = self.query().fetch(keys_only=True)
        ndb.delete_multi(resource_keys)
        logging.debug('deleteAllResources success')
    except Exception:
        # Narrowed from a bare except and logged with the traceback so
        # failures are diagnosable instead of silently swallowed.
        logging.exception('deleteAllResources failed')
def delete_books():
    """Delete BookRecord entities in 1000-key batches, re-deferring until none remain."""
    batch = BookRecord.query().fetch(1000, keys_only=True)
    if not batch:
        logging.info("All books deleted!")
        return
    ndb.delete_multi(batch)
    # Chain another task to process the next batch.
    deferred.defer(delete_books)
def delete_suggestions():
    """Delete SuggestionsRecord entities in 1000-key batches, re-deferring until none remain."""
    batch = SuggestionsRecord.query().fetch(1000, keys_only=True)
    if not batch:
        logging.info("All suggestions deleted!")
        return
    ndb.delete_multi(batch)
    # Chain another task to process the next batch.
    deferred.defer(delete_suggestions)
def deleteAllLessons(self):
    """Delete every entity returned by this model's query."""
    try:
        lesson_keys = self.query().fetch(keys_only=True)
        ndb.delete_multi(lesson_keys)
        logging.debug('deleteAllLessons success')
    except Exception:
        # Narrowed from a bare except and logged with the traceback so
        # failures are diagnosable instead of silently swallowed.
        logging.exception('deleteAllLessons failed')
def post(self):
    """Delete a room after verifying existence, password, and creator identity."""
    self.response.headers['Content-Type'] = 'application/json'
    roomlist_name = utils.DEFAULT_ROOMLIST_NAME
    room = None
    room_id = self.request.get('room_id')
    if room_id:
        try:
            room = models.Room.get_by_id(int(room_id),
                                         parent=utils.roomlist_key(roomlist_name))
        except:
            room = None
    if room is None:
        self.response.write(json.dumps({"status": "NOT OK", "message": "The requested room was not found."}))
        return
    if not utils.checkPassword(self.request.get('password', ''), room.password):
        self.response.write(json.dumps({"status": "NOT OK", "message": "The correct password was not provided."}))
        return
    if not room.creator == long(self.request.get('user_id')):
        self.response.write(json.dumps({"status": "NOT OK", "message": "Only the creator of a room can delete it."}))
        return
    # Delete the room together with every descendant entity.
    ndb.delete_multi(ndb.Query(ancestor=room.key).iter(keys_only=True))
    self.response.write(json.dumps({"status": "OK"}))
def delete(self, committer_id, commit_message, force_deletion=False):
    """Deletes this model instance.

    Args:
        committer_id: str. The user_id of the user who committed the change.
        commit_message: str.
        force_deletion: bool. If True, the model and all of its version
            snapshots are removed from storage entirely; otherwise the
            model is only marked as deleted via an autogenerated commit.
    """
    if force_deletion:
        current_version = self.version
        # Snapshot ids exist for versions 1..current_version inclusive.
        version_numbers = [str(num + 1) for num in range(current_version)]
        snapshot_ids = [
            self._get_snapshot_id(self.id, version_number)
            for version_number in version_numbers]
        metadata_keys = [
            ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id)
            for snapshot_id in snapshot_ids]
        ndb.delete_multi(metadata_keys)
        content_keys = [
            ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id)
            for snapshot_id in snapshot_ids]
        ndb.delete_multi(content_keys)
        super(VersionedModel, self).delete()
    else:
        self._require_not_marked_deleted()
        self.deleted = True
        # Soft delete: record the deletion as an autogenerated commit.
        CMD_DELETE = '%s_mark_deleted' % self._AUTOGENERATED_PREFIX
        commit_cmds = [{
            'cmd': CMD_DELETE
        }]
        self._trusted_commit(
            committer_id, self._COMMIT_TYPE_DELETE, commit_message,
            commit_cmds)
def post(self):
    """Multi-stage duplicate-removal task: 'hash', 'mark', then 'remove'."""
    stage = self.request.get('stage')
    c = ndb.Cursor(urlsafe=self.request.get('cursor'))
    if stage == 'hash':
        # Compute hashes for a page of games, then chain the next page.
        games, curs, more = GameHistory.query().fetch_page(10, start_cursor=c)
        map(self.handle_game, games)
        if len(games) > 0 and more and curs:
            taskqueue.add(url='/remove_duplicates',
                          params={'stage': 'hash', 'cursor': curs.urlsafe()},
                          queue_name='fast')
    elif stage == 'mark':
        games, curs, more = GameHistory.query().fetch_page(1, start_cursor=c)
        # Fix: guard against an empty page BEFORE indexing.  The original
        # did game = game[0] first, raising IndexError instead of stopping.
        if not games:
            self.abort(200)
        game = games[0]
        if not game.ignored or game.hash is None:
            duplicates = GameHistory.query(GameHistory.hash == game.hash).fetch()
            duplicates.sort(key=lambda el: el.key.id())
            duplicates.pop()  # keep the entry with the highest id
            for el in duplicates:
                el.ignored = True
            ndb.put_multi(duplicates)
        if more and curs:
            taskqueue.add(url='/remove_duplicates',
                          params={'stage': 'mark', 'cursor': curs.urlsafe()},
                          queue_name='fast')
    elif stage == 'remove':
        ndb.delete_multi(GameHistory.query(
            GameHistory.ignored == True).fetch(keys_only=True))
def _clean_map(crawl_db_datum):
    """Delete entities map function.

    Delete unnecessary entities, also FetchedDbDatum.

    Args:
      crawl_db_datum: The entity of crawl_db_datum.

    Yields:
      url_str: Deleted urls (currently always empty).
    """
    delete_keys = []
    # Read the clean-all flag once (the original hit memcache twice).
    clean_all = memcache.get(CLEAN_ALL_KEY)
    delete_fetched_datum = FetchedDbDatum.get_by_id(crawl_db_datum.url)
    if delete_fetched_datum is not None:
        delete_keys.append(delete_fetched_datum.key)
    data = ndb.Model.to_dict(crawl_db_datum)
    fetch_status = data.get("last_status", 2)
    url = ""
    if clean_all:
        delete_keys.append(crawl_db_datum.key)
    else:
        # Only remove crawl data in a terminal state.
        if fetch_status in [FETCHED, SKIPPED, FAILED]:
            delete_keys.append(crawl_db_datum.key)
    ndb.delete_multi(delete_keys)
    yield(url + "\n")
def txn():
    """Transactional body: persist buffered datasource updates to disk.

    Reads the updates accumulated since the stored _Version, appends any
    'last_transaction' tail carried in `state`, then writes _Value
    entities (or deletes them when the new value decodes to JSON null).

    Returns the last version number persisted, or None when the memcache
    buffer could not be read and a full flush was issued instead.
    """
    last_version = state['version']
    # Singleton version record kept inside the datasource's entity group.
    version_key = ndb.Key(_Version, 1, parent=datasource_key)
    version_entity = version_key.get()
    if version_entity is None:
        version_entity = _Version(version=0, id=1, parent=datasource_key)
    try:
        updates = _get_updates(datasource_key, version_entity.version,
                               last_version).get_result()
    except _MemcacheReadFailure:
        # Buffer lost: fall back to flushing the whole datasource.
        logging.warning('Memcache failure when saving data to disk.',
                        exc_info=sys.exc_info())
        _flush(service, datasource_key)
        return
    if 'last_transaction' in state:
        # The in-flight transaction's updates come after the buffered ones.
        for update in state['last_transaction']:
            updates.append(update)
            last_version = update['version']
    version_entity.version = last_version
    entities_put = [version_entity]
    keys_delete = []
    for update in updates:
        # An empty path denotes the root; otherwise join segments with '/'.
        if len(update['path']) == 0:
            id = '/'
        else:
            id = '/'.join(update['path'])
        encoded_value = update['new_value']
        if json.loads(encoded_value) is not None:
            entities_put.append(_Value(id=id, parent=datasource_key,
                                       value=encoded_value))
        else:
            # JSON null means the value was removed.
            keys_delete.append(ndb.Key(_Value, id, parent=datasource_key))
    ndb.put_multi(entities_put)
    ndb.delete_multi(keys_delete)
    return last_version
def deletar_form(jogo_id):
    """Delete a game and the author links pointing at it, then redirect to index."""
    game_key = ndb.Key(Game, int(jogo_id))
    game_key.delete()
    # Remove every Autor relation that originates from this game.
    author_keys = Autor.find_origins(game_key).fetch(keys_only=True)
    ndb.delete_multi(author_keys)
    return RedirectResponse(router.to_path(index))
def test_phone_numbers(contact_key):
    """A test for 'phone_numbers' property."""
    for phone_type, number in (('home', '(650) 555 - 2200'),
                               ('mobile', '(650) 555 - 2201')):
        models.PhoneNumber(parent=contact_key, phone_type=phone_type,
                           number=number).put()
    contact = contact_key.get()
    # Iteration order over the numbers is not guaranteed.
    for phone in contact.phone_numbers:
        if phone.phone_type == 'home':
            assert phone.number == '(650) 555 - 2200'
        elif phone.phone_type == 'mobile':
            assert phone.number == '(650) 555 - 2201'
    # Filter the phone numbers by type.  Note that this is an ancestor query.
    homes = contact.phone_numbers.filter(
        models.PhoneNumber.phone_type == 'home').fetch()
    assert len(homes) == 1
    assert homes[0].number == '(650) 555 - 2200'
    # Delete the mobile phones ...
    mobile_query = contact.phone_numbers.filter(
        models.PhoneNumber.phone_type == 'mobile')
    ndb.delete_multi([e.key for e in mobile_query])
    # ... and make sure none remain.
    assert len(mobile_query.fetch()) == 0
def create(cls, user, subject, token=None, expiration=None,
           delete_past_tokens=False):
    """Creates a new token for the given user.

    :param user: User unique ID.
    :param subject: The subject of the key. Examples: 'auth', 'signup'.
    :param token: Optionally an existing token may be provided.
        If None, a random token will be generated.
    :param expiration: Optional expiration for the token.
    :param delete_past_tokens: When True, delete the user's previous
        tokens for this subject before creating the new one.
    :returns: The newly created :class:`UserToken`.
    """
    user = str(user)
    token = token or security.generate_random_string(entropy=128)
    key = cls.get_key(user, subject, token)
    if delete_past_tokens:
        stale_keys = cls.query(cls.user == user,
                               cls.subject == subject).iter(keys_only=True)
        ndb.delete_multi(stale_keys)
    entity = cls(key=key, user=user, subject=subject, token=token,
                 expiration=expiration)
    entity.put()
    return entity
def get(self):
    """Delete MemberCandidate records whose expiry time has passed."""
    logging.debug('ExpireMemberCandidates.get hit')
    now = datetime.datetime.now()
    expired = MemberCandidate.query(MemberCandidate.expire <= now).fetch()
    if expired:
        logging.info('Expiring %d MemberCandidate items', len(expired))
        ndb.delete_multi([candidate.key for candidate in expired])
def delete_layer(cls, layer_index, latest_created_on_datetime):
    """Deletes all entities in the given layer which were created before
    the given datetime.
    """
    stale = cls.query().filter(
        cls.realtime_layer == layer_index,
        cls.created_on < latest_created_on_datetime)
    ndb.delete_multi(stale.iter(keys_only=True))
def delete(self):
    '''Delete the photograph.

    To delete a photograph, need to delete the following:
        all comments
        all scores
        the UserComp if it exists
        the blob
    Also, if an extra photo, reduce the extra photo count by one
    '''
    all_keys = []
    if self.competition:
        user_comp = UserComp.query(
            UserComp.user == self.user,
            UserComp.comp == self.competition
        ).get()
        # Fix: guard against a missing UserComp ("if it exists") -- the
        # original dereferenced user_comp.key and raised AttributeError
        # when the record was already gone.
        if user_comp is not None:
            all_keys.append(user_comp.key)
    else:
        user = self.user.get()
        user.extra_photo_count -= 1
        user.put()
    for comment_key in Comment.query(
            Comment.photo == self.key).fetch(keys_only=True):
        all_keys.append(comment_key)
    for score_key in Scores.query(
            Scores.photo == self.key).fetch(keys_only=True):
        all_keys.append(score_key)
    all_keys.append(self.key)
    blobstore.delete(self.blob)
    ndb.delete_multi(all_keys)
def get(self):
    """Rebuild the stream_name_set index from the current Stream entities."""
    streams = Stream.query().fetch()
    # Drop the stale index entries before re-creating them.
    ndb.delete_multi(stream_name_set.query().fetch(keys_only=True))
    for stream in streams:
        entry = stream_name_set()
        entry.name = stream.name
        entry.put()
def delete_user(user_id, anonymous=False):
    """Delete the User and associated pic, UserQuestions and UserSeasons."""
    model_cls = models.AnonymousUser if anonymous else models.User
    user = model_cls.get_by_id(user_id)
    if not user:
        return
    user_key = user.key
    doomed = [user_key]
    # Collect UserQuestion keys.
    doomed.extend(models.UserQuestion.query(
        models.UserQuestion.user == user_key).iter(keys_only=True))
    # Collect UserSeason keys (registered users only).
    if not anonymous:
        doomed.extend(models.UserSeason.query(
            models.UserSeason.user == user_key).iter(keys_only=True))
    ndb.delete_multi(doomed)
    # Delete the user pic blob last.
    if user.pic:
        blobstore.delete(user.pic)
def play_song(self, request):
    """Set Play song Flag.

    Marks the requested track as played for the caller's party and
    deletes the song's pending Activity entries.
    """
    current_user = endpoints.get_current_user()
    user = User.query(User.user_id == current_user.user_id()).get()
    # Run the song query once (the original executed .get() twice).
    song = Song.query(ndb.AND(Song.track_id == request.track_id,
                              Song.party_key == user.party_key)).get()
    if song is None:
        return add_response(response='song not found')
    if not song.played:
        song.played = True
        song.put()
        # Delete all the activities for that song in that party.
        activity_keys = Activity.query(
            Activity.song == song.key,
            Activity.party_key == user.party_key).fetch(keys_only=True)
        ndb.delete_multi(activity_keys)
    return add_response(response='Song has been updated')
def get(self, event_key):
    """Recompute EventTeam associations for an event, deleting stale ones."""
    teams, event_teams, et_keys_to_del = EventTeamUpdater.update(event_key)
    teams = TeamManipulator.createOrUpdate(teams)
    if teams:
        event_teams = EventTeamManipulator.createOrUpdate(event_teams)
        if et_keys_to_del:
            ndb.delete_multi(et_keys_to_del)
    template_values = {
        'event_teams': event_teams,
        'deleted_event_teams_keys': et_keys_to_del,
    }
    path = os.path.join(os.path.dirname(__file__),
                        '../templates/math/eventteam_update_do.html')
    self.response.out.write(template.render(path, template_values))
def cleanup():
    """Admin view that removes TroopBadge rows whose Badge no longer exists.

    GET renders a confirmation form; POST lists the orphaned TroopBadge
    records and, when the 'commit' checkbox was ticked, deletes them.
    """
    user = UserPrefs.current()
    if not user.isGroupAdmin():
        return "", 403
    heading = "Cleanup"
    baselink = "/admin/cleanup/"
    breadcrumbs = [{'link': '/', 'text': 'Hem'},
                   {'link': '/admin', 'text': 'Admin'},
                   {'link': '/admin/cleanup', 'text': 'Cleanup'}]
    if request.method == 'POST' and request.form is not None:
        # Dry-run unless the 'commit' checkbox was ticked.
        commit = "commit" in request.form and request.form['commit'] == 'on'
        tps = TroopBadge.query().fetch()
        tp_keys_to_remove = []
        items = []
        if not commit:
            items.append('testmode')
        for tp in tps:
            badge = Badge.get_by_id(tp.badge_key.id())
            if badge is None:
                # Orphan: the referenced Badge entity is gone.
                tp_keys_to_remove.append(tp.key)
                items.append(str(tp.key))
        if commit:
            ndb.delete_multi(tp_keys_to_remove)
        return render_template('table.html', heading=heading, baselink=baselink,
                               tabletitle="Items to remove", items=items,
                               breadcrumbs=breadcrumbs)
    else:
        form = htmlform.HtmlForm('cleanup', submittext="Radera",
                                 buttonType="btn-danger",
                                 descriptionText=u"Cleanup of bad TroopBadge records")
        form.AddField('commit', 'on', u'Commit to database', 'checkbox', False)
        return render_template('form.html', heading=heading, baselink=baselink,
                               form=str(form), breadcrumbs=breadcrumbs)
def delete(self, committer_id, commit_message, force_deletion=False):
    """Deletes this model instance.

    Args:
        committer_id: str. The user_id of the user who committed the change.
        commit_message: str.
        force_deletion: bool. If True this model is deleted
            completely from storage, otherwise it is only marked as deleted.
            Default is False.

    Raises:
        Exception: This model instance has been already deleted.
    """
    if force_deletion:
        current_version = self.version
        # Snapshot ids exist for versions 1..current_version inclusive.
        version_numbers = [str(num + 1) for num in range(current_version)]
        snapshot_ids = [
            self._get_snapshot_id(self.id, version_number)
            for version_number in version_numbers]
        metadata_keys = [
            ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id)
            for snapshot_id in snapshot_ids]
        ndb.delete_multi(metadata_keys)
        content_keys = [
            ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id)
            for snapshot_id in snapshot_ids]
        ndb.delete_multi(content_keys)
        super(VersionedModel, self).delete()
    else:
        self._require_not_marked_deleted()  # pylint: disable=protected-access
        # Soft delete: record the deletion as an autogenerated commit.
        self.deleted = True
        commit_cmds = [{
            'cmd': '%s_mark_deleted' % self._AUTOGENERATED_PREFIX
        }]
        self._trusted_commit(
            committer_id, self._COMMIT_TYPE_DELETE, commit_message,
            commit_cmds)
def get(self):
    """Ping every webhook client and delete the ones that fail to respond."""
    from helpers.tbans_helper import TBANSHelper
    webhooks = MobileClient.query(
        MobileClient.client_type == ClientType.WEBHOOK).fetch()
    broken = [client.key for client in webhooks
              if not TBANSHelper.ping(client)]
    count = len(broken)
    if broken:
        ndb.delete_multi(broken)
        logging.info("Deleted {} broken webhooks".format(count))
    template_values = {'count': count}
    path = os.path.join(os.path.dirname(__file__),
                        '../../templates/admin/webhooks_clear_do.html')
    self.response.out.write(template.render(path, template_values))
def removeSubscriber(email):
    """Remove the Subscriber entity matching the given email.

    Returns:
        True when a subscriber was found and deleted, False otherwise
        (bad email or query construction failure).
    """
    try:
        q = Subscriber.query(ancestor=ndb.Key('Subscribers', 'Bball'),
                             default_options=ndb.QueryOptions(keys_only=True))
    except Exception:
        # Narrowed from a bare except; failure still means "not removed".
        return False
    # Sanitize
    name, email = emailParser(email)
    if email is None:
        return False
    # Fetch once instead of count() followed by a second delete RPC.
    keys = q.filter(Subscriber.email == email).fetch()
    if keys:
        ndb.delete_multi(keys)
        return True
    return False
def delete(self, **kwargs):
    """Delete a user and every list the user authored.

    Responds 406 unless the client accepts JSON, 404 when the id is
    missing or refers to no user.
    """
    if 'application/json' not in self.request.accept:
        self.response.status = 406
        self.response.status_message = "Not Acceptable, API only supports application/json MIME type"
        return
    if 'id' not in kwargs:
        self.response.status = 404
        self.response.status_message = "Error, user not found"
        return
    out = ndb.Key(db_models.User, int(kwargs['id'])).get()
    if out is None:
        # Fix: the original dereferenced out.key without checking the
        # entity exists, raising AttributeError for unknown ids.
        self.response.status = 404
        self.response.status_message = "Error, user not found"
        return
    self.response.write("User exists")
    # Delete all lists that user authored.  Fix: the original queried the
    # User kind with a List property filter, which can never match the
    # intent; the List kind must be queried instead.
    userLists = db_models.List.query(
        db_models.List.author == out.key).fetch(keys_only=True)
    ndb.delete_multi(userLists)
    # delete user
    out.key.delete()
    self.response.write("User deleted")
def delete(self, key):
    """Delete one alert by hash, or every alert when key == 'all'."""
    if not is_googler():
        self.response.set_status(403, 'Permission Denied')
        return
    if key == 'all':
        all_keys = TSAlertsJSON.query().fetch(keys_only=True)
        ndb.delete_multi(all_keys)
        # Evict each alert's cached copy as well.
        for alert_key in all_keys:
            logging.info('deleting key from memcache: ' + alert_key.id())
            memcache.delete(alert_key.id())
        self.response.set_status(200, 'Cleared all alerts')
        return
    changed_alert = TSAlertsJSON.query_hash(key)
    if not changed_alert:
        self.response.write('This alert does not exist.')
        self.response.set_status(404, 'Alert does not exist')
        return
    memcache.delete(key)
    changed_alert.key.delete()
def delete_kilpailu(kilpailu_id): try: sarjat = getSarjat(kilpailu_id) joukkueet = [] for s in sarjat: joukkue = [] try: joukkue = [x for x in getJoukkueet(s.key.urlsafe())] except Exception as ex: print ex.message.encode("utf-8") for j in joukkue: joukkueet.append(j.key) s.key.delete() ndb.delete_multi(joukkueet) getKilpailu(kilpailu_id).key.delete() return True except Exception as e: print e.message.encode("utf-8") return False
def get(self):
    """Remove MobileClient rows that share a messaging_id, keeping the
    first (oldest updated) entry of each group."""
    clients = MobileClient.query().fetch()
    # Sort so duplicates are adjacent, oldest-updated first per id.
    clients = sorted(clients, key=lambda x: (x.messaging_id, x.updated))
    last = None  # the original assigned this twice; once suffices
    to_remove = []
    for client in clients:
        if last is not None and client.messaging_id == last.messaging_id:
            logging.info("Removing")
            to_remove.append(client.key)
        last = client
    count = len(to_remove)
    if to_remove:
        ndb.delete_multi(to_remove)
        logging.info("Removed {} duplicate mobile clients".format(count))
    template_values = {'count': count}
    path = os.path.join(os.path.dirname(__file__),
                        '../../templates/admin/mobile_clear_do.html')
    self.response.out.write(template.render(path, template_values))
def create_database():
    """Creates a new database of Picture entities."""
    # Delete all existing entries on Datastore.
    ndb.delete_multi(Picture.query().iter(keys_only=True))
    ndb.delete_multi(RecordCount.query().iter(keys_only=True))
    # Load curated list of URLs on the server.  'with' closes the file
    # handle the original leaked.
    # NOTE(review): pickle.load is only acceptable because raw_list.p is a
    # trusted local file -- never unpickle untrusted data.
    with open('raw_list.p', 'r') as raw_file:
        urls = pickle.load(raw_file)
    record_count = RecordCount(count=len(urls))
    record_count.put()
    # enumerate(start=1) replaces the original's manual counter variable.
    picture_list = [Picture(id=counter, link=url)
                    for counter, url in enumerate(urls, start=1)]
    ndb.put_multi(picture_list)
def delete_multi(keys):
    """Deletes models corresponding to a sequence of keys.

    Args:
        keys: list(ndb.Key). A list of datastore keys for the models to
            delete. (ndb.delete_multi takes Key objects, not strings.)

    Returns:
        list(None). A list of Nones, one per deleted model.
    """
    return ndb.delete_multi(keys)
def get(self):
    """Render the all-resources page; also handles resource deletion."""
    if users.get_current_user():
        url = users.create_logout_url(self.request.uri)
        url_linktext = 'Logout'
    else:
        url = users.create_login_url(self.request.uri)
        url_linktext = 'Login'
    # NOTE(review): this redirect runs unconditionally, even for users who
    # are already signed in -- confirm that is intended.
    self.redirect(users.create_login_url(self.request.uri))
    user = users.get_current_user()
    # delete the resource and its reservations when click the 'delete' button
    # the delete button is only present when the user is the creator
    deleteResourceStr = self.request.get('delResourceID')
    if deleteResourceStr != '':
        deleteResourceKey = ndb.Key(urlsafe=deleteResourceStr)
        deleteReservationKeys = Reservation.query(
            ancestor=deleteResourceKey).fetch(keys_only=True)
        ndb.delete_multi(deleteReservationKeys)
        deleteResourceKey.delete()
    # query all resources and order the by the lastReserveDate
    # NOTE(review): 'if Resource:' and 'if Resource.lastReserveDate:' test
    # the model class and its Property object, which are always truthy, so
    # the first branch always runs -- confirm this matches the intent.
    resources = None
    if Resource:
        if Resource.lastReserveDate:
            resource_query = Resource.query().order(-Resource.lastReserveDate)
            resources = resource_query.fetch()
        elif Resource.modDate:
            resource_query = Resource.query().order(-Resource.modDate)
            resources = resource_query.fetch()
    # Truncate 'now' to whole seconds via a strftime/strptime round trip.
    nowStr = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    now = datetime.strptime(nowStr, '%Y-%m-%d %H:%M:%S')
    template_values = {
        'resources': resources,
        'user': user,
        'url': url,
        'url_linktext': url_linktext,
        'now': now
    }
    template = JINJA_ENVIRONMENT.get_template('allResources.html')
    self.response.write(template.render(template_values))
def get(self):
    """OAuth2 redirect handler: validate the state parameter, then
    exchange the authorization code for an access token and store it."""
    # Get the state variable from url returned by Google
    returned_state = self.request.get('state')
    # Get the stored state variable
    state_var = OauthVar.query().get()
    # If the variables match, continue (anti-CSRF check)
    if returned_state == state_var.state_var:
        # Create parameters to send in POST request
        form_fields = {
            'code': self.request.get('code'),
            'client_id': '518379713624-146hkku1jvqvti9o3vb3m733lav400bu.apps.googleusercontent.com',
            'client_secret': 'Ln2qFdruqZ5hbujJh6ykn2KC',
            'redirect_uri': 'https://final-project-161802.appspot.com/oauth',
            'grant_type': 'authorization_code',
        }
        # Delete any stray authentication tokens that may be stored
        all_tokens = AuthToken.query().fetch(keys_only=True)
        ndb.delete_multi(all_tokens)
        # POST request that exchanges the access code for a token
        try:
            form_data = urllib.urlencode(form_fields)
            headers = {'Content-Type': 'application/x-www-form-urlencoded'}
            result = urlfetch.fetch(
                url='https://www.googleapis.com/oauth2/v4/token',
                payload=form_data,
                method=urlfetch.POST,
                headers=headers)
        except urlfetch.Error:
            logging.exception('Caught exception fetching url')
            # Fix: bail out here -- the original fell through and raised
            # NameError below because 'result' was never assigned.
            return
        # Store and obtain token
        token_results = json.loads(result.content)
        auth_token = "Bearer " + token_results.get('access_token')
        self.response.write("Obtained token: ")
        self.response.write(auth_token)
        # Store token in dict
        auth_tok = AuthToken(auth_token=auth_token)
        auth_tok.put()
def get(self):
    """Purge up to 50 expired 15-minute device-stats rows; report JSON status."""
    cutoff = datetime.datetime.now() - datetime.timedelta(
        days=settings.ACTIVE_DEVICES_HISTORY_KEEP_DAYS)
    keys = RecentlyActiveDevicesStats15Minutes.query_keys_by_created(cutoff)
    output = {}
    try:
        if keys:
            # Delete at most 50 per request to keep the handler fast.
            ndb.delete_multi(keys=keys[:50])
            output['success'] = True
        else:
            output['success'] = False
            output['error'] = "No more to purge"
    except Exception as e:
        output['success'] = False
        output['error'] = e.message
    output['total'] = RecentlyActiveDevicesStats15Minutes.query().count()
    self.response.write(json.dumps(output))
def initialise():
    """Empty the ExampleMap kind, retrying up to 10 passes.

    Returns:
        dict mapping 'ExampleMap' to the number of keys deleted.
    """
    # NOTE(review): this rebinds a *local* name only; if the intent was to
    # reset a module-level cache it needs 'global EXAMPLESMAPCACHE' -- confirm.
    EXAMPLESMAPCACHE = []
    log.info("[%s]ExampleMap initialising Data Store" % (getInstanceId(short=True)))
    loops = 0
    ret = 0
    while loops < 10:
        # Bypass ndb caches so we observe the datastore's real contents.
        keys = ExampleMap.query().fetch(keys_only=True, use_memcache=False, use_cache=False)
        count = len(keys)
        if count == 0:
            break
        log.info("[%s]ExampleMap deleting %s keys" % (getInstanceId(short=True), count))
        ndb.delete_multi(keys, use_memcache=False, use_cache=False)
        ret += count
        loops += 1
        # Brief pause between passes to let deletes settle.
        time.sleep(0.01)
    return {"ExampleMap": ret}
def get_plan_for_member_key_for_gig_key(the_member_key, the_gig_key, keys_only=False):
    """Return the single Plan for this member on this gig.

    With keys_only=True the return value is a Key rather than a Plan
    entity.  If duplicates exist they are deleted (keeping the first);
    if none exist a new plan is created.
    """
    plan_query = Plan.lquery(Plan.member == the_member_key, ancestor=the_gig_key)
    plans = plan_query.fetch(keys_only=keys_only)
    if len(plans) > 1:
        logging.error("gig/member with multiple plans! gk={0} mk={1}".format(
            the_gig_key.urlsafe(), the_member_key.urlsafe()))
        # return None  # todo what to do if there's more than one plan
        # more than one plan! Just delete the others - not sure how they got here
        the_plan = plans[0]
        if keys_only:
            # plans already holds Key objects in keys_only mode.
            delplan_keys = plans[1:]
        else:
            delplan_keys = [p.key for p in plans[1:]]
        ndb.delete_multi(delplan_keys)
        return the_plan
    if len(plans) > 0:
        return plans[0]
    else:
        # no plan? make a new one
        return new_plan(the_gig_key, the_member_key, 0)
def delete(self, user=None):
    """Delete this page: blank its content, clear metadata, remove all
    stored revisions, and invalidate the cached title list.

    Args:
        user: the requesting user; must satisfy is_admin_user().

    Raises:
        RuntimeError: if `user` is not an admin.
    """
    if not is_admin_user(user):
        raise RuntimeError('Only admin can delete pages.')
    # Blank the content first so link indexes are rebuilt against an empty
    # body; dont_create_rev/dont_defer keep this synchronous and rev-free.
    self.update_content('', self.revision, user=user, dont_create_rev=True, dont_defer=True)
    # Drop this page's inlinks.
    # NOTE(review): paths[:-1] appears to exclude this page's own path entry
    # when rebuilding 'relatedTo' -- confirm the shape of self.paths.
    self._update_inlinks({}, {'relatedTo': [p[0] for p in self.paths[:-1]]})
    self.related_links = {}
    self.modifier = None
    self.updated_at = None
    self.revision = 0
    self.put()
    # Remove every stored revision entity belonging to this page.
    ndb.delete_multi(r.key for r in self.revisions)
    caching.del_titles()
def post(self):
    """Admin task: delete Match entities of an event whose datastore id has
    drifted from their computed key_name, then render a summary page."""
    self._require_admin()
    event = Event.get_by_id(self.request.get("event_key_name"))
    matches_to_delete = list()
    match_keys_to_delete = list()
    if event is not None:
        for match in Match.query(Match.event == event.key):
            # A mismatch means the entity is stored under a stale id.
            if match.key.id() != match.key_name:
                matches_to_delete.append(match)
                match_keys_to_delete.append(match.key_name)
        # BUG FIX: ndb.delete_multi() takes ndb.Key objects, not entities;
        # the original passed the Match entities themselves.
        ndb.delete_multi([match.key for match in matches_to_delete])
        self.template_values.update({
            "match_keys_deleted": match_keys_to_delete,
            "tried_delete": True
        })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/matches_cleanup.html')
    self.response.out.write(template.render(path, self.template_values))
def get(self):
    """Admin task: delete Subscription entities whose model_key refers to
    last year's events, team@event pairs, or firehose feeds."""
    year = date.today().year - 1
    # Compile key regex
    # Matches event (2014ctgro), team@event (frc2014_2014ctgro), firehose (2014*)
    # Raw string so '\*' is a regex escape, not an (invalid) string escape.
    # NOTE(review): with p.match() each alternative is anchored at position 0,
    # so the '_{year}[a-z]+' branch only matches keys that literally begin
    # with '_' -- keys like 'frc177_2014ctgro' are NOT matched. Confirm
    # whether p.search() was intended for the team@event form.
    ps = r"^{}[a-z]+|_{}[a-z]+|{}\*$".format(year, year, year)
    logging.info("Pattern: {}".format(ps))
    p = re.compile(ps)
    subs = Subscription.query().fetch()
    to_delete = [sub.key for sub in subs if p.match(sub.model_key)]
    count = len(to_delete)
    if to_delete:
        ndb.delete_multi(to_delete)
        logging.info("Removed {} old subscriptions".format(count))
    template_values = {'count': count}
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/subs_clear_do.html')
    self.response.out.write(template.render(path, template_values))
def get(self):
    """Admin-only handler: batch-delete up to 5000 MailFeedItem entities
    older than 180 days and report how many were found."""
    logging.info('BulkDeleteMailItems')
    if not users.is_current_user_admin():
        self.error(401)
        return
    logging.info('Passed Auth')
    # Anything created more than 180 days ago is eligible for deletion.
    cutoff = datetime.datetime.now() - datetime.timedelta(days=180)
    stale = MailFeedItem.query().filter(
        MailFeedItem.created < cutoff).fetch(5000, keys_only=True)
    self.response.out.write('<p>%d items</p>' % len(stale))
    ndb.delete_multi(stale)
    self.response.out.write('<p><button onClick="history.back()">'
                            'DONE</button></p>')
def cancel_game(self, request):
    """Cancels an active game.

    Removes the game entity together with every Piece and Miss that
    belongs to it.

    Args:
        request: The GAME_REQUEST object.
    Returns:
        StringMessage: A message that is sent to the client, saying that
        the game has been canceled.
    Raises:
        endpoints.ConflictException: If the game is already over.
        endpoints.BadRequestException: if url safe game key is invalid.
    """
    game = get_by_urlsafe(request.url_safe_game_key, Game)
    check_game_not_over(game)
    # Batch-delete each kind of child record, then the game itself.
    for model in (Piece, Miss):
        child_keys = model.query(model.game == game.key).fetch(keys_only=True)
        ndb.delete_multi(child_keys)
    game.key.delete()
    return StringMessage(message="Game deleted")
def clean_db(self):
    """Delete every Transacao/Conta/Dispositivo test instance whose id
    starts with self.PREFIX, paging through results MAX_PER_BATCH at a time.
    (Python 2 code: `print` statements, `unicode` builtin.)"""
    MAX_PER_BATCH = 300
    entities = [Transacao, Conta, Dispositivo]
    for entity in entities:
        print "Deleting all tale related instances of ", entity.__name__
        cursor = ndb.Cursor()
        has_more = True
        while has_more:
            # The key-range filter narrows the scan to keys at/after the
            # prefix; the startswith() check below does the exact match.
            instances, cursor, has_more = entity.query(
                entity.key >= ndb.Key(entity, self.PREFIX)).fetch_page(
                MAX_PER_BATCH, start_cursor=cursor)
            next_batch = [
                instance.key for instance in instances
                if unicode(instance.id).startswith(self.PREFIX)
            ]
            if next_batch:
                ndb.delete_multi(next_batch)
            else:
                # NOTE(review): breaking on the first page without a matching
                # id assumes prefixed keys are contiguous in key order; a
                # later page could in principle still contain matches --
                # confirm this is the intended trade-off.
                break
    print "Done deleting all tale related instances"
def run_analysis(org_key=None):
    """
    Task that deletes all existing CombinedUsers and regenerates them from
    the underlying IntakeUsers. This task can take a long time to run, so
    it should be run on a dedicated instance.

    Args:
        org_key: optional urlsafe key string of an Organization; when given,
            only that organization's CombinedUsers are rebuilt.

    Returns:
        A status string.
    """
    DELETE_BATCH = 500
    ANALYZE_BATCH = 50  # NOTE(review): defined but never used below -- confirm
    # Clear out the existing combined users
    cu_query = CombinedUser.query()
    if org_key:
        org_key = ndb.Key(urlsafe=org_key)
        cu_query = cu_query.filter(CombinedUser.orgs == org_key)
    while True:
        # The cursor is deliberately not threaded back in: deleted entities
        # drop out of the result set, so each pass re-fetches from the start.
        results, cursor, more = cu_query.fetch_page(DELETE_BATCH, keys_only=True)
        ndb.delete_multi(results)
        if not more:
            break
    # Analyze all the intake users, on a per-organization basis
    if org_key:
        org_keys = [org_key]
    else:
        org_keys = Organization.query().iter(keys_only=True)
    for org_key in org_keys:
        # Per-org memcache counter tracking how many users were analyzed.
        counter_key = "analysis::run_count::%s" % org_key.urlsafe()
        memcache.set(key=counter_key, value=0)
        iu_query = IntakeUser.query(IntakeUser.org == org_key)
        for iu_key in iu_query.iter(keys_only=True):
            memcache.incr(counter_key)
            # Analysis runs inline; the deferred variant is kept for reference.
            #deferred.defer(analyze_user, intake_user_key=iu_key)
            analyze_user(iu_key)
        generate_csv(org_key.get().name)
    return "Great Success"
def gds_empty():
    """Remove all stored inputs, parameters and results from the datastore."""
    # Fixed-name singleton entities, removed in a single batch RPC.
    keys = [ndb.Key(kind, name) for kind, name in (
        ('Input', 'Params'),
        ('Parameters', 'Values'),
        ('Results', 'hist'),
        ('Results', 'cov'),
    )]
    # Delete multiple entities at once
    ndb.delete_multi(keys)
    # Delete MC results by query, to allow for gaps in key numbers.
    # keys_only + delete_multi avoids fetching full entities and issues one
    # batch delete per method instead of one RPC per entity.
    for method in ('lambda', 'EC2'):
        result_keys = Results.query(Results.method == method).fetch(keys_only=True)
        if result_keys:
            ndb.delete_multi(result_keys)
    return
def test_add_many_birthdays(self):
    """add_many_birthdays: an empty list is a no-op; valid dates (full
    YYYY-MM-DD, year-less --MM-DD, MM-DD) create users with the expected
    fields; invalid input creates no user; created users are cleaned up."""
    # Empty input must create no users.
    User.add_many_birthdays([])
    self.assertListEqual([], User.query().fetch(), 'List empty. No users added')
    # NOTE(review): the email literals below appear redacted
    # ('*****@*****.**') and are all identical, which would make the
    # per-user queries further down ambiguous; presumably the real test
    # data uses distinct addresses -- confirm against the original source.
    User.add_many_birthdays([{
        'email': '*****@*****.**',
        'birthday': '1985-03-01'
    }, {
        'email': '*****@*****.**',
        'birthday': '--02-02'
    }, {
        'email': '*****@*****.**',
        'birthday': '09-05'
    }, {
        'email': '*****@*****.**',
        'birthday': 'blablabla'
    }])
    #Check users where created and dates are stored OK
    # Full date: all three components stored.
    user1 = User.query(User.email == '*****@*****.**').get()
    self.assertEqual(user1.birth_year, 1985, 'Year not the same')
    self.assertEqual(user1.birth_month, 3, 'Month not the same')
    self.assertEqual(user1.birth_day, 1, 'Day not the same')
    # '--MM-DD' form carries no year (xsd:gMonthDay style).
    user2 = User.query(User.email == '*****@*****.**').get()
    self.assertIsNone(user2.birth_year, 'Year not set')
    self.assertEqual(user2.birth_month, 2, 'Month not the same')
    self.assertEqual(user2.birth_day, 2, 'Day not the same')
    # Bare 'MM-DD' form also carries no year.
    user3 = User.query(User.email == '*****@*****.**').get()
    self.assertIsNone(user3.birth_year, 'Year not set')
    self.assertEqual(user3.birth_month, 9, 'Month not the same')
    self.assertEqual(user3.birth_day, 5, 'Day not the same')
    #Check user was not created
    self.assertIsNone(
        User.query(User.email == 'invalid@eforcers,com').get(),
        'Invalid user was not supposed to be created')
    #Clean yourself
    ndb.delete_multi([user1.key, user2.key, user3.key])
def get(self):
    """Ping every registered webhook client; delete the ones whose ping
    could not be delivered, then render a summary page."""
    clients = MobileClient.query(
        MobileClient.client_type == ClientType.WEBHOOK).fetch()
    ping = PingNotification()._render_webhook()
    # Collect the keys of clients that failed to receive the ping.
    # (Renamed from the misleading `key` -- each element is an entity.)
    failures = [
        client.key for client in clients
        if not NotificationSender.send_webhook(
            ping, [(client.messaging_id, client.secret)])
    ]
    count = len(failures)
    if failures:
        ndb.delete_multi(failures)
        logging.info("Deleted {} broken webhooks".format(count))
    template_values = {'count': count}
    path = os.path.join(os.path.dirname(__file__),
                        '../../templates/admin/webhooks_clear_do.html')
    self.response.out.write(template.render(path, template_values))
def FetchAndDelete(self, query):
    """Fetches keys from query in batches; deletes the corresponding entities.

    Stops early (returning the partial count) if the request deadline is
    exceeded mid-run.

    Args:
      query: A ndb.Query object. All results will be deleted.

    Returns:
      A count of the number of entities that have been deleted.
    """
    deleted = 0
    cursor = None
    has_more = True
    try:
        while has_more:
            batch, cursor, has_more = query.fetch_page(
                100, start_cursor=cursor, keys_only=True)
            ndb.delete_multi(batch)
            deleted += len(batch)
    except runtime.DeadlineExceededError:
        # Out of time: report what we managed to delete so far.
        pass
    return deleted
def refresh(cls, duration=5):
    """Rebuild the Leaderboard from views recorded in the last `duration`
    minutes, ranked by view count in descending order. Streams whose
    referenced entity no longer exists are skipped."""
    duration = int(duration)
    cutoff = datetime.datetime.now() - datetime.timedelta(minutes=duration)
    recent_views = View.query(View.date > cutoff).fetch()

    # Tally view counts per stream id.
    counts = {}
    for view in recent_views:
        counts[view.stream_id] = counts.get(view.stream_id, 0) + 1

    ranked = sorted(counts.items(), key=lambda item: item[1], reverse=True)

    # Wipe the old leaderboard, then write fresh entries for streams that
    # still resolve to a live entity.
    ndb.delete_multi(Leaderboard.query().fetch(keys_only=True))
    for stream_id, views in ranked:
        if stream_id.get():
            Leaderboard(stream_id=stream_id, view_count=views,
                        interval=duration).put()
def testRowsAlreadyDeferred(self, mock_exists, mock_reload, mock_insert_data): mock_exists.return_value = True # Create some test rows, but then delete some of them. This simulates the # scenario where some entities have been queued for streaming in a # just-completed dispatch task, but weren't processed and deleted until # after we've already gathered up their Keys again in a new dispatch task. total_rows = 20 repeat_rows = 7 entities = test_utils.RandomDatastoreEntities(bq_models.ExecutionRow, total_rows) keys = ndb.put_multi(entities) ndb.delete_multi(keys[:repeat_rows]) export._StreamPage(constants.GAE_STREAMING_TABLES.EXECUTION, keys) expected_inserts = total_rows - repeat_rows actual_inserts = len(mock_insert_data.call_args_list[0][0][1]) self.assertEqual(expected_inserts, actual_inserts)