def rate():
    """Rate a track or folder identified by the 'id' request parameter.

    A rating of 0 removes the current user's existing rating; 1-5 creates
    or updates it. Raises GenericError for an out-of-range rating and
    NotFound if the id matches neither a Track nor a Folder.
    """
    eid = request.values['id']  # renamed from 'id': don't shadow the builtin
    rating = request.values['rating']
    uid = uuid.UUID(eid)
    rating = int(rating)

    if not 0 <= rating <= 5:
        raise GenericError('rating must be between 0 and 5 (inclusive)')

    if rating == 0:
        # Zero means "remove my rating"; clear both kinds since we don't
        # know which one the id refers to.
        delete(r for r in RatingTrack if r.user.id == request.user.id and r.rated.id == uid)
        delete(r for r in RatingFolder if r.user.id == request.user.id and r.rated.id == uid)
    else:
        # Resolve the entity: try Track first, then Folder.
        try:
            rated = Track[uid]
            rating_cls = RatingTrack
        except ObjectNotFound:
            try:
                rated = Folder[uid]
                rating_cls = RatingFolder
            except ObjectNotFound:
                raise NotFound('Track or Folder')

        # Update the existing rating row, or create one.
        try:
            rating_info = rating_cls[request.user, uid]
            rating_info.rating = rating
        except ObjectNotFound:
            rating_cls(user=request.user, rated=rated, rating=rating)

    return request.formatter.empty
def rate():
    """Rate a track or folder identified by the 'id' request parameter.

    A rating of 0 removes the current user's existing rating; 1-5 creates
    or updates it. Raises GenericError for an out-of-range rating and
    NotFound if the id matches neither a Track nor a Folder.
    """
    eid = request.values['id']  # renamed from 'id': don't shadow the builtin
    rating = request.values['rating']
    uid = uuid.UUID(eid)
    rating = int(rating)

    if not 0 <= rating <= 5:
        raise GenericError('rating must be between 0 and 5 (inclusive)')

    if rating == 0:
        # Zero means "remove my rating"; clear both kinds since we don't
        # know which one the id refers to.
        delete(r for r in RatingTrack if r.user.id == request.user.id and r.rated.id == uid)
        delete(r for r in RatingFolder if r.user.id == request.user.id and r.rated.id == uid)
    else:
        # Resolve the entity: try Track first, then Folder.
        try:
            rated = Track[uid]
            rating_cls = RatingTrack
        except ObjectNotFound:
            try:
                rated = Folder[uid]
                rating_cls = RatingFolder
            except ObjectNotFound:
                raise NotFound('Track or Folder')

        # Update the existing rating row, or create one.
        try:
            rating_info = rating_cls[request.user, uid]
            rating_info.rating = rating
        except ObjectNotFound:
            rating_cls(user=request.user, rated=rated, rating=rating)

    return request.formatter.empty
def process_notification():
    """Pop the oldest pending WNS notification and try to deliver it.

    Returns True when there is nothing queued or the post succeeded in a
    terminal way, False when delivery should be retried later (missing
    access token, auth failure, transient server error).
    """
    with orm.db_session:
        next_pending = (
            PendingNotification.select().order_by(lambda p: p.issued).first())
        if next_pending is not None:
            logging.debug("Processing pending notification %s", next_pending)
            message = next_pending.message
            subscriber = next_pending.subscriber
            app = message.app
            # Most recently issued access token for this app, if any.
            token = (WnsAccessToken.select(lambda t: t.app == app).order_by(
                orm.desc(lambda t: t.issued)).first())
            if token is None:
                # No token cached: fetch one and retry this notification later.
                _refresh_access_token(app)
                return False
            headers = {
                'Content-Type': message.content_type,
                'X-WNS-Type': message.wns_type,
                'Authorization': 'Bearer {}'.format(token.content),
            }
            data = message.content
            result = _do_post_request(subscriber.channel_url, data=data,
                                      headers=headers)
            logging.debug("Got response for posting notification: %s",
                          (result.status_code, result.text, result.headers))
            is_ok = False
            purge = False
            if result.status_code == 401:
                # Token rejected: refresh it; the notification stays queued.
                _refresh_access_token(app)
            elif result.status_code == 410:
                # Channel expired
                purge = True
            elif result.status_code == 403:
                # Channel associated with wrong app
                logging.info("Channel associated with wrong app: {}".format(
                    subscriber.channel_url))
                purge = True
            elif result.status_code == 404:
                # Invalid channel
                logging.info("Invalid channel URL: {}".format(
                    subscriber.channel_url))
                purge = True
            elif result.status_code == 200:
                # Delivered: remove from the queue.
                next_pending.delete()
                is_ok = True
            elif 500 <= result.status_code < 600:
                # Error in Microsoft Store, try again later
                pass
            else:
                logging.error(
                    "Unrecognized result from WNS: %s",
                    (result.status_code, result.text, result.headers))
                next_pending.delete()
            if purge:
                # Dead channel: drop all of its queued notifications and the
                # subscriber record itself.
                orm.delete(p for p in PendingNotification
                           if p.subscriber == subscriber)
                subscriber.delete()
            return is_ok
        else:
            # Nothing pending.
            return True
def pytest_runtest_teardown(item, nextitem):
    """Pony teardown hook: reset database state according to the ``pony`` marker."""
    marker = item.get_closest_marker("pony")
    # import test db
    db = _ponydb(item)
    provider = db.provider.dialect
    if marker:
        if not marker.kwargs.get("db_session", True):
            orm.db_session.__enter__()
        # delete all entries from db at end of test
        # unless @pytest.mark.pony(reset_db=False) is specified
        # NOTE(review): this repeats the guard above and would enter
        # db_session a second time — looks like a copy/paste duplicate;
        # confirm whether the double __enter__ is intended.
        if not marker.kwargs.get("db_session", True):
            orm.db_session.__enter__()
        if marker.kwargs.get("reset_db", True):
            orm.rollback(
            )  # clear possible uncommitted things before delete so the base is Ok. Not good with
            # commit
            for entity in db.entities.values():
                orm.delete(e for e in entity)
            # reset sequence : postgres support
            if provider == "PostgreSQL":
                _pg_reset_sequences(db)
        # delete or not the db_session is closed
        orm.db_session.__exit__()
def setUp(self, no_db=False):
    """Prepare an isolated app, config, and (optionally) database for a test.

    :param no_db: when True, skip resetting database content.
    """
    super().setUp()
    # Reuse the App instance if an earlier setUp already created one.
    if not hasattr(self, "app"):
        self.app = App()
    # Remove any stale on-disk config so each run starts clean.
    Path(self.app.get_application_config()).unlink(missing_ok=True)
    if not self.app.config:
        self.app.config = ConfigParser()
        self.app.build_config(self.app.config)
    config = self.app.config
    for section, values in DEFAULT_SETTINGS.items():
        config.setdefaults(section, values)
    if not no_db:
        init_update_matiere(db, reset=True)
        with db_session:
            # Clear every entity except Matiere — presumably because
            # init_update_matiere just repopulated it; confirm.
            for entity in db.entities.values():
                if entity.__name__ != "Matiere":
                    delete(e for e in entity)
    self.T = TempFile()
    EventLoop.ensure_window()
    self.window = EventLoop.window
    if self.TIMER:
        self.debut_time = time.time()
def upgrade_bw_accounting_db_8to9(self):
    """
    Upgrade the database with bandwidth accounting information from 8 to 9.

    This wipes all transactions and bandwidth history to address an issue
    where payouts with the wrong amount were made.
    Also see https://github.com/Tribler/tribler/issues/5789.
    """
    to_version = 9
    database_path = self.session.config.get_state_dir() / 'sqlite' / 'bandwidth.db'
    # Nothing to do when the database is absent or already at version >= 9.
    if not database_path.exists() or get_db_version(database_path) >= 9:
        return
    bw_db = BandwidthDatabase(database_path, self.session.trustchain_keypair.key.pk)
    with db_session:
        # Wipe all transactions and bandwidth history.
        delete(entry for entry in bw_db.BandwidthTransaction)
        delete(entry for entry in bw_db.BandwidthHistory)
        # Record the new schema version.
        version_row = bw_db.MiscData.get(name="db_version")
        version_row.value = str(to_version)
    bw_db.shutdown()
def update_project(self, data):
    """Replace a program's process entries with a freshly generated set.

    *data* is a JSON object holding 'program', template fields, and
    'process_name'/'command'/'numprocess'/'port'. Returns a JSON status
    string: {"status": "ok"} on success, {"status": "fail", "msg": ...}
    on any error.
    """
    with db_session:
        try:
            data = json.loads(data)
            program = data.get("program")
            del data["program"]
            # Update the template with the remaining fields, then rebuild
            # the per-process Project rows from scratch.
            Template[program].set(**data)
            delete(p for p in Project if p.program == program)
            process_name = data.get("process_name")
            command = data.get("command")
            numprocess = int(data.get("numprocess"))
            port = int(data.get("port"))
            for i in range(numprocess):
                Project(program=program,
                        process_name=process_name.format(port=i),
                        command=command.format(port=i + port),
                        port=i + port)
            return json.dumps(dict(status="ok", msg=""))
        except Exception as e:
            # Was 'except Exception, e' (Python-2-only syntax) and
            # 'e.message' (removed in Python 3); 'as e' + str(e) work on both.
            return json.dumps(dict(status="fail", msg=str(e)))
def __exit__(self, *exc_info):
    """On context exit, wipe every mapped entity except Migration."""
    db = self.test.db
    for model_cls in db.entities.values():
        # No generated schema means no tables exist for any entity; stop.
        if model_cls._database_.schema is None:
            break
        if model_cls.__name__ == 'Migration':
            continue
        delete(row for row in model_cls)
def save_policy(self, model):
    """Persist the whole policy model, replacing all stored rules.

    Clears every existing rule row, then writes each policy line from the
    'p' (policy) and 'g' (grouping) sections via _save_policy_line.
    """
    delete(cr for cr in self.rule_cls)
    for sec in ["p", "g"]:
        # '.keys()' was redundant — 'in' on a dict tests keys directly.
        if sec not in model.model:
            continue
        for ptype, ast in model.model[sec].items():
            for rule in ast.policy:
                self._save_policy_line(ptype, rule)
def remove_alias(path):
    """Delete any alias entry registered for a path.

    Arguments:

    path -- the path to remove the alias of
    """
    orm.delete(alias for alias in model.PathAlias if alias.path == path)
    orm.commit()
def save_selected_words(word_value, selected_data):
    """Replace the stored similarity links for *word_value* with *selected_data*.

    *selected_data* is an iterable of (other_word_value, similarity) pairs.
    """
    # Drop every similarity currently attached to the subject word.
    delete(link for entry in Word for link in entry.similar_to
           if entry.value == word_value)
    subject = Word.get_or_create(value=word_value)
    links = []
    for other_value, score in selected_data:
        links.append(WordSimilarity.get_or_create(
            value=score,
            similar_word=Word.get_or_create(value=other_value),
            subject_word=subject,
        ))
    subject.similar_to = links
def remove_student_from_list(list_id: int, student_id: int):
    """Remove a student from a list.

    Args:
        list_id: list identifier
        student_id: student identifier
    """
    orm.delete(
        entry
        for entry in models.ListStudents
        if entry.list.id == list_id and entry.student.id == student_id
    )
def expire_record(record):
    """Expire a record for an entry that no longer exists."""
    load_message.cache_clear()
    # The entry is gone, so remove anything that references it. SQLite
    # doesn't support cascading deletes, so clean up the aliases by hand
    # before dropping the record itself.
    orm.delete(alias for alias in model.PathAlias if alias.entry == record)
    record.delete()
    orm.commit()
def unstar_single(cls, starcls, eid):
    """Unstars an entity

    :param cls: entity class, Folder, Artist, Album or Track
    :param starcls: matching starred class, StarredFolder, StarredArtist,
        StarredAlbum or StarredTrack
    :param eid: id of the entity to unstar
    """
    current_uid = request.user.id
    delete(star for star in starcls
           if star.user.id == current_uid and star.starred.id == eid)
def remove_aliases(target):
    """Remove every alias pointing at *target* (an Entry or a Category)."""
    if isinstance(target, model.Entry):
        orm.delete(alias for alias in model.PathAlias if alias.entry == target)
    elif isinstance(target, model.Category):
        orm.delete(alias for alias in model.PathAlias if alias.category == target)
    else:
        raise TypeError("Unknown type {}".format(type(target)))
    orm.commit()
def unstar_single(cls, eid):
    """Unstars an entity

    :param cls: entity class, Folder, Artist, Album or Track
    :param eid: id of the entity to unstar
    """
    uid = uuid.UUID(eid)
    # Resolve the matching Starred* class in this module by name.
    starred_cls = getattr(sys.modules[__name__], 'Starred' + cls.__name__)
    current_uid = request.user.id
    delete(entry for entry in starred_cls
           if entry.user.id == current_uid and entry.starred.id == uid)
    return None
def remove_projects(self, programs):
    """Delete the templates and projects for the given program names.

    *programs* is a JSON-encoded list of program names. Returns a JSON
    status string: {"status": "ok"} on success, {"status": "fail",
    "msg": ...} on any error.
    """
    with db_session:
        try:
            programs = json.loads(programs)
            delete(t for t in Template if t.program in programs)
            delete(p for p in Project if p.program in programs)
            return json.dumps(dict(status="ok", msg=""))
        except Exception as e:
            # Was 'except Exception, e' (Python-2-only syntax) and
            # 'e.message' (removed in Python 3); 'as e' + str(e) work on both.
            return json.dumps(dict(status="fail", msg=str(e)))
def load_metafile(filepath):
    """Load a metadata file from the filesystem.

    Returns the parsed email.message object, or None when the file is
    missing (in which case the matching Category rows are purged).
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as fp:
            return email.message_from_file(fp)
    except FileNotFoundError:
        logger.warning("Category file %s not found", filepath)
        orm.delete(cat for cat in model.Category if cat.file_path == filepath)
        orm.commit()
        return None
def unstar_single(cls, eid):
    """Unstars an entity

    :param cls: entity class, Folder, Artist, Album or Track
    :param eid: id of the entity to unstar
    """
    # Resolve the matching Starred* class in this module by name.
    starred_cls = getattr(sys.modules[__name__], "Starred" + cls.__name__)
    requester = request.user.id
    delete(row for row in starred_cls
           if row.user.id == requester and row.starred.id == eid)
    return None
def custom_delete(self):
    """ Custom image delete. Delete file from OS and image from database """
    try:
        os.remove(self.image_path + os.sep + self.image_name)
    except FileNotFoundError:
        # File already gone — deleting the DB row is still the right thing.
        pass
    # clean images duplicates
    # NOTE(review): the two sides use different access styles —
    # 'image_src_id' (raw FK attribute) vs 'image_dup.id' (relation
    # traversal). Confirm 'image_src_id' is a real column and not a typo
    # for 'image_src.id'.
    delete(duplicate for duplicate in ImageDuplicates
           if duplicate.image_src_id == self.id or duplicate.image_dup.id == self.id)
    self.delete()
def create_test_data():
    """Reset the Flat table and insert a single fixture row."""
    delete(flat for flat in Flat)
    commit()
    Flat(
        id=1,
        city="Москва",
        district="Москва",
        location="Москва",
        price=1,
        ppm=1,
        square=1,
    )
    commit()
def rate(): id = request.values["id"] rating = request.values["rating"] try: tid = get_entity_id(Track, id) except GenericError: tid = None try: fid = get_entity_id(Folder, id) except GenericError: fid = None uid = None rating = int(rating) if tid is None and fid is None: raise GenericError("Invalid ID") if not 0 <= rating <= 5: raise GenericError("rating must be between 0 and 5 (inclusive)") if rating == 0: if tid is not None: delete( r for r in RatingTrack if r.user.id == request.user.id and r.rated.id == tid ) else: delete( r for r in RatingFolder if r.user.id == request.user.id and r.rated.id == fid ) else: if tid is not None: rated = Track[tid] rating_cls = RatingTrack uid = tid else: rated = Folder[fid] rating_cls = RatingFolder uid = fid try: rating_info = rating_cls[request.user, uid] rating_info.rating = rating except ObjectNotFound: rating_cls(user=request.user, rated=rated, rating=rating) return request.formatter.empty
async def _delete_students(ans: bots.SimpleBotEvent):
    """Delete the students whose VK ids were cached in Redis for this chat."""
    raw = await redis.hget(
        "index:{0}".format(ans.object.object.message.peer_id),
        "diff_db_vk",
    )
    vk_ids = raw.split(",")
    with orm.db_session:
        for vk_id in vk_ids:
            orm.delete(
                student for student in models.Student
                if student.vk_id == int(vk_id)
            )
    await ans.answer(
        "{0} студент(ов) удалено".format(len(vk_ids)),
        keyboard=kbs.preferences.configure_chat(ans.payload["chat_id"]),
    )
def create_easy_data():
    """Reset the Flat table and insert four simple fixture rows."""
    delete(row for row in Flat)
    commit()
    for idx in range(4):
        Flat(
            id=idx,
            city="city",
            district="district",
            location="location",
            price=idx,
            ppm=idx,
            square=idx**2,
            geo_lat=1,
            geo_long=1,
        )
    commit()
def doit():
    """CLI entry point: (re)import a word list for one language.

    Reads words (one per line) from --wordsfile and replaces every Word
    row for --language in the sqlite database given by --database.
    """
    parser = ArgumentParser()
    parser.add_argument("-d", "--database", required=True)
    parser.add_argument("-f", "--wordsfile", required=True)
    parser.add_argument("-l", "--language", required=True)
    config = parser.parse_args()
    db.bind("sqlite", filename=config.database, create_db=True)
    db.generate_mapping(create_tables=True)
    with open(config.wordsfile, "r") as wordsfile:
        with db_session:
            # delete all LANG words from database
            delete(w for w in Word if w.lang == config.language)
            # import new wordlist for language
            for line in wordsfile:
                # readlines() previously kept the trailing newline, so every
                # word was stored as 'word\n'; strip it and skip blank lines.
                word = line.strip()
                if word:
                    Word(lang=config.language, word=word)
def purge(cutoff=86400):
    """Purge old entries from the tweet cache.

    Args:
        cutoff (int): Purge cache entries older than `cutoff` seconds.
            (Default is 1-day)

    Returns:
        int: The number of cache entries that have been purged (for logging)
    """
    threshold = int(time.time()) - cutoff
    stale_count = count(entry for entry in TweetCache
                        if entry.created_at <= threshold)
    # Skip issuing a DELETE query entirely when nothing qualifies.
    if stale_count:
        delete(entry for entry in TweetCache if entry.created_at <= threshold)
    return stale_count
def clear_event_data(days_to_keep=30):
    """Remove all logs older than days_to_keep days.

    :param days_to_keep: The number of days to keep; all older log
        entries are removed.
    :return: Returns the number of rows deleted
    """
    threshold = datetime.datetime.now() - datetime.timedelta(days=days_to_keep)
    return delete(entry for entry in EventLog if entry.event_time < threshold)
def _refresh_access_token(app):
    """Drop cached WNS tokens for *app* and request a fresh one.

    Stores the new token only when the token endpoint answers 200.
    """
    orm.delete(t for t in WnsAccessToken if t.app == app)
    logging.info("invalid/missing access token for %s, fetching new",
                 app.app_name)
    payload = {
        "grant_type": "client_credentials",
        "client_id": app.client_id,
        "client_secret": app.client_secret,
        "scope": _config["token_scope"],
    }
    response = _do_post_request(_config["token_url"], data=payload)
    token_json = response.json()
    logging.debug("got access token: %s", response.text)
    if response.status_code == 200:
        WnsAccessToken(app=app,
                       content=token_json["access_token"],
                       issued=datetime.utcnow())
def set_fingerprint(fullpath, fingerprint=None):
    """Set the last known fingerprint and modification time for a file.

    Computes the fingerprint when not supplied. If the file has vanished,
    removes its FileFingerprint row instead.
    """
    try:
        fingerprint = fingerprint or utils.file_fingerprint(fullpath)
        # stat once and reuse — the original called os.stat in both branches.
        mtime = os.stat(fullpath).st_mtime
        record = model.FileFingerprint.get(file_path=fullpath)
        if record:
            record.set(fingerprint=fingerprint, file_mtime=mtime)
        else:
            # The created entity is registered by the ORM session; the
            # original's 'record =' rebinding was dead and is dropped.
            model.FileFingerprint(
                file_path=fullpath,
                fingerprint=fingerprint,
                file_mtime=mtime)
        orm.commit()
    except FileNotFoundError:
        orm.delete(fp for fp in model.FileFingerprint
                   if fp.file_path == fullpath)
def deal_with_data_for_oracle_srxm_del_all_with_where2(org_id, this_year, this_month, this_day):
    """Delete the ScadaReportXNMid rows for one org on one (year, month, day).

    Returns the number of rows deleted. this_year/this_month/this_day are
    expected to be strings — they are concatenated into the log output.
    """
    rowcount = delete(srxm for srxm in ScadaReportXNMid
                      if srxm.YEAR == this_year and srxm.MONTH == this_month
                      and srxm.DAY == this_day and srxm.SRXM_ORG_ID == org_id)
    print(
        '...机构号:', org_id,
        '...Oracle...deal_with_data_for_oracle_srxm_del_all_with_where2...删除ScadaReportXNMid总条数: '
        + str(rowcount) + ' 对应年月日为: ' + this_year + this_month + this_day)
    return rowcount
def convert_discovered_channels(self):
    """Convert 7.2-era discovered channels into ChannelMetadata entries.

    Tracks progress in MiscData so an interrupted conversion can restart
    by dropping the partial entries from the previous attempt.
    """
    # Reflect conversion state
    with db_session:
        v = self.mds.MiscData.get_for_update(
            name=CONVERSION_FROM_72_CHANNELS)
        if v:
            if v.value == CONVERSION_STARTED:
                # Just drop the entries from the previous try
                orm.delete(g for g in self.mds.ChannelMetadata
                           if g.status == LEGACY_ENTRY)
            else:
                v.set(value=CONVERSION_STARTED)
        else:
            self.mds.MiscData(name=CONVERSION_FROM_72_CHANNELS,
                              value=CONVERSION_STARTED)
    old_channels = self.get_old_channels()
    # We break it up into separate sessions and add sleep because this is going to be executed
    # on a background thread and we do not want to hold the DB lock for too long
    with db_session:
        for c in old_channels:
            if self.shutting_down:
                break
            try:
                self.mds.ChannelMetadata(**c)
            except Exception:
                # Was a bare 'except:', which also swallows SystemExit and
                # KeyboardInterrupt; narrowed to Exception. Bad legacy rows
                # are deliberately skipped.
                continue
    with db_session:
        for c in self.mds.ChannelMetadata.select().for_update()[:]:
            contents_len = c.contents_len
            title = c.title
            if is_forbidden(title):
                c.delete()
            elif contents_len:
                c.num_entries = contents_len
            else:
                # Empty channels are not worth keeping.
                c.delete()
    with db_session:
        v = self.mds.MiscData.get_for_update(
            name=CONVERSION_FROM_72_CHANNELS)
        v.value = CONVERSION_FINISHED
def deal_with_data_for_oracle_srxw_del_all_with_where(org_id, that_week_min):
    """Delete the ScadaReportXNWeek rows for one org whose report period
    begins on that_week_min's date.

    Returns the number of rows deleted. that_week_min is a datetime; only
    its date() component is used for matching.
    """
    this_date = that_week_min.date()
    rowcount = delete(
        srxw for srxw in ScadaReportXNWeek
        if srxw.REPORT_BEGIN_DATE == this_date and srxw.SRXW_ORG_ID == org_id)
    print(
        '...清洗之前...先删除...机构号:', org_id,
        '...Oracle...deal_with_data_for_oracle_srxw_del_all_with_where...删除ScadaReportXNWeek总条数: '
        + str(rowcount) + ' 对应REPORT_BEGIN_DATE周二开始周期时间为: ', that_week_min)
    return rowcount
def convert_discovered_channels(self):
    """Convert 7.2-era discovered channels into ChannelMetadata entries.

    Tracks progress in MiscData so an interrupted conversion can restart
    by dropping the partial entries from the previous attempt.
    """
    # Reflect conversion state
    with db_session:
        v = self.mds.MiscData.get_for_update(name=CONVERSION_FROM_72_CHANNELS)
        if v:
            if v.value == CONVERSION_STARTED:
                # Just drop the entries from the previous try
                orm.delete(g for g in self.mds.ChannelMetadata
                           if g.status == LEGACY_ENTRY)
            else:
                v.set(value=CONVERSION_STARTED)
        else:
            self.mds.MiscData(name=CONVERSION_FROM_72_CHANNELS,
                              value=CONVERSION_STARTED)
    old_channels = self.get_old_channels()
    # We break it up into separate sessions and add sleep because this is going to be executed
    # on a background thread and we do not want to hold the DB lock for too long
    with db_session:
        for c in old_channels:
            if self.shutting_down:
                return
            try:
                self.mds.ChannelMetadata(**c)
            except Exception:
                # Was a bare 'except:', which also swallows SystemExit and
                # KeyboardInterrupt; narrowed to Exception. Bad legacy rows
                # are deliberately skipped.
                continue
    with db_session:
        for c in self.mds.ChannelMetadata.select().for_update()[:]:
            contents_len = c.contents_len
            if contents_len:
                c.num_entries = contents_len
            else:
                # Empty channels are not worth keeping.
                c.delete()
    with db_session:
        v = self.mds.MiscData.get_for_update(name=CONVERSION_FROM_72_CHANNELS)
        v.value = CONVERSION_FINISHED
def garbage_collect(self):
    """Purge the older entries that have been flagged as deleted."""
    orm.delete(entry for entry in self.older_entries if entry.deleted)
def database():
    # Test fixture: start from an empty SiteSnapshot table, hand the db to
    # the test, and roll back whatever it wrote afterwards.
    # NOTE(review): reconstructed nesting — the yield/rollback are assumed
    # to run inside the db_session so the rollback has an active
    # transaction to undo; confirm against the original layout.
    with orm.db_session:
        orm.delete(i for i in SiteSnapshot)
        yield db
        db.rollback()
def remove_user(tg_user):
    """Delete the TGUser row matching *tg_user*'s Telegram id."""
    delete(user for user in TGUser if user.tgid == tg_user.tgid)