def downgrade(migrate_engine):
    meta.bind = migrate_engine
    instance_table = Table('instances', meta, autoload=True)
    instance_table.c.datastore_version_id.alter(nullable=True)
    update(
        table=instance_table,
        whereclause="datastore_version_id='%s'" % LEGACY_VERSION_ID,
        values=dict(datastore_version_id=None)
    ).execute()
    datastores_table = Table('datastores', meta, autoload=True)
    datastore_versions_table = Table('datastore_versions', meta, autoload=True)
    delete(
        table=datastore_versions_table,
        whereclause="id='%s'" % LEGACY_VERSION_ID
    ).execute()
    delete(
        table=datastores_table,
        whereclause="id='%s'" % LEGACY_DATASTORE_ID
    ).execute()
def clear_data_for_current_system(self):
    p = alias(FileStorage)
    ps = alias(FileStorage)
    pl = alias(RpmDetailPatchStorageLink)
    pls = alias(RpmDetailPatchStorageLink)
    rd = alias(RpmDetail)
    s = alias(System)
    delete_links_sql = delete(pl).where(
        exists(
            select([1]).select_from(
                pls.join(
                    rd, pls.c.rpm_detail_id == rd.c.rpm_detail_id
                ).join(
                    s, rd.c.system_id == s.c.system_id
                )
            ).where(
                s.c.system_id == self.system.system_id
            ).where(
                pl.c.id == pls.c.id
            )
        )
    )
    delete_patches_sql = delete(p).where(
        not_(
            exists(
                select([1]).select_from(
                    pl.join(ps, pl.c.file_storage_id == ps.c.id)
                ).where(
                    p.c.id == ps.c.id
                )
            )
        )
    )
    result_links = self._session.execute(delete_links_sql)
    if result_links.rowcount:
        log.info(f"Removed {result_links.rowcount} previous patch links")
    result_patches = self._session.execute(delete_patches_sql)
    if result_patches.rowcount:
        log.info(f"Removed {result_patches.rowcount} previous patches")
def upgrade_2(session, metadata):
    """
    Remove the individual proxy settings, after the implementation of central proxy settings.

    Added in 2.5 (3.0 development)
    """
    settings = Registry().get('settings')
    op = get_upgrade_op(session)
    metadata_table = Table('metadata', metadata, autoload=True)
    proxy, = session.execute(
        select([metadata_table.c.value], metadata_table.c.key == 'proxy_server')).first() or ('', )
    if proxy and not (proxy == settings.value('advanced/proxy http') or
                      proxy == settings.value('advanced/proxy https')):
        http_proxy = ''
        https_proxy = ''
        name, = session.execute(
            select([metadata_table.c.value], metadata_table.c.key == 'name')).first()
        msg_box = QtWidgets.QMessageBox()
        msg_box.setText(
            translate(
                'BiblesPlugin',
                f'The proxy server {proxy} was found in the bible {name}.<br>'
                f'Would you like to set it as the proxy for OpenLP?'))
        msg_box.setIcon(QtWidgets.QMessageBox.Question)
        msg_box.addButton(QtWidgets.QMessageBox.No)
        http_button = msg_box.addButton('http', QtWidgets.QMessageBox.ActionRole)
        both_button = msg_box.addButton(translate('BiblesPlugin', 'both'), QtWidgets.QMessageBox.ActionRole)
        https_button = msg_box.addButton('https', QtWidgets.QMessageBox.ActionRole)
        msg_box.setDefaultButton(both_button)
        msg_box.exec()
        clicked_button = msg_box.clickedButton()
        if clicked_button in [http_button, both_button]:
            http_proxy = proxy
            settings.setValue('advanced/proxy http', proxy)
        if clicked_button in [https_button, both_button]:
            https_proxy = proxy
            settings.setValue('advanced/proxy https', proxy)
        if http_proxy or https_proxy:
            username, = session.execute(
                select([metadata_table.c.value], metadata_table.c.key == 'proxy_username')).first()
            password, = session.execute(
                select([metadata_table.c.value], metadata_table.c.key == 'proxy_password')).first()
            settings.setValue('advanced/proxy username', username)
            settings.setValue('advanced/proxy password', password)
            settings.setValue('advanced/proxy mode', ProxyMode.MANUAL_PROXY)
    op.execute(delete(metadata_table, metadata_table.c.key == 'proxy_server'))
    op.execute(delete(metadata_table, metadata_table.c.key == 'proxy_username'))
    op.execute(delete(metadata_table, metadata_table.c.key == 'proxy_password'))
def run(self):
    self.dbsession = models.DBSession()
    try:
        self._run()
    except Exception as e:
        log.exception('Caught Failure in pull:')
        self.completion_code = 'Unknown failure: %s' % e
    if self.completion_code is not None:
        # failure machinery
        transaction.abort()
        cfg = models.get_config(self.dbsession)
        if not cfg.update_log:
            update_log = []
            log_msg = '%s: Initial Update Failed: %s' % (datetime.now().isoformat(), self.completion_code)
        else:
            update_log = [cfg.update_log]
            log_msg = '%s: Update Failed: %s' % (datetime.now().isoformat(), self.completion_code)
        cfg.update_log = '\n'.join([log_msg] + update_log)
        session = self.dbsession
        failure_count = cfg.update_failure_count = (cfg.update_failure_count or 0) + 1
        self.dbsession.flush()
        if failure_count > 3:
            d = delete(models.KeywordCache.__table__)
            session.execute(d)
            d = delete(models.Users.__table__)
            session.execute(d)
            d = delete(models.Record.__table__)
            session.execute(d)
            d = delete(models.Record_Data.__table__)
            session.execute(d)
            d = delete(models.KeywordCache.__table__)
            session.execute(d)
        transaction.commit()
        session = self.dbsession
        connection = session.connection()
        connection.execute('vacuum')
        transaction.commit()
    log.critical('Done Sync')
    if self.completion_code is None:
        # completion code will be non-none on error
        self.status = 100
        self.completion_code = 'ok'
def _delete_attr_from_schema(db: Session, attr_def: AttributeDefinition, schema: Schema):
    ValueModel = attr_def.attribute.type.value.model
    db.execute(delete(AttributeDefinition).where(AttributeDefinition.id == attr_def.id))
    db.execute(
        delete(ValueModel)
        .where(ValueModel.attribute_id == attr_def.attribute_id)
        .where(ValueModel.entity_id == Entity.id)
        .where(Entity.schema_id == schema.id)
        .execution_options(synchronize_session=False)
    )
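# Hedged alternative sketch for the correlated DELETE above: not every backend accepts a
# multi-table DELETE that joins against Entity directly, so the same rows can be targeted
# with an IN-subquery instead. Entity and the ValueModel columns come from the snippet
# above; the function name and the SQLAlchemy 1.4+ select() call style are assumptions.
from sqlalchemy import delete, select

def _delete_values_via_subquery(db, ValueModel, attribute_id, schema_id):
    entity_ids = select(Entity.id).where(Entity.schema_id == schema_id)
    stmt = (
        delete(ValueModel)
        .where(ValueModel.attribute_id == attribute_id)
        .where(ValueModel.entity_id.in_(entity_ids))
        .execution_options(synchronize_session=False)
    )
    db.execute(stmt)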
def build_analytics(dataset: Dataset):
    resolver = get_resolver()
    with engine_tx() as conn:
        resolve_all_canonical(conn, resolver)
    db = Database(dataset, resolver)
    loader = db.view(dataset)
    with engine_tx() as conn:
        conn.execute(delete(analytics_dataset_table))
        conn.execute(delete(analytics_country_table))
        conn.execute(delete(analytics_entity_table))
        entities: List[Dict[str, Any]] = []
        members: List[Dict[str, str]] = []
        countries: List[Dict[str, str]] = []
        for idx, entity in enumerate(loader):
            if idx > 0 and idx % 10000 == 0:
                log.info("Denormalised %d entities..." % idx)
            for dataset in Dataset.all():
                if len(entity.datasets.intersection(dataset.scope_names)) > 0:
                    members.append({"entity_id": entity.id, "dataset": dataset.name})
                    if len(members) >= BATCH_SIZE:
                        stmt = insert(analytics_dataset_table).values(members)
                        conn.execute(stmt)
                        members = []
            for country in entity.get_type_values(registry.country):
                countries.append({"entity_id": entity.id, "country": country})
                if len(countries) >= BATCH_SIZE:
                    stmt = insert(analytics_country_table).values(countries)
                    conn.execute(stmt)
                    countries = []
            ent = {
                "id": entity.id,
                "schema": entity.schema.name,
                "caption": entity.caption,
                "target": entity.target,
                "first_seen": entity.first_seen,
                "last_seen": entity.last_seen,
                "properties": entity.properties,
            }
            entities.append(ent)
            if len(entities) >= BATCH_SIZE:
                stmt = insert(analytics_entity_table).values(entities)
                conn.execute(stmt)
                entities = []
        # flush any remaining batches
        if len(members):
            conn.execute(insert(analytics_dataset_table).values(members))
        if len(countries):
            conn.execute(insert(analytics_country_table).values(countries))
        if len(entities):
            conn.execute(insert(analytics_entity_table).values(entities))
def populate_daily_stats(self):
    with self._session() as session:
        query = delete(DailyStat)
        session.execute(query)
        query = session.query(IAItem.id, IAItem.public_date)
        for ia_item_id, public_date in query:
            date = public_date.date()
            total_size = 0
            rows = session.query(File.size)\
                .filter_by(ia_item_id=ia_item_id)\
                .filter(File.job_id.isnot(None))
            for size, in rows:
                total_size += size
            session.execute(
                insert(DailyStat).prefix_with('OR IGNORE'),
                {'date': date}
            )
            query = update(DailyStat)\
                .values({'size': DailyStat.size + total_size})\
                .where(DailyStat.date == date)
            session.execute(query)
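# Hedged alternative sketch for the SQLite-specific 'OR IGNORE' prefix used above:
# SQLAlchemy 1.4+ exposes the same behaviour through the sqlite dialect's
# on_conflict_do_nothing(). DailyStat is the model from the snippet; the function name
# is illustrative.
from sqlalchemy.dialects.sqlite import insert as sqlite_insert

def insert_daily_stat_or_ignore(session, date):
    stmt = sqlite_insert(DailyStat).values(date=date).on_conflict_do_nothing()
    session.execute(stmt)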
async def i_delete_analysis_module_type(self, amt: AnalysisModuleType):
    assert isinstance(amt, AnalysisModuleType)
    async with self.get_db() as db:
        await db.execute(
            delete(AnalysisModuleTracking).where(
                AnalysisModuleTracking.name == amt.name))
        await db.commit()
def _delete_multi_relation(self, cols, item_model, to_delete):
    to_delete = [dict(list(zip(cols, x))) for x in to_delete]
    session = self.dbsession
    d = delete(item_model).where(
        and_(*[getattr(item_model.c, x) == bindparam(x) for x in cols]))
    session.execute(d, to_delete)
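# Hedged usage sketch for the bindparam/executemany pattern above. The table and the
# key pairs are made-up illustrations, not names from the original codebase.
from sqlalchemy import Column, Integer, MetaData, Table, and_, bindparam, delete

metadata = MetaData()
example_relation = Table(
    'example_relation', metadata,
    Column('ItemID', Integer, primary_key=True),
    Column('LangID', Integer, primary_key=True),
)

def delete_pairs(session, pairs):
    # One DELETE is compiled once and executed for every parameter dict.
    stmt = delete(example_relation).where(
        and_(example_relation.c.ItemID == bindparam('ItemID'),
             example_relation.c.LangID == bindparam('LangID')))
    session.execute(stmt, [{'ItemID': item_id, 'LangID': lang_id}
                           for item_id, lang_id in pairs])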
async def i_delete_analysis_details(self, uuid: str) -> bool:
    """Deletes the analysis details for the given Analysis referenced by id."""
    async with self.get_db() as db:
        result = await db.execute(
            delete(AnalysisDetailsTracking).where(AnalysisDetailsTracking.uuid == uuid))
        await db.commit()
        return result.rowcount > 0
def purge_entries(max_age=30, session=default_session):
    """Remove old entries."""
    today = datetime.now()
    oldest = today - timedelta(days=max_age)
    entry = Entry.__table__
    session.execute(delete(entry).where(entry.c.date < oldest))
    session.commit()
def delete(self, identifier):
    '''
    Deletes the identifier from the database and cleans up the cache.
    '''
    query = delete(self.table).where(self._where_clause_for_identifier(identifier))
    self.connection.execute(query)
    self.cache = None
async def i_delete_root_analysis(self, uuid: str) -> bool:
    """Deletes the given RootAnalysis JSON data by uuid, and any associated analysis details."""
    async with self.get_db() as db:
        result = await db.execute(
            delete(RootAnalysisTracking).where(RootAnalysisTracking.uuid == uuid))
        await db.commit()
        return result.rowcount > 0
def _update_caches(self):
    session = self.dbsession
    d = delete(models.KeywordCache.__table__)
    session.execute(d)
    fields = [u'ORG_LEVEL_%d' % i for i in range(1, 6)]
    q = (session.query(models.Record_Data.LangID, models.Record_Data.Value).
         join(models.Field, models.Record_Data.FieldID == models.Field.FieldID).
         filter(models.Field.FieldName.in_(fields)))
    vals = {(x.LangID, x.Value.strip()) for x in q}
    q = (session.query(models.Record_Data.LangID, models.Record_Data.Value).
         join(models.Field, models.Record_Data.FieldID == models.Field.FieldID).
         filter(models.Field.FieldName == u'TAXONOMY'))
    vals.update((x.LangID, y.strip()) for x in q for y in x.Value.split(';'))
    cols = ['LangID', 'Value']
    if vals:
        session.execute(models.KeywordCache.__table__.insert(),
                        [dict(list(zip(cols, x))) for x in vals if x[-1]])
    field_names = ['LOCATED_IN_CM'] + ['ORG_LEVEL_%d' % i for i in range(1, 6)]
    fields = dict(session.query(models.Field.FieldName, models.Field.FieldID)
                  .filter(models.Field.FieldName.in_(field_names)).all())
    sql = '''
    UPDATE Record SET LOCATED_IN_Cache=
        (SELECT Value FROM Record_Data
         WHERE Record.NUM=NUM AND Record.LangID=LangID AND FieldID=?)
    '''
    session.connection().execute(sql, fields['LOCATED_IN_CM'])
    sql = '''
    UPDATE Record SET OrgName_Cache=
        (SELECT group_concat(Value, ', ') FROM
            (SELECT Value FROM Record_Data AS rd
             WHERE Record.NUM=rd.NUM AND Record.LangID=LangID AND rd.FieldID in (?,?,?,?,?)
             ORDER BY (SELECT FieldName FROM Field WHERE FieldID=rd.FieldID)
            ) AS iq)
    '''
    session.connection().execute(sql, *[fields[x] for x in field_names[1:]])
    sql = '''
    UPDATE Record SET LOCATED_IN_CM =
        (SELECT
            (SELECT CM_ID FROM Community_Name
             WHERE Community_Name.Name=Record_Data.Value
             ORDER BY Community_Name.LangID LIMIT 1)
         FROM Record_Data
         WHERE FieldID=(SELECT FieldID FROM Field WHERE FieldName='LOCATED_IN_CM')
           AND Record_Data.NUM=Record.NUM AND Record_Data.LangID=Record.LangID
         ORDER BY Record_Data.LangID LIMIT 1)
    '''
    session.execute(sql)
def delete_orphaned(cls):
    with new_session() as session:
        subquery = select([ErrorReport.id])\
            .where(ErrorReport.item_id == Item.id)\
            .limit(1)
        query = delete(ErrorReport).where(~exists(subquery))
        session.execute(query)
def checkin_item(item_id, tamper_key, results):
    item_stat = {
        'project': '',
        'username': '',
        'scanned': 0,
        'found': len(results)
    }
    with new_session() as session:
        row = session.query(
            Item.project_id, Item.username,
            Item.upper_sequence_num, Item.lower_sequence_num,
            Item.ip_address, Item.datetime_claimed
        ).filter_by(id=item_id, tamper_key=tamper_key).first()
        if not row:
            raise InvalidClaim()
        (project_id, username, upper_sequence_num, lower_sequence_num,
         ip_address, datetime_claimed) = row
        item_stat['project'] = project_id
        item_stat['username'] = username
        item_stat['scanned'] = upper_sequence_num - lower_sequence_num + 1
        item_stat['started'] = datetime_claimed.replace(
            tzinfo=datetime.timezone.utc).timestamp()
        query_args = []
        # tz instead of utcnow() for Unix timestamp in UTC instead of local
        time = datetime.datetime.now(datetime.timezone.utc)
        item_stat['finished'] = time.timestamp()
        for shortcode in results.keys():
            url = results[shortcode]['url']
            encoding = results[shortcode]['encoding']
            query_args.append({
                'project_id': project_id,
                'shortcode': shortcode,
                'url': url,
                'encoding': encoding,
                'datetime': time
            })
        if len(query_args) > 0:
            query = insert(Result)
            session.execute(query, query_args)
        session.execute(delete(Item).where(Item.id == item_id))
        Budget.check_in(project_id, ip_address)
    if Stats.instance:
        Stats.instance.update(item_stat)
    return item_stat
def delete(self, table, condition=None):
    if isinstance(table, str):
        table = self.get_table(table)
    if isinstance(condition, dict):
        condition = build_condition_from_dict(table, condition)
    stmt = delete(table).where(condition)
    return self.execute(stmt)
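# Hedged sketch of the helper referenced above. build_condition_from_dict is not defined
# in this excerpt, so this is only a plausible implementation: AND together one equality
# comparison per key, resolving keys against the table's columns.
from sqlalchemy import and_

def build_condition_from_dict(table, condition):
    return and_(*[table.c[key] == value for key, value in condition.items()])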
def remove_many(self, urls):
    assert not isinstance(urls, (str, bytes)), \
        'Expected list-like. Got {}.'.format(urls)
    with self._session() as session:
        for url in urls:
            url_str_id = session.query(URLString.id)\
                .filter_by(url=url).scalar()
            query = delete(URL).where(URL.url_str_id == url_str_id)
            session.execute(query)
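# Hedged single-statement alternative sketch, assuming the same URL and URLString models
# as above and the SQLAlchemy 1.4+ select() call style: resolve all string ids in one
# subquery and delete in one round trip instead of querying per URL.
from sqlalchemy import delete, select

def remove_many_bulk(session, urls):
    url_str_ids = select(URLString.id).where(URLString.url.in_(urls))
    session.execute(delete(URL).where(URL.url_str_id.in_(url_str_ids)))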
async def test_delete_person(self, dbsession, root_component):
    # Simulate removing the test person in the application
    async with Context() as root_ctx:
        await root_component.start(root_ctx)
        async with Context() as ctx:
            session = ctx.require_resource(Session)
            session.execute(delete(Person))

    # The testing code should not see any rows now
    assert dbsession.scalar(func.count(Person.id)) == 0
def generate_delete(self):
    target_table = self.target_alias.sql_table
    delete_sqls = []

    # also add the staging delete if this table has a staging table assigned
    staging_alias = self.target_alias.get('load', {}).get('staging_alias')
    if staging_alias:
        staging_delete = GDWDelete(staging_alias, self.start, self.end)
        delete_sqls += staging_delete.generate_delete()

    # dont delete dimensions. The load will take care of deletes
    if self.target_alias.get('load', {}).get('how') == 'dimension':
        return delete_sqls

    # by default, truncate table
    delete_definition = self.target_alias.get('delete', {})
    how = delete_definition.get('how', 'truncate')
    what = delete_definition.get('what', 'all')
    if how == 'truncate':
        if what == 'all':
            delete_sqls.append(text('TRUNCATE TABLE {};'.format(target_table)))
        elif what == 'partition':
            raise RuntimeError('TODO: No truncate partition developed yet')
        else:
            raise RuntimeError('Truncate must specify date_range or all')
    elif how == 'delete':
        if what == 'all':
            delete_sqls.append(delete(target_table))
        elif what == 'date_range':
            date_column_names = self.target_alias.date_columns
            date_fields = [target_table.c[col] for col in date_column_names]
            filter_dates = filter_date_range(date_fields, self.start, self.end)
            delete_sqls.append(delete(target_table, whereclause=filter_dates))
        else:
            raise RuntimeError('TODO: add more delete options besides all and date_range')
    return delete_sqls
async def i_delete_config(self, key: str) -> bool:
    async with self.get_db() as db:
        if db is None:
            return self.temp_config.pop(key) is not None
        result = (await db.execute(delete(Config).where(Config.key == key))).rowcount
        await db.commit()
        return result == 1
def remove_systematik_document_relation(self, systematik_id, document_id):
    '''
    Does what the method name says.
    '''
    where_condition = and_(
        self.dsref_table.c.systematik == systematik_id.node_id,
        self.dsref_table.c.roemisch == systematik_id.roman,
        self.dsref_table.c.sub == systematik_id.subfolder,
        self.dsref_table.c.hauptnr == document_id)
    delete_statement = delete(self.dsref_table).where(where_condition)
    self._get_connection().execute(delete_statement)
def main(args):
    if len(args) == 0 or any(not os.path.exists(f) for f in args):
        print("Usage: python -m perfharness delete testcase [testcase ...]")
        return
    config = load_config()
    db.db_connect(config)
    files = [os.path.basename(f) for f in args]
    db.session.execute(ex.delete(db.Run).where(db.Run.testcase.in_(files)))
    db.db_close()
async def memo_delete(bot, event: Message, sess: AsyncSession, keyword: str):
    """
    Delete memo records.

    `{PREFIX}잊어 키리토` (deletes every memo record stored for `키리토`)

    """
    await sess.execute(delete(Memo).where(Memo.keyword == keyword))
    await sess.commit()
    await bot.say(event.channel, f"{format.code(keyword)}에 관한 기억 레코드를 모두 삭제했어요!")
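# Hedged companion sketch: report how many memo records existed before deleting, using
# the same AsyncSession and Memo model. Only Memo.keyword is taken from the snippet
# above; the function name and the SQLAlchemy 1.4+ select() call style are assumptions.
from sqlalchemy import delete, func, select

async def memo_delete_with_count(sess, keyword: str) -> int:
    count = await sess.scalar(
        select(func.count()).select_from(Memo).where(Memo.keyword == keyword))
    await sess.execute(delete(Memo).where(Memo.keyword == keyword))
    await sess.commit()
    return count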
def levenshein_merger_groups(interactive=True, builder=None):
    if builder:
        matchlogger = builder
    else:
        matchlogger = MatchLogBuilder()

    query = delete(AlternateTranslatorNames).where(AlternateTranslatorNames.group == None)
    db.session.execute(query)

    if interactive:
        callback = askuser_callback
    else:
        callback = matchlogger.add_match_group

    print("fetching series")
    with app.app_context():
        items = models.Translators.query.options(
            joinedload(Translators.alt_names)
        ).all()
        altn = []
        for item in items:
            for name in item.alt_names:
                altn.append((name.id, name.name, name.cleanname))

    print("Sorting names")
    altn.sort(key=lambda x: (x[1], x[0]))

    print("Searching for duplicates from %s names" % len(altn))
    done = 0
    for nid, name, cleanname in altn:
        with app.app_context():
            matches = search_for_tlname(cleanname, nid, altn)
            if matches:
                try:
                    namerow = models.AlternateTranslatorNames.query.filter(
                        models.AlternateTranslatorNames.id == nid).one()
                    match_to_group(namerow, matches, callback)
                except sqlalchemy.orm.exc.NoResultFound:
                    print("Row merged already?")
        done += 1
        if done % 10 == 0:
            print("Done %s items of %s" % (done, len(altn)))

    print(len(items))
    print("wat?")
    if not interactive:
        matchlogger.save_log("./translatorname-matchset.json")
def _delete_named_records(self, items, item_model, name_model, primary_key,
                          primary_updates, name_updates):
    session = self.dbsession
    if name_model:
        source = set([(x[primary_key], x['LangID']) for x in items])
        pk = getattr(name_model, primary_key)
        lang = name_model.LangID
        existing = set(session.query(pk, lang).all())
        to_delete = existing - source
        d = delete(name_model.__table__).where(and_(
            getattr(name_model, primary_key) == bindparam(primary_key),
            name_model.LangID == bindparam('LangID')))
        session.execute(d, [{primary_key: x[0], 'LangID': x[1]} for x in to_delete])
    source = set([x[primary_key] for x in items])
    pk = getattr(item_model, primary_key)
    existing = set(x[0] for x in session.query(pk).all())
    to_delete = existing - source
    d = delete(item_model.__table__).where(getattr(item_model, primary_key) == bindparam(primary_key))
    session.execute(d, [{primary_key: x} for x in to_delete])
def levenshein_merger_groups(interactive=True):
    query = delete(AlternateTranslatorNames).where(
        AlternateTranslatorNames.group == None)
    db.session.execute(query)

    matchlogger = MatchLogBuilder()
    if interactive:
        callback = askuser_callback
    else:
        callback = matchlogger.add_match_group

    print("fetching series")
    with app.app_context():
        items = models.Translators.query.options(
            joinedload(Translators.alt_names)).all()
        altn = []
        for item in items:
            for name in item.alt_names:
                altn.append((name.id, name.name, name.cleanname))

    print("Sorting names")
    altn.sort(key=lambda x: (x[1], x[0]))

    print("Searching for duplicates from %s names" % len(altn))
    done = 0
    for nid, name, cleanname in altn:
        with app.app_context():
            matches = search_for_tlname(cleanname, nid, altn)
            if matches:
                try:
                    namerow = models.AlternateTranslatorNames.query.filter(
                        models.AlternateTranslatorNames.id == nid).one()
                    match_to_group(namerow, matches, callback)
                except sqlalchemy.orm.exc.NoResultFound:
                    print("Row merged already?")
        done += 1
        if done % 10 == 0:
            print("Done %s items of %s" % (done, len(altn)))

    print(len(items))
    print("wat?")
    if not interactive:
        matchlogger.save_log("./translatorname-matchset.json")
def fset(self, value):
    session = object_session(self)
    session.flush()
    oldValue = [i[0] for i in fget(self)]
    oldValue.sort()
    if value:
        self.super_kind_gid = value[0]
    else:
        self.super_kind_gid = None
    newValue = list(value)
    newValue.sort()
    toRemove, toAdd = compareSorted(oldValue, newValue)
    if toRemove:
        session.execute(
            delete(info_kind_parents).where(
                and_(info_kind_parents.c.gid == self.gid,
                     authors_users.c.user_id.in_(toRemove))))
    if toAdd:
        session.execute(
            insert(info_kind_parents).values(
                [info_kind_parent(self.gid, gid) for gid in toAdd]))
    session.commit()
def save_resource(
    conn: Conn,
    path: str,
    dataset: Dataset,
    checksum: str,
    mime_type: Optional[str],
    size: int,
    title: Optional[str],
):
    if size == 0:
        q = delete(resource_table)
        q = q.where(resource_table.c.dataset == dataset.name)
        q = q.where(resource_table.c.path == path)
        conn.execute(q)
        return

    resource: Resource = {
        "dataset": dataset.name,
        "path": path,
        "mime_type": mime_type,
        "checksum": checksum,
        "timestamp": settings.RUN_TIME,
        "size": size,
        "title": title,
    }
    istmt = upsert_func(resource_table).values([resource])
    stmt = istmt.on_conflict_do_update(
        index_elements=["path", "dataset"],
        set_=dict(
            mime_type=istmt.excluded.mime_type,
            checksum=istmt.excluded.checksum,
            timestamp=istmt.excluded.timestamp,
            size=istmt.excluded.size,
            title=istmt.excluded.title,
        ),
    )
    conn.execute(stmt)
    return resource
def open_spider(self, spider):
    self.spider_name = spider.name
    if self.db_uri:
        self.engine = create_engine(self.db_uri)
        Session = sessionmaker(bind=self.engine)
        self.conn = Session()
        self.tables = {t.name: t for t in tables.values()}
        self.records = {t: [] for t in self.tables}
        metadata.create_all(self.engine)

        # delete any existing records from the tables
        for t, table in self.tables.items():
            cols = [c.name for c in table.columns]
            if "scrape_id" in cols and "spider" in cols:
                self.conn.execute(
                    delete(table).where(
                        and_(table.c.scrape_id != self.crawl_id,
                             table.c.spider == self.spider_name)))

        # do any tasks before the spider is run
        # if hasattr(spider, "name"):
        #     if self.conn.engine.dialect.has_table(self.conn.engine, tables["organisation"].name):
        #         self.conn.execute(
        #             tables["organisation"].update()
        #             .values(active=False)
        #             .where(
        #                 tables["organisation"].c.id == tables["organisation_sources"].select(
        #                 ).with_only_columns([
        #                     tables["organisation_sources"].c.organisation_id
        #                 ]).where(
        #                     tables["organisation_sources"].c.source_id == getattr(spider, "name")
        #                 )
        #             )
        #         )
        self.commit_records(spider)
def remove_subscription_of_user(user, group):
    stmt = delete(Subscription).\
        where(Subscription.user_id == user.id).\
        where(Subscription.group_id == group.id)
    db.session.execute(stmt)
def borrar_dods(sesion):
    stmt = delete(Dod)
    result = sesion.connection().execute(stmt)
    sesion.commit()
def remove_subscription(self, user_id, package_name):
    table = UserPackage.__table__
    self.session.execute(
        delete(table)
        .where(table.c.user_id == user_id)
        .where(table.c.package_name == package_name))
def send(self, data, keys, referrer): ''' Sends form to user's email. Assumes sender's email has been verified. ''' subject = data.get('_subject') or \ 'New submission from %s' % referrer_to_path(referrer) reply_to = (data.get('_replyto', data.get('email', data.get('Email'))) or '').strip() cc = data.get('_cc', None) next = next_url(referrer, data.get('_next')) spam = data.get('_gotcha', None) format = data.get('_format', None) # turn cc emails into array if cc: cc = [email.strip() for email in cc.split(',')] # prevent submitting empty form if not any(data.values()): return {'code': Form.STATUS_EMAIL_EMPTY} # return a fake success for spam if spam: g.log.info('Submission rejected.', gotcha=spam) return {'code': Form.STATUS_EMAIL_SENT, 'next': next} # validate reply_to, if it is not a valid email address, reject if reply_to and not IS_VALID_EMAIL(reply_to): g.log.info('Submission rejected. Reply-To is invalid.', reply_to=reply_to) return { 'code': Form.STATUS_REPLYTO_ERROR, 'address': reply_to, 'referrer': referrer } # increase the monthly counter request_date = datetime.datetime.now() self.increase_monthly_counter(basedate=request_date) # increment the forms counter self.counter = Form.counter + 1 # if submission storage is disabled and form is upgraded, don't store submission if self.disable_storage and self.upgraded: pass else: DB.session.add(self) # archive the form contents sub = Submission(self.id) sub.data = { key: data[key] for key in data if key not in KEYS_NOT_STORED } DB.session.add(sub) # commit changes DB.session.commit() # sometimes we'll delete all archived submissions over the limit if random.random() < settings.EXPENSIVELY_WIPE_SUBMISSIONS_FREQUENCY: records_to_keep = settings.ARCHIVED_SUBMISSIONS_LIMIT total_records = DB.session.query(func.count(Submission.id)) \ .filter_by(form_id=self.id) \ .scalar() if total_records > records_to_keep: newest = self.submissions.with_entities( Submission.id).limit(records_to_keep) DB.engine.execute( delete(table('submissions')). \ where(Submission.form_id == self.id). \ where(~Submission.id.in_(newest)) ) # check if the forms are over the counter and the user is not upgraded overlimit = False monthly_counter = self.get_monthly_counter() if monthly_counter > settings.MONTHLY_SUBMISSIONS_LIMIT and not self.upgraded: overlimit = True if monthly_counter == int(settings.MONTHLY_SUBMISSIONS_LIMIT * 0.9) and not self.upgraded: # send email notification send_email(to=self.email, subject="[WARNING] Approaching submission limit", text=render_template('email/90-percent-warning.txt'), html=render_template('email/90-percent-warning.html'), sender=settings.DEFAULT_SENDER) now = datetime.datetime.utcnow().strftime('%I:%M %p UTC - %d %B %Y') if not overlimit: text = render_template('email/form.txt', data=data, host=self.host, keys=keys, now=now) # check if the user wants a new or old version of the email if format == 'plain': html = render_template('email/plain_form.html', data=data, host=self.host, keys=keys, now=now) else: html = render_template('email/form.html', data=data, host=self.host, keys=keys, now=now) else: if monthly_counter - settings.MONTHLY_SUBMISSIONS_LIMIT > 25: g.log.info('Submission rejected. Form over quota.', monthly_counter=monthly_counter) # only send this overlimit notification for the first 25 overlimit emails # after that, return an error so the user can know the website owner is not # going to read his message. 
return {'code': Form.STATUS_OVERLIMIT} text = render_template('email/overlimit-notification.txt', host=self.host) html = render_template('email/overlimit-notification.html', host=self.host) # if emails are disabled and form is upgraded, don't send email notification if self.disable_email and self.upgraded: return {'code': Form.STATUS_NO_EMAIL, 'next': next} else: result = send_email(to=self.email, subject=subject, text=text, html=html, sender=settings.DEFAULT_SENDER, reply_to=reply_to, cc=cc, headers={ 'List-Unsubscribe-Post': 'List-Unsubscribe=One-Click', 'List-Unsubscribe': '<' + url_for('unconfirm_form', form_id=self.id, digest=self.unconfirm_digest(), _external=True) + '>' }) if not result[0]: g.log.warning('Failed to send email.', reason=result[1], code=result[2]) if result[1].startswith('Invalid replyto email address'): return { 'code': Form.STATUS_REPLYTO_ERROR, 'address': reply_to, 'referrer': referrer } return { 'code': Form.STATUS_EMAIL_FAILED, 'mailer-code': result[2], 'error-message': result[1] } return {'code': Form.STATUS_EMAIL_SENT, 'next': next}
def send(self, data, keys, referrer): ''' Sends form to user's email. Assumes sender's email has been verified. ''' subject = data.get('_subject') or \ 'New submission from %s' % referrer_to_path(referrer) reply_to = (data.get( '_replyto', data.get('email', data.get('Email')) ) or '').strip() cc = data.get('_cc', None) next = next_url(referrer, data.get('_next')) spam = data.get('_gotcha', None) format = data.get('_format', None) # turn cc emails into array if cc: cc = [email.strip() for email in cc.split(',')] # prevent submitting empty form if not any(data.values()): return {'code': Form.STATUS_EMAIL_EMPTY} # return a fake success for spam if spam: g.log.info('Submission rejected.', gotcha=spam) return {'code': Form.STATUS_EMAIL_SENT, 'next': next} # validate reply_to, if it is not a valid email address, reject if reply_to and not IS_VALID_EMAIL(reply_to): g.log.info('Submission rejected. Reply-To is invalid.', reply_to=reply_to) return { 'code': Form.STATUS_REPLYTO_ERROR, 'address': reply_to, 'referrer': referrer } # increase the monthly counter request_date = datetime.datetime.now() self.increase_monthly_counter(basedate=request_date) # increment the forms counter self.counter = Form.counter + 1 # if submission storage is disabled and form is upgraded, don't store submission if self.disable_storage and self.upgraded: pass else: DB.session.add(self) # archive the form contents sub = Submission(self.id) sub.data = {key: data[key] for key in data if key not in KEYS_NOT_STORED} DB.session.add(sub) # commit changes DB.session.commit() # sometimes we'll delete all archived submissions over the limit if random.random() < settings.EXPENSIVELY_WIPE_SUBMISSIONS_FREQUENCY: records_to_keep = settings.ARCHIVED_SUBMISSIONS_LIMIT total_records = DB.session.query(func.count(Submission.id)) \ .filter_by(form_id=self.id) \ .scalar() if total_records > records_to_keep: newest = self.submissions.with_entities(Submission.id).limit(records_to_keep) DB.engine.execute( delete(table('submissions')). \ where(Submission.form_id == self.id). 
\ where(~Submission.id.in_(newest)) ) # url to request_unconfirm_form page unconfirm = url_for('request_unconfirm_form', form_id=self.id, _external=True) # check if the forms are over the counter and the user is not upgraded overlimit = False monthly_counter = self.get_monthly_counter() monthly_limit = settings.MONTHLY_SUBMISSIONS_LIMIT \ if self.id > settings.FORM_LIMIT_DECREASE_ACTIVATION_SEQUENCE \ else settings.GRANDFATHER_MONTHLY_LIMIT if monthly_counter > monthly_limit and not self.upgraded: overlimit = True if monthly_counter == int(monthly_limit * 0.9) and not self.upgraded: # send email notification send_email( to=self.email, subject="Formspree Notice: Approaching submission limit.", text=render_template('email/90-percent-warning.txt', unconfirm_url=unconfirm, limit=monthly_limit ), html=render_template_string( TEMPLATES.get('90-percent-warning.html'), unconfirm_url=unconfirm, limit=monthly_limit ), sender=settings.DEFAULT_SENDER ) now = datetime.datetime.utcnow().strftime('%I:%M %p UTC - %d %B %Y') if not overlimit: g.log.info('Submitted.') text = render_template('email/form.txt', data=data, host=self.host, keys=keys, now=now, unconfirm_url=unconfirm) # check if the user wants a new or old version of the email if format == 'plain': html = render_template('email/plain_form.html', data=data, host=self.host, keys=keys, now=now, unconfirm_url=unconfirm) else: html = render_template_string(TEMPLATES.get('form.html'), data=data, host=self.host, keys=keys, now=now, unconfirm_url=unconfirm) else: g.log.info('Submission rejected. Form over quota.', monthly_counter=monthly_counter) # send an overlimit notification for the first x overlimit emails # after that, return an error so the user can know the website owner is not # going to read his message. if monthly_counter <= monthly_limit + settings.OVERLIMIT_NOTIFICATION_QUANTITY: subject = 'Formspree Notice: Your submission limit has been reached.' text = render_template('email/overlimit-notification.txt', host=self.host, unconfirm_url=unconfirm, limit=monthly_limit) html = render_template_string(TEMPLATES.get('overlimit-notification.html'), host=self.host, unconfirm_url=unconfirm, limit=monthly_limit) else: return {'code': Form.STATUS_OVERLIMIT} # if emails are disabled and form is upgraded, don't send email notification if self.disable_email and self.upgraded: return {'code': Form.STATUS_NO_EMAIL, 'next': next} else: result = send_email( to=self.email, subject=subject, text=text, html=html, sender=settings.DEFAULT_SENDER, reply_to=reply_to, cc=cc, headers={ 'List-Unsubscribe-Post': 'List-Unsubscribe=One-Click', 'List-Unsubscribe': '<' + url_for( 'unconfirm_form', form_id=self.id, digest=self.unconfirm_digest(), _external=True ) + '>' } ) if not result[0]: g.log.warning('Failed to send email.', reason=result[1], code=result[2]) if result[1].startswith('Invalid replyto email address'): return { 'code': Form.STATUS_REPLYTO_ERROR, 'address': reply_to, 'referrer': referrer } return { 'code': Form.STATUS_EMAIL_FAILED, 'mailer-code': result[2], 'error-message': result[1] } return {'code': Form.STATUS_EMAIL_SENT, 'next': next}
def delete_one(cls, report_id):
    with new_session() as session:
        query = delete(ErrorReport).where(ErrorReport.id == report_id)
        session.execute(query)
def send(self, submitted_data, referrer): ''' Sends form to user's email. Assumes sender's email has been verified. ''' if type(submitted_data) in (ImmutableMultiDict, ImmutableOrderedMultiDict): data, keys = http_form_to_dict(submitted_data) else: data, keys = submitted_data, submitted_data.keys() subject = data.get('_subject', 'New submission from %s' % referrer_to_path(referrer)) reply_to = data.get('_replyto', data.get('email', data.get('Email', ''))).strip() cc = data.get('_cc', None) next = next_url(referrer, data.get('_next')) spam = data.get('_gotcha', None) format = data.get('_format', None) # turn cc emails into array if cc: cc = [email.strip() for email in cc.split(',')] # prevent submitting empty form if not any(data.values()): return { 'code': Form.STATUS_EMAIL_EMPTY } # return a fake success for spam if spam: g.log.info('Submission rejected.', gotcha=spam) return { 'code': Form.STATUS_EMAIL_SENT, 'next': next } # validate reply_to, if it is not a valid email address, reject if reply_to and not IS_VALID_EMAIL(reply_to): g.log.info('Submission rejected. Reply-To is invalid.', reply_to=reply_to) return { 'code': Form.STATUS_REPLYTO_ERROR, 'error-message': '"%s" is not a valid email address.' % reply_to } # increase the monthly counter request_date = datetime.datetime.now() self.increase_monthly_counter(basedate=request_date) # increment the forms counter self.counter = Form.counter + 1 DB.session.add(self) # archive the form contents sub = Submission(self.id) sub.data = data DB.session.add(sub) # commit changes DB.session.commit() # delete all archived submissions over the limit records_to_keep = settings.ARCHIVED_SUBMISSIONS_LIMIT newest = self.submissions.with_entities(Submission.id).limit(records_to_keep) DB.engine.execute( delete('submissions'). \ where(Submission.form_id == self.id). \ where(~Submission.id.in_(newest)) ) # check if the forms are over the counter and the user is not upgraded overlimit = False monthly_counter = self.get_monthly_counter() if monthly_counter > settings.MONTHLY_SUBMISSIONS_LIMIT: overlimit = True if self.controllers: for c in self.controllers: if c.upgraded: overlimit = False break now = datetime.datetime.utcnow().strftime('%I:%M %p UTC - %d %B %Y') if not overlimit: text = render_template('email/form.txt', data=data, host=self.host, keys=keys, now=now) # check if the user wants a new or old version of the email if format == 'plain': html = render_template('email/plain_form.html', data=data, host=self.host, keys=keys, now=now) else: html = render_template('email/form.html', data=data, host=self.host, keys=keys, now=now) else: if monthly_counter - settings.MONTHLY_SUBMISSIONS_LIMIT > 25: g.log.info('Submission rejected. Form over quota.', monthly_counter=monthly_counter) # only send this overlimit notification for the first 25 overlimit emails # after that, return an error so the user can know the website owner is not # going to read his message. 
return { 'code': Form.STATUS_OVERLIMIT } text = render_template('email/overlimit-notification.txt', host=self.host) html = render_template('email/overlimit-notification.html', host=self.host) result = send_email( to=self.email, subject=subject, text=text, html=html, sender=settings.DEFAULT_SENDER, reply_to=reply_to, cc=cc ) if not result[0]: g.log.warning('Failed to send email.', reason=result[1], code=result[2]) if result[1].startswith('Invalid replyto email address'): return { 'code': Form.STATUS_REPLYTO_ERROR} return{ 'code': Form.STATUS_EMAIL_FAILED, 'mailer-code': result[2], 'error-message': result[1] } return { 'code': Form.STATUS_EMAIL_SENT, 'next': next }
def _drain_to_working_set(self, size=1000):
    logger.info("Draining to working set %s", self.working_set_filename)
    assert not os.path.exists(self.working_set_filename)
    with new_session() as session:
        query = session.query(Result)
        if self.after:
            query = query.filter(Result.datetime > self.after)
        with open(self.working_set_filename, "wb") as work_file:
            last_id = -1
            num_results = 0
            running = True
            while running:
                # Optimized for SQLite scrolling window
                rows = query.filter(Result.id > last_id).limit(size).all()
                if not rows:
                    break
                delete_ids = []
                for result in rows:
                    line = base64.b64encode(
                        pickle.dumps(
                            {
                                "id": result.id,
                                "project_id": result.project_id,
                                "shortcode": result.shortcode,
                                "url": result.url,
                                "encoding": result.encoding,
                                "datetime": result.datetime,
                            }
                        )
                    )
                    work_file.write(line)
                    work_file.write(b"\n")
                    num_results += 1
                    self.items_count += 1
                    delete_ids.append(result.id)
                    if num_results % 10000 == 0:
                        logger.info("Drain progress: %d", num_results)
                    if num_results % 100000 == 0:
                        # Risky, but need to do this since WAL
                        # performance is low on large transactions
                        logger.info("Checkpoint. (Don't delete stray files if program crashes!)")
                        work_file.flush()
                        session.commit()
                    if self.max_items and num_results >= self.max_items:
                        logger.info("Reached max items %d.", self.max_items)
                        running = False
                        break
                if self.settings["delete"]:
                    delete_query = delete(Result).where(Result.id == bindparam("id"))
                    session.execute(delete_query,
                                    [{"id": result_id} for result_id in delete_ids])
def delete_all(model_type: Type[BaseModel]) -> Delete:
    return delete(model_type)
def delete_dynamic_profiles() -> Delete:
    return delete(PlayerProfile) \
        .where(PlayerProfile.persistent == False)
def delete_absent_users() -> Delete:
    return delete(User) \
        .where(User.roles.is_(None), User.display_name.is_(None))
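# Hedged usage sketch for the statement-builder helpers above: they only construct Delete
# objects, so a caller still has to execute and commit them. The engine URL and session
# setup here are illustrative, not from the original codebase.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///example.db")  # hypothetical database URL
SessionLocal = sessionmaker(bind=engine)

with SessionLocal() as session:
    session.execute(delete_dynamic_profiles())
    session.execute(delete_absent_users())
    session.commit()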
def send(self, http_form, referrer):
    '''
    Sends form to user's email. Assumes sender's email has been verified.
    '''
    data, keys = http_form_to_dict(http_form)
    subject = data.get('_subject', 'New submission from %s' % referrer_to_path(referrer))
    reply_to = data.get('_replyto', data.get('email', data.get('Email', None)))
    cc = data.get('_cc', None)
    next = next_url(referrer, data.get('_next'))
    spam = data.get('_gotcha', None)

    # prevent submitting empty form
    if not any(data.values()):
        return {'code': Form.STATUS_EMAIL_EMPTY}

    # return a fake success for spam
    if spam:
        return {'code': Form.STATUS_EMAIL_SENT, 'next': next}

    # increase the monthly counter
    request_date = datetime.datetime.now()
    self.increase_monthly_counter(basedate=request_date)

    # increment the forms counter
    self.counter = Form.counter + 1
    DB.session.add(self)

    # archive the form contents
    sub = Submission(self.id)
    sub.data = data
    DB.session.add(sub)

    # commit changes
    DB.session.commit()

    # delete all archived submissions over the limit
    records_to_keep = settings.ARCHIVED_SUBMISSIONS_LIMIT
    newest = self.submissions.with_entities(Submission.id).limit(records_to_keep)
    DB.engine.execute(
        delete(table('submissions'))
        .where(Submission.form_id == self.id)
        .where(~Submission.id.in_(newest))
    )

    # check if the forms are over the counter and the user is not upgraded
    overlimit = False
    monthly_counter = self.get_monthly_counter()
    if monthly_counter > settings.MONTHLY_SUBMISSIONS_LIMIT:
        overlimit = True
        if self.controllers:
            for c in self.controllers:
                if c.upgraded:
                    overlimit = False
                    break

    now = datetime.datetime.utcnow().strftime('%I:%M %p UTC - %d %B %Y')
    if not overlimit:
        text = render_template('email/form.txt', data=data, host=self.host, keys=keys, now=now)
        html = render_template('email/form.html', data=data, host=self.host, keys=keys, now=now)
    else:
        if monthly_counter - settings.MONTHLY_SUBMISSIONS_LIMIT > 25:
            # only send this overlimit notification for the first 25 overlimit emails
            # after that, return an error so the user can know the website owner is not
            # going to read his message.
            return {'code': Form.STATUS_EMAIL_FAILED}
        text = render_template('email/overlimit-notification.txt', host=self.host)
        html = render_template('email/overlimit-notification.html', host=self.host)

    result = send_email(to=self.email,
                        subject=subject,
                        text=text,
                        html=html,
                        sender=settings.DEFAULT_SENDER,
                        reply_to=reply_to,
                        cc=cc)
    if not result[0]:
        return {'code': Form.STATUS_EMAIL_FAILED}

    return {'code': Form.STATUS_EMAIL_SENT, 'next': next}