def upgrade(pyramid_env):
    """Deduplicate sub-graph association rows, then add unique constraints."""
    # Do stuff with the app's models here.
    from assembl import models as m
    db = m.get_session_maker()()
    with transaction.manager:
        # Each result row is a 1-tuple whose single column is the array of
        # ids sharing one (idea_id, sub_graph_id) pair.
        dups = list(db.execute(
            """SELECT array_agg(id) FROM sub_graph_idea_association GROUP BY idea_id, sub_graph_id HAVING count(id) > 1"""))
        if dups:
            # BUGFIX: slice the aggregated id array (row[0][1:]) — slicing the
            # row tuple itself (row[1:]) on a single-column row is always
            # empty, so no duplicates were ever deleted — and stringify the
            # ids before joining (','.join rejects ints).
            extras = list(chain(*[row[0][1:] for row in dups]))
            db.execute(
                'DELETE FROM sub_graph_idea_association WHERE id IN (%s)' % (
                    ','.join(str(x) for x in extras)))
        dups = list(db.execute(
            """SELECT array_agg(id) FROM sub_graph_idea_link_association GROUP BY idea_link_id, sub_graph_id HAVING count(id) > 1"""))
        if dups:
            extras = list(chain(*[row[0][1:] for row in dups]))
            db.execute(
                'DELETE FROM sub_graph_idea_link_association WHERE id IN (%s)' % (
                    ','.join(str(x) for x in extras)))
    with context.begin_transaction():
        op.create_unique_constraint(
            "%s_%s_sub_graph_idea_association_UNQC_idea_id_sub_graph_id" % (
                config.get('db_schema'), config.get('db_user')),
            "sub_graph_idea_association", ["idea_id", "sub_graph_id"])
        op.create_unique_constraint(
            "%s_%s_sub_graph_idea_link_association_UNQC_idea_link_id_sub_graph_id" % (
                config.get('db_schema'), config.get('db_user')),
            "sub_graph_idea_link_association", ["idea_link_id", "sub_graph_id"])
def __table_args__(cls):
    """Declare a tenant-qualified unique constraint on (target id, namespace, key)."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    constraint_name = "%s_%s_%s_unique_constraint" % (
        schema, user, cls.__tablename__)
    target_column = getattr(cls, cls.target_id_name)
    return (
        UniqueConstraint(
            target_column, cls.namespace, cls.key, name=constraint_name),
    )
def upgrade(pyramid_env):
    """Add the sqla_type discriminator and the notification_on_post subclass table.

    Existing notifications (and the old event_source_* columns) are dropped.
    """
    # FIX: removed the unused local ``schema`` (computed but never read).
    with context.begin_transaction():
        # No one should have active notifications yet
        op.execute('''DELETE FROM notification''')
        # Polymorphic discriminator for notification subclasses.
        op.add_column(
            'notification',
            sa.Column('sqla_type', String, nullable=False, index=True))
        op.create_table(
            'notification_on_post',
            sa.Column('id', sa.Integer, sa.ForeignKey(
                'notification.id', ondelete="CASCADE", onupdate="CASCADE"),
                primary_key=True),
            sa.Column('post_id', sa.Integer, sa.ForeignKey(
                'post.id', ondelete="CASCADE", onupdate="CASCADE"),
                nullable=False),
        )
        # Superseded by the notification_on_post table above.
        op.drop_column('notification', 'event_source_type')
        op.drop_column('notification', 'event_source_object_id')
def downgrade(pyramid_env):
    """Remove all python-social-auth tables, constraints and account rows."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    with context.begin_transaction():
        op.drop_index(
            "%s_%s_ix_social_auth_code_code" % (schema, user),
            "social_auth_code")
        op.drop_constraint(
            "%s_%s_social_auth_nonce_UNQC_server_url_timestamp_salt" % (
                schema, user),
            "social_auth_nonce")
        op.drop_constraint(
            "%s_%s_social_auth_code_UNQC_code_email" % (schema, user),
            "social_auth_code")
        op.drop_constraint(
            "%s_%s_social_auth_association_UNQC_server_url_handle" % (
                schema, user),
            "social_auth_association")
        op.drop_constraint(
            "%s_%s_social_auth_account_UNQC_provider_id_uid" % (schema, user),
            "social_auth_account")
        # Drop the tables themselves, then purge the polymorphic rows.
        for table in ('social_auth_nonce', 'social_auth_code',
                      'social_auth_association', 'social_auth_account'):
            op.drop_table(table)
        op.execute("DELETE FROM abstract_agent_account"
                   " WHERE type = 'social_auth_account'")
def upgrade(pyramid_env):
    """Deduplicate sub-graph association rows, then add unique constraints."""
    # Do stuff with the app's models here.
    from assembl import models as m
    db = m.get_session_maker()()
    with transaction.manager:
        # Each result row is a 1-tuple whose single column is the array of
        # ids sharing one (idea_id, sub_graph_id) pair.
        dups = list(
            db.execute("""SELECT array_agg(id) FROM sub_graph_idea_association GROUP BY idea_id, sub_graph_id HAVING count(id) > 1"""))
        if dups:
            # BUGFIX: slice the aggregated id array (row[0][1:]) — slicing the
            # row tuple (row[1:]) on a single-column row is always empty, so
            # no duplicates were ever deleted — and stringify ids before
            # joining (','.join rejects ints).
            extras = list(chain(*[row[0][1:] for row in dups]))
            db.execute(
                'DELETE FROM sub_graph_idea_association WHERE id IN (%s)'
                % (','.join(str(x) for x in extras)))
        dups = list(
            db.execute(
                """SELECT array_agg(id) FROM sub_graph_idea_link_association GROUP BY idea_link_id, sub_graph_id HAVING count(id) > 1"""))
        if dups:
            extras = list(chain(*[row[0][1:] for row in dups]))
            db.execute(
                'DELETE FROM sub_graph_idea_link_association WHERE id IN (%s)'
                % (','.join(str(x) for x in extras)))
    with context.begin_transaction():
        op.create_unique_constraint(
            "%s_%s_sub_graph_idea_association_UNQC_idea_id_sub_graph_id"
            % (config.get('db_schema'), config.get('db_user')),
            "sub_graph_idea_association", ["idea_id", "sub_graph_id"])
        op.create_unique_constraint(
            "%s_%s_sub_graph_idea_link_association_UNQC_idea_link_id_sub_graph_id"
            % (config.get('db_schema'), config.get('db_user')),
            "sub_graph_idea_link_association", ["idea_link_id", "sub_graph_id"])
def downgrade(pyramid_env):
    """Tear down the python-social-auth storage and its account rows."""
    schema = config.get('db_schema')
    user = config.get('db_user')

    def qual(suffix):
        # All constraint/index names are schema_user_ prefixed.
        return "%s_%s_%s" % (schema, user, suffix)

    with context.begin_transaction():
        op.drop_index(qual("ix_social_auth_code_code"), "social_auth_code")
        op.drop_constraint(
            qual("social_auth_nonce_UNQC_server_url_timestamp_salt"),
            "social_auth_nonce")
        op.drop_constraint(
            qual("social_auth_code_UNQC_code_email"), "social_auth_code")
        op.drop_constraint(
            qual("social_auth_association_UNQC_server_url_handle"),
            "social_auth_association")
        op.drop_constraint(
            qual("social_auth_account_UNQC_provider_id_uid"),
            "social_auth_account")
        op.drop_table('social_auth_nonce')
        op.drop_table('social_auth_code')
        op.drop_table('social_auth_association')
        op.drop_table('social_auth_account')
        op.execute("DELETE FROM abstract_agent_account"
                   " WHERE type = 'social_auth_account'")
def connect():
    """Return the shared Elasticsearch client, creating it on first use."""
    global _es
    if _es is None:
        # CONSISTENCY FIX: read the port from configuration (as the other
        # elasticsearch helpers do) instead of hardcoding 9200; the default
        # value preserves previous behaviour.
        port = config.get('elasticsearch_port', '9200')
        server = config.get('elasticsearch_host', 'localhost') + ':' + port
        auth = config.get('elastic_search_basic_auth', None)
        # Only pass http_auth when basic auth is configured.
        _es = Elasticsearch(server, **{'http_auth': a for a in (auth, ) if a})
    return _es
def upgrade(pyramid_env):
    """Add the sqla_type discriminator and the notification_on_post subclass table.

    Existing notifications (and the old event_source_* columns) are dropped.
    """
    # FIX: removed ``from assembl.models.notification import *`` — a wildcard
    # import inside a function is a SyntaxError on Python 3 (SyntaxWarning on
    # Python 2) and nothing from it was used. Also removed the unused local
    # ``schema``.
    with context.begin_transaction():
        # No one should have active notifications yet
        op.execute('''DELETE FROM notification''')
        # Polymorphic discriminator for notification subclasses.
        op.add_column(
            'notification',
            sa.Column('sqla_type', String, nullable=False, index=True))
        op.create_table(
            'notification_on_post',
            sa.Column('id', sa.Integer, sa.ForeignKey(
                'notification.id', ondelete="CASCADE", onupdate="CASCADE"),
                primary_key=True),
            sa.Column('post_id', sa.Integer, sa.ForeignKey(
                'post.id', ondelete="CASCADE", onupdate="CASCADE"),
                nullable=False),
        )
        # Superseded by the notification_on_post table above.
        op.drop_column('notification', 'event_source_type')
        op.drop_column('notification', 'event_source_object_id')
def downgrade(pyramid_env):
    """Drop the import_record table and its uniqueness constraint."""
    with context.begin_transaction():
        constraint_name = "_".join((
            config.get('db_schema'), config.get('db_user'),
            "import_record_UNQC_discussion_id_external_iri_id"))
        op.drop_constraint(constraint_name, "import_record")
        op.drop_table('import_record')
def downgrade(pyramid_env):
    # Revert user_language_preference from locale_id-based rows back to
    # lang_code-based rows, staging the data through a temporary table.
    from assembl import models as m
    db = m.get_session_maker()()
    with context.begin_transaction():
        # Staging table mirroring the old (lang_code) schema.
        op.create_table(
            "user_language_preference_temp",
            sa.Column("id", sa.Integer, primary_key=True),
            sa.Column("user_id", sa.Integer),
            sa.Column("lang_code", sa.String),
            sa.Column("preferred_order", sa.Integer),
            sa.Column("locale_id", sa.Integer),
            sa.Column("explicitly_defined", sa.Boolean, server_default="0"))
    with transaction.manager:
        from assembl.models.auth import LanguagePreferenceOrder
        # The old preferred_order is reconstructed from source_of_evidence.
        op.execute("""INSERT INTO user_language_preference_temp (id, user_id, locale_id, preferred_order) SELECT id, user_id, locale_id, source_of_evidence FROM user_language_preference""")
        # Map each referenced locale id back to its textual code.
        locale_ids = db.execute(
            """SELECT DISTINCT locale_id, locale.code FROM user_language_preference JOIN locale ON (locale.id=locale_id)""")
        for locale_id, locale_name in locale_ids:
            op.execute("UPDATE user_language_preference_temp SET lang_code = '%s' WHERE locale_id = %d" % (
                locale_name, locale_id))
        # Preferences that were explicit get the explicitly_defined flag.
        op.execute("""UPDATE user_language_preference_temp SET explicitly_defined = 1 WHERE preferred_order = %d""" % (LanguagePreferenceOrder.Explicit,))
        op.execute("DELETE FROM user_language_preference")
        mark_changed()
    with context.begin_transaction():
        # Reshape the live table to the old schema.
        op.add_column(
            'user_language_preference', sa.Column(
                'explicitly_defined', sa.Boolean, nullable=False,
                server_default=TextClause("0")))
        op.add_column(
            'user_language_preference', sa.Column(
                'lang_code', sa.String(), nullable=False, server_default=""))
        op.drop_index(
            '%s_%s_user_language_preference_UNQC_user_id_locale_id' % (
                config.get('db_schema'), config.get('db_user')))
        op.create_index(
            '%s_%s_user_language_preference_UNQC_user_id_lang_code' % (
                config.get('db_schema'), config.get('db_user')),
            'user_language_preference', ['user_id', 'lang_code'], unique=True)
        op.drop_column('user_language_preference', 'source_of_evidence')
        op.drop_column('user_language_preference', 'translate_to')
        op.drop_column('user_language_preference', 'locale_id')
    with transaction.manager:
        # Copy the staged rows back into the reshaped table.
        op.execute("""INSERT INTO user_language_preference (id, user_id, lang_code, preferred_order, explicitly_defined) SELECT id, user_id, lang_code, preferred_order, explicitly_defined FROM user_language_preference_temp""")
        mark_changed()
    with context.begin_transaction():
        op.drop_table("user_language_preference_temp")
def upgrade(pyramid_env):
    """De-duplicate imported posts, then add imported_post.source_post_id.

    Posts whose parent is one of the duplicates are re-parented first; the new
    column is backfilled from post.message_id and made unique per source.
    """
    # FIX: removed the dead tail (``pass``, an unused session and an empty
    # ``with transaction.manager`` block) left over from the template.
    with context.begin_transaction():
        # 1. Re-parent posts pointing at a duplicate about to be deleted.
        op.execute(""" UPDATE post p SET parent_id = COALESCE( ( SELECT new_post_parent.id AS new_post_parent_id FROM post AS post_to_correct JOIN post AS bad_post_parent ON (post_to_correct.parent_id = bad_post_parent.id) JOIN post AS new_post_parent ON (new_post_parent.message_id = bad_post_parent.message_id AND new_post_parent.id <> bad_post_parent.id) WHERE post_to_correct.parent_id IN ( SELECT MAX(post.id) as max_post_id FROM imported_post JOIN post ON (post.id=imported_post.id) GROUP BY message_id, source_id HAVING COUNT(post.id)>1 ) AND p.id = post_to_correct.id ), p.parent_id ) """)
        # 2. Delete the duplicate imported posts (keeping the lowest id).
        op.execute(""" DELETE FROM post WHERE post.id IN ( SELECT MAX(post.id) as max_post_id FROM imported_post JOIN post ON (post.id=imported_post.id) GROUP BY message_id, source_id HAVING COUNT(post.id)>1 ) """)
        # 3. New column, backfilled from the message id.
        op.add_column(
            'imported_post', sa.Column(
                'source_post_id', sa.Unicode(), nullable=False, index=True,))
        op.execute(""" UPDATE imported_post p SET source_post_id = ( SELECT message_id FROM post WHERE p.id = post.id ) """)
        op.create_unique_constraint(
            config.get('db_schema') + "_" + config.get('db_user') +
            "_imported_post_UNQC_source_post_id_source_id",
            "imported_post", ["source_post_id", "source_id"])
def downgrade(pyramid_env):
    """Drop the unique-name constraint from the preferences table."""
    with context.begin_transaction():
        constraint_name = "_".join((
            config.get('db_schema'),
            config.get('db_user'),
            "preferences_UNQC_name"))
        op.drop_constraint(constraint_name, "preferences")
def get_curl_query(query):
    """Render *query* as a copy-pastable curl command against elasticsearch."""
    auth = config.get('elastic_search_basic_auth', '')
    if auth:
        auth = '-u ' + auth
    host = config.get('elasticsearch_host', 'localhost')
    port = config.get('elasticsearch_port', '9200')
    # Escape single quotes so the JSON body survives the shell quoting.
    payload = json.dumps(query).replace("'", "\\u0027")
    return "curl {} -XGET '{}:{}/_search?pretty' -d '{}'".format(
        auth, host, port, payload)
def connect():
    """Lazily build and cache the module-level Elasticsearch client."""
    global _es
    if _es is not None:
        return _es
    port = config.get('elasticsearch_port', '9200')
    host = config.get('elasticsearch_host', 'localhost')
    auth = config.get('elastic_search_basic_auth', None)
    # Only pass http_auth when basic auth is configured.
    kwargs = {'http_auth': a for a in (auth,) if a}
    _es = Elasticsearch(host + ':' + port, **kwargs)
    return _es
def upgrade(pyramid_env):
    """Make preference names unique."""
    with context.begin_transaction():
        constraint_name = "_".join((
            config.get('db_schema'),
            config.get('db_user'),
            "preferences_UNQC_name"))
        op.create_unique_constraint(constraint_name, "preferences", ["name"])
def upgrade(pyramid_env):
    """Widen every listed URL column to virtuoso's VARCHAR maximum."""
    # 4082 is the virtuoso maximum for VARCHAR. It refuses to modify to VARCHAR
    schema, user = config.get('db_schema'), config.get('db_user')
    with context.begin_transaction():
        for table, column, nullable in url_columns:
            null_clause = "" if nullable else " NOT NULL"
            statement = (
                "alter table %s.%s.%s modify column %s varchar(4082) %s"
                % (schema, user, table, column, null_clause))
            op.execute(statement)
def __table_args__(cls):
    """Tenant-qualified unique constraint on (target id, namespace, key, user)."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    constraint_name = "%s_%s_%s_unique_constraint" % (
        schema, user, cls.__tablename__)
    target_column = getattr(cls, cls.target_id_name)
    return (
        UniqueConstraint(
            target_column, cls.namespace, cls.key, cls.user_id,
            name=constraint_name),
    )
def downgrade(pyramid_env):
    """Reseed the unqualified history sequences from their schema-qualified
    counterparts (virtuoso only)."""
    if not using_virtuoso():
        return
    schema = config.get('db_schema')
    user = config.get('db_user')
    full_schema = '.'.join((schema, user))
    with context.begin_transaction():
        for seqname in history_sequences:
            statement = (
                "SELECT sequence_set('{0}', sequence_set('{1}.{0}', 0, 1), 0)"
                .format(seqname, full_schema))
            op.execute(statement)
def get_base_url(self):
    """
    Abstracted so that we can support virtual hosts or communities in
    the future and access the urls when we can't rely on pyramid's
    current request (such as when celery generates notifications)
    """
    from assembl.lib import config
    port = config.get('public_port')
    # BUGFIX: config values are strings, so the old ``port != 80`` int
    # comparison was always true and ':80' was appended even for the default
    # http port. Compare as strings and tolerate a missing port.
    portString = ':' + str(port) if port and str(port) != '80' else ''
    return 'http://' + config.get('public_hostname') + portString
def upgrade(pyramid_env):
    """De-duplicate imported posts, then track their source ids uniquely."""
    with context.begin_transaction():
        # 1. Re-parent posts whose parent is one of the duplicates below.
        op.execute(""" UPDATE post p SET parent_id = COALESCE( ( SELECT new_post_parent.id AS new_post_parent_id FROM post AS post_to_correct JOIN post AS bad_post_parent ON (post_to_correct.parent_id = bad_post_parent.id) JOIN post AS new_post_parent ON (new_post_parent.message_id = bad_post_parent.message_id AND new_post_parent.id <> bad_post_parent.id) WHERE post_to_correct.parent_id IN ( SELECT MAX(post.id) as max_post_id FROM imported_post JOIN post ON (post.id=imported_post.id) GROUP BY message_id, source_id HAVING COUNT(post.id)>1 ) AND p.id = post_to_correct.id ), p.parent_id ) """)
        # 2. Delete the duplicate imported posts.
        op.execute(""" DELETE FROM post WHERE post.id IN ( SELECT MAX(post.id) as max_post_id FROM imported_post JOIN post ON (post.id=imported_post.id) GROUP BY message_id, source_id HAVING COUNT(post.id)>1 ) """)
        # 3. New column, backfilled from post.message_id, unique per source.
        op.add_column(
            'imported_post',
            sa.Column(
                'source_post_id', sa.Unicode(), nullable=False, index=True,))
        op.execute(""" UPDATE imported_post p SET source_post_id = ( SELECT message_id FROM post WHERE p.id = post.id ) """)
        constraint_name = (
            config.get('db_schema') + "_" + config.get('db_user') +
            "_imported_post_UNQC_source_post_id_source_id")
        op.create_unique_constraint(
            constraint_name, "imported_post",
            ["source_post_id", "source_id"])
    pass
    # Do stuff with the app's models here.
    from assembl import models as m
    db = m.get_session_maker()()
    with transaction.manager:
        pass
def downgrade(pyramid_env):
    """Drop the sub-graph association uniqueness constraints."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    with context.begin_transaction():
        op.drop_constraint(
            "%s_%s_sub_graph_idea_association_UNQC_idea_id_sub_graph_id" % (
                schema, user),
            "sub_graph_idea_association")
        op.drop_constraint(
            "%s_%s_sub_graph_idea_link_association_UNQC_idea_link_id_sub_graph_id" % (
                schema, user),
            "sub_graph_idea_link_association")
def downgrade(pyramid_env):
    """Remove langstring columns from content and wipe all langstring data."""
    langstring_idsequence = "%s.%s.langstring_idsequence" % (
        config.get("db_schema"), config.get("db_user"))
    with context.begin_transaction():
        for column in ("body_id", "subject_id"):
            op.drop_column("content", column)
        op.execute("delete from langstring_entry")
        op.execute("delete from langstring")
        # Reset the id sequence now that the table is empty.
        op.execute("sequence_set('%s', 1, 0)" % langstring_idsequence)
        mark_changed()
def upgrade(pyramid_env):
    """Recreate locale labels under a uniqueness constraint."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    with context.begin_transaction():
        op.execute("delete from locale_label")
        op.create_unique_constraint(
            "%s_%s_locale_label_UNQC_named_locale_id_locale_id_of_label" % (
                schema, user),
            "locale_label", ["named_locale_id", "locale_id_of_label"])
    with transaction.manager:
        # Repopulate the names now that duplicates are impossible.
        from assembl import models as m
        m.LocaleLabel.load_names()
def upgrade(pyramid_env):
    """Widen the language-preference uniqueness to include source_of_evidence."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    with context.begin_transaction():
        op.drop_constraint(
            "%s_%s_user_language_preference_UNQC_user_id_locale_id" % (
                schema, user),
            "user_language_preference")
        op.create_unique_constraint(
            "%s_%s_user_language_preference_UNQC_user_id_locale_id_source_of_evidence" % (
                schema, user),
            "user_language_preference",
            ["user_id", "locale_id", "source_of_evidence"])
def downgrade(pyramid_env):
    """Drop the uniqueness constraints on both sub-graph association tables."""
    schema, user = config.get('db_schema'), config.get('db_user')
    with context.begin_transaction():
        for table, suffix in (
                ("sub_graph_idea_association",
                 "UNQC_idea_id_sub_graph_id"),
                ("sub_graph_idea_link_association",
                 "UNQC_idea_link_id_sub_graph_id")):
            op.drop_constraint(
                "%s_%s_%s_%s" % (schema, user, table, suffix), table)
def upgrade(pyramid_env):
    """Allow one preference per (user, locale, evidence source) instead of
    one per (user, locale)."""
    schema, user = config.get('db_schema'), config.get('db_user')
    with context.begin_transaction():
        old_name = (
            "%s_%s_user_language_preference_UNQC_user_id_locale_id"
            % (schema, user))
        new_name = (
            "%s_%s_user_language_preference_UNQC_user_id_locale_id_source_of_evidence"
            % (schema, user))
        op.drop_constraint(old_name, "user_language_preference")
        op.create_unique_constraint(
            new_name, "user_language_preference",
            ["user_id", "locale_id", "source_of_evidence"])
def upgrade(pyramid_env):
    """Refresh the check constraint on notification_subscription.type."""
    from assembl.models.notification import (
        NotificationSubscription, NotificationSubscriptionClasses)
    with context.begin_transaction():
        table_name = "notification_subscription"
        constraint_name = 'ck_%s_%s_%s_notification_subscription_classes' % (
            config.get('db_schema'), config.get('db_user'), table_name)
        # Recreate so the constraint matches the current class list.
        op.drop_constraint(constraint_name, table_name)
        op.create_check_constraint(
            constraint_name, table_name,
            NotificationSubscription.type.in_(
                NotificationSubscriptionClasses.values()))
def delete_boolean_constraint(db, table, column):
    """Drop any (unnamed) CHECK constraint on *table* whose code mentions *column*.

    Virtuoso keeps CHECK constraints alive after their column is dropped, so
    they must be removed explicitly via db.dba.sys_constraints.
    """
    # The CHECK constraints are generally unnamed.
    # Dropping the column does not delete the constraint. WHY????
    username = config.get('db_user')
    schema = config.get('db_schema')
    constraints = list(db.execute("select c_text, c_mode from db.dba.sys_constraints where c_table = '%s.%s.%s'" % (
        schema, username, table)))
    # NOTE(review): the SELECT lists (c_text, c_mode) but they are unpacked as
    # (constraint_name, constraint_code) — confirm against virtuoso's
    # sys_constraints schema that c_text is the name and c_mode the code.
    for constraint_name, constraint_code in constraints:
        # column name substring would be annoying...
        # (i.e. a column whose name is contained in another identifier would
        # also match this substring test.)
        if column in constraint_code:
            db.execute('alter table "%s"."%s"."%s" drop constraint "%s"' % (
                schema, username, table, constraint_name))
def upgrade(pyramid_env):
    """Rebuild the notification_subscription.type check constraint from the
    current list of subscription classes."""
    from assembl.models.notification import (NotificationSubscription,
                                             NotificationSubscriptionClasses)
    with context.begin_transaction():
        table = "notification_subscription"
        name = 'ck_%s_%s_%s_notification_subscription_classes' % (
            config.get('db_schema'), config.get('db_user'), table)
        op.drop_constraint(name, table)
        allowed_types = NotificationSubscription.type.in_(
            NotificationSubscriptionClasses.values())
        op.create_check_constraint(name, table, allowed_types)
def upgrade(pyramid_env):
    """Create import_record with a (discussion, external iri) unique constraint."""
    with context.begin_transaction():
        op.create_table(
            'import_record',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('discussion_id', sa.Integer,
                      sa.ForeignKey("discussion.id"), nullable=False),
            sa.Column('external_iri_id', IRI_ID, nullable=False),
            sa.Column('internal_iri_id', IRI_ID, nullable=False, index=True),
            sa.Column('server_iri_id', IRI_ID, nullable=False),
            sa.Column('last_modified', sa.DateTime, default=datetime.utcnow))
        constraint_name = "_".join((
            config.get('db_schema'), config.get('db_user'),
            "import_record_UNQC_discussion_id_external_iri_id"))
        op.create_unique_constraint(
            constraint_name, "import_record",
            ["discussion_id", "external_iri_id"])
def downgrade(pyramid_env):
    """Drop provider_domain and restore the (provider_id, uid) uniqueness."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    with context.begin_transaction():
        op.drop_constraint(
            "%s_%s_social_auth_account_UNQC_provider_id_provider_domain_uid"
            % (schema, user),
            "social_auth_account")
        op.drop_column("social_auth_account", "provider_domain")
        # Re-add the old plain domain column.
        op.add_column(
            "social_auth_account", sa.Column("domain", sa.String(200)))
        op.create_unique_constraint(
            "%s_%s_social_auth_account_UNQC_provider_id_uid" % (schema, user),
            "social_auth_account", ["provider_id", "uid"])
def upgrade(pyramid_env):
    """Create python-social-auth storage: nonce, code, association, account."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    with context.begin_transaction():
        # OpenID nonce storage.
        op.create_table(
            'social_auth_nonce',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('server_url', sa.String(255)),
            sa.Column('timestamp', sa.Integer),
            sa.Column('salt', sa.String(40)),
            sa.schema.UniqueConstraint('server_url', 'timestamp', 'salt'))
        # Email-validation codes.
        op.create_table(
            'social_auth_code',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('email', sa.String(200)),
            sa.Column('code', sa.String(32)),  # index = True
            sa.schema.UniqueConstraint('code', 'email'))
        op.create_index(
            '%s_%s_ix_social_auth_code_code' % (schema, user),
            'social_auth_code', ['code'], unique=False)
        # OpenID associations.
        op.create_table(
            'social_auth_association',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('server_url', sa.String(255)),
            sa.Column('handle', sa.String(255)),
            sa.Column('secret', sa.String(255)),
            sa.Column('issued', sa.Integer),
            sa.Column('lifetime', sa.Integer),
            sa.Column('assoc_type', sa.String(64)),
            sa.schema.UniqueConstraint('server_url', 'handle'))
        # Subclass of abstract_agent_account for social accounts.
        op.create_table(
            "social_auth_account",
            sa.Column("id", sa.Integer,
                      sa.ForeignKey('abstract_agent_account.id',
                                    ondelete='CASCADE', onupdate='CASCADE'),
                      primary_key=True),
            sa.Column("provider_id", sa.Integer,
                      sa.ForeignKey('identity_provider.id',
                                    ondelete='CASCADE', onupdate='CASCADE'),
                      nullable=False),
            sa.Column("username", sa.String(200)),
            sa.Column("domain", sa.String(200)),
            sa.Column("uid", sa.String(255), nullable=False),
            sa.Column("extra_data", sa.Text),
            sa.Column("picture_url", sa.String),
            sa.UniqueConstraint('provider_id', 'uid'))
    # Do stuff with the app's models here.
    from assembl import models as m
    db = m.get_session_maker()()
    with transaction.manager:
        pass
def downgrade(pyramid_env):
    """Revert social_auth_account to the pre-provider_domain schema."""
    schema, user = config.get('db_schema'), config.get('db_user')
    with context.begin_transaction():
        old_constraint = (
            "%s_%s_social_auth_account_UNQC_provider_id_provider_domain_uid"
            % (schema, user))
        op.drop_constraint(old_constraint, "social_auth_account")
        op.drop_column("social_auth_account", "provider_domain")
        op.add_column(
            "social_auth_account",
            sa.Column("domain", sa.String(200)))
        new_constraint = (
            "%s_%s_social_auth_account_UNQC_provider_id_uid" % (schema, user))
        op.create_unique_constraint(
            new_constraint, "social_auth_account", ["provider_id", "uid"])
def upgrade(pyramid_env):
    """Replace the uri_id index on document with a unique
    (discussion_id, uri_id) index."""
    with context.begin_transaction():
        # Best-effort removal: the old index may exist under either naming
        # scheme, or not at all.
        # FIX: narrowed the bare ``except:`` clauses to ``except Exception:``
        # so SystemExit/KeyboardInterrupt are not swallowed.
        try:
            op.drop_index('ix_document_uri_id', 'document')
        except Exception:
            try:
                op.drop_index('ix_%s_%s_document_uri_id' % (
                    config.get('db_schema'), config.get('db_user')),
                    'document')
            except Exception:
                pass
        op.execute(
            "CREATE UNIQUE INDEX ix_document_discussion_id_uri_id ON %s.%s.document (discussion_id, uri_id)" % (
                config.get('db_schema'), config.get('db_user')))
def upgrade(pyramid_env):
    """Create the four python-social-auth tables and their indexes."""
    with context.begin_transaction():
        # OpenID nonces.
        op.create_table(
            'social_auth_nonce',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('server_url', sa.String(255)),
            sa.Column('timestamp', sa.Integer),
            sa.Column('salt', sa.String(40)),
            sa.schema.UniqueConstraint('server_url', 'timestamp', 'salt'))
        # Email-validation codes.
        op.create_table(
            'social_auth_code',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('email', sa.String(200)),
            sa.Column('code', sa.String(32)),  # index = True
            sa.schema.UniqueConstraint('code', 'email'))
        op.create_index(
            '%s_%s_ix_social_auth_code_code'
            % (config.get('db_schema'), config.get('db_user')),
            'social_auth_code', ['code'], unique=False)
        # OpenID associations.
        op.create_table(
            'social_auth_association',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('server_url', sa.String(255)),
            sa.Column('handle', sa.String(255)),
            sa.Column('secret', sa.String(255)),
            sa.Column('issued', sa.Integer),
            sa.Column('lifetime', sa.Integer),
            sa.Column('assoc_type', sa.String(64)),
            sa.schema.UniqueConstraint('server_url', 'handle'))
        # Social accounts, a subclass of abstract_agent_account.
        op.create_table(
            "social_auth_account",
            sa.Column(
                "id", sa.Integer,
                sa.ForeignKey('abstract_agent_account.id',
                              ondelete='CASCADE', onupdate='CASCADE'),
                primary_key=True),
            sa.Column(
                "provider_id", sa.Integer,
                sa.ForeignKey('identity_provider.id',
                              ondelete='CASCADE', onupdate='CASCADE'),
                nullable=False),
            sa.Column("username", sa.String(200)),
            sa.Column("domain", sa.String(200)),
            sa.Column("uid", sa.String(255), nullable=False),
            sa.Column("extra_data", sa.Text),
            sa.Column("picture_url", sa.String),
            sa.UniqueConstraint('provider_id', 'uid'))
    # Do stuff with the app's models here.
    from assembl import models as m
    db = m.get_session_maker()()
    with transaction.manager:
        pass
def get_theme_base(config):
    """Return (base path or url, bucket name) from the ``theme_base`` setting,
    or (None, None) if it is not configured."""
    from urlparse import urlunparse

    def split_bucket(path):
        # Isolate the last path segment (ignoring a trailing slash) as the
        # bucket name; the rest is the base path.
        parts = path.split('/')
        if path.endswith('/'):
            parts = parts[:-1]
        return ("/".join(parts[:-1]), parts[-1])

    bucket_path = config.get('theme_base', None)
    if not bucket_path:
        return (None, None)
    if is_url(bucket_path):
        parsed = urlparse(bucket_path)
        base_path, theme_name = split_bucket(parsed.path)
        base_url = urlunparse(
            (parsed.scheme, parsed.netloc, base_path, None, None, None))
        return (base_url, theme_name)
    return split_bucket(bucket_path)
def verify_email_token(token):
    """Resolve an email-confirmation token to its EmailAccount, or None."""
    # Token format is "<id>f<hex hash>": ids are decimal digits, so the
    # first 'f' delimits the two parts.
    id, hash = token.split('f', 1)
    email = EmailAccount.get(int(id))
    if not email:
        return None
    expected = str(email.id) + email.email + config.get(
        'security.email_token_salt')
    if verify_password(expected, hash, HashEncoding.HEX):
        return email
def verify_email_token(token, max_age=None):
    # Validate an account token; returns (account or None, Validity code).
    data, valid = verify_data_token(token, max_age=max_age)
    if valid == Validity.BAD_HASH:
        # The hash may have been salted with the account's email: interpret
        # the payload as an account id and retry with that salt.
        try:
            data = int(data)
        except:
            return None, Validity.INVALID_FORMAT
        account = AbstractAgentAccount.get(data)
        if not account:
            return None, Validity.DATA_NOT_FOUND
        data, valid = verify_data_token(token, account.email, max_age)
        return account, valid
    # Try decoding legacy
    # NOTE(review): this legacy "<id>f<hex hash>" path also runs when the
    # first verify_data_token call reported VALID or INVALID_FORMAT — confirm
    # whether a successful modern token should return before reaching here.
    try:
        id, hash = token.split('f', 1)
        account = AbstractAgentAccount.get(int(id))
        if not account:
            return None, Validity.DATA_NOT_FOUND
        if verify_password(
                str(account.id) + account.email + config.get(
                    'security.account_token_salt'), hash, HashEncoding.HEX):
            return account, Validity.VALID
        return account, Validity.BAD_HASH
    except:
        return None, Validity.INVALID_FORMAT
def discussionCreated(self, discussion):
    """Ensure the new discussion has an IMAP mailbox and a matching vmm user."""
    from assembl.models import IMAPMailbox
    mailbox = None
    # Random password: b64encode(urandom(12)) yields 16 base64 characters.
    password = b64encode(urandom(12))
    # Reuse an existing IMAP source if the discussion already has one.
    for source in discussion.sources:
        if isinstance(source, IMAPMailbox):
            mailbox = source
            break
    if not mailbox:
        email = "@".join((discussion.slug, config.get("imap_domain")))
        mailbox = IMAPMailbox(
            name=discussion.slug + " imap", host='localhost',
            username=email, password=password, admin_sender=email,
            discussion=discussion, folder='inbox', port=143, use_ssl=False)
        discussion.db.add(mailbox)
    if not mailbox.password:
        mailbox.password = password
    # NOTE(review): ``email`` is only bound in the "no mailbox" branch above;
    # if a mailbox already existed the vmm call below raises NameError —
    # confirm whether this block belongs inside ``if not mailbox:``.
    with TemporaryFile() as stderr:
        rcode = call(['sudo', 'vmm', 'ua', email, password], stderr=stderr)
        if rcode != 0:
            stderr.seek(0)
            error = stderr.read()
            # "already exists" is tolerated: the system user was created on a
            # previous run.
            if b" already exists" not in error:
                raise RuntimeError("vmm useradd failed: %d\n%s" % (rcode, error))
def mime_type(request):
    """HEAD-check the mime type of the resource at the ``url`` query parameter.

    Raises HTTPBadRequest for a missing url or a non-http(s) scheme.
    """
    url = request.params.get('url', None)
    if not url:
        raise HTTPBadRequest("Missing 'url' parameter")
    parsed = urlparse(url)
    if not parsed or parsed.scheme not in ('http', 'https'):
        raise HTTPBadRequest("Wrong scheme")
    if parsed.netloc.split(":")[0] == config.get('public_hostname'):
        # is it one of our own documents?
        # If so, detect it and shortcut to avoid the pyramid handler calling
        # another pyramid handler, as this exhausts pyramid threads rapidly
        # and can deadlock the whole application
        r = re.match(
            r'^https?://[\w\.]+(?:\:\d+)?/data/.*/documents/(\d+)/data(?:\?.*)?$',
            url)
        if r:
            document_id = r.groups(0)[0]
            from sqlalchemy.sql.functions import func
            # NOTE(review): .first() returns None for an unknown document id,
            # which raises TypeError on unpacking — confirm ids are always
            # valid here, or a 404 would be preferable.
            mimetype, create_date, size = File.default_db.query(
                File.mime_type, File.creation_date, func.length(File.data)
            ).filter_by(id=int(document_id)).first()
            return Response(
                body=None, content_type=str(mimetype),
                content_length=size, last_modified=create_date)
    try:
        result = requests.head(url, timeout=15)
    except requests.ConnectionError:
        # Upstream unreachable: report Service Unavailable, echoing the url.
        return Response(
            status=503,
            location=url)
    return Response(
        content_type=result.headers.get('Content-Type', None),
        status=result.status_code,
        location=result.url)
def upgrade(pyramid_env):
    """Add User.is_machine and Extract.extract_state, then populate the
    machine user."""
    from assembl import models as m
    from assembl.models.idea_content_link import ExtractStates, extract_states_identifiers
    with context.begin_transaction():
        schema = config.get('db_schema')
        # User can be a machine
        op.add_column(
            'user',
            sa.Column('is_machine', sa.Boolean(), default=False,
                      server_default='0'))
        # Add the extract state: The extract can be Published or Submitted
        state_enum = sa.Enum(*extract_states_identifiers,
                             name='extract_states')
        state_enum.create(op.get_bind())
        default_state = ExtractStates.PUBLISHED.value
        op.add_column(
            'extract',
            sa.Column(
                'extract_state',
                sa.Enum(*extract_states_identifiers, name='extract_states'),
                nullable=False,
                default=default_state,
                server_default=default_state),
            schema=schema)
    # Add the machine user
    db = m.get_session_maker()()
    with transaction.manager:
        from assembl.indexing import join_transaction
        join_transaction()
        m.User.populate_db(db)
def upgrade(pyramid_env):
    """Create the import_record table, unique per (discussion, external iri)."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    with context.begin_transaction():
        op.create_table(
            'import_record',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('discussion_id', sa.Integer,
                      sa.ForeignKey("discussion.id"), nullable=False),
            sa.Column('external_iri_id', IRI_ID, nullable=False),
            sa.Column('internal_iri_id', IRI_ID, nullable=False, index=True),
            sa.Column('server_iri_id', IRI_ID, nullable=False),
            sa.Column('last_modified', sa.DateTime, default=datetime.utcnow))
        op.create_unique_constraint(
            schema + "_" + user +
            "_import_record_UNQC_discussion_id_external_iri_id",
            "import_record", ["discussion_id", "external_iri_id"])
def upgrade(pyramid_env):
    """Swap the document uri_id index for a unique (discussion_id, uri_id) one."""
    schema = config.get('db_schema')
    user = config.get('db_user')
    with context.begin_transaction():
        # Best-effort: the old index may exist under either naming scheme.
        try:
            op.drop_index('ix_document_uri_id', 'document')
        except:
            try:
                op.drop_index(
                    'ix_%s_%s_document_uri_id' % (schema, user), 'document')
            except:
                pass
        op.execute(
            "CREATE UNIQUE INDEX ix_document_discussion_id_uri_id ON %s.%s.document (discussion_id, uri_id)"
            % (schema, user))
def __init__(self, discussion, apikey=None):
    """Google Translate v2 backend for a discussion.

    :param apikey: optional API key; falls back to the
        ``google.server_api_key`` configuration entry when omitted.
    """
    super(GoogleTranslationService, self).__init__(discussion)
    import apiclient.discovery
    # FIX: honour an explicitly-passed apikey instead of silently discarding
    # it; the config fallback preserves the previous default behaviour.
    # Look it up in config. TODO: Admin property of discussion
    apikey = apikey or config.get("google.server_api_key")
    self.client = apiclient.discovery.build(
        'translate', 'v2', developerKey=apikey)
def downgrade(pyramid_env):
    """Drop the helper indexes created for the listed foreign keys."""
    schema = config.get("db_schema")
    with context.begin_transaction():
        # Each entry is "table.column".
        for table, column in (fk.split(".") for fk in foreign_keys):
            op.drop_index(index_name(schema, table, column), table)
def verify_email_token(token):
    """Resolve an email-confirmation token to its EmailAccount, or None."""
    # Ids are decimal digits, so the first 'f' delimits id from hex hash.
    id, hash = token.split('f', 1)
    email = EmailAccount.get(id=int(id))
    if not email:
        return None
    expected = str(email.id) + email.email + config.get(
        'security.email_token_salt')
    if verify_password(expected, hash, True):
        return email
def verify_password(password, hash, encoding=HashEncoding.BINARY,
                    salt_size=SALT_SIZE):
    """ Verifies a password against a salted hash """
    # The salt is stored as a prefix of the hash; its encoded length
    # depends on the encoding.
    if encoding == HashEncoding.BINARY:
        salt, hash = hash[:salt_size], hash[salt_size:]
    elif encoding == HashEncoding.HEX:
        hex_len = 2 * salt_size
        salt = unhexlify(hash[:hex_len])
        hash = unhexlify(hash[hex_len:])
    elif encoding == HashEncoding.BASE64:
        hash = str(unquote(hash))
        b64_len = 4 * int((salt_size + 2) / 3)
        salt = urlsafe_b64decode(hash[:b64_len])
        hash = urlsafe_b64decode(hash[b64_len:])
    else:
        raise ValueError()
    hasher = hashlib.new(config.get('security.hash_algorithm') or 'sha256')
    if not isinstance(password, unicode):
        password = password.decode('utf-8')
    hasher.update(password.encode('utf-8'))
    hasher.update(salt)
    return hasher.digest() == hash
def send_confirmation_email(request, email):
    """Send the 'please confirm your email/account' message for *email*."""
    mailer = get_mailer(request)
    localizer = get_localizer(request)
    # An unverified User confirms the whole account; otherwise just the email.
    confirm_what = _('email')
    if isinstance(email.profile, User) and not email.profile.verified:
        confirm_what = _('account')
    data = {
        'name': email.profile.name,
        'email': email.email,
        'confirm_what': localizer.translate(confirm_what),
        'confirm_url': request.route_url('user_confirm_email',
                                         ticket=email_token(email))
    }
    message = Message(
        subject=localizer.translate(_('confirm_title', default="Please confirm your ${confirm_what} with Assembl", mapping=data)),
        sender=config.get('assembl.admin_email'),
        recipients=["%s <%s>" % (email.profile.name, email.email)],
        body=localizer.translate(_('confirm_email', default=u"""Hello, ${name}! Please confirm your ${confirm_what} <${email}> with Assembl by clicking on the link below. <${confirm_url}> """, mapping=data)),
        html=localizer.translate(_('confirm_email_html', default=u"""<p>Hello, ${name}!</p> <p>Please <a href="${confirm_url}">confirm your ${confirm_what}</a> <${email}> with Assembl.</p> """, mapping=data)))
    #if deferred:
    #    mailer.send_to_queue(message)
    #else:
    mailer.send(message)
def downgrade(pyramid_env):
    """Recreate locale.code as String(20), preserving values and the unique index."""
    index_name = "%s_%s_locale_UNQC_code" % (
        config.get('db_schema'), config.get('db_user'))
    with context.begin_transaction():
        op.drop_index(index_name)
        # Stash existing codes aside so the column can be dropped/recreated.
        op.add_column("locale", sa.Column("temp_code", sa.String))
        op.execute("UPDATE locale set temp_code = code")
    with context.begin_transaction():
        op.drop_column("locale", "code")
        op.add_column("locale", sa.Column("code", sa.String(20)))
        op.execute("UPDATE locale set code = temp_code")
    with context.begin_transaction():
        op.drop_column("locale", "temp_code")
        op.create_index(index_name, 'locale', ['code'], unique=True)
def upgrade(pyramid_env):
    """Add machine users and a publication state on extracts."""
    from assembl import models as m
    from assembl.models.idea_content_link import (
        ExtractStates, extract_states_identifiers)
    with context.begin_transaction():
        schema = config.get('db_schema')
        # A user row may now represent a machine rather than a person.
        op.add_column(
            'user',
            sa.Column('is_machine', sa.Boolean(), default=False,
                      server_default='0'))
        # New enum type, then the column using it: an extract is either
        # Published or Submitted; existing rows default to Published.
        sa.Enum(*extract_states_identifiers,
                name='extract_states').create(op.get_bind())
        op.add_column(
            'extract',
            sa.Column('extract_state',
                      sa.Enum(*extract_states_identifiers,
                              name='extract_states'),
                      nullable=False,
                      default=ExtractStates.PUBLISHED.value,
                      server_default=ExtractStates.PUBLISHED.value),
            schema=schema)
    # Re-run populate_db so the machine user gets created.
    db = m.get_session_maker()()
    with transaction.manager:
        from assembl.indexing import join_transaction
        join_transaction()
        m.User.populate_db(db)
def sanitize_next_view(next_view):
    """Validate a post-login redirect target.

    :param next_view: the requested redirect URL or path (may be None)
    :return: *next_view* when safe to redirect to, else None.  Relative
        paths pass through; absolute URLs must point at our own
        public_hostname over a scheme allowed by configuration.
    """
    if not next_view:
        return next_view
    # BUGFIX: also parse scheme-relative references ('//host/path') --
    # they contain no ':/' and were previously passed through unchecked,
    # an open-redirect hole.
    if ':/' in next_view or next_view.startswith('//'):
        # urlparse always returns a (truthy) 6-tuple, so no None check.
        parsed = urlparse(next_view)
        if parsed.netloc != config.get("public_hostname"):
            return None
        if parsed.scheme == 'http':
            if asbool(config.get("require_secure_connection")):
                return None
        elif parsed.scheme == 'https':
            if not asbool(config.get("accept_secure_connection")):
                return None
        else:
            # Unknown or empty scheme (incl. scheme-relative): reject.
            return None
    return next_view
def upgrade(pyramid_env):
    """Migrate content.subject/body text into langstring rows.

    Adds subject_id/body_id FK columns on content, then for every post
    creates a langstring (id drawn from the langstring id sequence) with
    one entry in the discussion's first preferred locale, and points the
    new column at it.  Column creation must commit before the data pass.
    """
    with context.begin_transaction():
        op.add_column(
            "content",
            sa.Column("subject_id", sa.Integer, sa.ForeignKey("langstring.id")))
        op.add_column(
            "content",
            sa.Column("body_id", sa.Integer, sa.ForeignKey("langstring.id")))
    # Fully qualified sequence name (Virtuoso-style schema.user prefix).
    langstring_idsequence = "%s.%s.langstring_idsequence" % (
        config.get("db_schema"), config.get("db_user"))
    # Do stuff with the app's models here.
    from assembl import models as m
    db = m.get_session_maker()()
    with transaction.manager:
        discussion_locales = db.execute(
            "select id, preferred_locales from discussion")
        # Keep only the first preferred locale of each discussion,
        # falling back to 'und' (undetermined) when none is set.
        discussion_locales = {
            id: (locs or 'und').split(' ')[0]
            for (id, locs) in discussion_locales
        }
        locales = dict(list(db.execute("select code, id from locale")))
        locale_id_for_discussion = {
            id: locales[loc]
            for (id, loc) in discussion_locales.iteritems()
        }
        for target in ("subject", "body"):
            posts = db.execute(
                "select id, discussion_id, %s from content" % target)
            for post_id, discussion_id, content in posts:
                # Draw the next id from the sequence for the new langstring.
                (langstring_id, ) = next(
                    iter(
                        db.execute("select sequence_next('%s')" %
                                   langstring_idsequence)))
                db.execute("INSERT into langstring values (%d)" % (
                    langstring_id, ))
                # Bound parameters here: `content` is free text.
                db.execute(
                    text(
                        """INSERT into langstring_entry (langstring_id, locale_id, value)
values (:langstring_id, :locale_id, :value)""").bindparams(
                        langstring_id=langstring_id,
                        locale_id=locale_id_for_discussion[discussion_id],
                        value=content))
                db.execute("UPDATE content set %s_id = %d WHERE id=%d" % (
                    target, langstring_id, post_id))
        # Tell zope.sqlalchemy that raw-SQL writes happened in this session.
        mark_changed()
def get_provider_data(get_route, providers=None):
    """Build the login-provider descriptors consumed by the frontend.

    :param get_route: callable(route_name, **kw) -> URL for that route
    :param providers: optional list of enabled provider type names;
        defaults to the 'login_providers' config setting.  The list
        passed in is never mutated.
    :return: list of dicts (name/type/extra/login/add_social_account),
        one per enabled provider; each enabled SAML IdP expands to its
        own entry.
    """
    from assembl.models.auth import IdentityProvider
    if providers is None:
        providers = aslist(config.get('login_providers'))
    else:
        # BUGFIX: work on a copy; we may remove 'saml' below and must not
        # mutate the caller's list.
        providers = list(providers)
    providers_by_name = IdentityProvider.default_db.query(
        IdentityProvider.name, IdentityProvider.provider_type
    ).order_by(IdentityProvider.id).all()
    saml_providers = []
    if 'saml' in providers:
        providers.remove('saml')
        saml_providers = config.get('SOCIAL_AUTH_SAML_ENABLED_IDPS')
        # The setting may already be a dict, or a JSON-encoded string.
        if not isinstance(saml_providers, dict):
            saml_providers = json.loads(saml_providers)
    provider_data = [
        {
            "name": name.capitalize(),
            "type": ptype,
            "extra": {},
            "add_social_account": get_route(
                'add_social_account', backend=ptype),
            "login": get_route('social.auth', backend=ptype),
        }
        for (name, ptype) in providers_by_name
        if ptype in providers
    ]
    if 'yahoo' in providers:
        # Yahoo uses the OpenID+OAuth hybrid flow.
        for provider in provider_data:
            if provider['type'] == 'yahoo':
                provider['extra'] = {
                    "oauth": True,
                    "openid_identifier": 'yahoo.com',
                }
    if saml_providers:
        provider_data.extend([
            {
                "name": data["description"],
                "type": "saml",
                "add_social_account": get_route(
                    'add_social_account', backend='saml'),
                "login": get_route('social.auth', backend='saml'),
                "extra": {
                    "idp": prov_id
                }
            } for prov_id, data in saml_providers.iteritems()
        ])
    return provider_data