Example #1
def add_location_model(db):
    """ Add location model """
    metadata = MetaData(bind=db.bind)

    # Create location table
    Location_V0.__table__.create(db.bind)
    db.commit()

    # Inspect the tables we need
    user = inspect_table(metadata, "core__users")
    collections = inspect_table(metadata, "core__collections")
    media_entry = inspect_table(metadata, "core__media_entries")
    media_comments = inspect_table(metadata, "core__media_comments")

    # Now add location support to the various models
    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(user)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(collections)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(media_entry)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(media_comments)

    db.commit()
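
These migrations lean on an inspect_table helper to get references to already-existing tables. A minimal sketch of what such a helper might look like, assuming SQLAlchemy's pre-1.4 reflection API (MediaGoblin's actual helper may differ):

from sqlalchemy import Table

def inspect_table(metadata, table_name):
    # Reflect the existing table from the database bound to `metadata`
    return Table(table_name, metadata, autoload=True,
                 autoload_with=metadata.bind)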
def upgrade():
    """
    Removes comments which have been deleted and exist as a tombstone but still
    have their Comment wrapper.
    """
    db = op.get_bind()
    metadata = MetaData(bind=db)
    comment_table = inspect_table(metadata, "core__comment_links")
    gmr_table = inspect_table(metadata, "core__generic_model_reference")

    # Get the Comment wrappers
    comment_wrappers = list(db.execute(comment_table.select()))

    for wrapper in comment_wrappers:
        # Query for the graveyard GMR comment
        gmr = db.execute(gmr_table.select().where(and_(
            gmr_table.c.id == wrapper.comment_id,
            gmr_table.c.model_type == "core__graveyard"
        ))).first()

        if gmr is not None:
            # Okay, delete this wrapper as it points to a deleted comment
            db.execute(comment_table.delete().where(
                comment_table.c.id == wrapper.id
            ))
def upgrade():
    """
    The problem is deletions are occuring and as we expect the
    GenericModelReference objects are being updated to point to the tombstone
    object. The issue is that collections now contain deleted items, this
    causes problems when it comes to rendering them for example.

    This migration is to remove any Graveyard objects (tombstones) from any
    Collection.
    """
    db = op.get_bind()
    metadata = MetaData(bind=db)
   
    gmr_table = inspect_table(metadata, "core__generic_model_reference")
    collection_items_table = inspect_table(metadata, "core__collection_items")
    graveyard_table = inspect_table(metadata, "core__graveyard")

    res = list(db.execute(graveyard_table.select()))
    for tombstone in res:
        # Get GMR for tombstone
        gmr = db.execute(gmr_table.select().where(and_(
            gmr_table.c.obj_pk == tombstone.id,
            gmr_table.c.model_type == "core__graveyard"
        ))).first()

        # If there is no GMR, the tombstone can't be in a collection (a GMR
        # is required for that), so we're all good
        if gmr is None:
            continue

        # Delete all the CollectionItem objects for this GMR
        db.execute(collection_items_table.delete().where(
            collection_items_table.c.object_id == gmr.id
        ))
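
For context, a GenericModelReference (GMR) row is just a polymorphic pointer: its own primary key, the primary key of the object it points at, and the table that object lives in. A hypothetical row for a tombstoned object, matching the columns used above:

# Hypothetical GMR row (values invented for illustration)
gmr_row = {
    "id": 42,                         # the GMR's own primary key
    "obj_pk": 7,                      # primary key of the referenced object
    "model_type": "core__graveyard",  # table the reference points into
}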
Example #4
def add_new_notification_tables(db):
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')
    mediaentry_table = inspect_table(metadata, 'core__media_entries')
    mediacomment_table = inspect_table(metadata, 'core__media_comments')

    CommentSubscription_v0.__table__.create(db.bind)

    Notification_v0.__table__.create(db.bind)
    CommentNotification_v0.__table__.create(db.bind)
    ProcessingNotification_v0.__table__.create(db.bind)
Example #5
def remove_gps_from_image(db):
    """
    This will remove GPS coordinates from the image model to put them
    on the new Location model.
    """
    metadata = MetaData(bind=db.bind)
    image_table = inspect_table(metadata, "image__mediadata")
    location_table = inspect_table(metadata, "core__locations")
    media_entries_table = inspect_table(metadata, "core__media_entries")

    # First do the data migration
    for row in db.execute(image_table.select()):
        fields = {
            "longitude": row.gps_longitude,
            "latitude": row.gps_latitude,
            "altitude": row.gps_altitude,
            "direction": row.gps_direction,
        }

        # Remove empty values; build a new dict rather than deleting keys
        # from the one we're iterating over
        fields = dict((k, v) for k, v in fields.items() if v is not None)

        # No point in adding empty locations
        if not fields:
            continue

        # JSONEncoded is actually a string field just json.dumped
        # without the ORM we're responsible for that.
        fields = json.dumps(fields)

        location = db.execute(location_table.insert().values(position=fields))

        # now store the new location model on Image
        db.execute(
            media_entries_table.update()
            .values(location=location.inserted_primary_key[0])
            .where(media_entries_table.c.id == row.media_entry)
        )

    db.commit()

    # All the data has been migrated across; let's remove the fields
    image_table.columns["gps_longitude"].drop()
    image_table.columns["gps_latitude"].drop()
    image_table.columns["gps_altitude"].drop()
    image_table.columns["gps_direction"].drop()

    db.commit()
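
Since JSONEncoded is just a serialized-text column, the migration has to json.dumps by hand. A quick sketch of what ends up stored in the Location position field (coordinates invented for illustration):

import json

fields = {"longitude": -2.2426, "latitude": 53.4808}
stored = json.dumps(fields)
# stored is the string '{"longitude": -2.2426, "latitude": 53.4808}';
# the ORM's JSONEncoded type json.loads() it back on the way out.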
Example #6
def unique_collections_slug(db):
    """Add unique constraint to collection slug"""
    metadata = MetaData(bind=db.bind)
    collection_table = inspect_table(metadata, "core__collections")
    existing_slugs = {}
    slugs_to_change = []

    for row in db.execute(collection_table.select()):
        # if duplicate slug, generate a unique slug
        if row.creator in existing_slugs and row.slug in \
           existing_slugs[row.creator]:
            slugs_to_change.append(row.id)
        else:
            if row.creator not in existing_slugs:
                existing_slugs[row.creator] = [row.slug]
            else:
                existing_slugs[row.creator].append(row.slug)

    for row_id in slugs_to_change:
        new_slug = unicode(uuid.uuid4())
        db.execute(collection_table.update().
                   where(collection_table.c.id == row_id).
                   values(slug=new_slug))
    # sqlite does not like to change the schema when a transaction(update) is
    # not yet completed
    db.commit()

    constraint = UniqueConstraint('creator', 'slug',
                                  name='core__collection_creator_slug_key',
                                  table=collection_table)
    constraint.create()

    db.commit()
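
The duplicate-detection bookkeeping above can be exercised on its own; here is a minimal sketch of the same logic over hypothetical (id, creator, slug) rows:

existing_slugs = {}
slugs_to_change = []

rows = [(1, 10, u"holiday"), (2, 10, u"holiday"), (3, 11, u"holiday")]
for row_id, creator, slug in rows:
    if slug in existing_slugs.setdefault(creator, []):
        # same creator already owns this slug, so flag it for renaming
        slugs_to_change.append(row_id)
    else:
        existing_slugs[creator].append(slug)

assert slugs_to_change == [2]  # only creator 10's second "holiday" collides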
Example #7
def wants_notifications(db):
    """Add a wants_notifications field to User model"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    col = Column('wants_notifications', Boolean, default=True)
    col.create(user_table)
    db.commit()
Example #8
def fix_CollectionItem_v0_constraint(db_conn):
    """Add the forgotten Constraint on CollectionItem"""

    global collectionitem_unique_constraint_done
    if collectionitem_unique_constraint_done:
        # Reset it. Maybe the whole thing gets run again
        # For a different db?
        collectionitem_unique_constraint_done = False
        return

    metadata = MetaData(bind=db_conn.bind)

    CollectionItem_table = inspect_table(metadata, 'core__collection_items')

    constraint = UniqueConstraint('collection', 'media_entry',
        name='core__collection_items_collection_media_entry_key',
        table=CollectionItem_table)

    try:
        constraint.create()
    except ProgrammingError:
        # User probably has an install that was run since the
        # collection tables were added, so we don't need to run this migration.
        pass

    db_conn.commit()
Example #9
def add_transcoding_progress(db_conn):
    metadata = MetaData(bind=db_conn.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('transcoding_progress', SmallInteger)
    col.create(media_entry)
    db_conn.commit()
Example #10
def add_license_preference(db):
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')

    col = Column('license_preference', Unicode)
    col.create(user_table)
    db.commit()
Example #11
def add_mediaentry_collected(db_conn):
    metadata = MetaData(bind=db_conn.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('collected', Integer, default=0)
    col.create(media_entry)
    db_conn.commit()
Example #12
def upload_limits(db):
    """Add user upload limit columns"""
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')
    media_entry_table = inspect_table(metadata, 'core__media_entries')

    col = Column('uploaded', Integer, default=0)
    col.create(user_table)

    col = Column('upload_limit', Integer)
    col.create(user_table)

    col = Column('file_size', Integer, default=0)
    col.create(media_entry_table)

    db.commit()
Example #13
def add_file_metadata(db):
    """Add file_metadata to MediaFile"""
    metadata = MetaData(bind=db.bind)
    media_file_table = inspect_table(metadata, "core__mediafiles")

    col = Column('file_metadata', MutationDict.as_mutable(JSONEncoded))
    col.create(media_file_table)

    db.commit()
Example #14
def add_orig_metadata_column(db_conn):
    metadata = MetaData(bind=db_conn.bind)

    vid_data = inspect_table(metadata, "video__mediadata")

    col = Column('orig_metadata', Unicode,
                 default=None, nullable=True)
    col.create(vid_data)
    db_conn.commit()
Example #15
def change_metadata_format(db):
    """Change orig_metadata format for multi-stream a-v"""
    db_metadata = MetaData(bind=db.bind)

    vid_data = inspect_table(db_metadata, "video__mediadata")

    for row in db.execute(vid_data.select()):
        if not row.orig_metadata:
            continue

        metadata = json.loads(row.orig_metadata)

        # before this migration there was info about only one video or audio
        # stream. So, we store existing info as the first item in the list
        new_metadata = {'audio': [], 'video': [], 'common': {}}
        video_key_map = {  # old: new
                'videoheight': 'height',
                'videowidth': 'width',
                'videorate': 'rate',
                }
        audio_key_map = {  # old: new
                'audiochannels': 'channels',
                }
        common_key_map = {
                'videolength': 'length',
                }

        new_metadata['video'] = [dict((v, metadata.get(k))
                for k, v in video_key_map.items() if metadata.get(k))]
        new_metadata['audio'] = [dict((v, metadata.get(k))
                for k, v in audio_key_map.items() if metadata.get(k))]
        new_metadata['common'] = dict((v, metadata.get(k))
                for k, v in common_key_map.items() if metadata.get(k))

        # 'mimetype' should be in tags
        new_metadata['common']['tags'] = {'mimetype': metadata.get('mimetype')}
        if 'tags' in metadata:
            new_metadata['video'][0]['tags'] = {}
            new_metadata['audio'][0]['tags'] = {}

            tags = metadata['tags']

            video_keys = ['encoder', 'encoder-version', 'video-codec']
            audio_keys = ['audio-codec']

            for t, v in tags.items():
                if t in video_keys:
                    new_metadata['video'][0]['tags'][t] = tags[t]
                elif t in audio_keys:
                    new_metadata['audio'][0]['tags'][t] = tags[t]
                else:
                    new_metadata['common']['tags'][t] = tags[t]
        db.execute(vid_data.update()
                .where(vid_data.c.media_entry==row.media_entry)
                .values(orig_metadata=json.dumps(new_metadata)))
    db.commit()
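
To make the reshaping concrete, here is a hedged before/after sketch of one orig_metadata payload (all field values invented for illustration):

old = {"videoheight": 480, "videowidth": 640, "videorate": [25, 1],
       "audiochannels": 2, "videolength": 171,
       "mimetype": "application/ogg",
       "tags": {"encoder": "Lavf53", "audio-codec": "Vorbis"}}

# After the loop above, this becomes roughly:
new = {
    "video": [{"height": 480, "width": 640, "rate": [25, 1],
               "tags": {"encoder": "Lavf53"}}],
    "audio": [{"channels": 2, "tags": {"audio-codec": "Vorbis"}}],
    "common": {"length": 171, "tags": {"mimetype": "application/ogg"}},
}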
Example #16
def add_metadata_column(db):
    metadata = MetaData(bind=db.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('media_metadata', MutationDict.as_mutable(JSONEncoded),
        default=MutationDict())
    col.create(media_entry)

    db.commit()
def upgrade():
    """"
    This replaces the Notification.obj with the ID of the Comment (i.e. comment
    link) ID instead of the TextComment object.
    """
    db = op.get_bind()
    metadata = MetaData(bind=db)
    notification_table = inspect_table(metadata, "core__notifications")
    comment_table = inspect_table(metadata, "core__comment_links")
    gmr_table = inspect_table(metadata, "core__generic_model_reference")

    # Get the notifications.
    notifications = list(db.execute(notification_table.select()))

    # Iterate through all the notifications
    for notification in notifications:
        # Lookup the Comment link object from the notification's ID
        comment_link = db.execute(comment_table.select().where(
            comment_table.c.comment_id == notification.object_id
        )).first()

        # Find the GMR for this comment or make one if one doesn't exist.
        gmr = db.execute(gmr_table.select().where(and_(
            gmr_table.c.obj_pk == comment_link.id,
            gmr_table.c.model_type == "core__comment_links"
        ))).first()

        # If it doesn't exist we need to create one.
        if gmr is None:
            gmr = db.execute(gmr_table.insert().values(
                obj_pk=comment_link.id,
                model_type="core__comment_links"
            )).inserted_primary_key[0]
        else:
            gmr = gmr.id

        # Okay now we need to update the notification with the ID of the link
        # rather than the ID of TextComment object.
        db.execute(notification_table.update().values(
            object_id=gmr
        ).where(
            notification_table.c.id == notification.id
        ))
Example #18
def drop_MediaEntry_collected(db):
    """
    Drop unused MediaEntry.collected column
    """
    metadata = MetaData(bind=db.bind)

    media_entries = inspect_table(metadata, 'core__media_entries')
    media_collected = media_entries.columns['collected']

    media_collected.drop()

    db.commit()
def downgrade():
    """
    This puts back the TextComment ID for the notification.object_id field
    where we're using the Comment object (i.e. the comment link ID)
    """
    db = op.get_bind()
    metadata = MetaData(bind=db)
    notification_table = inspect_table(metadata, "core__notifications")
    comment_table = inspect_table(metadata, "core__comment_links")

    # Notificaitons
    notifications = list(db.execute(notification_table.select()))

    # Iterate through all the notifications
    for notification in notifications:
        # Lookup the Comment link object from the notification's ID
        comment_link = db.execute(comment_table.select().where(
            comment_table.c.id == notification.object_id
        )).first()

        # Find the GMR for the TextComment
        gmr = db.execute(gmr_table.select().where(and_(
            gmr_table.c.obj_pk == comment_link.id,
            gmr_table.c.model_type == "core__comment_links"
        ))).first()

        if gmr is None:
            gmr = db.execute(gmr_table.insert().values(
                obj_pk=comment_link.id,
                model_type="core__comment_links"
            )).inserted_primary_key[0]
        else:
            gmr = gmr.id

        # Update the notification with the TextComment (i.e. the comment object)
        db.execute(notification_table.update().values(
            object_id=gmr
        ).where(
            notification_table.c.id == notification.id
        ))
Example #20
def webm_640_to_webm_video(db):
    metadata = MetaData(bind=db.bind)

    file_keynames = inspect_table(metadata, 'core__file_keynames')

    for row in db.execute(file_keynames.select()):
        if row.name == 'webm_640':
            db.execute(
                file_keynames.update(). \
                where(file_keynames.c.id==row.id).\
                values(name='webm_video'))

    db.commit()
Example #21
def mediaentry_new_slug_era(db):
    """
    Update for the new era for media type slugs.

    Entries without slugs now display differently in the url like:
      /u/cwebber/m/id=251/

    ... because of this, we should back-convert:
     - entries without slugs should be converted to use the id, if possible, to
       make old urls still work
     - slugs with = (or also : which is now also not allowed) to have those
       stripped out (small possibility of breakage here sadly)
    """
    def slug_and_user_combo_exists(slug, uploader):
        return db.execute(
            media_table.select(
                and_(media_table.c.uploader == uploader,
                     media_table.c.slug == slug))).first() is not None

    def append_garbage_till_unique(row, new_slug):
        """
        Attach junk to this row until it's unique, then save it
        """
        if slug_and_user_combo_exists(new_slug, row.uploader):
            # okay, still no success;
            # let's whack junk on there till it's unique.
            new_slug += '-' + uuid.uuid4().hex[:4]
            # keep going if necessary!
            while slug_and_user_combo_exists(new_slug, row.uploader):
                new_slug += uuid.uuid4().hex[:4]

        db.execute(
            media_table.update(). \
            where(media_table.c.id==row.id). \
            values(slug=new_slug))

    metadata = MetaData(bind=db.bind)

    media_table = inspect_table(metadata, 'core__media_entries')

    for row in db.execute(media_table.select()):
        # no slug, try setting to an id
        if not row.slug:
            append_garbage_till_unique(row, unicode(row.id))
        # has "=" or ":" in it... we're getting rid of those
        elif u"=" in row.slug or u":" in row.slug:
            append_garbage_till_unique(
                row,
                row.slug.replace(u"=", u"-").replace(u":", u"-"))

    db.commit()
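
A few hedged examples of what this back-conversion produces (hypothetical rows; the random suffix comes from uuid4 and varies per run):

# slug is empty         ->  slug becomes the id, e.g. u"251"
# slug == u"cat=pics"   ->  u"cat-pics" (or u"cat-pics-3f9a" on collision)
# slug == u"a:b=c"      ->  u"a-b-c"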
Example #23
def upgrade():
    """
    This ensures that the Report.object_id field is nullable, it seems for a
    short period of time it could have been NOT NULL but was fixed later.
    """
    db = op.get_bind()
    metadata = MetaData(bind=db)
    report_table = inspect_table(metadata, "core__reports")

    # Check whether the field is nullable
    object_id_field = report_table.columns["object_id"]
    if not object_id_field.nullable:
        # We have to alter this.
        object_id_field.alter(nullable=True)
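
For comparison, in a present-day Alembic migration the same change would usually be a one-liner via op.alter_column (a hedged equivalent, not what this codebase does):

op.alter_column("core__reports", "object_id", nullable=True)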
Example #24
def pw_hash_nullable(db):
    """Make pw_hash column nullable"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    user_table.c.pw_hash.alter(nullable=True)

    # sqlite+sqlalchemy seems to drop this constraint during the
    # migration, so we add it back here for now a bit manually.
    if db.bind.url.drivername == 'sqlite':
        constraint = UniqueConstraint('username', table=user_table)
        constraint.create()

    db.commit()
def upgrade():
    """
    This migration is very similiar to that of 101510e3a713. It removes objects
    from Notification objects which are from Graveyard. It also iterates through
    any reports which might have been filed and sets the objects to None. 
    """
    db = op.get_bind()
    metadata = MetaData(bind=db)
    notification_table = inspect_table(metadata, "core__notifications")
    report_table = inspect_table(metadata, "core__reports")
    graveyard_table = inspect_table(metadata, "core__graveyard")
    gmr_table = inspect_table(metadata, "core__generic_model_reference")
    
    res = list(db.execute(graveyard_table.select()))
    for tombstone in res:
        # Look up the GMR for the tombstone
        gmr = db.execute(gmr_table.select().where(and_(
            gmr_table.c.obj_pk == tombstone.id,
            gmr_table.c.model_type == "core__graveyard"
        ))).first()

        # If we can't find one we can skip it as it needs one to be part of
        # the notification objects
        if gmr is None:
            continue

        # Delete all notifications which link to the GMR as that's invalid.
        db.execute(notification_table.delete().where(
            notification_table.c.object_id == gmr.id
        ))

        # Deal with reports, we don't want to delete these though, they want to
        # still exist if the object that was reported was deleted as that can
        # be part of the resolution, just set it to None.
        db.execute(report_table.update().where(
            report_table.c.object_id == gmr.id
        ).values(object_id=None))
Example #28
def fix_privilege_user_association_table(db):
    """
    There was an error in the PrivilegeUserAssociation table that allowed for a
    dangerous sql error. We need to the change the name of the columns to be
    unique, and properly referenced.
    """
    metadata = MetaData(bind=db.bind)

    privilege_user_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This whole process is more complex if we're dealing with sqlite
    if db.bind.url.drivername == 'sqlite':
        PrivilegeUserAssociation_R1.__table__.create(db.bind)
        db.commit()

        new_privilege_user_assoc = inspect_table(
            metadata, 'rename__privileges_users')
        result = db.execute(privilege_user_assoc.select())
        for row in result:
            # The columns were improperly named before, so we switch the columns
            user_id, priv_id = row['core__privilege_id'], row['core__user_id']
            db.execute(new_privilege_user_assoc.insert().values(
                user=user_id,
                privilege=priv_id))

        db.commit()

        privilege_user_assoc.drop()
        new_privilege_user_assoc.rename('core__privileges_users')

    # much simpler if postgres though!
    else:
        privilege_user_assoc.c.core__user_id.alter(name="privilege")
        privilege_user_assoc.c.core__privilege_id.alter(name="user")

    db.commit()
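
In other words, a row whose values were stored under swapped column names comes out with sanely named columns. A hedged before/after sketch of one association row:

# Before (mis-named columns):      After (corrected):
#   core__privilege_id = 5   -->     user      = 5   # really a user id
#   core__user_id      = 2   -->     privilege = 2   # really a privilege id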
Example #30
def drop_token_related_User_columns(db):
    """
    Drop unneeded columns from the User table after switching to using
    itsdangerous tokens for email and forgot password verification.
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')

    verification_key = user_table.columns['verification_key']
    fp_verification_key = user_table.columns['fp_verification_key']
    fp_token_expire = user_table.columns['fp_token_expire']

    verification_key.drop()
    fp_verification_key.drop()
    fp_token_expire.drop()

    db.commit()
Example #33
def datetime_to_utc(db):
    """ Convert datetime stamps to UTC """
    # Get the server's timezone, this is what the database has stored
    server_timezone = dateutil.tz.tzlocal()

    ##
    # Look up all the timestamps and convert them to UTC
    ##
    metadata = MetaData(bind=db.bind)

    def dt_to_utc(dt):
        # Add the current timezone
        dt = dt.replace(tzinfo=server_timezone)

        # Convert to UTC
        return dt.astimezone(pytz.UTC)

    # Convert the User model
    user_table = inspect_table(metadata, "core__users")
    for user in db.execute(user_table.select()):
        db.execute(user_table.update().values(
            created=dt_to_utc(user.created)
        ).where(user_table.c.id==user.id))

    # Convert Client
    client_table = inspect_table(metadata, "core__clients")
    for client in db.execute(client_table.select()):
        db.execute(client_table.update().values(
            created=dt_to_utc(client.created),
            updated=dt_to_utc(client.updated)
        ).where(client_table.c.id==client.id))

    # Convert RequestToken
    rt_table = inspect_table(metadata, "core__request_tokens")
    for request_token in db.execute(rt_table.select()):
        db.execute(rt_table.update().values(
            created=dt_to_utc(request_token.created),
            updated=dt_to_utc(request_token.updated)
        ).where(rt_table.c.token==request_token.token))

    # Convert AccessToken
    at_table = inspect_table(metadata, "core__access_tokens")
    for access_token in db.execute(at_table.select()):
        db.execute(at_table.update().values(
            created=dt_to_utc(access_token.created),
            updated=dt_to_utc(access_token.updated)
        ).where(at_table.c.token==access_token.token))

    # Convert MediaEntry
    media_table = inspect_table(metadata, "core__media_entries")
    for media in db.execute(media_table.select()):
        db.execute(media_table.update().values(
            created=dt_to_utc(media.created)
        ).where(media_table.c.id==media.id))

    # Convert Media Attachment File
    media_attachment_table = inspect_table(metadata, "core__attachment_files")
    for ma in db.execute(media_attachment_table.select()):
        db.execute(media_attachment_table.update().values(
            created=dt_to_utc(ma.created)
        ).where(media_attachment_table.c.id==ma.id))

    # Convert MediaComment
    comment_table = inspect_table(metadata, "core__media_comments")
    for comment in db.execute(comment_table.select()):
        db.execute(comment_table.update().values(
            created=dt_to_utc(comment.created)
        ).where(comment_table.c.id==comment.id))

    # Convert Collection
    collection_table = inspect_table(metadata, "core__collections")
    for collection in db.execute(collection_table.select()):
        db.execute(collection_table.update().values(
            created=dt_to_utc(collection.created)
        ).where(collection_table.c.id==collection.id))

    # Convert Collection Item
    collection_item_table = inspect_table(metadata, "core__collection_items")
    for ci in db.execute(collection_item_table.select()):
        db.execute(collection_item_table.update().values(
            added=dt_to_utc(ci.added)
        ).where(collection_item_table.c.id==ci.id))

    # Convert Comment subscription
    comment_sub = inspect_table(metadata, "core__comment_subscriptions")
    for sub in db.execute(comment_sub.select()):
        db.execute(comment_sub.update().values(
            created=dt_to_utc(sub.created)
        ).where(comment_sub.c.id==sub.id))

    # Convert Notification
    notification_table = inspect_table(metadata, "core__notifications")
    for notification in db.execute(notification_table.select()):
        db.execute(notification_table.update().values(
            created=dt_to_utc(notification.created)
        ).where(notification_table.c.id==notification.id))

    # Convert ReportBase
    reportbase_table = inspect_table(metadata, "core__reports")
    for report in db.execute(reportbase_table.select()):
        db.execute(reportbase_table.update().values(
            created=dt_to_utc(report.created)
        ).where(reportbase_table.c.id==report.id))

    # Convert Generator
    generator_table = inspect_table(metadata, "core__generators")
    for generator in db.execute(generator_table.select()):
        db.execute(generator_table.update().values(
            published=dt_to_utc(generator.published),
            updated=dt_to_utc(generator.updated)
        ).where(generator_table.c.id==generator.id))

    # Convert Activity
    activity_table = inspect_table(metadata, "core__activities")
    for activity in db.execute(activity_table.select()):
        db.execute(activity_table.update().values(
            published=dt_to_utc(activity.published),
            updated=dt_to_utc(activity.updated)
        ).where(activity_table.c.id==activity.id))

    # Commit this to the database
    db.commit()
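
A quick sketch of what dt_to_utc does to one naive timestamp, assuming for illustration a server running at UTC-5 (the real offset comes from dateutil.tz.tzlocal()):

import datetime

import dateutil.tz
import pytz

naive = datetime.datetime(2014, 6, 1, 12, 0)         # as stored in the db
local = naive.replace(tzinfo=dateutil.tz.tzlocal())  # tag with server tz
utc = local.astimezone(pytz.UTC)                     # 17:00 UTC if server is UTC-5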
Example #34
def create_moderation_tables(db):

    # First, we will create the new tables in the database.
    #--------------------------------------------------------------------------
    ReportBase_v0.__table__.create(db.bind)
    CommentReport_v0.__table__.create(db.bind)
    MediaReport_v0.__table__.create(db.bind)
    UserBan_v0.__table__.create(db.bind)
    Privilege_v0.__table__.create(db.bind)
    PrivilegeUserAssociation_v0.__table__.create(db.bind)

    db.commit()

    # Then initialize the tables that we will later use
    #--------------------------------------------------------------------------
    metadata = MetaData(bind=db.bind)
    privileges_table = inspect_table(metadata, "core__privileges")
    user_table = inspect_table(metadata, 'core__users')
    user_privilege_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This section initializes the default Privilege foundations, that
    # would be created through the FOUNDATIONS system in a new instance
    #--------------------------------------------------------------------------
    for parameters in PRIVILEGE_FOUNDATIONS_v0:
        db.execute(privileges_table.insert().values(**parameters))

    db.commit()

    # This next section takes the information from the old is_admin and status
    # columns and converts those to the new privilege system
    #--------------------------------------------------------------------------
    admin_users_ids, active_users_ids, inactive_users_ids = (
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==True)).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status==u"active")).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status!=u"active")).fetchall())

    # Get the ids for each of the privileges so we can reference them ~~~~~~~~~
    (admin_privilege_id, uploader_privilege_id,
     reporter_privilege_id, commenter_privilege_id,
     active_privilege_id) = [
        db.execute(privileges_table.select().where(
            privileges_table.c.privilege_name==privilege_name)).first()['id']
        for privilege_name in
            [u"admin",u"uploader",u"reporter",u"commenter",u"active"]
    ]

    # Give each user the appropriate privileges depending on whether they are
    # an admin, an active user or an inactive user. Note that the association
    # table's columns were mis-named at this point (they are swapped back in
    # fix_privilege_user_association_table above), hence the swapped-looking
    # values below. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    for admin_user in admin_users_ids:
        admin_user_id = admin_user['id']
        for privilege_id in [admin_privilege_id, uploader_privilege_id,
                            reporter_privilege_id, commenter_privilege_id,
                            active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=admin_user_id,
                core__user_id=privilege_id))

    for active_user in active_users_ids:
        active_user_id = active_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                            commenter_privilege_id, active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=active_user_id,
                core__user_id=privilege_id))

    for inactive_user in inactive_users_ids:
        inactive_user_id = inactive_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=inactive_user_id,
                core__user_id=privilege_id))

    db.commit()

    # And then, once the information is taken from is_admin & status columns
    # we drop all of the vestigial columns from the User table.
    #--------------------------------------------------------------------------
    if db.bind.url.drivername == 'sqlite':
        # SQLite has some issues that make it *impossible* to drop boolean
        # columns. So, the following code is a very hacky workaround which
        # makes it possible. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)
    else:
        # If the db is not run using SQLite, this process is much simpler ~~~~~

        status = user_table.columns['status']
        email_verified = user_table.columns['email_verified']
        is_admin = user_table.columns['is_admin']
        status.drop()
        email_verified.drop()
        is_admin.drop()

    db.commit()
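
replace_table_hack is the usual SQLite copy-and-rename workaround for columns that can't be dropped in place. A minimal sketch of what such a helper might do, assuming sqlalchemy-migrate's table.rename() as used elsewhere in these migrations (the actual implementation may differ):

def replace_table_hack(db, old_table, replacement_table):
    # Copy only the columns that survive into the replacement table
    surviving = replacement_table.columns.keys()
    old_name = old_table.name
    for row in db.execute(old_table.select()):
        values = dict((name, row[name]) for name in surviving
                      if name in row.keys())
        db.execute(replacement_table.insert().values(**values))
    db.commit()

    # Drop the old table and move the replacement into its place
    old_table.drop()
    replacement_table.rename(old_name)
    db.commit()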
Example #35
def revert_username_index(db):
    """
    Revert the stuff we did in migration 22 above.

    There were a couple of problems with what we did:
     - There was never a need for this migration!  The unique
       constraint had an implicit b-tree index, so it wasn't really
       needed.  (This is my (Chris Webber's) fault for suggesting it
       needed to happen without knowing what's going on... my bad!)
     - On top of that, databases created after the models.py was
       changed weren't the same as those that had been run through
       migration 22 above.

    As such, we're setting things back to the way they were before,
    but as it turns out, that's tricky to do!
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    indexes = dict(
        [(index.name, index) for index in user_table.indexes])

    # index from unnecessary migration
    users_uploader_index = indexes.get(u'ix_core__users_uploader')
    # index created from models.py after (unique=True, index=True)
    # was set in models.py
    users_username_index = indexes.get(u'ix_core__users_username')

    if users_uploader_index is None and users_username_index is None:
        # We don't need to do anything.
        # The database isn't in a state where it needs fixing
        #
        # (i.e., either went through the previous borked migration or
        #  was initialized with a models.py where core__users was both
        #  unique=True and index=True)
        return

    if db.bind.url.drivername == 'sqlite':
        # Again, sqlite has problems.  So this is tricky.

        # Yes, this is correct to use User_vR1!  Nothing has changed
        # between the *correct* version of this table and migration 18.
        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)

    else:
        # If the db is not run using SQLite, we don't need to do crazy
        # table copying.

        # Remove whichever of the not-used indexes are in place
        if users_uploader_index is not None:
            users_uploader_index.drop()
        if users_username_index is not None:
            users_username_index.drop()

        # Given we're removing indexes then adding a unique constraint
        # which *we know might fail*, thus probably rolling back the
        # session, let's commit here.
        db.commit()

        try:
            # Add the unique constraint
            constraint = UniqueConstraint(
                'username', table=user_table)
            constraint.create()
        except ProgrammingError:
            # constraint already exists, no need to add
            db.rollback()

    db.commit()
Example #36
def activity_migration(db):
    """
    Creates everything to create activities in GMG
    - Adds Activity, ActivityIntermediator and Generator tables
    - Creates GMG service generator for activities produced by the server
    - Adds the activity_as_object and activity_as_target to objects/targets
    - Retroactively adds activities for what we can accurately work out
    """
    # Set constants we'll use later
    FOREIGN_KEY = "core__activity_intermediators.id"
    ACTIVITY_COLUMN = "activity"

    # Create the new tables.
    ActivityIntermediator_R0.__table__.create(db.bind)
    Generator_R0.__table__.create(db.bind)
    Activity_R0.__table__.create(db.bind)
    db.commit()

    # Inspect the tables we want to use later
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    activity_table = inspect_table(metadata, "core__activities")
    generator_table = inspect_table(metadata, "core__generators")
    collection_table = inspect_table(metadata, "core__collections")
    media_entry_table = inspect_table(metadata, "core__media_entries")
    media_comments_table = inspect_table(metadata, "core__media_comments")
    ai_table = inspect_table(metadata, "core__activity_intermediators")


    # Create the foundations for Generator
    db.execute(generator_table.insert().values(
        name="GNU Mediagoblin",
        object_type="service",
        published=datetime.datetime.now(),
        updated=datetime.datetime.now()
    ))
    db.commit()

    # Get the ID of that generator
    gmg_generator = db.execute(generator_table.select(
        generator_table.c.name==u"GNU Mediagoblin")).first()


    # Now we want to modify the tables which MAY have an activity at some point
    media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    media_col.create(media_entry_table)

    user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    user_col.create(user_table)

    comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    comments_col.create(media_comments_table)

    collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    collection_col.create(collection_table)
    db.commit()


    # Now we want to retroactively add what activities we can.
    # First we'll add activities for when people uploaded media;
    # these can't have content as it's not feasible to get the
    # correct content strings.
    for media in db.execute(media_entry_table.select()):
        # Now we want to create the intermediary
        db_ai = db.execute(ai_table.insert().values(
            type="media",
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Add the activity
        activity = {
            "verb": "create",
            "actor": media.uploader,
            "published": media.created,
            "updated": media.created,
            "generator": gmg_generator.id,
            "object": db_ai.id
        }
        db.execute(activity_table.insert().values(**activity))

        # Add the AI to the media.
        db.execute(media_entry_table.update().values(
            activity=db_ai.id
        ).where(media_entry_table.c.id==media.id))

    # Now we want to add all the comments people made
    for comment in db.execute(media_comments_table.select()):
        # Get the MediaEntry for the comment
        media_entry = db.execute(
            media_entry_table.select(
                media_entry_table.c.id==comment.media_entry
        )).first()

        # Create an AI for target
        db_ai_media = db.execute(ai_table.select(
            ai_table.c.id==media_entry.activity
        )).first().id

        db.execute(
            media_comments_table.update().values(
                activity=db_ai_media
        ).where(media_comments_table.c.id==media_entry.id))

        # Now create the AI for the comment
        db_ai_comment = db.execute(ai_table.insert().values(
            type="comment"
        )).inserted_primary_key[0]

        activity = {
            "verb": "comment",
            "actor": comment.author,
            "published": comment.created,
            "updated": comment.created,
            "generator": gmg_generator.id,
            "object": db_ai_comment,
            "target": db_ai_media,
        }

        # Now add the comment object
        db.execute(activity_table.insert().values(**activity))

        # Now add activity to comment
        db.execute(media_comments_table.update().values(
            activity=db_ai_comment
        ).where(media_comments_table.c.id==comment.id))

    # Create 'create' activities for all collections
    for collection in db.execute(collection_table.select()):
        # create AI
        db_ai = db.execute(ai_table.insert().values(
            type="collection"
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Now link the collection to the AI
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

        activity = {
            "verb": "create",
            "actor": collection.creator,
            "published": collection.created,
            "updated": collection.created,
            "generator": gmg_generator.id,
            "object": db_ai.id,
        }

        db.execute(activity_table.insert().values(**activity))

        # Now add the activity to the collection
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

    db.commit()