def downgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    dashboards = (
        session.query(Dashboard)
        .filter(Dashboard.json_metadata.like('%"native_filter_configuration"%'))
        .all()
    )
    changed_filters, changed_filter_sets = 0, 0
    for dashboard in dashboards:
        try:
            json_metadata = json.loads(dashboard.json_metadata)
            downgrades = downgrade_dashboard(json_metadata)
            changed_filters += downgrades[0]
            changed_filter_sets += downgrades[1]
            dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
        except Exception as e:
            print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
            raise e

    session.commit()
    session.close()
    print(
        f"Downgraded {changed_filters} filters and {changed_filter_sets} filter sets."
    )
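
The downgrade_dashboard helper is imported from the migration's shared module and is not shown here. A minimal sketch of its apparent contract, simplified (the "filter_sets_configuration" key name is an assumption, and the real helper also rewrites filter scopes):

def downgrade_dashboard_sketch(json_metadata):
    # Strip the native-filter keys in place and report how many entries
    # each held; the filter-sets key name is an assumption here.
    filters = json_metadata.pop("native_filter_configuration", [])
    filter_sets = json_metadata.pop("filter_sets_configuration", [])
    return len(filters), len(filter_sets)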
Code example #2
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    # Delete the orphaned metrics records.
    for record in session.query(DruidMetric).all():
        if record.datasource_id is None:
            session.delete(record)

    session.commit()

    # Enforce that metrics.metric_name column be non-nullable.
    with op.batch_alter_table('metrics') as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=String(255),
            nullable=False,
        )

    # Enforce that metrics.json column be non-nullable.
    with op.batch_alter_table('metrics') as batch_op:
        batch_op.alter_column(
            'json',
            existing_type=Text,
            nullable=False,
        )

    # Delete the orphaned sql_metrics records.
    for record in session.query(SqlMetric).all():
        if record.table_id is None:
            session.delete(record)

    session.commit()

    # Reduce the size of the sql_metrics.metric_name column for constraint
    # viability and enforce that it be non-nullable.
    with op.batch_alter_table('sql_metrics') as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=String(512),
            nullable=False,
            type_=String(255),
        )

    # Enforce that sql_metrics.expression column be non-nullable.
    with op.batch_alter_table('sql_metrics') as batch_op:
        batch_op.alter_column(
            'expression',
            existing_type=Text,
            nullable=False,
        )

    # Add the missing uniqueness constraint to the sql_metrics table.
    with op.batch_alter_table('sql_metrics',
                              naming_convention=conv) as batch_op:
        batch_op.create_unique_constraint(
            'uq_sql_metrics_metric_name',
            ['metric_name', 'table_id'],
        )
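
The conv passed to batch_alter_table is a SQLAlchemy naming-convention mapping, used so the batch operation can name constraints deterministically across backends. Its definition is not shown in the snippet; a typical convention dict (an assumption here) looks like:

conv = {
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s",
}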
Code example #3
def downgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    # Remove uuid column
    with op.batch_alter_table("saved_query") as batch_op:
        batch_op.drop_constraint("uq_saved_query_uuid", type_="unique")
        batch_op.drop_column("uuid")
Code example #4
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for obj in session.query(Database).all():
        obj.allow_run_sync = True

    session.commit()
    session.close()
Code example #5
def upgrade():
    """
    Adds the granularity param to charts without it populated. This is required for
    time range filtering to work properly. Uses the following approach:

    - Find all charts without a granularity or granularity_sqla param.
    - Get the dataset that backs the chart.
    - If the dataset has the main dttm column set, use it.
    - Otherwise, find all the dttm columns in the dataset and use the first one (this
      matches the behavior of Explore view on the frontend)
    - If no dttm columns exist in the dataset, don't change the chart.
    """
    bind = op.get_bind()
    session = db.Session(bind=bind)

    slices_changed = 0

    for slc in (
        session.query(Slice)
        .filter(
            and_(
                Slice.datasource_type == "table",
                Slice.params.notlike('%"granularity%'),
            )
        )
        .all()
    ):
        try:
            params = json.loads(slc.params)
            if "granularity" in params or "granularity_sqla" in params:
                continue

            table = session.query(SqlaTable).get(slc.datasource_id)
            if not table:
                continue

            if table.main_dttm_col:
                params["granularity"] = table.main_dttm_col
                slc.params = json.dumps(params, sort_keys=True)
                print(
                    f"Set granularity for slice {slc.id} to {table.main_dttm_col}"
                )
                slices_changed += 1
                continue

            table_columns = (
                session.query(TableColumn)
                .filter(TableColumn.table_id == table.id)
                .filter(TableColumn.is_dttm == True)  # SQLAlchemy column comparison, not a Python bool check
                .all()
            )
            if table_columns:
                params["granularity"] = table_columns[0].column_name
                slc.params = json.dumps(params, sort_keys=True)
                print(
                    f"Set granularity for slice {slc.id} to {table_columns[0].column_name}"
                )
                slices_changed += 1
        except Exception as e:
            print(e)
            print(f"Parsing params for slice {slc.id} failed.")

    print(f"{slices_changed} slices altered")
    session.commit()
    session.close()
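
Slice, SqlaTable, and TableColumn in these migrations are conventionally lightweight local model declarations rather than imports of the live application models, so the migration stays valid as the app evolves. A sketch of that pattern, trimmed to the columns this migration touches (column types are assumptions):

from sqlalchemy import Boolean, Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Slice(Base):
    # Minimal stand-in for the slices table.
    __tablename__ = "slices"
    id = Column(Integer, primary_key=True)
    datasource_id = Column(Integer)
    datasource_type = Column(String(200))
    params = Column(Text)

class SqlaTable(Base):
    __tablename__ = "tables"
    id = Column(Integer, primary_key=True)
    main_dttm_col = Column(String(250))

class TableColumn(Base):
    __tablename__ = "table_columns"
    id = Column(Integer, primary_key=True)
    table_id = Column(Integer)
    column_name = Column(String(255))
    is_dttm = Column(Boolean)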
Code example #6
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for model in (Query, SavedQuery):
        for record in session.query(model).filter(model.schema == "null"):
            record.schema = None

        session.commit()

    session.close()
Code example #7
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    objects = session.query(Slice).all()
    objects += session.query(Dashboard).all()
    for obj in objects:
        if obj.created_by and obj.created_by not in obj.owners:
            obj.owners.append(obj.created_by)
        session.commit()
    session.close()
Code example #8
def upgrade():
    op.create_table(
        'sqlatable_user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('table_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['table_id'], ['tables.id']),
        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'druiddatasource_user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('datasource_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['datasource_id'], ['datasources.id']),
        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id']),
        sa.PrimaryKeyConstraint('id'),
    )

    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)
    session = db.Session(bind=bind)

    tables = session.query(SqlaTable).all()
    for table in tables:
        if table.user_id is not None:
            session.execute(sqlatable_user.insert().values(
                user_id=table.user_id, table_id=table.id))

    druiddatasources = session.query(DruidDatasource).all()
    for druiddatasource in druiddatasources:
        if druiddatasource.user_id is not None:
            session.execute(druiddatasource_user.insert().values(
                user_id=druiddatasource.user_id,
                datasource_id=druiddatasource.id))

    session.close()
    with op.batch_alter_table('tables') as batch_op:
        batch_op.drop_constraint('user_id', type_='foreignkey')
        batch_op.drop_column('user_id')
    with op.batch_alter_table('datasources') as batch_op:
        batch_op.drop_constraint(
            generic_find_fk_constraint_name('datasources', {'id'}, 'ab_user', insp),
            type_='foreignkey',
        )
        batch_op.drop_column('user_id')
Code example #9
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for slc in session.query(Slice):
        params = json.loads(slc.params)
        params.pop("time_range_endpoints", None)
        slc.params = json.dumps(params)

    session.commit()
    session.close()
Code example #10
def downgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    # remove uuid from position_json
    update_dashboards(session, {})

    # remove uuid column
    for table_name in models:
        with op.batch_alter_table(table_name) as batch_op:
            batch_op.drop_constraint(f"uq_{table_name}_uuid", type_="unique")
            batch_op.drop_column("uuid")
Code example #11
def upgrade():
    bind = op.get_bind()
    op.add_column("slices", sa.Column("perm", sa.String(length=2000), nullable=True))
    session = db.Session(bind=bind)

    # Use Slice class defined here instead of models.Slice
    for slc in session.query(Slice).all():
        if slc.datasource:
            slc.perm = slc.datasource.perm
            session.merge(slc)
            session.commit()
    session.close()
Code example #12
def downgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)
    for slc in session.query(Slice).all():
        if slc.datasource_type == 'druid':
            slc.druid_datasource_id = slc.datasource_id
        if slc.datasource_type == 'table':
            slc.table_id = slc.datasource_id
        session.merge(slc)
        session.commit()
    session.close()
    op.drop_column('slices', 'datasource_id')
Code example #13
File: base.py  Project: EBoisseauSierra/superset
 def upgrade(cls) -> None:
     bind = op.get_bind()
     session = db.Session(bind=bind)
     slices = session.query(Slice).filter(Slice.viz_type == cls.source_viz_type)
     for slc in paginated_update(
         slices,
         lambda current, total: print(
             f"  Updating {current}/{total} charts", end="\r"
         ),
     ):
         new_viz = cls.upgrade_slice(slc)
         session.merge(new_viz)
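
paginated_update walks a query in windows and commits as it goes, so large tables are never loaded wholesale. A minimal sketch of such a helper under the offset-window assumption (the real shared utility may differ in details):

def paginated_update_sketch(query, progress=None, batch_size=1000):
    # Hypothetical sketch: yield rows window by window, committing after
    # each window and reporting progress via the optional callback.
    session = query.session
    total = query.count()
    processed = 0
    while processed < total:
        for obj in query.limit(batch_size).offset(processed):
            yield obj
        session.commit()
        processed += min(batch_size, total - processed)
        if progress:
            progress(processed, total)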
Code example #14
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)
    mapping = {'having': 'having_filters', 'where': 'filters'}

    for slc in session.query(Slice).all():
        try:
            params = json.loads(slc.params)

            if 'adhoc_filters' not in params:
                params['adhoc_filters'] = []

                for clause, filters in mapping.items():
                    if clause in params and params[clause] != '':
                        params['adhoc_filters'].append({
                            'clause': clause.upper(),
                            'expressionType': 'SQL',
                            'filterOptionName': str(uuid.uuid4()),
                            'sqlExpression': params[clause],
                        })

                    if filters in params:
                        for filt in params[filters]:
                            params['adhoc_filters'].append({
                                'clause': clause.upper(),
                                'comparator': filt['val'],
                                'expressionType': 'SIMPLE',
                                'filterOptionName': str(uuid.uuid4()),
                                'operator': filt['op'],
                                'subject': filt['col'],
                            })

            for key in ('filters', 'having', 'having_filters', 'where'):
                if key in params:
                    del params[key]

            slc.params = json.dumps(params, sort_keys=True)
        except Exception:
            pass

    session.commit()
    session.close()
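
For reference, the conversion above rewrites a slice's params roughly as follows (values illustrative; filterOptionName is a fresh uuid4):

before = {
    "where": "state = 'CA'",
    "filters": [{"col": "gender", "op": "==", "val": "girl"}],
}
after = {
    "adhoc_filters": [
        {"clause": "WHERE", "expressionType": "SQL",
         "filterOptionName": "<uuid4>", "sqlExpression": "state = 'CA'"},
        {"clause": "WHERE", "expressionType": "SIMPLE",
         "filterOptionName": "<uuid4>", "operator": "==",
         "subject": "gender", "comparator": "girl"},
    ],
}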
Code example #15
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for slc in session.query(Slice).all():
        try:
            form_data = json.loads(slc.params)
            update_time_range(form_data)
            slc.params = json.dumps(form_data, sort_keys=True)
        except Exception as ex:
            logging.exception(ex)

    session.commit()
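
update_time_range is imported from elsewhere. A plausible sketch, assuming it folds the legacy since/until pair into the single time_range field; this is an assumption, not the verified implementation:

def update_time_range_sketch(form_data):
    # Hypothetical sketch: fold the legacy since/until pair into the
    # single "time_range" field, joined by the " : " separator used
    # throughout these migrations.
    if "since" in form_data or "until" in form_data:
        since = form_data.pop("since", "") or ""
        until = form_data.pop("until", "") or ""
        form_data["time_range"] = f"{since} : {until}"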
Code example #16
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    filter_box_slices = session.query(Slice).filter_by(viz_type='filter_box')
    for slc in filter_box_slices.all():
        try:
            upgrade_slice(slc)
        except Exception as e:
            logging.exception(e)

    session.commit()
    session.close()
Code example #17
def upgrade():
    bind = op.get_bind()
    op.add_column('slices', sa.Column('datasource_id', sa.Integer()))
    session = db.Session(bind=bind)

    for slc in session.query(Slice).all():
        if slc.druid_datasource_id:
            slc.datasource_id = slc.druid_datasource_id
        if slc.table_id:
            slc.datasource_id = slc.table_id
        session.merge(slc)
        session.commit()
    session.close()
Code example #18
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)
    slices_updated = 0
    for slc in (
        session.query(Slice)
        .filter(Slice.query_context.like("%time_range_endpoints%"))
        .all()
    ):
        updated_slice = upgrade_slice(slc)
        if updated_slice:
            slices_updated += 1

    print(f"slices updated with no time_range_endpoints: {slices_updated}")
    session.commit()
    session.close()
Code example #19
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)
    x_dateunit_in_since = DateRangeMigration.x_dateunit_in_since
    x_dateunit_in_until = DateRangeMigration.x_dateunit_in_until

    if isinstance(bind.dialect, SQLiteDialect):
        # The REGEXP operator is a special syntax for the regexp() user function.
        # https://www.sqlite.org/lang_expr.html#regexp
        to_lower = sa.func.LOWER
        where_clause = or_(
            sa.func.REGEXP(to_lower(Slice.params), x_dateunit_in_since),
            sa.func.REGEXP(to_lower(Slice.params), x_dateunit_in_until),
        )
    elif isinstance(bind.dialect, MySQLDialect):
        to_lower = sa.func.LOWER
        where_clause = or_(
            to_lower(Slice.params).op("REGEXP")(x_dateunit_in_since),
            to_lower(Slice.params).op("REGEXP")(x_dateunit_in_until),
        )
    else:
        # Assume PostgreSQL (PGDialect); "~*" is its case-insensitive regexp operator.
        where_clause = or_(
            Slice.params.op("~*")(x_dateunit_in_since),
            Slice.params.op("~*")(x_dateunit_in_until),
        )

    try:
        slices = session.query(Slice).filter(where_clause)
        total = slices.count()
        sep = " : "
        pattern = DateRangeMigration.x_dateunit
        idx = 0
        for slc in slices.yield_per(100):
            idx += 1
            print(f"Upgrading ({idx}/{total}): {slc.slice_name}#{slc.id}")
            params = json.loads(slc.params)
            time_range = params["time_range"]
            if sep in time_range:
                start, end = time_range.split(sep)
                if re.match(pattern, start):
                    start = f"{start.strip()} ago"
                if re.match(pattern, end):
                    end = f"{end.strip()} later"
                params["time_range"] = f"{start}{sep}{end}"
                slc.params = json.dumps(params, sort_keys=True, indent=4)
                session.commit()
    except OperationalError:
        pass

    session.close()
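
The patterns referenced above live on DateRangeMigration and are not shown here. Hypothetical approximations, to convey the intent only:

# Hypothetical approximations of the real class attributes:
x_dateunit = r"^\s*\d+\s+(second|minute|hour|day|week|month|quarter|year)s?\s*$"
x_dateunit_in_since = (
    r'"time_range":\s*"\s*\d+\s+(second|minute|hour|day|week|month|quarter|year)s?\s*:'
)
x_dateunit_in_until = (
    r':\s*\d+\s+(second|minute|hour|day|week|month|quarter|year)s?\s*"'
)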
Code example #20
def downgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    # remove uuid from position_json
    Dashboard = models["dashboards"]
    for dashboard in session.query(Dashboard).all():
        update_position_json(dashboard, session, {})

    # remove uuid column
    for table_name in models:
        with op.batch_alter_table(table_name) as batch_op:
            batch_op.drop_constraint(f"uq_{table_name}_uuid", type_="unique")
            batch_op.drop_column("uuid")
Code example #21
def upgrade():
    op.create_table(
        "sqlatable_user",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("table_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["table_id"], ["tables.id"]),
        sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "druiddatasource_user",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("datasource_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["datasource_id"], ["datasources.id"]),
        sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"]),
        sa.PrimaryKeyConstraint("id"),
    )

    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)
    session = db.Session(bind=bind)

    tables = session.query(SqlaTable).all()
    for table in tables:
        if table.user_id is not None:
            session.execute(
                sqlatable_user.insert().values(user_id=table.user_id, table_id=table.id)
            )

    druiddatasources = session.query(DruidDatasource).all()
    for druiddatasource in druiddatasources:
        if druiddatasource.user_id is not None:
            session.execute(
                druiddatasource_user.insert().values(
                    user_id=druiddatasource.user_id, datasource_id=druiddatasource.id
                )
            )

    session.close()
    with op.batch_alter_table("tables") as batch_op:
        batch_op.drop_constraint("user_id", type_="foreignkey")
        batch_op.drop_column("user_id")
    with op.batch_alter_table("datasources") as batch_op:
        batch_op.drop_constraint(
            generic_find_fk_constraint_name("datasources", {"id"}, "ab_user", insp),
            type_="foreignkey",
        )
        batch_op.drop_column("user_id")
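
generic_find_fk_constraint_name is needed because the name of the user_id foreign key varies by backend. A sketch of how such a lookup can be written with the SQLAlchemy inspector, assumed to match the helper's contract:

def generic_find_fk_constraint_name_sketch(table, columns, referenced, insp):
    # Return the name of the foreign key on `table` that references
    # `columns` of the `referenced` table, or None if there is none.
    for fk in insp.get_foreign_keys(table):
        if fk["referred_table"] == referenced and set(fk["referred_columns"]) == columns:
            return fk["name"]
    return None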
Code example #22
def downgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for chart in session.query(Slice):
        params = json.loads(chart.params or "{}")

        if "time_compare" in params or "comparison_type" in params:
            params.pop("time_compare", None)
            params.pop("comparison_type", None)
            chart.params = json.dumps(params, sort_keys=True)

    session.commit()
    session.close()
Code example #23
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for slc in session.query(Slice).all():
        try:
            params = json.loads(slc.params)
            utils.convert_legacy_filters_into_adhoc(params)
            slc.params = json.dumps(params, sort_keys=True)
        except Exception:
            pass

    session.commit()
    session.close()
Code example #24
def upgrade():
    with op.batch_alter_table("report_schedule") as batch_op:
        batch_op.add_column(sa.Column("force_screenshot", sa.Boolean(), default=False))

    bind = op.get_bind()
    session = db.Session(bind=bind)

    for report in session.query(ReportSchedule).all():
        # Update existing alerts that send chart screenshots so that the cache is
        # bypassed. We don't turn this on for dashboards because (1) it's currently
        # not supported and (2) it can be very expensive.
        report.force_screenshot = report.type == "Alert" and report.chart_id is not None

    session.commit()
Code example #25
def downgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for slc in session.query(Slice).filter(
        or_(Slice.viz_type.like('line'), Slice.viz_type.like('bar'))
    ):
        params = json.loads(slc.params)
        layers = params.get('annotation_layers', [])
        if layers:
            params['annotation_layers'] = [layer['value'] for layer in layers]
            slc.params = json.dumps(params)
            session.merge(slc)
            session.commit()
    session.close()
Code example #27
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    op.execute(
        insert(DatasetUser).from_select(
            ["user_id", "dataset_id"],
            session.query(Dataset.created_by_fk, Dataset.id)
            .outerjoin(
                DatasetUser,
                and_(
                    DatasetUser.dataset_id == Dataset.id,
                    DatasetUser.user_id == Dataset.created_by_fk,
                ),
            )
            .filter(DatasetUser.dataset_id == None, Dataset.created_by_fk != None),
        )
    )

    op.execute(
        insert(SliceUser).from_select(
            ["user_id", "slice_id"],
            session.query(Slice.created_by_fk, Slice.id)
            .outerjoin(
                SliceUser,
                and_(
                    SliceUser.slice_id == Slice.id,
                    SliceUser.user_id == Slice.created_by_fk,
                ),
            )
            .filter(SliceUser.slice_id == None),
        )
    )

    op.execute(
        insert(SqlaTableUser).from_select(
            ["user_id", "table_id"],
            session.query(SqlaTable.created_by_fk, SqlaTable.id)
            .outerjoin(
                SqlaTableUser,
                and_(
                    SqlaTableUser.table_id == SqlaTable.id,
                    SqlaTableUser.user_id == SqlaTable.created_by_fk,
                ),
            )
            .filter(SqlaTableUser.table_id == None),
        )
    )
Code example #28
def upgrade():
    """
    Remove any erroneous time grainularity fields from slices foor those visualization
    types which do not support time granularity.

    :see: https://github.com/apache/incubator-superset/pull/8674
    :see: https://github.com/apache/incubator-superset/pull/8764
    :see: https://github.com/apache/incubator-superset/pull/8800
    :see: https://github.com/apache/incubator-superset/pull/8825
    """

    bind = op.get_bind()
    session = db.Session(bind=bind)

    # Visualization types which support time granularity (hence negate).
    viz_types = [
        "area",
        "bar",
        "big_number",
        "compare",
        "dual_line",
        "line",
        "pivot_table",
        "table",
        "time_pivot",
        "time_table",
    ]

    # Erroneous time granularity fields for either Druid NoSQL or SQL slices which
    # do not support time granularity. Note "grainularity" below is the literal
    # misspelled field name being scrubbed.
    erroneous = ["grainularity", "time_grain_sqla"]

    for slc in session.query(Slice).filter(Slice.viz_type.notin_(viz_types)).all():
        try:
            params = json.loads(slc.params)

            if any(field in params for field in erroneous):
                for field in erroneous:
                    if field in params:
                        del params[field]

                slc.params = json.dumps(params, sort_keys=True)
        except Exception:
            pass

    session.commit()
    session.close()
Code example #29
def downgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for slc in session.query(Slice).all():
        try:
            params = json.loads(slc.params)

            if params.get('time_grain_sqla') is None:
                params['time_grain_sqla'] = 'Time Column'
                slc.params = json.dumps(params, sort_keys=True)
        except Exception:
            pass

    session.commit()
    session.close()
Code example #30
def upgrade():
    bind = op.get_bind()
    session = db.Session(bind=bind)

    for slc in session.query(Slice).all():
        try:
            params = json.loads(slc.params)

            if params.get("time_grain_sqla") == "Time Column":
                params["time_grain_sqla"] = None
                slc.params = json.dumps(params, sort_keys=True)
        except Exception:
            pass

    session.commit()
    session.close()