[migration] add unique constraint on dashboard_slices table (#7880)

1. remove duplicated entries in many-to-many relation tbl dashboard_slices
2. add unique constraint on tbl
3. update the model to include the uniqueness constraint
This commit is contained in:
Grace Guo 2019-07-22 16:48:41 -07:00 committed by GitHub
parent 92eed8a529
commit 9dd6a385e6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 101 additions and 0 deletions

View File

@@ -0,0 +1,100 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Remove duplicated entries in dashboard_slices table and add unique constraint
Revision ID: 190188938582
Revises: d6ffdf31bdd4
Create Date: 2019-07-15 12:00:32.267507
"""
import logging
from alembic import op
from sqlalchemy import and_, Column, ForeignKey, Integer, Table
from sqlalchemy.ext.declarative import declarative_base
from superset import db
# revision identifiers, used by Alembic.
revision = "190188938582"
down_revision = "d6ffdf31bdd4"
Base = declarative_base()
class DashboardSlices(Base):
    """Minimal in-migration model of the dashboard <-> slice association
    table, used only to find and delete duplicate rows during this
    migration (migrations must not import the live application models).
    """

    __tablename__ = "dashboard_slices"
    # Surrogate primary key of the association row.
    id = Column(Integer, primary_key=True)
    # FK to the dashboard side of the many-to-many relation.
    dashboard_id = Column(Integer, ForeignKey("dashboards.id"))
    # FK to the slice side of the many-to-many relation.
    slice_id = Column(Integer, ForeignKey("slices.id"))
def upgrade():
    """De-duplicate dashboard_slices, then add a unique constraint.

    Steps:
      1. Find (dashboard_id, slice_id) pairs that occur more than once.
      2. For each duplicated pair, keep the row with the lowest id and
         delete the rest.
      3. Add a unique constraint on (dashboard_id, slice_id); failure is
         logged but does not abort the migration (best-effort, e.g. when
         the constraint already exists).
    """
    bind = op.get_bind()
    session = db.Session(bind=bind)

    # Find (dashboard_id, slice_id) pairs with more than one association row.
    dup_records = (
        session.query(
            DashboardSlices.dashboard_id,
            DashboardSlices.slice_id,
            db.func.count(DashboardSlices.id),
        )
        .group_by(DashboardSlices.dashboard_id, DashboardSlices.slice_id)
        .having(db.func.count(DashboardSlices.id) > 1)
        .all()
    )

    # Remove all but one row of every duplicated pair.
    for record in dup_records:
        print(
            "remove duplicates from dashboard {} slice {}".format(
                record.dashboard_id, record.slice_id
            )
        )
        # Order by primary key so "skip the first row" is deterministic;
        # OFFSET without ORDER BY has undefined row order in SQL.
        ids = [
            item.id
            for item in session.query(DashboardSlices.id)
            .filter(
                and_(
                    DashboardSlices.slice_id == record.slice_id,
                    DashboardSlices.dashboard_id == record.dashboard_id,
                )
            )
            .order_by(DashboardSlices.id)
            .offset(1)
        ]
        session.query(DashboardSlices).filter(DashboardSlices.id.in_(ids)).delete(
            synchronize_session=False
        )

    # Persist the deletes explicitly (instead of relying on the
    # surrounding Alembic transaction) and release the session.
    session.commit()
    session.close()

    # Add unique constraint; batch_alter_table also supports engines
    # (e.g. SQLite) that cannot ALTER TABLE in place.
    try:
        with op.batch_alter_table("dashboard_slices") as batch_op:
            batch_op.create_unique_constraint(
                "uq_dashboard_slice", ["dashboard_id", "slice_id"]
            )
    except Exception as e:
        logging.exception(e)
def downgrade():
    """Drop the uq_dashboard_slice unique constraint (best-effort)."""
    # Failures (e.g. the constraint was never created because upgrade()
    # also failed best-effort) are logged and swallowed so the downgrade
    # can still complete.
    try:
        with op.batch_alter_table("dashboard_slices") as batch_op:
            batch_op.drop_constraint("uq_dashboard_slice", type_="unique")
    except Exception as e:
        logging.exception(e)

View File

@@ -395,6 +395,7 @@ dashboard_slices = Table(
Column("id", Integer, primary_key=True),
Column("dashboard_id", Integer, ForeignKey("dashboards.id")),
Column("slice_id", Integer, ForeignKey("slices.id")),
UniqueConstraint("dashboard_id", "slice_id"),
)
dashboard_user = Table(