fix: pass if table is already removed on upgrade (#30017)

This commit is contained in:
Joe Li 2024-09-03 15:58:39 -07:00 committed by GitHub
parent 5c5b4d0f5f
commit c929f5ed7a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 75 additions and 39 deletions

View File

@ -19,8 +19,10 @@ from __future__ import annotations
from dataclasses import dataclass
from alembic import op
from sqlalchemy.dialects.sqlite.base import SQLiteDialect # noqa: E402
from sqlalchemy.engine.reflection import Inspector
from superset.migrations.shared.utils import has_table
from superset.utils.core import generic_find_fk_constraint_name
@ -75,13 +77,19 @@ def redefine(
def drop_fks_for_table(table_name: str) -> None:
    """
    Drop all foreign key constraints for a table if it exists and the database
    is not sqlite.

    SQLite is skipped entirely because it does not support dropping
    constraints via ALTER TABLE; the table check makes the operation
    idempotent when the table was already removed by an earlier run.

    :param table_name: The table name to drop foreign key constraints for
    """
    connection = op.get_bind()

    if isinstance(connection.dialect, SQLiteDialect):
        return  # sqlite doesn't like constraints

    if has_table(table_name):
        inspector = Inspector.from_engine(connection)
        for fk in inspector.get_foreign_keys(table_name):
            op.drop_constraint(fk["name"], table_name, type_="foreignkey")

View File

@ -168,3 +168,17 @@ def try_load_json(data: Optional[str]) -> dict[str, Any]:
except json.JSONDecodeError:
print(f"Failed to parse: {data}")
return {}
def has_table(table_name: str) -> bool:
    """
    Check if a table exists in the database.

    :param table_name: The table name
    :returns: True if the table exists
    """
    inspector = inspect(op.get_context().bind)
    return inspector.has_table(table_name)

View File

@ -26,22 +26,24 @@ import sqlalchemy as sa
from alembic import op
from superset.migrations.shared.constraints import drop_fks_for_table
from superset.migrations.shared.utils import has_table
# revision identifiers, used by Alembic.
revision = "02f4f7811799"
down_revision = "f7b6750b67e8"
table_name = "sl_dataset_columns"
def upgrade():
    """Drop the sl_dataset_columns table if it still exists.

    The existence check makes this migration idempotent: a previous
    partial run (or an environment that never had the table) no longer
    causes the upgrade to fail.
    """
    if has_table(table_name):
        drop_fks_for_table(table_name)
        op.drop_table(table_name)
def downgrade():
op.create_table(
"sl_dataset_columns",
table_name,
sa.Column("dataset_id", sa.Integer(), nullable=False),
sa.Column("column_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(

View File

@ -26,22 +26,24 @@ import sqlalchemy as sa
from alembic import op
from superset.migrations.shared.constraints import drop_fks_for_table
from superset.migrations.shared.utils import has_table
# revision identifiers, used by Alembic.
revision = "39549add7bfc"
down_revision = "02f4f7811799"
table_name = "sl_table_columns"
def upgrade():
    """Drop the sl_table_columns table if it still exists.

    The existence check makes this migration idempotent: a previous
    partial run (or an environment that never had the table) no longer
    causes the upgrade to fail.
    """
    if has_table(table_name):
        drop_fks_for_table(table_name)
        op.drop_table(table_name)
def downgrade():
op.create_table(
"sl_table_columns",
table_name,
sa.Column("table_id", sa.Integer(), nullable=False),
sa.Column("column_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(

View File

@ -26,22 +26,24 @@ import sqlalchemy as sa
from alembic import op
from superset.migrations.shared.constraints import drop_fks_for_table
from superset.migrations.shared.utils import has_table
# revision identifiers, used by Alembic.
revision = "38f4144e8558"
down_revision = "39549add7bfc"
table_name = "sl_dataset_tables"
def upgrade():
    """Drop the sl_dataset_tables table if it still exists.

    The existence check makes this migration idempotent: a previous
    partial run (or an environment that never had the table) no longer
    causes the upgrade to fail.
    """
    if has_table(table_name):
        drop_fks_for_table(table_name)
        op.drop_table(table_name)
def downgrade():
op.create_table(
"sl_dataset_tables",
table_name,
sa.Column("dataset_id", sa.Integer(), nullable=False),
sa.Column("table_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(

View File

@ -26,22 +26,24 @@ import sqlalchemy as sa
from alembic import op
from superset.migrations.shared.constraints import drop_fks_for_table
from superset.migrations.shared.utils import has_table
# revision identifiers, used by Alembic.
revision = "e53fd48cc078"
down_revision = "38f4144e8558"
table_name = "sl_dataset_users"
def upgrade():
    """Drop the sl_dataset_users table if it still exists.

    The existence check makes this migration idempotent: a previous
    partial run (or an environment that never had the table) no longer
    causes the upgrade to fail.
    """
    if has_table(table_name):
        drop_fks_for_table(table_name)
        op.drop_table(table_name)
def downgrade():
op.create_table(
"sl_dataset_users",
table_name,
sa.Column("dataset_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(

View File

@ -26,22 +26,24 @@ import sqlalchemy as sa
from alembic import op
from superset.migrations.shared.constraints import drop_fks_for_table
from superset.migrations.shared.utils import has_table
# revision identifiers, used by Alembic.
revision = "a6b32d2d07b1"
down_revision = "e53fd48cc078"
table_name = "sl_columns"
def upgrade():
    """Drop the sl_columns table if it still exists.

    The existence check makes this migration idempotent: a previous
    partial run (or an environment that never had the table) no longer
    causes the upgrade to fail.
    """
    if has_table(table_name):
        drop_fks_for_table(table_name)
        op.drop_table(table_name)
def downgrade():
op.create_table(
"sl_columns",
table_name,
sa.Column("uuid", sa.Numeric(precision=16), nullable=True),
sa.Column("created_on", sa.DateTime(), nullable=True),
sa.Column("changed_on", sa.DateTime(), nullable=True),

View File

@ -26,22 +26,24 @@ import sqlalchemy as sa
from alembic import op
from superset.migrations.shared.constraints import drop_fks_for_table
from superset.migrations.shared.utils import has_table
# revision identifiers, used by Alembic.
revision = "007a1abffe7e"
down_revision = "a6b32d2d07b1"
table_name = "sl_tables"
def upgrade():
    """Drop the sl_tables table if it still exists.

    The existence check makes this migration idempotent: a previous
    partial run (or an environment that never had the table) no longer
    causes the upgrade to fail.
    """
    if has_table(table_name):
        drop_fks_for_table(table_name)
        op.drop_table(table_name)
def downgrade():
op.create_table(
"sl_tables",
table_name,
sa.Column("uuid", sa.Numeric(precision=16), nullable=True),
sa.Column("created_on", sa.DateTime(), nullable=True),
sa.Column("changed_on", sa.DateTime(), nullable=True),

View File

@ -26,22 +26,24 @@ import sqlalchemy as sa
from alembic import op
from superset.migrations.shared.constraints import drop_fks_for_table
from superset.migrations.shared.utils import has_table
# revision identifiers, used by Alembic.
revision = "48cbb571fa3a"
down_revision = "007a1abffe7e"
table_name = "sl_datasets"
def upgrade():
    """Drop the sl_datasets table if it still exists.

    The existence check makes this migration idempotent: a previous
    partial run (or an environment that never had the table) no longer
    causes the upgrade to fail.
    """
    if has_table(table_name):
        drop_fks_for_table(table_name)
        op.drop_table(table_name)
def downgrade():
op.create_table(
"sl_datasets",
table_name,
sa.Column("uuid", sa.Numeric(precision=16), nullable=True),
sa.Column("created_on", sa.DateTime(), nullable=True),
sa.Column("changed_on", sa.DateTime(), nullable=True),