[schema] Updating the base metric schema (#5453)
This commit is contained in:
parent
f83b979342
commit
f488a3281d
|
|
@ -32,6 +32,11 @@ which adds missing non-nullable fields and uniqueness constraints to the
|
|||
`columns` and `table_columns` tables. Depending on the integrity of the data,
|
||||
manual intervention may be required.
|
||||
|
||||
* [5453](https://github.com/apache/incubator-superset/pull/5453): a change
|
||||
which adds missing non-nullable fields and uniqueness constraints to the metrics
|
||||
and sql_metrics tables. Depending on the integrity of the data, manual
|
||||
intervention may be required.
|
||||
|
||||
## Superset 0.32.0
|
||||
|
||||
* `npm run backend-sync` is deprecated and no longer needed, will fail if called
|
||||
|
|
|
|||
|
|
@ -411,7 +411,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
|
|||
__tablename__ = None # {connector_name}_metric
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
metric_name = Column(String(512))
|
||||
metric_name = Column(String(255), nullable=False)
|
||||
verbose_name = Column(String(1024))
|
||||
metric_type = Column(String(32))
|
||||
description = Column(Text)
|
||||
|
|
|
|||
|
|
@ -341,15 +341,14 @@ class DruidMetric(Model, BaseMetric):
|
|||
|
||||
__tablename__ = 'metrics'
|
||||
__table_args__ = (UniqueConstraint('metric_name', 'datasource_id'),)
|
||||
datasource_id = Column(
|
||||
Integer,
|
||||
ForeignKey('datasources.id'))
|
||||
datasource_id = Column(Integer, ForeignKey('datasources.id'))
|
||||
|
||||
# Setting enable_typechecks=False disables polymorphic inheritance.
|
||||
datasource = relationship(
|
||||
'DruidDatasource',
|
||||
backref=backref('metrics', cascade='all, delete-orphan'),
|
||||
enable_typechecks=False)
|
||||
json = Column(Text)
|
||||
json = Column(Text, nullable=False)
|
||||
|
||||
export_fields = (
|
||||
'metric_name', 'verbose_name', 'metric_type', 'datasource_id',
|
||||
|
|
|
|||
|
|
@ -209,7 +209,7 @@ class SqlMetric(Model, BaseMetric):
|
|||
'SqlaTable',
|
||||
backref=backref('metrics', cascade='all, delete-orphan'),
|
||||
foreign_keys=[table_id])
|
||||
expression = Column(Text)
|
||||
expression = Column(Text, nullable=False)
|
||||
|
||||
export_fields = (
|
||||
'metric_name', 'verbose_name', 'metric_type', 'table_id', 'expression',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,169 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""update base metrics
|
||||
|
||||
Note that the metrics table was previously partially modified by revision
|
||||
f231d82b9b26.
|
||||
|
||||
Revision ID: e9df189e5c7e
|
||||
Revises: 7f2635b51f5d
|
||||
Create Date: 2018-07-20 15:57:48.118304
|
||||
|
||||
"""
|
||||
|
||||
# Revision identifiers, used by Alembic to order migrations in the chain.
revision = 'e9df189e5c7e'
down_revision = '7f2635b51f5d'
|
||||
|
||||
from alembic import op
|
||||
from sqlalchemy import Column, engine, Integer, String, Text
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
from superset import db
|
||||
from superset.utils.core import generic_find_uq_constraint_name
|
||||
|
||||
# Stand-alone declarative base: the migration declares its own minimal model
# snapshots below rather than importing the live application models.
Base = declarative_base()

# Naming convention handed to batch_alter_table so generated uniqueness
# constraints get a predictable name: uq_<table>_<first column>.
conv = {
    'uq': 'uq_%(table_name)s_%(column_0_name)s',
}
|
||||
|
||||
|
||||
class BaseMetricMixin:
    """Shared primary-key column for the metric table snapshots below."""

    id = Column(Integer, primary_key=True)
|
||||
|
||||
|
||||
class DruidMetric(BaseMetricMixin, Base):
    """Minimal snapshot of the Druid ``metrics`` table for this migration."""

    __tablename__ = 'metrics'

    # References datasources.id in the live model; NULL marks an orphan.
    datasource_id = Column(Integer)
|
||||
|
||||
|
||||
class SqlMetric(BaseMetricMixin, Base):
    """Minimal snapshot of the ``sql_metrics`` table for this migration."""

    __tablename__ = 'sql_metrics'

    # References the owning table in the live model; NULL marks an orphan.
    table_id = Column(Integer)
|
||||
|
||||
|
||||
def upgrade():
    """Tighten the base metric schema.

    Deletes orphaned ``metrics``/``sql_metrics`` records, makes the
    ``metric_name``, ``json``, and ``expression`` columns non-nullable,
    shrinks ``sql_metrics.metric_name`` to 255 characters, and adds a
    uniqueness constraint on ``(metric_name, table_id)`` to ``sql_metrics``.
    """
    bind = op.get_bind()
    session = db.Session(bind=bind)

    # Delete the orphaned metrics records. Filter server-side so only the
    # orphans are fetched, instead of loading every row into the session.
    for record in session.query(DruidMetric).filter(
            DruidMetric.datasource_id.is_(None)):
        session.delete(record)

    session.commit()

    # Enforce that the metrics.metric_name column be non-nullable.
    with op.batch_alter_table('metrics') as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=String(255),
            nullable=False,
        )

    # Enforce that the metrics.json column be non-nullable.
    with op.batch_alter_table('metrics') as batch_op:
        batch_op.alter_column(
            'json',
            existing_type=Text,
            nullable=False,
        )

    # Delete the orphaned sql_metrics records, again filtering server-side.
    for record in session.query(SqlMetric).filter(SqlMetric.table_id.is_(None)):
        session.delete(record)

    session.commit()

    # Reduce the size of the sql_metrics.metric_name column for constraint
    # viability and enforce that it be non-nullable.
    with op.batch_alter_table('sql_metrics') as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=String(512),
            nullable=False,
            type_=String(255),
        )

    # Enforce that the sql_metrics.expression column be non-nullable.
    with op.batch_alter_table('sql_metrics') as batch_op:
        batch_op.alter_column(
            'expression',
            existing_type=Text,
            nullable=False,
        )

    # Add the missing uniqueness constraint to the sql_metrics table.
    with op.batch_alter_table('sql_metrics', naming_convention=conv) as batch_op:
        batch_op.create_unique_constraint(
            'uq_sql_metrics_metric_name',
            ['metric_name', 'table_id'],
        )
|
||||
|
||||
|
||||
def downgrade():
    """Revert the schema changes applied by ``upgrade``.

    Drops the ``(metric_name, table_id)`` uniqueness constraint, restores
    the 512-character ``sql_metrics.metric_name`` column, and makes the
    tightened columns nullable again. Orphaned records deleted during
    upgrade cannot be restored.
    """
    bind = op.get_bind()
    insp = engine.reflection.Inspector.from_engine(bind)

    # Drop the uniqueness constraint from the sql_metrics table. The name is
    # reflected from the database where possible; the literal fallback must
    # match the name created in upgrade() ('uq_sql_metrics_metric_name') for
    # backends whose reflection cannot report it. The previous fallback
    # 'uq_sql_metrics_table_id' referenced a constraint that is never created.
    with op.batch_alter_table('sql_metrics', naming_convention=conv) as batch_op:
        batch_op.drop_constraint(
            generic_find_uq_constraint_name(
                'sql_metrics',
                {'metric_name', 'table_id'},
                insp,
            ) or 'uq_sql_metrics_metric_name',
            type_='unique',
        )

    # Restore the size of the sql_metrics.metric_name column and forego that
    # it be non-nullable.
    with op.batch_alter_table('sql_metrics') as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=String(255),
            nullable=True,
            type_=String(512),
        )

    # Forego that the sql_metrics.expression column be non-nullable.
    with op.batch_alter_table('sql_metrics') as batch_op:
        batch_op.alter_column(
            'expression',
            existing_type=Text,
            nullable=True,
        )

    # Forego that the metrics.metric_name column be non-nullable.
    with op.batch_alter_table('metrics') as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=String(255),
            nullable=True,
        )

    # Forego that the metrics.json column be non-nullable.
    with op.batch_alter_table('metrics') as batch_op:
        batch_op.alter_column(
            'json',
            existing_type=Text,
            nullable=True,
        )
|
||||
|
|
@ -72,7 +72,7 @@ class DictImportExportTests(SupersetTestCase):
|
|||
'params': json.dumps(params),
|
||||
'columns': [{'column_name': c}
|
||||
for c in cols_names],
|
||||
'metrics': [{'metric_name': c} for c in metric_names],
|
||||
'metrics': [{'metric_name': c, 'expression': ''} for c in metric_names],
|
||||
}
|
||||
|
||||
table = SqlaTable(
|
||||
|
|
@ -84,7 +84,7 @@ class DictImportExportTests(SupersetTestCase):
|
|||
for col_name in cols_names:
|
||||
table.columns.append(TableColumn(column_name=col_name))
|
||||
for metric_name in metric_names:
|
||||
table.metrics.append(SqlMetric(metric_name=metric_name))
|
||||
table.metrics.append(SqlMetric(metric_name=metric_name, expression=''))
|
||||
return table, dict_rep
|
||||
|
||||
def create_druid_datasource(
|
||||
|
|
@ -98,7 +98,7 @@ class DictImportExportTests(SupersetTestCase):
|
|||
'id': id,
|
||||
'params': json.dumps(params),
|
||||
'columns': [{'column_name': c} for c in cols_names],
|
||||
'metrics': [{'metric_name': c} for c in metric_names],
|
||||
'metrics': [{'metric_name': c, 'json': '{}'} for c in metric_names],
|
||||
}
|
||||
|
||||
datasource = DruidDatasource(
|
||||
|
|
|
|||
|
|
@ -113,7 +113,7 @@ class ImportExportTests(SupersetTestCase):
|
|||
table.columns.append(
|
||||
TableColumn(column_name=col_name))
|
||||
for metric_name in metric_names:
|
||||
table.metrics.append(SqlMetric(metric_name=metric_name))
|
||||
table.metrics.append(SqlMetric(metric_name=metric_name, expression=''))
|
||||
return table
|
||||
|
||||
def create_druid_datasource(
|
||||
|
|
@ -130,7 +130,7 @@ class ImportExportTests(SupersetTestCase):
|
|||
DruidColumn(column_name=col_name))
|
||||
for metric_name in metric_names:
|
||||
datasource.metrics.append(DruidMetric(
|
||||
metric_name=metric_name))
|
||||
metric_name=metric_name, json='{}'))
|
||||
return datasource
|
||||
|
||||
def get_slice(self, slc_id):
|
||||
|
|
|
|||
Loading…
Reference in New Issue