fix: Bust chart cache when metric/column is changed (#15786)
This commit is contained in:
parent
6cb91ee142
commit
e969edc451
|
|
@ -43,9 +43,12 @@ from sqlalchemy import (
|
|||
Table,
|
||||
Text,
|
||||
UniqueConstraint,
|
||||
update,
|
||||
)
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.ext.hybrid import hybrid_property
|
||||
from sqlalchemy.orm import backref, relationship, Session
|
||||
from sqlalchemy.orm.mapper import Mapper
|
||||
from sqlalchemy.sql import expression
|
||||
|
||||
from superset import conf, db, security_manager
|
||||
|
|
@ -1688,5 +1691,24 @@ class DruidDatasource(Model, BaseDatasource):
|
|||
return [{"name": k, "type": v.get("type")} for k, v in latest_metadata.items()]
|
||||
|
||||
|
||||
def update_datasource(
    _mapper: Mapper, _connection: Connection, obj: Union[DruidColumn, DruidMetric]
) -> None:
    """
    Touch the owning datasource whenever one of its metrics or columns changes.

    Issuing a value-less UPDATE against the datasource row refreshes its
    ``changed_on`` audit timestamp, which in turn invalidates the cache key of
    every chart built on top of that datasource.

    :param _mapper: Unused.
    :param _connection: Unused.
    :param obj: The metric or column that was updated.
    """
    touch_stmt = update(DruidDatasource).where(
        DruidDatasource.id == obj.datasource.id
    )
    db.session.execute(touch_stmt)
|
||||
|
||||
|
||||
# Model event wiring: re-derive permissions on datasource writes, and force a
# changed_on bump (chart-cache bust) whenever a metric or column is edited.
for _target, _event, _handler in (
    (DruidDatasource, "after_insert", security_manager.set_perm),
    (DruidDatasource, "after_update", security_manager.set_perm),
    (DruidMetric, "after_update", update_datasource),
    (DruidColumn, "after_update", update_datasource),
):
    sa.event.listen(_target, _event, _handler)
|
||||
|
|
|
|||
|
|
@ -57,8 +57,11 @@ from sqlalchemy import (
|
|||
String,
|
||||
Table,
|
||||
Text,
|
||||
update,
|
||||
)
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.orm import backref, Query, relationship, RelationshipProperty, Session
|
||||
from sqlalchemy.orm.mapper import Mapper
|
||||
from sqlalchemy.schema import UniqueConstraint
|
||||
from sqlalchemy.sql import column, ColumnElement, literal_column, table, text
|
||||
from sqlalchemy.sql.elements import ColumnClause
|
||||
|
|
@ -1667,9 +1670,24 @@ class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-at
|
|||
return extra_cache_keys
|
||||
|
||||
|
||||
def update_table(
    _mapper: Mapper, _connection: Connection, obj: Union[SqlMetric, TableColumn]
) -> None:
    """
    Touch the owning table whenever one of its metrics or columns changes.

    Issuing a value-less UPDATE against the table row refreshes its
    ``changed_on`` audit timestamp, which invalidates the cache key of every
    chart that uses the table.

    :param _mapper: Unused.
    :param _connection: Unused.
    :param obj: The metric or column that was updated.
    """
    touch_stmt = update(SqlaTable).where(SqlaTable.id == obj.table.id)
    db.session.execute(touch_stmt)
|
||||
|
||||
|
||||
# Model event wiring: re-derive permissions on table writes, and force a
# changed_on bump (chart-cache bust) whenever a metric or column is edited.
for _target, _event, _handler in (
    (SqlaTable, "after_insert", security_manager.set_perm),
    (SqlaTable, "after_update", security_manager.set_perm),
    (SqlMetric, "after_update", update_table),
    (TableColumn, "after_update", update_table),
):
    sa.event.listen(_target, _event, _handler)
|
||||
|
||||
RLSFilterRoles = Table(
|
||||
"rls_filter_roles",
|
||||
|
|
|
|||
|
|
@ -14,7 +14,9 @@
|
|||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import datetime
|
||||
import re
|
||||
import time
|
||||
from typing import Any, Dict
|
||||
|
||||
import pytest
|
||||
|
|
@ -24,6 +26,7 @@ from superset.charts.schemas import ChartDataQueryContextSchema
|
|||
from superset.common.query_context import QueryContext
|
||||
from superset.common.query_object import QueryObject
|
||||
from superset.connectors.connector_registry import ConnectorRegistry
|
||||
from superset.connectors.sqla.models import SqlMetric
|
||||
from superset.extensions import cache_manager
|
||||
from superset.utils.core import (
|
||||
AdhocMetricExpressionType,
|
||||
|
|
@ -144,6 +147,42 @@ class TestQueryContext(SupersetTestCase):
|
|||
# the new cache_key should be different due to updated datasource
|
||||
self.assertNotEqual(cache_key_original, cache_key_new)
|
||||
|
||||
def test_query_cache_key_changes_when_metric_is_updated(self):
    """
    Editing a metric on a datasource must change the query cache key.

    The after_update listener on SqlMetric bumps the datasource's changed_on,
    which is part of the cache key; this test verifies that end to end.
    """
    self.login(username="admin")
    payload = get_query_context("birth_names")

    datasource = ConnectorRegistry.get_datasource(
        datasource_type=payload["datasource"]["type"],
        datasource_id=payload["datasource"]["id"],
        session=db.session,
    )

    # Add a throwaway metric whose later edit should bust the cache. Keep a
    # reference so we edit and remove exactly this metric, not whichever
    # pre-existing metric happens to sit at datasource.metrics[0].
    metric = SqlMetric(metric_name="foo", expression="select 1;")
    datasource.metrics.append(metric)
    db.session.commit()

    try:
        # construct baseline query_cache_key
        query_context = ChartDataQueryContextSchema().load(payload)
        query_object = query_context.queries[0]
        cache_key_original = query_context.query_cache_key(query_object)

        # wait a second since mysql records timestamps in second granularity
        time.sleep(1)

        metric.expression = "select 2;"
        db.session.commit()

        # create new QueryContext with unchanged attributes, extract new
        # query_cache_key
        query_context = ChartDataQueryContextSchema().load(payload)
        query_object = query_context.queries[0]
        cache_key_new = query_context.query_cache_key(query_object)
    finally:
        # Remove only the metric added above; assigning datasource.metrics = []
        # would wipe the fixture's pre-existing metrics for every later test.
        datasource.metrics.remove(metric)
        db.session.commit()

    # the new cache_key should be different due to updated datasource
    self.assertNotEqual(cache_key_original, cache_key_new)
|
||||
|
||||
def test_query_cache_key_does_not_change_for_non_existent_or_null(self):
|
||||
self.login(username="admin")
|
||||
payload = get_query_context("birth_names", add_postprocessing_operations=True)
|
||||
|
|
|
|||
Loading…
Reference in New Issue