fix(imports): import query_context for imports with charts (#30887)

This commit is contained in:
Linden 2024-11-21 17:14:45 -06:00 committed by GitHub
parent 0b647b2dcc
commit 8905508d8f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 99 additions and 25 deletions

View File

@ -26,6 +26,7 @@ from superset.commands.chart.importers.v1.utils import import_chart
from superset.commands.database.importers.v1.utils import import_database
from superset.commands.dataset.importers.v1.utils import import_dataset
from superset.commands.importers.v1 import ImportModelsCommand
from superset.commands.utils import update_chart_config_dataset
from superset.connectors.sqla.models import SqlaTable
from superset.daos.chart import ChartDAO
from superset.databases.schemas import ImportV1DatabaseSchema
@ -86,16 +87,10 @@ class ImportChartsCommand(ImportModelsCommand):
# update datasource id, type, and name # update datasource id, type, and name
dataset = datasets[config["dataset_uuid"]] dataset = datasets[config["dataset_uuid"]]
config.update( dataset_dict = {
{ "datasource_id": dataset.id,
"datasource_id": dataset.id, "datasource_type": "table",
"datasource_type": "table", "datasource_name": dataset.table_name,
"datasource_name": dataset.table_name, }
} config = update_chart_config_dataset(config, dataset_dict)
)
config["params"].update({"datasource": dataset.uid})
if "query_context" in config:
config["query_context"] = None
import_chart(config, overwrite=overwrite) import_chart(config, overwrite=overwrite)

View File

@ -34,6 +34,7 @@ from superset.commands.dashboard.importers.v1.utils import (
from superset.commands.database.importers.v1.utils import import_database
from superset.commands.dataset.importers.v1.utils import import_dataset
from superset.commands.importers.v1 import ImportModelsCommand
from superset.commands.utils import update_chart_config_dataset
from superset.daos.dashboard import DashboardDAO
from superset.dashboards.schemas import ImportV1DashboardSchema
from superset.databases.schemas import ImportV1DatabaseSchema
@ -113,11 +114,7 @@ class ImportDashboardsCommand(ImportModelsCommand):
): ):
# update datasource id, type, and name # update datasource id, type, and name
dataset_dict = dataset_info[config["dataset_uuid"]] dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict) config = update_chart_config_dataset(config, dataset_dict)
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
if "query_context" in config:
config["query_context"] = None
chart = import_chart(config, overwrite=False) chart = import_chart(config, overwrite=False)
charts.append(chart) charts.append(chart)

View File

@ -39,6 +39,7 @@ from superset.commands.importers.v1.utils import (
    validate_metadata_type,
)
from superset.commands.query.importers.v1.utils import import_saved_query
from superset.commands.utils import update_chart_config_dataset
from superset.dashboards.schemas import ImportV1DashboardSchema
from superset.databases.schemas import ImportV1DatabaseSchema
from superset.datasets.schemas import ImportV1DatasetSchema
@ -113,11 +114,7 @@ class ImportAssetsCommand(BaseCommand):
for file_name, config in configs.items(): for file_name, config in configs.items():
if file_name.startswith("charts/"): if file_name.startswith("charts/"):
dataset_dict = dataset_info[config["dataset_uuid"]] dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict) config = update_chart_config_dataset(config, dataset_dict)
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
if "query_context" in config:
config["query_context"] = None
chart = import_chart(config, overwrite=True) chart = import_chart(config, overwrite=True)
charts.append(chart) charts.append(chart)
chart_ids[str(chart.uuid)] = chart.id chart_ids[str(chart.uuid)] = chart.id

View File

@ -17,7 +17,7 @@
from __future__ import annotations

from collections import Counter
from typing import Optional, TYPE_CHECKING from typing import Any, Optional, TYPE_CHECKING
from flask import g
from flask_appbuilder.security.sqla.models import Role, User

@ -34,6 +34,7 @@ from superset.daos.datasource import DatasourceDAO
from superset.daos.exceptions import DatasourceNotFound
from superset.daos.tag import TagDAO
from superset.tags.models import ObjectType, Tag, TagType
from superset.utils import json
from superset.utils.core import DatasourceType, get_user_id
if TYPE_CHECKING: if TYPE_CHECKING:
@ -185,3 +186,43 @@ def update_tags(
TagDAO.create_custom_tagged_objects( TagDAO.create_custom_tagged_objects(
object_type, object_id, [tag.name for tag in tags_to_add] object_type, object_id, [tag.name for tag in tags_to_add]
) )
def update_chart_config_dataset(
    config: dict[str, Any], dataset_info: dict[str, Any]
) -> dict[str, Any]:
    """
    Point a chart configuration (and its serialized query_context) at a new dataset.

    :param config: The chart configuration to rewrite (mutated in place)
    :param dataset_info: Dict with datasource_id, datasource_type, and datasource_name
    :return: The same configuration, updated
    """
    # Copy datasource id/type/name straight onto the chart config.
    config.update(dataset_info)

    uid = f"{dataset_info['datasource_id']}__{dataset_info['datasource_type']}"
    config["params"].update({"datasource": uid})

    raw_context = config.get("query_context")
    if raw_context is None:
        # No stored query context (key absent or explicitly null) — nothing to rewrite.
        return config

    try:
        context = json.loads(raw_context)
    except json.JSONDecodeError:
        # An unparseable query_context is dropped rather than left stale.
        config["query_context"] = None
        return config

    new_datasource = {
        "id": dataset_info["datasource_id"],
        "type": dataset_info["datasource_type"],
    }
    context["datasource"] = new_datasource
    if "form_data" in context:
        context["form_data"]["datasource"] = uid
    for query in context.get("queries", []):
        if "datasource" in query:
            query["datasource"] = new_datasource
    config["query_context"] = json.dumps(context)
    return config

View File

@ -139,7 +139,29 @@ class TestImportAssetsCommand(SupersetTestCase):
dataset = chart.table dataset = chart.table
assert str(dataset.uuid) == dataset_config["uuid"] assert str(dataset.uuid) == dataset_config["uuid"]
assert chart.query_context is None assert json.loads(chart.query_context) == {
"datasource": {"id": dataset.id, "type": "table"},
"force": False,
"queries": [
{
"annotation_layers": [],
"applied_time_extras": {},
"columns": [],
"custom_form_data": {},
"custom_params": {},
"extras": {"having": "", "time_grain_sqla": None, "where": ""},
"filters": [],
"metrics": [],
"order_desc": True,
"row_limit": 5000,
"time_range": " : ",
"timeseries_limit": 0,
"url_params": {},
}
],
"result_format": "json",
"result_type": "full",
}
assert json.loads(chart.params)["datasource"] == dataset.uid assert json.loads(chart.params)["datasource"] == dataset.uid
database = dataset.database database = dataset.database

View File

@ -620,7 +620,29 @@ class TestImportDashboardsCommand(SupersetTestCase):
dataset = chart.table dataset = chart.table
assert str(dataset.uuid) == dataset_config["uuid"] assert str(dataset.uuid) == dataset_config["uuid"]
assert chart.query_context is None assert json.loads(chart.query_context) == {
"datasource": {"id": dataset.id, "type": "table"},
"force": False,
"queries": [
{
"annotation_layers": [],
"applied_time_extras": {},
"columns": [],
"custom_form_data": {},
"custom_params": {},
"extras": {"having": "", "time_grain_sqla": None, "where": ""},
"filters": [],
"metrics": [],
"order_desc": True,
"row_limit": 5000,
"time_range": " : ",
"timeseries_limit": 0,
"url_params": {},
}
],
"result_format": "json",
"result_type": "full",
}
assert json.loads(chart.params)["datasource"] == dataset.uid assert json.loads(chart.params)["datasource"] == dataset.uid
database = dataset.database database = dataset.database