chore: Re-enable skipped tests (#31795)

This commit is contained in:
Michael S. Molina 2025-01-16 15:33:22 -03:00 committed by GitHub
parent ab6045691e
commit 547b8b9314
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 62 additions and 351 deletions

View File

@ -29,10 +29,6 @@ from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_data, # noqa: F401
)
pytest.skip(
reason="These tests will be changed to use the api/v1/data", allow_module_level=True
)
class TestCache(SupersetTestCase):
def setUp(self):
@ -51,20 +47,29 @@ class TestCache(SupersetTestCase):
app.config["DATA_CACHE_CONFIG"] = {"CACHE_TYPE": "NullCache"}
cache_manager.init_app(app)
slc = self.get_slice("Top 10 Girl Name Share")
json_endpoint = "/superset/explore_json/{}/{}/".format(
slc.datasource_type, slc.datasource_id
)
slc = self.get_slice("Pivot Table v2")
# Get chart metadata
metadata = self.get_json_resp(f"api/v1/chart/{slc.id}")
query_context = json.loads(metadata.get("result").get("query_context"))
query_context["form_data"] = slc.form_data
# Request chart for the first time
resp = self.get_json_resp(
json_endpoint, {"form_data": json.dumps(slc.viz.form_data)}
"api/v1/chart/data",
json_=query_context,
)
# Request chart for the second time
resp_from_cache = self.get_json_resp(
json_endpoint, {"form_data": json.dumps(slc.viz.form_data)}
"api/v1/chart/data",
json_=query_context,
)
# restore DATA_CACHE_CONFIG
app.config["DATA_CACHE_CONFIG"] = data_cache_config
assert not resp["is_cached"]
assert not resp_from_cache["is_cached"]
assert resp.get("result")[0].get("cached_dttm") is None
assert resp_from_cache.get("result")[0].get("cached_dttm") is None
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_slice_data_cache(self):
@ -78,30 +83,45 @@ class TestCache(SupersetTestCase):
}
cache_manager.init_app(app)
slc = self.get_slice("Top 10 Girl Name Share")
json_endpoint = "/superset/explore_json/{}/{}/".format(
slc.datasource_type, slc.datasource_id
)
slc = self.get_slice("Pivot Table v2")
# Get chart metadata
metadata = self.get_json_resp(f"api/v1/chart/{slc.id}")
query_context = json.loads(metadata.get("result").get("query_context"))
query_context["form_data"] = slc.form_data
# Request chart for the first time
resp = self.get_json_resp(
json_endpoint, {"form_data": json.dumps(slc.viz.form_data)}
"api/v1/chart/data",
json_=query_context,
)
# Request chart for the second time
resp_from_cache = self.get_json_resp(
json_endpoint, {"form_data": json.dumps(slc.viz.form_data)}
"api/v1/chart/data",
json_=query_context,
)
assert not resp["is_cached"]
assert resp_from_cache["is_cached"]
result = resp.get("result")[0]
cached_result = resp_from_cache.get("result")[0]
assert result.get("cached_dttm") is None
assert cached_result.get("cached_dttm") is not None
# should fall back to default cache timeout
assert resp_from_cache["cache_timeout"] == 10
assert resp_from_cache["status"] == QueryStatus.SUCCESS
assert resp["data"] == resp_from_cache["data"]
assert resp["query"] == resp_from_cache["query"]
assert cached_result["cache_timeout"] == 10
assert cached_result["status"] == QueryStatus.SUCCESS
assert result["data"] == cached_result["data"]
assert result["query"] == cached_result["query"]
# should exist in `data_cache`
assert (
cache_manager.data_cache.get(resp_from_cache["cache_key"])["query"]
== resp_from_cache["query"]
cache_manager.data_cache.get(cached_result["cache_key"])["query"]
== cached_result["query"]
)
# should not exist in `cache`
assert cache_manager.cache.get(resp_from_cache["cache_key"]) is None
assert cache_manager.cache.get(cached_result["cache_key"]) is None
# reset cache config
app.config["DATA_CACHE_CONFIG"] = data_cache_config

View File

@ -291,6 +291,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
# rollback changes
for association in tag_associations:
if db.session.query(TaggedObject).filter_by(id=association.id).first():
db.session.delete(association)
for chart in charts:
db.session.delete(chart)
@ -1035,7 +1036,6 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
assert response == expected_response
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_get_chart(self):
"""
Chart API: Test get chart
@ -1171,7 +1171,6 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
assert result[0]["slice_name"] == self.chart.slice_name
@pytest.mark.usefixtures("create_charts_some_with_tags")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_get_charts_tag_filters(self):
"""
Chart API: Test get charts with tag filters
@ -1972,7 +1971,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
@parameterized.expand(
[
"Pivot Table v2", # Non-legacy chart
"Pivot Table v2", # Non-legacy charts
],
)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@ -2097,7 +2096,6 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
}
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_warm_up_cache_no_datasource(self) -> None:
self.login(ADMIN_USERNAME)
slc = self.get_slice("Top 10 Girl Name Share")
@ -2118,7 +2116,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
"result": [
{
"chart_id": slc.id,
"viz_error": "Chart's datasource does not exist",
"viz_error": "Chart's query context does not exist",
"viz_status": None,
},
],

View File

@ -42,9 +42,8 @@ from superset.connectors.sqla.models import SqlaTable
from superset.db_engine_specs.base import BaseEngineSpec
from superset.db_engine_specs.mssql import MssqlEngineSpec
from superset.exceptions import SupersetException
from superset.extensions import async_query_manager_factory, cache_manager
from superset.extensions import cache_manager
from superset.models import core as models
from superset.models.cache import CacheKey
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.models.sql_lab import Query
@ -136,11 +135,10 @@ class TestCore(SupersetTestCase):
resp = self.client.get("/superset/slice/-1/")
assert resp.status_code == 404
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_viz_cache_key(self):
self.login(ADMIN_USERNAME)
slc = self.get_slice("Top 10 Girl Name Share")
slc = self.get_slice("Life Expectancy VS Rural %")
viz = slc.viz
qobj = viz.query_obj()
@ -174,7 +172,6 @@ class TestCore(SupersetTestCase):
assert_admin_view_menus_in("Gamma", self.assertNotIn)
@pytest.mark.usefixtures("load_energy_table_with_slice")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_save_slice(self):
self.login(ADMIN_USERNAME)
slice_name = f"Energy Sankey" # noqa: F541
@ -206,11 +203,15 @@ class TestCore(SupersetTestCase):
slc = db.session.query(Slice).filter_by(id=new_slice_id).one()
assert slc.slice_name == copy_name
form_data["datasource"] = f"{tbl_id}__table"
form_data["slice_id"] = new_slice_id
assert slc.form_data == form_data
form_data.pop("slice_id") # We don't save the slice id when saving as
assert slc.viz.form_data == form_data
form_data = {
"adhoc_filters": [],
"datasource": f"{tbl_id}__table",
"viz_type": "sankey",
"groupby": ["source"],
"metric": "sum__value",
@ -225,7 +226,7 @@ class TestCore(SupersetTestCase):
)
slc = db.session.query(Slice).filter_by(id=new_slice_id).one()
assert slc.slice_name == new_slice_name
assert slc.viz.form_data == form_data
assert slc.form_data == form_data
# Cleanup
slices = (
@ -347,35 +348,6 @@ class TestCore(SupersetTestCase):
database.allow_run_async = False
db.session.commit()
@pytest.mark.usefixtures(
"load_birth_names_dashboard_with_slices",
"load_energy_table_with_slice",
)
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_warm_up_cache(self):
self.login(ADMIN_USERNAME)
slc = self.get_slice("Top 10 Girl Name Share")
data = self.get_json_resp(f"/superset/warm_up_cache?slice_id={slc.id}")
assert data == [
{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}
]
data = self.get_json_resp(
"/superset/warm_up_cache?table_name=energy_usage&db_name=main"
)
assert len(data) > 0
dashboard = self.get_dash_by_slug("births")
assert self.get_json_resp(
f"/superset/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}"
) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}]
assert self.get_json_resp(
f"/superset/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}&extra_filters="
+ quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}]))
) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}]
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_warm_up_cache_error(self) -> None:
self.login(ADMIN_USERNAME)
@ -399,19 +371,6 @@ class TestCore(SupersetTestCase):
}
]
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_cache_logging(self):
self.login(ADMIN_USERNAME)
store_cache_keys = app.config["STORE_CACHE_KEYS_IN_METADATA_DB"]
app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = True
slc = self.get_slice("Top 10 Girl Name Share")
self.get_json_resp(f"/superset/warm_up_cache?slice_id={slc.id}")
ck = db.session.query(CacheKey).order_by(CacheKey.id.desc()).first()
assert ck.datasource_uid == f"{slc.table.id}__table"
db.session.delete(ck)
app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = store_cache_keys
@with_feature_flags(KV_STORE=False)
def test_kv_disabled(self):
self.login(ADMIN_USERNAME)
@ -562,271 +521,6 @@ class TestCore(SupersetTestCase):
== "The dataset associated with this chart no longer exists"
)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_explore_json(self):
tbl_id = self.table_ids.get("birth_names")
form_data = {
"datasource": f"{tbl_id}__table",
"viz_type": "dist_bar",
"granularity_sqla": "ds",
"time_range": "No filter",
"metrics": ["count"],
"adhoc_filters": [],
"groupby": ["gender"],
"row_limit": 100,
}
self.login(ADMIN_USERNAME)
rv = self.client.post(
"/superset/explore_json/",
data={"form_data": json.dumps(form_data)},
)
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 200
assert data["rowcount"] == 2
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_explore_json_dist_bar_order(self):
tbl_id = self.table_ids.get("birth_names")
form_data = {
"datasource": f"{tbl_id}__table",
"viz_type": "dist_bar",
"url_params": {},
"granularity_sqla": "ds",
"time_range": 'DATEADD(DATETIME("2021-01-22T00:00:00"), -100, year) : 2021-01-22T00:00:00', # noqa: E501
"metrics": [
{
"expressionType": "SIMPLE",
"column": {
"id": 334,
"column_name": "name",
"verbose_name": "null",
"description": "null",
"expression": "",
"filterable": True,
"groupby": True,
"is_dttm": False,
"type": "VARCHAR(255)",
"python_date_format": "null",
},
"aggregate": "COUNT",
"sqlExpression": "null",
"isNew": False,
"hasCustomLabel": False,
"label": "COUNT(name)",
"optionName": "metric_xdzsijn42f9_khi4h3v3vci",
},
{
"expressionType": "SIMPLE",
"column": {
"id": 332,
"column_name": "ds",
"verbose_name": "null",
"description": "null",
"expression": "",
"filterable": True,
"groupby": True,
"is_dttm": True,
"type": "TIMESTAMP WITHOUT TIME ZONE",
"python_date_format": "null",
},
"aggregate": "COUNT",
"sqlExpression": "null",
"isNew": False,
"hasCustomLabel": False,
"label": "COUNT(ds)",
"optionName": "metric_80g1qb9b6o7_ci5vquydcbe",
},
],
"order_desc": True,
"adhoc_filters": [],
"groupby": ["name"],
"columns": [],
"row_limit": 10,
"color_scheme": "supersetColors",
"label_colors": {},
"show_legend": True,
"y_axis_format": "SMART_NUMBER",
"bottom_margin": "auto",
"x_ticks_layout": "auto",
}
self.login(ADMIN_USERNAME)
rv = self.client.post(
"/superset/explore_json/",
data={"form_data": json.dumps(form_data)},
)
data = json.loads(rv.data.decode("utf-8"))
resp = self.run_sql(
"""
SELECT count(name) AS count_name, count(ds) AS count_ds
FROM birth_names
WHERE ds >= '1921-01-22 00:00:00.000000' AND ds < '2021-01-22 00:00:00.000000'
GROUP BY name
ORDER BY count_name DESC
LIMIT 10;
""", # noqa: E501
client_id="client_id_1",
username="admin",
)
count_ds = []
count_name = []
for series in data["data"]:
if series["key"] == "COUNT(ds)":
count_ds = series["values"]
if series["key"] == "COUNT(name)":
count_name = series["values"]
for expected, actual_ds, actual_name in zip(
resp["data"], count_ds, count_name, strict=False
):
assert expected["count_name"] == actual_name["y"]
assert expected["count_ds"] == actual_ds["y"]
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_explore_json_async(self):
tbl_id = self.table_ids.get("birth_names")
form_data = {
"datasource": f"{tbl_id}__table",
"viz_type": "dist_bar",
"granularity_sqla": "ds",
"time_range": "No filter",
"metrics": ["count"],
"adhoc_filters": [],
"groupby": ["gender"],
"row_limit": 100,
}
app._got_first_request = False
async_query_manager_factory.init_app(app)
self.login(ADMIN_USERNAME)
rv = self.client.post(
"/superset/explore_json/",
data={"form_data": json.dumps(form_data)},
)
data = json.loads(rv.data.decode("utf-8"))
keys = list(data.keys())
# If chart is cached, it will return 200, otherwise 202
assert rv.status_code in {200, 202}
if rv.status_code == 202:
assert keys == [
"channel_id",
"job_id",
"user_id",
"status",
"errors",
"result_url",
]
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
def test_explore_json_async_results_format(self):
tbl_id = self.table_ids.get("birth_names")
form_data = {
"datasource": f"{tbl_id}__table",
"viz_type": "dist_bar",
"granularity_sqla": "ds",
"time_range": "No filter",
"metrics": ["count"],
"adhoc_filters": [],
"groupby": ["gender"],
"row_limit": 100,
}
app._got_first_request = False
async_query_manager_factory.init_app(app)
self.login(ADMIN_USERNAME)
rv = self.client.post(
"/superset/explore_json/?results=true",
data={"form_data": json.dumps(form_data)},
)
assert rv.status_code == 200
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch(
"superset.utils.cache_manager.CacheManager.cache",
new_callable=mock.PropertyMock,
)
@mock.patch("superset.viz.BaseViz.force_cached", new_callable=mock.PropertyMock)
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_explore_json_data(self, mock_force_cached, mock_cache):
tbl_id = self.table_ids.get("birth_names")
form_data = dict( # noqa: C418
{
"form_data": {
"datasource": f"{tbl_id}__table",
"viz_type": "dist_bar",
"granularity_sqla": "ds",
"time_range": "No filter",
"metrics": ["count"],
"adhoc_filters": [],
"groupby": ["gender"],
"row_limit": 100,
}
}
)
class MockCache:
def get(self, key):
return form_data
def set(self):
return None
mock_cache.return_value = MockCache()
mock_force_cached.return_value = False
self.login(ADMIN_USERNAME)
rv = self.client.get("/superset/explore_json/data/valid-cache-key")
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 200
assert data["rowcount"] == 2
@mock.patch(
"superset.utils.cache_manager.CacheManager.cache",
new_callable=mock.PropertyMock,
)
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
def test_explore_json_data_no_login(self, mock_cache):
tbl_id = self.table_ids.get("birth_names")
form_data = dict( # noqa: C418
{
"form_data": {
"datasource": f"{tbl_id}__table",
"viz_type": "dist_bar",
"granularity_sqla": "ds",
"time_range": "No filter",
"metrics": ["count"],
"adhoc_filters": [],
"groupby": ["gender"],
"row_limit": 100,
}
}
)
class MockCache:
def get(self, key):
return form_data
def set(self):
return None
mock_cache.return_value = MockCache()
rv = self.client.get("/superset/explore_json/data/valid-cache-key")
assert rv.status_code == 403
def test_explore_json_data_invalid_cache_key(self):
self.login(ADMIN_USERNAME)
cache_key = "invalid-cache-key"

View File

@ -879,12 +879,11 @@ class TestUtils(SupersetTestCase):
assert form_data == {}
assert slc is None
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.skip(reason="This test will be changed to use the api/v1/data")
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_log_this(self) -> None:
# TODO: Add additional scenarios.
self.login(ADMIN_USERNAME)
slc = self.get_slice("Top 10 Girl Name Share")
slc = self.get_slice("Life Expectancy VS Rural %")
dashboard_id = 1
assert slc.viz is not None