feat: Synchronously return cached charts (#15157)

* feat: Synchronously return cached charts

* Fix lint issue

* Fix python lint error

* Change getChartDataRequest to return response

* Fix lint errors

* Add test

* explore_json: skip cached data check for forced refresh

Co-authored-by: Rob DiCiuccio <rob.diciuccio@gmail.com>
This commit is contained in:
Ben Reinhart 2021-06-22 10:00:57 -07:00 committed by GitHub
parent 4d48f0426d
commit ab153e66cc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 181 additions and 83 deletions

View File

@@ -145,11 +145,12 @@ const legacyChartDataRequest = async (
'GET' && isFeatureEnabled(FeatureFlag.CLIENT_CACHE)
? SupersetClient.get
: SupersetClient.post;
return clientMethod(querySettings).then(({ json }) =>
return clientMethod(querySettings).then(({ json, response }) =>
// Make the legacy endpoint return a payload that corresponds to the
// V1 chart data endpoint response signature.
({
result: [json],
response,
json: { result: [json] },
}),
);
};
@@ -196,7 +197,8 @@ const v1ChartDataRequest = async (
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload),
};
return SupersetClient.post(querySettings).then(({ json }) => json);
return SupersetClient.post(querySettings);
};
export async function getChartDataRequest({
@@ -390,14 +392,25 @@ export function exploreJSON(
dispatch(chartUpdateStarted(controller, formData, key));
const chartDataRequestCaught = chartDataRequest
.then(response => {
const queriesResponse = response.result;
.then(({ response, json }) => {
if (isFeatureEnabled(FeatureFlag.GLOBAL_ASYNC_QUERIES)) {
// deal with getChartDataRequest transforming the response data
const result = 'result' in response ? response.result[0] : response;
return waitForAsyncData(result);
const result = 'result' in json ? json.result[0] : json;
switch (response.status) {
case 200:
// Query results returned synchronously, meaning query was already cached.
return Promise.resolve([result]);
case 202:
// Query is running asynchronously and we must await the results
return waitForAsyncData(result);
default:
throw new Error(
`Received unexpected response status (${response.status}) while fetching chart data`,
);
}
}
return queriesResponse;
return json.result;
})
.then(queriesResponse => {
queriesResponse.forEach(resultItem =>
@@ -541,11 +554,11 @@ export function postChartFormData(
export function redirectSQLLab(formData) {
return dispatch => {
getChartDataRequest({ formData, resultFormat: 'json', resultType: 'query' })
.then(({ result }) => {
.then(({ json }) => {
const redirectUrl = '/superset/sqllab/';
const payload = {
datasourceKey: formData.datasource,
sql: result[0].query,
sql: json.result[0].query,
};
postForm(redirectUrl, payload);
})

View File

@@ -118,25 +118,36 @@ const FilterValue: React.FC<FilterProps> = ({
requestParams: { dashboardId: 0 },
ownState: filterOwnState,
})
.then(response => {
.then(({ response, json }) => {
if (isFeatureEnabled(FeatureFlag.GLOBAL_ASYNC_QUERIES)) {
// deal with getChartDataRequest transforming the response data
const result = 'result' in response ? response.result[0] : response;
waitForAsyncData(result)
.then((asyncResult: ChartDataResponseResult[]) => {
setIsRefreshing(false);
setIsLoading(false);
setState(asyncResult);
})
.catch((error: ClientErrorObject) => {
setError(
error.message || error.error || t('Check configuration'),
);
setIsRefreshing(false);
setIsLoading(false);
});
const result = 'result' in json ? json.result[0] : json;
if (response.status === 200) {
setIsRefreshing(false);
setIsLoading(false);
setState([result]);
} else if (response.status === 202) {
waitForAsyncData(result)
.then((asyncResult: ChartDataResponseResult[]) => {
setIsRefreshing(false);
setIsLoading(false);
setState(asyncResult);
})
.catch((error: ClientErrorObject) => {
setError(
error.message || error.error || t('Check configuration'),
);
setIsRefreshing(false);
setIsLoading(false);
});
} else {
throw new Error(
`Received unexpected response status (${response.status}) while fetching chart data`,
);
}
} else {
setState(response.result);
setState(json.result);
setError('');
setIsRefreshing(false);
setIsLoading(false);

View File

@@ -421,24 +421,35 @@ const FiltersConfigForm = (
force,
requestParams: { dashboardId: 0 },
})
.then(response => {
.then(({ response, json }) => {
if (isFeatureEnabled(FeatureFlag.GLOBAL_ASYNC_QUERIES)) {
// deal with getChartDataRequest transforming the response data
const result = 'result' in response ? response.result[0] : response;
waitForAsyncData(result)
.then((asyncResult: ChartDataResponseResult[]) => {
setNativeFilterFieldValuesWrapper({
defaultValueQueriesData: asyncResult,
});
})
.catch((error: ClientErrorObject) => {
setError(
error.message || error.error || t('Check configuration'),
);
const result = 'result' in json ? json.result[0] : json;
if (response.status === 200) {
setNativeFilterFieldValuesWrapper({
defaultValueQueriesData: [result],
});
} else if (response.status === 202) {
waitForAsyncData(result)
.then((asyncResult: ChartDataResponseResult[]) => {
setNativeFilterFieldValuesWrapper({
defaultValueQueriesData: asyncResult,
});
})
.catch((error: ClientErrorObject) => {
setError(
error.message || error.error || t('Check configuration'),
);
});
} else {
throw new Error(
`Received unexpected response status (${response.status}) while fetching chart data`,
);
}
} else {
setNativeFilterFieldValuesWrapper({
defaultValueQueriesData: response.result,
defaultValueQueriesData: json.result,
});
}
})

View File

@@ -149,9 +149,9 @@ export const DataTablesPane = ({
resultType,
ownState,
})
.then(response => {
.then(({ json }) => {
// Only displaying the first query is currently supported
const result = response.result[0];
const result = json.result[0];
setData(prevData => ({ ...prevData, [resultType]: result.data }));
setIsLoading(prevIsLoading => ({
...prevIsLoading,

View File

@@ -58,9 +58,9 @@ const ViewQueryModal: React.FC<Props> = props => {
resultFormat: 'json',
resultType,
})
.then(response => {
.then(({ json }) => {
// Only displaying the first query is currently supported
const result = response.result[0];
const result = json.result[0];
setLanguage(result.language);
setQuery(result.query);
setIsLoading(false);

View File

@@ -480,17 +480,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
except ChartBulkDeleteFailedError as ex:
return self.response_422(message=str(ex))
def get_data_response(
self, command: ChartDataCommand, force_cached: bool = False
) -> Response:
try:
result = command.run(force_cached=force_cached)
except ChartDataCacheLoadError as exc:
return self.response_422(message=exc.message)
except ChartDataQueryFailedError as exc:
return self.response_400(message=exc.message)
def send_chart_response(self, result: Dict[Any, Any]) -> Response:
result_format = result["query_context"].result_format
if result_format == ChartDataResultFormat.CSV:
# Verify user has permission to export CSV file
if not security_manager.can_access("can_csv", "Superset"):
@@ -512,6 +504,18 @@ class ChartRestApi(BaseSupersetModelRestApi):
return self.response_400(message=f"Unsupported result_format: {result_format}")
def get_data_response(
self, command: ChartDataCommand, force_cached: bool = False
) -> Response:
try:
result = command.run(force_cached=force_cached)
except ChartDataCacheLoadError as exc:
return self.response_422(message=exc.message)
except ChartDataQueryFailedError as exc:
return self.response_400(message=exc.message)
return self.send_chart_response(result)
@expose("/data", methods=["POST"])
@protect()
@statsd_metrics
@@ -589,7 +593,22 @@ class ChartRestApi(BaseSupersetModelRestApi):
and query_context.result_format == ChartDataResultFormat.JSON
and query_context.result_type == ChartDataResultType.FULL
):
# First, look for the chart query results in the cache.
try:
result = command.run(force_cached=True)
except ChartDataCacheLoadError:
result = None # type: ignore
already_cached_result = result is not None
# If the chart query has already been cached, return it immediately.
if already_cached_result:
return self.send_chart_response(result)
# Otherwise, kick off a background job to run the chart query.
# Clients will either poll or be notified of query completion,
# at which point they will call the /data/<cache_key> endpoint
# to retrieve the results.
try:
command.validate_async_request(request)
except AsyncQueryTokenException:

View File

@@ -434,6 +434,10 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
def get_samples(self, viz_obj: BaseViz) -> FlaskResponse:
return self.json_response({"data": viz_obj.get_samples()})
@staticmethod
def send_data_payload_response(viz_obj: BaseViz, payload: Any) -> FlaskResponse:
return data_payload_response(*viz_obj.payload_json_and_has_error(payload))
def generate_json(
self, viz_obj: BaseViz, response_type: Optional[str] = None
) -> FlaskResponse:
@@ -452,7 +456,7 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
return self.get_samples(viz_obj)
payload = viz_obj.get_payload()
return data_payload_response(*viz_obj.payload_json_and_has_error(payload))
return self.send_data_payload_response(viz_obj, payload)
@event_logger.log_this
@api
@@ -609,6 +613,29 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
is_feature_enabled("GLOBAL_ASYNC_QUERIES")
and response_type == utils.ChartDataResultFormat.JSON
):
# First, look for the chart query results in the cache.
try:
viz_obj = get_viz(
datasource_type=cast(str, datasource_type),
datasource_id=datasource_id,
form_data=form_data,
force_cached=True,
force=force,
)
payload = viz_obj.get_payload()
except CacheLoadError:
payload = None # type: ignore
already_cached_result = payload is not None
# If the chart query has already been cached, return it immediately.
if already_cached_result:
return self.send_data_payload_response(viz_obj, payload)
# Otherwise, kick off a background job to run the chart query.
# Clients will either poll or be notified of query completion,
# at which point they will call the /explore_json/data/<cache_key>
# endpoint to retrieve the results.
try:
async_channel_id = async_query_manager.parse_jwt_from_request(
request

View File

@@ -23,6 +23,7 @@ from typing import Optional
from unittest import mock
from zipfile import is_zipfile, ZipFile
from tests.conftest import with_feature_flags
from superset.models.sql_lab import Query
from tests.insert_chart_mixin import InsertChartMixin
from tests.fixtures.birth_names_dashboard import load_birth_names_dashboard_with_slices
@@ -46,7 +47,12 @@ from superset.models.dashboard import Dashboard
from superset.models.reports import ReportSchedule, ReportScheduleType
from superset.models.slice import Slice
from superset.utils import core as utils
from superset.utils.core import AnnotationType, get_example_database, get_main_database
from superset.utils.core import (
AnnotationType,
ChartDataResultFormat,
get_example_database,
get_main_database,
)
from tests.base_api_tests import ApiOwnersTestCaseMixin
@@ -1386,10 +1392,8 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
if get_example_database().backend != "presto":
assert "('boy' = 'boy')" in result
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
@with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_chart_data_async(self):
"""
Chart data API: Test chart data query (async)
@@ -1405,10 +1409,35 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
keys, ["channel_id", "job_id", "user_id", "status", "errors", "result_url"]
)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
@with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_chart_data_async_cached_sync_response(self):
"""
Chart data API: Test chart data query returns results synchronously
when results are already cached.
"""
async_query_manager.init_app(app)
self.login(username="admin")
class QueryContext:
result_format = ChartDataResultFormat.JSON
cmd_run_val = {
"query_context": QueryContext(),
"queries": [{"query": "select * from foo"}],
}
with mock.patch.object(
ChartDataCommand, "run", return_value=cmd_run_val
) as patched_run:
request_payload = get_query_context("birth_names")
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
self.assertEqual(rv.status_code, 200)
data = json.loads(rv.data.decode("utf-8"))
patched_run.assert_called_once_with(force_cached=True)
self.assertEqual(data, {"result": [{"query": "select * from foo"}]})
@with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_chart_data_async_results_type(self):
"""
@@ -1421,10 +1450,8 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
self.assertEqual(rv.status_code, 200)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
@with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_chart_data_async_invalid_token(self):
"""
Chart data API: Test chart data query (async)
@@ -1439,10 +1466,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
self.assertEqual(rv.status_code, 401)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
@with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
@mock.patch.object(ChartDataCommand, "load_query_context_from_cache")
def test_chart_data_cache(self, load_qc_mock):
"""
@@ -1469,11 +1493,9 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
self.assertEqual(rv.status_code, 200)
self.assertEqual(data["result"][0]["rowcount"], expected_row_count)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
@with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
@mock.patch.object(ChartDataCommand, "load_query_context_from_cache")
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_chart_data_cache_run_failed(self, load_qc_mock):
"""
Chart data cache API: Test chart data async cache request with run failure
@@ -1490,11 +1512,9 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
self.assertEqual(rv.status_code, 422)
self.assertEqual(data["message"], "Error loading data from cache")
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
@with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
@mock.patch.object(ChartDataCommand, "load_query_context_from_cache")
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_chart_data_cache_no_login(self, load_qc_mock):
"""
Chart data cache API: Test chart data async cache request (no login)
@@ -1514,10 +1534,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
self.assertEqual(rv.status_code, 401)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
GLOBAL_ASYNC_QUERIES=True,
)
@with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
def test_chart_data_cache_key_error(self):
"""
Chart data cache API: Test chart data async cache request with invalid cache key