chore: migrate /sql_json and /results to apiv1 (#22809)

This commit is contained in:
Diego Medina 2023-01-30 13:02:34 -03:00 committed by GitHub
parent c9b7507931
commit b94052e438
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
38 changed files with 1460 additions and 880 deletions

File diff suppressed because it is too large Load Diff

View File

@ -35,7 +35,7 @@ describe('SqlLab query panel', () => {
cy.intercept({
method: 'POST',
url: '/superset/sql_json/',
url: '/api/v1/sqllab/execute/',
}).as('mockSQLResponse');
cy.get('.TableSelector .Select:eq(0)').click();
@ -148,7 +148,7 @@ describe('SqlLab query panel', () => {
});
it('Create a chart from a query', () => {
cy.intercept('/superset/sql_json/').as('queryFinished');
cy.intercept('/api/v1/sqllab/execute/').as('queryFinished');
cy.intercept('**/api/v1/explore/**').as('explore');
cy.intercept('**/api/v1/chart/**').as('chart');

View File

@ -17,6 +17,7 @@
* under the License.
*/
import shortid from 'shortid';
import rison from 'rison';
import { SupersetClient, t } from '@superset-ui/core';
import invert from 'lodash/invert';
import mapKeys from 'lodash/mapKeys';
@ -305,8 +306,13 @@ export function fetchQueryResults(query, displayLimit) {
return function (dispatch) {
dispatch(requestQueryResults(query));
const queryParams = rison.encode({
key: query.resultsKey,
rows: displayLimit || null,
});
return SupersetClient.get({
endpoint: `/superset/results/${query.resultsKey}/?rows=${displayLimit}`,
endpoint: `/api/v1/sqllab/results/?q=${queryParams}`,
parseMethod: 'json-bigint',
})
.then(({ json }) => dispatch(querySuccess(query, json)))
@ -347,7 +353,7 @@ export function runQuery(query) {
const search = window.location.search || '';
return SupersetClient.post({
endpoint: `/superset/sql_json/${search}`,
endpoint: `/api/v1/sqllab/execute/${search}`,
body: JSON.stringify(postPayload),
headers: { 'Content-Type': 'application/json' },
parseMethod: 'json-bigint',
@ -359,7 +365,11 @@ export function runQuery(query) {
})
.catch(response =>
getClientErrorObject(response).then(error => {
let message = error.error || error.statusText || t('Unknown error');
let message =
error.error ||
error.message ||
error.statusText ||
t('Unknown error');
if (message.includes('CSRF token')) {
message = t(COMMON_ERR_MESSAGES.SESSION_TIMED_OUT);
}

View File

@ -55,13 +55,13 @@ describe('async actions', () => {
afterEach(fetchMock.resetHistory);
const fetchQueryEndpoint = 'glob:*/superset/results/*';
const fetchQueryEndpoint = 'glob:*/api/v1/sqllab/results/*';
fetchMock.get(
fetchQueryEndpoint,
JSON.stringify({ data: mockBigNumber, query: { sqlEditorId: 'dfsadfs' } }),
);
const runQueryEndpoint = 'glob:*/superset/sql_json/';
const runQueryEndpoint = 'glob:*/api/v1/sqllab/execute/';
fetchMock.post(runQueryEndpoint, `{ "data": ${mockBigNumber} }`);
describe('saveQuery', () => {
@ -280,7 +280,8 @@ describe('async actions', () => {
};
it('makes the fetch request', async () => {
const runQueryEndpointWithParams = 'glob:*/superset/sql_json/?foo=bar';
const runQueryEndpointWithParams =
'glob:*/api/v1/sqllab/execute/?foo=bar';
fetchMock.post(
runQueryEndpointWithParams,
`{ "data": ${mockBigNumber} }`,

View File

@ -55,7 +55,7 @@ const MOCKED_SQL_EDITOR_HEIGHT = 500;
fetchMock.get('glob:*/api/v1/database/*', { result: [] });
fetchMock.get('glob:*/superset/tables/*', { options: [] });
fetchMock.post('glob:*/sql_json/*', { result: [] });
fetchMock.post('glob:*/sqllab/execute/*', { result: [] });
const middlewares = [thunk];
const mockStore = configureStore(middlewares);

View File

@ -688,6 +688,7 @@ export const query = {
sql: 'SELECT * FROM something',
description: 'test description',
schema: 'test schema',
resultsKey: 'test',
};
export const queryId = 'clientId2353';

View File

@ -150,6 +150,7 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods
from superset.reports.api import ReportScheduleRestApi
from superset.reports.logs.api import ReportExecutionLogRestApi
from superset.security.api import SecurityRestApi
from superset.sqllab.api import SqlLabRestApi
from superset.views.access_requests import AccessRequestsModelView
from superset.views.alerts import AlertView, ReportView
from superset.views.annotations import AnnotationLayerView
@ -219,6 +220,7 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods
appbuilder.add_api(ReportScheduleRestApi)
appbuilder.add_api(ReportExecutionLogRestApi)
appbuilder.add_api(SavedQueryRestApi)
appbuilder.add_api(SqlLabRestApi)
#
# Setup regular views
#

248
superset/sqllab/api.py Normal file
View File

@ -0,0 +1,248 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Any, cast, Dict, Optional
import simplejson as json
from flask import request
from flask_appbuilder.api import expose, protect, rison
from flask_appbuilder.models.sqla.interface import SQLAInterface
from marshmallow import ValidationError
from superset import app, is_feature_enabled
from superset.databases.dao import DatabaseDAO
from superset.extensions import event_logger
from superset.jinja_context import get_template_processor
from superset.models.sql_lab import Query
from superset.queries.dao import QueryDAO
from superset.sql_lab import get_sql_results
from superset.sqllab.command_status import SqlJsonExecutionStatus
from superset.sqllab.commands.execute import CommandResult, ExecuteSqlCommand
from superset.sqllab.commands.results import SqlExecutionResultsCommand
from superset.sqllab.exceptions import (
QueryIsForbiddenToAccessException,
SqlLabException,
)
from superset.sqllab.execution_context_convertor import ExecutionContextConvertor
from superset.sqllab.query_render import SqlQueryRenderImpl
from superset.sqllab.schemas import (
ExecutePayloadSchema,
QueryExecutionResponseSchema,
sql_lab_get_results_schema,
)
from superset.sqllab.sql_json_executer import (
ASynchronousSqlJsonExecutor,
SqlJsonExecutor,
SynchronousSqlJsonExecutor,
)
from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext
from superset.sqllab.validators import CanAccessQueryValidatorImpl
from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.views.base import json_success
from superset.views.base_api import BaseSupersetApi, requires_json, statsd_metrics
config = app.config
logger = logging.getLogger(__name__)
class SqlLabRestApi(BaseSupersetApi):
    """REST API (``/api/v1/sqllab``) replacing the legacy
    ``/superset/sql_json/`` and ``/superset/results/`` endpoints:
    ``POST /execute/`` starts a SQL query execution and ``GET /results/``
    fetches the stored results of a previous execution.
    """

    datamodel = SQLAInterface(Query)

    resource_name = "sqllab"
    allow_browser_login = True
    # Reuse the existing "Query" permission set so access control matches
    # the legacy SQL Lab views this API replaces.
    class_permission_name = "Query"

    execute_model_schema = ExecutePayloadSchema()

    apispec_parameter_schemas = {
        "sql_lab_get_results_schema": sql_lab_get_results_schema,
    }
    openapi_spec_tag = "SQL Lab"
    openapi_spec_component_schemas = (
        ExecutePayloadSchema,
        QueryExecutionResponseSchema,
    )

    @expose("/results/")
    @protect()
    @statsd_metrics
    @rison(sql_lab_get_results_schema)
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
        f".get_results",
        log_to_statsd=False,
    )
    def get_results(self, **kwargs: Any) -> FlaskResponse:
        """Gets the result of a SQL query execution
        ---
        get:
          summary: >-
            Gets the result of a SQL query execution
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/sql_lab_get_results_schema'
          responses:
            200:
              description: SQL query execution result
              content:
                application/json:
                  schema:
                    $ref: '#/components/schemas/QueryExecutionResponseSchema'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            410:
              $ref: '#/components/responses/410'
            500:
              $ref: '#/components/responses/500'
        """
        # ``key`` identifies the stored result set in the results backend;
        # ``rows`` optionally limits how many rows come back (only ``key``
        # is required by the rison schema).
        params = kwargs["rison"]
        key = params.get("key")
        rows = params.get("rows")
        result = SqlExecutionResultsCommand(key=key, rows=rows).run()
        # Serialize manually so NaN values and datetimes survive the round
        # trip; "encoding=None" returns the result without special encoding.
        return json_success(
            json.dumps(
                result, default=utils.json_iso_dttm_ser, ignore_nan=True, encoding=None
            ),
            200,
        )

    @expose("/execute/", methods=["POST"])
    @protect()
    @statsd_metrics
    @requires_json
    @event_logger.log_this_with_context(
        # Bug fix: the action label previously read ".get_results" — a
        # copy/paste from the endpoint above — which mislabelled every
        # execute event in the action log.
        action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
        f".execute_sql_query",
        log_to_statsd=False,
    )
    def execute_sql_query(self) -> FlaskResponse:
        """Executes a SQL query
        ---
        post:
          description: >-
            Starts the execution of a SQL query
          requestBody:
            description: SQL query and params
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/ExecutePayloadSchema'
          responses:
            200:
              description: Query execution result
              content:
                application/json:
                  schema:
                    $ref: '#/components/schemas/QueryExecutionResponseSchema'
            202:
              description: Query execution result, query still running
              content:
                application/json:
                  schema:
                    $ref: '#/components/schemas/QueryExecutionResponseSchema'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            # Validate the payload shape only; the loaded output is discarded
            # because SqlJsonExecutionContext consumes the raw request JSON.
            self.execute_model_schema.load(request.json)
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            log_params = {
                "user_agent": cast(Optional[str], request.headers.get("USER_AGENT"))
            }
            execution_context = SqlJsonExecutionContext(request.json)
            command = self._create_sql_json_command(execution_context, log_params)
            command_result: CommandResult = command.run()
            # 202 signals an async query that is still running; 200 means the
            # results are already present in the payload.
            response_status = (
                202
                if command_result["status"] == SqlJsonExecutionStatus.QUERY_IS_RUNNING
                else 200
            )
            # return the execution result without special encoding
            return json_success(command_result["payload"], response_status)
        except SqlLabException as ex:
            payload = {"errors": [ex.to_dict()]}
            response_status = (
                403 if isinstance(ex, QueryIsForbiddenToAccessException) else ex.status
            )
            return self.response(response_status, **payload)

    @staticmethod
    def _create_sql_json_command(
        execution_context: SqlJsonExecutionContext, log_params: Optional[Dict[str, Any]]
    ) -> ExecuteSqlCommand:
        """Wires an ExecuteSqlCommand together with its collaborators
        (DAOs, validator, renderer, executor, and context convertor)."""
        query_dao = QueryDAO()
        sql_json_executor = SqlLabRestApi._create_sql_json_executor(
            execution_context, query_dao
        )
        execution_context_convertor = ExecutionContextConvertor()
        execution_context_convertor.set_max_row_in_display(
            int(config.get("DISPLAY_MAX_ROW"))  # type: ignore
        )
        return ExecuteSqlCommand(
            execution_context,
            query_dao,
            DatabaseDAO(),
            CanAccessQueryValidatorImpl(),
            SqlQueryRenderImpl(get_template_processor),
            sql_json_executor,
            execution_context_convertor,
            config.get("SQLLAB_CTAS_NO_LIMIT"),
            log_params,
        )

    @staticmethod
    def _create_sql_json_executor(
        execution_context: SqlJsonExecutionContext, query_dao: QueryDAO
    ) -> SqlJsonExecutor:
        """Picks the synchronous or asynchronous executor based on whether
        the request asked to run async."""
        sql_json_executor: SqlJsonExecutor
        if execution_context.is_run_asynchronous():
            sql_json_executor = ASynchronousSqlJsonExecutor(query_dao, get_sql_results)
        else:
            sql_json_executor = SynchronousSqlJsonExecutor(
                query_dao,
                get_sql_results,
                config.get("SQLLAB_TIMEOUT"),  # type: ignore
                is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE"),
            )
        return sql_json_executor

View File

@ -0,0 +1,131 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-few-public-methods, too-many-arguments
from __future__ import annotations
import logging
from typing import Any, cast, Dict, Optional
from flask_babel import gettext as __, lazy_gettext as _
from superset import app, db, results_backend, results_backend_use_msgpack
from superset.commands.base import BaseCommand
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SerializationError, SupersetErrorException
from superset.models.sql_lab import Query
from superset.sqllab.utils import apply_display_max_row_configuration_if_require
from superset.utils import core as utils
from superset.utils.dates import now_as_float
from superset.views.utils import _deserialize_results_payload
config = app.config
SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = config["SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT"]
stats_logger = config["STATS_LOGGER"]
logger = logging.getLogger(__name__)
class SqlExecutionResultsCommand(BaseCommand):
    """Retrieves the stored results of a previously executed SQL Lab query
    from the results backend, optionally limited to a number of display rows.
    """

    _key: str  # results-backend key identifying the stored payload
    _rows: Optional[int]  # optional cap on the number of rows returned
    _blob: Any  # raw (compressed) payload fetched from the results backend
    _query: Query  # the Query ORM row whose results_key matches ``_key``

    def __init__(
        self,
        key: str,
        rows: Optional[int] = None,
    ) -> None:
        self._key = key
        self._rows = rows

    def validate(self) -> None:
        """Checks the backend is configured and that both the payload and
        its originating query still exist; fetches the payload as a side
        effect (stored in ``self._blob``).

        :raises SupersetErrorException: when no results backend is
            configured; with status 410 when the payload is gone/expired;
            with status 404 when the query row cannot be found.
        """
        if not results_backend:
            raise SupersetErrorException(
                SupersetError(
                    message=__("Results backend is not configured."),
                    error_type=SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR,
                    level=ErrorLevel.ERROR,
                )
            )

        read_from_results_backend_start = now_as_float()
        self._blob = results_backend.get(self._key)
        stats_logger.timing(
            "sqllab.query.results_backend_read",
            now_as_float() - read_from_results_backend_start,
        )

        if not self._blob:
            # 410 Gone: the backend entry expired or was evicted.
            raise SupersetErrorException(
                SupersetError(
                    message=__(
                        "Data could not be retrieved from the results backend. You "
                        "need to re-run the original query."
                    ),
                    error_type=SupersetErrorType.RESULTS_BACKEND_ERROR,
                    level=ErrorLevel.ERROR,
                ),
                status=410,
            )

        self._query = (
            db.session.query(Query).filter_by(results_key=self._key).one_or_none()
        )
        if self._query is None:
            raise SupersetErrorException(
                SupersetError(
                    message=__(
                        "The query associated with these results could not be found. "
                        "You need to re-run the original query."
                    ),
                    error_type=SupersetErrorType.RESULTS_BACKEND_ERROR,
                    level=ErrorLevel.ERROR,
                ),
                status=404,
            )

    def run(
        self,
    ) -> Dict[str, Any]:
        """Runs arbitrary sql and returns data as json"""
        self.validate()
        payload = utils.zlib_decompress(
            self._blob, decode=not results_backend_use_msgpack
        )
        try:
            obj = _deserialize_results_payload(
                payload, self._query, cast(bool, results_backend_use_msgpack)
            )
        except SerializationError as ex:
            raise SupersetErrorException(
                SupersetError(
                    message=__(
                        # Bug fix: message previously said "the old data
                        # stake" — corrected to "stale".
                        "Data could not be deserialized from the results backend. The "
                        "storage format might have changed, rendering the old data "
                        "stale. You need to re-run the original query."
                    ),
                    error_type=SupersetErrorType.RESULTS_BACKEND_ERROR,
                    level=ErrorLevel.ERROR,
                ),
                status=404,
            ) from ex

        if self._rows:
            # Truncate to the requested display limit (adds
            # displayLimitReached metadata when rows were dropped).
            obj = apply_display_max_row_configuration_if_require(obj, self._rows)

        return obj

View File

@ -25,7 +25,7 @@ from jinja2.meta import find_undeclared_variables
from superset import is_feature_enabled
from superset.errors import SupersetErrorType
from superset.sqllab.command import SqlQueryRender
from superset.sqllab.commands.execute import SqlQueryRender
from superset.sqllab.exceptions import SqlLabException
from superset.utils import core as utils

View File

@ -0,0 +1,83 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from marshmallow import fields, Schema
# JSON schema for the rison-encoded ``q`` parameter of
# GET /api/v1/sqllab/results/. Only ``key`` (the results-backend key) is
# validated as required; the endpoint also reads an optional ``rows`` limit
# from the same payload.
sql_lab_get_results_schema = {
    "type": "object",
    "properties": {
        "key": {"type": "string"},
    },
    "required": ["key"],
}
class ExecutePayloadSchema(Schema):
    """Request body for POST /api/v1/sqllab/execute/.

    NOTE(review): field naming mixes camelCase and snake_case — presumably
    to stay wire-compatible with the legacy /superset/sql_json/ payload;
    confirm before renaming anything.
    """

    database_id = fields.Integer(required=True)
    sql = fields.String(required=True)
    client_id = fields.String(allow_none=True)
    queryLimit = fields.Integer(allow_none=True)
    sql_editor_id = fields.String(allow_none=True)
    schema = fields.String(allow_none=True)
    tab = fields.String(allow_none=True)
    ctas_method = fields.String(allow_none=True)
    templateParams = fields.String(allow_none=True)
    tmp_table_name = fields.String(allow_none=True)
    select_as_cta = fields.Boolean(allow_none=True)
    json = fields.Boolean(allow_none=True)
    runAsync = fields.Boolean(allow_none=True)
    expand_data = fields.Boolean(allow_none=True)
class QueryResultSchema(Schema):
    """Metadata about a single query, nested under ``query`` in
    QueryExecutionResponseSchema.

    NOTE(review): camelCase field names appear to mirror the frontend's
    query object shape — confirm against the SQL Lab client before changing.
    """

    changedOn = fields.DateTime()
    changed_on = fields.String()
    dbId = fields.Integer()
    db = fields.String()  # pylint: disable=invalid-name
    endDttm = fields.Float()
    errorMessage = fields.String(allow_none=True)
    executedSql = fields.String()
    id = fields.String()
    queryId = fields.Integer()
    limit = fields.Integer()
    limitingFactor = fields.String()
    progress = fields.Integer()
    rows = fields.Integer()
    schema = fields.String()
    ctas = fields.Boolean()
    serverId = fields.Integer()
    sql = fields.String()
    sqlEditorId = fields.String()
    startDttm = fields.Float()
    state = fields.String()
    tab = fields.String()
    tempSchema = fields.String(allow_none=True)
    tempTable = fields.String(allow_none=True)
    userId = fields.Integer()
    user = fields.String()
    resultsKey = fields.String()
    trackingUrl = fields.String(allow_none=True)
    extra = fields.Dict(keys=fields.String())
class QueryExecutionResponseSchema(Schema):
    """Response body for both /api/v1/sqllab/execute/ and
    /api/v1/sqllab/results/: the result rows plus column metadata and the
    originating query's details.
    """

    status = fields.String()
    data = fields.List(fields.Dict())
    columns = fields.List(fields.Dict())
    selected_columns = fields.List(fields.Dict())
    expanded_columns = fields.List(fields.Dict())
    query = fields.Nested(QueryResultSchema)
    query_id = fields.Integer()

View File

@ -20,7 +20,7 @@ from __future__ import annotations
from typing import TYPE_CHECKING
from superset import security_manager
from superset.sqllab.command import CanAccessQueryValidator
from superset.sqllab.commands.execute import CanAccessQueryValidator
if TYPE_CHECKING:
from superset.models.sql_lab import Query

View File

@ -3988,7 +3988,7 @@
"The query associated with the results was deleted.": [
"Die den Ergebnissen zugeordnete Abfrage wurde gelöscht."
],
"The query associated with these results could not be find. You need to re-run the original query.": [
"The query associated with these results could not be found. You need to re-run the original query.": [
"Die mit diesen Ergebnissen verknüpfte Abfrage konnte nicht gefunden werden. Sie müssen die ursprüngliche Abfrage erneut ausführen."
],
"The query contains one or more malformed template parameters.": [

View File

@ -12573,7 +12573,7 @@ msgstr "Die den Ergebnissen zugeordnete Abfrage wurde gelöscht."
#: superset/views/core.py:2297
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""
"Die mit diesen Ergebnissen verknüpfte Abfrage konnte nicht gefunden "

View File

@ -11715,7 +11715,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -12358,7 +12358,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -1380,7 +1380,7 @@
"Data could not be retrieved from the results backend. You need to re-run the original query.": [
"Impossible de récupérer les données depuis le backend. Rejouez la requête originale."
],
"The query associated with these results could not be find. You need to re-run the original query.": [
"The query associated with these results could not be found. You need to re-run the original query.": [
"La requête associée à ces résultats n'a pu être trouvée. Rejouez la requête originale."
],
"You are not authorized to see this query. If you think this is an error, please reach out to your administrator.": [

View File

@ -12626,7 +12626,7 @@ msgstr "La requête associée aux résutlats a été supprimée."
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""
"La requête associée à ces résultats n'a pu être trouvée. Rejouez la "

View File

@ -12065,7 +12065,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -12033,7 +12033,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -11953,7 +11953,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -11721,7 +11721,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -1263,7 +1263,7 @@
"Data could not be retrieved from the results backend. You need to re-run the original query.": [
""
],
"The query associated with these results could not be find. You need to re-run the original query.": [
"The query associated with these results could not be found. You need to re-run the original query.": [
""
],
"You are not authorized to see this query. If you think this is an error, please reach out to your administrator.": [

View File

@ -3372,7 +3372,7 @@ msgstr ""
#: superset/views/core.py:2321
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -12597,7 +12597,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -4506,7 +4506,7 @@
"The query associated with the results was deleted.": [
"Запрос, связанный с результатами, был удален."
],
"The query associated with these results could not be find. You need to re-run the original query.": [
"The query associated with these results could not be found. You need to re-run the original query.": [
""
],
"The query contains one or more malformed template parameters.": [""],

View File

@ -14931,7 +14931,7 @@ msgstr "Запрос, связанный с результатами, был у
#: superset/views/core.py:2222
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -11738,7 +11738,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -1299,7 +1299,7 @@
"Data could not be retrieved from the results backend. You need to re-run the original query.": [
"Podatkov ni bilo mogoče pridobiti iz zalednega sistema rezultatov. Ponovno morate zagnati izvorno poizvedbo."
],
"The query associated with these results could not be find. You need to re-run the original query.": [
"The query associated with these results could not be found. You need to re-run the original query.": [
"Poizvedbe, povezane s temi rezultati, ni bilo mogoče najti. Ponovno morate zagnati izvorno poizvedbo."
],
"You are not authorized to see this query. If you think this is an error, please reach out to your administrator.": [

View File

@ -3254,7 +3254,7 @@ msgstr ""
#: superset/views/core.py:2207
msgid ""
"The query associated with these results could not be find. You need to re-run the "
"The query associated with these results could not be found. You need to re-run the "
"original query."
msgstr ""
"Poizvedbe, povezane s temi rezultati, ni bilo mogoče najti. Ponovno morate "

View File

@ -12235,7 +12235,7 @@ msgstr ""
#: superset/views/core.py:2280
msgid ""
"The query associated with these results could not be find. You need to "
"The query associated with these results could not be found. You need to "
"re-run the original query."
msgstr ""

View File

@ -165,6 +165,7 @@ class BaseSupersetApiMixin:
"401": {"description": "Unauthorized", "content": error_payload_content},
"403": {"description": "Forbidden", "content": error_payload_content},
"404": {"description": "Not found", "content": error_payload_content},
"410": {"description": "Gone", "content": error_payload_content},
"422": {
"description": "Could not process entity",
"content": error_payload_content,
@ -210,7 +211,7 @@ class BaseSupersetApiMixin:
self.timing_stats("time", key, time_delta)
class BaseSupersetApi(BaseApi, BaseSupersetApiMixin):
class BaseSupersetApi(BaseSupersetApiMixin, BaseApi):
...

View File

@ -107,8 +107,8 @@ from superset.security.analytics_db_safety import check_sqlalchemy_uri
from superset.sql_lab import get_sql_results
from superset.sql_parse import ParsedQuery
from superset.sql_validators import get_validator_by_name
from superset.sqllab.command import CommandResult, ExecuteSqlCommand
from superset.sqllab.command_status import SqlJsonExecutionStatus
from superset.sqllab.commands.execute import CommandResult, ExecuteSqlCommand
from superset.sqllab.exceptions import (
QueryIsForbiddenToAccessException,
SqlLabException,
@ -2090,6 +2090,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@has_access_api
@expose("/results/<key>/")
@event_logger.log_this
@deprecated()
def results(self, key: str) -> FlaskResponse:
return self.results_exec(key)
@ -2133,7 +2134,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
raise SupersetErrorException(
SupersetError(
message=__(
"The query associated with these results could not be find. "
"The query associated with these results could not be found. "
"You need to re-run the original query."
),
error_type=SupersetErrorType.RESULTS_BACKEND_ERROR,
@ -2313,6 +2314,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@handle_api_exception
@event_logger.log_this
@expose("/sql_json/", methods=["POST"])
@deprecated()
def sql_json(self) -> FlaskResponse:
errors = SqlJsonPayloadSchema().validate(request.json)
if errors:
@ -2352,7 +2354,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
SqlQueryRenderImpl(get_template_processor),
sql_json_executor,
execution_context_convertor,
config.get("SQLLAB_CTAS_NO_LIMIT"), # type: ignore
config.get("SQLLAB_CTAS_NO_LIMIT"),
log_params,
)

View File

@ -347,7 +347,7 @@ class SupersetTestCase(TestCase):
json_payload["schema"] = schema
resp = self.get_json_resp(
"/superset/sql_json/", raise_on_error=False, json_=json_payload
"/api/v1/sqllab/execute/", raise_on_error=False, json_=json_payload
)
if raise_on_error and "error" in resp:
raise Exception("run_sql failed")

View File

@ -96,7 +96,7 @@ def run_sql(
):
db_id = get_example_database().id
return test_client.post(
"/superset/sql_json/",
"/api/v1/sqllab/execute/",
json=dict(
database_id=db_id,
sql=sql,

View File

@ -0,0 +1,178 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
import datetime
import json
import random
import pytest
import prison
from sqlalchemy.sql import func
from unittest import mock
from tests.integration_tests.test_app import app
from superset import sql_lab
from superset.common.db_query_status import QueryStatus
from superset.models.core import Database
from superset.utils.database import get_example_database, get_main_database
from superset.utils import core as utils
from superset.models.sql_lab import Query
from tests.integration_tests.base_tests import SupersetTestCase
QUERIES_FIXTURE_COUNT = 10
class TestSqlLabApi(SupersetTestCase):
    """Integration tests for the SQL Lab REST endpoints
    (``/api/v1/sqllab/execute/`` and ``/api/v1/sqllab/results/``)."""

    def _assert_execute_400(self, data, expected_message):
        # Helper: POST ``data`` to the execute endpoint and assert a 400
        # response whose JSON body is ``{"message": expected_message}``.
        rv = self.client.post(
            "/api/v1/sqllab/execute/",
            json=data,
        )
        resp_data = json.loads(rv.data.decode("utf-8"))
        self.assertDictEqual(resp_data, {"message": expected_message})
        self.assertEqual(rv.status_code, 400)

    @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
    def test_execute_required_params(self):
        """Omitting ``sql`` and/or ``database_id`` returns a 400 with a
        per-field "Missing data for required field." message."""
        self.login()
        client_id = "{}".format(random.getrandbits(64))[:10]

        # both required fields missing
        self._assert_execute_400(
            {"client_id": client_id},
            {
                "sql": ["Missing data for required field."],
                "database_id": ["Missing data for required field."],
            },
        )
        # database_id missing
        self._assert_execute_400(
            {"sql": "SELECT 1", "client_id": client_id},
            {"database_id": ["Missing data for required field."]},
        )
        # sql missing
        self._assert_execute_400(
            {"database_id": 1, "client_id": client_id},
            {"sql": ["Missing data for required field."]},
        )

    @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
    def test_execute_valid_request(self) -> None:
        """A well-formed execute request returns 200 with status "success"."""
        from superset import sql_lab as core

        core.results_backend = mock.Mock()
        core.results_backend.get.return_value = {}

        self.login()
        client_id = "{}".format(random.getrandbits(64))[:10]
        data = {"sql": "SELECT 1", "database_id": 1, "client_id": client_id}
        rv = self.client.post(
            "/api/v1/sqllab/execute/",
            json=data,
        )
        resp_data = json.loads(rv.data.decode("utf-8"))

        self.assertEqual(resp_data.get("status"), "success")
        self.assertEqual(rv.status_code, 200)

    @mock.patch(
        "tests.integration_tests.superset_test_custom_template_processors.datetime"
    )
    @mock.patch("superset.sqllab.api.get_sql_results")
    def test_execute_custom_templated(self, sql_lab_mock, mock_dt) -> None:
        """Custom template macros (e.g. ``$DATE()``) are rendered before the
        SQL reaches the execution layer."""
        mock_dt.utcnow = mock.Mock(return_value=datetime.datetime(1970, 1, 1))
        self.login()
        sql = "SELECT '$DATE()' as test"
        resp = {
            "status": QueryStatus.SUCCESS,
            "query": {"rows": 1},
            "data": [{"test": "'1970-01-01'"}],
        }
        sql_lab_mock.return_value = resp

        dbobj = self.create_fake_db_for_macros()
        try:
            json_payload = dict(database_id=dbobj.id, sql=sql)
            self.get_json_resp(
                "/api/v1/sqllab/execute/", raise_on_error=False, json_=json_payload
            )
            assert sql_lab_mock.called
            self.assertEqual(
                sql_lab_mock.call_args[0][1], "SELECT '1970-01-01' as test"
            )
        finally:
            # clean up the fake database even if an assertion above fails,
            # so other tests are not polluted by the leftover fixture
            self.delete_fake_db_for_macros()

    @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
    def test_get_results_with_display_limit(self):
        """``/api/v1/sqllab/results/`` honors the ``rows`` display limit and
        flags truncation via ``displayLimitReached``."""
        from superset.sqllab.commands import results as command

        command.results_backend = mock.Mock()
        self.login()

        data = [{"col_0": i} for i in range(100)]
        payload = {
            "status": QueryStatus.SUCCESS,
            "query": {"rows": 100},
            "data": data,
        }
        # limit results to 1
        expected_key = {"status": "success", "query": {"rows": 100}, "data": data}
        limited_data = data[:1]
        expected_limited = {
            "status": "success",
            "query": {"rows": 100},
            "data": limited_data,
            "displayLimitReached": True,
        }
        query_mock = mock.Mock()
        query_mock.sql = "SELECT *"
        query_mock.database = 1
        query_mock.schema = "superset"

        # do not apply msgpack serialization; restore the original setting in
        # ``finally`` so a failing assertion cannot leak config state into
        # other tests
        use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"]
        app.config["RESULTS_BACKEND_USE_MSGPACK"] = False
        try:
            serialized_payload = sql_lab._serialize_payload(payload, False)
            compressed = utils.zlib_compress(serialized_payload)
            command.results_backend.get.return_value = compressed

            with mock.patch(
                "superset.sqllab.commands.results.db"
            ) as mock_superset_db:
                mock_superset_db.session.query().filter_by().one_or_none.return_value = (
                    query_mock
                )
                # get all results
                arguments = {"key": "key"}
                result_key = json.loads(
                    self.get_resp(
                        f"/api/v1/sqllab/results/?q={prison.dumps(arguments)}"
                    )
                )
                # get results limited to a single row
                arguments = {"key": "key", "rows": 1}
                result_limited = json.loads(
                    self.get_resp(
                        f"/api/v1/sqllab/results/?q={prison.dumps(arguments)}"
                    )
                )

            self.assertEqual(result_key, expected_key)
            self.assertEqual(result_limited, expected_limited)
        finally:
            app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack

View File

@ -0,0 +1,161 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from unittest import mock, skip
from unittest.mock import patch
import pytest
from superset import db, sql_lab
from superset.common.db_query_status import QueryStatus
from superset.errors import SupersetErrorType
from superset.exceptions import SerializationError, SupersetErrorException
from superset.models.core import Database
from superset.models.sql_lab import Query
from superset.sqllab.commands import results
from superset.utils import core as utils
from tests.integration_tests.base_tests import SupersetTestCase
class TestSqlExecutionResultsCommand(SupersetTestCase):
@mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
def test_validation_no_results_backend(self) -> None:
results.results_backend = None
command = results.SqlExecutionResultsCommand("test", 1000)
with pytest.raises(SupersetErrorException) as ex_info:
command.run()
assert (
ex_info.value.error.error_type
== SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR
)
@mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
def test_validation_data_cannot_be_retrieved(self) -> None:
results.results_backend = mock.Mock()
results.results_backend.get.return_value = None
command = results.SqlExecutionResultsCommand("test", 1000)
with pytest.raises(SupersetErrorException) as ex_info:
command.run()
assert ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR
@mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
def test_validation_query_not_found(self) -> None:
data = [{"col_0": i} for i in range(100)]
payload = {
"status": QueryStatus.SUCCESS,
"query": {"rows": 100},
"data": data,
}
serialized_payload = sql_lab._serialize_payload(payload, False)
compressed = utils.zlib_compress(serialized_payload)
results.results_backend = mock.Mock()
results.results_backend.get.return_value = compressed
command = results.SqlExecutionResultsCommand("test", 1000)
with pytest.raises(SupersetErrorException) as ex_info:
command.run()
assert ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR
@mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
def test_validation_query_not_found2(self) -> None:
data = [{"col_0": i} for i in range(104)]
payload = {
"status": QueryStatus.SUCCESS,
"query": {"rows": 104},
"data": data,
}
serialized_payload = sql_lab._serialize_payload(payload, False)
compressed = utils.zlib_compress(serialized_payload)
results.results_backend = mock.Mock()
results.results_backend.get.return_value = compressed
database = Database(database_name="my_database", sqlalchemy_uri="sqlite://")
query_obj = Query(
client_id="foo",
database=database,
tab_name="test_tab",
sql_editor_id="test_editor_id",
sql="select * from bar",
select_sql="select * from bar",
executed_sql="select * from bar",
limit=100,
select_as_cta=False,
rows=104,
error_message="none",
results_key="test_abc",
)
db.session.add(database)
db.session.add(query_obj)
with mock.patch(
"superset.views.utils._deserialize_results_payload",
side_effect=SerializationError(),
):
with pytest.raises(SupersetErrorException) as ex_info:
command = results.SqlExecutionResultsCommand("test", 1000)
command.run()
assert (
ex_info.value.error.error_type
== SupersetErrorType.RESULTS_BACKEND_ERROR
)
@mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False)
def test_run_succeeds(self) -> None:
data = [{"col_0": i} for i in range(104)]
payload = {
"status": QueryStatus.SUCCESS,
"query": {"rows": 104},
"data": data,
}
serialized_payload = sql_lab._serialize_payload(payload, False)
compressed = utils.zlib_compress(serialized_payload)
results.results_backend = mock.Mock()
results.results_backend.get.return_value = compressed
database = Database(database_name="my_database", sqlalchemy_uri="sqlite://")
query_obj = Query(
client_id="foo",
database=database,
tab_name="test_tab",
sql_editor_id="test_editor_id",
sql="select * from bar",
select_sql="select * from bar",
executed_sql="select * from bar",
limit=100,
select_as_cta=False,
rows=104,
error_message="none",
results_key="test_abc",
)
db.session.add(database)
db.session.add(query_obj)
command = results.SqlExecutionResultsCommand("test_abc", 1000)
result = command.run()
assert result.get("status") == "success"
assert result.get("query").get("rows") == 104
assert result.get("data") == data